text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
"""
Copyright 2009 Zepheira
Provides basic functions for interacting with Yammer via the Yammer API v1:
https://www.yammer.com/api_doc.html
Depends on the installation of a Python OAuth library:
http://code.google.com/p/oauth/
http://oauth.googlecode.com/svn/code/python/
This will behave like a bot. To that end, there should probably be
a 'member' of the organization that is clearly not associated with
any actual user. bot@example.org, for instance. All public messages
will be available to the bot, and the bot can broadcast publically.
So the OAuth consumer in this case is also acting in concert with the
bot as an authenticated user. This is not a particularly safe method
security-wise; anybody who seizes on the configuration for this code
can immediately start reading from and posting to a Yammer group.
There are security concerns with securing any secret; this code
contains and relies on two secrets.
Usage:
client = YammerClient()
messages_xml = client.get_messages()
# Users of libyammer should be able to keep track of the
# latest message ID in the group to avoid overextending
# Yammer resources.
latest_messages_xml = client.get_messages(newest_msg_id)
message_xml = client.post_message('Talking to all my Yammer cohort')
To procure bot keys, run client.bootstrap_keys() and be sure to have
a browser nearby logged in to Yammer as the appropriate bot user.
The keys should reside in this file.
Written by Ryan Lee <ryanlee@zepheira.com>
Based heavily on oauth client/consumer demo code and Yammer documentation.
"""
# TODO: conf file for bot user keys
# TODO: make library friendly, with a setup.py and eggs
import httplib
import time
import oauth.oauth as oauth
# application-specific information
# NOTE(review): live OAuth consumer and bot credentials are committed in
# source, as the module docstring itself warns; anyone with this file can
# read from and post to the associated Yammer network. These should move
# to a config file kept out of version control (see the TODO above).
CONSUMER_KEY = 'OjQGqHtQIR54dyBHnMZOgQ'
CONSUMER_SECRET = 'ORUaT6j7av3bvaJxZPo4TPDBiyAt0wXwjp0a9KSUF0'
BOT_KEY = 'Pbej35AMuWmXbIkZmWQVRA'
BOT_SECRET = 'xyH1NPflGN9ks1MvieIqdzkkaAQZy1bZS01T0FFI8o'
# Yammer URLs (API v1 endpoints and the three OAuth-dance endpoints)
REQUEST_TOKEN_URL = 'https://www.yammer.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://www.yammer.com/oauth/access_token'
AUTHORIZATION_URL = 'https://www.yammer.com/oauth/authorize'
GET_MESSAGES_URL = 'https://www.yammer.com/api/v1/messages.xml'
POST_MESSAGE_URL = 'https://www.yammer.com/api/v1/messages/'
# Base fetch class
# Base fetch class
class OAuthClient(oauth.OAuthClient):
    """Thin HTTPS client for the OAuth token dance and signed API access.

    Wraps a single httplib.HTTPSConnection; httplib connections are not
    thread-safe, so keep one instance per thread.
    """

    def __init__(self, server, port=httplib.HTTP_PORT,
                 request_token_url=REQUEST_TOKEN_URL,
                 access_token_url=ACCESS_TOKEN_URL,
                 authorization_url=AUTHORIZATION_URL):
        self.server = server
        self.port = port
        self.request_token_url = request_token_url
        self.access_token_url = access_token_url
        self.authorization_url = authorization_url
        self.connection = httplib.HTTPSConnection("%s:%d" % (self.server, self.port))

    def _fetch_token(self, oauth_request, url):
        # Shared helper (fetch_request_token/fetch_access_token were
        # identical except for the URL): issue the signed request and
        # parse the response body as an OAuth token.
        self.connection.request(oauth_request.http_method, url,
                                headers=oauth_request.to_header())
        response = self.connection.getresponse()
        return oauth.OAuthToken.from_string(response.read())

    def fetch_request_token(self, oauth_request):
        """Step 1: obtain an unauthorized request token."""
        return self._fetch_token(oauth_request, self.request_token_url)

    def fetch_access_token(self, oauth_request):
        """Step 3: exchange the authorized request token for an access token."""
        return self._fetch_token(oauth_request, self.access_token_url)

    def authorize_token(self, oauth_request):
        """Step 2: hit the authorization URL; return the raw response body."""
        self.connection.request(oauth_request.http_method, oauth_request.to_url())
        response = self.connection.getresponse()
        return response.read()

    def access_resource(self, oauth_request):
        """Perform a signed API request and return the raw response body.

        POST sends the OAuth parameters form-encoded in the body; all
        other methods carry them in the query string.
        """
        if oauth_request.http_method == 'POST':
            headers = {'Content-Type': 'application/x-www-form-urlencoded'}
            self.connection.request(oauth_request.http_method, oauth_request.http_url,
                                    body=oauth_request.to_postdata(), headers=headers)
        else:
            self.connection.request(oauth_request.http_method, oauth_request.to_url())
        response = self.connection.getresponse()
        return response.read()
class YammerClient():
def __init__(self):
self.client = OAuthClient('www.yammer.com', 443)
self.consumer = oauth.OAuthConsumer(CONSUMER_KEY, CONSUMER_SECRET)
self.bot_token = oauth.OAuthToken(BOT_KEY, BOT_SECRET)
self.sha1 = oauth.OAuthSignatureMethod_HMAC_SHA1()
def bootstrap_keys(self):
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, http_url=self.client.request_token_url)
oauth_request.sign_request(self.sha1, self.consumer, None)
token = self.client.fetch_request_token(oauth_request)
print 'Visit the following URL in your browser and authorize Orth,'
print 'then return here with a verification code.'
print '%s%s%s' % (AUTHORIZATION_URL, '?oauth_token=', str(token.key))
verifier = raw_input('Enter 4-digit verification code: ')
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=token, verifier=verifier, http_url=self.client.access_token_url)
oauth_request.sign_request(self.sha1, self.consumer, token)
token = self.client.fetch_access_token(oauth_request)
print 'Copy and paste the following, replacing their values in the'
print 'header of this script.'
print 'BOT_KEY = %s' % str(token.key)
print 'BOT_SECRET = %s' % str(token.secret)
def get_messages(self, latest_mid):
parameters = { 'newer_than': latest_mid }
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=self.bot_token, http_method='GET', http_url=GET_MESSAGES_URL, parameters=parameters)
oauth_request.sign_request(self.sha1, self.consumer, self.bot_token)
response = self.client.access_resource(oauth_request)
print response
def post_message(self, msg_text):
parameters = { 'body': msg_text }
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=self.bot_token, http_method='POST', http_url=POST_MESSAGE_URL, parameters=parameters)
oauth_request.sign_request(self.sha1, self.consumer, self.bot_token)
response = self.client.access_resource(oauth_request)
print response
if __name__ == '__main__':
y = YammerClient()
r = y.get_messages()
print r
|
dpla/zen
|
lib/yammer.py
|
Python
|
apache-2.0
| 6,366
|
[
"VisIt"
] |
48742901871b2909543e4c1579abbde0e0d389bde01d77394371bc598be33955
|
# -*- coding: utf-8 -*-
#
# twoneurons.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Two neuron example
------------------
This script simulates two connected pre- and postsynaptic neurons.
The presynaptic neuron receives a constant external current,
and the membrane potential of both neurons are recorded.
See Also
~~~~~~~~
:doc:`one_neuron`
"""
###############################################################################
# First, we import all necessary modules for simulation, analysis and plotting.
# Additionally, we set the verbosity to suppress info messages and reset
# the kernel.

import nest
import nest.voltage_trace
import matplotlib.pyplot as plt

nest.set_verbosity("M_WARNING")
nest.ResetKernel()

###############################################################################
# Second, we create the two neurons and the recording device.

neuron_1 = nest.Create("iaf_psc_alpha")
neuron_2 = nest.Create("iaf_psc_alpha")
voltmeter = nest.Create("voltmeter")

###############################################################################
# Third, we set the external current of neuron 1.
# (The value's unit is presumably pA, per NEST model conventions --
# confirm against the iaf_psc_alpha model documentation.)

neuron_1.I_e = 376.0

###############################################################################
# Fourth, we connect neuron 1 to neuron 2.
# Then, we connect a voltmeter to the two neurons.
# To learn more about the previous steps, please check out the
# :doc:`one neuron example <one_neuron>`.

weight = 20.0
delay = 1.0

nest.Connect(neuron_1, neuron_2, syn_spec={"weight": weight, "delay": delay})
nest.Connect(voltmeter, neuron_1)
nest.Connect(voltmeter, neuron_2)

###############################################################################
# Now we simulate the network using ``Simulate``, which takes the
# desired simulation time in milliseconds.

nest.Simulate(1000.0)

###############################################################################
# Finally, we plot the neurons' membrane potential as a function of
# time.

nest.voltage_trace.from_device(voltmeter)
plt.show()
|
suku248/nest-simulator
|
pynest/examples/twoneurons.py
|
Python
|
gpl-2.0
| 2,663
|
[
"NEURON"
] |
2f0c71d1f6733c14507f8dea2a3e2683f08c8ab27224ac107f05e7765a3f5c6d
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# adminfreeze - front end to freezing files into write-once archive
# Copyright (C) 2003-2014 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
import cgi
import cgitb
# Render tracebacks in the browser on error (CGI debugging aid).
cgitb.enable()

from shared.functionality.adminfreeze import main
from shared.cgiscriptstub import run_cgi_script

# Delegate request handling to the shared CGI wrapper, which parses the
# request environment and invokes the adminfreeze backend's main().
run_cgi_script(main)
|
heromod/migrid
|
mig/cgi-bin/adminfreeze.py
|
Python
|
gpl-2.0
| 1,106
|
[
"Brian"
] |
360b50abec33ac56ef019e3ea9dbee572095685bc269b575d14b16c24e639c1d
|
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Robert Hammelrath
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Some parts of the software are a port of code provided by Rinky-Dink Electronics, Henning Karlsen,
# with the following copyright notice:
#
## Copyright (C)2015 Rinky-Dink Electronics, Henning Karlsen. All right reserved
## This library is free software; you can redistribute it and/or
## modify it under the terms of the CC BY-NC-SA 3.0 license.
## Please see the included documents for further information.
#
# Class supporting TFT LC-displays with a parallel Interface
# First example: Controller SSD1963 with a 4.3" or 7" display
#
# The minimal connection is:
# X1..X8 for data, Y9 for /Reset, Y10 for /RD, Y11 for /WR and Y12 for /RS
# Then LED must be hard tied to Vcc and /CS to GND.
#
import pyb, stm, gc
from uctypes import addressof
import TFT_io
# define constants
#
# Pin bit masks for the parallel TFT interface (see the module header for
# the wiring: X1..X8 data, Y9 /Reset, Y10 /RD, Y11 /WR, Y12 /RS).
RESET = const(1 << 10) ## Y9
RD = const(1 << 11) ## Y10
# NOTE(review): WR and D_C use raw 0x01/0x02 values while the other pins
# use (1 << n) masks -- presumably they address a different port/register
# inside TFT_io; confirm against TFT_io before changing.
WR = const(0x01) ## Y11
D_C = const(0x02) ## Y12
LED = const(1 << 8) ## Y3
POWER = const(1 << 9) ## Y4
## CS is not used and must be hard tied to GND
# Orientation selectors used throughout the driver.
PORTRAIT = const(1)
LANDSCAPE = const(0)
class TFT:
def __init__(self, controller = "SSD1963", lcd_type = "LB04301", orientation = LANDSCAPE,
             v_flip = False, h_flip = False, power_control = True):
    """Create the TFT driver and run the full controller/LCD init.

    Bug fix: power_control was accepted but never forwarded to
    tft_init(), so TFT(power_control=False) was silently ignored and
    the panel power pin was always driven.
    """
    self.tft_init(controller, lcd_type, orientation, v_flip, h_flip, power_control)
def tft_init(self, controller = "SSD1963", lcd_type = "LB04301", orientation = LANDSCAPE,
             v_flip = False, h_flip = False, power_control = True):
    """Full hardware init: configure the GPIO port, hard-reset the
    controller, program the SSD1963 for the chosen LCD panel, set text
    defaults and clear the screen.

    controller: display controller name; only "SSD1963" is supported.
    lcd_type: panel name; "LB04301", "AT070TN92" or "AT090TN10".
    orientation: LANDSCAPE or PORTRAIT.
    v_flip / h_flip: mirror the image vertically / horizontally.
    power_control: if True, drive the panel power pin (Y4).
    """
    #
    # For convenience, define X1..X1 and Y9..Y12 as output port using thy python functions.
    # X1..X8 will be redefind on the fly as Input by accessing the MODER control registers
    # when needed. Y9 is treate seperately, since it is used for Reset, which is done at python level
    # since it need long delays anyhow, 5 and 15 ms vs. 10 µs.
    #
    # Set TFT general defaults
    self.controller = controller
    self.lcd_type = lcd_type
    self.orientation = orientation
    self.v_flip = v_flip # flip vertical
    self.h_flip = h_flip # flip horizontal
    self.c_flip = 0 # flip blue/red
    self.rc_flip = 0 # flip row/column
    self.setColor((255, 255, 255)) # set FG color to white as can be.
    self.setBGColor((0, 0, 0))     # set BG to black
    self.bg_buf = bytearray()
    #
    self.pin_led = None # deferred init Flag
    self.power_control = power_control
    if self.power_control:
        # special treat for Power Pin
        self.pin_power = pyb.Pin("Y4", pyb.Pin.OUT_PP)
        self.power(True) ## switch Power on
    #
    pyb.delay(10)
    # this may have to be moved to the controller specific section
    # Bind the orientation-specific fast primitives from TFT_io.
    if orientation == PORTRAIT:
        self.setXY = TFT_io.setXY_P
        self.drawPixel = TFT_io.drawPixel_P
    else:
        self.setXY = TFT_io.setXY_L
        self.drawPixel = TFT_io.drawPixel_L
    self.swapbytes = TFT_io.swapbytes
    self.swapcolors = TFT_io.swapcolors
    # ----------
    for pin_name in ["X1", "X2", "X3", "X4", "X5", "X6", "X7", "X8",
                     "Y10", "Y11", "Y12"]:
        pin = pyb.Pin(pin_name, pyb.Pin.OUT_PP) # set as output
        pin.value(1) ## set high as default
    # special treat for Reset
    self.pin_reset = pyb.Pin("Y9", pyb.Pin.OUT_PP)
    # Reset the device
    self.pin_reset.value(1) ## do a hard reset
    pyb.delay(10)
    self.pin_reset.value(0) ## Low
    pyb.delay(20)
    self.pin_reset.value(1) ## set high again
    pyb.delay(20)
    #
    # Now initialiize the LCD
    # This is for the SSD1963 controller and two specific LCDs. More may follow.
    # Data taken from the SSD1963 data sheet, SSD1963 Application Note and the LCD Data sheets
    #
    if controller == "SSD1963": # 1st approach for 480 x 272
        TFT_io.tft_cmd_data(0xe2, bytearray(b'\x1d\x02\x54'), 3) # PLL multiplier, set PLL clock to 100M
        # N=0x2D for 6.5MHz, 0x1D for 10MHz crystal
        # PLLClock = Crystal * (Mult + 1) / (Div + 1)
        # The intermediate value Crystal * (Mult + 1) must be between 250MHz and 750 MHz
        TFT_io.tft_cmd_data(0xe0, bytearray(b'\x01'), 1) # PLL Enable
        pyb.delay(10)
        TFT_io.tft_cmd_data(0xe0, bytearray(b'\x03'), 1)
        pyb.delay(10)
        TFT_io.tft_cmd(0x01) # software reset
        pyb.delay(10)
        #
        # Settings for the LCD
        #
        # The LCDC_FPR depends on PLL clock and the reccomended LCD Dot clock DCLK
        #
        # LCDC_FPR = (DCLK * 1048576 / PLLClock) - 1
        #
        # The other settings are less obvious, since the definitions of the SSD1963 data sheet and the
        # LCD data sheets differ. So what' common, even if the names may differ:
        # HDP Horizontal Panel width (also called HDISP, Thd). The value store in the register is HDP - 1
        # VDP Vertical Panel Width (also called VDISP, Tvd). The value stored in the register is VDP - 1
        # HT Total Horizontal Period, also called HP, th... The exact value does not matter
        # VT Total Vertical Period, alco called VT, tv, .. The exact value does not matter
        # HPW Width of the Horizontal sync pulse, also called HS, thpw.
        # VPW Width of the Vertical sync pulse, also called VS, tvpw
        # Front Porch (HFP and VFP) Time between the end of display data and the sync pulse
        # Back Porch (HBP and VBP Time between the start of the sync pulse and the start of display data.
        # HT = FP + HDP + BP and VT = VFP + VDP + VBP (sometimes plus sync pulse width)
        # Unfortunately, the controller does not use these front/back porch times, instead it uses an starting time
        # in the front porch area and defines (see also figures in chapter 13.3 of the SSD1963 data sheet)
        # HPS Time from that horiz. starting point to the start of the horzontal display area
        # LPS Time from that horiz. starting point to the horizontal sync pulse
        # VPS Time from the vert. starting point to the first line
        # FPS Time from the vert. starting point to the vertical sync pulse
        #
        # So the following relations must be held:
        #
        # HT > HDP + HPS
        # HPS >= HPW + LPS
        # HPS = Back Porch - LPS, or HPS = Horizontal back Porch
        # VT > VDP + VPS
        # VPS >= VPW + FPS
        # VPS = Back Porch - FPS, or VPS = Vertical back Porch
        #
        # LPS or FPS may have a value of zero, since the length of the front porch is detemined by the
        # other figures
        #
        # The best is to start with the recomendations of the lCD data sheet for Back porch, grab a
        # sync pulse with and the determine the other, such that they meet the relations. Typically, these
        # values allow for some ambuigity.
        #
        if lcd_type == "LB04301": # Size 480x272, 4.3", 24 Bit, 4.3"
            #
            # Value Min Typical Max
            # DotClock 5 MHZ 9 MHz 12 MHz
            # HT (Hor. Total 490 531 612
            # HDP (Hor. Disp) 480
            # HBP (back porch) 8 43
            # HFP (Fr. porch) 2 8
            # HPW (Hor. sync) 1
            # VT (Vert. Total) 275 288 335
            # VDP (Vert. Disp) 272
            # VBP (back porch) 2 12
            # VFP (fr. porch) 1 4
            # VPW (vert. sync) 1 10
            #
            # This table in combination with the relation above leads to the settings:
            # HPS = 43, HPW = 8, LPS = 0, HT = 531
            # VPS = 14, VPW = 10, FPS = 0, VT = 288
            #
            self.disp_x_size = 479
            self.disp_y_size = 271
            TFT_io.tft_cmd_data_AS(0xe6, bytearray(b'\x01\x70\xa3'), 3) # PLL setting for PCLK
            # (9MHz * 1048576 / 100MHz) - 1 = 94371 = 0x170a3
            TFT_io.tft_cmd_data_AS(0xb0, bytearray( # # LCD SPECIFICATION
                [0x20, # 24 Color bits, HSync/VSync low, No Dithering
                 0x00, # TFT mode
                 self.disp_x_size >> 8, self.disp_x_size & 0xff, # physical Width of TFT
                 self.disp_y_size >> 8, self.disp_y_size & 0xff, # physical Height of TFT
                 0x00]), 7) # Last byte only required for a serial TFT
            TFT_io.tft_cmd_data_AS(0xb4, bytearray(b'\x02\x13\x00\x2b\x08\x00\x00\x00'), 8)
            # HSYNC, Set HT 531 HPS 43 HPW=Sync pulse 8 LPS 0
            TFT_io.tft_cmd_data_AS(0xb6, bytearray(b'\x01\x20\x00\x0e\x0a\x00\x00'), 7)
            # VSYNC, Set VT 288 VPS 14 VPW 10 FPS 0
            TFT_io.tft_cmd_data_AS(0x36, bytearray([(orientation & 1) << 5 | (h_flip & 1) << 1 | (v_flip) & 1]), 1)
            # rotation/ flip, etc., t.b.d.
        elif lcd_type == "AT070TN92": # Size 800x480, 7", 18 Bit, lower color bits ignored
            #
            # Value Min Typical Max
            # DotClock 26.4 MHz 33.3 MHz 46.8 MHz
            # HT (Hor. Total 862 1056 1200
            # HDP (Hor. Disp) 800
            # HBP (back porch) 46 46 46
            # HFP (Fr. porch) 16 210 254
            # HPW (Hor. sync) 1 40
            # VT (Vert. Total) 510 525 650
            # VDP (Vert. Disp) 480
            # VBP (back porch) 23 23 23
            # VFP (fr. porch) 7 22 147
            # VPW (vert. sync) 1 20
            #
            # This table in combination with the relation above leads to the settings:
            # HPS = 46, HPW = 8, LPS = 0, HT = 1056
            # VPS = 23, VPW = 10, VPS = 0, VT = 525
            #
            self.disp_x_size = 799
            self.disp_y_size = 479
            TFT_io.tft_cmd_data_AS(0xe6, bytearray(b'\x05\x53\xf6'), 3) # PLL setting for PCLK
            # (33.3MHz * 1048576 / 100MHz) - 1 = 349174 = 0x553f6
            TFT_io.tft_cmd_data_AS(0xb0, bytearray( # # LCD SPECIFICATION
                [0x00, # 18 Color bits, HSync/VSync low, No Dithering/FRC
                 0x00, # TFT mode
                 self.disp_x_size >> 8, self.disp_x_size & 0xff, # physical Width of TFT
                 self.disp_y_size >> 8, self.disp_y_size & 0xff, # physical Height of TFT
                 0x00]), 7) # Last byte only required for a serial TFT
            TFT_io.tft_cmd_data_AS(0xb4, bytearray(b'\x04\x1f\x00\x2e\x08\x00\x00\x00'), 8)
            # HSYNC, Set HT 1056 HPS 46 HPW 8 LPS 0
            TFT_io.tft_cmd_data_AS(0xb6, bytearray(b'\x02\x0c\x00\x17\x08\x00\x00'), 7)
            # VSYNC, Set VT 525 VPS 23 VPW 08 FPS 0
            TFT_io.tft_cmd_data_AS(0x36, bytearray([(orientation & 1) << 5 | (h_flip & 1) << 1 | (v_flip) & 1]), 1)
            # rotation/ flip, etc., t.b.d.
        elif lcd_type == "AT090TN10": # Size 800x480, 9", 24 Bit, lower color bits ignored
            #
            # Value Min Typical Max
            # DotClock 26.4 MHz 33.3 MHz 46.8 MHz
            # HT (Hor. Total 862 1056 1200
            # HDP (Hor. Disp) 800
            # HBP (back porch) 46 46 46
            # HFP (Fr. porch) 16 210 354
            # HPW (Hor. sync) 1 40
            # VT (Vert. Total) 510 525 650
            # VDP (Vert. Disp) 480
            # VBP (back porch) 23 23 23
            # VFP (fr. porch) 7 22 147
            # VPW (vert. sync) 1 20
            #
            # This table in combination with the relation above leads to the settings:
            # HPS = 46, HPW = 8, LPS = 0, HT = 1056
            # VPS = 23, VPW = 10, VPS = 0, VT = 525
            #
            self.disp_x_size = 799
            self.disp_y_size = 479
            TFT_io.tft_cmd_data_AS(0xe6, bytearray(b'\x05\x53\xf6'), 3) # PLL setting for PCLK
            # (33.3MHz * 1048576 / 100MHz) - 1 = 349174 = 0x553f6
            TFT_io.tft_cmd_data_AS(0xb0, bytearray( # # LCD SPECIFICATION
                [0x20, # 24 Color bits, HSync/VSync low, No Dithering/FRC
                 0x00, # TFT mode
                 self.disp_x_size >> 8, self.disp_x_size & 0xff, # physical Width of TFT
                 self.disp_y_size >> 8, self.disp_y_size & 0xff, # physical Height of TFT
                 0x00]), 7) # Last byte only required for a serial TFT
            TFT_io.tft_cmd_data_AS(0xb4, bytearray(b'\x04\x1f\x00\x2e\x08\x00\x00\x00'), 8)
            # HSYNC, Set HT 1056 HPS 46 HPW 8 LPS 0
            TFT_io.tft_cmd_data_AS(0xb6, bytearray(b'\x02\x0c\x00\x17\x08\x00\x00'), 7)
            # VSYNC, Set VT 525 VPS 23 VPW 08 FPS 0
            TFT_io.tft_cmd_data_AS(0x36, bytearray([(orientation & 1) << 5 | (h_flip & 1) << 1 | (v_flip) & 1]), 1)
            # rotation/ flip, etc., t.b.d.
        else:
            print("Wrong Parameter lcd_type: ", lcd_type)
            return
        TFT_io.tft_cmd_data_AS(0xBA, bytearray(b'\x0f'), 1) # GPIO[3:0] out 1
        # NOTE(review): 0xB8 is given two data bytes but a length argument
        # of 1; the SSD1963 set_gpio_conf command takes 2 parameter bytes,
        # so this possibly should be 2 -- confirm against the data sheet.
        TFT_io.tft_cmd_data_AS(0xB8, bytearray(b'\x07\x01'), 1) # GPIO3=input, GPIO[2:0]=output
        TFT_io.tft_cmd_data_AS(0xf0, bytearray(b'\x00'), 1) # Pixel data Interface 8 Bit
        TFT_io.tft_cmd(0x29) # Display on
        TFT_io.tft_cmd_data_AS(0xbe, bytearray(b'\x06\xf0\x01\xf0\x00\x00'), 6)
        # Set PWM for B/L
        TFT_io.tft_cmd_data_AS(0xd0, bytearray(b'\x0d'), 1) # Set DBC: enable, agressive
    else:
        print("Wrong Parameter controller: ", controller)
        return
    #
    # Set character printing defaults
    #
    self.text_font = None
    self.setTextStyle(self.color, self.BGcolor, 0, None, 0)
    #
    # Init done. clear Screen and switch BG LED on
    #
    self.text_x = self.text_y = self.text_yabs = 0
    self.clrSCR() # clear the display
    # self.backlight(100) ## switch BG LED on
#
# Return screen dimensions
#
def getScreensize(self):
    """Return (width, height) in pixels for the active orientation."""
    wide, high = self.disp_x_size + 1, self.disp_y_size + 1
    return (wide, high) if self.orientation == LANDSCAPE else (high, wide)
#
# set backlight brightness
#
def backlight(self, percent):
    """Set the backlight brightness to percent, clamped to 0..100."""
    if self.pin_led is None:
        # Lazy one-time setup of the BG LED pin and its PWM timer channel.
        self.pin_led = pyb.Pin("Y3", pyb.Pin.OUT_PP)
        self.led_tim = pyb.Timer(4, freq=500)
        self.led_ch = self.led_tim.channel(3, pyb.Timer.PWM, pin=self.pin_led)
    clamped = min(100, max(0, percent))
    self.led_ch.pulse_width_percent(clamped)
#
# switch power on/off
#
def power(self, onoff):
    """Switch panel power on (truthy onoff) or off; no-op without power control."""
    if not self.power_control:
        return
    self.pin_power.value(True if onoff else False)
#
# set the tft flip modes
#
def set_tft_mode(self, v_flip = False, h_flip = False, c_flip = False, orientation = LANDSCAPE):
    """Store the flip/orientation flags and push them to the controller."""
    self.v_flip = v_flip            # vertical mirror
    self.h_flip = h_flip            # horizontal mirror
    self.c_flip = c_flip            # swap blue/red
    self.orientation = orientation  # LANDSCAPE or PORTRAIT
    mode_byte = ((self.orientation << 5) | (self.c_flip << 3)
                 | (self.h_flip & 1) << 1 | (self.v_flip) & 1)
    # 0x36: address-mode register (rotation / mirroring)
    TFT_io.tft_cmd_data_AS(0x36, bytearray([mode_byte]), 1)
#
# get the tft flip modes
#
def get_tft_mode(self):
    """Return the current flags as (v_flip, h_flip, c_flip, orientation)."""
    return (self.v_flip, self.h_flip, self.c_flip, self.orientation)
#
# set the color used for the draw commands
#
def setColor(self, fgcolor):
    """Set the foreground color, an (r, g, b) sequence, for draw commands."""
    self.color = fgcolor
    self.colorvect = bytearray(fgcolor)  # cached byte form for TFT_io calls
#
# Set BG color used for the draw commands
#
def setBGColor(self, bgcolor):
    """Set the background color and rebuild the two-entry BMP color table."""
    self.BGcolor = bgcolor
    self.BGcolorvect = bytearray(bgcolor)
    # Table used by 1-bit drawBitmap(): background entry then foreground
    # entry, each stored as B, G, R plus a zero pad byte.
    bg, fg = self.BGcolorvect, self.colorvect
    self.BMPcolortable = bytearray([bg[2], bg[1], bg[0], 0,
                                    fg[2], fg[1], fg[0], 0])
#
# get the color used for the draw commands
#
def getColor(self):
    """Return the current foreground color as set by setColor()."""
    return self.color
#
# get BG color used for
#
def getBGColor(self):
    """Return the current background color as set by setBGColor()."""
    return self.BGcolor
#
# Draw a single pixel at location x, y with color
# Rather slow at 40µs/Pixel
#
def drawPixel_py(self, x, y, color):
    """Plot a single pixel at (x, y) in pure Python (slow, ~40 us/pixel)."""
    self.setXY(x, y, x, y)  # one-pixel address window
    TFT_io.displaySCR_AS(color, 1)
#
# clear screen, set it to BG color.
#
def clrSCR(self, color = None):
    """Fill the whole screen (default: BG color) and reset scroll/text state."""
    fill = bytearray(color) if color is not None else self.BGcolorvect
    self.clrXY()
    TFT_io.fillSCR_AS(fill, (self.disp_x_size + 1) * (self.disp_y_size + 1))
    self.setScrollArea(0, self.disp_y_size + 1, 0)
    self.setScrollStart(0)
    self.setTextPos(0, 0)
#
# reset the address range to fullscreen
#
def clrXY(self):
    """Reset the controller's address window to cover the full screen."""
    w, h = self.disp_x_size, self.disp_y_size
    if self.orientation == LANDSCAPE:
        self.setXY(0, 0, w, h)
    else:
        self.setXY(0, 0, h, w)
#
# Draw a line from x1, y1 to x2, y2 with the color set by setColor()
# Straight port from the UTFT Library at Rinky-Dink Electronics
#
def drawLine(self, x1, y1, x2, y2, color = None):
    """Draw a straight line from (x1, y1) to (x2, y2).

    Horizontal/vertical lines use the fast drawHLine/drawVLine fill
    path; everything else is Bresenham's integer line algorithm,
    plotted pixel by pixel (straight port from the UTFT library).
    color is an (r, g, b) sequence; None uses the setColor() color.
    """
    if y1 == y2:
        self.drawHLine(x1, y1, x2 - x1 + 1, color)
    elif x1 == x2:
        self.drawVLine(x1, y1, y2 - y1 + 1, color)
    else:
        colorvect = self.colorvect if color is None else bytearray(color)
        # Absolute deltas and step directions for each axis.
        dx, xstep = (x2 - x1, 1) if x2 > x1 else (x1 - x2, -1)
        dy, ystep = (y2 - y1, 1) if y2 > y1 else (y1 - y2, -1)
        col, row = x1, y1
        if dx < dy:
            # y-major: advance one row per iteration; step x when the
            # accumulated error t crosses zero.
            t = - (dy >> 1)
            while True:
                self.drawPixel(col, row, colorvect)
                if row == y2:
                    return
                row += ystep
                t += dx
                if t >= 0:
                    col += xstep
                    t -= dy
        else:
            # x-major: advance one column per iteration; step y on error.
            t = - (dx >> 1)
            while True:
                self.drawPixel(col, row, colorvect)
                if col == x2:
                    return
                col += xstep
                t += dy
                if t >= 0:
                    row += ystep
                    t -= dx
#
# Draw a horizontal line with 1 Pixel width, from x,y to x + l - 1, y
# Straight port from the UTFT Library at Rinky-Dink Electronics
#
def drawHLine(self, x, y, l, color = None):
    """Draw a 1-pixel-high horizontal line of length l starting at (x, y).

    A negative l draws to the left of x; color None uses the foreground
    color set by setColor().
    """
    fill = bytearray(color) if color is not None else self.colorvect
    if l < 0:
        # Normalize to a positive-length span with a shifted start.
        l, x = -l, x + l
    self.setXY(x, y, x + l - 1, y)  # address window = the line
    TFT_io.fillSCR_AS(fill, l)
#
# Draw a vertical line with 1 Pixel width, from x,y to x, y + l - 1
# Straight port from the UTFT Library at Rinky-Dink Electronics
#
def drawVLine(self, x, y, l, color = None):
    """Draw a 1-pixel-wide vertical line of length l starting at (x, y).

    A negative l draws upward from y; color None uses the foreground
    color set by setColor().
    """
    fill = bytearray(color) if color is not None else self.colorvect
    if l < 0:
        # Normalize to a positive-length span with a shifted start.
        l, y = -l, y + l
    self.setXY(x, y, x, y + l - 1)  # address window = the line
    TFT_io.fillSCR_AS(fill, l)
#
# Draw rectangle from x1, y1, to x2, y2
# Straight port from the UTFT Library at Rinky-Dink Electronics
#
def drawRectangle(self, x1, y1, x2, y2, color = None):
    """Draw the outline of the rectangle spanned by the two corner points."""
    xa, xb = min(x1, x2), max(x1, x2)
    ya, yb = min(y1, y2), max(y1, y2)
    width, height = xb - xa + 1, yb - ya + 1
    self.drawHLine(xa, ya, width, color)   # top edge
    self.drawHLine(xa, yb, width, color)   # bottom edge
    self.drawVLine(xa, ya, height, color)  # left edge
    self.drawVLine(xb, ya, height, color)  # right edge
#
# Fill rectangle
# Almost straight port from the UTFT Library at Rinky-Dink Electronics
#
def fillRectangle(self, x1, y1, x2, y2, color=None):
    """Fill the rectangle spanned by the two corners (default: foreground color)."""
    xa, xb = min(x1, x2), max(x1, x2)
    ya, yb = min(y1, y2), max(y1, y2)
    self.setXY(xa, ya, xb, yb)  # address window = the rectangle
    fill = bytearray(color) if color else self.colorvect
    TFT_io.fillSCR_AS(fill, (xb - xa + 1) * (yb - ya + 1))
#
# Draw smooth rectangle from x1, y1, to x2, y2
# Straight port from the UTFT Library at Rinky-Dink Electronics
#
def drawClippedRectangle(self, x1, y1, x2, y2, color = None):
    """Draw a rectangle outline with clipped (cut-off) corners.

    Rectangles 4 pixels or smaller in either dimension are skipped,
    matching the UTFT original this is ported from.
    """
    if x1 > x2:
        x1, x2 = x2, x1
    if y1 > y2:
        y1, y2 = y2, y1
    if (x2 - x1) <= 4 or (y2 - y1) <= 4:
        return
    colorvect = self.colorvect if color is None else bytearray(color)
    # Two diagonal pixels per corner, mirrored over both axes.
    for dx, dy in ((2, 1), (1, 2)):
        self.drawPixel(x1 + dx, y1 + dy, colorvect)
        self.drawPixel(x2 - dx, y1 + dy, colorvect)
        self.drawPixel(x1 + dx, y2 - dy, colorvect)
        self.drawPixel(x2 - dx, y2 - dy, colorvect)
    # Straight edges, inset by 3 pixels at each end to meet the corners.
    self.drawHLine(x1 + 3, y1, x2 - x1 - 5, colorvect)
    self.drawHLine(x1 + 3, y2, x2 - x1 - 5, colorvect)
    self.drawVLine(x1, y1 + 3, y2 - y1 - 5, colorvect)
    self.drawVLine(x2, y1 + 3, y2 - y1 - 5, colorvect)
#
# Fill smooth rectangle from x1, y1, to x2, y2
# Straight port from the UTFT Library at Rinky-Dink Electronics
#
def fillClippedRectangle(self, x1, y1, x2, y2, color = None):
    """Fill a rectangle with clipped corners using horizontal scan lines.

    Rectangles 4 pixels or smaller in either dimension are skipped,
    matching the UTFT original this is ported from.
    """
    if x1 > x2:
        x1, x2 = x2, x1
    if y1 > y2:
        y1, y2 = y2, y1
    if (x2 - x1) <= 4 or (y2 - y1) <= 4:
        return
    # The outermost three scan lines at either end are inset to cut the
    # corners (3, 2 and 1 pixels); all inner lines run the full width.
    insets = {0: 3, 1: 2, 2: 1}
    full_width = x2 - x1 + 1
    for i in range(((y2 - y1) // 2) + 1):
        inset = insets.get(i, 0)
        span = full_width - 2 * inset
        self.drawHLine(x1 + inset, y1 + i, span, color)  # from the top
        self.drawHLine(x1 + inset, y2 - i, span, color)  # from the bottom
#
# draw a circle at x, y with radius
# Straight port from the UTFT Library at Rinky-Dink Electronics
#
def drawCircle(self, x, y, radius, color = None):
    """Draw a circle outline centered at (x, y).

    Midpoint (Bresenham) circle algorithm, ported from the UTFT
    library: walk one octant and mirror each computed point into the
    other seven. color None uses the setColor() foreground color.
    """
    colorvect = self.colorvect if color is None else bytearray(color)
    f = 1 - radius        # decision variable
    ddF_x = 1             # x-step increment of f
    ddF_y = -2 * radius   # y-step increment of f
    x1 = 0
    y1 = radius
    # The four axis-aligned cardinal points are not produced by the
    # octant loop below, so plot them explicitly first.
    self.drawPixel(x, y + radius, colorvect)
    self.drawPixel(x, y - radius, colorvect)
    self.drawPixel(x + radius, y, colorvect)
    self.drawPixel(x - radius, y, colorvect)
    while x1 < y1:
        if f >= 0:
            # Midpoint fell outside the circle: step y inward.
            y1 -= 1
            ddF_y += 2
            f += ddF_y
        x1 += 1
        ddF_x += 2
        f += ddF_x
        # Mirror the computed octant point into all eight octants.
        self.drawPixel(x + x1, y + y1, colorvect)
        self.drawPixel(x - x1, y + y1, colorvect)
        self.drawPixel(x + x1, y - y1, colorvect)
        self.drawPixel(x - x1, y - y1, colorvect)
        self.drawPixel(x + y1, y + x1, colorvect)
        self.drawPixel(x - y1, y + x1, colorvect)
        self.drawPixel(x + y1, y - x1, colorvect)
        self.drawPixel(x - y1, y - x1, colorvect)
#
# fill a circle at x, y with radius
# Straight port from the UTFT Library at Rinky-Dink Electronics
# Instead of calculating x = sqrt(r*r - y*y), it searches the x
# for r*r = x*x + x*x
#
def fillCircle(self, x, y, radius, color = None):
    """Fill a circle centered at (x, y) with color (default: foreground).

    Searches rather than computes sqrt (see header comment): rows and
    columns are scanned on a doubled (half-pixel) grid; for each row the
    first in-circle x determines the span, which is drawn as a mirrored
    pair of horizontal lines (top and bottom half).
    """
    r_square = radius * radius * 4
    for y1 in range (-(radius * 2), 1):
        y_square = y1 * y1
        for x1 in range (-(radius * 2), 1):
            if x1*x1+y_square <= r_square:
                # First x inside the circle on this row: emit the full
                # mirrored span and move on to the next row.
                x1i = x1 // 2
                y1i = y1 // 2
                self.drawHLine(x + x1i, y + y1i, 2 * (-x1i), color)
                self.drawHLine(x + x1i, y - y1i, 2 * (-x1i), color)
                break;
#
# Draw a bitmap at x,y with size sx, sy
# mode determines the type of expected data
# mode = 1: The data contains 1 bit per pixel, mapped to fg/bg color
# unless a colortable is provided
# mode = 2: The data contains 2 bit per pixel; a colortable with 4 entries must be provided
# mode = 4: The data contains 4 bit per pixel;
# a colortable with 16 entries must be provided
# mode = 8: The data contains 8 bit per pixel;
# a colortable with 256 entries must be provided
# mode = 16: The data must contain 2 packed bytes/pixel red/green/blue in 565 format
# mode = 24: The data must contain 3 bytes/pixel red/green/blue
#
def drawBitmap(self, x, y, sx, sy, data, mode = 24, colortable = None):
    """Render an sx-by-sy bitmap at (x, y).

    mode selects the pixel encoding of data:
      24 -- 3 bytes/pixel (r, g, b)
      16 -- 2 bytes/pixel, packed 565
      1/2/4/8 -- palette modes; colortable supplies the palette. In
          1-bit mode a missing colortable falls back to the fg/bg table
          built by setBGColor(); the other palette modes draw nothing
          without an explicit colortable.
    """
    self.setXY(x, y, x + sx - 1, y + sy - 1)
    npix = sx * sy
    if mode == 24:
        TFT_io.displaySCR_AS(data, npix)
    elif mode == 16:
        TFT_io.displaySCR565_AS(data, npix)
    elif mode in (1, 2, 4, 8):
        if colortable is None:
            if mode != 1:
                return  # palette is mandatory for 2/4/8-bit data
            colortable = self.BMPcolortable  # default fg/bg table
        TFT_io.displaySCR_bmp(data, npix, mode, colortable)
#
# set scroll area to the region between the first and last line
#
    def setScrollArea(self, tfa, vsa, bfa):
        """Define the vertical scroll region of the display.

        tfa -- top fixed area height in lines
        vsa -- vertical scroll area height in lines
        bfa -- bottom fixed area height in lines
        Presumably the three should add up to the panel height -- TODO confirm.
        """
        TFT_io.tft_cmd_data_AS(0x33, bytearray( # cmd 0x33: set scrolling range
            [(tfa >> 8) & 0xff, tfa & 0xff,
             (vsa >> 8) & 0xff, vsa & 0xff,
             (bfa >> 8) & 0xff, bfa & 0xff]), 6)
        self.scroll_tfa = tfa  # remember the region for text positioning
        self.scroll_vsa = vsa
        self.scroll_bfa = bfa
        self.setScrollStart(self.scroll_tfa)  # reset scrolling to the top
        x, y = self.getTextPos()
        self.setTextPos(x, y) # realign pointers
#
# get scroll area of the region between the first and last line
#
def getScrollArea(self):
return self.scroll_tfa, self.scroll_vsa, self.scroll_bfa
#
# set the line which is displayed first
#
    def setScrollStart(self, lline):
        """Make 'lline' the logical first display line of the scroll area."""
        self.scroll_start = lline # store the logical first line
        TFT_io.tft_cmd_data_AS(0x37, bytearray([(lline >> 8) & 0xff, lline & 0xff]), 2)  # cmd 0x37: vertical scroll start
#
# get the line which is displayed first
#
def getScrollStart(self):
return self.scroll_start # get the logical first line
#
# Scroll vsa up/down by a number of pixels
#
def scroll(self, pixels):
line = ((self.scroll_start - self.scroll_tfa + pixels) % self.scroll_vsa
+ self.scroll_tfa)
self.setScrollStart(line) # set the new line
#
# Set text position
#
    def setTextPos(self, x, y, clip = False, scroll = True):
        """Set the text cursor to pixel position (x, y).

        x, y   -- cursor position; y is in absolute (unscrolled) coordinates
        clip   -- if a number, limit the printable width to x + clip pixels
        scroll -- whether printing may scroll on reaching the bottom
        """
        self.text_width, self.text_height = self.getScreensize() ## height possibly wrong
        self.text_x = x
        if self.scroll_tfa <= y < (self.scroll_tfa + self.scroll_vsa): # in scroll area ? check later for < or <=
            # correct position relative to scroll start
            self.text_y = (y + self.scroll_start - self.scroll_tfa)
            if self.text_y >= (self.scroll_tfa + self.scroll_vsa):
                self.text_y -= self.scroll_vsa  # wrap around inside the scroll area
        else: # absolute
            self.text_y = y
        self.text_yabs = y  # absolute position kept alongside the display position
        # Hint: self.text_yabs = self.text_y - self.scroll_start) % self.scroll_vsa + self.scroll_tfa)
        if clip and (self.text_x + clip) < self.text_width:
            self.text_width = self.text_x + clip  # narrow the printable width
        self.text_scroll = scroll
#
# Get text position
#
def getTextPos(self, abs = True):
if abs:
return (self.text_x, self.text_yabs)
else:
return (self.text_x, self.text_y)
#
# Set Text Style
#
    def setTextStyle(self, fgcolor=None, bgcolor=None, transparency=None, font=None, gap=None):
        """Set any subset of the text attributes; None leaves a value unchanged.

        fgcolor, bgcolor -- (r, g, b) color sequences
        transparency     -- 0 = opaque background, nonzero = keep background
        font             -- font object providing get_properties()/get_ch()
        gap              -- extra pixels between characters
        """
        if font is not None:
            self.text_font = font
            self.text_rows, self.text_cols, nchar, first = font.get_properties() # nchar/first unused here
        if transparency is not None:
            self.transparency = transparency
        if gap is not None:
            self.text_gap = gap
        if bgcolor is not None:
            self.text_bgcolor = bgcolor
        if fgcolor is not None:
            self.text_fgcolor = fgcolor
        # pack bg (3 bytes) + fg (3 bytes) + transparency flag for TFT_io
        self.text_color = (bytearray(self.text_bgcolor)
                           + bytearray(self.text_fgcolor)
                           + bytearray([self.transparency]))
#
    # Get Text Style: return (fgcolor, bgcolor, transparency, font, gap)
#
def getTextStyle(self):
return (self.text_color[3:6], self.text_color[0:3],
self.transparency, self.text_font, self.text_gap)
#
# Check, if a new line is to be opened
# if yes, advance, including scrolling, and clear line, if flags is set
# Obsolete?
#
    def printNewline(self, clear = False):
        """Advance the text cursor one text line down, scrolling the
        scroll area when the next line would not fit; if 'clear' is set,
        the new line is erased.  (Marked as possibly obsolete above.)
        """
        if (self.text_yabs + self.text_rows) >= (self.scroll_tfa + self.scroll_vsa): # does the line fit?
            self.scroll(self.text_rows) # no. scroll
        else: # Yes, just advance pointers
            self.text_yabs += self.text_rows
        self.setTextPos(self.text_x, self.text_yabs)
        if clear:
            self.printClrLine(2) # clear actual line
#
# Carriage Return
#
    def printCR(self): # carriage return: move the cursor to column 0 (does not clear anything)
        """Return the text cursor to the start of the current line."""
        self.text_x = 0
#
# clear line modes
#
    def printClrLine(self, mode = 0): # clear to end of line/bol/line
        """Fill part of the current text line with the background color.

        mode 0 -- clear from the cursor to the end of the line
        mode 1 -- clear from the beginning of the line to the cursor
        mode 2 -- clear the whole line
        """
        if mode == 0:
            self.setXY(self.text_x, self.text_y,
                self.text_width - 1, self.text_y + self.text_rows - 1) # set display window
            # NOTE(review): the window is (text_width - text_x) pixels wide but the
            # fill count uses (text_width - text_x + 1) -- possible off-by-one; confirm
            TFT_io.fillSCR_AS(self.text_color, (self.text_width - self.text_x + 1) * self.text_rows)
        elif mode == 1 and self.text_x > 0:
            self.setXY(0, self.text_y,
                self.text_x - 1, self.text_y + self.text_rows - 1) # set display window
            # NOTE(review): the window is text_x pixels wide but the fill count
            # uses (text_x - 1) -- possible off-by-one; confirm on hardware
            TFT_io.fillSCR_AS(self.text_color, (self.text_x - 1) * self.text_rows)
        elif mode == 2:
            self.setXY(0, self.text_y,
                self.text_width - 1, self.text_y + self.text_rows - 1) # set display window
            TFT_io.fillSCR_AS(self.text_color, self.text_width * self.text_rows)
#
    # clear screen modes
#
    def printClrSCR(self): # clear Area set by setScrollArea
        """Clear the whole scroll area, reset scrolling and move the
        text cursor to the area's top-left corner."""
        self.setXY(0, self.scroll_tfa,
            self.text_width - 1, self.scroll_tfa + self.scroll_vsa) # set display window
        # NOTE(review): the window above spans scroll_vsa + 1 lines while the
        # fill below covers scroll_vsa lines -- possible off-by-one; confirm
        TFT_io.fillSCR_AS(self.text_color, self.text_width * self.scroll_vsa)
        self.setScrollStart(self.scroll_tfa)
        self.setTextPos(0, self.scroll_tfa)
#
# Print string s, returning the length of the printed string in pixels
#
def printString(self, s, bg_buf=None):
len = 0
for c in s:
cols = self.printChar(c, bg_buf)
if cols == 0: # could not print (any more)
break
len += cols
return len
#
# Print string c using the given char bitmap at location x, y, returning the width of the printed char in pixels
#
    def printChar(self, c, bg_buf=None):
        """Print a single character at the current text position using
        the font set with setTextStyle(), then advance the cursor.

        bg_buf -- optional preallocated buffer for the background pixels
                  (used only when transparency is enabled)
        Returns the printed width in pixels (glyph plus gap), or 0 when
        nothing could be printed (no fit and scrolling disabled).
        Raises AttributeError if no font is selected.
        """
        # get the character's pixel bitmap and dimensions
        if self.text_font:
            fontptr, rows, cols = self.text_font.get_ch(ord(c))
        else:
            raise AttributeError('No font selected')
        pix_count = cols * rows # number of bits in the char
        # test char fit
        if self.text_x + cols > self.text_width: # does the char fit on the screen?
            if self.text_scroll:
                self.printCR() # No, then CR
                self.printNewline(True) # NL: advance to the next line
            else:
                return 0
        # Retrieve background data if transparency is required
        if self.transparency: # in case of transparency, the frame buffer content is needed
            if bg_buf is None: # buffer allocation needed?
                if len(self.bg_buf) < pix_count * 3:
                    del(self.bg_buf)
                    gc.collect()  # release the old buffer before growing it
                    self.bg_buf = bytearray(pix_count * 3) # Make it bigger
                bg_buf = self.bg_buf
            self.setXY(self.text_x, self.text_y, self.text_x + cols - 1, self.text_y + rows - 1) # set area
            TFT_io.tft_read_cmd_data_AS(0x2e, bg_buf, pix_count * 3) # read background data (cmd 0x2e)
        else:
            bg_buf = 0 # dummy assignment, since None is not accepted
        # Set XY range & print char
        self.setXY(self.text_x, self.text_y, self.text_x + cols - 1, self.text_y + rows - 1) # set area
        TFT_io.displaySCR_charbitmap(fontptr, pix_count, self.text_color, bg_buf) # display char!
        # advance pointer
        self.text_x += (cols + self.text_gap)
        return cols + self.text_gap
|
robert-hh/SSD1963-TFT-Library-for-PyBoard
|
pyboard/tft.py
|
Python
|
mit
| 34,687
|
[
"CRYSTAL"
] |
9e804a48ad27ad56d0cd1b63d6539d08923e8456924246d42f1b4dc6aaddada3
|
from examples.stencil_grid.stencil_kernel import *
from examples.stencil_grid.stencil_grid import StencilGrid
from ctree.util import Timer
import sys
import numpy
import math
# Command line: <input raw grayscale file> <width> <height> [<output file>]
width = int(sys.argv[2])
height = int(sys.argv[3])
image_in = open(sys.argv[1], 'rb')
# Bilateral filter parameters: stdev_d is the spatial (domain) sigma,
# stdev_s the intensity (range) sigma; the kernel radius covers 3 sigma.
stdev_d = 3
stdev_s = 70
radius = stdev_d * 3
class Kernel(StencilKernel):
    """Bilateral filter stencil: each output pixel accumulates its
    neighborhood weighted by spatial distance (filter_d) and by
    intensity difference (filter_s)."""
    def kernel(self, in_img, filter_d, filter_s, out_img):
        # NOTE(review): the weights are not normalized here; presumably the
        # intensity rescaling at the end of the script compensates -- confirm
        for x in out_img.interior_points():
            for y in in_img.neighbors(x, 1):
                out_img[x] += in_img[y] * filter_d[int(distance(x, y))] *\
                    filter_s[abs(int(in_img[x] - in_img[y]))]
def gaussian(stdev, length):
    """Build a 1-D Gaussian lookup table of the given length in a
    StencilGrid: g[i] = scale * exp(-i*i / (2*stdev*stdev))."""
    table = StencilGrid([length])
    scale = 1.0/(stdev*math.sqrt(2.0*math.pi))
    divisor = -1.0 / (2.0 * stdev * stdev)
    for i in range(length):
        table[i] = scale * math.exp(float(i) * float(i) * divisor)
    return table
def distance(x, y):
    """Euclidean distance between points x and y (iterated over x's indices)."""
    total = 0
    for i in range(len(x)):
        total += (x[i] - y[i]) ** 2
    return math.sqrt(total)
# Read in grayscale values.  bytearray works identically on Python 2 (str)
# and Python 3 (bytes) and yields a mutable, indexable list of ints --
# the previous map(ord, list(...)) form breaks on Python 3 (ord of int,
# and a lazy map object cannot be indexed or assigned below).
pixels = list(bytearray(image_in.read(width * height)))
kernel = Kernel()
kernel.should_unroll = False
# Output and input grids with a ghost border wide enough for the stencil
out_grid = StencilGrid([width, height])
out_grid.ghost_depth = radius
in_grid = StencilGrid([width, height])
in_grid.ghost_depth = radius
# Neighborhood 1 is the full (2*radius+1)^2 square around each point
for x in range(-radius, radius+1):
    for y in range(-radius, radius+1):
        in_grid.neighbor_definition[1].append((x, y))
# Copy the pixel data into the input grid
for x in range(0, width):
    for y in range(0, height):
        in_grid.data[(x, y)] = pixels[y * width + x]
# Precomputed weight tables: spatial (domain) and intensity (range)
gaussian1 = gaussian(stdev_d, radius*2)
gaussian2 = gaussian(stdev_s, 256)
kernel.kernel(in_grid, gaussian1, gaussian2, out_grid)
# class Runner(object):
#     def __call__(self, *args, **kwargs):
#         kernel = Kernel()
#         kernel.should_unroll = False
#         out_grid = StencilGrid([width, height])
#         out_grid.ghost_depth = radius
#         in_grid = StencilGrid([width, height])
#         in_grid.ghost_depth = radius
#         for x in range(-radius, radius+1):
#             for y in range(-radius, radius+1):
#                 in_grid.neighbor_definition[1].append((x, y))
#         for x in range(0, width):
#             for y in range(0, height):
#                 in_grid.data[(x, y)] = pixels[y * width + x]
#         kernel.kernel(in_grid, gaussian1, gaussian2, out_grid)
# import timeit
# print("Average C version time: %.03fs" % timeit.timeit(stmt=Runner(),
#                                                        number=100))
# NOTE(review): exit() ends the script here, so the validation and output
# code below never runs; remove this line to enable it
exit()
# ---- only reached if the exit() above is removed ----
# Print full arrays; modern numpy rejects numpy.nan as a threshold (must be int)
numpy.set_printoptions(threshold=sys.maxsize)
# Reference result from the pure-Python stencil implementation
actual_grid = StencilGrid([width, height])
actual_grid.ghost_depth = radius
naive = Kernel()
naive.pure_python = True
with Timer() as t:
    naive.kernel(in_grid, gaussian1, gaussian2, actual_grid)
print("Python version time: %.03fs" % t.interval)
numpy.testing.assert_array_almost_equal(actual_grid.data,
                                        out_grid.data, decimal=5)
# Mean intensity of the input image, computed BEFORE pixels is overwritten
# with the (unnormalized) filter output below.  Previously this existed
# only as a comment near the top of the script, so the rescaling further
# down crashed with a NameError on 'intensity'.
intensity = float(sum(pixels)) / len(pixels)
for x in range(0, width):
    for y in range(0, height):
        pixels[y * width + x] = out_grid.data[(x, y)]
# Rescale the output so its mean intensity matches the input's, clamped to [0, 255]
out_intensity = float(sum(pixels))/len(pixels)
for i in range(0, len(pixels)):
    pixels[i] = min(255, max(0, int(pixels[i] * (intensity/out_intensity))))
image_out = open(sys.argv[4], 'wb')
# bytearray writes correctly to a binary file on both Python 2 and 3;
# ''.join(map(chr, pixels)) was Python-2-only
image_out.write(bytearray(pixels))
|
ucb-sejits/ctree
|
examples/stencil_grid/bilateral_filter.py
|
Python
|
bsd-2-clause
| 3,304
|
[
"Gaussian"
] |
2aa941f3877491ddc08cff77815b27c20c1f47a073558638964c5f950d631ffe
|
# -*- coding: utf-8 -*-
""" *==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email legal@cyan.com
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==* """
def PtAcceptInviteInGame(friendName,inviteKey):
"""Sends a VaultTask to the server to perform the invite"""
pass
def PtAmCCR():
"""Returns true if local player is a CCR"""
pass
def PtAtTimeCallback(selfkey,time,id):
"""This will create a timer callback that will call OnTimer when complete
- 'selfkey' is the ptKey of the PythonFile component
- 'time' is how much time from now (in seconds) to call back
- 'id' is an integer id that will be returned in the OnTimer call"""
pass
def PtAttachObject(child,parent):
"""Attach child to parent based on ptKey or ptSceneobject
- childKey is the ptKey or ptSceneobject of the one being attached
- parentKey is the ptKey or ptSceneobject of the one being attached to
(both arguments must be ptKeys or ptSceneobjects, you cannot mix types)"""
pass
def PtAvatarEnterAFK():
"""Tells the local avatar to enter AwayFromKeyboard idle loop (netpropagated)"""
pass
def PtAvatarEnterAnimMode(animName):
"""Enter a custom anim loop (netpropagated)"""
pass
def PtAvatarEnterLookingAtKI():
"""Tells the local avatar to enter looking at KI idle loop (netpropagated)"""
pass
def PtAvatarEnterUsePersBook():
"""Tells the local avatar to enter using their personal book idle loop (netpropagated)"""
pass
def PtAvatarExitAFK():
"""Tells the local avatar to exit AwayFromKeyboard idle loop (netpropagated)"""
pass
def PtAvatarExitAnimMode(animName):
"""Exit custom anim loop (netpropagated)"""
pass
def PtAvatarExitLookingAtKI():
"""Tells the local avatar to exit looking at KI idle loop (netpropagated)"""
pass
def PtAvatarExitUsePersBook():
"""Tells the local avatar to exit using their personal book idle loop (netpropagated)"""
pass
def PtAvatarSitOnGround():
"""Tells the local avatar to sit on ground and enter sit idle loop (netpropagated)"""
pass
def PtAvatarSpawnNext():
"""Send the avatar to the next spawn point"""
pass
def PtCanShadowCast():
"""Can we cast shadows?"""
pass
def PtChangeAvatar(gender):
"""Change the local avatar's gender (or clothing type)"""
pass
def PtChangePassword(password):
"""Changes the current account's password"""
pass
def PtChangePlayerName(name):
"""Change the local avatar's name"""
pass
def PtCheckVisLOS(startPoint,endPoint):
"""Does LOS check from start to end"""
pass
def PtCheckVisLOSFromCursor():
"""Does LOS check from where the mouse cursor is, into the screen"""
pass
def PtClearCameraStack():
"""clears all cameras"""
pass
def PtClearOfferBookMode():
"""Cancel the offer book interface"""
pass
def PtClearPrivateChatList(memberKey):
"""Remove the local avatar from private vox messaging, and / or clear members from his chat list"""
pass
def PtClearTimerCallbacks(key):
"""This will remove timer callbacks to the specified key"""
pass
def PtConsole(command):
"""This will execute 'command' as if it were typed into the Plasma console."""
pass
def PtConsoleNet(command,netForce):
"""This will execute 'command' on the console, over the network, on all clients.
If 'netForce' is true then force command to be sent over the network."""
pass
def PtCreateDir(directory):
"""Creates the directory and all parent folders. Returns false on failure"""
pass
def PtCreatePlayer(playerName, avatarShape, invitation):
"""Creates a new player"""
pass
def PtCreatePlayerW(playerName, avatarShape, invitation):
"""Unicode version of PtCreatePlayer"""
pass
def PtCreatePublicAge(ageInfo, cbObject=None):
"""Create a public instance of the given age.
cbObject, if supplied should have a member called publicAgeCreated(self,ageInfo)"""
pass
def PtDebugAssert(cond, msg):
"""Debug only: Assert if condition is false."""
pass
def PtDebugPrint(*msgs, **kwargs):
"""Prints msgs to the Python log given the message's level"""
pass
def PtDeletePlayer(playerInt):
"""Deletes a player associated with the current account"""
pass
def PtDetachObject(child,parent):
"""Detach child from parent based on ptKey or ptSceneobject
- child is the ptKey or ptSceneobject of the one being detached
- parent is the ptKey or ptSceneobject of the one being detached from
(both arguments must be ptKeys or ptSceneobjects, you cannot mix types)"""
pass
def PtDirtySynchClients(selfKey,SDLStateName,flags):
"""DO NOT USE - handled by ptSDL"""
pass
def PtDirtySynchState(selfKey,SDLStateName,flags):
"""DO NOT USE - handled by ptSDL"""
pass
def PtDisableAvatarCursorFade():
"""Disable the avatar cursor fade"""
pass
def PtDisableAvatarJump():
"""Disable the ability of the avatar to jump"""
pass
def PtDisableControlKeyEvents(selfKey):
"""Disable the control key events from calling OnControlKeyEvent"""
pass
def PtDisableForwardMovement():
"""Disable the ability of the avatar to move forward"""
pass
def PtDisableMouseMovement():
"""Disable avatar mouse movement input"""
pass
def PtDisableMovementKeys():
"""Disable avatar movement input"""
pass
def PtDisableRenderScene():
"""UNKNOWN"""
pass
def PtDisableShadows():
"""Turns shadows off"""
pass
def PtDumpLogs(folder):
"""Dumps all current log files to the specified folder (a sub-folder to the log folder)"""
pass
def PtEmoteAvatar(emote):
"""Play an emote on the local avatar (netpropagated)"""
pass
def PtEnableAvatarCursorFade():
"""Enable the avatar cursor fade"""
pass
def PtEnableAvatarJump():
"""Enable the ability of the avatar to jump"""
pass
def PtEnableControlKeyEvents(selfKey):
"""Enable control key events to call OnControlKeyEvent(controlKey,activateFlag)"""
pass
def PtEnableForwardMovement():
"""Enable the ability of the avatar to move forward"""
pass
def PtEnableMouseMovement():
"""Enable avatar mouse movement input"""
pass
def PtEnableMovementKeys():
"""Enable avatar movement input"""
pass
def PtEnablePlanarReflections(on):
"""Enables/disables planar reflections"""
pass
def PtEnableRenderScene():
"""UNKNOWN"""
pass
def PtEnableShadows():
"""Turns shadows on"""
pass
def PtExcludeRegionSet(senderKey,regionKey,state):
"""This will set the state of an exclude region
- 'senderKey' is a ptKey of the PythonFile component
- 'regionKey' is a ptKey of the exclude region
- 'state' is either kExRegRelease or kExRegClear"""
pass
def PtExcludeRegionSetNow(senderKey,regionKey,state):
"""This will set the state of an exclude region immediately on the server
- 'senderKey' is a ptKey of the PythonFile component
- 'regionKey' is a ptKey of the exclude region
- 'state' is either kExRegRelease or kExRegClear"""
pass
def PtFadeIn(lenTime, holdFlag, noSound=0):
"""Fades screen in for lenTime seconds"""
pass
def PtFadeLocalAvatar(fade):
"""Fade (or unfade) the local avatar"""
pass
def PtFadeOut(lenTime, holdFlag, noSound=0):
"""Fades screen out for lenTime seconds"""
pass
def PtFakeLinkAvatarToObject(avatar,object):
"""Pseudo-links avatar to object within the same age
"""
pass
def PtFileExists(filename):
"""Returns true if the specified file exists"""
pass
def PtFindSceneobject(name,ageName):
"""This will try to find a sceneobject based on its name and what age its in
- it will return a ptSceneObject if found- if not found then a NameError exception will happen"""
pass
def PtFirstPerson():
"""is the local avatar in first person mode"""
pass
def PtFlashWindow():
"""Flashes the client window if it is not focused"""
pass
def PtFogSetDefColor(color):
"""Sets default fog color"""
pass
def PtFogSetDefExp(end,density):
"""Set exp fog values"""
pass
def PtFogSetDefExp2(end,density):
"""Set exp2 fog values"""
pass
def PtFogSetDefLinear(start,end,density):
"""Set linear fog values"""
pass
def PtForceCursorHidden():
"""Forces the cursor to hide, overriding everything.
Only call if other methods won't work. The only way to show the cursor after this call is PtForceMouseShown()"""
pass
def PtForceCursorShown():
"""Forces the cursor to show, overriding everything.
Only call if other methods won't work. This is the only way to show the cursor after a call to PtForceMouseHidden()"""
pass
def PtGMTtoDniTime(gtime):
"""Converts GMT time (passed in) to D'Ni time"""
pass
def PtGUICursorDimmed():
    """Dims the GUI cursor"""
    pass
def PtGUICursorOff():
"""Turns the GUI cursor off"""
pass
def PtGUICursorOn():
"""Turns the GUI cursor on"""
pass
def PtGetAccountName():
"""Returns the account name for the current account"""
pass
def PtGetAccountPlayerList():
"""Returns list of players associated with the current account"""
pass
def PtGetAgeInfo():
"""Returns ptAgeInfoStruct of the current Age"""
pass
def PtGetAgeName():
    """DEPRECATED - use ptDniInfoSource instead"""
    pass
def PtGetAgeSDL():
"""Returns the global ptSDL for the current Age"""
pass
def PtGetAgeTime():
    """DEPRECATED - use ptDniInfoSource instead"""
    pass
def PtGetAgeTimeOfDayPercent():
"""Returns the current age time of day as a percent (0 to 1)"""
pass
def PtGetAvatarKeyFromClientID(clientID):
"""From an integer that is the clientID, find the avatar and return its ptKey"""
pass
def PtGetCameraNumber(x):
"""Returns camera x's name from stack"""
pass
def PtGetClientIDFromAvatarKey(avatarKey):
"""From a ptKey that points at an avatar, return the players clientID (integer)"""
pass
def PtGetClientName(avatarKey=None):
"""This will return the name of the client that is owned by the avatar
- avatarKey is the ptKey of the avatar to get the client name of.
If avatarKey is omitted then the local avatar is used"""
pass
def PtGetControlEvents(on, key):
"""Registers or unregisters for control event messages"""
pass
def PtGetDefaultDisplayParams():
"""Returns the default resolution and display settings"""
pass
def PtGetDefaultSpawnPoint():
"""Returns the default spawnpoint definition (as a ptSpawnPointInfo)"""
pass
def PtGetDesktopColorDepth():
"""Returns desktop ColorDepth"""
pass
def PtGetDesktopHeight():
"""Returns desktop height"""
pass
def PtGetDesktopWidth():
"""Returns desktop width"""
pass
def PtGetDialogFromString(dialogName):
"""Get a ptGUIDialog from its name"""
pass
def PtGetDialogFromTagID(tagID):
"""Returns the dialog associated with the tagID"""
pass
def PtGetDniTime():
"""Returns current D'Ni time"""
pass
def PtGetFrameDeltaTime():
"""Returns the amount of time that has elapsed since last frame."""
pass
def PtGetGameTime():
"""Returns the system game time (frame based) in seconds."""
pass
def PtGetInitPath():
"""Returns the unicode path to the client's init directory. Do NOT convert to a standard string."""
pass
def PtGetLanguage():
"""Returns the current language as a PtLanguage enum"""
pass
def PtGetLocalAvatar():
"""This will return a ptSceneobject of the local avatar
- if there is no local avatar a NameError exception will happen."""
pass
def PtGetLocalClientID():
"""Returns our local client ID number"""
pass
def PtGetLocalKILevel():
"""returns local player's ki level"""
pass
def PtGetLocalPlayer():
"""Returns a ptPlayer object of the local player"""
pass
def PtGetLocalizedString(name, arguments=None):
"""Returns the localized string specified by name (format is Age.Set.Name) and substitutes the arguments in the list of strings passed in as arguments."""
pass
def PtGetMouseTurnSensitivity():
"""Returns the sensitivity"""
pass
def PtGetNPCCount():
"""This will return the number of NPCs in the current age"""
pass
def PtGetNPCByID(npcID):
"""This will return the NPC with a specific ID"""
pass
def PtGetNumCameras():
"""returns camera stack size"""
pass
def PtGetNumParticles(key):
"""Key is the key of scene object host to particle system"""
pass
def PtGetNumRemotePlayers():
"""Returns the number of remote players in this Age with you."""
pass
def PtGetPlayerList():
"""Returns a list of ptPlayer objects of all the remote players"""
pass
def PtGetPlayerListDistanceSorted():
"""Returns a list of ptPlayers, sorted by distance"""
pass
def PtGetPrevAgeInfo():
"""Returns ptAgeInfoStruct of previous age visited"""
pass
def PtGetPrevAgeName():
"""Returns filename of previous age visited"""
pass
def PtGetPublicAgeList(ageName, cbObject=None):
"""Get list of public ages for the given age name.
cbObject, if supplied should have a method called gotPublicAgeList(self,ageList). ageList is a list of tuple(ptAgeInfoStruct,nPlayersInAge)"""
pass
def PtGetPythonLoggingLevel():
"""Returns the current level of python logging"""
pass
def PtGetServerTime():
"""Returns the current time on the server (which is GMT)"""
pass
def PtGetShadowVisDistance():
"""Returns the maximum shadow visibility distance"""
pass
def PtGetSupportedDisplayModes():
"""Returns a list of supported resolutions"""
pass
def PtGetTime():
"""Returns the number of seconds since the game was started."""
pass
def PtGetUserPath():
"""Returns the unicode path to the client's root user directory. Do NOT convert to a standard string."""
pass
def PtHideDialog(dialogName):
"""Hide a GUI dialog by name (does not unload dialog)"""
pass
def PtIsActivePlayerSet():
"""Returns whether or not an active player is set"""
pass
def PtIsCCRAway():
"""Returns current status of CCR dept"""
pass
def PtIsClickToTurn():
"""Is click-to-turn on?"""
pass
def PtIsCurrentBrainHuman():
"""Returns whether the local avatar current brain is the human brain"""
pass
def PtIsDemoMode():
"""Returns whether the game is in Demo mode or not"""
pass
def PtIsDialogLoaded(dialogName):
"""Test to see if a GUI dialog is loaded, by name"""
pass
def PtIsEnterChatModeKeyBound():
"""Returns whether the EnterChatMode is bound to a key"""
pass
def PtIsGUIModal():
"""Returns true if the GUI is displaying a modal dialog and blocking input"""
pass
def PtIsInternalRelease():
"""Returns whether the client is an internal build or not"""
pass
def PtIsMouseInverted():
"""Is the mouse currently inverted?"""
pass
def PtIsShadowsEnabled():
"""Returns whether shadows are currently turned on"""
pass
def PtIsSinglePlayerMode():
"""Returns whether the game is in single player mode or not"""
pass
def PtIsSubscriptionActive():
"""Returns true if the current player is a paying subscriber"""
pass
def PtKillParticles(timeRemaining,pctToKill,particleSystem):
"""Tells particleSystem to kill pctToKill percent of its particles"""
pass
def PtLimitAvatarLOD(LODlimit):
"""Sets avatar's LOD limit"""
pass
def PtLoadAvatarModel(modelName, spawnPoint, userStr = ""):
"""Loads an avatar model at the given spawn point. Assigns the user specified string to it."""
pass
def PtLoadBookGUI(guiName):
"""Loads the gui specified, a gui must be loaded before it can be used. If the gui is already loaded, doesn't do anything"""
pass
def PtLoadDialog(dialogName,selfKey=None,ageName=""):
"""Loads a GUI dialog by name and optionally set the Notify proc key
If the dialog is already loaded then it won't load it again"""
pass
def PtLoadJPEGFromDisk(filename,width,height):
"""The image will be resized to fit the width and height arguments. Set to 0 if resizing is not desired.
Returns a pyImage of the specified file."""
pass
def PtLocalAvatarIsMoving():
"""Returns true if the local avatar is moving (a movement key is held down)"""
pass
def PtLocalAvatarRunKeyDown():
"""Returns true if the run key is being held down for the local avatar"""
pass
def PtMaxListenDistSq():
"""Returns the maximum distance (squared) of the listen range"""
pass
def PtMaxListenListSize():
"""Returns the maximum listen number of players"""
pass
def PtNotifyOffererLinkAccepted(offerer):
"""Tell the offerer that we accepted the link offer"""
pass
def PtNotifyOffererLinkCompleted(offerer):
"""Tell the offerer that we completed the link"""
pass
def PtNotifyOffererLinkRejected(offerer):
"""Tell the offerer that we rejected the link offer"""
pass
def PtPageInNode(nodeName, ageName=""):
"""Pages in node, or a list of nodes"""
pass
def PtPageOutNode(nodeName):
"""Pages out a node"""
pass
def PtPrintToScreen(message):
"""Prints 'message' to the status log, for debug only."""
pass
def PtRateIt(chronicleName,dialogPrompt,onceFlag):
"""Shows a dialog with dialogPrompt and stores user input rating into chronicleName"""
pass
def PtRebuildCameraStack(name,ageName):
"""Push camera with this name on the stack"""
pass
def PtRecenterCamera():
"""re-centers the camera"""
pass
def PtRemovePublicAge(ageInstanceGuid, cbObject=None):
"""Remove a public instance of the given age.
cbObject, if supplied should have a member called publicAgeRemoved(self,ageInstanceGuid)"""
pass
def PtRequestLOSScreen(selfKey,ID,xPos,yPos,distance,what,reportType):
"""Request a LOS check from a point on the screen"""
pass
def PtSaveScreenShot(fileName,width=640,height=480,quality=75):
"""Takes a screenshot with the specified filename, size, and quality"""
pass
def PtSendChatToCCR(message,CCRPlayerID):
"""Sends a chat message to a CCR that has contacted this player"""
pass
def PtSendKIGZMarkerMsg(markerNumber,sender):
"""Same as PtSendKIMessageInt except 'sender' could get a notify message back
"""
pass
def PtSendKIMessage(command,value):
"""Sends a command message to the KI frontend.
See PlasmaKITypes.py for list of commands"""
pass
def PtSendKIMessageInt(command,value):
"""Same as PtSendKIMessage except the value is guaranteed to be a UInt32
(for things like player IDs)"""
pass
def PtSendPetitionToCCR(message,reason=0,title=""):
"""Sends a petition with a message to the CCR group"""
pass
def PtSendPrivateChatList(chatList):
"""Lock the local avatar into private vox messaging, and / or add new members to his chat list"""
pass
def PtSendRTChat(fromPlayer,toPlayerList,message,flags):
"""Sends a realtime chat message to the list of ptPlayers
If toPlayerList is an empty list, it is a broadcast message"""
pass
def PtSetActivePlayer(playerInt):
"""Sets the active player associated with the current account"""
pass
def PtSetAlarm(secs, cbObject, cbContext):
"""secs is the amount of time before your alarm goes off.
cbObject is a python object with the method onAlarm(int context)
cbContext is an integer."""
pass
def PtSetBehaviorLoopCount(behaviorKey,stage,loopCount,netForce):
"""This will set the loop count for a particular stage in a multistage behavior"""
pass
def PtSetBehaviorNetFlags(behKey, netForce, netProp):
"""Sets net flags on the associated behavior"""
pass
def PtSetClearColor(red,green,blue):
"""Set the clear color"""
pass
def PtSetClickToTurn(state):
"""Turns on click-to-turn"""
pass
def PtSetGamma2(gamma):
"""Set the gamma with gamma2 rules"""
pass
def PtSetGlobalClickability(enable):
"""Enable or disable all clickables on the local client"""
pass
def PtSetGraphicsOptions(width, height, colordepth, windowed, numAAsamples, numAnisoSamples, VSync):
"""Set the graphics options"""
pass
def PtSetLightAnimStart(key,name,start):
""" Key is the key of scene object host to light, start is a bool. Name is the name of the light to manipulate"""
pass
def PtSetLightValue(key,name,r,g,b,a):
""" Key is the key of scene object host to light. Name is the name of the light to manipulate"""
pass
def PtSetMouseInverted():
"""Inverts the mouse"""
pass
def PtSetMouseTurnSensitivity(sensitivity):
"""Set the mouse sensitivity"""
pass
def PtSetMouseUninverted():
"""Uninverts the mouse"""
pass
def PtSetOfferBookMode(selfkey,ageFilename,ageInstanceName):
"""Put us into the offer book interface"""
pass
def PtSetParticleDissentPoint(x, y, z, particlesys):
"""Sets the dissent point of the particlesys to x,y,z"""
pass
def PtSetParticleOffset(x,y,z,particlesys):
"""Sets the particlesys particle system's offset"""
pass
def PtSetPythonLoggingLevel(level):
"""Sets the current level of python logging"""
pass
def PtSetShadowVisDistance(distance):
"""Set the maximum shadow visibility distance"""
pass
def PtSetShareSpawnPoint(spawnPoint):
"""This sets the desired spawn point for the receiver to link to"""
pass
def PtShootBulletFromObject(selfkey, gunObj, radius, range):
"""Shoots a bullet from an object"""
pass
def PtShootBulletFromScreen(selfkey, xPos, yPos, radius, range):
"""Shoots a bullet from a position on the screen"""
pass
def PtShowDialog(dialogName):
"""Show a GUI dialog by name (does not load dialog)"""
pass
def PtStartScreenCapture(selfKey,width=800,height=600):
"""Starts a capture of the screen"""
pass
def PtToggleAvatarClickability(on):
"""Turns on and off our avatar's clickability"""
pass
def PtTransferParticlesToObject(objFrom, objTo, num):
"""Transfers num particles from objFrom to objTo"""
pass
def PtUnLoadAvatarModel(avatarKey):
"""Unloads the specified avatar model"""
pass
def PtUnloadAllBookGUIs():
"""Unloads all loaded guis except for the default one"""
pass
def PtUnloadBookGUI(guiName):
"""Unloads the gui specified. If the gui isn't loaded, doesn't do anything"""
pass
def PtUnloadDialog(dialogName):
"""This will unload the GUI dialog by name. If not loaded then nothing will happen"""
pass
def PtUpgradeVisitorToExplorer(playerInt):
"""Upgrades the player to explorer status"""
pass
def PtUsingUnicode():
"""Returns true if the current language is a unicode language (like Japanese)"""
pass
def PtValidateKey(key):
"""Returns true(1) if 'key' is valid and loaded,
otherwise returns false(0)"""
pass
def PtWasLocallyNotified(selfKey):
"""Returns 1 if the last notify was local or 0 if the notify originated on the network"""
pass
def PtWearDefaultClothing(key):
"""Forces the avatar to wear the default clothing set"""
pass
def PtWearDefaultClothingType(key,type):
    """Forces the avatar to wear the default clothing of the specified type"""
    pass
def PtWearMaintainerSuit(key,wearOrNot):
    """Wears (wearOrNot true) or removes (wearOrNot false) the maintainer suit of clothes"""
    pass
def PtWhatGUIControlType(guiKey):
    """Returns the control type of the GUI key passed in"""
    pass
def PtYesNoDialog(selfkey,dialogMessage):
    """Displays a Yes/No dialog to the user with the text dialogMessage.
    This dialog _has_ to be answered by the user,
    and their answer will be returned in a Notify message."""
    pass
class ptAgeInfoStruct:
    """Class to hold AgeInfo struct data"""
    def __init__(self):
        """Initializes an empty AgeInfo structure"""
        pass
    def copyFrom(self,other):
        """Copies data from one ptAgeInfoStruct or ptAgeInfoStructRef to this one"""
        pass
    def getAgeFilename(self):
        """Gets the Age's filename"""
        pass
    def getAgeInstanceGuid(self):
        """Get the Age's instance GUID"""
        pass
    def getAgeInstanceName(self):
        """Get the instance name of the Age"""
        pass
    def getAgeLanguage(self):
        """Gets the age's language (integer)"""
        pass
    def getAgeSequenceNumber(self):
        """Gets the unique sequence number"""
        pass
    def getAgeUserDefinedName(self):
        """Gets the user defined part of the Age name"""
        pass
    def getDisplayName(self):
        """Returns a string that is the displayable name of the age instance"""
        pass
    def setAgeFilename(self,filename):
        """Sets the filename of the Age"""
        pass
    def setAgeInstanceGuid(self,guid):
        """Sets the Age instance's GUID"""
        pass
    def setAgeInstanceName(self,instanceName):
        """Sets the instance name of the Age"""
        pass
    def setAgeLanguage(self,lang):
        """Sets the age's language (integer)"""
        pass
    def setAgeSequenceNumber(self,seqNumber):
        """Sets the unique sequence number"""
        pass
    def setAgeUserDefinedName(self,udName):
        """Sets the user defined part of the Age"""
        pass
class ptAgeInfoStructRef:
    """Class to hold a reference to AgeInfo struct data"""
    def __init__(self):
        """Initializes an empty AgeInfo structure reference"""
        pass
    def copyFrom(self,other):
        """Copies data from one ptAgeInfoStruct or ptAgeInfoStructRef to this one"""
        pass
    def getAgeFilename(self):
        """Gets the Age's filename"""
        pass
    def getAgeInstanceGuid(self):
        """Get the Age's instance GUID"""
        pass
    def getAgeInstanceName(self):
        """Get the instance name of the Age"""
        pass
    def getAgeSequenceNumber(self):
        """Gets the unique sequence number"""
        pass
    def getAgeUserDefinedName(self):
        """Gets the user defined part of the Age name"""
        pass
    def getDisplayName(self):
        """Returns a string that is the displayable name of the age instance"""
        pass
    def setAgeFilename(self,filename):
        """Sets the filename of the Age"""
        pass
    def setAgeInstanceGuid(self,guid):
        """Sets the Age instance's GUID"""
        pass
    def setAgeInstanceName(self,instanceName):
        """Sets the instance name of the Age"""
        pass
    def setAgeSequenceNumber(self,seqNumber):
        """Sets the unique sequence number"""
        pass
    def setAgeUserDefinedName(self,udName):
        """Sets the user defined part of the Age"""
        pass
class ptAgeLinkStruct:
    """Class to hold the data of the AgeLink structure"""
    def __init__(self):
        """Initializes an empty AgeLink structure"""
        pass
    def copyFrom(self,other):
        """Copies data from one ptAgeLinkStruct or ptAgeLinkStructRef to this one"""
        pass
    def getAgeInfo(self):
        """Returns a ptAgeInfoStructRef of the AgeInfo for this link"""
        pass
    def getLinkingRules(self):
        """Returns the linking rules of this link"""
        pass
    def getParentAgeFilename(self):
        """Returns a string of the parent age filename"""
        pass
    def getSpawnPoint(self):
        """Gets the spawn point ptSpawnPointInfoRef of this link"""
        pass
    def setAgeInfo(self,ageInfo):
        """Sets the AgeInfoStruct from the data in ageInfo (a ptAgeInfoStruct)"""
        pass
    def setLinkingRules(self,rule):
        """Sets the linking rules for this link"""
        pass
    def setParentAgeFilename(self,filename):
        """Sets the parent age filename for child age links"""
        pass
    def setSpawnPoint(self,spawnPtInfo):
        """Sets the spawn point of this link (a ptSpawnPointInfo or ptSpawnPointInfoRef)"""
        pass
class ptAgeLinkStructRef:
    """Class to hold a reference to the data of the AgeLink structure"""
    def __init__(self):
        """Initializes an empty AgeLink structure reference"""
        pass
    def copyFrom(self,other):
        """Copies data from one ptAgeLinkStruct or ptAgeLinkStructRef to this one"""
        pass
    def getAgeInfo(self):
        """Returns a ptAgeInfoStructRef of the AgeInfo for this link"""
        pass
    def getLinkingRules(self):
        """Returns the linking rules of this link"""
        pass
    def getSpawnPoint(self):
        """Gets the spawn point ptSpawnPointInfoRef of this link"""
        pass
    def setAgeInfo(self,ageInfo):
        """Sets the AgeInfoStruct from the data in ageInfo (a ptAgeInfoStruct)"""
        pass
    def setLinkingRules(self,rule):
        """Sets the linking rules for this link"""
        pass
    def setSpawnPoint(self,spawnPtInfo):
        """Sets the spawn point of this link (a ptSpawnPointInfo or ptSpawnPointInfoRef)"""
        pass
class ptAgeVault:
    """Accessor class to the Age's vault"""
    def __init__(self):
        """Creates an accessor to the current Age's vault"""
        pass
    def addChronicleEntry(self,name,type,value):
        """Adds a chronicle entry with the specified type and value"""
        pass
    def addDevice(self,deviceName,cb=None,cbContext=0):
        """Adds a device to the age"""
        pass
    def findChronicleEntry(self,entryName):
        """Returns the named ptVaultChronicleNode"""
        pass
    def getAgeDevicesFolder(self):
        """Returns a ptVaultFolderNode of the inboxes for the devices in this Age."""
        pass
    def getAgeGuid(self):
        """Returns the current Age's guid as a string."""
        pass
    def getAgeInfo(self):
        """Returns a ptVaultAgeInfoNode of this Age"""
        pass
    def getAgeSDL(self):
        """Returns the age's SDL (ptSDLStateDataRecord)"""
        pass
    def getAgesIOwnFolder(self):
        """(deprecated, use getBookshelfFolder) Returns a ptVaultFolderNode that contain the Ages I own"""
        pass
    def getBookshelfFolder(self):
        """Personal age only: Returns a ptVaultFolderNode that contains the owning player's AgesIOwn age list"""
        pass
    def getChronicleFolder(self):
        """Returns a ptVaultFolderNode"""
        pass
    def getDevice(self,deviceName):
        """Returns the specified device (ptVaultTextNoteNode)"""
        pass
    def getDeviceInbox(self,deviceName):
        """Returns a ptVaultFolderNode of the inbox for the named device in this age."""
        pass
    def getPeopleIKnowAboutFolder(self):
        """Returns a ptVaultPlayerInfoListNode of the players the Age knows about(?)."""
        pass
    def getPublicAgesFolder(self):
        """Returns a ptVaultFolderNode that contains all the public Ages"""
        pass
    def getSubAgeLink(self,ageInfo):
        """Returns a ptVaultAgeLinkNode to 'ageInfo' (a ptAgeInfoStruct) for this Age."""
        pass
    def getSubAgesFolder(self):
        """Returns a ptVaultFolderNode of sub Age's folder."""
        pass
    def hasDevice(self,deviceName):
        """Does a device with this name exist?"""
        pass
    def removeDevice(self,deviceName):
        """Removes a device from the age"""
        pass
    def setDeviceInbox(self,deviceName,inboxName,cb=None,cbContext=0):
        """Sets the device's inbox"""
        pass
    def updateAgeSDL(self,pyrec):
        """Updates the age's SDL"""
        pass
class ptAnimation:
    """Plasma animation class"""
    def __init__(self,key=None):
        """Creates an animation accessor, optionally attached to 'key'"""
        pass
    def addKey(self,key):
        """Adds an animation modifier to the list of receiver keys"""
        pass
    def backwards(self,backwardsFlag):
        """Turn on and off playing the animation backwards"""
        pass
    def getFirstKey(self):
        """This will return a ptKey object that is the first receiver (target)
        However, if the parent is not a modifier or not loaded, then None is returned."""
        pass
    def incrementBackward(self):
        """Step the animation backward a frame"""
        pass
    def incrementForward(self):
        """Step the animation forward a frame"""
        pass
    def looped(self,loopedFlag):
        """Turn on and off looping of the animation"""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object"""
        pass
    def play(self):
        """Plays the animation"""
        pass
    def playRange(self,start,end):
        """Play the animation from start to end"""
        pass
    def playToPercentage(self,zeroToOne):
        """Play the animation to the specified percentage (0 to 1)"""
        pass
    def playToTime(self,time):
        """Play the animation to the specified time"""
        pass
    def resume(self):
        """Resumes the animation from where it was stopped last"""
        pass
    def sender(self,selfKey):
        """Sets the sender of the messages being sent to the animation modifier"""
        pass
    def setAnimName(self,name):
        """Sets the animation notetrack name (or (Entire Animation))"""
        pass
    def setLoopEnd(self,loopEnd):
        """Sets the loop ending position
        - 'loopEnd' is the number of seconds from the absolute beginning of the animation"""
        pass
    def setLoopStart(self,loopStart):
        """Sets the loop starting position
        - 'loopStart' is the number of seconds from the absolute beginning of the animation"""
        pass
    def skipToBegin(self):
        """Skip to the beginning of the animation (don't play)"""
        pass
    def skipToEnd(self):
        """Skip to the end of the animation (don't play)"""
        pass
    def skipToLoopBegin(self):
        """Skip to the beginning of the animation loop (don't play)"""
        pass
    def skipToLoopEnd(self):
        """Skip to the end of the animation loop (don't play)"""
        pass
    def skipToTime(self,time):
        """Skip the animation to time (don't play)"""
        pass
    def speed(self,speed):
        """Sets the animation playback speed"""
        pass
    def stop(self):
        """Stops the animation"""
        pass
class ptAudioControl:
    """Accessor class to the Audio controls"""
    def __init__(self):
        """Creates an accessor to the audio system controls"""
        pass
    def canSetMicLevel(self):
        """Can the microphone level be set? Returns 1 if true otherwise returns 0."""
        pass
    def disable(self):
        """Disables audio"""
        pass
    def enable(self):
        """Enables audio"""
        pass
    def enableVoiceChat(self,state):
        """Enables or disables voice chat."""
        pass
    def enableVoiceCompression(self,state):
        """Enables or disables voice compression."""
        pass
    def enableVoiceNetBroadcast(self,state):
        """Enables or disables voice over network broadcast."""
        pass
    def enableVoiceRecording(self,state):
        """Enables or disables voice recording."""
        pass
    def getAmbienceVolume(self):
        """Returns the volume (0.0 to 1.0) for the Ambience."""
        pass
    def getAudioDeviceName(self,index):
        """Gets the name of audio device for the given index"""
        pass
    def getDeviceName(self):
        """Gets the name for the device being used by the audio system"""
        pass
    def getGUIVolume(self):
        """Returns the volume (0.0 to 1.0) for the GUI dialogs."""
        pass
    def getHighestMode(self):
        """Gets the highest possible audio system mode"""
        pass
    def getMicLevel(self):
        """Returns the microphone recording level (0.0 to 1.0)."""
        pass
    def getMode(self):
        """Gets the audio system mode"""
        pass
    def getMusicVolume(self):
        """Returns the volume (0.0 to 1.0) for the Music."""
        pass
    def getNPCVoiceVolume(self):
        """Returns the volume (0.0 to 1.0) for the NPC's voice."""
        pass
    def getNumAudioDevices(self):
        """Returns the number of available audio devices."""
        pass
    def getPriorityCutoff(self):
        """Returns current sound priority"""
        pass
    def getSoundFXVolume(self):
        """Returns the volume (0.0 to 1.0) for the Sound FX."""
        pass
    def getVoiceVolume(self):
        """Returns the volume (0.0 to 1.0) for the Voices."""
        pass
    def hideIcons(self):
        """Hides (disables) the voice recording icons."""
        pass
    def isEnabled(self):
        """Is the audio enabled? Returns 1 if true otherwise returns 0."""
        pass
    def isHardwareAccelerated(self):
        """Is audio hardware acceleration enabled? Returns 1 if true otherwise returns 0."""
        pass
    def isMuted(self):
        """Are all sounds muted? Returns 1 if true otherwise returns 0."""
        pass
    def isUsingEAXAcceleration(self):
        """Is EAX sound acceleration enabled? Returns 1 if true otherwise returns 0."""
        pass
    def isVoiceCompressionEnabled(self):
        """Is voice compression enabled? Returns 1 if true otherwise returns 0."""
        pass
    def isVoiceNetBroadcastEnabled(self):
        """Is voice over net enabled? Returns 1 if true otherwise returns 0."""
        pass
    def isVoiceRecordingEnabled(self):
        """Is voice recording enabled? Returns 1 if true otherwise returns 0."""
        pass
    def muteAll(self):
        """Mutes all sounds."""
        pass
    def pushToTalk(self,state):
        """Enables or disables 'push-to-talk'."""
        pass
    def recordFrame(self,size):
        """Sets the voice packet frame size."""
        pass
    def recordSampleRate(self,sampleRate):
        """Sets the recording sample rate."""
        pass
    def setAmbienceVolume(self,volume):
        """Sets the Ambience volume (0.0 to 1.0) for the game.
        This only sets the volume for this game session."""
        pass
    def setDeviceName(self,devicename,restart):
        """Sets the device name for the audio system, and optionally restarts it"""
        pass
    def setGUIVolume(self,volume):
        """Sets the GUI dialog volume (0.0 to 1.0) for the game.
        This only sets the volume for this game session."""
        pass
    def setLoadOnDemand(self,state):
        """Enables or disables the load on demand for sounds."""
        pass
    def setMicLevel(self,level):
        """Sets the microphone recording level (0.0 to 1.0)."""
        pass
    def setMode(self,mode):
        """Sets the audio system mode"""
        pass
    def setMusicVolume(self,volume):
        """Sets the Music volume (0.0 to 1.0) for the game.
        This only sets the volume for this game session."""
        pass
    def setNPCVoiceVolume(self,volume):
        """Sets the NPC's voice volume (0.0 to 1.0) for the game.
        This only sets the volume for this game session."""
        pass
    def setPriorityCutoff(self,priority):
        """Sets the sound priority"""
        pass
    def setSoundFXVolume(self,volume):
        """Sets the SoundFX volume (0.0 to 1.0) for the game.
        This only sets the volume for this game session."""
        pass
    def setTwoStageLOD(self,state):
        """Enables or disables two-stage LOD, where sounds can be loaded into RAM but not into sound buffers.
        ...Less of a performance hit, harder on memory."""
        pass
    def setVoiceVolume(self,volume):
        """Sets the Voice volume (0.0 to 1.0) for the game.
        This only sets the volume for this game session."""
        pass
    def showIcons(self):
        """Shows (enables) the voice recording icons."""
        pass
    def squelchLevel(self,level):
        """Sets the squelch level."""
        pass
    def supportsEAX(self):
        """Returns true or false based on whether or not the specified device supports EAX"""
        pass
    def unmuteAll(self):
        """Unmutes all sounds."""
        pass
    def useEAXAcceleration(self,state):
        """Enables or disables EAX sound acceleration (requires hardware acceleration)."""
        pass
    def useHardwareAcceleration(self,state):
        """Enables or disables audio hardware acceleration."""
        pass
class ptAvatar:
    """Plasma avatar class"""
    def __init__(self):
        """Creates an avatar accessor"""
        pass
    def addWardrobeClothingItem(self,clothing_name,tint1,tint2):
        """To add a clothing item to the avatar's wardrobe (closet)"""
        pass
    def enterSubWorld(self,sceneobject):
        """Places the avatar into the subworld of the ptSceneObject specified"""
        pass
    def exitSubWorld(self):
        """Exits the avatar from the subWorld where it was"""
        pass
    def getAllWithSameMesh(self,clothing_name):
        """Returns a list of all clothing items that use the same mesh as the specified one"""
        pass
    def getAvatarClothingGroup(self):
        """Returns what clothing group the avatar belongs to.
        It is also a means to determine if avatar is male or female"""
        pass
    def getAvatarClothingList(self):
        """Returns a list of clothes that the avatar is currently wearing."""
        pass
    def getClosetClothingList(self,clothing_type):
        """Returns a list of clothes for the avatar that are in specified clothing group."""
        pass
    def getCurrentMode(self):
        """Returns current brain mode for avatar"""
        pass
    def getEntireClothingList(self,clothing_type):
        """Gets the entire list of clothing available. 'clothing_type' not used
        NOTE: should use getClosetClothingList"""
        pass
    def getMatchingClothingItem(self,clothingName):
        """Finds the matching clothing item that goes with 'clothingName'
        Used to find matching left and right gloves and shoes."""
        pass
    def getMorph(self,clothing_name,layer):
        """Get the current morph value"""
        pass
    def getSkinBlend(self,layer):
        """Get the current skin blend value"""
        pass
    def getTintClothingItem(self,clothing_name,layer=1):
        """Returns a ptColor of a particular item of clothing that the avatar is wearing.
        The color will be a ptColor object."""
        pass
    def getTintSkin(self):
        """Returns a ptColor of the current skin tint for the avatar"""
        pass
    def getUniqueMeshList(self,clothing_type):
        """Returns a list of unique clothing items of the desired type (different meshes)"""
        pass
    def getWardrobeClothingList(self):
        """Return a list of items that are in the avatars closet"""
        pass
    def gotoStage(self,behaviorKey,stage,transitionTime,setTimeFlag,newTime,SetDirectionFlag,isForward,netForce):
        """Tells a multistage behavior to go to a particular stage"""
        pass
    def loadClothingFromFile(self,filename):
        """Load avatar clothing from a file"""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object"""
        pass
    def nextStage(self,behaviorKey,transitionTime,setTimeFlag,newTime,SetDirectionFlag,isForward,netForce):
        """Tells a multistage behavior to go to the next stage (Why does Matt like so many parameters?)"""
        pass
    def oneShot(self,seekKey,duration,usePhysicsFlag,animationName,drivableFlag,reversibleFlag):
        """Plays a one-shot animation on the avatar"""
        pass
    def playSimpleAnimation(self,animName):
        """Play simple animation on avatar"""
        pass
    def previousStage(self,behaviorKey,transitionTime,setTimeFlag,newTime,SetDirectionFlag,isForward,netForce):
        """Tells a multistage behavior to go to the previous stage"""
        pass
    def registerForBehaviorNotify(self,selfKey):
        """This will register for behavior notifies from the avatar"""
        pass
    def removeClothingItem(self,clothing_name,update=1):
        """Tells the avatar to remove a particular item of clothing."""
        pass
    def runBehavior(self,behaviorKey,netForceFlag):
        """Runs a behavior on the avatar. Can be a single or multi-stage behavior."""
        pass
    def runBehaviorSetNotify(self,behaviorKey,replyKey,netForceFlag):
        """Same as runBehavior, except send notifications to specified keyed object"""
        pass
    def runCoopAnim(self,targetKey,activeAvatarAnim,targetAvatarAnim,range=6,dist=3,move=1):
        """Seek near another avatar and run animations on both."""
        pass
    def saveClothing(self):
        """Saves the current clothing options (including morphs) to the vault"""
        pass
    def saveClothingToFile(self,filename):
        """Save avatar clothing to a file"""
        pass
    def setMorph(self,clothing_name,layer,value):
        """Set the morph value (clipped between -1 and 1)"""
        pass
    def setReplyKey(self,key):
        """Sets the sender's key"""
        pass
    def setSkinBlend(self,layer,value):
        """Set the skin blend (value between 0 and 1)"""
        pass
    def tintClothingItem(self,clothing_name,tint,update=1):
        """Tells the avatar to tint(color) a particular item of clothing that they are already wearing.
        'tint' is a ptColor object"""
        pass
    def tintClothingItemLayer(self,clothing_name,tint,layer,update=1):
        """Tells the avatar to tint(color) a particular layer of a particular item of clothing."""
        pass
    def tintSkin(self,tint,update=1):
        """Tints all of the skin on the avatar, with the ptColor tint"""
        pass
    def unRegisterForBehaviorNotify(self,selfKey):
        """This will unregister behavior notifications"""
        pass
    def wearClothingItem(self,clothing_name,update=1):
        """Tells the avatar to wear a particular item of clothing.
        And optionally hold update until later (for applying tinting before wearing)."""
        pass
class ptBook:
    """Creates a new book"""
    def __init__(self,esHTMLSource,coverImage=None,callbackKey=None,guiName=''):
        """Creates a book from esHTML source, with optional cover image, callback key, and GUI name"""
        pass
    def allowPageTurning(self,allow):
        """Turns on and off the ability to flip the pages in a book"""
        pass
    def close(self):
        """Closes the book"""
        pass
    def closeAndHide(self):
        """Closes the book and hides it once it finishes animating"""
        pass
    def getCurrentPage(self):
        """Returns the currently shown page"""
        pass
    def getEditableText(self):
        """Returns the editable text currently contained in the book."""
        pass
    def getMovie(self,index):
        """Grabs a ptAnimation object representing the movie indexed by index. The index is the index of the movie in the source code"""
        pass
    def goToPage(self,page):
        """Flips the book to the specified page"""
        pass
    def hide(self):
        """Hides the book"""
        pass
    def nextPage(self):
        """Flips the book to the next page"""
        pass
    def open(self,startingPage):
        """Opens the book to the specified page"""
        pass
    def previousPage(self):
        """Flips the book to the previous page"""
        pass
    def setEditable(self,editable):
        """Turn book editing on or off. If the book GUI does not support editing, nothing will happen"""
        pass
    def setEditableText(self,text):
        """Sets the book's editable text."""
        pass
    def setGUI(self,guiName):
        """Sets the gui to be used by the book, if the requested gui is not loaded, it will use the default
        Do not call while the book is open!"""
        pass
    def setPageMargin(self,margin):
        """Sets the text margin for the book"""
        pass
    def setSize(self,width,height):
        """Sets the size of the book (width and height are floats from 0 to 1)"""
        pass
    def show(self,startOpened):
        """Shows the book closed, or open if the startOpened flag is true"""
        pass
class ptCCRAge:
    """CCR only: CCR age info struct"""
    def __init__(self):
        """Initializes an empty CCR age info struct"""
        pass
class ptCCRMgr:
    """CCR only: accessor class to the CCR manager"""
    def __init__(self):
        """Creates an accessor to the CCR manager"""
        pass
    def banLinking(self,pid, banFlag):
        """Set the ban linking flag for a player"""
        pass
    def beginCommunication(self,pid, message):
        """Begin a CCR communication with a player"""
        pass
    def clippingOff(self):
        """Disables clipping for this player"""
        pass
    def clippingOn(self):
        """Enables clipping for this player"""
        pass
    def endCommunication(self,pid):
        """End CCR communications with a player"""
        pass
    def getClipping(self):
        """Is clipping on for this player? Returns 1 if true otherwise returns 0"""
        pass
    def getErrorString(self,errorNumber):
        """Returns the error string that corresponds to 'errorNumber'"""
        pass
    def getLevel(self):
        """Returns the current CCR level for this player"""
        pass
    def getPlayerInfo(self,player, cbObject, cbContext):
        """Finds a player that matches 'player' (which is an id or name)."""
        pass
    def linkPlayerHere(self,pid):
        """Links player to where I am"""
        pass
    def linkPlayerToAge(self,ageInfoStruct,pid):
        """Links player to a specified age"""
        pass
    def linkToAge(self,age,pid):
        """Links to player's version of age"""
        pass
    def linkToMyNeighborhoodAge(self,pid):
        """Links this player to their neighborhood"""
        pass
    def linkToMyPersonalAge(self,pid):
        """Links this player to their personal Age."""
        pass
    def linkToPlayersAge(self,pid):
        """Link to where the player is"""
        pass
    def logMessage(self,message):
        """Logs 'message' somewhere...?"""
        pass
    def makeInvisible(self,level):
        """Makes this player invisible to 'level'"""
        pass
    def sendCommunication(self,pid, message):
        """Send a CCR communication to a player"""
        pass
    def setAwayStatus(self,awayFlag):
        """Set the away flag for CCRs"""
        pass
    def silencePlayer(self,pid, silenceFlag):
        """Set the silence player flag for a player"""
        pass
    def systemMessage(self):
        """Send a system wide CCR message.
        NOTE(review): the original doc listed a 'message' parameter that this
        stub's signature does not show -- confirm against the engine glue."""
        pass
    def toggleClipping(self):
        """Toggles clipping for this player"""
        pass
    def warpPlayerHere(self,pid):
        """warp the player to here"""
        pass
    def warpToPlayer(self,pid):
        """warp to where the player is"""
        pass
class ptCCRPlayerInfo:
    """CCR only: CCR player info struct"""
    def __init__(self):
        """Initializes an empty CCR player info struct"""
        pass
class ptCamera:
    """Plasma camera class"""
    def __init__(self):
        """Creates a camera accessor"""
        pass
    def controlKey(self,controlKey,activateFlag):
        """Send a control key to the camera as if it was hit by the user.
        This is for sending things like pan-up, pan-down, zoom-in, etc."""
        pass
    def disableFirstPersonOverride(self):
        """Does _not_ allow the user to override the camera to go to first person camera."""
        pass
    def enableFirstPersonOverride(self):
        """Allows the user to override the camera and go to a first person camera."""
        pass
    def getFOV(self):
        """Returns the current camera's FOV(h)"""
        pass
    def isSmootherCam(self):
        """Returns true if we are using the faster cams thing"""
        pass
    def isStayInFirstPerson(self):
        """Are we staying in first person?"""
        pass
    def isWalkAndVerticalPan(self):
        """Returns true if we are walking and chewing gum"""
        pass
    def restore(self,cameraKey):
        """Restores camera to saved one"""
        pass
    def save(self,cameraKey):
        """Saves the current camera and sets the camera to cameraKey"""
        pass
    def set(self,cameraKey,time,save):
        """DO NOT USE"""
        pass
    def setFOV(self,fov, time):
        """Sets the current cameras FOV (based on h)"""
        pass
    def setSmootherCam(self,state):
        """Set the faster cams thing"""
        pass
    def setStayInFirstPerson(self,state):
        """Set Stay In First Person Always"""
        pass
    def setWalkAndVerticalPan(self,state):
        """Set Walk and chew gum"""
        pass
    def undoFirstPerson(self):
        """If the user has overridden the camera to be in first person, this will take them out of first person.
        If the user didn't override the camera, then this will do nothing."""
        pass
class ptCluster:
    """Creates a new ptCluster.

    NOTE(review): the generated stub named the parameters 'ey' and
    'isible' -- clearly truncated forms of 'key' and 'visible' (every
    sibling class in this file uses 'key'/'state' style names); restored
    here. Positional callers are unaffected.
    """
    def __init__(self,key):
        """Creates the cluster object for the given ptKey"""
        pass
    def setVisible(self,visible):
        """Shows or hides the cluster object"""
        pass
class ptColor:
    """Plasma color class"""
    def __init__(self,red=0, green=0, blue=0, alpha=0):
        """Creates a color with the given red, green, blue and alpha components"""
        pass
    def black(self):
        """Sets the color to be black
        Example: black = ptColor().black()"""
        pass
    def blue(self):
        """Sets the color to be blue
        Example: blue = ptColor().blue()"""
        pass
    def brown(self):
        """Sets the color to be brown
        Example: brown = ptColor().brown()"""
        pass
    def cyan(self):
        """Sets the color to be cyan
        Example: cyan = ptColor().cyan()"""
        pass
    def darkbrown(self):
        """Sets the color to be darkbrown
        Example: darkbrown = ptColor().darkbrown()"""
        pass
    def darkgreen(self):
        """Sets the color to be darkgreen
        Example: darkgreen = ptColor().darkgreen()"""
        pass
    def darkpurple(self):
        """Sets the color to be darkpurple
        Example: darkpurple = ptColor().darkpurple()"""
        pass
    def getAlpha(self):
        """Get the alpha blend component of the color"""
        pass
    def getBlue(self):
        """Get the blue component of the color"""
        pass
    def getGreen(self):
        """Get the green component of the color"""
        pass
    def getRed(self):
        """Get the red component of the color"""
        pass
    def gray(self):
        """Sets the color to be gray
        Example: gray = ptColor().gray()"""
        pass
    def green(self):
        """Sets the color to be green
        Example: green = ptColor().green()"""
        pass
    def magenta(self):
        """Sets the color to be magenta
        Example: magenta = ptColor().magenta()"""
        pass
    def maroon(self):
        """Sets the color to be maroon
        Example: maroon = ptColor().maroon()"""
        pass
    def navyblue(self):
        """Sets the color to be navyblue
        Example: navyblue = ptColor().navyblue()"""
        pass
    def orange(self):
        """Sets the color to be orange
        Example: orange = ptColor().orange()"""
        pass
    def pink(self):
        """Sets the color to be pink
        Example: pink = ptColor().pink()"""
        pass
    def red(self):
        """Sets the color to be red
        Example: red = ptColor().red()"""
        pass
    def setAlpha(self,alpha):
        """Set the alpha blend component of the color. 0.0 to 1.0"""
        pass
    def setBlue(self,blue):
        """Set the blue component of the color. 0.0 to 1.0"""
        pass
    def setGreen(self,green):
        """Set the green component of the color. 0.0 to 1.0"""
        pass
    def setRed(self,red):
        """Set the red component of the color. 0.0 to 1.0"""
        pass
    def slateblue(self):
        """Sets the color to be slateblue
        Example: slateblue = ptColor().slateblue()"""
        pass
    def steelblue(self):
        """Sets the color to be steelblue
        Example: steelblue = ptColor().steelblue()"""
        pass
    def tan(self):
        """Sets the color to be tan
        Example: tan = ptColor().tan()"""
        pass
    def white(self):
        """Sets the color to be white
        Example: white = ptColor().white()"""
        pass
    def yellow(self):
        """Sets the color to be yellow
        Example: yellow = ptColor().yellow()"""
        pass
class ptCritterBrain:
    """Object to manipulate critter brains"""
    def __init__(self):
        """Creates a critter brain accessor"""
        pass
    def addBehavior(self,animName, behaviorName, loop = 1, randomStartPos = 1, fadeInLen = 2.0, fadeOutLen = 2.0):
        """Adds a new animation to the brain as a behavior with the specified name and parameters. If multiple animations are assigned to the same behavior, they will be randomly picked from when started."""
        pass
    def addReceiver(self,key):
        """Tells the brain that the specified key wants AI messages"""
        pass
    def animationName(self,behavior):
        """Returns the animation name associated with the specified integral behavior."""
        pass
    def atGoal(self):
        """Are we currently at our final destination?"""
        pass
    def avoidingAvatars(self):
        """Are we currently avoiding avatars while pathfinding?"""
        pass
    def behaviorName(self,behavior):
        """Returns the behavior name associated with the specified integral behavior."""
        pass
    def canHearAvatar(self,avatarID):
        """Returns whether this brain can hear the avatar with the specified id."""
        pass
    def canSeeAvatar(self,avatarID):
        """Returns whether this brain can see the avatar with the specified id."""
        pass
    def curBehavior(self):
        """Returns the current integral behavior the brain is running."""
        pass
    def currentGoal(self):
        """Returns the current ptPoint that the brain is running towards."""
        pass
    def getHearingDistance(self):
        """Returns how far away the brain can hear."""
        pass
    def getSceneObject(self):
        """Returns the ptSceneObject this brain controls."""
        pass
    def getSightCone(self):
        """Returns the width of the brain's field of view in radians."""
        pass
    def getSightDistance(self):
        """Returns how far the brain can see."""
        pass
    def getStopDistance(self):
        """Returns how far away from the goal we could be and still be considered there."""
        pass
    def goToGoal(self,newGoal, avoidingAvatars = 0):
        """Tells the brain to start running towards the specified location, avoiding avatars it can see or hear if told to."""
        pass
    def idleBehaviorName(self):
        """Returns the name of the brain's idle behavior."""
        pass
    def nextBehavior(self):
        """Returns the behavior the brain will be switching to next frame. (-1 if no change)"""
        pass
    def playersICanHear(self):
        """Returns a list of player ids which this brain can hear."""
        pass
    def playersICanSee(self):
        """Returns a list of player ids which this brain can see."""
        pass
    def removeReceiver(self,key):
        """Tells the brain that the specified key no longer wants AI messages"""
        pass
    def runBehaviorName(self):
        """Returns the name of the brain's run behavior."""
        pass
    def runningBehavior(self,behaviorName):
        """Returns true if the named behavior is running."""
        pass
    def setHearingDistance(self,dist):
        """Set how far away the brain can hear (360 degree field of hearing)."""
        pass
    def setSightCone(self,radians):
        """Set how wide the brain's field of view is in radians. Note that it is the total angle of the cone, half on one side of the brain's line of sight, half on the other."""
        pass
    def setSightDistance(self,dist):
        """Set how far away the brain can see."""
        pass
    def setStopDistance(self,dist):
        """Set how far away from the goal we should be when we are considered there and stop running."""
        pass
    def startBehavior(self,behaviorName, fade = 1):
        """Starts playing the named behavior. If fade is true, it will fade out the previous behavior and fade in the new one. If false, they will immediately switch."""
        pass
    def vectorToPlayer(self,avatarID):
        """Returns the vector between us and the specified player."""
        pass
class ptDniCoordinates:
    """Constructor for a D'Ni coordinate"""
    def __init__(self):
        """Creates an empty D'Ni coordinate"""
        pass
    def fromPoint(self,pt):
        """Update these coordinates with the specified ptPoint3"""
        pass
    def getHSpans(self):
        """Returns the HSpans component of the coordinate"""
        pass
    def getTorans(self):
        """Returns the Torans component of the coordinate"""
        pass
    def getVSpans(self):
        """Returns the VSpans component of the coordinate"""
        pass
    def update(self):
        """Update these coordinates with the player's current position"""
        pass
class ptDniInfoSource:
    """DO NOT USE"""
    def __init__(self):
        """DO NOT USE"""
        pass
    def getAgeCoords(self):
        """Current coords of the player in current age as a ptDniCoordinates"""
        pass
    def getAgeGuid(self):
        """Unique identifier for this age instance"""
        pass
    def getAgeName(self):
        """Name of current age"""
        pass
    def getAgeTime(self):
        """Current time in current age (tbd)"""
        pass
class ptDraw:
    """Plasma Draw class"""
    def __init__(self):
        """Creates a draw accessor"""
        pass
    def disable(self):
        """Disables the draw on the sceneobject attached
        In other words, makes it invisible"""
        pass
    def enable(self,state=1):
        """Sets the draw enable for the sceneobject attached"""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object"""
        pass
class ptDynamicMap:
    """Creates a ptDynamicMap object"""
    def __init__(self, key=None):
        """None"""
    def addKey(self, key):
        """Add a receiver... in other words a DynamicMap"""
    def calcTextExtents(self, text):
        """Calculates the extent of the specified text, returns it as a (width, height) tuple"""
    def clearKeys(self):
        """Clears the receiver list"""
    def clearToColor(self, color):
        """Clear the DynamicMap to the specified color
        - 'color' is a ptColor object"""
    def drawImage(self, x, y, image, respectAlphaFlag):
        """Draws a ptImage object on the dynamicTextmap starting at the location x,y"""
    def drawImageClipped(self, x, y, image, cx, cy, cw, ch, respectAlphaFlag):
        """Draws a ptImage object clipped to cx,cy with cw(width),ch(height)"""
    def drawText(self, x, y, text):
        """Draw text at a specified location
        - x,y is the point to start drawing the text
        - 'text' is a string of the text to be drawn"""
    def fillRect(self, left, top, right, bottom, color):
        """Fill in the specified rectangle with a color
        - left,top,right,bottom define the rectangle
        - 'color' is a ptColor object"""
    def flush(self):
        """Flush all the commands that were issued since the last flush()"""
    def frameRect(self, left, top, right, bottom, color):
        """Frame a rectangle with a specified color
        - left,top,right,bottom define the rectangle
        - 'color' is a ptColor object"""
    def getHeight(self):
        """Returns the height of the dynamicTextmap"""
    def getImage(self):
        """Returns a pyImage associated with the dynamicTextmap"""
    def getWidth(self):
        """Returns the width of the dynamicTextmap"""
    def netForce(self, forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object
        This only applies when NetPropagate is set to true"""
    def netPropagate(self, propagateFlag):
        """Specify whether this object needs to use messages that are sent on the network
        - The default is for this to be false."""
    def purgeImage(self):
        """Purge the DynamicTextMap images"""
    def sender(self, sender):
        """Set the sender of the message being sent to the DynamicMap"""
    def setClipping(self, clipLeft, clipTop, clipRight, clipBottom):
        """Sets the clipping rectangle
        - All drawtext will be clipped to this until the
        unsetClipping() is called"""
    def setFont(self, facename, size):
        """Set the font of the text to be written
        - 'facename' is a string with the name of the font
        - 'size' is the point size of the font to use"""
    def setJustify(self, justify):
        """Sets the justification of the text. (justify is a PtJustify)"""
    def setLineSpacing(self, spacing):
        """Sets the line spacing (in pixels)"""
    def setTextColor(self, color, blockRGB=0):
        """Set the color of the text to be written
        - 'color' is a ptColor object
        - 'blockRGB' must be true if you're trying to render onto a transparent or semi-transparent color"""
    def setWrapping(self, wrapWidth, wrapHeight):
        """Set where text will be wrapped horizontally and vertically
        - All drawtext commands will be wrapped until the
        unsetWrapping() is called"""
    def unsetClipping(self):
        """Stop the clipping of text"""
    def unsetWrapping(self):
        """Stop text wrapping"""
class ptGameScore:
    """Plasma Game Score"""
    def __init__(self):
        """None"""
    def addPoints(self, points, key=None):
        """Adds points to the score"""
    @staticmethod
    def createAgeScore(scoreName, type, points=0, key=None):
        """Creates a new score associated with this age"""
    @staticmethod
    def createGlobalScore(scoreName, type, points=0, key=None):
        """Creates a new global score"""
    @staticmethod
    def createPlayerScore(scoreName, type, points=0, key=None):
        """Creates a new score associated with this player"""
    @staticmethod
    def createScore(ownerID, scoreName, type, points=0, key=None):
        """Creates a new score for an arbitrary owner"""
    @staticmethod
    def findAgeScores(scoreName, key):
        """Finds matching scores for this age"""
    @staticmethod
    def findAgeHighScores(name, maxScores, key):
        """Finds the highest matching scores for the current age's owners"""
    @staticmethod
    def findGlobalScores(scoreName, key):
        """Finds matching global scores"""
    @staticmethod
    def findGlobalHighScores(name, maxScores, key):
        """Finds the highest matching scores"""
    @staticmethod
    def findPlayerScores(scoreName, key):
        """Finds matching player scores"""
    @staticmethod
    def findScores(ownerID, scoreName, key):
        """Finds matching scores for an arbitrary owner"""
    def getGameType(self):
        """Returns the score game type."""
    def getName(self):
        """Returns the score game name."""
    def getOwnerID(self):
        """Returns the score game owner."""
    def getPoints(self):
        """Returns the number of points in this score"""
    def remove(self):
        """Removes this score from the server"""
    # FIX: the original stub declared setPoints(self) with no arguments,
    # yet its docstring describes setting a point total. Added the value
    # (and callback key) parameters with defaults so existing no-arg
    # callers, if any, keep working.
    def setPoints(self, points=0, key=None):
        """Sets the number of points in the score
        Don't use to add/remove points, use only to reset values!
        - 'points' is the new point total
        - 'key' is an optional callback receiver"""
    def transferPoints(self, dest, points=0, key=None):
        """Transfers points from this score to another"""
class ptGameScoreMsg:
    """Game Score operation callback message"""
    def __init__(self):
        """None"""
class ptGameScoreListMsg(ptGameScoreMsg):
    """Game Score message for scores found on the server"""
    def __init__(self):
        """None"""
    def getName(self):
        """Returns the template score name"""
    def getOwnerID(self):
        """Returns the template score ownerID"""
    def getScores(self):
        """Returns a list of scores found by the server"""
class ptGameScoreTransferMsg(ptGameScoreMsg):
    """Game Score message indicating a score point transfer"""
    def __init__(self):
        """None"""
    def getDestination(self):
        """Returns the score points were transferred to"""
    def getSource(self):
        """Returns the score points were transferred from"""
class ptGameScoreUpdateMsg(ptGameScoreMsg):
    """Game Score message for a score update operation"""
    def __init__(self):
        """None"""
    def getScore(self):
        """Returns the updated game score"""
class ptGUIControl:
    """Base class for all GUI controls"""
    def __init__(self, controlKey):
        """None"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
class ptGUIControlButton(ptGUIControl):
    """Plasma GUI Control Button class"""
    def __init__(self, ctrlKey):
        """None"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getNotifyType(self):
        """Returns this button's notify type. See PtButtonNotifyTypes"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def isButtonDown(self):
        """Is the button down? Returns 1 for true otherwise returns 0"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setNotifyType(self, kind):
        """Sets this button's notify type. See PtButtonNotifyTypes"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
class ptGUIControlCheckBox(ptGUIControl):
    """Plasma GUI Control Checkbox class"""
    def __init__(self, ctrlKey):
        """None"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def isChecked(self):
        """Is this checkbox checked? Returns 1 for true otherwise returns 0"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setChecked(self, checkedState):
        """Sets this checkbox to the 'checkedState'"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
class ptGUIControlClickMap(ptGUIControl):
    """Plasma GUI control Click Map"""
    def __init__(self, ctrlKey):
        """None"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getLastMouseDragPoint(self):
        """Returns the last point the mouse was dragged to"""
    def getLastMousePoint(self):
        """Returns the last point the mouse was at"""
    def getLastMouseUpPoint(self):
        """Returns the last point the mouse was released at"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
class ptGUIControlDragBar(ptGUIControl):
    """Plasma GUI Control DragBar class"""
    def __init__(self, ctrlKey):
        """None"""
    def anchor(self):
        """Don't allow this dragbar object to be moved by the user.
        Drop anchor!"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def isAnchored(self):
        """Is this dragbar control anchored? Returns 1 if true otherwise returns 0"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
    def unanchor(self):
        """Allow the user to drag this control around the screen.
        Raise anchor."""
class ptGUIControlDraggable(ptGUIControl):
    """Plasma GUI control for something draggable"""
    def __init__(self, ctrlKey):
        """None"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getLastMousePoint(self):
        """Returns the last point we were dragged to"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def stopDragging(self, cancelFlag):
        """UNKNOWN"""
    def unFocus(self):
        """Releases focus for this GUI control"""
class ptGUIControlDynamicText(ptGUIControl):
    """Plasma GUI Control DynamicText class"""
    def __init__(self, ctrlKey):
        """None"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getMap(self, index):
        """Returns a specific ptDynamicText attached to this control
        If there is no map at 'index' then a KeyError exception will be raised"""
    def getNumMaps(self):
        """Returns the number of ptDynamicText maps attached"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
class ptGUIControlEditBox(ptGUIControl):
    """Plasma GUI Control Editbox class"""
    def __init__(self, ctrlKey):
        """None"""
    def clearString(self):
        """Clears the editbox."""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def end(self):
        """Sets the cursor in the editbox to the after the last character."""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getLastKeyCaptured(self):
        """Gets the last capture key"""
    def getLastModifiersCaptured(self):
        """Gets the last modifiers flags captured"""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getString(self):
        """Returns the string that the user typed in."""
    def getStringW(self):
        """Unicode version of getString."""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def hide(self):
        """Hides this GUI control"""
    def home(self):
        """Sets the cursor in the editbox to before the first character."""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setChatMode(self, state):
        """Set the Chat mode on this control"""
    def setColor(self, foreColor, backColor):
        """Sets the fore and back color of the editbox."""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setLastKeyCapture(self, key, modifiers):
        """Set last key captured"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setSelectionColor(self, foreColor, backColor):
        """Sets the selection color of the editbox."""
    def setSpecialCaptureKeyMode(self, state):
        """Set the Capture mode on this control"""
    def setString(self, text):
        """Pre-sets the editbox to a string."""
    def setStringSize(self, size):
        """Sets the maximum size of the string that can be inputted by the user."""
    def setStringW(self, text):
        """Unicode version of setString."""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
    def wasEscaped(self):
        """If the editbox was escaped then return 1 else return 0"""
class ptGUIControlValue(ptGUIControl):
    """Plasma GUI Control Value class - knobs, spinners"""
    def __init__(self, ctrlKey):
        """None"""
    def disable(self):
        """Disables this GUI control"""
    def enable(self, flag=1):
        """Enables this GUI control"""
    def focus(self):
        """Gets focus for this GUI control"""
    def getBackColor(self):
        """Returns the background color"""
    def getBackSelectColor(self):
        """Returns the background selection color"""
    def getFontSize(self):
        """Returns the font size"""
    def getForeColor(self):
        """Returns the foreground color"""
    def getKey(self):
        """Returns the ptKey for this GUI control"""
    def getMax(self):
        """Returns the maximum of the control."""
    def getMin(self):
        """Returns the minimum of the control."""
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
    def getSelectColor(self):
        """Returns the selection color"""
    def getStep(self):
        """Returns the step increment of the control."""
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
    def getValue(self):
        """Returns the current value of the control."""
    def hide(self):
        """Hides this GUI control"""
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
    def isFocused(self):
        """Returns whether this GUI control has focus"""
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
    def isVisible(self):
        """Returns whether this GUI control is visible"""
    def refresh(self):
        """UNKNOWN"""
    def setBackColor(self, r, g, b, a):
        """Sets the background color"""
    def setBackSelectColor(self, r, g, b, a):
        """Sets the selection background color"""
    def setFocus(self, state):
        """Sets the state of the focus of this GUI control"""
    def setFontSize(self, fontSize):
        """Sets the font size"""
    def setForeColor(self, r, g, b, a):
        """Sets the foreground color"""
    def setNotifyOnInteresting(self, state):
        """Sets whether this control should send interesting events or not"""
    def setObjectCenter(self, point):
        """Sets the GUI controls object center to 'point'"""
    def setRange(self, minimum, maximum):
        """Sets the minimum and maximum range of the control."""
    def setSelectColor(self, r, g, b, a):
        """Sets the selection color"""
    def setStep(self, step):
        """Sets the step increment of the control."""
    def setValue(self, value):
        """Sets the current value of the control."""
    def setVisible(self, state):
        """Sets the state of visibility of this GUI control"""
    def show(self):
        """Shows this GUI control"""
    def unFocus(self):
        """Releases focus for this GUI control"""
class ptGUIControlKnob(ptGUIControlValue):
"""Plasma GUI control for knob"""
def __init__(self,ctrlKey):
"""None"""
pass
def disable(self):
"""Disables this GUI control"""
pass
def enable(self,flag=1):
"""Enables this GUI control"""
pass
def focus(self):
"""Gets focus for this GUI control"""
pass
def getBackColor(self):
"""Returns the background color"""
pass
def getBackSelectColor(self):
"""Returns the background selection color"""
pass
def getFontSize(self):
"""Returns the font size"""
pass
def getForeColor(self):
"""Returns the foreground color"""
pass
def getKey(self):
"""Returns the ptKey for this GUI control"""
pass
def getMax(self):
"""Returns the maximum of the control."""
pass
def getMin(self):
"""Returns the minimum of the control."""
pass
def getObjectCenter(self):
"""Returns ptPoint3 of the center of the GUI control object"""
pass
def getOwnerDialog(self):
"""Returns a ptGUIDialog of the dialog that owns this GUI control"""
pass
def getSelectColor(self):
"""Returns the selection color"""
pass
def getStep(self):
"""Returns the step increment of the control."""
pass
def getTagID(self):
"""Returns the Tag ID for this GUI control"""
pass
def getValue(self):
"""Returns the current value of the control."""
pass
def hide(self):
"""Hides this GUI control"""
pass
def isEnabled(self):
"""Returns whether this GUI control is enabled"""
pass
def isFocused(self):
"""Returns whether this GUI control has focus"""
pass
def isInteresting(self):
"""Returns whether this GUI control is interesting at the moment"""
pass
def isVisible(self):
"""Returns whether this GUI control is visible"""
pass
def refresh(self):
"""UNKNOWN"""
pass
def setBackColor(self,r,g,b,a):
"""Sets the background color"""
pass
def setBackSelectColor(self,r,g,b,a):
"""Sets the selection background color"""
pass
def setFocus(self,state):
"""Sets the state of the focus of this GUI control"""
pass
def setFontSize(self,fontSize):
"""Sets the font size"""
pass
def setForeColor(self,r,g,b,a):
"""Sets the foreground color"""
pass
def setNotifyOnInteresting(self,state):
"""Sets whether this control should send interesting events or not"""
pass
def setObjectCenter(self,point):
"""Sets the GUI controls object center to 'point'"""
pass
def setRange(self,minimum,maximum):
"""Sets the minimum and maximum range of the control."""
pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color."""
        pass
    def setStep(self,step):
        """Sets the step increment of the control."""
        pass
    def setValue(self,value):
        """Sets the current value of the control."""
        pass
    def setVisible(self,state):
        """Sets the state of visibility of this GUI control."""
        pass
    def show(self):
        """Shows this GUI control."""
        pass
    def unFocus(self):
        """Releases focus for this GUI control."""
        pass
class ptGUIControlListBox(ptGUIControl):
    """Plasma GUI Control List Box class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,ctrlKey):
        """None"""
        pass
    def add2StringsWithColors(self,text1,color1,text2,color2,respectAlpha):
        """Doesn't work right - DONT USE"""
        pass
    def addBranch(self,name,initiallyOpen):
        """UNKNOWN"""
        pass
    def addBranchW(self,name,initiallyOpen):
        """Unicode version of addBranch"""
        pass
    def addImage(self,image,respectAlphaFlag):
        """Appends an image item to the listbox"""
        pass
    def addImageAndSwatchesInBox(self,image,x,y,width,height,respectAlpha,primary,secondary):
        """Add the image and color swatches to the list"""
        pass
    def addImageInBox(self,image,x,y,width,height,respectAlpha):
        """Appends an image item to the listbox, centering within the box dimension."""
        pass
    def addSelection(self,item):
        """Adds item to selection list"""
        pass
    def addString(self,text):
        """Appends a list item 'text' to the listbox."""
        pass
    def addStringInBox(self,text,min_width,min_height):
        """Adds a text list item that has a minimum width and height"""
        pass
    def addStringW(self,text):
        """Unicode version of addString."""
        pass
    def addStringWithColor(self,text,color,inheritAlpha):
        """Adds a colored string to the list box"""
        pass
    def addStringWithColorWithSize(self,text,color,inheritAlpha,fontsize):
        """Adds a text list item with a color and different font size"""
        pass
    def allowNoSelect(self):
        """Allows the listbox to have no selection"""
        pass
    def clearAllElements(self):
        """Removes all the items from the listbox, making it empty."""
        pass
    def clickable(self):
        """Sets this listbox to be clickable by the user."""
        pass
    def closeBranch(self):
        """UNKNOWN"""
        pass
    def disable(self):
        """Disables this GUI control"""
        pass
    def disallowNoSelect(self):
        """The listbox must always have a selection"""
        pass
    def enable(self,flag=1):
        """Enables this GUI control"""
        pass
    def findString(self,text):
        """Finds and returns the index of the item that matches 'text' in the listbox."""
        pass
    def findStringW(self,text):
        """Unicode version of findString."""
        pass
    def focus(self):
        """Gets focus for this GUI control"""
        pass
    def getBackColor(self):
        """Returns the background color"""
        pass
    def getBackSelectColor(self):
        """Returns the background selection color"""
        pass
    def getBranchList(self):
        """get a list of branches in this list (index,isShowingChildren)"""
        pass
    def getElement(self,index):
        """Get the string of the item at 'index' in the listbox."""
        pass
    def getElementW(self,index):
        """Unicode version of getElement."""
        pass
    def getFontSize(self):
        """Returns the font size"""
        pass
    def getForeColor(self):
        """Returns the foreground color"""
        pass
    def getKey(self):
        """Returns the ptKey for this GUI control"""
        pass
    def getNumElements(self):
        """Return the number of items in the listbox."""
        pass
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
        pass
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
        pass
    def getScrollPos(self):
        """Returns the current scroll position in the listbox."""
        pass
    def getScrollRange(self):
        """Returns the max scroll position"""
        pass
    def getSelectColor(self):
        """Returns the selection color"""
        pass
    def getSelection(self):
        """Returns the currently selected list item in the listbox."""
        pass
    def getSelectionList(self):
        """Returns the current selection list"""
        pass
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
        pass
    def hide(self):
        """Hides this GUI control"""
        pass
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
        pass
    def isFocused(self):
        """Returns whether this GUI control has focus"""
        pass
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
        pass
    def isVisible(self):
        """Returns whether this GUI control is visible"""
        pass
    def lock(self):
        """Locks the updates to a listbox, so a number of operations can be performed
NOTE: an unlock() call must be made before the next lock() can be."""
        pass
    def refresh(self):
        """Refresh the display of the listbox (after updating contents)."""
        pass
    def removeElement(self,index):
        """Removes element at 'index' in the listbox."""
        pass
    def removeSelection(self,item):
        """Removes item from selection list"""
        pass
    def scrollToBegin(self):
        """Scrolls the listbox to the beginning of the list"""
        pass
    def scrollToEnd(self):
        """Scrolls the listbox to the end of the list"""
        pass
    def setBackColor(self,r,g,b,a):
        """Sets the background color"""
        pass
    def setBackSelectColor(self,r,g,b,a):
        """Sets the selection background color"""
        pass
    def setElement(self,index,text):
        """Set a particular item in the listbox to a string."""
        pass
    def setElementW(self,index,text):
        """Unicode version of setElement."""
        pass
    def setFocus(self,state):
        """Sets the state of the focus of this GUI control"""
        pass
    def setFontSize(self,fontSize):
        """Sets the font size"""
        pass
    def setForeColor(self,r,g,b,a):
        """Sets the foreground color"""
        pass
    def setGlobalSwatchEdgeOffset(self,offset):
        """Sets the edge offset of the color swatches"""
        pass
    def setGlobalSwatchSize(self,size):
        """Sets the size of the color swatches"""
        pass
    def setNotifyOnInteresting(self,state):
        """Sets whether this control should send interesting events or not"""
        pass
    def setObjectCenter(self,point):
        """Sets the GUI controls object center to 'point'"""
        pass
    def setScrollPos(self,pos):
        """Sets the scroll position of the listbox to 'pos'"""
        pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color"""
        pass
    def setSelection(self,selectionIndex):
        """Sets the current selection in the listbox."""
        pass
    def setStringJustify(self,index,justify):
        """Sets the text justification"""
        pass
    def setVisible(self,state):
        """Sets the state of visibility of this GUI control"""
        pass
    def show(self):
        """Shows this GUI control"""
        pass
    def unFocus(self):
        """Releases focus for this GUI control"""
        pass
    def unclickable(self):
        """Makes this listbox not clickable by the user.
Useful when just displaying a list that is not really selectable."""
        pass
    def unlock(self):
        """Unlocks updates to a listbox and does any saved up changes"""
        pass
class ptGUIControlMultiLineEdit(ptGUIControl):
    """Plasma GUI Control Multi-line edit class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,ctrlKey):
        """None"""
        pass
    def clearBuffer(self):
        """Clears all text from the multi-line edit control."""
        pass
    def clickable(self):
        # docstring said "listbox" (copy-paste); this is a multi-line edit control
        """Sets this multi-line edit control to be clickable by the user."""
        pass
    def deleteChar(self):
        """Deletes a character at the current cursor position."""
        pass
    def deleteLinesFromTop(self,numLines):
        """Deletes the specified number of lines from the top of the text buffer"""
        pass
    def disable(self):
        """Disables this GUI control"""
        pass
    def disableScrollControl(self):
        """Disables the scroll control if there is one"""
        pass
    def enable(self,flag=1):
        """Enables this GUI control"""
        pass
    def enableScrollControl(self):
        """Enables the scroll control if there is one"""
        pass
    def focus(self):
        """Gets focus for this GUI control"""
        pass
    def getBackColor(self):
        """Returns the background color"""
        pass
    def getBackSelectColor(self):
        """Returns the background selection color"""
        pass
    def getBufferLimit(self):
        """Returns the current buffer limit"""
        pass
    def getBufferSize(self):
        """Returns the size of the buffer"""
        pass
    def getEncodedBuffer(self):
        """Returns the encoded buffer in a python buffer object. Do NOT use result with setEncodedBufferW."""
        pass
    def getEncodedBufferW(self):
        """Unicode version of getEncodedBuffer. Do NOT use result with setEncodedBuffer."""
        pass
    def getFontSize(self):
        """Returns the current default font size"""
        pass
    def getForeColor(self):
        """Returns the foreground color"""
        pass
    def getKey(self):
        """Returns the ptKey for this GUI control"""
        pass
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
        pass
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
        pass
    def getScrollPosition(self):
        """Returns what line is the top line."""
        pass
    def getSelectColor(self):
        """Returns the selection color"""
        pass
    def getString(self):
        """Gets the string of the edit control."""
        pass
    def getStringW(self):
        """Unicode version of getString."""
        pass
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
        pass
    def hide(self):
        """Hides this GUI control"""
        pass
    def insertChar(self,c):
        """Inserts a character at the current cursor position."""
        pass
    def insertCharW(self,c):
        """Unicode version of insertChar."""
        pass
    def insertColor(self,color):
        """Inserts an encoded color object at the current cursor position.
'color' is a ptColor object."""
        pass
    def insertString(self,string):
        """Inserts a string at the current cursor position."""
        pass
    def insertStringW(self,string):
        """Unicode version of insertString"""
        pass
    def insertStyle(self,style):
        """Inserts an encoded font style at the current cursor position."""
        pass
    def isAtEnd(self):
        """Returns whether the cursor is at the end."""
        pass
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
        pass
    def isFocused(self):
        """Returns whether this GUI control has focus"""
        pass
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
        pass
    def isLocked(self):
        """Is the multi-line edit control locked? Returns 1 if true otherwise returns 0"""
        pass
    def isVisible(self):
        """Returns whether this GUI control is visible"""
        pass
    def lock(self):
        """Locks the multi-line edit control so the user cannot make changes."""
        pass
    def moveCursor(self,direction):
        """Move the cursor in the specified direction (see PtGUIMultiLineDirection)"""
        pass
    def refresh(self):
        """UNKNOWN"""
        pass
    def setBackColor(self,r,g,b,a):
        """Sets the background color"""
        pass
    def setBackSelectColor(self,r,g,b,a):
        """Sets the selection background color"""
        pass
    def setBufferLimit(self,bufferLimit):
        """Sets the buffer max for the editbox"""
        pass
    def setEncodedBuffer(self,bufferObject):
        """Sets the edit control to the encoded buffer in the python buffer object. Do NOT use with a result from getEncodedBufferW."""
        pass
    def setEncodedBufferW(self,bufferObject):
        """Unicode version of setEncodedBuffer. Do NOT use with a result from getEncodedBuffer."""
        pass
    def setFocus(self,state):
        """Sets the state of the focus of this GUI control"""
        pass
    def setFontSize(self,fontSize):
        """Sets the default font size for the edit control"""
        pass
    def setForeColor(self,r,g,b,a):
        """Sets the foreground color"""
        pass
    def setNotifyOnInteresting(self,state):
        """Sets whether this control should send interesting events or not"""
        pass
    def setObjectCenter(self,point):
        """Sets the GUI controls object center to 'point'"""
        pass
    def setScrollPosition(self,topLine):
        """Sets the what line is the top line."""
        pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color"""
        pass
    def setString(self,asciiText):
        """Sets the multi-line edit control string."""
        pass
    def setStringW(self,unicodeText):
        """Unicode version of setString."""
        pass
    def setVisible(self,state):
        """Sets the state of visibility of this GUI control"""
        pass
    def show(self):
        """Shows this GUI control"""
        pass
    def unFocus(self):
        """Releases focus for this GUI control"""
        pass
    def unclickable(self):
        # docstring said "listbox" (copy-paste); this is a multi-line edit control
        """Makes this multi-line edit control not clickable by the user.
Useful when just displaying text that is not really selectable."""
        pass
    def unlock(self):
        """Unlocks the multi-line edit control so that the user can make changes."""
        pass
class ptGUIControlProgress(ptGUIControlValue):
    """Plasma GUI control for progress bar"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,ctrlKey):
        """None"""
        pass
    def animateToPercent(self,percent):
        """Sets the value of the control and animates to that point."""
        pass
    def disable(self):
        """Disables this GUI control"""
        pass
    def enable(self,flag=1):
        """Enables this GUI control"""
        pass
    def focus(self):
        """Gets focus for this GUI control"""
        pass
    def getBackColor(self):
        """Returns the background color"""
        pass
    def getBackSelectColor(self):
        """Returns the background selection color"""
        pass
    def getFontSize(self):
        """Returns the font size"""
        pass
    def getForeColor(self):
        """Returns the foreground color"""
        pass
    def getKey(self):
        """Returns the ptKey for this GUI control"""
        pass
    def getMax(self):
        """Returns the maximum of the control."""
        pass
    def getMin(self):
        """Returns the minimum of the control."""
        pass
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
        pass
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
        pass
    def getSelectColor(self):
        """Returns the selection color"""
        pass
    def getStep(self):
        """Returns the step increment of the control."""
        pass
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
        pass
    def getValue(self):
        """Returns the current value of the control."""
        pass
    def hide(self):
        """Hides this GUI control"""
        pass
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
        pass
    def isFocused(self):
        """Returns whether this GUI control has focus"""
        pass
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
        pass
    def isVisible(self):
        """Returns whether this GUI control is visible"""
        pass
    def refresh(self):
        """UNKNOWN"""
        pass
    def setBackColor(self,r,g,b,a):
        """Sets the background color"""
        pass
    def setBackSelectColor(self,r,g,b,a):
        """Sets the selection background color"""
        pass
    def setFocus(self,state):
        """Sets the state of the focus of this GUI control"""
        pass
    def setFontSize(self,fontSize):
        """Sets the font size"""
        pass
    def setForeColor(self,r,g,b,a):
        """Sets the foreground color"""
        pass
    def setNotifyOnInteresting(self,state):
        """Sets whether this control should send interesting events or not"""
        pass
    def setObjectCenter(self,point):
        """Sets the GUI controls object center to 'point'"""
        pass
    def setRange(self,minimum,maximum):
        """Sets the minimum and maximum range of the control."""
        pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color"""
        pass
    def setStep(self,step):
        """Sets the step increment of the control."""
        pass
    def setValue(self,value):
        """Sets the current value of the control."""
        pass
    def setVisible(self,state):
        """Sets the state of visibility of this GUI control"""
        pass
    def show(self):
        """Shows this GUI control"""
        pass
    def unFocus(self):
        """Releases focus for this GUI control"""
        pass
class ptGUIControlRadioGroup(ptGUIControl):
    """Plasma GUI Control Radio Group class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,ctrlKey):
        """None"""
        pass
    def disable(self):
        """Disables this GUI control"""
        pass
    def enable(self,flag=1):
        """Enables this GUI control"""
        pass
    def focus(self):
        """Gets focus for this GUI control"""
        pass
    def getBackColor(self):
        """Returns the background color"""
        pass
    def getBackSelectColor(self):
        """Returns the background selection color"""
        pass
    def getFontSize(self):
        """Returns the font size"""
        pass
    def getForeColor(self):
        """Returns the foreground color"""
        pass
    def getKey(self):
        """Returns the ptKey for this GUI control"""
        pass
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
        pass
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
        pass
    def getSelectColor(self):
        """Returns the selection color"""
        pass
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
        pass
    def getValue(self):
        """Returns the current selection of the radio group."""
        pass
    def hide(self):
        """Hides this GUI control"""
        pass
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
        pass
    def isFocused(self):
        """Returns whether this GUI control has focus"""
        pass
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
        pass
    def isVisible(self):
        """Returns whether this GUI control is visible"""
        pass
    def refresh(self):
        """UNKNOWN"""
        pass
    def setBackColor(self,r,g,b,a):
        """Sets the background color"""
        pass
    def setBackSelectColor(self,r,g,b,a):
        """Sets the selection background color"""
        pass
    def setFocus(self,state):
        """Sets the state of the focus of this GUI control"""
        pass
    def setFontSize(self,fontSize):
        """Sets the font size"""
        pass
    def setForeColor(self,r,g,b,a):
        """Sets the foreground color"""
        pass
    def setNotifyOnInteresting(self,state):
        """Sets whether this control should send interesting events or not"""
        pass
    def setObjectCenter(self,point):
        """Sets the GUI controls object center to 'point'"""
        pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color"""
        pass
    def setValue(self,value):
        """Sets the current selection to 'value'"""
        pass
    def setVisible(self,state):
        """Sets the state of visibility of this GUI control"""
        pass
    def show(self):
        """Shows this GUI control"""
        pass
    def unFocus(self):
        """Releases focus for this GUI control"""
        pass
class ptGUIControlTextBox(ptGUIControl):
    """Plasma GUI Control Textbox class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    # NOTE(review): unlike most controls, setBackColor/setForeColor here take a
    # ptColor object rather than r,g,b,a components (per their docstrings).
    def __init__(self,ctrlKey):
        """None"""
        pass
    def disable(self):
        """Disables this GUI control"""
        pass
    def enable(self,flag=1):
        """Enables this GUI control"""
        pass
    def focus(self):
        """Gets focus for this GUI control"""
        pass
    def getBackColor(self):
        """Returns the background color"""
        pass
    def getBackSelectColor(self):
        """Returns the background selection color"""
        pass
    def getFontSize(self):
        """Returns the font size"""
        pass
    def getForeColor(self):
        """Returns the current forecolor"""
        pass
    def getKey(self):
        """Returns the ptKey for this GUI control"""
        pass
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
        pass
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
        pass
    def getSelectColor(self):
        """Returns the selection color"""
        pass
    def getString(self):
        """Returns the string that the TextBox is set to (in case you forgot)"""
        pass
    def getStringJustify(self):
        """Returns current justify"""
        pass
    def getStringW(self):
        """Unicode version of getString"""
        pass
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
        pass
    def hide(self):
        """Hides this GUI control"""
        pass
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
        pass
    def isFocused(self):
        """Returns whether this GUI control has focus"""
        pass
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
        pass
    def isVisible(self):
        """Returns whether this GUI control is visible"""
        pass
    def refresh(self):
        """UNKNOWN"""
        pass
    def setBackColor(self,color):
        """Sets the text backcolor to 'color', which is a ptColor object."""
        pass
    def setBackSelectColor(self,r,g,b,a):
        """Sets the selection background color"""
        pass
    def setFocus(self,state):
        """Sets the state of the focus of this GUI control"""
        pass
    def setFontSize(self,size):
        """Don't use"""
        pass
    def setForeColor(self,color):
        """Sets the text forecolor to 'color', which is a ptColor object."""
        pass
    def setNotifyOnInteresting(self,state):
        """Sets whether this control should send interesting events or not"""
        pass
    def setObjectCenter(self,point):
        """Sets the GUI controls object center to 'point'"""
        pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color"""
        pass
    def setString(self,text):
        """Sets the textbox string to 'text'"""
        pass
    def setStringJustify(self,justify):
        """Sets current justify"""
        pass
    def setStringW(self,text):
        """Unicode version of setString"""
        pass
    def setVisible(self,state):
        """Sets the state of visibility of this GUI control"""
        pass
    def show(self):
        """Shows this GUI control"""
        pass
    def unFocus(self):
        """Releases focus for this GUI control"""
        pass
class ptGUIControlUpDownPair(ptGUIControlValue):
    """Plasma GUI control for up/down pair"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,ctrlKey):
        """None"""
        pass
    def disable(self):
        """Disables this GUI control"""
        pass
    def enable(self,flag=1):
        """Enables this GUI control"""
        pass
    def focus(self):
        """Gets focus for this GUI control"""
        pass
    def getBackColor(self):
        """Returns the background color"""
        pass
    def getBackSelectColor(self):
        """Returns the background selection color"""
        pass
    def getFontSize(self):
        """Returns the font size"""
        pass
    def getForeColor(self):
        """Returns the foreground color"""
        pass
    def getKey(self):
        """Returns the ptKey for this GUI control"""
        pass
    def getMax(self):
        """Returns the maximum of the control."""
        pass
    def getMin(self):
        """Returns the minimum of the control."""
        pass
    def getObjectCenter(self):
        """Returns ptPoint3 of the center of the GUI control object"""
        pass
    def getOwnerDialog(self):
        """Returns a ptGUIDialog of the dialog that owns this GUI control"""
        pass
    def getSelectColor(self):
        """Returns the selection color"""
        pass
    def getStep(self):
        """Returns the step increment of the control."""
        pass
    def getTagID(self):
        """Returns the Tag ID for this GUI control"""
        pass
    def getValue(self):
        """Returns the current value of the control."""
        pass
    def hide(self):
        """Hides this GUI control"""
        pass
    def isEnabled(self):
        """Returns whether this GUI control is enabled"""
        pass
    def isFocused(self):
        """Returns whether this GUI control has focus"""
        pass
    def isInteresting(self):
        """Returns whether this GUI control is interesting at the moment"""
        pass
    def isVisible(self):
        """Returns whether this GUI control is visible"""
        pass
    def refresh(self):
        """UNKNOWN"""
        pass
    def setBackColor(self,r,g,b,a):
        """Sets the background color"""
        pass
    def setBackSelectColor(self,r,g,b,a):
        """Sets the selection background color"""
        pass
    def setFocus(self,state):
        """Sets the state of the focus of this GUI control"""
        pass
    def setFontSize(self,fontSize):
        """Sets the font size"""
        pass
    def setForeColor(self,r,g,b,a):
        """Sets the foreground color"""
        pass
    def setNotifyOnInteresting(self,state):
        """Sets whether this control should send interesting events or not"""
        pass
    def setObjectCenter(self,point):
        """Sets the GUI controls object center to 'point'"""
        pass
    def setRange(self,minimum,maximum):
        """Sets the minimum and maximum range of the control."""
        pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color"""
        pass
    def setStep(self,step):
        """Sets the step increment of the control."""
        pass
    def setValue(self,value):
        """Sets the current value of the control."""
        pass
    def setVisible(self,state):
        """Sets the state of visibility of this GUI control"""
        pass
    def show(self):
        """Shows this GUI control"""
        pass
    def unFocus(self):
        """Releases focus for this GUI control"""
        pass
class ptGUIDialog:
    """Plasma GUI dialog class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,dialogKey):
        """None"""
        pass
    def disable(self):
        """Disables this dialog"""
        pass
    def enable(self,enableFlag=1):
        """Enable this dialog"""
        pass
    def getBackColor(self):
        """Returns the back color as a ptColor object"""
        pass
    def getBackSelectColor(self):
        """Returns the select back color as a ptColor object"""
        pass
    def getControlFromIndex(self,index):
        """Returns the ptKey of the control with the specified index (not tag ID!)"""
        pass
    def getControlFromTag(self,tagID):
        """Returns the ptKey of the control with the specified tag ID"""
        pass
    def getFontSize(self):
        """Returns the font size"""
        pass
    def getForeColor(self):
        """Returns the fore color as a ptColor object"""
        pass
    def getKey(self):
        """Returns this dialog's ptKey"""
        pass
    def getName(self):
        """Returns the dialog's name"""
        pass
    def getNumControls(self):
        """Returns the number of controls in this dialog"""
        pass
    def getSelectColor(self):
        """Returns the select color as a ptColor object"""
        pass
    def getTagID(self):
        """Returns this dialog's tag ID"""
        pass
    def getVersion(self):
        """UNKNOWN"""
        pass
    def hide(self):
        """Hides the dialog"""
        pass
    def isEnabled(self):
        """Is this dialog currently enabled?"""
        pass
    def noFocus(self):
        """Makes sure no control has input focus"""
        pass
    def refreshAllControls(self):
        """Tells the dialog to redraw all its controls"""
        pass
    def setBackColor(self,red,green,blue,alpha):
        """Sets the back color, -1 means don't change"""
        pass
    def setBackSelectColor(self,red,green,blue,alpha):
        """Sets the select back color, -1 means don't change"""
        pass
    def setFocus(self,ctrlKey):
        """Sets the control that has input focus"""
        pass
    def setFontSize(self,fontSize):
        """Sets the font size"""
        pass
    def setForeColor(self,red,green,blue,alpha):
        """Sets the fore color, -1 means don't change"""
        pass
    def setSelectColor(self,red,green,blue,alpha):
        """Sets the select color, -1 means don't change"""
        pass
    def show(self):
        """Shows the dialog"""
        pass
    def showNoReset(self):
        """Show dialog without resetting clickables"""
        pass
    def updateAllBounds(self):
        """Tells the dialog to recompute all the bounds for its controls"""
        pass
class ptGUIPopUpMenu:
    """Takes three different argument lists:
gckey
name,screenOriginX,screenOriginY
name,parent,screenOriginX,screenOriginY"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,arg1,arg2=None,arg3=None,arg4=None):
        """None"""
        pass
    def addConsoleCmdItem(self,name,consoleCmd):
        """Adds a new item to the menu that fires a console command"""
        pass
    def addConsoleCmdItemW(self,name,consoleCmd):
        """Unicode version of addConsoleCmdItem"""
        pass
    def addNotifyItem(self,name):
        """Adds a new item to the menu"""
        pass
    def addNotifyItemW(self,name):
        """Unicode version of addNotifyItem"""
        pass
    def addSubMenuItem(self,name,subMenu):
        """Adds a submenu to this menu"""
        pass
    def addSubMenuItemW(self,name,subMenu):
        """Unicode version of addSubMenuItem"""
        pass
    def disable(self):
        """Disables this menu"""
        pass
    def enable(self,state=1):
        """Enables/disables this menu"""
        pass
    def getBackColor(self):
        """Returns the background color"""
        pass
    def getBackSelectColor(self):
        """Returns the background selection color"""
        pass
    def getForeColor(self):
        """Returns the foreground color"""
        pass
    def getKey(self):
        """Returns this menu's key"""
        pass
    def getName(self):
        """Returns this menu's name"""
        pass
    def getSelectColor(self):
        """Returns the selection color"""
        pass
    def getTagID(self):
        """Returns this menu's tag id"""
        pass
    def getVersion(self):
        """UNKNOWN"""
        pass
    def hide(self):
        """Hides this menu"""
        pass
    def isEnabled(self):
        """Returns whether this menu is enabled or not"""
        pass
    def setBackColor(self,r,g,b,a):
        """Sets the background color"""
        pass
    def setBackSelectColor(self,r,g,b,a):
        """Sets the selection background color"""
        pass
    def setForeColor(self,r,g,b,a):
        """Sets the foreground color"""
        pass
    def setSelectColor(self,r,g,b,a):
        """Sets the selection color"""
        pass
    def show(self):
        """Shows this menu"""
        pass
class ptGUISkin:
    """Plasma GUI Skin object"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,key):
        """None"""
        pass
    def getKey(self):
        """Returns this object's ptKey"""
        pass
class ptGrassShader:
    """Plasma Grass Shader class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,key):
        """None"""
        pass
    def getWaveDirection(self,waveNum):
        """Gets the wave waveNum's direction as a tuple of x,y. waveNum must be between 0 and plGrassShaderMod::kNumWaves-1 (currently 3) inclusive"""
        pass
    def getWaveDistortion(self,waveNum):
        """Gets the wave waveNum's distortion as a tuple of x,y,z. waveNum must be between 0 and plGrassShaderMod::kNumWaves-1 (currently 3) inclusive"""
        pass
    def getWaveSpeed(self,waveNum):
        """Gets the wave waveNum's speed as a float. waveNum must be between 0 and plGrassShaderMod::kNumWaves-1 (currently 3) inclusive"""
        pass
    def resetWaves(self):
        """Resets wave data to 0"""
        pass
    def setWaveDirection(self,waveNum, direction):
        """Sets the wave waveNum's direction as a tuple of x,y. waveNum must be between 0 and plGrassShaderMod::kNumWaves-1 (currently 3) inclusive"""
        pass
    def setWaveDistortion(self,waveNum, distortion):
        """Sets the wave waveNum's distortion as a tuple of x,y,z. waveNum must be between 0 and plGrassShaderMod::kNumWaves-1 (currently 3) inclusive"""
        pass
    def setWaveSpeed(self,waveNum, speed):
        """Sets the wave waveNum's speed as a float. waveNum must be between 0 and plGrassShaderMod::kNumWaves-1 (currently 3) inclusive"""
        pass
class ptImage:
    """Plasma image class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self,imgKey):
        """None"""
        pass
    def getColorLoc(self,color):
        """Returns the ptPoint3 where the specified color is located"""
        pass
    def getHeight(self):
        """Returns the height of the image"""
        pass
    def getPixelColor(self,x,y):
        """Returns the ptColor at the specified location (float from 0 to 1)"""
        pass
    def getWidth(self):
        """Returns the width of the image"""
        pass
    def saveAsJPEG(self,filename,quality=75):
        """Saves this image to disk as a JPEG file"""
        pass
class ptInputInterface:
    """Plasma input interface class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self):
        """None"""
        pass
    def popTelescope(self):
        """pops off the telescope interface and goes back to previous interface"""
        pass
    def pushTelescope(self):
        """pushes on the telescope interface"""
        pass
class ptKey:
    """Plasma Key class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self):
        """None"""
        pass
    def disable(self):
        """Sends a disable message to whatever this ptKey is pointing to"""
        pass
    def enable(self):
        """Sends an enable message to whatever this ptKey is pointing to"""
        pass
    def getName(self):
        """Get the name of the object that this ptKey is pointing to"""
        pass
    def getParentKey(self):
        """This will return a ptKey object that is the parent of this modifier
However, if the parent is not a modifier or not loaded, then None is returned."""
        pass
    def getSceneObject(self):
        """This will return a ptSceneobject object that is associated with this ptKey
However, if this ptKey is _not_ a sceneobject, then unpredictable results will ensue"""
        pass
    def isAttachedToClone(self):
        """Returns whether the python file mod is attached to a clone"""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
- This is to be used if your Python program is running on only one client
Such as a game master, only running on the client that owns a particular object"""
        pass
class ptKeyMap:
    """Accessor class to the Key Mapping functions"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    # NOTE(review): several methods document a parameter ("Params controlCode")
    # in the docstring but take no Python argument -- signature kept as-is.
    def __init__(self):
        """None"""
        pass
    def bindKey(self):
        """Params key1,key2,action
Bind keys to an action"""
        pass
    def bindKeyToConsoleCommand(self,keyStr1, command):
        """Binds key to console command"""
        pass
    def convertCharToControlCode(self,controlCodeString):
        """Convert string version of control code to number"""
        pass
    def convertCharToFlags(self,charString):
        """Convert char string to flags"""
        pass
    def convertCharToVKey(self,charString):
        """Convert char string to virtual key"""
        pass
    def convertControlCodeToString(self):
        """Params controlCode
Convert control code to character string"""
        pass
    def convertVKeyToChar(self,virtualKey,flags):
        """Convert virtual key and shift flags to string"""
        pass
    def getBindingFlags1(self):
        """Params controlCode
Returns modifier flags for controlCode"""
        pass
    def getBindingFlags2(self):
        """Params controlCode
Returns modifier flags for controlCode"""
        pass
    def getBindingFlagsConsole(self,command):
        """Returns modifier flags for the console command mapping"""
        pass
    def getBindingKey1(self):
        """Params controlCode
Returns key code for controlCode"""
        pass
    def getBindingKey2(self):
        """Params controlCode
Returns key code for controlCode"""
        pass
    def getBindingKeyConsole(self,command):
        """Returns key for console command mapping"""
        pass
    def writeKeyMap(self):
        """Forces write of the keymap file"""
        pass
class ptMarkerMgr:
    """Marker manager accessor class"""
    # NOTE(review): all method bodies in this class are 'pass' stubs; the actual
    # behavior is presumably supplied by the Plasma engine at runtime.
    def __init__(self):
        """None"""
        pass
    def addMarker(self,x, y, z, id, justCreated):
        """Add a marker in the specified location with the specified id"""
        pass
    def areLocalMarkersShowing(self):
        """Returns true if we are showing the markers on this local machine"""
        pass
    def captureQuestMarker(self,id, captured):
        """Sets a marker as captured or not"""
        pass
    def captureTeamMarker(self,id, team):
        """Sets a marker as captured by the specified team (0 = not captured)"""
        pass
    def clearSelectedMarker(self):
        """Unselects the selected marker"""
        pass
    def getMarkersRespawn(self):
        """Returns whether markers respawn after being captured, or not"""
        pass
    def getSelectedMarker(self):
        """Returns the id of the selected marker"""
        pass
    def hideMarkersLocal(self):
        """Hides the markers on your machine, so you can no longer see where they are"""
        pass
    def removeAllMarkers(self):
        """Removes all markers"""
        pass
    def removeMarker(self,id):
        """Removes the specified marker from the game"""
        pass
    def setMarkersRespawn(self,respawn):
        """Sets whether markers respawn after being captured, or not"""
        pass
    def setSelectedMarker(self,id):
        """Sets the selected marker to the one with the specified id"""
        pass
    def showMarkersLocal(self):
        """Shows the markers on your machine, so you can see where they are"""
        pass
class ptMatrix44:
    """Plasma 4x4 transformation matrix class."""
    def __init__(self):
        """Initialize a new matrix (engine default; presumably identity — TODO confirm)."""
        pass
    def copy(self):
        """Copies the matrix and returns the copy"""
        pass
    def getAdjoint(self,adjointMat):
        """Returns the adjoint of the matrix"""
        pass
    def getData(self):
        """Returns the matrix in tuple form"""
        pass
    def getDeterminant(self):
        """Get the matrix's determinant"""
        pass
    def getInverse(self,inverseMat):
        """Returns the inverse of the matrix"""
        pass
    def getParity(self):
        """Get the parity of the matrix"""
        pass
    def getTranslate(self,vector):
        """Returns the translate vector of the matrix (and sets vector to it as well)"""
        pass
    def getTranspose(self,transposeMat):
        """Returns the transpose of the matrix"""
        pass
    def make(self,fromPt, atPt, upVec):
        """Creates the matrix from from and at points, and the up vector"""
        pass
    def makeRotateMat(self,axis,radians):
        """Makes the matrix a rotation matrix"""
        pass
    def makeScaleMat(self,scale):
        """Makes the matrix a scaling matrix"""
        pass
    def makeTranslateMat(self,trans):
        """Makes the matrix a translation matrix"""
        pass
    def makeUpPreserving(self,fromPt, atPt, upVec):
        """Creates the matrix from from and at points, and the up vector (preserving the up vector)"""
        pass
    def reset(self):
        """Reset the matrix to identity"""
        pass
    def right(self):
        """Returns the right vector of the matrix"""
        pass
    def rotate(self,axis,radians):
        """Rotates the matrix by radians around the axis"""
        pass
    def scale(self,scale):
        """Scales the matrix by the vector"""
        pass
    def setData(self,mat):
        """Sets the matrix using tuples"""
        pass
    def translate(self,vector):
        """Translates the matrix by the vector"""
        pass
    def up(self):
        """Returns the up vector of the matrix"""
        pass
    def view(self):
        """Returns the view vector of the matrix"""
        pass
class ptMoviePlayer:
    """Accessor class to play a movie in the MoviePlayer."""
    def __init__(self,movieName,selfKey):
        """Create a movie player for the movie file 'movieName';
        'selfKey' is the ptKey of the owning PythonFile modifier."""
        pass
    def pause(self):
        """Pauses the movie"""
        pass
    def play(self):
        """Plays the movie"""
        pass
    def playPaused(self):
        """Plays movie, but pauses at first frame"""
        pass
    def resume(self):
        """Resumes movie after pausing"""
        pass
    def setCenter(self,x,y):
        """Sets the center of the movie"""
        pass
    def setColor(self,color):
        """Sets the color of the movie"""
        pass
    def setOpacity(self,opacity):
        """Sets the opacity of the movie"""
        pass
    def setScale(self,width,height):
        """Sets the width and height scale of the movie"""
        pass
    def setVolume(self,volume):
        """Set the volume of the movie"""
        pass
    def stop(self):
        """Stops the movie"""
        pass
class ptNetLinkingMgr:
    """Constructor to get access to the net link manager,
    which handles linking players between Ages."""
    def __init__(self):
        """Initialize the net link manager accessor."""
        pass
    def getCurrAgeLink(self):
        """Get the ptAgeLinkStruct for the current age"""
        pass
    def getPrevAgeLink(self):
        """Get the ptAgeLinkStruct for the previous age"""
        pass
    def isEnabled(self):
        """True if linking is enabled."""
        pass
    def linkPlayerHere(self,pid):
        """link player(pid) to where I am"""
        pass
    def linkPlayerToAge(self,ageLink,pid):
        """Link player(pid) to ageLink"""
        pass
    def linkToAge(self,ageLink):
        """Links to ageLink (ptAgeLinkStruct)"""
        pass
    def linkToMyNeighborhoodAge(self):
        """Link to my Neighborhood Age"""
        pass
    def linkToMyPersonalAge(self):
        """Link to my Personal Age"""
        pass
    def linkToMyPersonalAgeWithYeeshaBook(self):
        """Link to my Personal Age with the YeeshaBook"""
        pass
    def linkToPlayersAge(self,pid):
        """Link me to where player(pid) is"""
        pass
    def setEnabled(self,enable):
        """Enable/Disable linking."""
        pass
class ptNotify:
    """Creates a Notify message
    - selfKey is ptKey of your PythonFile modifier"""
    def __init__(self,selfKey):
        """Create a Notify message originating from 'selfKey'."""
        pass
    def addActivateEvent(self,activeFlag,activateFlag):
        """Add an activate event record to the notify message"""
        pass
    def addCallbackEvent(self,eventNumber):
        """Add a callback event record to the notify message"""
        pass
    def addCollisionEvent(self,enterFlag,hitterKey,hitteeKey):
        """Add a collision event record to the Notify message"""
        pass
    def addContainerEvent(self,enteringFlag,containerKey,containedKey):
        """Add a container event record to the notify message"""
        pass
    def addControlKeyEvent(self,keynumber,downFlag):
        """Add a keyboard event record to the Notify message"""
        pass
    def addFacingEvent(self,enabledFlag,facerKey, faceeKey, dotProduct):
        """Add a facing event record to the Notify message"""
        pass
    def addPickEvent(self,enabledFlag,pickerKey,pickeeKey,hitPoint):
        """Add a pick event record to the Notify message"""
        pass
    def addReceiver(self,key):
        """Add a receivers key to receive this Notify message"""
        pass
    def addResponderState(self,state):
        """Add a responder state event record to the notify message"""
        pass
    def addVarKey(self,name,key):
        """Add a ptKey variable event record to the Notify message
        This event record is used to pass a ptKey variable to another python program"""
        pass
    def addVarNumber(self,name,number):
        """Add a number variable event record to the Notify message
        Method will try to pick appropriate variable type
        This event record is used to pass a number variable to another python program"""
        pass
    def addVarFloat(self,name,number):
        """Add a float variable event record to the Notify message
        This event record is used to pass a number variable to another python program"""
        pass
    def addVarInt(self,name,number):
        """Add an integer variable event record to the Notify message
        This event record is used to pass a number variable to another python program"""
        pass
    def addVarNull(self,name):
        """Add a null (no data) variable event record to the Notify message
        This event record is used to pass a number variable to another python program"""
        pass
    def clearReceivers(self):
        """Remove all the receivers that this Notify message has
        - receivers are automatically added if from a ptAttribActivator"""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object"""
        pass
    def netPropagate(self,netFlag):
        """Sets the net propagate flag - default to set"""
        pass
    def send(self):
        """Send the notify message"""
        pass
    def setActivate(self,state):
        """Set the activate state to true(1.0) or false(0.0)"""
        pass
    def setType(self,type):
        """Sets the message type"""
        pass
class ptParticle:
    """Plasma particle system class.
    NOTE(review): most setters below had no original documentation; the
    descriptions are inferred from the method names — confirm against the engine."""
    def __init__(self):
        """Initialize the particle system accessor."""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object"""
        pass
    def setGeneratorLife(self,value):
        """Presumably sets the lifetime of the particle generator — original docs missing."""
        pass
    def setHeightSize(self,value):
        """Presumably sets the particle height — original docs missing."""
        pass
    def setInitPitchRange(self,value):
        """Presumably sets the initial pitch range of emitted particles — original docs missing."""
        pass
    def setInitYawRange(self,value):
        """Presumably sets the initial yaw range of emitted particles — original docs missing."""
        pass
    def setParticleLifeMaximum(self,value):
        """Presumably sets the maximum particle lifetime — original docs missing."""
        pass
    def setParticleLifeMinimum(self,value):
        """Presumably sets the minimum particle lifetime — original docs missing."""
        pass
    def setParticlesPerSecond(self,value):
        """Presumably sets the particle emission rate — original docs missing."""
        pass
    def setScaleMaximum(self,value):
        """Presumably sets the maximum particle scale — original docs missing."""
        pass
    def setScaleMinimum(self,value):
        """Presumably sets the minimum particle scale — original docs missing."""
        pass
    def setVelocityMaximum(self,value):
        """Presumably sets the maximum particle velocity — original docs missing."""
        pass
    def setVelocityMinimum(self,value):
        """Presumably sets the minimum particle velocity — original docs missing."""
        pass
    def setWidthSize(self,value):
        """Presumably sets the particle width — original docs missing."""
        pass
class ptPhysics:
    """Plasma physics class — physical behavior of the attached sceneobject."""
    def __init__(self):
        """Initialize the physics accessor."""
        pass
    def angularImpulse(self,impulseVector):
        """Add the given vector (representing a rotation axis and magnitude) to
        the attached sceneobject's velocity"""
        pass
    def damp(self,damp):
        """Reduce all velocities on the object (0 = all stop, 1 = no effect)"""
        pass
    def disable(self):
        """Disables physics on the sceneobject attached"""
        pass
    def disableCollision(self):
        """Disables collision detection on the attached sceneobject"""
        pass
    def enable(self,state=1):
        """Sets the physics enable state for the sceneobject attached"""
        pass
    def enableCollision(self):
        """Enables collision detection on the attached sceneobject"""
        pass
    def force(self,forceVector):
        """Applies the specified force to the attached sceneobject"""
        pass
    def forceWithOffset(self,forceVector,offsetPt):
        """Applies the specified offsetted force to the attached sceneobject"""
        pass
    def impulse(self,impulseVector):
        """Adds the given vector to the attached sceneobject's velocity"""
        pass
    def impulseWithOffset(self,impulseVector,offsetPt):
        """Adds the given vector to the attached sceneobject's velocity
        with the specified offset"""
        pass
    def move(self,direction,distance):
        """Moves the attached sceneobject the specified distance in the specified direction"""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object"""
        pass
    def rotate(self,radians,axis):
        """Rotates the attached sceneobject the specified radians around the specified axis"""
        pass
    def shiftMass(self,offsetVector):
        """Shifts the attached sceneobject's center to mass in the specified direction and distance"""
        pass
    def suppress(self,doSuppress):
        """Completely remove the physical, but keep it around so it
        can be added back later."""
        pass
    def torque(self,torqueVector):
        """Applies the specified torque to the attached sceneobject"""
        pass
    def warp(self,position):
        """Warps the sceneobject to a specified location.
        'position' can be a ptPoint3 or a ptMatrix44"""
        pass
    def warpObj(self,objkey):
        """Warps the sceneobject to match the location and orientation of the specified object"""
        pass
class ptPlayer:
    """Player information class.
    Constructed with (avkey, name, playerID, distanceSq); the engine also
    accepts the short form __init__(name, playerID)."""
    def __init__(self,avkey,name,playerID,distanceSq):
        """Create a player record (see class docstring for the short form)."""
        pass
    def getDistanceSq(self):
        """Returns the distance to remote player from local player"""
        pass
    def getPlayerID(self):
        """Returns the unique player ID"""
        pass
    def getPlayerName(self):
        """Returns the name of the player"""
        pass
    def isCCR(self):
        """Is this player a CCR?"""
        pass
    def isServer(self):
        """Is this player a server?"""
        pass
class ptPoint3:
    """Plasma 3D point class with components x, y, z."""
    def __init__(self,x=0, y=0, z=0):
        """Create a point from the given components (defaults to the origin)."""
        pass
    def copy(self):
        """Returns a copy of the point in another ptPoint3 object"""
        pass
    def distance(self,other):
        """Computes the distance from this point to 'other' point"""
        pass
    def distanceSq(self,other):
        """Computes the distance squared from this point to 'other' point
        - this function is faster than distance(other)"""
        pass
    def getX(self):
        """Returns the 'x' component of the point"""
        pass
    def getY(self):
        """Returns the 'y' component of the point"""
        pass
    def getZ(self):
        """Returns the 'z' component of the point"""
        pass
    def setX(self,x):
        """Sets the 'x' component of the point"""
        pass
    def setY(self,y):
        """Sets the 'y' component of the point"""
        pass
    def setZ(self,z):
        """Sets the 'z' component of the point"""
        pass
    def zero(self):
        """Sets the 'x','y' and the 'z' component to zero"""
        pass
class ptSDL:
    """SDL (state description language) accessor — reads and writes
    networked game-state variables."""
    def __init__(self):
        """Initialize the SDL accessor."""
        pass
    def sendToClients(self,key):
        """Sets it so changes to this key are sent to the
        server AND the clients. (Normally it just goes
        to the server.)"""
        pass
    def setDefault(self,key,value):
        """Like setitem, but doesn't broadcast over the net.
        Only use for setting defaults that everyone will
        already know (from reading it off disk)"""
        pass
    def setFlags(self,name,sendImmediate,skipOwnershipCheck):
        """Sets the flags for a variable in this SDL"""
        pass
    def setIndex(self,key,idx,value):
        """Sets the value at a specific index in the tuple,
        so you don't have to pass the whole thing in"""
        pass
    def setIndexNow(self,key,idx,value):
        """Same as setIndex but sends immediately"""
        pass
    def setNotify(self,selfkey,key,tolerance):
        """Sets the OnSDLNotify to be called when 'key'
        SDL variable changes by 'tolerance' (if number)"""
        pass
    def setTagString(self,name,tag):
        """Sets the tag string for a variable"""
        pass
class ptSDLStateDataRecord:
    """Basic SDL state data record class — a named collection of
    ptSimpleStateVariable entries."""
    def __init__(self):
        """Initialize an empty state data record."""
        pass
    def findVar(self,name):
        """Finds and returns the specified ptSimpleStateVariable"""
        pass
    def getName(self):
        """Returns our record's name"""
        pass
    def getVarList(self):
        """Returns the names of the vars we hold as a list of strings"""
        pass
    def setFromDefaults(self,timeStampNow):
        """Sets all our vars to their defaults"""
        pass
class ptSceneobject:
    """Plasma Sceneobject class — wraps one or more scene objects and gives
    access to their transforms, animations, responders, cameras and sounds."""
    def __init__(self,objKey, selfKey):
        """Create a sceneobject wrapper for 'objKey', owned by 'selfKey'."""
        pass
    def addKey(self,key):
        """Mostly used internally.
        Add another sceneobject ptKey"""
        pass
    def animate(self):
        """If we can animate, start animating"""
        pass
    def avatarVelocity(self):
        """Returns the velocity of the first attached avatar scene object"""
        pass
    def fastForwardAttachedResponder(self,state):
        """Fast forward the attached responder to the specified state"""
        pass
    def findObject(self,name):
        """Find a particular object in just the sceneobjects that are attached"""
        pass
    def getKey(self):
        """Get the ptKey of this sceneobject
        If there are more than one attached, get the first one"""
        pass
    def getLocalToParent(self):
        """Returns ptMatrix44 of the local to parent transform for this sceneobject
        - If there is more than one sceneobject attached, returns just the first one"""
        pass
    def getLocalToWorld(self):
        """Returns ptMatrix44 of the local to world transform for this sceneobject
        - If there is more than one sceneobject attached, returns just the first one"""
        pass
    def getName(self):
        """Returns the name of the sceneobject (Max name)
        - If there are more than one sceneobject attached, return just the first one"""
        pass
    def getParentToLocal(self):
        """Returns ptMatrix44 of the parent to local transform for this sceneobject
        - If there is more than one sceneobject attached, returns just the first one"""
        pass
    def getPythonMods(self):
        """Returns list of ptKeys of the python modifiers attached to this sceneobject"""
        pass
    def getResponderState(self):
        """Return the responder state (if we are a responder)"""
        pass
    def getResponders(self):
        """Returns list of ptKeys of the responders attached to this sceneobject"""
        pass
    def getSoundIndex(self,sndComponentName):
        """Get the index of the requested sound component"""
        pass
    def getWorldToLocal(self):
        """Returns ptMatrix44 of the world to local transform for this sceneobject
        - If there is more than one sceneobject attached, returns just the first one"""
        pass
    def isAvatar(self):
        """Returns true if the scene object is an avatar"""
        pass
    def isHuman(self):
        """Returns true if the scene object is a human avatar"""
        pass
    def isLocallyOwned(self):
        """Returns true(1) if this object is locally owned by this client
        or returns false(0) if it is not or don't know"""
        pass
    def netForce(self,forceFlag):
        """Specify whether this object needs to use messages that are forced to the network
        - This is to be used if your Python program is running on only one client
        Such as a game master, only running on the client that owns a particular object
        - Setting the netForce flag on a sceneobject will also set the netForce flag on
        its draw, physics, avatar, particle objects"""
        pass
    def playAnimNamed(self,animName):
        """Play the attached named animation"""
        pass
    def popCamera(self,avKey):
        """Pop the camera stack and go back to the previous camera"""
        pass
    def popCutsceneCamera(self,avKey):
        """Pop the camera stack and go back to previous camera."""
        pass
    def position(self):
        """Returns the scene object's current position"""
        pass
    def pushCamera(self,avKey):
        """Switch to this object (if it is a camera)"""
        pass
    def pushCameraCut(self,avKey):
        """Switch to this object, cutting the view (if it is a camera)"""
        pass
    def pushCutsceneCamera(self,cutFlag,avKey):
        """Switch to this object (assuming that it is actually a camera)"""
        pass
    def rewindAnimNamed(self,animName):
        """Rewind the attached named animation"""
        pass
    def right(self):
        """Returns the scene object's current right vector"""
        pass
    def runAttachedResponder(self,state):
        """Run the attached responder to the specified state"""
        pass
    def setSoundFilename(self,index, filename, isCompressed):
        """Sets the sound attached to this sceneobject to use the specified sound file."""
        pass
    def setTransform(self,local2world,world2local):
        """Set our current transforms"""
        pass
    def stopAnimNamed(self,animName):
        """Stop the attached named animation"""
        pass
    def up(self):
        """Returns the scene object's current up vector"""
        pass
    def view(self):
        """Returns the scene object's current view vector"""
        pass
    def volumeSensorIgnoreExtraEnters(self,ignore):
        """Tells the volume sensor attached to this object to ignore extra enters (default), or not (hack for garrison)."""
        pass
class ptSimpleStateVariable:
    """A single SDL state variable — a typed, possibly indexed value
    held inside a ptSDLStateDataRecord."""
    def __init__(self):
        """Initialize the state variable."""
        pass
    def getBool(self,idx=0):
        """Returns a boolean variable's value"""
        pass
    def getByte(self,idx=0):
        """Returns a byte variable's value"""
        pass
    def getDefault(self):
        """Returns the variable's default"""
        pass
    def getDisplayOptions(self):
        """Returns the variable's display options"""
        pass
    def getDouble(self,idx=0):
        """Returns a double variable's value"""
        pass
    def getFloat(self,idx=0):
        """Returns a float variable's value"""
        pass
    def getInt(self,idx=0):
        """Returns an int variable's value"""
        pass
    def getShort(self,idx=0):
        """Returns a short variable's value"""
        pass
    def getString(self,idx=0):
        """Returns a string variable's value"""
        pass
    def getType(self):
        """Returns the variable's type"""
        pass
    def isAlwaysNew(self):
        """Is this variable always new?"""
        pass
    def isInternal(self):
        """Is this an internal variable?"""
        pass
    def isUsed(self):
        """Is this variable used?"""
        pass
    def setBool(self,val,idx=0):
        """Sets a boolean variable's value"""
        pass
    def setByte(self,val,idx=0):
        """Sets a byte variable's value"""
        pass
    def setDouble(self,val,idx=0):
        """Sets a double variable's value"""
        pass
    def setFloat(self,val,idx=0):
        """Sets a float variable's value"""
        pass
    def setInt(self,val,idx=0):
        """Sets an int variable's value"""
        pass
    def setShort(self,val,idx=0):
        """Sets a short variable's value"""
        pass
    def setString(self,val,idx=0):
        """Sets a string variable's value"""
        pass
class ptSpawnPointInfo:
    """Class to hold spawn point data: title, name and camera stack."""
    def __init__(self,title=None,spawnPt=None):
        """Create a spawn point record, optionally from a title and spawn point."""
        pass
    def getCameraStack(self):
        """Returns the camera stack for this spawnpoint as a string"""
        pass
    def getName(self):
        """Returns the spawnpoint's name"""
        pass
    def getTitle(self):
        """Returns the spawnpoint's title"""
        pass
    def setCameraStack(self,stack):
        """Sets the spawnpoint's camera stack (as a string)"""
        pass
    def setName(self,name):
        """Sets the spawnpoint's name"""
        pass
    def setTitle(self,title):
        """Sets the spawnpoint's title"""
        pass
class ptSpawnPointInfoRef:
    """Class to hold spawn point data (reference flavor of ptSpawnPointInfo)."""
    def __init__(self):
        """Initialize the spawn point reference."""
        pass
    def getCameraStack(self):
        """Returns the camera stack for this spawnpoint as a string"""
        pass
    def getName(self):
        """Returns the spawnpoint's name"""
        pass
    def getTitle(self):
        """Returns the spawnpoint's title"""
        pass
    def setCameraStack(self,stack):
        """Sets the spawnpoint's camera stack (as a string)"""
        pass
    def setName(self,name):
        """Sets the spawnpoint's name"""
        pass
    def setTitle(self,title):
        """Sets the spawnpoint's title"""
        pass
class ptStatusLog:
    """A status log class — writes timestamped lines to a log file
    that can also be shown on the debug screen."""
    def __init__(self):
        """Initialize the status log (call open() before writing)."""
        pass
    def close(self):
        """Close the status log file"""
        pass
    def isOpen(self):
        """Returns whether the status log is currently opened"""
        pass
    def open(self,logName,numLines,flags):
        """Open a status log for writing to
        'logname' is the name of the log file (example: special.log)
        'numLines' is the number of lines to display on debug screen
        'flags' is a PlasmaConstants.PtStatusLogFlags"""
        pass
    def write(self,text,color=None):
        """If the status log is open, write 'text' to log
        'color' is the display color in debug screen"""
        pass
class ptStream:
    """A basic stream class for reading and writing files line-by-line."""
    def __init__(self):
        """Initialize the stream (call open() before reading or writing)."""
        pass
    def close(self):
        """Close the stream file.
        NOTE(review): original docstring said 'status log file' — looks
        copy-pasted from ptStatusLog; this closes the stream's own file."""
        pass
    def isOpen(self):
        """Returns whether the stream file is currently opened"""
        pass
    def open(self,fileName,flags):
        """Open a stream file for reading or writing"""
        pass
    def readlines(self):
        """Reads a list of strings from the file"""
        pass
    def writelines(self,lines):
        """Write a list of strings to the file"""
        pass
class ptSwimCurrentInterface:
    """Creates a new ptSwimCurrentInterface — controls a swim-current region."""
    def __init__(self,key):
        """Create the interface for the swim-current object identified by 'key'."""
        pass
    def disable(self):
        """Presumably disables the swim current — original docs say UNKNOWN; confirm against engine."""
        pass
    def enable(self):
        """Presumably enables the swim current — original docs say UNKNOWN; confirm against engine."""
        pass
class ptVault:
    """Accessor class to the player's vault — the persistent tree of
    player, age and chronicle data stored on the server."""
    def __init__(self):
        """Initialize the vault accessor for the current player."""
        pass
    def addChronicleEntry(self,entryName,type,string):
        """Adds an entry to the player's chronicle with a value of 'string'."""
        pass
    def amAgeCzar(self,ageInfo):
        """Are we the czar (WTH is this?) of the specified age?"""
        pass
    def amAgeOwner(self,ageInfo):
        """Are we the owner of the specified age?"""
        pass
    def amCzarOfCurrentAge(self):
        """Are we the czar (WTH is this?) of the current age?"""
        pass
    def amOwnerOfCurrentAge(self):
        """Are we the owner of the current age?"""
        pass
    def createNeighborhood(self):
        """Creates a new neighborhood"""
        pass
    def findChronicleEntry(self,entryName):
        """Returns a ptVaultNode of type kNodeTypeChronicle of the current player's chronicle entry by entryName."""
        pass
    def findNode(self,templateNode):
        """Find the node matching the template"""
        pass
    def getAgeJournalsFolder(self):
        """Returns a ptVaultFolderNode of the current player's age journals folder."""
        pass
    def getAgesICanVisitFolder(self):
        """Returns a ptVaultFolderNode of ages I can visit"""
        pass
    def getAgesIOwnFolder(self):
        """Returns a ptVaultFolderNode of ages that I own"""
        pass
    def getAllPlayersFolder(self):
        """Returns a ptVaultPlayerInfoListNode of the all players folder."""
        pass
    def getAvatarClosetFolder(self):
        """Do not use.
        Returns a ptVaultFolderNode of the avatars outfit in their closet."""
        pass
    def getAvatarOutfitFolder(self):
        """Do not use.
        Returns a ptVaultFolderNode of the avatars outfit."""
        pass
    def getBuddyListFolder(self):
        """Returns a ptVaultPlayerInfoListNode of the current player's buddy list folder."""
        pass
    def getChronicleFolder(self):
        """Returns a ptVaultFolderNode of the current player's chronicle folder."""
        pass
    def getGlobalInbox(self):
        """Returns a ptVaultFolderNode of the global inbox folder."""
        pass
    def getIgnoreListFolder(self):
        """Returns a ptVaultPlayerInfoListNode of the current player's ignore list folder."""
        pass
    def getInbox(self):
        """Returns a ptVaultFolderNode of the current player's inbox folder."""
        pass
    def getInviteFolder(self):
        """Returns a ptVaultFolderNode of invites"""
        pass
    def getKIUsage(self):
        """Returns a tuple with usage statistics of the KI (# of pics, # of text notes, # of marker games)"""
        pass
    def getLinkToCity(self):
        """Returns a ptVaultAgeLinkNode that will go to the city"""
        pass
    def getLinkToMyNeighborhood(self):
        """Returns a ptVaultAgeLinkNode that will go to my neighborhood"""
        pass
    def getOwnedAgeLink(self,ageInfo):
        """Returns a ptVaultAgeLinkNode to my owned age(ageInfo)"""
        pass
    def getPeopleIKnowAboutFolder(self):
        """Returns a ptVaultPlayerInfoListNode of the current player's people I know about (Recent) list folder."""
        pass
    def getPlayerInfo(self):
        """Returns a ptVaultNode of type kNodeTypePlayerInfo of the current player"""
        pass
    def getPsnlAgeSDL(self):
        """Returns the personal age SDL"""
        pass
    def getVisitAgeLink(self,ageInfo):
        """Returns a ptVaultAgeLinkNode for a visitor to age(ageInfo)"""
        pass
    def inMyNeighborhoodAge(self):
        """Are we in the player's neighborhood age?"""
        pass
    def inMyPersonalAge(self):
        """Are we in the player's personal age?"""
        pass
    def invitePlayerToAge(self,link,playerID):
        """Sends an invitation to visit the age to the specified player"""
        pass
    def offerLinkToPlayer(self,link,playerID):
        """Offer a one-time link to the specified player"""
        pass
    def registerMTStation(self,stationName,mtSpawnPoint):
        """Registers this player at the specified mass-transit point"""
        pass
    def registerOwnedAge(self,link):
        """Registers the specified age as owned by the player"""
        pass
    def registerVisitAge(self,link):
        """Register this age as visitable by this player"""
        pass
    def sendToDevice(self,node,deviceName):
        """Sends a ptVaultNode object to an Age's device by deviceName."""
        pass
    def setAgePublic(self,ageInfo,makePublic):
        """Makes the specified age public or private"""
        pass
    def unInvitePlayerToAge(self,guid,playerID):
        """Revokes the invitation to visit the age"""
        pass
    def unRegisterOwnedAge(self,ageFilename):
        """Unregisters the specified age so it's no longer owned by this player"""
        pass
    def unRegisterVisitAge(self,guid):
        """Unregisters the specified age so it can no longer be visited by this player"""
        pass
    def updatePsnlAgeSDL(self,pyrec):
        """Updates the personal age SDL to the specified data"""
        pass
class ptVaultNode:
    """Vault node class — base class for all nodes in the vault tree.
    Use the upcastTo*() methods to view a generic node as a specific type."""
    def __init__(self):
        """Initialize an empty vault node."""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if child node found matching template, or None"""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid as a string of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created...(?)"""
        pass
    def getCreateTime(self):
        """Returns when this node was created, in a form usable by Python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node"""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID"""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modified time of this node, in a form usable by Python's time library."""
        pass
    def getNode(self,id):
        """Returns ptVaultNodeRef if is a child node, or None"""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node"""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node"""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if node is a child node"""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID"""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to inbox at 'destID'"""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid as a string of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID"""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node"""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoListNode"""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoNode"""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as ptVaultAgeLinkNode"""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as ptVaultChronicleNode"""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as ptVaultFolderNode"""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as ptVaultImageNode"""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as ptVaultMarkerNode"""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoListNode"""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoNode"""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode"""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode"""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode"""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as ptVaultTextNoteNode"""
        pass
class ptVaultFolderNode(ptVaultNode):
"""Plasma vault folder node"""
def __init__(self,n=0):
"""None"""
pass
def addNode(self,node,cb=None,cbContext=0):
"""Adds 'node'(ptVaultNode) as a child to this node."""
pass
def findNode(self,templateNode):
"""Returns ptVaultNode if child node found matching template, or None"""
pass
def folderGetName(self):
"""LEGACY
Returns the folder's name"""
pass
def folderGetType(self):
"""LEGACY
Returns the folder type (of the standard folder types)"""
pass
def folderSetName(self,name):
"""LEGACY
Set the folder name"""
pass
def folderSetType(self,type):
"""LEGACY
Set the folder type"""
pass
def getChildNodeCount(self):
"""Returns how many children this node has."""
pass
def getChildNodeRefList(self):
"""Returns a list of ptVaultNodeRef that are the children of this node."""
pass
def getClientID(self):
"""Returns the client's ID."""
pass
def getCreateAgeCoords(self):
"""Returns the location in the Age where this node was created."""
pass
def getCreateAgeGuid(self):
"""Returns the guid as a string of the Age where this node was created."""
pass
def getCreateAgeName(self):
"""Returns the name of the Age where this node was created."""
pass
def getCreateAgeTime(self):
"""Returns the time in the Age that the node was created...(?)"""
pass
def getCreateTime(self):
"""Returns the when this node was created, that is useable by python's time library."""
pass
def getCreatorNode(self):
"""Returns the creator's node"""
pass
def getCreatorNodeID(self):
"""Returns the creator's node ID"""
pass
def getFolderName(self):
"""Returns the folder's name"""
pass
def getFolderNameW(self):
"""Unicode version of getFolerName"""
pass
def getFolderType(self):
"""Returns the folder type (of the standard folder types)"""
pass
def getID(self):
"""Returns the unique ID of this ptVaultNode."""
pass
def getModifyTime(self):
"""Returns the modified time of this node, that is useable by python's time library."""
pass
def getNode(self,id):
"""Returns ptVaultNodeRef if is a child node, or None"""
pass
def getOwnerNode(self):
"""Returns a ptVaultNode of the owner of this node"""
pass
def getOwnerNodeID(self):
"""Returns the node ID of the owner of this node"""
pass
def getType(self):
"""Returns the type of ptVaultNode this is.
See PlasmaVaultTypes.py"""
pass
def hasNode(self,id):
"""Returns true if node if a child node"""
pass
def linkToNode(self,nodeID,cb=None,cbContext=0):
"""Adds a link to the node designated by nodeID"""
pass
def removeAllNodes(self):
"""Removes all the child nodes on this node."""
pass
def removeNode(self,node,cb=None,cbContext=0):
"""Removes the child 'node'(ptVaultNode) from this node."""
pass
def save(self,cb=None,cbContext=0):
"""Save the changes made to this node."""
pass
def saveAll(self,cb=None,cbContext=0):
"""Saves this node and all its children nodes."""
pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'."""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid, as a string, of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID."""
        pass
    def setFolderName(self,name):
        """Set the folder name."""
        pass
    def setFolderNameW(self,name):
        """Unicode version of setFolderName."""
        pass
    def setFolderType(self,type):
        """Set the folder type."""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node."""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoListNode."""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoNode."""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as a ptVaultAgeLinkNode."""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as a ptVaultChronicleNode."""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as a ptVaultFolderNode."""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as a ptVaultImageNode."""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as a ptVaultMarkerGameNode."""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoListNode."""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoNode."""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode."""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode."""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode."""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as a ptVaultTextNoteNode."""
        pass
class ptVaultAgeInfoListNode(ptVaultFolderNode):
    """Plasma vault age info list node: a folder node holding a list of ages."""
    def __init__(self,n=0):
        """None"""
        pass
    def addAge(self,ageID):
        """Adds ageID to the list of ages."""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if a child node is found matching the template, or None."""
        pass
    def folderGetName(self):
        """LEGACY
        Returns the folder's name"""
        pass
    def folderGetType(self):
        """LEGACY
        Returns the folder type (of the standard folder types)"""
        pass
    def folderSetName(self,name):
        """LEGACY
        Set the folder name"""
        pass
    def folderSetType(self,type):
        """LEGACY
        Set the folder type"""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid, as a string, of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created (semantics unverified in original docs)."""
        pass
    def getCreateTime(self):
        """Returns the creation time of this node, usable by Python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node."""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID."""
        pass
    def getFolderName(self):
        """Returns the folder's name."""
        pass
    def getFolderNameW(self):
        """Unicode version of getFolderName."""
        pass
    def getFolderType(self):
        """Returns the folder type (one of the standard folder types)."""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modification time of this node, usable by Python's time library."""
        pass
    def getNode(self,id):
        """Returns the ptVaultNodeRef if id is a child node, or None."""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node."""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node."""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasAge(self,ageID):
        """Returns whether ageID is in the list of ages."""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node."""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID."""
        pass
    def removeAge(self,ageID):
        """Removes ageID from the list of ages."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'."""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid, as a string, of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID."""
        pass
    def setFolderName(self,name):
        """Set the folder name."""
        pass
    def setFolderNameW(self,name):
        """Unicode version of setFolderName."""
        pass
    def setFolderType(self,type):
        """Set the folder type."""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node."""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoListNode."""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoNode."""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as a ptVaultAgeLinkNode."""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as a ptVaultChronicleNode."""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as a ptVaultFolderNode."""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as a ptVaultImageNode."""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as a ptVaultMarkerGameNode."""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoListNode."""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoNode."""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode."""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode."""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode."""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as a ptVaultTextNoteNode."""
        pass
class ptVaultAgeInfoNode(ptVaultNode):
    """Plasma vault age info node: describes a single age instance."""
    def __init__(self,n=0):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def asAgeInfoStruct(self):
        """Returns this ptVaultAgeInfoNode as a ptAgeInfoStruct."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if a child node is found matching the template, or None."""
        pass
    def getAgeDescription(self):
        """Returns the description of the age."""
        pass
    def getAgeFilename(self):
        """Returns the age filename."""
        pass
    def getAgeID(self):
        """Returns the age ID."""
        pass
    def getAgeInstanceGuid(self):
        """Returns the age instance guid."""
        pass
    def getAgeInstanceName(self):
        """Returns the instance name of the age."""
        pass
    def getAgeLanguage(self):
        """Returns the age's language (integer)."""
        pass
    def getAgeOwnersFolder(self):
        """Returns a ptVaultPlayerInfoList of the players that own this age."""
        pass
    def getAgeSDL(self):
        """Returns a ptVaultSDLNode of the age's SDL."""
        pass
    def getAgeSequenceNumber(self):
        """Returns the sequence number of this instance of the age."""
        pass
    def getAgeUserDefinedName(self):
        """Returns the user-defined part of the age name."""
        pass
    def getCanVisitFolder(self):
        """Returns a ptVaultPlayerInfoList of the players that can visit this age."""
        pass
    def getChildAgesFolder(self):
        """Returns a ptVaultFolderNode of the child ages of this age."""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid, as a string, of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created (semantics unverified in original docs)."""
        pass
    def getCreateTime(self):
        """Returns the creation time of this node, usable by Python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node."""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID."""
        pass
    def getCzar(self):
        """Returns ptVaultPlayerInfoNode of the player that is the Czar."""
        pass
    def getCzarID(self):
        """Returns the ID of the age's czar."""
        pass
    def getDisplayName(self):
        """Returns the displayable version of the age name."""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modification time of this node, usable by Python's time library."""
        pass
    def getNode(self,id):
        """Returns the ptVaultNodeRef if id is a child node, or None."""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node."""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node."""
        pass
    def getParentAgeLink(self):
        """Returns ptVaultAgeLinkNode of the age's parent age, or None if not a child age."""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node."""
        pass
    def isPublic(self):
        """Returns whether the age is public or not."""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'."""
        pass
    def setAgeDescription(self,description):
        """Sets the description of the age."""
        pass
    def setAgeFilename(self,fileName):
        """Sets the filename."""
        pass
    def setAgeID(self,ageID):
        """Sets the age ID."""
        pass
    def setAgeInstanceGuid(self,guid):
        """Sets the age instance GUID."""
        pass
    def setAgeInstanceName(self,instanceName):
        """Sets the instance name."""
        pass
    def setAgeLanguage(self,lang):
        """Sets the age's language (integer)."""
        pass
    def setAgeSequenceNumber(self,seqNumber):
        """Sets the sequence number."""
        pass
    def setAgeUserDefinedName(self,udname):
        """Sets the user-defined part of the name."""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid, as a string, of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID."""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node."""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoListNode."""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoNode."""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as a ptVaultAgeLinkNode."""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as a ptVaultChronicleNode."""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as a ptVaultFolderNode."""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as a ptVaultImageNode."""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as a ptVaultMarkerGameNode."""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoListNode."""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoNode."""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode."""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode."""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode."""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as a ptVaultTextNoteNode."""
        pass
class ptVaultAgeLinkNode(ptVaultNode):
    """Plasma vault age link node: a link from a player to an age instance."""
    def __init__(self,n=0):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def addSpawnPoint(self,point):
        """Adds the specified ptSpawnPointInfo or ptSpawnPointInfoRef."""
        pass
    def asAgeLinkStruct(self):
        """Returns this ptVaultAgeLinkNode as a ptAgeLinkStruct."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if a child node is found matching the template, or None."""
        pass
    def getAgeInfo(self):
        """Returns the ageInfo as a ptAgeInfoStruct."""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid, as a string, of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created (semantics unverified in original docs)."""
        pass
    def getCreateTime(self):
        """Returns the creation time of this node, usable by Python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node."""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID."""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getLocked(self):
        """Returns whether the link is locked or not."""
        pass
    def getModifyTime(self):
        """Returns the modification time of this node, usable by Python's time library."""
        pass
    def getNode(self,id):
        """Returns the ptVaultNodeRef if id is a child node, or None."""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node."""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node."""
        pass
    def getSpawnPoints(self):
        """Returns a list of ptSpawnPointInfo objects."""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def getVolatile(self):
        """Returns whether the link is volatile or not."""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node."""
        pass
    def hasSpawnPoint(self,spawnPtName):
        """Returns true if this link has the specified spawn point."""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def removeSpawnPoint(self,point):
        """Removes the specified spawn point based on a ptSpawnPointInfo, ptSpawnPointInfoRef, or string"""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'."""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid, as a string, of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID."""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setLocked(self,state):
        """Sets whether the link is locked or not."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node."""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def setVolatile(self,state):
        """Sets whether the link is volatile or not."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoListNode."""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoNode."""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as a ptVaultAgeLinkNode."""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as a ptVaultChronicleNode."""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as a ptVaultFolderNode."""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as a ptVaultImageNode."""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as a ptVaultMarkerGameNode."""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoListNode."""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoNode."""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode."""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode."""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode."""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as a ptVaultTextNoteNode."""
        pass
class ptVaultChronicleNode(ptVaultNode):
    """Plasma vault chronicle node: a named, typed, string-valued record."""
    def __init__(self,n=0):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def chronicleGetName(self):
        """LEGACY: Returns the name of the chronicle node."""
        pass
    def chronicleGetType(self):
        """LEGACY: Returns the user defined type of the chronicle node."""
        pass
    def chronicleGetValue(self):
        """LEGACY: Returns the value as a string of this chronicle node."""
        pass
    def chronicleSetName(self,name):
        """LEGACY: Sets the name of the chronicle node."""
        pass
    def chronicleSetType(self,type):
        """LEGACY: Sets this chronicle node to a user defined type."""
        pass
    def chronicleSetValue(self,value):
        """LEGACY: Sets the chronicle to a value that is a string"""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if a child node is found matching the template, or None."""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid, as a string, of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created (semantics unverified in original docs)."""
        pass
    def getCreateTime(self):
        """Returns the creation time of this node, usable by Python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node."""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID."""
        pass
    def getEntryType(self):
        """Returns the user-defined type of the chronicle node."""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modification time of this node, usable by Python's time library."""
        pass
    def getName(self):
        """Returns the name of the chronicle node."""
        pass
    def getNode(self,id):
        """Returns the ptVaultNodeRef if id is a child node, or None."""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node."""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node."""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def getValue(self):
        """Returns the value, as a string, of this chronicle node."""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node."""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'."""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid, as a string, of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID."""
        pass
    def setEntryType(self,type):
        """Sets this chronicle node to a user-defined type."""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setName(self,name):
        """Sets the name of the chronicle node."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node."""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def setValue(self,value):
        """Sets the chronicle to a value that is a string."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoListNode."""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoNode."""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as a ptVaultAgeLinkNode."""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as a ptVaultChronicleNode."""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as a ptVaultFolderNode."""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as a ptVaultImageNode."""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as a ptVaultMarkerGameNode."""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoListNode."""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoNode."""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode."""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode."""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode."""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as a ptVaultTextNoteNode."""
        pass
class ptVaultImageNode(ptVaultNode):
    """Plasma vault image node: holds a ptImage and an optional title/caption."""
    def __init__(self,n=0):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if a child node is found matching the template, or None."""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid, as a string, of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created (semantics unverified in original docs)."""
        pass
    def getCreateTime(self):
        """Returns the creation time of this node, usable by Python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node."""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID."""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getImage(self):
        """Returns the image(ptImage) of this image node."""
        pass
    def getModifyTime(self):
        """Returns the modification time of this node, usable by Python's time library."""
        pass
    def getNode(self,id):
        """Returns the ptVaultNodeRef if id is a child node, or None."""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node."""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node."""
        pass
    def getTitle(self):
        """Returns the title (caption) of this image node."""
        pass
    def getTitleW(self):
        """Unicode version of getTitle."""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node."""
        pass
    def imageGetImage(self):
        """LEGACY
        Returns the image(ptImage) of this image node"""
        pass
    def imageGetTitle(self):
        """LEGACY
        Returns the title (caption) of this image node"""
        pass
    def imageSetImage(self,image):
        """LEGACY
        Sets the image(ptImage) of this image node"""
        pass
    def imageSetTitle(self,title):
        """LEGACY
        Sets the title (caption) of this image node"""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'."""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid, as a string, of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID."""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setImage(self,image):
        """Sets the image(ptImage) of this image node."""
        pass
    def setImageFromBuf(self,buf):
        """Sets our image from a buffer."""
        pass
    def setImageFromScrShot(self):
        """Grabs a screenshot and stuffs it into this node."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node."""
        pass
    def setTitle(self,title):
        """Sets the title (caption) of this image node."""
        pass
    def setTitleW(self,title):
        """Unicode version of setTitle."""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoListNode."""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoNode."""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as a ptVaultAgeLinkNode."""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as a ptVaultChronicleNode."""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as a ptVaultFolderNode."""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as a ptVaultImageNode."""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as a ptVaultMarkerGameNode."""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoListNode."""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoNode."""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode."""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode."""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode."""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as a ptVaultTextNoteNode."""
        pass
class ptVaultMarkerGameNode(ptVaultNode):
    """Plasma vault marker game node"""
    def __init__(self,n=0):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if a child node is found matching the template, or None."""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid, as a string, of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created (semantics unverified in original docs)."""
        pass
    def getCreateTime(self):
        """Returns the creation time of this node, usable by Python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node."""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID."""
        pass
    def getGameGuid(self):
        """Returns the marker game's guid."""
        pass
    def getGameName(self):
        """Returns the marker game's name."""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modification time of this node, usable by Python's time library."""
        pass
    def getNode(self,id):
        """Returns the ptVaultNodeRef if id is a child node, or None."""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node."""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node."""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node."""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'."""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid, as a string, of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID."""
        pass
    def setGameGuid(self,guid):
        """Sets the marker game's guid."""
        pass
    def setGameName(self,name):
        """Sets the marker game's name."""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node."""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoListNode."""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as a ptVaultAgeInfoNode."""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as a ptVaultAgeLinkNode."""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as a ptVaultChronicleNode."""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as a ptVaultFolderNode."""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as a ptVaultImageNode."""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as a ptVaultMarkerGameNode."""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoListNode."""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerInfoNode."""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode."""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode."""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode."""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as a ptVaultTextNoteNode."""
        pass
class ptVaultNodeRef:
    """Vault node relationship pseudo class.

    Engine API stub: every method body is a placeholder (pass); the real
    implementation is provided natively by the Plasma engine at runtime.
    """
    def __init__(self):
        """None"""
        pass
    def beenSeen(self):
        """Returns true until we reimplement this"""
        pass
    def getChild(self):
        """Returns a ptVaultNode that is the child of this reference"""
        pass
    def getChildID(self):
        """Returns id of the child node"""
        pass
    def getParent(self):
        """Returns a ptVaultNode that is the parent of the reference"""
        pass
    def getParentID(self):
        """Returns id of the parent node"""
        pass
    def getSaver(self):
        """Returns a ptVaultPlayerInfoNode of the player that created this relationship"""
        pass
    def getSaverID(self):
        """Returns id of the player that created this relationship"""
        pass
    def setSeen(self):
        """Does nothing until we reimplement this"""
        pass
class ptVaultPlayerInfoListNode(ptVaultFolderNode):
    """Plasma vault player info list node.

    Engine API stub: every method body is a placeholder (pass); the real
    implementation is provided natively by the Plasma engine at runtime.
    """
    def __init__(self,n=0):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def addPlayer(self,playerID):
        """Adds playerID player to this player info list node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if child node found matching template, or None"""
        pass
    def folderGetName(self):
        """LEGACY: Returns the folder's name"""
        pass
    def folderGetType(self):
        """LEGACY: Returns the folder type (of the standard folder types)"""
        pass
    def folderSetName(self,name):
        """LEGACY: Set the folder name"""
        pass
    def folderSetType(self,type):
        """LEGACY: Set the folder type"""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid as a string of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created...(?)"""
        pass
    def getCreateTime(self):
        """Returns the time when this node was created, usable by python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node"""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID"""
        pass
    def getFolderName(self):
        """Returns the folder's name"""
        pass
    def getFolderNameW(self):
        """Unicode version of getFolderName"""
        pass
    def getFolderType(self):
        """Returns the folder type (of the standard folder types)"""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modified time of this node, usable by python's time library."""
        pass
    def getNode(self,id):
        """Returns ptVaultNodeRef if the node with the given id is a child, or None"""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node"""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node"""
        pass
    def getPlayer(self,playerID):
        """Gets the player info node for the specified player."""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node"""
        pass
    def hasPlayer(self,playerID):
        """Returns whether the 'playerID' is a member of this player info list node."""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID"""
        pass
    def playerlistAddPlayer(self,playerID):
        """LEGACY: Adds playerID player to this player info list node."""
        pass
    def playerlistGetPlayer(self,playerID):
        """LEGACY: Gets the player info node for the specified player."""
        pass
    def playerlistHasPlayer(self,playerID):
        """LEGACY: Returns whether the 'playerID' is a member of this player info list node."""
        pass
    def playerlistRemovePlayer(self,playerID):
        """LEGACY: Removes playerID player from this player info list node."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def removePlayer(self,playerID):
        """Removes playerID player from this player info list node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'"""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid as a string of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID"""
        pass
    def setFolderName(self,name):
        """Set the folder name"""
        pass
    def setFolderNameW(self,name):
        """Unicode version of setFolderName"""
        pass
    def setFolderType(self,type):
        """Set the folder type"""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node"""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def sort(self):
        """Sorts the player list by some means...?"""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoListNode"""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoNode"""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as ptVaultAgeLinkNode"""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as ptVaultChronicleNode"""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as ptVaultFolderNode"""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as ptVaultImageNode"""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as ptVaultMarkerNode"""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoListNode"""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoNode"""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode"""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode"""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode"""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as ptVaultTextNoteNode"""
        pass
class ptVaultPlayerInfoNode(ptVaultNode):
    """Plasma vault player info node.

    Engine API stub: every method body is a placeholder (pass); the real
    implementation is provided natively by the Plasma engine at runtime.
    """
    def __init__(self):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if child node found matching template, or None"""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid as a string of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created...(?)"""
        pass
    def getCreateTime(self):
        """Returns the time when this node was created, usable by python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node"""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID"""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modified time of this node, usable by python's time library."""
        pass
    def getNode(self,id):
        """Returns ptVaultNodeRef if the node with the given id is a child, or None"""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node"""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node"""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node"""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID"""
        pass
    def playerGetAgeGuid(self):
        """Returns the guid as a string of where the player is for this player info node."""
        pass
    def playerGetAgeInstanceName(self):
        """Returns the name of the Age where the player is for this player info node."""
        pass
    def playerGetCCRLevel(self):
        """Returns the ccr level of the player for this player info node."""
        pass
    def playerGetID(self):
        """Returns the player ID for this player info node."""
        pass
    def playerGetName(self):
        """Returns the player name of this player info node."""
        pass
    def playerIsOnline(self):
        """Returns the online status of the player for this player info node."""
        pass
    def playerSetAgeGuid(self,guidString):
        """Not sure this should be used. Sets the guid for this player info node."""
        pass
    def playerSetAgeInstanceName(self,name):
        """Not sure this should be used. Sets the name of the age where the player is for this player info node."""
        pass
    def playerSetID(self,playerID):
        """Not sure this should be used. Sets the playerID for this player info node."""
        pass
    def playerSetName(self,name):
        """Not sure this should be used. Sets the player name of this player info node."""
        pass
    def playerSetOnline(self,state):
        """Not sure this should be used. Sets the state of the player online status for this player info node."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'"""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid as a string of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID"""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node"""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoListNode"""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoNode"""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as ptVaultAgeLinkNode"""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as ptVaultChronicleNode"""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as ptVaultFolderNode"""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as ptVaultImageNode"""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as ptVaultMarkerNode"""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoListNode"""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoNode"""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode"""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode"""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode"""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as ptVaultTextNoteNode"""
        pass
class ptVaultSDLNode(ptVaultNode):
    """Plasma vault SDL node.

    Engine API stub: every method body is a placeholder (pass); the real
    implementation is provided natively by the Plasma engine at runtime.
    """
    def __init__(self):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if child node found matching template, or None"""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid as a string of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created...(?)"""
        pass
    def getCreateTime(self):
        """Returns the time when this node was created, usable by python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node"""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID"""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getIdent(self):
        """UNKNOWN"""
        pass
    def getModifyTime(self):
        """Returns the modified time of this node, usable by python's time library."""
        pass
    def getNode(self,id):
        """Returns ptVaultNodeRef if the node with the given id is a child, or None"""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node"""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node"""
        pass
    def getStateDataRecord(self):
        """Returns the ptSDLStateDataRecord associated with this node"""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node"""
        pass
    def initStateDataRecord(self,filename,flags):
        """Read the SDL Rec from File if needed"""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID"""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'"""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid as a string of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID"""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setIdent(self,v):
        """UNKNOWN"""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node"""
        pass
    def setStateDataRecord(self,rec,writeOptions=0):
        """Sets the ptSDLStateDataRecord"""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoListNode"""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoNode"""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as ptVaultAgeLinkNode"""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as ptVaultChronicleNode"""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as ptVaultFolderNode"""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as ptVaultImageNode"""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as ptVaultMarkerNode"""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoListNode"""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoNode"""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode"""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode"""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode"""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as ptVaultTextNoteNode"""
        pass
class ptVaultSystemNode(ptVaultNode):
    """Plasma vault system node.

    Engine API stub: every method body is a placeholder (pass); the real
    implementation is provided natively by the Plasma engine at runtime.
    """
    def __init__(self):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if child node found matching template, or None"""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid as a string of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created...(?)"""
        pass
    def getCreateTime(self):
        """Returns the time when this node was created, usable by python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node"""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID"""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modified time of this node, usable by python's time library."""
        pass
    def getNode(self,id):
        """Returns ptVaultNodeRef if the node with the given id is a child, or None"""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node"""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node"""
        pass
    def getType(self):
        """Returns the type of ptVaultNode this is.
        See PlasmaVaultTypes.py"""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node"""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID"""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'"""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid as a string of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID"""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node"""
        pass
    def setType(self,type):
        """Set the type of ptVaultNode this is."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoListNode"""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoNode"""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as ptVaultAgeLinkNode"""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as ptVaultChronicleNode"""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as ptVaultFolderNode"""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as ptVaultImageNode"""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as ptVaultMarkerNode"""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoListNode"""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoNode"""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode"""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode"""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode"""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as ptVaultTextNoteNode"""
        pass
class ptVaultTextNoteNode(ptVaultNode):
    """Plasma vault text note node.

    Engine API stub: every method body is a placeholder (pass); the real
    implementation is provided natively by the Plasma engine at runtime.
    """
    def __init__(self):
        """None"""
        pass
    def addNode(self,node,cb=None,cbContext=0):
        """Adds 'node'(ptVaultNode) as a child to this node."""
        pass
    def findNode(self,templateNode):
        """Returns ptVaultNode if child node found matching template, or None"""
        pass
    def getChildNodeCount(self):
        """Returns how many children this node has."""
        pass
    def getChildNodeRefList(self):
        """Returns a list of ptVaultNodeRef that are the children of this node."""
        pass
    def getClientID(self):
        """Returns the client's ID."""
        pass
    def getCreateAgeCoords(self):
        """Returns the location in the Age where this node was created."""
        pass
    def getCreateAgeGuid(self):
        """Returns the guid as a string of the Age where this node was created."""
        pass
    def getCreateAgeName(self):
        """Returns the name of the Age where this node was created."""
        pass
    def getCreateAgeTime(self):
        """Returns the time in the Age that the node was created...(?)"""
        pass
    def getCreateTime(self):
        """Returns the time when this node was created, usable by python's time library."""
        pass
    def getCreatorNode(self):
        """Returns the creator's node"""
        pass
    def getCreatorNodeID(self):
        """Returns the creator's node ID"""
        pass
    def getDeviceInbox(self):
        """Returns a ptVaultFolderNode"""
        pass
    def getID(self):
        """Returns the unique ID of this ptVaultNode."""
        pass
    def getModifyTime(self):
        """Returns the modified time of this node, usable by python's time library."""
        pass
    def getNode(self,id):
        """Returns ptVaultNodeRef if the node with the given id is a child, or None"""
        pass
    def getOwnerNode(self):
        """Returns a ptVaultNode of the owner of this node"""
        pass
    def getOwnerNodeID(self):
        """Returns the node ID of the owner of this node"""
        pass
    def getSubType(self):
        """Returns the subtype of this text note node."""
        pass
    def getText(self):
        """Returns the text of this text note node."""
        pass
    def getTextW(self):
        """Unicode version of getText."""
        pass
    def getTitle(self):
        """Returns the title of this text note node."""
        pass
    def getTitleW(self):
        """Unicode version of getTitle"""
        pass
    def getType(self):
        """Returns the type of text note for this text note node."""
        pass
    def hasNode(self,id):
        """Returns true if the node with the given id is a child node"""
        pass
    def linkToNode(self,nodeID,cb=None,cbContext=0):
        """Adds a link to the node designated by nodeID"""
        pass
    def noteGetSubType(self):
        """LEGACY: Returns the subtype of this text note node."""
        pass
    def noteGetText(self):
        """LEGACY: Returns the text of this text note node."""
        pass
    def noteGetTitle(self):
        """LEGACY: Returns the title of this text note node."""
        pass
    def noteGetType(self):
        """LEGACY: Returns the type of text note for this text note node."""
        pass
    def noteSetSubType(self,subType):
        """LEGACY: Sets the subtype of this text note node."""
        pass
    def noteSetText(self,text):
        """LEGACY: Sets the text of this text note node."""
        pass
    def noteSetTitle(self,title):
        """LEGACY: Sets the title of this text note node."""
        pass
    def noteSetType(self,type):
        """LEGACY: Sets the type of text note for this text note node."""
        pass
    def removeAllNodes(self):
        """Removes all the child nodes on this node."""
        pass
    def removeNode(self,node,cb=None,cbContext=0):
        """Removes the child 'node'(ptVaultNode) from this node."""
        pass
    def save(self,cb=None,cbContext=0):
        """Save the changes made to this node."""
        pass
    def saveAll(self,cb=None,cbContext=0):
        """Saves this node and all its children nodes."""
        pass
    def sendTo(self,destID,cb=None,cbContext=0):
        """Send this node to the inbox at 'destID'"""
        pass
    def setCreateAgeGuid(self,guid):
        """Set guid as a string of the Age where this node was created."""
        pass
    def setCreateAgeName(self,name):
        """Set name of the Age where this node was created."""
        pass
    def setCreatorNodeID(self,id):
        """Set creator's node ID"""
        pass
    def setDeviceInbox(self,inboxName,cb=None,cbContext=0):
        """Sets the device inbox"""
        pass
    def setID(self,id):
        """Sets ID of this ptVaultNode."""
        pass
    def setOwnerNodeID(self,id):
        """Set node ID of the owner of this node"""
        pass
    def setSubType(self,subType):
        """Sets the subtype of this text note node."""
        pass
    def setText(self,text):
        """Sets the text of this text note node."""
        pass
    def setTextW(self,text):
        """Unicode version of setText"""
        pass
    def setTitle(self,title):
        """Sets the title of this text note node."""
        pass
    def setTitleW(self,title):
        """Unicode version of setTitle"""
        pass
    def setType(self,type):
        """Sets the type of text note for this text note node."""
        pass
    def upcastToAgeInfoListNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoListNode"""
        pass
    def upcastToAgeInfoNode(self):
        """Returns this ptVaultNode as ptVaultAgeInfoNode"""
        pass
    def upcastToAgeLinkNode(self):
        """Returns this ptVaultNode as ptVaultAgeLinkNode"""
        pass
    def upcastToChronicleNode(self):
        """Returns this ptVaultNode as ptVaultChronicleNode"""
        pass
    def upcastToFolderNode(self):
        """Returns this ptVaultNode as ptVaultFolderNode"""
        pass
    def upcastToImageNode(self):
        """Returns this ptVaultNode as ptVaultImageNode"""
        pass
    def upcastToMarkerGameNode(self):
        """Returns this ptVaultNode as ptVaultMarkerNode"""
        pass
    def upcastToPlayerInfoListNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoListNode"""
        pass
    def upcastToPlayerInfoNode(self):
        """Returns this ptVaultNode as ptVaultPlayerInfoNode"""
        pass
    def upcastToPlayerNode(self):
        """Returns this ptVaultNode as a ptVaultPlayerNode"""
        pass
    def upcastToSDLNode(self):
        """Returns this ptVaultNode as a ptVaultSDLNode"""
        pass
    def upcastToSystemNode(self):
        """Returns this ptVaultNode as a ptVaultSystemNode"""
        pass
    def upcastToTextNoteNode(self):
        """Returns this ptVaultNode as ptVaultTextNoteNode"""
        pass
class ptVector3:
    """Plasma 3-component vector class.

    Engine API stub: every method body is a placeholder (pass); the real
    implementation is provided natively by the Plasma engine at runtime.
    """
    def __init__(self,x=0, y=0, z=0):
        """None"""
        pass
    def add(self,other):
        """Adds other to the current vector"""
        pass
    def copy(self):
        """Copies the vector into another one (which it returns)"""
        pass
    def crossProduct(self,other):
        """Finds the cross product between other and this vector"""
        pass
    def dotProduct(self,other):
        """Finds the dot product between other and this vector"""
        pass
    def getX(self):
        """Returns the 'x' component of the vector"""
        pass
    def getY(self):
        """Returns the 'y' component of the vector"""
        pass
    def getZ(self):
        """Returns the 'z' component of the vector"""
        pass
    def length(self):
        """Returns the length of the vector"""
        pass
    def lengthSq(self):
        """Returns the squared length of the vector
        - faster than length() since it avoids the square root"""
        pass
    def normalize(self):
        """Normalizes the vector to length 1"""
        pass
    def scale(self,scale):
        """Scale the vector by scale"""
        pass
    def setX(self,x):
        """Sets the 'x' component of the vector"""
        pass
    def setY(self,y):
        """Sets the 'y' component of the vector"""
        pass
    def setZ(self,z):
        """Sets the 'z' component of the vector"""
        pass
    def subtract(self,other):
        """Subtracts other from the current vector"""
        pass
    def zero(self):
        """Zeros the vector's components"""
        pass
class ptWaveSet:
"""Creates a new ptWaveSet"""
def __init__(self,ey):
"""None"""
pass
def getDepthFalloff(self):
"""Returns the attribute's value"""
pass
def getEnvCenter(self):
"""Returns the attribute's value"""
pass
def getEnvRadius(self):
"""Returns the attribute's value"""
pass
def getGeoAmpOverLen(self):
"""Returns the attribute's value"""
pass
def getGeoAngleDev(self):
"""Returns the attribute's value"""
pass
def getGeoChop(self):
"""Returns the attribute's value"""
pass
def getGeoMaxLength(self):
"""Returns the attribute's value"""
pass
def getGeoMinLength(self):
"""Returns the attribute's value"""
pass
def getMaxAtten(self):
"""Returns the attribute's value"""
pass
def getMinAtten(self):
"""Returns the attribute's value"""
pass
def getOpacFalloff(self):
"""Returns the attribute's value"""
pass
def getOpacOffset(self):
"""Returns the attribute's value"""
pass
def getReflFalloff(self):
"""Returns the attribute's value"""
pass
def getReflOffset(self):
"""Returns the attribute's value"""
pass
def getRippleScale(self):
"""Returns the attribute's value"""
pass
def getSpecularEnd(self):
"""Returns the attribute's value"""
pass
def getSpecularMute(self):
"""Returns the attribute's value"""
pass
def getSpecularNoise(self):
"""Returns the attribute's value"""
pass
def getSpecularStart(self):
"""Returns the attribute's value"""
pass
def getSpecularTint(self):
"""Returns the attribute's value"""
pass
def getTexAmpOverLen(self):
"""Returns the attribute's value"""
pass
def getTexAngleDev(self):
"""Returns the attribute's value"""
pass
def getTexChop(self):
"""Returns the attribute's value"""
pass
def getTexMaxLength(self):
"""Returns the attribute's value"""
pass
def getTexMinLength(self):
"""Returns the attribute's value"""
pass
def getWaterHeight(self):
"""Returns the attribute's value"""
pass
def getWaterOffset(self):
"""Returns the attribute's value"""
pass
def getWaterOpacity(self):
"""Returns the attribute's value"""
pass
def getWaterTint(self):
"""Returns the attribute's value"""
pass
def getWaveFalloff(self):
"""Returns the attribute's value"""
pass
def getWaveOffset(self):
"""Returns the attribute's value"""
pass
def getWindDir(self):
"""Returns the attribute's value"""
pass
def setDepthFalloff(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setEnvCenter(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setEnvRadius(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setGeoAmpOverLen(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setGeoAngleDev(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setGeoChop(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setGeoMaxLength(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setGeoMinLength(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setMaxAtten(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setMinAtten(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setOpacFalloff(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setOpacOffset(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setReflFalloff(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setReflOffset(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setRippleScale(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setSpecularEnd(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setSpecularMute(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setSpecularNoise(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setSpecularStart(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setSpecularTint(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setTexAmpOverLen(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setTexAngleDev(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setTexChop(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setTexMaxLength(self,s, secs = 0):
"""Sets the attribute to s over secs time"""
pass
def setTexMinLength(self,s, secs = 0):
    """Set texMinLength to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
def setWaterHeight(self,s, secs = 0):
    """Set waterHeight to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
def setWaterOffset(self,s, secs = 0):
    """Set waterOffset to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
def setWaterOpacity(self,s, secs = 0):
    """Set waterOpacity to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
def setWaterTint(self,s, secs = 0):
    """Set waterTint to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
def setWaveFalloff(self,s, secs = 0):
    """Set waveFalloff to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
def setWaveOffset(self,s, secs = 0):
    """Set waveOffset to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
def setWindDir(self,s, secs = 0):
    """Set windDir to *s*, interpolated over *secs* seconds (stub: body is a no-op)."""
    pass
|
zrax/moul-scripts
|
Python/plasma/Plasma.py
|
Python
|
gpl-3.0
| 263,368
|
[
"VisIt"
] |
e8b01fb607b31d22313aca18eb6c951c5ef9cbc8e4cdc46a2d6847cf1a42628f
|
# Build and run a solvated-benzene MD simulation with OpenMM, applying a
# REST (replica-exchange solute tempering) perturbation to the solute atoms,
# then running NPT equilibration and production with DCD/log reporters.
import numpy as np
import mdtraj as md
from simtk.openmm import app
import simtk.openmm as mm
from simtk import unit as u
from repex import rest
# Identifiers used to build the output file names below.
code = "benzene"
ff_name = "amber99sbildn"
water_name = 'tip3p'
which_forcefield = "%s.xml" % ff_name
which_water = '%s.xml' % water_name
ligand_filename = "./benzene2.pdb"
padding = 0.9 * u.nanometers
cutoff = 0.95 * u.nanometers
output_frequency = 1000  # steps between reporter outputs
n_equil_steps = 15000  # equilibration length (steps)
n_steps = 50000000  # production length (steps)
# Load the ligand and convert coordinates/topology to OpenMM objects.
ligand_traj = md.load(ligand_filename)
ligand_xyz = ligand_traj.openmm_positions(0)
ligand_top = ligand_traj.top.to_openmm()
ff = app.ForceField(which_forcefield, "benzene.xml", which_water)
temperature = 300.  # NOTE(review): unitless here but passed where kelvin is expected — confirm intended
pressure = 1.0 * u.atmospheres
# Solvate the ligand in a TIP3P water box with the requested padding.
modeller = app.modeller.Modeller(ligand_top, ligand_xyz)
modeller.addSolvent(ff, padding=padding, model='tip3p')
topology = modeller.topology
positions = modeller.positions
system = ff.createSystem(topology, nonbondedMethod=app.PME, nonbondedCutoff=cutoff, constraints=app.HBonds)
desired_temperature = 300. * u.kelvin
# The first 12 atoms (the benzene solute) form the REST "hot" region.
hot_atoms = np.arange(12)
rest.REST.perturb_system(system, temperature=desired_temperature, reference_temperature=temperature, hot_atoms=hot_atoms)
system.addForce(mm.MonteCarloBarostat(pressure, temperature, 25))
integrator = mm.LangevinIntegrator(temperature, 1.0 / u.picoseconds, 1.0 * u.femtoseconds)
simulation = app.Simulation(topology, system, integrator)
simulation.context.setPositions(positions)
print('Minimizing...')
simulation.minimizeEnergy()
simulation.context.setVelocitiesToTemperature(temperature)
print('Equilibrating...')
simulation.step(n_equil_steps)
print('Production...')
# Output paths encode system, force field, water model and temperature.
dcd_filename = "./water/%s_%s_%s_%s.dcd" % (code, ff_name, water_name, desired_temperature)
log_filename = "./water/%s_%s_%s_%s.log" % (code, ff_name, water_name, desired_temperature)
simulation.reporters.append(app.DCDReporter(dcd_filename, output_frequency))
simulation.reporters.append(app.StateDataReporter(open(log_filename, 'w'), output_frequency, step=True, time=True, speed=True))
simulation.step(n_steps)
|
kyleabeauchamp/T4Binding
|
code/benzene_box.py
|
Python
|
gpl-2.0
| 2,030
|
[
"MDTraj",
"OpenMM"
] |
a32fa37a836b560fce72f78495f5d7e4b01d3c86b367fb880042bc329d6a2a1b
|
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2011-2014 Google, Inc.
# Copyright (c) 2012 Tim Hatch <tim@timhatch.com>
# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Brett Cannon <brett@python.org>
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 Martin von Gagern <gagern@google.com>
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
# Copyright (c) 2018 Alexander Todorov <atodorov@otb.bg>
# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""Checks for various exception related errors."""
import builtins
import inspect
import sys
import typing
import astroid
from pylint import checkers
from pylint.checkers import utils
from pylint import interfaces
def _builtin_exceptions():
def predicate(obj):
return isinstance(obj, type) and issubclass(obj, BaseException)
members = inspect.getmembers(builtins, predicate)
return {exc.__name__ for (_, exc) in members}
def _annotated_unpack_infer(stmt, context=None):
    """Yield ``(original node, inferred node)`` pairs for *stmt*.

    A List/Tuple statement is unpacked: each element is safely inferred and
    yielded alongside the element itself, skipping uninferable elements.
    Any other statement is inferred directly, skipping uninferable results.
    """
    if isinstance(stmt, (astroid.List, astroid.Tuple)):
        pairs = ((elt, utils.safe_infer(elt)) for elt in stmt.elts)
        yield from (
            (elt, value)
            for elt, value in pairs
            if value and value is not astroid.Uninferable
        )
        return
    yield from (
        (stmt, value)
        for value in stmt.infer(context)
        if value is not astroid.Uninferable
    )
def _is_raising(body: typing.List) -> bool:
    """Return True when any statement in *body* is a ``raise`` statement."""
    return any(isinstance(node, astroid.Raise) for node in body)
# True on Python 3 — gates the py2-only tuple-raise handling below.
PY3K = sys.version_info >= (3, 0)
# Exception names considered too broad to catch (see "broad-except").
OVERGENERAL_EXCEPTIONS = ("BaseException", "Exception")
BUILTINS_NAME = builtins.__name__
# Message table emitted by ExceptionsChecker:
# msgid -> (message template, symbolic name, long description).
MSGS = {
    "E0701": (
        "Bad except clauses order (%s)",
        "bad-except-order",
        "Used when except clauses are not in the correct order (from the "
        "more specific to the more generic). If you don't fix the order, "
        "some exceptions may not be caught by the most specific handler.",
    ),
    "E0702": (
        "Raising %s while only classes or instances are allowed",
        "raising-bad-type",
        "Used when something which is neither a class, an instance or a "
        "string is raised (i.e. a `TypeError` will be raised).",
    ),
    "E0703": (
        "Exception context set to something which is not an exception, nor None",
        "bad-exception-context",
        'Used when using the syntax "raise ... from ...", '
        "where the exception context is not an exception, "
        "nor None.",
    ),
    "E0704": (
        "The raise statement is not inside an except clause",
        "misplaced-bare-raise",
        "Used when a bare raise is not used inside an except clause. "
        "This generates an error, since there are no active exceptions "
        "to be reraised. An exception to this rule is represented by "
        "a bare raise inside a finally clause, which might work, as long "
        "as an exception is raised inside the try block, but it is "
        "nevertheless a code smell that must not be relied upon.",
    ),
    "E0710": (
        "Raising a new style class which doesn't inherit from BaseException",
        "raising-non-exception",
        "Used when a new style class which doesn't inherit from "
        "BaseException is raised.",
    ),
    "E0711": (
        "NotImplemented raised - should raise NotImplementedError",
        "notimplemented-raised",
        "Used when NotImplemented is raised instead of NotImplementedError",
    ),
    "E0712": (
        "Catching an exception which doesn't inherit from Exception: %s",
        "catching-non-exception",
        "Used when a class which doesn't inherit from "
        "Exception is used as an exception in an except clause.",
    ),
    "W0702": (
        "No exception type(s) specified",
        "bare-except",
        "Used when an except clause doesn't specify exceptions type to catch.",
    ),
    "W0703": (
        "Catching too general exception %s",
        "broad-except",
        "Used when an except catches a too general exception, "
        "possibly burying unrelated errors.",
    ),
    "W0705": (
        "Catching previously caught exception type %s",
        "duplicate-except",
        "Used when an except catches a type that was already caught by "
        "a previous handler.",
    ),
    "W0706": (
        "The except handler raises immediately",
        "try-except-raise",
        "Used when an except handler uses raise as its first or only "
        "operator. This is useless because it raises back the exception "
        "immediately. Remove the raise operator or the entire "
        "try-except-raise block!",
    ),
    "W0711": (
        'Exception to catch is the result of a binary "%s" operation',
        "binary-op-exception",
        "Used when the exception to catch is of the form "
        '"except A or B:". If intending to catch multiple, '
        'rewrite as "except (A, B):"',
    ),
    "W0715": (
        "Exception arguments suggest string formatting might be intended",
        "raising-format-tuple",
        "Used when passing multiple arguments to an exception "
        "constructor, the first of them a string literal containing what "
        "appears to be placeholders intended for formatting",
    ),
    "W0716": (
        "Invalid exception operation. %s",
        "wrong-exception-operation",
        "Used when an operation is done against an exception, but the operation "
        "is not valid for the exception in question. Usually emitted when having "
        "binary operations between exceptions in except handlers.",
    ),
}
class BaseVisitor:
    """Common dispatch machinery for the visitors defined in this module.

    ``visit`` routes a node to ``visit_<classname>`` when such a method is
    defined on the subclass, falling back to ``visit_default`` otherwise.
    """

    def __init__(self, checker, node):
        self._checker = checker
        self._node = node

    def visit(self, node):
        """Dispatch *node* to its type-specific handler, or the default."""
        kind = node.__class__.__name__.lower()
        handler = getattr(self, "visit_" + kind, None)
        (handler or self.visit_default)(node)

    def visit_default(self, node):  # pylint: disable=unused-argument
        """Default no-op implementation for all node kinds."""
class ExceptionRaiseRefVisitor(BaseVisitor):
    """Visit references (anything that is not an AST leaf)."""

    def visit_name(self, name):
        """Flag ``raise NotImplemented`` (should be NotImplementedError)."""
        if name.name == "NotImplemented":
            self._checker.add_message("notimplemented-raised", node=self._node)

    def visit_call(self, call):
        """Check the called name, then look for format-style first arguments."""
        func = call.func
        if isinstance(func, astroid.Name):
            self.visit_name(func)
        if len(call.args) <= 1:
            return
        first = call.args[0]
        if isinstance(first, astroid.Const) and isinstance(first.value, str):
            text = first.value
            # A leading string with placeholders plus extra args suggests
            # the caller meant to format the message, not pass a tuple.
            if "%" in text or ("{" in text and "}" in text):
                self._checker.add_message("raising-format-tuple", node=self._node)
class ExceptionRaiseLeafVisitor(BaseVisitor):
    """Visitor for the leaf kinds a raise value can resolve to."""

    def visit_const(self, const):
        """Non-string constants cannot be raised."""
        value = const.value
        if isinstance(value, str):
            # raising-string will be emitted from python3 porting checker.
            return
        self._checker.add_message(
            "raising-bad-type", node=self._node, args=value.__class__.__name__
        )

    def visit_instance(self, instance):
        """Delegate to the class of the raised instance."""
        # pylint: disable=protected-access
        self.visit_classdef(instance._proxied)

    # Exception instances have a particular class type
    visit_exceptioninstance = visit_instance

    def visit_classdef(self, cls):
        """Raising a class is valid only if it derives from BaseException."""
        if utils.inherit_from_std_ex(cls) or not utils.has_known_bases(cls):
            return
        symbol = "raising-non-exception" if cls.newstyle else "nonstandard-exception"
        self._checker.add_message(symbol, node=self._node)

    def visit_tuple(self, tuple_node):
        """Tuples may only be raised on Python 2, and never when empty."""
        if PY3K or not tuple_node.elts:
            self._checker.add_message("raising-bad-type", node=self._node, args="tuple")
            return
        # On Python 2, using the following is not an error:
        #     raise (ZeroDivisionError, None)
        #     raise (ZeroDivisionError, )
        # Only the first element needs to be an exception; verifying the
        # remaining elements is outside the scope of this check.
        head = utils.safe_infer(tuple_node.elts[0])
        if not head or head is astroid.Uninferable:
            return
        is_instance_subtype = (
            isinstance(head, astroid.Instance)
            and head.__class__.__name__ != "Instance"
        )
        if is_instance_subtype:
            # TODO: explain why
            self.visit_default(tuple_node)
        else:
            self.visit(head)

    def visit_default(self, node):
        """Anything else is not a valid raise operand."""
        name = getattr(node, "name", node.__class__.__name__)
        self._checker.add_message("raising-bad-type", node=self._node, args=name)
class ExceptionsChecker(checkers.BaseChecker):
    """Exception related checks."""
    __implements__ = interfaces.IAstroidChecker
    name = "exceptions"
    msgs = MSGS
    priority = -4
    options = (
        (
            "overgeneral-exceptions",
            {
                "default": OVERGENERAL_EXCEPTIONS,
                "type": "csv",
                "metavar": "<comma-separated class names>",
                "help": "Exceptions that will emit a warning "
                'when being caught. Defaults to "%s".'
                % (", ".join(OVERGENERAL_EXCEPTIONS),),
            },
        ),
    )
    def open(self):
        """Cache the builtin exception names once per lint run."""
        self._builtin_exceptions = _builtin_exceptions()
        super(ExceptionsChecker, self).open()
    @utils.check_messages(
        "nonstandard-exception",
        "misplaced-bare-raise",
        "raising-bad-type",
        "raising-non-exception",
        "notimplemented-raised",
        "bad-exception-context",
        "raising-format-tuple",
    )
    def visit_raise(self, node):
        """Check a raise statement: bare raises, contexts and the raised value."""
        if node.exc is None:
            self._check_misplaced_bare_raise(node)
            return
        if PY3K and node.cause:
            self._check_bad_exception_context(node)
        expr = node.exc
        # First inspect the raw reference (names, calls), then the inferred leaf.
        ExceptionRaiseRefVisitor(self, node).visit(expr)
        try:
            inferred_value = expr.inferred()[-1]
        except astroid.InferenceError:
            pass
        else:
            if inferred_value:
                ExceptionRaiseLeafVisitor(self, node).visit(inferred_value)
    def _check_misplaced_bare_raise(self, node):
        """Emit misplaced-bare-raise unless the bare raise sits in an except clause."""
        # Filter out if it's present in __exit__.
        scope = node.scope()
        if (
            isinstance(scope, astroid.FunctionDef)
            and scope.is_method()
            and scope.name == "__exit__"
        ):
            return
        current = node
        # Stop when a new scope is generated or when the raise
        # statement is found inside a TryFinally.
        ignores = (astroid.ExceptHandler, astroid.FunctionDef)
        while current and not isinstance(current.parent, ignores):
            current = current.parent
        expected = (astroid.ExceptHandler,)
        if not current or not isinstance(current.parent, expected):
            self.add_message("misplaced-bare-raise", node=node)
    def _check_bad_exception_context(self, node):
        """Verify that the exception context is properly set.
        An exception context can be only `None` or an exception.
        """
        cause = utils.safe_infer(node.cause)
        if cause in (astroid.Uninferable, None):
            return
        if isinstance(cause, astroid.Const):
            if cause.value is not None:
                self.add_message("bad-exception-context", node=node)
        elif not isinstance(cause, astroid.ClassDef) and not utils.inherit_from_std_ex(
            cause
        ):
            self.add_message("bad-exception-context", node=node)
    def _check_catching_non_exception(self, handler, exc, part):
        """Emit catching-non-exception when *exc* cannot be a caught exception."""
        if isinstance(exc, astroid.Tuple):
            # Check if it is a tuple of exceptions.
            inferred = [utils.safe_infer(elt) for elt in exc.elts]
            if any(node is astroid.Uninferable for node in inferred):
                # Don't emit if we don't know every component.
                return
            if all(
                node
                and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
                for node in inferred
            ):
                return
        if not isinstance(exc, astroid.ClassDef):
            # Don't emit the warning if the infered stmt
            # is None, but the exception handler is something else,
            # maybe it was redefined.
            if isinstance(exc, astroid.Const) and exc.value is None:
                if (
                    isinstance(handler.type, astroid.Const)
                    and handler.type.value is None
                ) or handler.type.parent_of(exc):
                    # If the exception handler catches None or
                    # the exception component, which is None, is
                    # defined by the entire exception handler, then
                    # emit a warning.
                    self.add_message(
                        "catching-non-exception",
                        node=handler.type,
                        args=(part.as_string(),),
                    )
            else:
                self.add_message(
                    "catching-non-exception",
                    node=handler.type,
                    args=(part.as_string(),),
                )
            return
        if (
            not utils.inherit_from_std_ex(exc)
            and exc.name not in self._builtin_exceptions
        ):
            if utils.has_known_bases(exc):
                self.add_message(
                    "catching-non-exception", node=handler.type, args=(exc.name,)
                )
    def _check_try_except_raise(self, node):
        """Emit try-except-raise for handlers whose first statement re-raises."""
        def gather_exceptions_from_handler(handler):
            # Collect the exception nodes a handler catches (list, set or []).
            exceptions = []
            if handler.type:
                exceptions_in_handler = utils.safe_infer(handler.type)
                if isinstance(exceptions_in_handler, astroid.Tuple):
                    exceptions = {
                        exception
                        for exception in exceptions_in_handler.elts
                        if isinstance(exception, astroid.Name)
                    }
                elif exceptions_in_handler:
                    exceptions = [exceptions_in_handler]
            return exceptions
        bare_raise = False
        handler_having_bare_raise = None
        excs_in_bare_handler = []
        for handler in node.handlers:
            if bare_raise:
                # check that subsequent handler is not parent of handler which had bare raise.
                # since utils.safe_infer can fail for bare except, check it before.
                # also break early if bare except is followed by bare except.
                excs_in_current_handler = gather_exceptions_from_handler(handler)
                if not excs_in_current_handler:
                    bare_raise = False
                    break
                for exc_in_current_handler in excs_in_current_handler:
                    inferred_current = utils.safe_infer(exc_in_current_handler)
                    if any(
                        utils.is_subclass_of(
                            utils.safe_infer(exc_in_bare_handler), inferred_current
                        )
                        for exc_in_bare_handler in excs_in_bare_handler
                    ):
                        bare_raise = False
                        break
            # `raise` as the first operator inside the except handler
            if _is_raising([handler.body[0]]):
                # flags when there is a bare raise
                if handler.body[0].exc is None:
                    bare_raise = True
                    handler_having_bare_raise = handler
                    excs_in_bare_handler = gather_exceptions_from_handler(handler)
        if bare_raise:
            self.add_message("try-except-raise", node=handler_having_bare_raise)
    @utils.check_messages("wrong-exception-operation")
    def visit_binop(self, node):
        """Warn on a binary operation (e.g. ``A | B``) used as an except clause."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V | A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                node.right.as_string(),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))
    @utils.check_messages("wrong-exception-operation")
    def visit_compare(self, node):
        """Warn on a comparison (e.g. ``A < B``) used as an except clause."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V < A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                ", ".join(operand.as_string() for _, operand in node.ops),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))
    @utils.check_messages(
        "bare-except",
        "broad-except",
        "try-except-raise",
        "binary-op-exception",
        "bad-except-order",
        "catching-non-exception",
        "duplicate-except",
    )
    def visit_tryexcept(self, node):
        """check for empty except"""
        self._check_try_except_raise(node)
        exceptions_classes = []
        nb_handlers = len(node.handlers)
        for index, handler in enumerate(node.handlers):
            if handler.type is None:
                if not _is_raising(handler.body):
                    self.add_message("bare-except", node=handler)
                # check if an "except:" is followed by some other
                # except
                if index < (nb_handlers - 1):
                    msg = "empty except clause should always appear last"
                    self.add_message("bad-except-order", node=node, args=msg)
            elif isinstance(handler.type, astroid.BoolOp):
                self.add_message(
                    "binary-op-exception", node=handler, args=handler.type.op
                )
            else:
                try:
                    excs = list(_annotated_unpack_infer(handler.type))
                except astroid.InferenceError:
                    continue
                for part, exc in excs:
                    if exc is astroid.Uninferable:
                        continue
                    if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(
                        exc
                    ):
                        # pylint: disable=protected-access
                        exc = exc._proxied
                    self._check_catching_non_exception(handler, exc, part)
                    if not isinstance(exc, astroid.ClassDef):
                        continue
                    exc_ancestors = [
                        anc
                        for anc in exc.ancestors()
                        if isinstance(anc, astroid.ClassDef)
                    ]
                    for previous_exc in exceptions_classes:
                        if previous_exc in exc_ancestors:
                            msg = "%s is an ancestor class of %s" % (
                                previous_exc.name,
                                exc.name,
                            )
                            self.add_message(
                                "bad-except-order", node=handler.type, args=msg
                            )
                    if (
                        exc.name in self.config.overgeneral_exceptions
                        and exc.root().name == utils.EXCEPTIONS_MODULE
                        and not _is_raising(handler.body)
                    ):
                        self.add_message(
                            "broad-except", args=exc.name, node=handler.type
                        )
                    if exc in exceptions_classes:
                        self.add_message(
                            "duplicate-except", args=exc.name, node=handler.type
                        )
                exceptions_classes += [exc for _, exc in excs]
def register(linter):
    """Required entry point: auto-register this checker with *linter*."""
    checker = ExceptionsChecker(linter)
    linter.register_checker(checker)
|
ekwoodrich/python-dvrip
|
env/lib/python3.5/site-packages/pylint/checkers/exceptions.py
|
Python
|
mit
| 21,418
|
[
"VisIt"
] |
5e752bf3d2619e1ffb492fdf6aa31cce14031a7547d01348421bfe2e4ad53c81
|
# Generated by Django 1.11.2 on 2017-06-22 10:22
import bitfield.models
import django.contrib.auth.models
import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.db import migrations, models
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.models import generate_email_token_for_stream
def migrate_existing_attachment_data(apps: StateApps,
                                     schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill realm and is_realm_public on every existing Attachment.

    For each attachment, realm is copied from its owner; is_realm_public is
    set if any message the owner sent with it went to a public stream
    (not invite-only, not in a Zephyr-mirror realm).
    """
    Attachment = apps.get_model('zerver', 'Attachment')
    Recipient = apps.get_model('zerver', 'Recipient')
    Stream = apps.get_model('zerver', 'Stream')
    for attachment in Attachment.objects.all():
        attachment.realm = attachment.owner.realm
        for message in attachment.messages.all():
            # Only messages sent by the attachment's owner count.
            if attachment.owner != message.sender:
                continue
            if message.recipient.type != Recipient.STREAM:
                continue
            stream = Stream.objects.get(id=message.recipient.type_id)
            is_public = (not stream.realm.is_zephyr_mirror_realm
                         and not stream.invite_only)
            attachment.is_realm_public = attachment.is_realm_public or is_public
        attachment.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0001_initial'),
]
if settings.POSTGRES_MISSING_DICTIONARIES:
fts_sql = """
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
"""
else:
fts_sql = """
CREATE TEXT SEARCH DICTIONARY english_us_hunspell
(template = ispell, DictFile = en_us, AffFile = en_us, StopWords = zulip_english);
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
ALTER TEXT SEARCH CONFIGURATION zulip.english_us_search
ALTER MAPPING FOR asciiword, asciihword, hword_asciipart, word, hword, hword_part
WITH english_us_hunspell, english_stem;
"""
fts_sql += """
CREATE FUNCTION escape_html(text) RETURNS text IMMUTABLE LANGUAGE 'sql' AS $$
SELECT replace(replace(replace(replace(replace($1, '&', '&'), '<', '<'),
'>', '>'), '"', '"'), '''', ''');
$$ ;
ALTER TABLE zerver_message ADD COLUMN search_tsvector tsvector;
CREATE INDEX zerver_message_search_tsvector ON zerver_message USING gin(search_tsvector);
ALTER INDEX zerver_message_search_tsvector SET (fastupdate = OFF);
CREATE TABLE fts_update_log (id SERIAL PRIMARY KEY, message_id INTEGER NOT NULL);
CREATE FUNCTION do_notify_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
$$ BEGIN NOTIFY fts_update_log; RETURN NEW; END $$;
CREATE TRIGGER fts_update_log_notify AFTER INSERT ON fts_update_log
FOR EACH STATEMENT EXECUTE PROCEDURE do_notify_fts_update_log();
CREATE FUNCTION append_to_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
$$ BEGIN INSERT INTO fts_update_log (message_id) VALUES (NEW.id); RETURN NEW; END $$;
CREATE TRIGGER zerver_message_update_search_tsvector_async
BEFORE INSERT OR UPDATE OF subject, rendered_content ON zerver_message
FOR EACH ROW EXECUTE PROCEDURE append_to_fts_update_log();
"""
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(db_index=True, max_length=75, unique=True)),
('is_staff', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('is_bot', models.BooleanField(default=False)),
('date_joined', models.DateTimeField(default=django.utils.timezone.now)),
('is_mirror_dummy', models.BooleanField(default=False)),
('full_name', models.CharField(max_length=100)),
('short_name', models.CharField(max_length=100)),
('pointer', models.IntegerField()),
('last_pointer_updater', models.CharField(max_length=64)),
('api_key', models.CharField(max_length=32)),
('enable_stream_desktop_notifications', models.BooleanField(default=True)),
('enable_stream_sounds', models.BooleanField(default=True)),
('enable_desktop_notifications', models.BooleanField(default=True)),
('enable_sounds', models.BooleanField(default=True)),
('enable_offline_email_notifications', models.BooleanField(default=True)),
('enable_offline_push_notifications', models.BooleanField(default=True)),
('enable_digest_emails', models.BooleanField(default=True)),
('default_desktop_notifications', models.BooleanField(default=True)),
('last_reminder', models.DateTimeField(default=django.utils.timezone.now, null=True)),
('rate_limits', models.CharField(default='', max_length=100)),
('default_all_public_streams', models.BooleanField(default=False)),
('enter_sends', models.NullBooleanField(default=True)),
('autoscroll_forever', models.BooleanField(default=False)),
('twenty_four_hour_time', models.BooleanField(default=False)),
('avatar_source', models.CharField(choices=[('G', 'Hosted by Gravatar'), ('U', 'Uploaded by user'), ('S', 'System generated')], default='G', max_length=1)),
('tutorial_status', models.CharField(choices=[('W', 'Waiting'), ('S', 'Started'), ('F', 'Finished')], default='W', max_length=1)),
('onboarding_steps', models.TextField(default='[]')),
('invites_granted', models.IntegerField(default=0)),
('invites_used', models.IntegerField(default=0)),
('alert_words', models.TextField(default='[]')),
('muted_topics', models.TextField(default='[]')),
('bot_owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Client',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=30, unique=True)),
],
),
migrations.CreateModel(
name='DefaultStream',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='Huddle',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('huddle_hash', models.CharField(db_index=True, max_length=40, unique=True)),
],
),
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('subject', models.CharField(db_index=True, max_length=60)),
('content', models.TextField()),
('rendered_content', models.TextField(null=True)),
('rendered_content_version', models.IntegerField(null=True)),
('pub_date', models.DateTimeField(db_index=True, verbose_name='date published')),
('last_edit_time', models.DateTimeField(null=True)),
('edit_history', models.TextField(null=True)),
('has_attachment', models.BooleanField(db_index=True, default=False)),
('has_image', models.BooleanField(db_index=True, default=False)),
('has_link', models.BooleanField(db_index=True, default=False)),
],
),
migrations.CreateModel(
name='PreregistrationUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=75)),
('invited_at', models.DateTimeField(auto_now=True)),
('status', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='PushDeviceToken',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('kind', models.PositiveSmallIntegerField(choices=[(1, 'apns'), (2, 'gcm')])),
('token', models.CharField(max_length=4096, unique=True)),
('last_updated', models.DateTimeField(auto_now=True, default=django.utils.timezone.now)),
('ios_app_id', models.TextField(null=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Realm',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('domain', models.CharField(db_index=True, max_length=40, unique=True)),
('name', models.CharField(max_length=40, null=True)),
('restricted_to_domain', models.BooleanField(default=True)),
('invite_required', models.BooleanField(default=False)),
('invite_by_admins_only', models.BooleanField(default=False)),
('mandatory_topics', models.BooleanField(default=False)),
('show_digest_email', models.BooleanField(default=True)),
('name_changes_disabled', models.BooleanField(default=False)),
('date_created', models.DateTimeField(default=django.utils.timezone.now)),
('deactivated', models.BooleanField(default=False)),
],
options={
'permissions': (('administer', 'Administer a realm'),),
},
),
migrations.CreateModel(
name='RealmAlias',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('domain', models.CharField(db_index=True, max_length=80, unique=True)),
('realm', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name='RealmEmoji',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('img_url', models.TextField()),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name='RealmFilter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pattern', models.TextField()),
('url_format_string', models.TextField()),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name='Recipient',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type_id', models.IntegerField(db_index=True)),
('type', models.PositiveSmallIntegerField(db_index=True)),
],
),
migrations.CreateModel(
name='Referral',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=75)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ScheduledJob',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('scheduled_timestamp', models.DateTimeField()),
('type', models.PositiveSmallIntegerField()),
('data', models.TextField()),
('filter_id', models.IntegerField(null=True)),
('filter_string', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Stream',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=60)),
('invite_only', models.NullBooleanField(default=False)),
('email_token', models.CharField(default=generate_email_token_for_stream, max_length=32)),
('description', models.CharField(default='', max_length=1024)),
('date_created', models.DateTimeField(default=django.utils.timezone.now)),
('deactivated', models.BooleanField(default=False)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name='Subscription',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('active', models.BooleanField(default=True)),
('in_home_view', models.NullBooleanField(default=True)),
('color', models.CharField(default='#c2c2c2', max_length=10)),
('desktop_notifications', models.BooleanField(default=True)),
('audible_notifications', models.BooleanField(default=True)),
('notifications', models.BooleanField(default=False)),
('recipient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient')),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserActivity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('query', models.CharField(db_index=True, max_length=50)),
('count', models.IntegerField()),
('last_visit', models.DateTimeField(verbose_name='last visit')),
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Client')),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserActivityInterval',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start', models.DateTimeField(db_index=True, verbose_name='start time')),
('end', models.DateTimeField(db_index=True, verbose_name='end time')),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserMessage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('flags', bitfield.models.BitField(['read', 'starred', 'collapsed', 'mentioned', 'wildcard_mentioned', 'summarize_in_home', 'summarize_in_stream', 'force_expand', 'force_collapse', 'has_alert_word', 'historical', 'is_me_message'], default=0)),
('message', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Message')),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserPresence',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(verbose_name='presence changed')),
('status', models.PositiveSmallIntegerField(default=1)),
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Client')),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterUniqueTogether(
name='userpresence',
unique_together={('user_profile', 'client')},
),
migrations.AlterUniqueTogether(
name='usermessage',
unique_together={('user_profile', 'message')},
),
migrations.AlterUniqueTogether(
name='useractivity',
unique_together={('user_profile', 'client', 'query')},
),
migrations.AlterUniqueTogether(
name='subscription',
unique_together={('user_profile', 'recipient')},
),
migrations.AlterUniqueTogether(
name='stream',
unique_together={('name', 'realm')},
),
migrations.AlterUniqueTogether(
name='recipient',
unique_together={('type', 'type_id')},
),
migrations.AlterUniqueTogether(
name='realmfilter',
unique_together={('realm', 'pattern')},
),
migrations.AlterUniqueTogether(
name='realmemoji',
unique_together={('realm', 'name')},
),
migrations.AddField(
model_name='realm',
name='notifications_stream',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='zerver.Stream'),
),
migrations.AddField(
model_name='preregistrationuser',
name='realm',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
),
migrations.AddField(
model_name='preregistrationuser',
name='referred_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='preregistrationuser',
name='streams',
field=models.ManyToManyField(null=True, to='zerver.Stream'),
),
migrations.AddField(
model_name='message',
name='recipient',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient'),
),
migrations.AddField(
model_name='message',
name='sender',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='message',
name='sending_client',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Client'),
),
migrations.AddField(
model_name='defaultstream',
name='realm',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
),
migrations.AddField(
model_name='defaultstream',
name='stream',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Stream'),
),
migrations.AlterUniqueTogether(
name='defaultstream',
unique_together={('realm', 'stream')},
),
migrations.AddField(
model_name='userprofile',
name='default_events_register_stream',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='zerver.Stream'),
),
migrations.AddField(
model_name='userprofile',
name='default_sending_stream',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='zerver.Stream'),
),
migrations.AddField(
model_name='userprofile',
name='groups',
field=models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups'),
),
migrations.AddField(
model_name='userprofile',
name='realm',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
),
migrations.AddField(
model_name='userprofile',
name='user_permissions',
field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
),
migrations.RunSQL(
sql=fts_sql,
),
migrations.AlterModelManagers(
name='userprofile',
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.AlterField(
model_name='preregistrationuser',
name='email',
field=models.EmailField(max_length=254),
),
migrations.AlterField(
model_name='preregistrationuser',
name='streams',
field=models.ManyToManyField(to='zerver.Stream'),
),
migrations.AlterField(
model_name='pushdevicetoken',
name='last_updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='referral',
name='email',
field=models.EmailField(max_length=254),
),
migrations.AlterField(
model_name='userprofile',
name='email',
field=models.EmailField(db_index=True, max_length=254, unique=True),
),
migrations.AlterField(
model_name='userprofile',
name='last_login',
field=models.DateTimeField(blank=True, null=True, verbose_name='last login'),
),
migrations.RunSQL(
sql='CREATE INDEX upper_subject_idx ON zerver_message ((upper(subject)));',
reverse_sql='DROP INDEX upper_subject_idx;',
),
migrations.RunSQL(
sql='CREATE INDEX upper_stream_name_idx ON zerver_stream ((upper(name)));',
reverse_sql='DROP INDEX upper_stream_name_idx;',
),
migrations.AddField(
model_name='userprofile',
name='left_side_userlist',
field=models.BooleanField(default=False),
),
migrations.AlterModelOptions(
name='realm',
options={'permissions': (('administer', 'Administer a realm'), ('api_super_user', 'Can send messages as other users for mirroring'))},
),
migrations.RunSQL(
sql='CREATE INDEX upper_userprofile_email_idx ON zerver_userprofile ((upper(email)));',
reverse_sql='DROP INDEX upper_userprofile_email_idx;',
),
migrations.AlterField(
model_name='userprofile',
name='is_active',
field=models.BooleanField(db_index=True, default=True),
),
migrations.AlterField(
model_name='userprofile',
name='is_bot',
field=models.BooleanField(db_index=True, default=False),
),
migrations.RunSQL(
sql='CREATE INDEX upper_preregistration_email_idx ON zerver_preregistrationuser ((upper(email)));',
reverse_sql='DROP INDEX upper_preregistration_email_idx;',
),
migrations.AlterField(
model_name='userprofile',
name='enable_stream_desktop_notifications',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='userprofile',
name='enable_stream_sounds',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='userprofile',
name='is_api_super_user',
field=models.BooleanField(db_index=True, default=False),
),
migrations.AddField(
model_name='userprofile',
name='is_realm_admin',
field=models.BooleanField(db_index=True, default=False),
),
migrations.AlterField(
model_name='realmemoji',
name='img_url',
field=models.URLField(),
),
migrations.AlterField(
model_name='realmemoji',
name='name',
field=models.TextField(validators=[django.core.validators.MinLengthValidator(1), django.core.validators.RegexValidator(regex='^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$')]),
),
migrations.AlterField(
model_name='realmemoji',
name='img_url',
field=models.URLField(max_length=1000),
),
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('file_name', models.CharField(db_index=True, max_length=100)),
('path_id', models.TextField(db_index=True)),
('create_time', models.DateTimeField(db_index=True, default=django.utils.timezone.now)),
('messages', models.ManyToManyField(to='zerver.Message')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('is_realm_public', models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name='realm',
name='create_stream_by_admins_only',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='userprofile',
name='bot_type',
field=models.PositiveSmallIntegerField(db_index=True, null=True),
),
migrations.AlterField(
model_name='realmemoji',
name='name',
field=models.TextField(validators=[django.core.validators.MinLengthValidator(1), django.core.validators.RegexValidator(message='Invalid characters in emoji name', regex='^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$')]),
),
migrations.AddField(
model_name='preregistrationuser',
name='realm_creation',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='attachment',
name='realm',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
),
migrations.RunPython(
code=migrate_existing_attachment_data,
elidable=True,
),
migrations.AddField(
model_name='subscription',
name='pin_to_top',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='userprofile',
name='default_language',
field=models.CharField(default='en', max_length=50),
),
migrations.AddField(
model_name='realm',
name='allow_message_editing',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='realm',
name='message_content_edit_limit_seconds',
field=models.IntegerField(default=600),
),
migrations.AddField(
model_name='realm',
name='default_language',
field=models.CharField(default='en', max_length=50),
),
migrations.AddField(
model_name='userprofile',
name='tos_version',
field=models.CharField(max_length=10, null=True),
),
]
|
brainwane/zulip
|
zerver/migrations/0001_initial.py
|
Python
|
apache-2.0
| 29,467
|
[
"VisIt"
] |
e801b4fb19acf48c97a7e5c31c2c5872a1572b9c81472446dbbb3e77fbc3a6b6
|
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import matplotlib.pyplot as plt
import numpy as np

# This script makes log-log plots of the error vs. h for the tests in this
# directory.  For each CSV of convergence data it fits a straight line to
# log10(error) vs. log10(h); the slope of that line is the observed
# convergence rate, which is annotated next to each curve.

filenames = ['hermite_converge_dirichlet_out.csv',
             'hermite_converge_periodic_out.csv']

for filename in filenames:
    fig = plt.figure()
    ax1 = fig.add_subplot(111)

    # names=True treats the first row as column header names, so the
    # columns can be accessed by name below.
    data = np.genfromtxt(filename, delimiter=',', names=True)

    log_h1_error = np.log10(data['H1error'])
    log_l2_error = np.log10(data['L2error'])
    logh = np.log10(data['h'])

    # Least-squares linear fits; index 0 of the result is the slope,
    # i.e. the convergence rate.
    h1_fit = np.polyfit(logh, log_h1_error, 1)
    l2_fit = np.polyfit(logh, log_l2_error, 1)

    ax1.plot(logh, log_h1_error, linewidth=2, marker='o', label=r'$H^1$ error')
    ax1.text(-0.4, -2., '{:.2f}'.format(h1_fit[0]))
    ax1.plot(logh, log_l2_error, linewidth=2, marker='o', label=r'$L^2$ error')
    ax1.text(-0.4, -3.5, '{:.2f}'.format(l2_fit[0]))
    ax1.set_xlabel('log(h)')
    ax1.legend(loc='upper left')
    plt.savefig(filename.rsplit('.', 1)[0] + '.pdf')
    # Bug fix: close the figure after saving.  The original left every
    # figure open, accumulating memory (and matplotlib warnings) as more
    # files are plotted.
    plt.close(fig)
|
nuclear-wizard/moose
|
test/tests/variables/fe_hermite_convergence/plot.py
|
Python
|
lgpl-2.1
| 1,471
|
[
"MOOSE"
] |
bd888078b8f0aee70c3baf8b5589fd46d1660ac0837bac36a9c2586c6531869c
|
import sys
import pylink
from time import sleep
from threading import Thread, Event, Condition
import logging
import re
if sys.version_info < (3, 0):
import Queue as queue
# __class__ = instance.__class__
else:
import queue
from avatar2.archs.arm import ARM
from avatar2.targets import TargetStates
from avatar2.message import AvatarMessage, UpdateStateMessage, BreakpointHitMessage
class JLinkProtocol(Thread):
    """Main class for the JLink protocol, via pylink-square

    :ivar serial: The serial number of the JLink to connect to
    :ivar device: The JLink device name for the target
    :ivar avatar: the avatar object
    :ivar origin: the target utilizing this protocol
    """

    def __init__(self, serial="12345678", device="ARM7", avatar=None, origin=None):
        self._shutdown = Event()
        self.avatar = avatar
        self.origin = origin
        self.jlink = pylink.JLink()
        self.jlink.open(serial)
        # Nest this logger under the originating target's logger when one
        # is available, so log lines can be attributed to that target.
        self.log = logging.getLogger('%s.%s' %
                                     (origin.log.name, self.__class__.__name__)
                                     ) if origin else \
            logging.getLogger(self.__class__.__name__)
        Thread.__init__(self)
        # connect() also starts the polling thread once attached.
        self.connect(device=device)

    def __del__(self):
        self.shutdown()

    def connect(self, device="ARM7"):
        """Attach to the target, retrying until it responds, then start
        the state-polling thread.

        :param device: JLink device name for the target
        :returns: True once connected
        """
        # TODO: add a timeout here instead of retrying forever
        while True:
            try:
                self.jlink.connect(device, verbose=True)
                # Probe the scan chain; raises if the connection is not
                # actually usable yet.
                self.jlink.ir_len()
                break
            except pylink.errors.JLinkException:
                self.log.info("Connection failed, trying again...")
                sleep(0.25)
        self.log.info("Connected to JLink target")
        self.start()
        return True

    def reset(self, halt=True):
        """Reset the target.

        :param halt: halt the target immediately after reset
        :returns: result of the underlying pylink reset call
        """
        self.log.info("Resetting target")
        return self.jlink.reset(halt=halt)

    def shutdown(self):
        """Signal the polling thread to exit (it closes the JLink)."""
        self._shutdown.set()

    def update_target_regs(self):
        """
        This function will try to update the TargetRegs based on the list of
        registers known to the JLink.
        """
        regs = {}
        for idx in self.jlink.register_list():
            name = self.jlink.register_name(idx)
            regs[name] = idx
        if hasattr(self.origin, 'regs'):
            self.origin.regs._update(regs)

    def run(self):
        # Target state management thread.
        # JLink offers no asynchronous halt notification, and not all
        # targets produce a "moe" (Mode of Entry), so the halted state
        # has to be polled and classified here.
        try:
            while not self._shutdown.is_set():
                is_halted = self.jlink.halted()
                if is_halted and self.origin.state == TargetStates.RUNNING:
                    # We just halted -- but did we hit a breakpoint?
                    self.log.debug("JLink Target is halting...")
                    avatar_msg = UpdateStateMessage(self.origin, TargetStates.STOPPED)
                    self.avatar.fast_queue.put(avatar_msg)
                    self.origin.wait()
                    self.log.debug("JLink target has halted")
                    pc = self.get_pc()
                    # Hoisted: the original queried breakpoint_find()
                    # three times for the same PC.
                    bp_num = self.jlink.breakpoint_find(pc)
                    if bp_num:
                        self.log.debug("JLink Target hit breakpoint %d" % bp_num)
                        avatar_msg = BreakpointHitMessage(self.origin, bp_num, pc)
                        self.avatar.queue.put(avatar_msg)
                elif not is_halted and self.origin.state == TargetStates.STOPPED:
                    self.log.debug("About to resume target.")
                    avatar_msg = UpdateStateMessage(self.origin, TargetStates.RUNNING)
                    self.avatar.fast_queue.put(avatar_msg)
                    while self.origin.state != TargetStates.RUNNING:
                        pass
                    self.log.debug("JLink target has resumed")
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # are not swallowed.
            self.log.exception("JLink target errored")
        finally:
            self.log.info("JLink target exiting")
            self.jlink.close()

    def set_breakpoint(self, line,
                       hardware=False,
                       temporary=False,
                       regex=False,
                       condition=None,
                       ignore_count=0,
                       thread=0,
                       pending=False):
        """Inserts a breakpoint

        :param bool hardware: Hardware breakpoint
        :param bool temporary: Tempory breakpoint
        :param str regex: If set, inserts breakpoints matching the regex
        :param str condition: If set, inserts a breakpoint with specified condition
        :param int ignore_count: Amount of times the bp should be ignored
        :param int thread: Threadno in which this breakpoints should be added
        :returns: The number of the breakpoint
        """
        # TODO: Hw/Sw breakpoint control; the extra keyword arguments are
        # accepted for interface compatibility but not forwarded to pylink.
        self.log.info("Setting breakpoint at %#08x" % line)
        ret = self.jlink.breakpoint_set(line)
        self.log.info("Got BP ID %d" % ret)
        return ret

    def set_watchpoint(self, variable, write=True, read=False):
        """Set a watchpoint on an address; returns the watchpoint handle."""
        return self.jlink.watchpoint_set(variable, write=write, read=read)

    def remove_breakpoint(self, bkpt):
        """Deletes a breakpoint"""
        # TODO: Check this
        return self.jlink.breakpoint_clear(bkpt)

    def write_memory(self, address, wordsize, val, num_words=1, raw=False):
        """Writes memory

        :param address: Address to write to
        :param wordsize: the size of the write (1, 2, 4 or 8)
        :param val: the written value
        :type val: int if num_words == 1 and raw == False
                   list if num_words > 1 and raw == False
                   str or byte if raw == True
        :param num_words: The amount of words to read
        :param raw: Specifies whether to write in raw or word mode
        :returns: True on success else False
        """
        # NOTE(review): 'wordsize' is not forwarded to pylink (behavior
        # preserved from the original) -- confirm whether nbits should be
        # passed to memory_write.
        if raw:
            if not len(val):
                raise ValueError("val had zero length")
            # Accept both str and bytes; iterating bytes already yields ints.
            val = [v if isinstance(v, int) else ord(v) for v in val]
        elif num_words == 1 and not isinstance(val, (list, tuple)):
            # A single word may be passed as a plain int; pylink expects
            # an iterable of units.
            val = [val]
        try:
            # Bug fix: the original wrote the undefined name 'contents',
            # which raised NameError on every call.
            self.jlink.memory_write(address, val)
            return True
        except pylink.JLinkException:
            return False

    def read_memory(self, address, wordsize=4, num_words=1, raw=False):
        """reads memory

        :param address: Address to write to
        :param wordsize: the size of a read word (1, 2, 4 or 8)
        :param num_words: the amount of read words
        :param raw: Whether the read memory should be returned unprocessed
        :return: The read memory
        """
        # NOTE(review): 'wordsize' is forwarded as pylink's nbits
        # parameter (bits, not bytes) -- confirm callers pass matching
        # values.
        ret = self.jlink.memory_read(address, num_units=num_words, nbits=wordsize)
        if raw:
            # Bug fix: the original referenced the undefined names
            # 'newint' and 'math' and joined ints into a str.  Pack each
            # unit little-endian using as many bytes as its value needs
            # (at least one, so zero values are not dropped).
            return b"".join(
                i.to_bytes(max(1, (i.bit_length() + 7) // 8), 'little')
                for i in ret)
        return ret

    def _register_index(self, reg):
        """Map a register name (case-insensitive) to its JLink index.

        :returns: the index, or -1 when the name is unknown
        """
        wanted = reg.lower()
        for idx in self.jlink.register_list():
            # Bug fix: the original called the undefined function
            # 'tolower' and compared a lowercased name against the
            # unmodified JLink name, which could never match.
            if wanted == self.jlink.register_name(idx).lower():
                return idx
        return -1

    def read_register(self, reg):
        """Read one register, by name, from the target."""
        # Bug fix: the original called the nonexistent
        # self.register_read instead of self.jlink.register_read.
        return self.jlink.register_read(self._register_index(reg))

    def get_pc(self):
        """Return the current program counter value."""
        # The PC is located by name since its index varies per target.
        for idx in self.jlink.register_list():
            if "PC" in self.jlink.register_name(idx):
                return self.jlink.register_read(idx)

    def write_register(self, reg, val):
        """Set one register on the target

        :returns: True on success"""
        return self.jlink.register_write(self._register_index(reg), val)

    def step(self):
        """Step one instruction on the target

        :returns: True on success"""
        return self.jlink.step()

    def cont(self):
        """Continues the execution of the target

        :returns: True on success"""
        self.log.info("Resuming target...")
        return self.jlink.restart()

    def stop(self):
        """Stops execution of the target

        :returns: True on success"""
        self.log.info("Stopping target...")
        return self.jlink.halt()

    def set_endianness(self, endianness='little'):
        """Switch the target byte order ('little' or 'big')."""
        if 'little' in endianness:
            self.jlink.set_little_endian()
        elif "big" in endianness:
            self.jlink.set_big_endian()
|
avatartwo/avatar2
|
avatar2/protocols/jlink.py
|
Python
|
apache-2.0
| 8,768
|
[
"MOE"
] |
7e3ce4c7ed9927d80a3ec45a342d46472a3c7a052ba3bf9a1d549faa5aac4543
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2005-2007 Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2009 Benny Malengier
# Copyright (C) 2010 Nick Hall
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2012 Gary Burton
# Copyright (C) 2012 Doug Blank <doug.blank@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Manages the main window and the pluggable views
"""
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
from collections import defaultdict
import os
import time
import datetime
from io import StringIO
import posixpath
import gc
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
LOG = logging.getLogger(".")
#-------------------------------------------------------------------------
#
# GNOME modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
from gi.repository import Gdk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from gramps.cli.grampscli import CLIManager
from .user import User
from .plug import tool
from gramps.gen.plug import START
from gramps.gen.plug import REPORT
from gramps.gen.plug.report._constants import standalone_categories
from .plug import (PluginWindows, ReportPluginDialog, ToolPluginDialog)
from .plug.report import report, BookSelector
from .utils import AvailableUpdates
from .pluginmanager import GuiPluginManager
from gramps.gen.relationship import get_relationship_calculator
from .displaystate import DisplayState, RecentDocsMenu
from gramps.gen.const import (HOME_DIR, ICON, URL_BUGTRACKER, URL_HOMEPAGE,
URL_MAILINGLIST, URL_MANUAL_PAGE, URL_WIKISTRING,
WIKI_EXTRAPLUGINS, URL_BUGHOME)
from gramps.gen.constfunc import is_quartz
from gramps.gen.config import config
from gramps.gen.errors import WindowActiveError
from .dialog import ErrorDialog, WarningDialog, QuestionDialog2, InfoDialog
from .widgets import Statusbar
from .undohistory import UndoHistory
from gramps.gen.utils.file import media_path_full
from .dbloader import DbLoader
from .display import display_help, display_url
from .configure import GrampsPreferences
from .aboutdialog import GrampsAboutDialog
from .navigator import Navigator
from .views.tags import Tags
from .actiongroup import ActionGroup
from gramps.gen.lib import (Person, Surname, Family, Media, Note, Place,
Source, Repository, Citation, Event, EventType,
ChildRef)
from gramps.gui.editors import (EditPerson, EditFamily, EditMedia, EditNote,
EditPlace, EditSource, EditRepository,
EditCitation, EditEvent)
from gramps.gen.db.exceptions import DbWriteFailure
from .managedwindow import ManagedWindow
#-------------------------------------------------------------------------
#
# Constants
#
#-------------------------------------------------------------------------
# Detect whether the native macOS (Quartz) application integration is
# available; _GTKOSXAPPLICATION gates all macapp usage below.
if is_quartz():
    try:
        import gi
        gi.require_version('GtkosxApplication', '1.0')
        from gi.repository import GtkosxApplication as QuartzApp
        _GTKOSXAPPLICATION = True
    # gi.require_version() raises ValueError when the typelib is missing
    # and the imports raise ImportError.  Catch only those instead of the
    # original bare except, so unrelated errors are not silently hidden.
    except (ImportError, ValueError):
        print("Failed to import gtk_osxapplication")
        _GTKOSXAPPLICATION = False
else:
    _GTKOSXAPPLICATION = False
# Category used for plugins whose category is not recognized.
_UNSUPPORTED = ("Unsupported", _("Unsupported"))

# Default Gtk.UIManager XML describing the main menubar, toolbar and the
# global accelerators.  The <placeholder> elements are merge points that
# individual views and plugins fill in at runtime.
UIDEFAULT = '''<ui>
<menubar name="MenuBar">
<menu action="FileMenu">
<menuitem action="Open"/>
<menu action="OpenRecent">
</menu>
<menuitem action="Close"/>
<separator/>
<menuitem action="Import"/>
<menuitem action="Export"/>
<placeholder name="LocalExport"/>
<menuitem action="Backup"/>
<separator/>
<menuitem action="Abandon"/>
<menuitem action="Quit"/>
</menu>
<menu action="AddMenu">
<menuitem action="PersonAdd"/>
<separator/>
<menuitem action="FamilyAdd"/>
<separator/>
<menuitem action="EventAdd"/>
<separator/>
<menuitem action="PlaceAdd"/>
<menuitem action="SourceAdd"/>
<menuitem action="CitationAdd"/>
<menuitem action="RepositoryAdd"/>
<menuitem action="MediaAdd"/>
<menuitem action="NoteAdd"/>
</menu>
<menu action="EditMenu">
<menuitem action="Undo"/>
<menuitem action="Redo"/>
<menuitem action="UndoHistory"/>
<separator/>
<placeholder name="CommonEdit"/>
<separator/>
<placeholder name="TagMenu"/>
<separator/>
<menuitem action="Clipboard"/>
<separator/>
<menuitem action="Preferences"/>
</menu>
<menu action="ViewMenu">
<menuitem action="ConfigView"/>
<menuitem action="Navigator"/>
<menuitem action="Toolbar"/>
<placeholder name="Bars"/>
<menuitem action="Fullscreen"/>
<separator/>
<placeholder name="ViewsInCategory"/>
<separator/>
</menu>
<menu action="GoMenu">
<placeholder name="CommonGo"/>
<placeholder name="CommonHistory"/>
</menu>
<menu action="BookMenu">
<placeholder name="AddEditBook"/>
<separator/>
<placeholder name="GoToBook"/>
</menu>
<menu action="ReportsMenu">
<menuitem action="Books"/>
<separator/>
<placeholder name="P_ReportsMenu"/>
</menu>
<menu action="ToolsMenu">
<placeholder name="P_ToolsMenu"/>
</menu>
<menu action="WindowsMenu">
<placeholder name="WinMenu"/>
</menu>
<menu action="HelpMenu">
<menuitem action="UserManual"/>
<menuitem action="FAQ"/>
<menuitem action="KeyBindings"/>
<menuitem action="TipOfDay"/>
<menuitem action="PluginStatus"/>
<separator/>
<menuitem action="HomePage"/>
<menuitem action="MailingLists"/>
<menuitem action="ReportBug"/>
<menuitem action="ExtraPlugins"/>
<separator/>
<menuitem action="About"/>
</menu>
</menubar>
<toolbar name="ToolBar">
<placeholder name="CommonNavigation"/>
<separator/>
<placeholder name="CommonEdit"/>
<placeholder name="TagTool"/>
<toolitem action="Clipboard"/>
<separator/>
<toolitem action="ConfigView"/>
<placeholder name="ViewsInCategory"/>
<separator/>
<toolitem action="Reports"/>
<toolitem action="Tools"/>
</toolbar>
<accelerator action="F2"/>
<accelerator action="F3"/>
<accelerator action="F4"/>
<accelerator action="F5"/>
<accelerator action="F6"/>
<accelerator action="F7"/>
<accelerator action="F8"/>
<accelerator action="F9"/>
<accelerator action="F11"/>
<accelerator action="F12"/>
<accelerator action="<PRIMARY>1"/>
<accelerator action="<PRIMARY>2"/>
<accelerator action="<PRIMARY>3"/>
<accelerator action="<PRIMARY>4"/>
<accelerator action="<PRIMARY>5"/>
<accelerator action="<PRIMARY>6"/>
<accelerator action="<PRIMARY>7"/>
<accelerator action="<PRIMARY>8"/>
<accelerator action="<PRIMARY>9"/>
<accelerator action="<PRIMARY>0"/>
<accelerator action="<PRIMARY>BackSpace"/>
<accelerator action="<PRIMARY>J"/>
<accelerator action="<PRIMARY>N"/>
<accelerator action="<PRIMARY>P"/>
</ui>
'''
# Wiki manual pages opened by the Help menu entries.
WIKI_HELP_PAGE_FAQ = '%s_-_FAQ' % URL_MANUAL_PAGE
WIKI_HELP_PAGE_KEY = '%s_-_Keybindings' % URL_MANUAL_PAGE
# The main manual page needs no suffix; the redundant "'%s' %" identity
# formatting was dropped.
WIKI_HELP_PAGE_MAN = URL_MANUAL_PAGE
#-------------------------------------------------------------------------
#
# ViewManager
#
#-------------------------------------------------------------------------
class ViewManager(CLIManager):
"""
**Overview**
The ViewManager is the session manager of the program.
Specifically, it manages the main window of the program. It is closely tied
into the Gtk.UIManager to control all menus and actions.
The ViewManager controls the various Views within the Gramps programs.
Views are organised in categories. The categories can be accessed via
a sidebar. Within a category, the different views are accesible via the
toolbar of view menu.
A View is a particular way of looking a information in the Gramps main
window. Each view is separate from the others, and has no knowledge of
the others.
Examples of current views include:
- Person View
- Relationship View
- Family View
- Source View
The View Manager does not have to know the number of views, the type of
views, or any other details about the views. It simply provides the
method of containing each view, and has methods for creating, deleting and
switching between the views.
"""
    def __init__(self, dbstate, view_category_order, user=None):
        """
        The viewmanager is initialised with a dbstate on which Gramps is
        working, and a fixed view_category_order, which is the order in which
        the view categories are accessible in the sidebar.

        :param dbstate: database state the GUI operates on
        :param view_category_order: ordered list of view category names
        :param user: optional User object; when None, one is created after
                     the main window exists (it needs the window for
                     dialog parenting)
        """
        CLIManager.__init__(self, dbstate, setloader=False, user=user)
        # On macOS/Quartz builds, hook into the native application object.
        if _GTKOSXAPPLICATION:
            self.macapp = QuartzApp.Application()
            self.macapp.set_use_quartz_accelerators(False)
        self.view_category_order = view_category_order
        #set pluginmanager to GUI one
        self._pmgr = GuiPluginManager.get_instance()
        # UIManager merge ids and action groups for the dynamically built
        # report/tool menus; populated as plugins register.
        self.merge_ids = []
        self.toolactions = None
        self.tool_menu_ui_id = None
        self.reportactions = None
        self.report_menu_ui_id = None
        # Notebook page bookkeeping for the pluggable views.
        self.active_page = None
        self.pages = []
        self.page_lookup = {}
        self.views = None
        self.current_views = [] # The current view in each category
        # Guard flag to suppress re-entrant view switching.
        self.view_changing = False
        # Interface preferences, read once from config.
        self.show_navigator = config.get('interface.view')
        self.show_toolbar = config.get('interface.toolbar-on')
        self.fullscreen = config.get('interface.fullscreen')
        self.__build_main_window() # sets self.uistate
        # The User object is created only after the main window exists.
        if self.user is None:
            self.user = User(error=ErrorDialog,
                             parent=self.window,
                             callback=self.uistate.pulse_progressbar,
                             uistate=self.uistate,
                             dbstate=self.dbstate)
        self.__connect_signals()
        if _GTKOSXAPPLICATION:
            self.macapp.ready()
        self.do_reg_plugins(self.dbstate, self.uistate)
        #plugins loaded now set relationship class
        self.rel_class = get_relationship_calculator()
        self.uistate.set_relationship_class()
        # Need to call after plugins have been registered
        self.uistate.connect('update-available', self.process_updates)
        self.check_for_updates()
        # Set autobackup
        self.uistate.connect('autobackup', self.autobackup)
        self.uistate.set_backup_timer()
def check_for_updates(self):
"""
Check for add-on updates.
"""
howoften = config.get("behavior.check-for-addon-updates")
update = False
if howoften != 0: # update never if zero
year, mon, day = list(map(
int, config.get("behavior.last-check-for-addon-updates").split("/")))
days = (datetime.date.today() - datetime.date(year, mon, day)).days
if howoften == 1 and days >= 30: # once a month
update = True
elif howoften == 2 and days >= 7: # once a week
update = True
elif howoften == 3 and days >= 1: # once a day
update = True
elif howoften == 4: # always
update = True
if update:
AvailableUpdates(self.uistate).start()
def process_updates(self, addon_update_list):
"""
Called when add-on updates are available.
"""
rescan = PluginWindows.UpdateAddons(self.uistate, [],
addon_update_list).rescan
self.do_reg_plugins(self.dbstate, self.uistate, rescan=rescan)
def _errordialog(self, title, errormessage):
"""
Show the error.
In the GUI, the error is shown, and a return happens
"""
ErrorDialog(title, errormessage,
parent=self.uistate.window)
return 1
    def __build_main_window(self):
        """
        Builds the GTK interface: the main window, the navigator sidebar,
        the notebook that holds the view pages, the menubar/toolbar from
        the UIManager, and the statusbar.  Sets self.uistate; construction
        order matters (e.g. the UIManager must exist before widgets are
        fetched from it).
        """
        # Restore the window geometry saved in the configuration.
        width = config.get('interface.main-window-width')
        height = config.get('interface.main-window-height')
        horiz_position = config.get('interface.main-window-horiz-position')
        vert_position = config.get('interface.main-window-vert-position')
        self.window = Gtk.Window()
        self.window.set_icon_from_file(ICON)
        self.window.set_default_size(width, height)
        self.window.move(horiz_position, vert_position)
        #Set the mnemonic modifier on Macs to alt-ctrl so that it
        #doesn't interfere with the extended keyboard, see
        #https://gramps-project.org/bugs/view.php?id=6943
        if is_quartz():
            self.window.set_mnemonic_modifier(
                Gdk.ModifierType.CONTROL_MASK | Gdk.ModifierType.MOD1_MASK)
        # Layout: a vertical box holding menubar, toolbar, then a paned
        # area with the navigator (left) and the view notebook (right),
        # and finally the statusbar.
        vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.window.add(vbox)
        hpane = Gtk.Paned()
        self.ebox = Gtk.EventBox()
        self.navigator = Navigator(self)
        self.ebox.add(self.navigator.get_top())
        hpane.add1(self.ebox)
        hpane.show()
        self.notebook = Gtk.Notebook()
        self.notebook.set_scrollable(True)
        # Tabs are hidden; navigation happens through the sidebar/menus.
        self.notebook.set_show_tabs(False)
        self.notebook.show()
        self.__init_lists()
        self.__build_ui_manager()
        hpane.add2(self.notebook)
        self.menubar = self.uimanager.get_widget('/MenuBar')
        self.toolbar = self.uimanager.get_widget('/ToolBar')
        self.__attach_menubar(vbox)
        vbox.pack_start(self.toolbar, False, True, 0)
        vbox.pack_start(hpane, True, True, 0)
        self.statusbar = Statusbar()
        self.statusbar.show()
        vbox.pack_end(self.statusbar, False, True, 0)
        vbox.show()
        self.uistate = DisplayState(self.window, self.statusbar,
                                    self.uimanager, self)
        # Create history objects
        for nav_type in ('Person', 'Family', 'Event', 'Place', 'Source',
                         'Citation', 'Repository', 'Note', 'Media'):
            self.uistate.register(self.dbstate, nav_type, 0)
        self.dbstate.connect('database-changed', self.uistate.db_changed)
        self.tags = Tags(self.uistate, self.dbstate)
        self.sidebar_menu = self.uimanager.get_widget(
            '/MenuBar/ViewMenu/Sidebar/')
        # handle OPEN button, insert it into the toolbar. Unfortunately,
        # UIManager has no built in support for an Open Recent button
        openbtn = self.__build_open_button()
        self.uistate.set_open_widget(openbtn)
        self.toolbar.insert(openbtn, 0)
        self.recent_manager = RecentDocsMenu(
            self.uistate, self.dbstate, self._read_recent_file)
        self.recent_manager.build()
        self.db_loader = DbLoader(self.dbstate, self.uistate)
        self.__setup_navigator()
        # Apply the persisted toolbar/fullscreen preferences.
        if self.show_toolbar:
            self.toolbar.show()
        else:
            self.toolbar.hide()
        if self.fullscreen:
            self.window.fullscreen()
        self.window.set_title("%s - Gramps" % _('No Family Tree'))
        self.window.show()
def __setup_navigator(self):
    """
    Apply the configured sidebar visibility: show the navigator when
    enabled, hide it otherwise.
    """
    # self.ebox is the event box hosting the navigator widget.
    if not self.show_navigator:
        self.ebox.hide()
    else:
        self.ebox.show()
def __build_open_button(self):
    """
    Create the OPEN menu-tool-button by hand; GTK's UIManager has no
    built-in support for an "Open Recent" button.
    """
    button = Gtk.MenuToolButton()
    button.set_icon_name('gramps')
    button.connect('clicked', self.__open_activate)
    # Insensitive until the recent-files menu is attached to it.
    button.set_sensitive(False)
    button.set_tooltip_text(_("Connect to a recent database"))
    button.show()
    return button
def __connect_signals(self):
    """
    Wire up the window-level signal handlers needed by the view manager.
    """
    self.window.connect('delete-event', self.quit)
    self.notebook.connect('switch-page', self.view_changed)
    if _GTKOSXAPPLICATION:
        # Quit cleanly when macOS terminates the application.
        self.macapp.connect('NSApplicationWillTerminate', self.quit)
def __init_lists(self):
    """
    Initialize the action lists used to populate the UIManager.

    Each entry is a UIManager action tuple:
    (name, stock-icon, label, accelerator, tooltip, callback).
    """
    self._file_action_list = [
        ('FileMenu', None, _('_Family Trees')),
        ('Open', 'gramps-db', _('_Manage Family Trees...'), "<PRIMARY>o",
         _("Manage databases"), self.__open_activate),
        ('OpenRecent', None, _('Open _Recent'), None,
         _("Open an existing database")),
        ('Quit', 'application-exit', _('_Quit'), "<PRIMARY>q", None,
         self.quit),
        ('ViewMenu', None, _('_View')),
        ('EditMenu', None, _('_Edit')),
        ('Preferences', 'preferences-system', _('_Preferences...'), None,
         None, self.preferences_activate),
        ('HelpMenu', None, _('_Help')),
        ('HomePage', None, _('Gramps _Home Page'), None, None,
         home_page_activate),
        ('MailingLists', None, _('Gramps _Mailing Lists'), None, None,
         mailing_lists_activate),
        ('ReportBug', None, _('_Report a Bug'), None, None,
         report_bug_activate),
        ('ExtraPlugins', None, _('_Extra Reports/Tools'), None, None,
         extra_plugins_activate),
        ('About', 'help-about', _('_About'), None, None,
         self.display_about_box),
        ('PluginStatus', None, _('_Plugin Manager'), None, None,
         self.__plugin_status),
        ('FAQ', None, _('_FAQ'), None, None, faq_activate),
        ('KeyBindings', None, _('_Key Bindings'), None, None, key_bindings),
        ('UserManual', 'help-browser', _('_User Manual'), 'F1', None,
         manual_activate),
        ('TipOfDay', None, _('Tip of the Day'), None, None,
         self.tip_of_day_activate),
    ]
    # Actions usable even on a read-only database.
    self._readonly_action_list = [
        ('Close', None, _('_Close'), "<control>w",
         _("Close the current database"), self.close_database),
        ('Export', 'gramps-export', _('_Export...'), "<PRIMARY>e", None,
         self.export_data),
        ('Backup', None, _("Make Backup..."), None,
         _("Make a Gramps XML backup of the database"), self.quick_backup),
        ('Abandon', 'document-revert',
         _('_Abandon Changes and Quit'), None, None, self.abort),
        ('Reports', 'gramps-reports', _('_Reports'), None,
         _("Open the reports dialog"), self.reports_clicked),
        ('GoMenu', None, _('_Go')),
        ('ReportsMenu', None, _('_Reports')),
        ('Books', None, _('Books...'), None, None, self.run_book),
        ('WindowsMenu', None, _('_Windows')),
        ('F2', None, 'F2', "F2", None, self.__keypress),
        ('F3', None, 'F3', "F3", None, self.__keypress),
        ('F4', None, 'F4', "F4", None, self.__keypress),
        ('F5', None, 'F5', "F5", None, self.__keypress),
        ('F6', None, 'F6', "F6", None, self.__keypress),
        ('F7', None, 'F7', "F7", None, self.__keypress),
        # BUGFIX: the F8 action's label used to read 'F9'.
        ('F8', None, 'F8', "F8", None, self.__keypress),
        ('F9', None, 'F9', "F9", None, self.__keypress),
        ('F11', None, 'F11', "F11", None, self.__keypress),
        ('<PRIMARY>1', None, '<PRIMARY>1', "<PRIMARY>1", None,
         self.__gocat),
        ('<PRIMARY>2', None, '<PRIMARY>2', "<PRIMARY>2", None,
         self.__gocat),
        ('<PRIMARY>3', None, '<PRIMARY>3', "<PRIMARY>3", None,
         self.__gocat),
        ('<PRIMARY>4', None, '<PRIMARY>4', "<PRIMARY>4", None,
         self.__gocat),
        ('<PRIMARY>5', None, '<PRIMARY>5', "<PRIMARY>5", None,
         self.__gocat),
        ('<PRIMARY>6', None, '<PRIMARY>6', "<PRIMARY>6", None,
         self.__gocat),
        ('<PRIMARY>7', None, '<PRIMARY>7', "<PRIMARY>7", None,
         self.__gocat),
        ('<PRIMARY>8', None, '<PRIMARY>8', "<PRIMARY>8", None,
         self.__gocat),
        ('<PRIMARY>9', None, '<PRIMARY>9', "<PRIMARY>9", None,
         self.__gocat),
        ('<PRIMARY>0', None, '<PRIMARY>0', "<PRIMARY>0", None,
         self.__gocat),
        # NOTE: CTRL+ALT+NUMBER is set in src/plugins/sidebar/cat...py
        ('<PRIMARY>BackSpace', None, '<PRIMARY>BackSpace',
         "<PRIMARY>BackSpace", None, self.__keypress),
        ('<PRIMARY>Delete', None, '<PRIMARY>Delete',
         "<PRIMARY>Delete", None, self.__keypress),
        ('<PRIMARY>Insert', None, '<PRIMARY>Insert',
         "<PRIMARY>Insert", None, self.__keypress),
        ('F12', None, 'F12', "F12", None, self.__keypress),
        ('<PRIMARY>J', None, '<PRIMARY>J',
         "<PRIMARY>J", None, self.__keypress),
        ('<PRIMARY>N', None, '<PRIMARY>N', "<PRIMARY>N", None,
         self.__next_view),
        ('<PRIMARY>P', None, '<PRIMARY>P', "<PRIMARY>P", None,
         self.__prev_view),
    ]
    # Actions requiring a writable database.
    self._action_action_list = [
        ('Clipboard', 'edit-paste', _('Clip_board'), "<PRIMARY>b",
         _("Open the Clipboard dialog"), self.clipboard),
        ('AddMenu', None, _('_Add')),
        ('PersonAdd', None, _('Person'), "<shift><Alt>p", None,
         self.add_new_person),
        ('FamilyAdd', None, _('Family'), "<shift><Alt>f", None,
         self.add_new_family),
        ('EventAdd', None, _('Event'), "<shift><Alt>e", None,
         self.add_new_event),
        ('PlaceAdd', None, _('Place'), "<shift><Alt>l", None,
         self.add_new_place),
        ('SourceAdd', None, _('Source'), "<shift><Alt>s", None,
         self.add_new_source),
        ('CitationAdd', None, _('Citation'), "<shift><Alt>c", None,
         self.add_new_citation),
        ('RepositoryAdd', None, _('Repository'), "<shift><Alt>r", None,
         self.add_new_repository),
        ('MediaAdd', None, _('Media'), "<shift><Alt>m", None,
         self.add_new_media),
        ('NoteAdd', None, _('Note'), "<shift><Alt>n", None,
         self.add_new_note),
        #--------------------------------------
        ('Import', 'gramps-import', _('_Import...'), "<PRIMARY>i", None,
         self.import_data),
        ('Tools', 'gramps-tools', _('_Tools'), None,
         _("Open the tools dialog"), self.tools_clicked),
        ('BookMenu', None, _('_Bookmarks')),
        ('ToolsMenu', None, _('_Tools')),
        ('ConfigView', 'gramps-config', _('_Configure...'),
         '<shift><PRIMARY>c', _('Configure the active view'),
         self.config_view),
    ]
    # Toggle actions: (name, icon, label, accel, tooltip, cb, state).
    self._file_toggle_action_list = [
        ('Navigator', None, _('_Navigator'), "<PRIMARY>m", None,
         self.navigator_toggle, self.show_navigator),
        ('Toolbar', None, _('_Toolbar'), None, None, self.toolbar_toggle,
         self.show_toolbar),
        ('Fullscreen', None, _('F_ull Screen'), "F11", None,
         self.fullscreen_toggle, self.fullscreen),
    ]
    self._undo_action_list = [
        ('Undo', 'edit-undo', _('_Undo'), '<PRIMARY>z', None,
         self.undo),
    ]
    self._redo_action_list = [
        ('Redo', 'edit-redo', _('_Redo'), '<shift><PRIMARY>z', None,
         self.redo),
    ]
    self._undo_history_action_list = [
        ('UndoHistory', 'gramps-undo-history',
         _('Undo History...'), "<PRIMARY>H", None, self.undo_history),
    ]
def run_book(self, action):
    """
    Open the book selection dialog; ignore the request when the dialog
    is already on screen.
    """
    try:
        BookSelector(self.dbstate, self.uistate)
    except WindowActiveError:
        pass
def __keypress(self, action):
    """
    Forward a key press to the active page: the action's name identifies
    the function the page should run; report unbound keys in the status
    bar.
    """
    name = action.get_name()
    try:
        self.active_page.call_function(name)
    except Exception:
        self.uistate.push_message(
            self.dbstate, _("Key %s is not bound") % name)
def __gocat(self, action):
    """
    Jump to the category chosen with ctrl+number; the digit 0 stands
    for category 10.
    """
    digit = int(action.get_name()[-1])
    cat = (digit if digit else 10) - 1
    if cat >= len(self.current_views):
        # That category is not present in this configuration.
        return False
    self.goto_page(cat, None)
def __next_view(self, action):
    """
    Activate the next category, wrapping back to the first one after
    the last.
    """
    curpage = self.notebook.get_current_page()
    # Map the visible notebook page back to its (category, view) pair.
    for key, page in self.page_lookup.items():
        if page == curpage:
            cat_num, view_num = key
            break
    target = 0 if cat_num >= len(self.current_views) - 1 else cat_num + 1
    self.goto_page(target, None)
def __prev_view(self, action):
    """
    Activate the previous category, wrapping around to the last one
    when already at the first.
    """
    curpage = self.notebook.get_current_page()
    # Map the visible notebook page back to its (category, view) pair.
    for key, page in self.page_lookup.items():
        if page == curpage:
            cat_num, view_num = key
            break
    target = cat_num - 1 if cat_num > 0 else len(self.current_views) - 1
    self.goto_page(target, None)
def init_interface(self):
    """
    Initialize the interface: load the available views, build the tool
    and report menus, and put the action groups into their initial state.
    """
    self.views = self.get_available_views()
    defaults = views_to_show(self.views,
                             config.get('preferences.use-last-view'))
    self.current_views = defaults[2]
    self.navigator.load_plugins(self.dbstate, self.uistate)
    # defaults[0]/defaults[1] are the category/view to open first.
    self.goto_page(defaults[0], defaults[1])
    # Disable file actions while the plugin menus are being built.
    self.fileactions.set_sensitive(False)
    self.__build_tools_menu(self._pmgr.get_reg_tools())
    self.__build_report_menu(self._pmgr.get_reg_reports())
    self._pmgr.connect('plugins-reloaded',
                       self.__rebuild_report_and_tool_menus)
    self.fileactions.set_sensitive(True)
    self.uistate.widget.set_sensitive(True)
    if not self.file_loaded:
        # No tree open yet: grey out and hide everything that needs one.
        self.actiongroup.set_sensitive(False)
        self.readonlygroup.set_sensitive(False)
        self.undoactions.set_sensitive(False)
        self.redoactions.set_sensitive(False)
        self.undohistoryactions.set_sensitive(False)
        self.actiongroup.set_visible(False)
        self.readonlygroup.set_visible(False)
        self.undoactions.set_visible(False)
        self.redoactions.set_visible(False)
        self.undohistoryactions.set_visible(False)
    self.uimanager.ensure_update()
    config.connect("interface.statusbar", self.__statusbar_key_update)
def __statusbar_key_update(self, client, cnxn_id, entry, data):
    """
    Config callback: refresh the statusbar when the statusbar key
    changes.
    """
    self.uistate.modify_statusbar(self.dbstate)
def post_init_interface(self, show_manager=True):
    """
    Show the main window. Deferred from init so that ArgHandler can run
    without the window always being shown; optionally pop up the
    Family Tree manager when no tree is open.
    """
    self.window.show()
    tree_missing = not self.dbstate.is_open()
    if tree_missing and show_manager:
        self.__open_activate(None)
def do_reg_plugins(self, dbstate, uistate, rescan=False):
    """
    Register the plugins at initialization time, delegating the actual
    registration to the CLI manager. The plugin status window is opened
    on an error if the user has requested it in the preferences.
    """
    # Show progress in the statusbar while plugins register.
    self.uistate.status_text(_('Registering plugins...'))
    error = CLIManager.do_reg_plugins(self, dbstate, uistate,
                                      rescan=rescan)
    # Pop up the plugin status window when registration reported an
    # error and the user opted in to that behavior.
    if error and config.get('behavior.pop-plugin-status'):
        self.__plugin_status()
    self.uistate.push_message(self.dbstate, _('Ready'))
def close_database(self, action=None, make_backup=True):
    """
    Close the current database and run the post-close GUI cleanup.

    ``make_backup`` is accepted for interface compatibility; it is not
    used in this method.
    """
    self.dbstate.no_database()
    self.post_close_db()
def quit(self, *obj):
    """
    Closes out the program: optionally back up, close the database, let
    pages clean up, persist the window geometry, and leave the GTK main
    loop.
    """
    # mark the interface insensitive to prevent unexpected events
    self.uistate.set_sensitive(False)
    # backup data if the user enabled backup-on-exit
    if config.get('database.backup-on-exit'):
        self.autobackup()
    # close the database
    if self.dbstate.is_open():
        self.dbstate.db.close(user=self.user)
    # have each page save anything, if they need to:
    self.__delete_pages()
    # save the current window size
    (width, height) = self.window.get_size()
    config.set('interface.main-window-width', width)
    config.set('interface.main-window-height', height)
    # save the current window position
    (horiz_position, vert_position) = self.window.get_position()
    config.set('interface.main-window-horiz-position', horiz_position)
    config.set('interface.main-window-vert-position', vert_position)
    config.save()
    # leaving the main loop ends the application
    Gtk.main_quit()
def abort(self, obj=None):
    """
    Abandon changes and quit: after confirmation, undo every
    transaction of the session and exit. When the number of changes
    exceeds the undo limit, a full rollback is impossible and only a
    warning is shown.
    """
    if self.dbstate.db.abort_possible:
        dialog = QuestionDialog2(
            _("Abort changes?"),
            _("Aborting changes will return the database to the state "
              "it was before you started this editing session."),
            _("Abort changes"),
            _("Cancel"),
            parent=self.uistate.window)
        if dialog.run():
            # Silence per-change signals while rolling everything back.
            self.dbstate.db.disable_signals()
            while self.dbstate.db.undo():
                pass
            self.quit()
    else:
        WarningDialog(
            _("Cannot abandon session's changes"),
            _('Changes cannot be completely abandoned because the '
              'number of changes made in the session exceeded the '
              'limit.'), parent=self.uistate.window)
def __init_action_group(self, name, actions, sensitive=True, toggles=None):
    """
    Create an action group for the UIManager, register it, and return
    it.

    :param name: name of the new group
    :param actions: list of action tuples to add
    :param sensitive: initial sensitivity of the group
    :param toggles: optional list of toggle-action tuples
    """
    group = ActionGroup(name=name)
    group.add_actions(actions)
    if toggles:
        group.add_toggle_actions(toggles)
    group.set_sensitive(sensitive)
    self.uimanager.insert_action_group(group, 1)
    return group
def __build_ui_manager(self):
    """
    Builds the UIManager and the associated action groups: the
    always-on file actions (with their toggles), the read-only and
    read-write groups, and the undo/redo groups (created insensitive).
    """
    self.uimanager = Gtk.UIManager()
    accelgroup = self.uimanager.get_accel_group()
    self.actiongroup = self.__init_action_group(
        'MainWindow', self._action_action_list)
    self.readonlygroup = self.__init_action_group(
        'AllMainWindow', self._readonly_action_list)
    self.undohistoryactions = self.__init_action_group(
        'UndoHistory', self._undo_history_action_list)
    self.fileactions = self.__init_action_group(
        'FileWindow', self._file_action_list,
        toggles=self._file_toggle_action_list)
    # Undo/redo start insensitive; they are enabled per database state.
    self.undoactions = self.__init_action_group(
        'Undo', self._undo_action_list, sensitive=False)
    self.redoactions = self.__init_action_group(
        'Redo', self._redo_action_list, sensitive=False)
    self.window.add_accel_group(accelgroup)
    self.uimanager.add_ui_from_string(UIDEFAULT)
    self.uimanager.ensure_update()
def __attach_menubar(self, vbox):
    """
    Attach the menubar to the main window. Under GtkOSXApplication the
    in-window menubar is hidden and mirrored into the global macOS
    menubar instead, with About and Preferences moved to the app menu.
    """
    vbox.pack_start(self.menubar, False, True, 0)
    if _GTKOSXAPPLICATION:
        self.menubar.hide()
        # NOTE: an unused lookup of the Quit menu item was removed here;
        # GtkOSXApplication supplies its own Quit entry.
        about_item = self.uimanager.get_widget("/MenuBar/HelpMenu/About")
        prefs_item = self.uimanager.get_widget(
            "/MenuBar/EditMenu/Preferences")
        self.macapp.set_menu_bar(self.menubar)
        self.macapp.insert_app_menu_item(about_item, 0)
        self.macapp.insert_app_menu_item(prefs_item, 1)
def preferences_activate(self, obj):
    """
    Open the preferences dialog; ignore the request if it is already
    open.
    """
    try:
        GrampsPreferences(self.uistate, self.dbstate)
    except WindowActiveError:
        pass
def tip_of_day_activate(self, obj):
    """
    Show the Tip of the Day window.
    """
    # Imported lazily so the dialog module loads only when needed.
    from .tipofday import TipOfDay
    TipOfDay(self.uistate)
def __plugin_status(self, obj=None, data=None):
    """
    Open the plugin status dialog; ignore the request if it is already
    open.
    """
    try:
        PluginWindows.PluginStatus(self.dbstate, self.uistate, [])
    except WindowActiveError:
        pass
def navigator_toggle(self, obj, data=None):
    """
    Show or hide the navigator sidebar to match the toggle button and
    persist the choice in the configuration.
    """
    visible = obj.get_active()
    if visible:
        self.ebox.show()
    else:
        self.ebox.hide()
    config.set('interface.view', visible)
    self.show_navigator = visible
    config.save()
def toolbar_toggle(self, obj, data=None):
    """
    Show or hide the toolbar to match the toggle button and persist
    the choice in the configuration.
    """
    active = obj.get_active()
    if active:
        self.toolbar.show()
    else:
        self.toolbar.hide()
    config.set('interface.toolbar-on', active)
    config.save()
def fullscreen_toggle(self, obj, data=None):
    """
    Switch the main Gramps window in or out of fullscreen to match the
    toggle button, and save the setting in the config file.
    """
    active = obj.get_active()
    if active:
        self.window.fullscreen()
    else:
        self.window.unfullscreen()
    config.set('interface.fullscreen', active)
    config.save()
def get_views(self):
    """Return the list of view definitions."""
    return self.views
def goto_page(self, cat_num, view_num):
    """
    Create the page if it doesn't exist and make it the current page.
    Returns the page object.

    When view_num is None, the view last used within this category is
    selected.
    """
    if view_num is None:
        view_num = self.current_views[cat_num]
    else:
        # Remember the chosen view as this category's current view.
        self.current_views[cat_num] = view_num
    page_num = self.page_lookup.get((cat_num, view_num))
    if page_num is None:
        # Page was never built: create it and record its notebook index.
        page_def = self.views[cat_num][view_num]
        page_num = self.notebook.get_n_pages()
        self.page_lookup[(cat_num, view_num)] = page_num
        self.__create_page(page_def[0], page_def[1])
    self.notebook.set_current_page(page_num)
    return self.pages[page_num]
def get_category(self, cat_name):
    """
    Return the category number matching the given category name, or
    None when no category matches.
    """
    matches = (num for num, cat_views in enumerate(self.views)
               if cat_name == cat_views[0][0].category[1])
    return next(matches, None)
def __create_dummy_page(self, pdata, error):
    """
    Build a placeholder page shown when a real view fails to load.
    """
    from .views.pageview import DummyPage
    message = _("View failed to load. Check error output.")
    return DummyPage(pdata.name, pdata, self.dbstate, self.uistate,
                     message, error)
def __create_page(self, pdata, page_def):
    """
    Create a new page and set it as the current page. A view that fails
    to instantiate or to build its display widget is replaced with a
    dummy page carrying the traceback.
    """
    try:
        page = page_def(pdata, self.dbstate, self.uistate)
    except:
        # Deliberately broad: any failure in a plugin view must not
        # take down the application; show a placeholder instead.
        import traceback
        LOG.warning("View '%s' failed to load.", pdata.id)
        traceback.print_exc()
        page = self.__create_dummy_page(pdata, traceback.format_exc())
    try:
        page_display = page.get_display()
    except:
        # Same rationale: fall back to a dummy page on widget failure.
        import traceback
        print("ERROR: '%s' failed to create view" % pdata.name)
        traceback.print_exc()
        page = self.__create_dummy_page(pdata, traceback.format_exc())
        page_display = page.get_display()
    page.define_actions()
    page.post()
    self.pages.append(page)
    # create icon/label for notebook tab (useful for debugging)
    hbox = Gtk.Box()
    image = Gtk.Image()
    image.set_from_icon_name(page.get_stock(), Gtk.IconSize.MENU)
    hbox.pack_start(image, False, True, 0)
    hbox.add(Gtk.Label(label=pdata.name))
    hbox.show_all()
    page_num = self.notebook.append_page(page.get_display(), hbox)
    if not self.file_loaded:
        # No tree open: keep database-dependent actions disabled/hidden.
        self.actiongroup.set_sensitive(False)
        self.readonlygroup.set_sensitive(False)
        self.undoactions.set_sensitive(False)
        self.redoactions.set_sensitive(False)
        self.undohistoryactions.set_sensitive(False)
        self.actiongroup.set_visible(False)
        self.readonlygroup.set_visible(False)
        self.undoactions.set_visible(False)
        self.redoactions.set_visible(False)
        self.undohistoryactions.set_visible(False)
    self.uimanager.ensure_update()
    return page
def view_changed(self, notebook, page, page_num):
    """
    Called when the notebook page is changed: records the new view in
    the configuration and activates it.
    """
    if self.view_changing:
        # Guard against re-entrant notifications while switching.
        return
    self.view_changing = True
    cat_num = view_num = None
    # Map the notebook page index back to its (category, view) pair.
    for key in self.page_lookup:
        if self.page_lookup[key] == page_num:
            cat_num, view_num = key
            break
    # Save last view in configuration
    view_id = self.views[cat_num][view_num][0].id
    config.set('preferences.last-view', view_id)
    last_views = config.get('preferences.last-views')
    if len(last_views) != len(self.views):
        # If the number of categories has changed then reset the defaults
        last_views = [''] * len(self.views)
    last_views[cat_num] = view_id
    config.set('preferences.last-views', last_views)
    config.save()
    self.navigator.view_changed(cat_num, view_num)
    self.__change_page(page_num)
    self.view_changing = False
def __change_page(self, page_num):
    """
    Perform necessary actions when a page is changed: deactivate the
    old page, activate the new one, and merge its UI into the manager.
    """
    if not self.dbstate.is_open():
        return
    self.__disconnect_previous_page()
    self.active_page = self.pages[page_num]
    self.active_page.set_active()
    self.__connect_active_page(page_num)
    self.uimanager.ensure_update()
    if _GTKOSXAPPLICATION:
        # Keep the global macOS menubar in sync with the new page's UI.
        self.macapp.sync_menubar()
    # Let pending GTK events settle before notifying the page.
    while Gtk.events_pending():
        Gtk.main_iteration()
    self.active_page.change_page()
def __delete_pages(self):
    """
    Give every page a chance to clean up by calling its on_delete().
    """
    for current_page in self.pages:
        current_page.on_delete()
def __disconnect_previous_page(self):
    """
    Disconnects the previous page: removes its action groups and the
    UI components it merged into the UIManager.
    """
    # Un-merge every UI chunk the previous page added.
    list(map(self.uimanager.remove_ui, self.merge_ids))
    if self.active_page is not None:
        self.active_page.set_inactive()
        groups = self.active_page.get_actions()
        for grp in groups:
            # Only remove groups that are still registered.
            if grp in self.uimanager.get_action_groups():
                self.uimanager.remove_action_group(grp)
        self.active_page = None
def __connect_active_page(self, page_num):
    """
    Inserts the action groups associated with the current page into
    the UIManager and merges the page's UI definitions.
    """
    for grp in self.active_page.get_actions():
        self.uimanager.insert_action_group(grp, 1)
    # Record each merge id so __disconnect_previous_page can undo it.
    uidef = self.active_page.ui_definition()
    self.merge_ids = [self.uimanager.add_ui_from_string(uidef)]
    for uidef in self.active_page.additional_ui_definitions():
        mergeid = self.uimanager.add_ui_from_string(uidef)
        self.merge_ids.append(mergeid)
    # The Configure action is only usable if the page supports it.
    configaction = self.actiongroup.get_action('ConfigView')
    if self.active_page.can_configure():
        configaction.set_sensitive(True)
    else:
        configaction.set_sensitive(False)
def import_data(self, obj):
    """
    Run the import dialog and, when available, show the statistics of
    the imported file. Does nothing unless a database is open.
    """
    if not self.dbstate.is_open():
        return
    self.db_loader.import_file()
    infotxt = self.db_loader.import_info_text()
    if infotxt:
        InfoDialog(_('Import Statistics'), infotxt, parent=self.window)
    self.__post_load()
def __open_activate(self, obj):
    """
    Called when the Open button is clicked, opens the DbManager. When a
    tree is selected it is loaded; otherwise, if the manager renamed
    the current tree, the window title is rewritten to reflect it.
    """
    from .dbman import DbManager
    dialog = DbManager(self.uistate, self.dbstate, self, self.window)
    value = dialog.run()
    if value:
        # A tree was chosen: close the current one, then load it.
        if self.dbstate.is_open():
            self.dbstate.db.close(user=self.user)
        (filename, title) = value
        self.db_loader.read_file(filename)
        if self.dbstate.db.is_open():
            self._post_load_newdb(filename, 'x-directory/normal', title)
    else:
        if dialog.after_change != "":
            # We change the title of the main window.
            old_title = self.uistate.window.get_title()
            if old_title:
                # Split "<tree name> - Gramps" at the separator.
                delim = old_title.find(' - ')
                tit1 = old_title[:delim]
                tit2 = old_title[delim:]
                new_title = dialog.after_change
                if '<=' in tit2:
                    new_title += tit2.replace(']', '') + ' => ' + tit1 + ']'
                else:
                    new_title += tit2 + ' <= [' + tit1 + ']'
                self.uistate.window.set_title(new_title)
def __post_load(self):
    """
    This method is for the common UI post_load, both new files and
    added data like imports: wires the database's undo/redo callbacks
    into the UI and resets the undo-history window.
    """
    # Route undo/redo label updates from the database to the menus.
    self.dbstate.db.undo_callback = self.__change_undo_label
    self.dbstate.db.redo_callback = self.__change_redo_label
    # Reset both labels to their defaults.
    self.__change_undo_label(None)
    self.__change_redo_label(None)
    self.dbstate.db.undo_history_callback = self.undo_history_update
    self.undo_history_close()
def _post_load_newdb(self, filename, filetype, title=None):
"""
The method called after load of a new database.
Inherit CLI method to add GUI part
"""
self._post_load_newdb_nongui(filename, title)
self._post_load_newdb_gui(filename, filetype, title)
def _post_load_newdb_gui(self, filename, filetype, title=None):
    """
    Update the GUI after a new database is loaded: set the window
    title, adjust action-group visibility/sensitivity for read-write
    vs read-only trees, and rebuild the recent-files menu.
    """
    # Strip a single trailing path separator. Guard against an empty
    # filename so the index access cannot raise IndexError.
    if filename and filename[-1] == os.path.sep:
        filename = filename[:-1]
    name = title if title else os.path.basename(filename)
    rw = not self.dbstate.db.readonly
    if rw:
        msg = "%s - Gramps" % name
    else:
        msg = "%s (%s) - Gramps" % (name, _('Read Only'))
    self.uistate.window.set_title(msg)
    self.__change_page(self.notebook.get_current_page())
    # Editing-related groups are only shown/enabled on writable trees.
    self.actiongroup.set_visible(rw)
    self.readonlygroup.set_visible(True)
    self.undoactions.set_visible(rw)
    self.redoactions.set_visible(rw)
    self.undohistoryactions.set_visible(rw)
    self.actiongroup.set_sensitive(rw)
    self.readonlygroup.set_sensitive(True)
    self.undoactions.set_sensitive(rw)
    self.redoactions.set_sensitive(rw)
    self.undohistoryactions.set_sensitive(rw)
    self.recent_manager.build()
    # Call common __post_load method for GUI update after a change
    self.__post_load()
def post_close_db(self):
    """
    Reset the interface after a database has been closed.
    """
    self.undo_history_close()
    self.uistate.window.set_title("%s - Gramps" % _('No Family Tree'))
    # Grey out the actions that only make sense with an open tree.
    for group in (self.actiongroup, self.readonlygroup,
                  self.undohistoryactions):
        group.set_sensitive(False)
    self.uistate.clear_filter_results()
    self.__disconnect_previous_page()
    # Hide every database-dependent action group.
    for group in (self.actiongroup, self.readonlygroup, self.undoactions,
                  self.redoactions, self.undohistoryactions):
        group.set_visible(False)
    self.uimanager.ensure_update()
    config.set('paths.recent-file', '')
    config.save()
def enable_menu(self, enable):
    """ Enable/disable the menues. Used by the dbloader for import to
    prevent other operations during import. Needed because simpler
    methods don't work under Gnome with application menus at top of
    screen (instead of the Gramps window).
    Note: enable must be set to False on first call.
    """
    groups = (self.actiongroup,
              self.readonlygroup,
              self.undoactions,
              self.redoactions,
              self.undohistoryactions,
              self.fileactions,
              self.toolactions,
              self.reportactions,
              self.recent_manager.action_group)
    if not enable:
        # Snapshot the current sensitivity so it can be restored later.
        self.action_st = tuple(grp.get_sensitive() for grp in groups)
        for grp in groups:
            grp.set_sensitive(enable)
    else:
        # Restore the sensitivity recorded by the disabling call.
        for grp, state in zip(groups, self.action_st):
            grp.set_sensitive(state)
def __change_undo_label(self, label):
    """
    Rebuild the Undo action group so its menu entry shows *label*, or
    the default '_Undo' when label is falsy.
    """
    self.uimanager.remove_action_group(self.undoactions)
    self.undoactions = Gtk.ActionGroup(name='Undo')
    text = label if label else _('_Undo')
    self.undoactions.add_actions([
        ('Undo', 'edit-undo', text, '<PRIMARY>z', None, self.undo)])
    # Sensitivity is managed separately from the label.
    self.undoactions.set_sensitive(False)
    self.uimanager.insert_action_group(self.undoactions, 1)
def __change_redo_label(self, label):
    """
    Rebuild the Redo action group so its menu entry shows *label*, or
    the default '_Redo' when label is falsy.
    """
    self.uimanager.remove_action_group(self.redoactions)
    self.redoactions = Gtk.ActionGroup(name='Redo')
    if label:
        self.redoactions.add_actions([
            ('Redo', 'edit-redo', label, '<shift><PRIMARY>z',
             None, self.redo)])
    else:
        # BUGFIX: the default entry mistakenly used the 'edit-undo'
        # icon; the Redo action must use 'edit-redo'.
        self.redoactions.add_actions([
            ('Redo', 'edit-redo', _('_Redo'),
             '<shift><PRIMARY>z', None, self.redo)])
    self.redoactions.set_sensitive(False)
    self.uimanager.insert_action_group(self.redoactions, 1)
def undo_history_update(self):
    """
    Refresh the contents of the Undo History window, when it exists.
    """
    try:
        self.undo_history_window.update()
    except AttributeError:
        # No history window has been created yet; nothing to refresh.
        pass
def undo_history_close(self):
    """
    Close the Undo History window when it exists and is open.
    """
    try:
        if self.undo_history_window.opened:
            self.undo_history_window.close()
    except AttributeError:
        # No history window has been created yet; nothing to close.
        pass
def quick_backup(self, obj):
    """
    Open the quick-backup dialog (Gramps XML, with or without media);
    ignore the request if the dialog is already open.
    """
    try:
        QuickBackup(self.dbstate, self.uistate, self.user)
    except WindowActiveError:
        pass
def autobackup(self):
    """
    Back up the current family tree if it is open and has unsaved
    changes, reporting progress in the status bar.
    """
    if self.dbstate.db.is_open() and self.dbstate.db.has_changed:
        self.uistate.set_busy_cursor(True)
        self.uistate.progress.show()
        self.uistate.push_message(self.dbstate, _("Autobackup..."))
        try:
            self.__backup()
        except DbWriteFailure:
            self.uistate.push_message(self.dbstate,
                                      _("Error saving backup data"))
        finally:
            # Restore the cursor and hide the progress bar even when an
            # unexpected exception escapes the backup.
            self.uistate.set_busy_cursor(False)
            self.uistate.progress.hide()
def __backup(self):
    """
    Write a timestamped Gramps XML backup of the open database into
    the configured backup directory.
    """
    from gramps.plugins.export.exportxml import XmlWriter
    backup_path = config.get('database.backup-path')
    compress = config.get('database.compress-backup')
    writer = XmlWriter(self.dbstate.db, self.user, strip_photos=0,
                       compress=compress)
    # File name: <tree name>-<YYYY-MM-DD-HH-MM-SS>.gramps
    timestamp = '{0:%Y-%m-%d-%H-%M-%S}'.format(datetime.datetime.now())
    backup_name = "%s-%s.gramps" % (self.dbstate.db.get_dbname(),
                                    timestamp)
    writer.write(os.path.join(backup_path, backup_name))
def reports_clicked(self, obj):
    """
    Display the Reports dialog; ignore the request if it is already
    open.
    """
    try:
        ReportPluginDialog(self.dbstate, self.uistate, [])
    except WindowActiveError:
        pass
def tools_clicked(self, obj):
    """
    Display the Tools dialog; ignore the request if it is already open.
    """
    try:
        ToolPluginDialog(self.dbstate, self.uistate, [])
    except WindowActiveError:
        pass
def clipboard(self, obj):
    """
    Display the Clipboard window; ignore the request if it is already
    open.
    """
    from .clipboard import ClipboardWindow
    try:
        ClipboardWindow(self.dbstate, self.uistate)
    except WindowActiveError:
        pass
# ---------------Add new xxx --------------------------------
def add_new_person(self, obj):
    """
    Add a new person to the database. (Global keybinding)
    """
    new_person = Person()
    # The person editor requires at least one (empty) surname.
    new_person.primary_name.add_surname(Surname())
    new_person.primary_name.set_primary_surname(0)
    try:
        EditPerson(self.dbstate, self.uistate, [], new_person)
    except WindowActiveError:
        pass
def add_new_family(self, obj):
    """
    Add a new family to the database. (Global keybinding)
    """
    try:
        EditFamily(self.dbstate, self.uistate, [], Family())
    except WindowActiveError:
        pass
def add_new_event(self, obj):
    """
    Add a new event with type UNKNOWN (typing the first letter in the
    editor selects the event type).
    """
    try:
        new_event = Event()
        new_event.set_type(EventType.UNKNOWN)
        EditEvent(self.dbstate, self.uistate, [], new_event)
    except WindowActiveError:
        pass
def add_new_place(self, obj):
    """Create an empty place and open it in the place editor."""
    try:
        EditPlace(self.dbstate, self.uistate, [], Place())
    except WindowActiveError:
        pass
def add_new_source(self, obj):
    """Create an empty source and open it in the source editor."""
    try:
        EditSource(self.dbstate, self.uistate, [], Source())
    except WindowActiveError:
        pass
def add_new_repository(self, obj):
    """Create an empty repository and open it in the repository editor."""
    try:
        EditRepository(self.dbstate, self.uistate, [], Repository())
    except WindowActiveError:
        pass
def add_new_citation(self, obj):
    """
    Create an empty citation and open it in the citation editor.
    """
    try:
        EditCitation(self.dbstate, self.uistate, [], Citation())
    except WindowActiveError:
        pass
def add_new_media(self, obj):
    """Create an empty media object and open it in the media editor."""
    try:
        EditMedia(self.dbstate, self.uistate, [], Media())
    except WindowActiveError:
        pass
def add_new_note(self, obj):
    """Create an empty note and open it in the note editor."""
    try:
        EditNote(self.dbstate, self.uistate, [], Note())
    except WindowActiveError:
        pass
# ------------------------------------------------------------------------
def config_view(self, obj):
    """
    Open the configuration dialog for the active view.
    """
    self.active_page.configure()
def undo(self, obj):
    """
    Undo the last database transaction, showing the busy cursor while
    it runs.
    """
    self.uistate.set_busy_cursor(True)
    self.dbstate.db.undo()
    self.uistate.set_busy_cursor(False)
def redo(self, obj):
    """
    Redo the last undone database transaction, showing the busy cursor
    while it runs.
    """
    self.uistate.set_busy_cursor(True)
    self.dbstate.db.redo()
    self.uistate.set_busy_cursor(False)
def undo_history(self, obj):
    """
    Open the Undo History window and remember it; ignore the request
    if it is already open.
    """
    try:
        self.undo_history_window = UndoHistory(self.dbstate, self.uistate)
    except WindowActiveError:
        pass
def export_data(self, obj):
    """
    Start the Export Assistant when a database is open; ignore the
    request if the assistant is already running.
    """
    if not self.dbstate.is_open():
        return
    from .plug.export import ExportAssistant
    try:
        ExportAssistant(self.dbstate, self.uistate)
    except WindowActiveError:
        pass
def __rebuild_report_and_tool_menus(self):
    """
    Rebuild the report and tool menus after a plugin reload.
    """
    self.__build_tools_menu(self._pmgr.get_reg_tools())
    self.__build_report_menu(self._pmgr.get_reg_reports())
    # A reload may also have replaced the relationship calculator.
    self.uistate.set_relationship_class()
def __build_tools_menu(self, tool_menu_list):
    """
    Builds a new tools menu from the registered tool plugins,
    replacing any previously merged one.
    """
    if self.toolactions:
        # Drop the previous group and its merged UI before rebuilding.
        self.uistate.uimanager.remove_action_group(self.toolactions)
        self.uistate.uimanager.remove_ui(self.tool_menu_ui_id)
    self.toolactions = Gtk.ActionGroup(name='ToolWindow')
    (uidef, actions) = self.build_plugin_menu(
        'ToolsMenu', tool_menu_list, tool.tool_categories,
        make_plugin_callback)
    self.toolactions.add_actions(actions)
    # Keep the merge id so the menu can be removed on the next rebuild.
    self.tool_menu_ui_id = self.uistate.uimanager.add_ui_from_string(uidef)
    self.uimanager.insert_action_group(self.toolactions, 1)
    self.uistate.uimanager.ensure_update()
def __build_report_menu(self, report_menu_list):
    """
    Builds a new reports menu

    :param report_menu_list: registered report plugin data used to
        populate the menu entries.
    """
    if self.reportactions:
        # Remove the previously installed action group and UI definition
        # before installing the rebuilt menu.
        self.uistate.uimanager.remove_action_group(self.reportactions)
        self.uistate.uimanager.remove_ui(self.report_menu_ui_id)
    self.reportactions = Gtk.ActionGroup(name='ReportWindow')
    (udef, actions) = self.build_plugin_menu(
        'ReportsMenu', report_menu_list, standalone_categories,
        make_plugin_callback)
    self.reportactions.add_actions(actions)
    self.report_menu_ui_id = self.uistate.uimanager.add_ui_from_string(udef)
    self.uimanager.insert_action_group(self.reportactions, 1)
    self.uistate.uimanager.ensure_update()
def build_plugin_menu(self, text, item_list, categories, func):
    """
    Builds a new XML description for a menu based on the list of plugindata

    :param text: name of the menu action the placeholder lives under
        (e.g. 'ToolsMenu' or 'ReportsMenu').
    :param item_list: plugin data records to place into the menu.
    :param categories: mapping from a plugin's category to a
        (key, translated-label) pair.
    :param func: callback factory; called with (pdata, dbstate, uistate)
        and must return the menu-item activation callback.
    :returns: tuple (ui-definition XML string, list of action tuples)
        suitable for Gtk.UIManager / Gtk.ActionGroup.
    """
    actions = []
    ofile = StringIO()
    ofile.write('<ui><menubar name="MenuBar"><menu action="%s">'
                '<placeholder name="%s">' % (text, 'P_' + text))

    menu = Gtk.Menu()
    menu.show()

    # Group the plugins by category; unsupported ones go into a
    # dedicated pseudo-category appended at the end of the menu.
    hash_data = defaultdict(list)
    for pdata in item_list:
        if not pdata.supported:
            category = _UNSUPPORTED
        else:
            category = categories[pdata.category]
        hash_data[category].append(pdata)

    def write_category(action_key, label, pdatas):
        # Emit one submenu: a <menu> element plus one <menuitem> (and
        # matching action tuple) per plugin, sorted by plugin name.
        ofile.write('<menu action="%s">' % action_key)
        actions.append((action_key, None, label))
        for pdata in sorted(pdatas, key=lambda x: x.name):
            new_key = pdata.id.replace(' ', '-')
            menu_name = ("%s...") % pdata.name
            ofile.write('<menuitem action="%s"/>' % new_key)
            actions.append((new_key, None, menu_name, None, None,
                            func(pdata, self.dbstate, self.uistate)))
        ofile.write('</menu>')

    # Sort categories, skipping the unsupported
    catlist = sorted(item for item in hash_data if item != _UNSUPPORTED)
    for key in catlist:
        write_category(key[0].replace(' ', '-'), key[1], hash_data[key])

    # If there are any unsupported items we add separator
    # and the unsupported category at the end of the menu
    if _UNSUPPORTED in hash_data:
        ofile.write('<separator/>')
        write_category(_UNSUPPORTED[0], _UNSUPPORTED[1],
                       hash_data[_UNSUPPORTED])

    ofile.write('</placeholder></menu></menubar></ui>')
    return (ofile.getvalue(), actions)
def display_about_box(self, obj):
    """Display the About box."""
    about = GrampsAboutDialog(self.uistate.window)
    about.run()
    # Modal dialog: destroy it as soon as run() returns.
    about.destroy()
def get_available_views(self):
    """
    Query the views and determine what views to show and in which order

    :Returns: a list of lists containing tuples (view_id, viewclass)
    """
    pmgr = GuiPluginManager.get_instance()
    view_list = pmgr.get_reg_views()
    viewstoshow = defaultdict(list)
    for pdata in view_list:
        mod = pmgr.load_plugin(pdata)
        if not mod or not hasattr(mod, pdata.viewclass):
            #import of plugin failed
            try:
                lasterror = pmgr.get_fail_list()[-1][1][1]
            except:
                # NOTE(review): bare except — also hides unexpected errors
                # (e.g. an empty fail list raising IndexError); consider
                # narrowing to a specific exception type.
                lasterror = '*** No error found, '
                lasterror += 'probably error in gpr.py file ***'
            ErrorDialog(
                _('Failed Loading View'),
                _('The view %(name)s did not load and reported an error.'
                  '\n\n%(error_msg)s\n\n'
                  'If you are unable to fix the fault yourself then you '
                  'can submit a bug at %(gramps_bugtracker_url)s '
                  'or contact the view author (%(firstauthoremail)s).\n\n'
                  'If you do not want Gramps to try and load this view '
                  'again, you can hide it by using the Plugin Manager '
                  'on the Help menu.'
                 ) % {'name': pdata.name,
                      'gramps_bugtracker_url': URL_BUGHOME,
                      'firstauthoremail': pdata.authors_email[0]
                      if pdata.authors_email else '...',
                      'error_msg': lasterror},
                parent=self.uistate.window)
            continue
        viewclass = getattr(mod, pdata.viewclass)
        # pdata.category is (string, trans-string):
        if pdata.order == START:
            viewstoshow[pdata.category[0]].insert(0, (pdata, viewclass))
        else:
            viewstoshow[pdata.category[0]].append((pdata, viewclass))
    # First, get those in order defined, if exists:
    resultorder = [viewstoshow[cat]
                   for cat in config.get("interface.view-categories")
                   if cat in viewstoshow]
    # Next, get the rest in some order:
    resultorder.extend(viewstoshow[cat]
                       for cat in sorted(viewstoshow.keys())
                       if viewstoshow[cat] not in resultorder)
    return resultorder
# Module-level Help-menu callbacks: each simply opens a wiki page or URL.
def key_bindings(obj):
    """
    Display key bindings
    """
    display_help(webpage=WIKI_HELP_PAGE_KEY)

def manual_activate(obj):
    """
    Display the Gramps manual
    """
    display_help(webpage=WIKI_HELP_PAGE_MAN)

def report_bug_activate(obj):
    """
    Display the bug tracker web site
    """
    display_url(URL_BUGTRACKER)

def home_page_activate(obj):
    """
    Display the Gramps home page
    """
    display_url(URL_HOMEPAGE)

def mailing_lists_activate(obj):
    """
    Display the mailing list web page
    """
    display_url(URL_MAILINGLIST)

def extra_plugins_activate(obj):
    """
    Display the wiki page with extra plugins
    """
    display_url(URL_WIKISTRING+WIKI_EXTRAPLUGINS)

def faq_activate(obj):
    """
    Display FAQ
    """
    display_help(webpage=WIKI_HELP_PAGE_FAQ)
def run_plugin(pdata, dbstate, uistate):
    """
    Run a plugin based on its PluginData:

    1. load the plugin module;
    2. run the report, or launch the tool GUI.
    """
    pmgr = GuiPluginManager.get_instance()
    mod = pmgr.load_plugin(pdata)
    if not mod:
        #import of plugin failed
        failed = pmgr.get_fail_list()
        if failed:
            error_msg = failed[-1][1][1]
        else:
            error_msg = "(no error message)"
        ErrorDialog(
            _('Failed Loading Plugin'),
            _('The plugin %(name)s did not load and reported an error.\n\n'
              '%(error_msg)s\n\n'
              'If you are unable to fix the fault yourself then you can '
              'submit a bug at %(gramps_bugtracker_url)s or contact '
              'the plugin author (%(firstauthoremail)s).\n\n'
              'If you do not want Gramps to try and load this plugin again, '
              'you can hide it by using the Plugin Manager on the '
              'Help menu.') % {'name' : pdata.name,
                               'gramps_bugtracker_url' : URL_BUGHOME,
                               'firstauthoremail' : pdata.authors_email[0]
                                                    if pdata.authors_email
                                                    else '...',
                               'error_msg' : error_msg},
            parent=uistate.window)
        return

    if pdata.ptype == REPORT:
        report(dbstate, uistate, uistate.get_active('Person'),
               getattr(mod, pdata.reportclass),
               getattr(mod, pdata.optionclass),
               pdata.name, pdata.id,
               pdata.category, pdata.require_active)
    else:
        tool.gui_tool(dbstate=dbstate, user=User(uistate=uistate),
                      tool_class=getattr(mod, pdata.toolclass),
                      options_class=getattr(mod, pdata.optionclass),
                      translated_name=pdata.name,
                      name=pdata.id,
                      category=pdata.category,
                      callback=dbstate.db.request_rebuild)
    # Collect the oldest generation to free objects left behind by the
    # plugin run.
    gc.collect(2)
def make_plugin_callback(pdata, dbstate, uistate):
    """
    Makes a callback for a report/tool menu item

    The returned callable takes the activating widget (ignored) and
    launches the plugin described by *pdata*.
    """
    def _activate(_widget):
        return run_plugin(pdata, dbstate, uistate)
    return _activate
def views_to_show(views, use_last=True):
    """
    Determine based on preference setting which views should be shown

    :param views: list of category lists of (pdata, viewclass) pairs.
    :param use_last: when True, consult the saved last-view preferences;
        otherwise every category defaults to its first view.
    :returns: (current category index, current view index within it,
        list of default view indices per category).
    """
    current_cat = 0
    current_cat_view = 0
    default_cat_views = [0] * len(views)
    if use_last:
        current_page_id = config.get('preferences.last-view')
        default_page_ids = config.get('preferences.last-views')
        found = False
        for indexcat, cat_views in enumerate(views):
            for cat_view, (pdata, page_def) in enumerate(cat_views):
                if not found and pdata.id == current_page_id:
                    # This is the view that was active last session.
                    current_cat = indexcat
                    current_cat_view = cat_view
                    default_cat_views[indexcat] = cat_view
                    found = True
                    break
                if pdata.id in default_page_ids:
                    default_cat_views[indexcat] = cat_view
        if not found:
            current_cat = 0
            current_cat_view = 0
    return current_cat, current_cat_view, default_cat_views
class QuickBackup(ManagedWindow): # TODO move this class into its own module
    """Dialog that makes a quick Gramps XML backup, with or without media."""
    def __init__(self, dbstate, uistate, user):
        """
        Make a quick XML back with or without media.
        """
        self.dbstate = dbstate
        self.user = user
        ManagedWindow.__init__(self, uistate, [], self.__class__)
        window = Gtk.Dialog('',
                            self.uistate.window,
                            Gtk.DialogFlags.DESTROY_WITH_PARENT, None)
        self.set_window(window, None, _("Gramps XML Backup"))
        self.setup_configs('interface.quick-backup', 500, 150)
        close_button = window.add_button(_('_Close'),
                                         Gtk.ResponseType.CLOSE)
        ok_button = window.add_button(_('_OK'),
                                      Gtk.ResponseType.APPLY)
        vbox = window.get_content_area()
        # Row 1: backup directory path with a folder-chooser button.
        hbox = Gtk.Box()
        label = Gtk.Label(label=_("Path:"))
        label.set_justify(Gtk.Justification.LEFT)
        label.set_size_request(90, -1)
        label.set_halign(Gtk.Align.START)
        hbox.pack_start(label, False, True, 0)
        path_entry = Gtk.Entry()
        dirtext = config.get('paths.quick-backup-directory')
        path_entry.set_text(dirtext)
        hbox.pack_start(path_entry, True, True, 0)
        file_entry = Gtk.Entry()
        button = Gtk.Button()
        button.connect("clicked",
                       lambda widget:
                       self.select_backup_path(widget, path_entry))
        image = Gtk.Image()
        image.set_from_icon_name('document-open', Gtk.IconSize.BUTTON)
        image.show()
        button.add(image)
        hbox.pack_end(button, False, True, 0)
        vbox.pack_start(hbox, False, True, 0)
        # Row 2: backup file name, pre-filled from the configured template.
        hbox = Gtk.Box()
        label = Gtk.Label(label=_("File:"))
        label.set_justify(Gtk.Justification.LEFT)
        label.set_size_request(90, -1)
        label.set_halign(Gtk.Align.START)
        hbox.pack_start(label, False, True, 0)
        struct_time = time.localtime()
        file_entry.set_text(
            config.get('paths.quick-backup-filename'
                      ) % {"filename": self.dbstate.db.get_dbname(),
                           "year": struct_time.tm_year,
                           "month": struct_time.tm_mon,
                           "day": struct_time.tm_mday,
                           "hour": struct_time.tm_hour,
                           "minutes": struct_time.tm_min,
                           "seconds": struct_time.tm_sec,
                           "extension": "gpkg"})
        hbox.pack_end(file_entry, True, True, 0)
        vbox.pack_start(hbox, False, True, 0)
        # Row 3: include/exclude media, with total media size in MB.
        hbox = Gtk.Box()
        fbytes = 0
        mbytes = "0"
        for media in self.dbstate.db.iter_media():
            fullname = media_path_full(self.dbstate.db, media.get_path())
            try:
                fbytes += posixpath.getsize(fullname)
                length = len(str(fbytes))
                if fbytes <= 999999:
                    mbytes = "< 1"
                else:
                    # Truncate to whole megabytes by dropping six digits.
                    mbytes = str(fbytes)[:(length-6)]
            except OSError:
                # Missing or unreadable media file: skip it in the total.
                pass
        label = Gtk.Label(label=_("Media:"))
        label.set_justify(Gtk.Justification.LEFT)
        label.set_size_request(90, -1)
        label.set_halign(Gtk.Align.START)
        hbox.pack_start(label, False, True, 0)
        include = Gtk.RadioButton.new_with_mnemonic_from_widget(
            None, "%s (%s %s)" % (_("Include"),
                                  mbytes, _("Megabyte|MB")))
        exclude = Gtk.RadioButton.new_with_mnemonic_from_widget(include,
                                                                _("Exclude"))
        include.connect("toggled", lambda widget: self.media_toggle(widget,
                                                                    file_entry))
        include_mode = config.get('preferences.quick-backup-include-mode')
        if include_mode:
            include.set_active(True)
        else:
            exclude.set_active(True)
        hbox.pack_start(include, False, True, 0)
        hbox.pack_end(exclude, False, True, 0)
        vbox.pack_start(hbox, False, True, 0)
        self.show()
        dbackup = window.run()
        if dbackup == Gtk.ResponseType.APPLY:
            # if file exists, ask if overwrite; else abort
            basefile = file_entry.get_text()
            basefile = basefile.replace("/", r"-")
            filename = os.path.join(path_entry.get_text(), basefile)
            if os.path.exists(filename):
                question = QuestionDialog2(
                    _("Backup file already exists! Overwrite?"),
                    _("The file '%s' exists.") % filename,
                    _("Proceed and overwrite"),
                    _("Cancel the backup"),
                    parent=self.window)
                yes_no = question.run()
                if not yes_no:
                    # User cancelled: remember the chosen directory anyway.
                    current_dir = path_entry.get_text()
                    if current_dir != dirtext:
                        config.set('paths.quick-backup-directory', current_dir)
                    self.close()
                    return
            position = self.window.get_position() # crock
            window.hide()
            self.window.move(position[0], position[1])
            self.uistate.set_busy_cursor(True)
            self.uistate.pulse_progressbar(0)
            self.uistate.progress.show()
            self.uistate.push_message(self.dbstate, _("Making backup..."))
            if include.get_active():
                # Media included: write a .gpkg package.
                from gramps.plugins.export.exportpkg import PackageWriter
                writer = PackageWriter(self.dbstate.db, filename, self.user)
                writer.export()
            else:
                # Media excluded: write a compressed Gramps XML file.
                from gramps.plugins.export.exportxml import XmlWriter
                writer = XmlWriter(self.dbstate.db, self.user,
                                   strip_photos=0, compress=1)
                writer.write(filename)
            self.uistate.set_busy_cursor(False)
            self.uistate.progress.hide()
            self.uistate.push_message(self.dbstate,
                                      _("Backup saved to '%s'") % filename)
            config.set('paths.quick-backup-directory', path_entry.get_text())
        else:
            self.uistate.push_message(self.dbstate, _("Backup aborted"))
        if dbackup != Gtk.ResponseType.DELETE_EVENT:
            self.close()

    def select_backup_path(self, widget, path_entry):
        """
        Choose a backup folder. Make sure there is one highlighted in
        right pane, otherwise FileChooserDialog will hang.
        """
        fdialog = Gtk.FileChooserDialog(
            title=_("Select backup directory"),
            parent=self.window,
            action=Gtk.FileChooserAction.SELECT_FOLDER,
            buttons=(_('_Cancel'),
                     Gtk.ResponseType.CANCEL,
                     _('_Apply'),
                     Gtk.ResponseType.OK))
        mpath = path_entry.get_text()
        if not mpath:
            mpath = HOME_DIR
        fdialog.set_current_folder(os.path.dirname(mpath))
        fdialog.set_filename(os.path.join(mpath, "."))
        status = fdialog.run()
        if status == Gtk.ResponseType.OK:
            filename = fdialog.get_filename()
            if filename:
                path_entry.set_text(filename)
        fdialog.destroy()
        return True

    def media_toggle(self, widget, file_entry):
        """
        Toggles media include values in the quick backup dialog.

        Also swaps the file-name extension between .gpkg (with media)
        and .gramps (XML only) and persists the chosen mode.
        """
        include = widget.get_active()
        config.set('preferences.quick-backup-include-mode', include)
        extension = "gpkg" if include else "gramps"
        filename = file_entry.get_text()
        if "." in filename:
            base, ext = filename.rsplit(".", 1)
            file_entry.set_text("%s.%s" % (base, extension))
        else:
            file_entry.set_text("%s.%s" % (filename, extension))
|
prculley/gramps
|
gramps/gui/viewmanager.py
|
Python
|
gpl-2.0
| 76,169
|
[
"Brian"
] |
ac37dcfdcd5aed5d69f78e8b56fb1dc40514713a1e87d3a9806da16717a48183
|
import tensorflow as tf # neural network for function approximation
import gym # environment
import numpy as np # matrix operation and math functions
from gym import wrappers
import gym_morph # customized environment for cart-pole
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import time
# REINFORCE-style policy-gradient training on a custom cart-pole
# environment ("morph-v0"), repeated MAX_TEST times with different seeds.
# Uses the TensorFlow 1.x graph API (placeholders, Session, tf.contrib).
start_time = time.time()

MAX_TEST = 10

for test_num in range(1, MAX_TEST+1):
    # Hyperparameters
    RANDOM_NUMBER_SEED = test_num
    ENVIRONMENT1 = "morph-v0"
    MAX_EPISODES = 20000 # number of episodes
    EPISODE_LENGTH = 4000 # single episode length
    HIDDEN_SIZE = 16
    DISPLAY_WEIGHTS = False # Help debug weight update
    gamma = 0.99 # Discount per step
    RENDER = False # Render the cart-pole system
    VIDEO_INTERVAL = 100 # Generate a video at this interval
    CONSECUTIVE_TARGET = 100 # Including previous 100 rewards
    CONST_LR = False # Constant or decaying learing rate

    # Constant learning rate
    const_learning_rate_in = 0.0008
    # Decay learning rate
    start_learning_rate_in = 0.003
    decay_steps_in = 100
    decay_rate_in = 0.96

    DIR_PATH_SAVEFIG = "/root/cartpole_plot/"
    # The output figure name encodes the hyperparameters of the run.
    if CONST_LR:
        learning_rate = const_learning_rate_in
        file_name_savefig = "el" + str(EPISODE_LENGTH) \
            + "_hn" + str(HIDDEN_SIZE) \
            + "_clr" + str(learning_rate).replace(".", "p") \
            + "_test" + str(test_num) \
            + ".png"
    else:
        start_learning_rate = start_learning_rate_in
        decay_steps = decay_steps_in
        decay_rate = decay_rate_in
        file_name_savefig = "el" + str(EPISODE_LENGTH) \
            + "_hn" + str(HIDDEN_SIZE) \
            + "_dlr_slr" + str(start_learning_rate).replace(".", "p") \
            + "_ds" + str(decay_steps) \
            + "_dr" + str(decay_rate).replace(".", "p") \
            + "_test" + str(test_num) \
            + ".png"

    env = gym.make(ENVIRONMENT1)
    env.seed(RANDOM_NUMBER_SEED)
    np.random.seed(RANDOM_NUMBER_SEED)
    tf.set_random_seed(RANDOM_NUMBER_SEED)

    # Input and output sizes
    input_size = 4
    output_size = 2
    # input_size = env.observation_space.shape[0]
    # try:
    #     output_size = env.action_space.shape[0]
    # except AttributeError:
    #     output_size = env.action_space.n

    # Tensorflow network setup
    x = tf.placeholder(tf.float32, shape=(None, input_size))
    y = tf.placeholder(tf.float32, shape=(None, 1))
    if not CONST_LR:
        # decay learning rate
        global_step = tf.Variable(0, trainable=False)
        learning_rate = tf.train.exponential_decay(start_learning_rate, global_step, decay_steps, decay_rate, staircase=False)
    expected_returns = tf.placeholder(tf.float32, shape=(None, 1))
    # Xavier (2010) weights initializer for uniform distribution:
    # x = sqrt(6. / (in + out)); [-x, x]
    w_init = tf.contrib.layers.xavier_initializer()
    hidden_W = tf.get_variable("W1", shape=[input_size, HIDDEN_SIZE],
                               initializer=w_init)
    hidden_B = tf.Variable(tf.zeros(HIDDEN_SIZE))
    dist_W = tf.get_variable("W2", shape=[HIDDEN_SIZE, output_size],
                             initializer=w_init)
    dist_B = tf.Variable(tf.zeros(output_size))
    hidden = tf.nn.elu(tf.matmul(x, hidden_W) + hidden_B)
    dist = tf.tanh(tf.matmul(hidden, dist_W) + dist_B)
    dist_soft = tf.nn.log_softmax(dist)
    # Bernoulli policy over two actions, parameterized by the first
    # log-softmax component.
    dist_in = tf.matmul(dist_soft, tf.Variable([[1.], [0.]]))
    pi = tf.contrib.distributions.Bernoulli(dist_in)
    pi_sample = pi.sample()
    log_pi = pi.log_prob(y)
    if CONST_LR:
        optimizer = tf.train.RMSPropOptimizer(learning_rate)
        train = optimizer.minimize(-1.0 * expected_returns * log_pi)
    else:
        optimizer = tf.train.RMSPropOptimizer(learning_rate)
        train = optimizer.minimize(-1.0 * expected_returns * log_pi, global_step=global_step)
    # saver = tf.train.Saver()

    # Create and initialize a session
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())

    def run_episode(environment, ep, render=False):
        """Roll out one episode; return rewards, states and actions."""
        raw_reward = 0
        discounted_reward = 0
        cumulative_reward = []
        discount = 1.0
        states = []
        actions = []
        obs = environment.reset()
        done = False
        while not done:
            states.append(obs)
            cumulative_reward.append(discounted_reward)
            if render and ((ep % VIDEO_INTERVAL) == 0):
                environment.render()
            action = sess.run(pi_sample, feed_dict={x: [obs]})[0]
            actions.append(action)
            obs, reward, done, info = env.step(action[0])
            raw_reward += reward
            if reward > 0:
                discounted_reward += reward * discount
            else:
                # Negative rewards are added undiscounted.
                discounted_reward += reward
            discount *= gamma
        return raw_reward, discounted_reward, cumulative_reward, states, actions

    def display_weights(session):
        """Print the current network weights (debug helper)."""
        w1 = session.run(hidden_W)
        b1 = session.run(hidden_B)
        w2 = session.run(dist_W)
        b2 = session.run(dist_B)
        print(w1, b1, w2, b2)

    returns = []
    mean_returns = []
    for ep in range(MAX_EPISODES):
        raw_G, discounted_G, cumulative_G, ep_states, ep_actions = \
            run_episode(env, ep, RENDER)
        # Advantage: total discounted return minus reward-to-date.
        expected_R = np.transpose([discounted_G - np.array(cumulative_G)])
        sess.run(train, feed_dict={x: ep_states, y: ep_actions,
                                   expected_returns: expected_R})
        if DISPLAY_WEIGHTS:
            display_weights(sess)
        returns.append(raw_G)
        running_returns = returns[max(0, ep-CONSECUTIVE_TARGET):(ep+1)]
        mean_return = np.mean(running_returns)
        mean_returns.append(mean_return)
        if CONST_LR:
            msg = "Test: {}/{}, Episode: {}/{}, Time: {}, Learning rate: {}, Return: {}, Last {} returns mean: {}"
            msg = msg.format(test_num, MAX_TEST, ep+1, MAX_EPISODES, time.strftime('%H:%M:%S', time.gmtime(time.time()-start_time)), learning_rate, raw_G, CONSECUTIVE_TARGET, mean_return)
            print(msg)
        else:
            msg = "Test: {}/{}, Episode: {}/{}, Time: {}, Learning rate: {}, Return: {}, Last {} returns mean: {}"
            msg = msg.format(test_num, MAX_TEST, ep+1, MAX_EPISODES, time.strftime('%H:%M:%S', time.gmtime(time.time()-start_time)), sess.run(learning_rate), raw_G, CONSECUTIVE_TARGET, mean_return)
            print(msg)

    env.close() # close openai gym environment
    tf.reset_default_graph() # clear tensorflow graph

    # Plot
    # plt.style.use('ggplot')
    plt.style.use('dark_background')
    episodes_plot = np.arange(MAX_EPISODES)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    fig.subplots_adjust(top=0.85)
    if CONST_LR:
        ax.set_title("The Cart-Pole Problem Test %i \n \
Episode Length: %i \
Discount Factor: %.2f \n \
Number of Hidden Neuron: %i \
Constant Learning Rate: %.5f" % (test_num, EPISODE_LENGTH, gamma, HIDDEN_SIZE, learning_rate))
    else:
        ax.set_title("The Cart-Pole Problem Test %i \n \
EpisodeLength: %i DiscountFactor: %.2f NumHiddenNeuron: %i \n \
Decay Learning Rate: (start: %.5f, steps: %i, rate: %.2f)" % (test_num, EPISODE_LENGTH, gamma, HIDDEN_SIZE, start_learning_rate, decay_steps, decay_rate))
    ax.set_xlabel("Episode")
    ax.set_ylabel("Return")
    ax.set_ylim((0, EPISODE_LENGTH))
    ax.grid(linestyle='--')
    ax.plot(episodes_plot, returns, label='Instant return')
    ax.plot(episodes_plot, mean_returns, label='Averaged return')
    legend = ax.legend(loc='best', shadow=True)
    fig.savefig(DIR_PATH_SAVEFIG + file_name_savefig, dpi=500)
    # plt.show()
|
GitYiheng/reinforcement_learning_test
|
test03_monte_carlo/t41_rlvps03.py
|
Python
|
mit
| 7,665
|
[
"NEURON"
] |
b54e596fd106a0ae2c513343867dd94ce0ed46bf610bdcebf4b71f750574f26a
|
# -*- coding: utf-8 -*-
"""
tests.datastructures
~~~~~~~~~~~~~~~~~~~~
Tests the functionality of the provided Werkzeug
datastructures.
Classes prefixed with an underscore are mixins and are not discovered by
the test runner.
TODO:
- FileMultiDict
- Immutable types undertested
- Split up dict tests
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import pytest
from tests import strict_eq
import pickle
from contextlib import contextmanager
from copy import copy, deepcopy
from werkzeug import datastructures
from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \
iterlistvalues, text_type, PY2
from werkzeug.exceptions import BadRequestKeyError
class TestNativeItermethods(object):
    """Tests for the ``native_itermethods`` class decorator."""

    def test_basic(self):
        # The decorator exposes keys/values/items natively for the
        # running Python version, forwarding extra arguments.
        @datastructures.native_itermethods(['keys', 'values', 'items'])
        class StupidDict(object):
            def keys(self, multi=1):
                return iter(['a', 'b', 'c'] * multi)

            def values(self, multi=1):
                return iter([1, 2, 3] * multi)

            def items(self, multi=1):
                return iter(zip(iterkeys(self, multi=multi),
                                itervalues(self, multi=multi)))

        d = StupidDict()
        expected_keys = ['a', 'b', 'c']
        expected_values = [1, 2, 3]
        expected_items = list(zip(expected_keys, expected_values))

        assert list(iterkeys(d)) == expected_keys
        assert list(itervalues(d)) == expected_values
        assert list(iteritems(d)) == expected_items

        assert list(iterkeys(d, 2)) == expected_keys * 2
        assert list(itervalues(d, 2)) == expected_values * 2
        assert list(iteritems(d, 2)) == expected_items * 2
class _MutableMultiDictTests(object):
    """Shared tests for mutable MultiDict implementations (mixin)."""

    # Concrete subclasses set this to the implementation under test.
    storage_class = None

    def test_pickle(self):
        """Round-trip the dict through every available pickle protocol."""
        cls = self.storage_class

        def create_instance(module=None):
            if module is None:
                d = cls()
            else:
                # Temporarily change the class's module so pickling uses
                # the alternative import path.
                old = cls.__module__
                cls.__module__ = module
                d = cls()
                cls.__module__ = old
            d.setlist(b'foo', [1, 2, 3, 4])
            d.setlist(b'bar', b'foo bar baz'.split())
            return d

        for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
            d = create_instance()
            s = pickle.dumps(d, protocol)
            ud = pickle.loads(s)
            assert type(ud) == type(d)
            assert ud == d
            alternative = pickle.dumps(create_instance('werkzeug'), protocol)
            assert pickle.loads(alternative) == d
            ud[b'newkey'] = b'bla'
            assert ud != d

    def test_basic_interface(self):
        md = self.storage_class()
        assert isinstance(md, dict)

        mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3),
                   ('a', 1), ('a', 3), ('d', 4), ('c', 3)]
        md = self.storage_class(mapping)

        # simple getitem gives the first value
        assert md['a'] == 1
        assert md['c'] == 3
        with pytest.raises(KeyError):
            md['e']
        assert md.get('a') == 1

        # list getitem
        assert md.getlist('a') == [1, 2, 1, 3]
        assert md.getlist('d') == [3, 4]
        # do not raise if key not found
        assert md.getlist('x') == []

        # simple setitem overwrites all values
        md['a'] = 42
        assert md.getlist('a') == [42]

        # list setitem
        md.setlist('a', [1, 2, 3])
        assert md['a'] == 1
        assert md.getlist('a') == [1, 2, 3]
        # verify that it does not change original lists
        l1 = [1, 2, 3]
        md.setlist('a', l1)
        del l1[:]
        assert md['a'] == 1

        # setdefault, setlistdefault
        assert md.setdefault('u', 23) == 23
        assert md.getlist('u') == [23]
        del md['u']

        md.setlist('u', [-1, -2])

        # delitem
        del md['u']
        with pytest.raises(KeyError):
            md['u']
        del md['d']
        assert md.getlist('d') == []

        # keys, values, items, lists
        assert list(sorted(md.keys())) == ['a', 'b', 'c']
        assert list(sorted(iterkeys(md))) == ['a', 'b', 'c']
        assert list(sorted(itervalues(md))) == [1, 2, 3]
        assert list(sorted(itervalues(md))) == [1, 2, 3]
        assert list(sorted(md.items())) == [('a', 1), ('b', 2), ('c', 3)]
        assert list(sorted(md.items(multi=True))) == \
            [('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)]
        assert list(sorted(iteritems(md))) == [('a', 1), ('b', 2), ('c', 3)]
        assert list(sorted(iteritems(md, multi=True))) == \
            [('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)]
        assert list(sorted(md.lists())) == \
            [('a', [1, 2, 3]), ('b', [2]), ('c', [3])]
        assert list(sorted(iterlists(md))) == \
            [('a', [1, 2, 3]), ('b', [2]), ('c', [3])]

        # copy method
        c = md.copy()
        assert c['a'] == 1
        assert c.getlist('a') == [1, 2, 3]

        # copy method 2
        c = copy(md)
        assert c['a'] == 1
        assert c.getlist('a') == [1, 2, 3]

        # deepcopy method
        c = md.deepcopy()
        assert c['a'] == 1
        assert c.getlist('a') == [1, 2, 3]

        # deepcopy method 2
        c = deepcopy(md)
        assert c['a'] == 1
        assert c.getlist('a') == [1, 2, 3]

        # update with a multidict
        od = self.storage_class([('a', 4), ('a', 5), ('y', 0)])
        md.update(od)
        assert md.getlist('a') == [1, 2, 3, 4, 5]
        assert md.getlist('y') == [0]

        # update with a regular dict
        md = c
        od = {'a': 4, 'y': 0}
        md.update(od)
        assert md.getlist('a') == [1, 2, 3, 4]
        assert md.getlist('y') == [0]

        # pop, poplist, popitem, popitemlist
        assert md.pop('y') == 0
        assert 'y' not in md
        assert md.poplist('a') == [1, 2, 3, 4]
        assert 'a' not in md
        assert md.poplist('missing') == []

        # remaining: b=2, c=3
        popped = md.popitem()
        assert popped in [('b', 2), ('c', 3)]
        popped = md.popitemlist()
        assert popped in [('b', [2]), ('c', [3])]

        # type conversion
        md = self.storage_class({'a': '4', 'b': ['2', '3']})
        assert md.get('a', type=int) == 4
        assert md.getlist('b', type=int) == [2, 3]

        # repr
        md = self.storage_class([('a', 1), ('a', 2), ('b', 3)])
        assert "('a', 1)" in repr(md)
        assert "('a', 2)" in repr(md)
        assert "('b', 3)" in repr(md)

        # add and getlist
        md.add('c', '42')
        md.add('c', '23')
        assert md.getlist('c') == ['42', '23']
        md.add('c', 'blah')
        assert md.getlist('c', type=int) == [42, 23]

        # setdefault
        md = self.storage_class()
        md.setdefault('x', []).append(42)
        md.setdefault('x', []).append(23)
        assert md['x'] == [42, 23]

        # to dict
        md = self.storage_class()
        md['foo'] = 42
        md.add('bar', 1)
        md.add('bar', 2)
        assert md.to_dict() == {'foo': 42, 'bar': 1}
        assert md.to_dict(flat=False) == {'foo': [42], 'bar': [1, 2]}

        # popitem from empty dict
        with pytest.raises(KeyError):
            self.storage_class().popitem()

        with pytest.raises(KeyError):
            self.storage_class().popitemlist()

        # key errors are of a special type
        with pytest.raises(BadRequestKeyError):
            self.storage_class()[42]

        # setlist works
        md = self.storage_class()
        md['foo'] = 42
        md.setlist('foo', [1, 2])
        assert md.getlist('foo') == [1, 2]
class _ImmutableDictTests(object):
    """Shared tests for immutable dict implementations (mixin)."""

    # Concrete subclasses set this to the implementation under test.
    storage_class = None

    def test_follows_dict_interface(self):
        cls = self.storage_class

        data = {'foo': 1, 'bar': 2, 'baz': 3}
        d = cls(data)

        assert d['foo'] == 1
        assert d['bar'] == 2
        assert d['baz'] == 3
        assert sorted(d.keys()) == ['bar', 'baz', 'foo']
        assert 'foo' in d
        assert 'foox' not in d
        assert len(d) == 3

    def test_copies_are_mutable(self):
        cls = self.storage_class
        immutable = cls({'a': 1})
        # Mutating the immutable original must fail ...
        with pytest.raises(TypeError):
            immutable.pop('a')

        # ... but a copy is a plain mutable dict.
        mutable = immutable.copy()
        mutable.pop('a')
        assert 'a' in immutable
        assert mutable is not immutable
        # copy() of an immutable object may return the object itself.
        assert copy(immutable) is immutable

    def test_dict_is_hashable(self):
        cls = self.storage_class
        immutable = cls({'a': 1, 'b': 2})
        immutable2 = cls({'a': 2, 'b': 2})
        x = set([immutable])
        assert immutable in x
        assert immutable2 not in x
        x.discard(immutable)
        assert immutable not in x
        assert immutable2 not in x
        x.add(immutable2)
        assert immutable not in x
        assert immutable2 in x
        x.add(immutable)
        assert immutable in x
        assert immutable2 in x
class TestImmutableTypeConversionDict(_ImmutableDictTests):
    """Run the immutable-dict suite against ImmutableTypeConversionDict."""
    storage_class = datastructures.ImmutableTypeConversionDict


class TestImmutableMultiDict(_ImmutableDictTests):
    """Run the immutable-dict suite against ImmutableMultiDict."""
    storage_class = datastructures.ImmutableMultiDict

    def test_multidict_is_hashable(self):
        cls = self.storage_class
        immutable = cls({'a': [1, 2], 'b': 2})
        immutable2 = cls({'a': [1], 'b': 2})
        x = set([immutable])
        assert immutable in x
        assert immutable2 not in x
        x.discard(immutable)
        assert immutable not in x
        assert immutable2 not in x
        x.add(immutable2)
        assert immutable not in x
        assert immutable2 in x
        x.add(immutable)
        assert immutable in x
        assert immutable2 in x


class TestImmutableDict(_ImmutableDictTests):
    """Run the immutable-dict suite against ImmutableDict."""
    storage_class = datastructures.ImmutableDict


class TestImmutableOrderedMultiDict(_ImmutableDictTests):
    """Run the immutable-dict suite against ImmutableOrderedMultiDict."""
    storage_class = datastructures.ImmutableOrderedMultiDict

    def test_ordered_multidict_is_hashable(self):
        # Same items in a different insertion order must hash differently.
        a = self.storage_class([('a', 1), ('b', 1), ('a', 2)])
        b = self.storage_class([('a', 1), ('a', 2), ('b', 1)])
        assert hash(a) != hash(b)
class TestMultiDict(_MutableMultiDictTests):
    """Run the mutable-MultiDict suite against MultiDict."""
    storage_class = datastructures.MultiDict

    def test_multidict_pop(self):
        make_d = lambda: self.storage_class({'foo': [1, 2, 3, 4]})
        d = make_d()
        # pop returns only the first value for the key.
        assert d.pop('foo') == 1
        assert not d
        d = make_d()
        assert d.pop('foo', 32) == 1
        assert not d
        d = make_d()
        assert d.pop('foos', 32) == 32
        assert d

        with pytest.raises(KeyError):
            d.pop('foos')

    def test_setlistdefault(self):
        md = self.storage_class()
        assert md.setlistdefault('u', [-1, -2]) == [-1, -2]
        assert md.getlist('u') == [-1, -2]
        assert md['u'] == -1

    def test_iter_interfaces(self):
        mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3),
                   ('a', 1), ('a', 3), ('d', 4), ('c', 3)]
        md = self.storage_class(mapping)
        assert list(zip(md.keys(), md.listvalues())) == list(md.lists())
        assert list(zip(md, iterlistvalues(md))) == list(iterlists(md))
        assert list(zip(iterkeys(md), iterlistvalues(md))) == \
            list(iterlists(md))

    @pytest.mark.skipif(not PY2, reason='viewmethods work only for the 2-nd version.')
    def test_view_methods(self):
        mapping = [('a', 'b'), ('a', 'c')]
        md = self.storage_class(mapping)
        vi = md.viewitems()
        vk = md.viewkeys()
        vv = md.viewvalues()

        assert list(vi) == list(md.items())
        assert list(vk) == list(md.keys())
        assert list(vv) == list(md.values())

        # Views must reflect later mutation of the dict.
        md['k'] = 'n'

        assert list(vi) == list(md.items())
        assert list(vk) == list(md.keys())
        assert list(vv) == list(md.values())

    @pytest.mark.skipif(not PY2, reason='viewmethods work only for the 2-nd version.')
    def test_viewitems_with_multi(self):
        mapping = [('a', 'b'), ('a', 'c')]
        md = self.storage_class(mapping)
        vi = md.viewitems(multi=True)

        assert list(vi) == list(md.items(multi=True))

        md['k'] = 'n'

        assert list(vi) == list(md.items(multi=True))

    def test_getitem_raise_badrequestkeyerror_for_empty_list_value(self):
        mapping = [('a', 'b'), ('a', 'c')]
        md = self.storage_class(mapping)

        # A key whose value list is empty behaves like a missing key.
        md.setlistdefault('empty', [])

        with pytest.raises(KeyError):
            md['empty']
class TestOrderedMultiDict(_MutableMultiDictTests):
    """Run the mutable-MultiDict suite against OrderedMultiDict."""
    storage_class = datastructures.OrderedMultiDict

    def test_ordered_interface(self):
        cls = self.storage_class

        d = cls()
        assert not d
        d.add('foo', 'bar')
        assert len(d) == 1
        d.add('foo', 'baz')
        # len counts distinct keys, not values.
        assert len(d) == 1
        assert list(iteritems(d)) == [('foo', 'bar')]
        assert list(d) == ['foo']
        assert list(iteritems(d, multi=True)) == \
            [('foo', 'bar'), ('foo', 'baz')]
        del d['foo']
        assert not d
        assert len(d) == 0
        assert list(d) == []

        d.update([('foo', 1), ('foo', 2), ('bar', 42)])
        d.add('foo', 3)
        assert d.getlist('foo') == [1, 2, 3]
        assert d.getlist('bar') == [42]
        assert list(iteritems(d)) == [('foo', 1), ('bar', 42)]

        expected = ['foo', 'bar']

        assert list(d.keys()) == expected
        assert list(d) == expected
        assert list(iterkeys(d)) == expected

        # multi=True yields every (key, value) pair in insertion order.
        assert list(iteritems(d, multi=True)) == \
            [('foo', 1), ('foo', 2), ('bar', 42), ('foo', 3)]
        assert len(d) == 2

        assert d.pop('foo') == 1
        assert d.pop('blafasel', None) is None
        assert d.pop('blafasel', 42) == 42
        assert len(d) == 1
        assert d.poplist('bar') == [42]
        assert not d

        d.get('missingkey') is None

        d.add('foo', 42)
        d.add('foo', 23)
        d.add('bar', 2)
        d.add('foo', 42)

        assert d == datastructures.MultiDict(d)
        id = self.storage_class(d)
        assert d == id
        d.add('foo', 2)
        assert d != id

        d.update({'blah': [1, 2, 3]})
        assert d['blah'] == 1
        assert d.getlist('blah') == [1, 2, 3]

        # setlist works
        d = self.storage_class()
        d['foo'] = 42
        d.setlist('foo', [1, 2])
        assert d.getlist('foo') == [1, 2]

        with pytest.raises(BadRequestKeyError):
            d.pop('missing')
        with pytest.raises(BadRequestKeyError):
            d['missing']

        # popping
        d = self.storage_class()
        d.add('foo', 23)
        d.add('foo', 42)
        d.add('foo', 1)
        assert d.popitem() == ('foo', 23)
        with pytest.raises(BadRequestKeyError):
            d.popitem()
        assert not d

        d.add('foo', 23)
        d.add('foo', 42)
        d.add('foo', 1)
        assert d.popitemlist() == ('foo', [23, 42, 1])

        with pytest.raises(BadRequestKeyError):
            d.popitemlist()

    def test_iterables(self):
        a = datastructures.MultiDict((("key_a", "value_a"),))
        b = datastructures.MultiDict((("key_b", "value_b"),))
        ab = datastructures.CombinedMultiDict((a, b))

        assert sorted(ab.lists()) == [('key_a', ['value_a']), ('key_b', ['value_b'])]
        assert sorted(ab.listvalues()) == [['value_a'], ['value_b']]
        assert sorted(ab.keys()) == ["key_a", "key_b"]

        assert sorted(iterlists(ab)) == [('key_a', ['value_a']), ('key_b', ['value_b'])]
        assert sorted(iterlistvalues(ab)) == [['value_a'], ['value_b']]
        assert sorted(iterkeys(ab)) == ["key_a", "key_b"]
class TestCombinedMultiDict(object):
    """Tests for CombinedMultiDict: a read-only view over several MultiDicts."""

    storage_class = datastructures.CombinedMultiDict

    def test_basic_interface(self):
        d1 = datastructures.MultiDict([('foo', '1')])
        d2 = datastructures.MultiDict([('bar', '2'), ('bar', '3')])
        d = self.storage_class([d1, d2])

        # lookup: keys resolve across all wrapped dicts, first hit wins
        assert d['foo'] == '1'
        assert d['bar'] == '2'
        assert d.getlist('bar') == ['2', '3']

        assert sorted(d.items()) == [('bar', '2'), ('foo', '1')]
        assert sorted(d.items(multi=True)) == \
            [('bar', '2'), ('bar', '3'), ('foo', '1')]
        assert 'missingkey' not in d
        assert 'foo' in d

        # type lookup: values are coerced via the ``type`` callable
        assert d.get('foo', type=int) == 1
        assert d.getlist('bar', type=int) == [2, 3]

        # get key errors for missing stuff
        with pytest.raises(KeyError):
            d['missing']

        # make sure that they are immutable
        with pytest.raises(TypeError):
            d['foo'] = 'blub'

        # copies are immutable (copy() returns another read-only structure)
        d = d.copy()
        with pytest.raises(TypeError):
            d['foo'] = 'blub'

        # make sure lists merges values from every wrapped dict per key
        md1 = datastructures.MultiDict((("foo", "bar"),))
        md2 = datastructures.MultiDict((("foo", "blafasel"),))
        x = self.storage_class((md1, md2))
        assert list(iterlists(x)) == [('foo', ['bar', 'blafasel'])]

    def test_length(self):
        # length counts distinct keys across the live underlying dicts
        d1 = datastructures.MultiDict([('foo', '1')])
        d2 = datastructures.MultiDict([('bar', '2')])
        assert len(d1) == len(d2) == 1
        d = self.storage_class([d1, d2])
        assert len(d) == 2
        # the combined view reflects mutations of the wrapped dicts
        d1.clear()
        assert len(d1) == 0
        assert len(d) == 1
class TestHeaders(object):
    """Tests for the ordered, case-insensitive Headers datastructure."""

    storage_class = datastructures.Headers

    def test_basic_interface(self):
        headers = self.storage_class()
        headers.add('Content-Type', 'text/plain')
        headers.add('X-Foo', 'bar')
        # membership is case-insensitive
        assert 'x-Foo' in headers
        assert 'Content-type' in headers

        # __setitem__ replaces all existing values for the key
        headers['Content-Type'] = 'foo/bar'
        assert headers['Content-Type'] == 'foo/bar'
        assert len(headers.getlist('Content-Type')) == 1

        # list conversion
        assert headers.to_wsgi_list() == [
            ('Content-Type', 'foo/bar'),
            ('X-Foo', 'bar')
        ]
        # str() renders an HTTP header block terminated by a blank line
        assert str(headers) == (
            "Content-Type: foo/bar\r\n"
            "X-Foo: bar\r\n"
            "\r\n"
        )
        assert str(self.storage_class()) == "\r\n"

        # extended add: keyword args become ``; key=value`` parameters
        headers.add('Content-Disposition', 'attachment', filename='foo')
        assert headers['Content-Disposition'] == 'attachment; filename=foo'

        # parameter values get quoted/escaped as needed
        headers.add('x', 'y', z='"')
        assert headers['x'] == r'y; z="\""'

    def test_defaults_and_conversion(self):
        # defaults
        headers = self.storage_class([
            ('Content-Type', 'text/plain'),
            ('X-Foo', 'bar'),
            ('X-Bar', '1'),
            ('X-Bar', '2')
        ])
        assert headers.getlist('x-bar') == ['1', '2']
        assert headers.get('x-Bar') == '1'
        assert headers.get('Content-Type') == 'text/plain'

        # setdefault returns the existing value, or stores the given one
        assert headers.setdefault('X-Foo', 'nope') == 'bar'
        assert headers.setdefault('X-Bar', 'nope') == '1'
        assert headers.setdefault('X-Baz', 'quux') == 'quux'
        assert headers.setdefault('X-Baz', 'nope') == 'quux'
        headers.pop('X-Baz')

        # type conversion
        assert headers.get('x-bar', type=int) == 1
        assert headers.getlist('x-bar', type=int) == [1, 2]

        # list like operations (indexing, slicing, slice deletion)
        assert headers[0] == ('Content-Type', 'text/plain')
        assert headers[:1] == self.storage_class([('Content-Type', 'text/plain')])
        del headers[:2]
        del headers[-1]
        assert headers == self.storage_class([('X-Bar', '1')])

    def test_copying(self):
        # copy() must be independent of the original
        a = self.storage_class([('foo', 'bar')])
        b = a.copy()
        a.add('foo', 'baz')
        assert a.getlist('foo') == ['bar', 'baz']
        assert b.getlist('foo') == ['bar']

    def test_popping(self):
        headers = self.storage_class([('a', 1)])
        assert headers.pop('a') == 1
        assert headers.pop('b', 2) == 2

        # pop without default raises for unknown keys
        with pytest.raises(KeyError):
            headers.pop('c')

    def test_set_arguments(self):
        # set() supports the same keyword-parameter expansion as add()
        a = self.storage_class()
        a.set('Content-Disposition', 'useless')
        a.set('Content-Disposition', 'attachment', filename='foo')
        assert a['Content-Disposition'] == 'attachment; filename=foo'

    def test_reject_newlines(self):
        # header injection guard: any CR/LF in a value must raise
        h = self.storage_class()

        for variation in 'foo\nbar', 'foo\r\nbar', 'foo\rbar':
            with pytest.raises(ValueError):
                h['foo'] = variation
            with pytest.raises(ValueError):
                h.add('foo', variation)
            with pytest.raises(ValueError):
                h.add('foo', 'test', option=variation)
            with pytest.raises(ValueError):
                h.set('foo', variation)
            with pytest.raises(ValueError):
                h.set('foo', 'test', option=variation)

    def test_slicing(self):
        # there's nothing wrong with these being native strings
        # Headers doesn't care about the data types
        h = self.storage_class()
        h.set('X-Foo-Poo', 'bleh')
        h.set('Content-Type', 'application/whocares')
        h.set('X-Forwarded-For', '192.168.0.123')
        # slice assignment filters the header list in place
        h[:] = [(k, v) for k, v in h if k.startswith(u'X-')]
        assert list(h) == [
            ('X-Foo-Poo', 'bleh'),
            ('X-Forwarded-For', '192.168.0.123')
        ]

    def test_bytes_operations(self):
        h = self.storage_class()
        h.set('X-Foo-Poo', 'bleh')
        h.set('X-Whoops', b'\xff')

        # as_bytes=True returns the raw byte value
        assert h.get('x-foo-poo', as_bytes=True) == b'bleh'
        assert h.get('x-whoops', as_bytes=True) == b'\xff'

    def test_to_wsgi_list(self):
        # WSGI wants native strings: bytes on Python 2, unicode on Python 3
        h = self.storage_class()
        h.set(u'Key', u'Value')
        for key, value in h.to_wsgi_list():
            if PY2:
                strict_eq(key, b'Key')
                strict_eq(value, b'Value')
            else:
                strict_eq(key, u'Key')
                strict_eq(value, u'Value')
class TestEnvironHeaders(object):
    """Tests for EnvironHeaders: a read-only Headers view over a WSGI environ."""

    storage_class = datastructures.EnvironHeaders

    def test_basic_interface(self):
        # this happens in multiple WSGI servers because they
        # use a vary naive way to convert the headers;
        # HTTP_CONTENT_TYPE/HTTP_CONTENT_LENGTH duplicate the unprefixed
        # CGI keys and must be ignored in favor of CONTENT_TYPE/CONTENT_LENGTH.
        broken_env = {
            'HTTP_CONTENT_TYPE': 'text/html',
            'CONTENT_TYPE': 'text/html',
            'HTTP_CONTENT_LENGTH': '0',
            'CONTENT_LENGTH': '0',
            'HTTP_ACCEPT': '*',
            'wsgi.version': (1, 0)
        }
        headers = self.storage_class(broken_env)
        assert headers
        assert len(headers) == 3
        assert sorted(headers) == [
            ('Accept', '*'),
            ('Content-Length', '0'),
            ('Content-Type', 'text/html')
        ]
        # non-header environ keys produce an empty, falsy view
        assert not self.storage_class({'wsgi.version': (1, 0)})
        assert len(self.storage_class({'wsgi.version': (1, 0)})) == 0

    def test_return_type_is_unicode(self):
        # environ contains native strings; we return unicode
        headers = self.storage_class({
            'HTTP_FOO': '\xe2\x9c\x93',
            'CONTENT_TYPE': 'text/plain',
        })
        assert headers['Foo'] == u"\xe2\x9c\x93"
        assert isinstance(headers['Foo'], text_type)
        assert isinstance(headers['Content-Type'], text_type)
        # iteration must yield the same unicode values as item access
        iter_output = dict(iter(headers))
        assert iter_output['Foo'] == u"\xe2\x9c\x93"
        assert isinstance(iter_output['Foo'], text_type)
        assert isinstance(iter_output['Content-Type'], text_type)

    def test_bytes_operations(self):
        # values round-trip as latin-1: raw bytes via as_bytes, text otherwise
        foo_val = '\xff'
        h = self.storage_class({
            'HTTP_X_FOO': foo_val
        })

        assert h.get('x-foo', as_bytes=True) == b'\xff'
        assert h.get('x-foo') == u'\xff'
class TestHeaderSet(object):
    """Tests for the case-insensitive, order-preserving HeaderSet."""

    storage_class = datastructures.HeaderSet

    def test_basic_interface(self):
        header_set = self.storage_class()
        header_set.add('foo')
        header_set.add('bar')

        # membership and find() ignore case
        assert 'Bar' in header_set
        assert header_set.find('foo') == 0
        assert header_set.find('BAR') == 1
        assert header_set.find('baz') < 0

        # discard() tolerates unknown entries; removal reindexes the rest
        header_set.discard('missing')
        header_set.discard('foo')
        assert header_set.find('foo') < 0
        assert header_set.find('bar') == 0

        # index() raises where find() would return a negative value
        with pytest.raises(IndexError):
            header_set.index('missing')

        assert header_set.index('bar') == 0
        assert header_set

        header_set.clear()
        assert not header_set
class TestImmutableList(object):
    """Tests for the hashable, read-only ImmutableList."""

    storage_class = datastructures.ImmutableList

    def test_list_hashable(self):
        source = (1, 2, 3, 4)
        immutable = self.storage_class(source)
        # hashes like the equivalent tuple, yet never equals it:
        # list/tuple comparison is False regardless of contents
        assert hash(source) == hash(immutable)
        assert source != immutable
def make_call_asserter(func=None):
    """Return an ``(asserter, wrapped)`` pair for counting function calls.

    ``wrapped`` increments an internal counter on every invocation and, when
    *func* is given, delegates to it and returns its result.  ``asserter`` is
    a context manager that resets the counter on entry and asserts on exit
    that exactly ``count`` calls happened inside the block.

    :param func: optional callback invoked on each call to ``wrapped``.

    >>> assert_calls, func = make_call_asserter()
    >>> with assert_calls(2):
    ...     func()
    ...     func()
    """
    # single-element list so the closures can rebind the counter (py2-safe)
    calls = [0]

    @contextmanager
    def asserter(count, msg=None):
        calls[0] = 0
        yield
        # BUG FIX: *msg* was accepted but never used; callers pass
        # descriptive messages, so surface them on assertion failure.
        assert calls[0] == count, msg

    def wrapped(*args, **kwargs):
        calls[0] += 1
        if func is not None:
            return func(*args, **kwargs)

    return asserter, wrapped
class TestCallbackDict(object):
    """Tests for CallbackDict: a dict that fires on_update on every mutation."""

    storage_class = datastructures.CallbackDict

    def test_callback_dict_reads(self):
        # read-only access must never fire the on_update callback
        assert_calls, func = make_call_asserter()
        initial = {'a': 'foo', 'b': 'bar'}
        dct = self.storage_class(initial=initial, on_update=func)
        with assert_calls(0, 'callback triggered by read-only method'):
            # read-only methods
            dct['a']
            dct.get('a')
            pytest.raises(KeyError, lambda: dct['x'])
            'a' in dct
            list(iter(dct))
            dct.copy()
        with assert_calls(0, 'callback triggered without modification'):
            # methods that may write but don't
            dct.pop('z', None)
            dct.setdefault('a')

    def test_callback_dict_writes(self):
        assert_calls, func = make_call_asserter()
        initial = {'a': 'foo', 'b': 'bar'}
        dct = self.storage_class(initial=initial, on_update=func)
        # the callback fires per mutating call, even for no-op re-assignment
        with assert_calls(8, 'callback not triggered by write method'):
            # always-write methods
            dct['z'] = 123
            dct['z'] = 123  # must trigger again
            del dct['z']
            dct.pop('b', None)
            dct.setdefault('x')
            dct.popitem()
            dct.update([])
            dct.clear()
        # failing mutations must not fire the callback
        with assert_calls(0, 'callback triggered by failed del'):
            pytest.raises(KeyError, lambda: dct.__delitem__('x'))
        with assert_calls(0, 'callback triggered by failed pop'):
            pytest.raises(KeyError, lambda: dct.pop('x'))
class TestCacheControl(object):
    """Tests for the cache-control header datastructures."""

    def test_repr(self):
        # repr() lists the directives in insertion order as key='value' pairs
        cache_control = datastructures.RequestCacheControl(
            [("max-age", "0"), ("private", "True")],
        )
        expected = "<RequestCacheControl max-age='0' private='True'>"
        assert repr(cache_control) == expected
class TestAccept(object):
    """Tests for the Accept datastructure (quality-sorted header options)."""

    storage_class = datastructures.Accept

    def test_accept_basic(self):
        accept = self.storage_class([('tinker', 0), ('tailor', 0.333),
                                     ('soldier', 0.667), ('sailor', 1)])
        # check __getitem__ on indices: entries sort by descending quality
        assert accept[3] == ('tinker', 0)
        assert accept[2] == ('tailor', 0.333)
        assert accept[1] == ('soldier', 0.667)
        # BUG FIX: was ``assert accept[0], ('sailor', 1)`` -- the comma made
        # the tuple an assertion *message*, so the check always passed.
        assert accept[0] == ('sailor', 1)
        # check __getitem__ on string: returns the quality, 0 when absent
        assert accept['tinker'] == 0
        assert accept['tailor'] == 0.333
        assert accept['soldier'] == 0.667
        assert accept['sailor'] == 1
        assert accept['spy'] == 0
        # check quality method
        assert accept.quality('tinker') == 0
        assert accept.quality('tailor') == 0.333
        assert accept.quality('soldier') == 0.667
        assert accept.quality('sailor') == 1
        assert accept.quality('spy') == 0
        # check __contains__
        assert 'sailor' in accept
        assert 'spy' not in accept
        # check index method (raises on unknown values)
        assert accept.index('tinker') == 3
        assert accept.index('tailor') == 2
        assert accept.index('soldier') == 1
        assert accept.index('sailor') == 0
        with pytest.raises(ValueError):
            accept.index('spy')
        # check find method (returns -1 on unknown values)
        assert accept.find('tinker') == 3
        assert accept.find('tailor') == 2
        assert accept.find('soldier') == 1
        assert accept.find('sailor') == 0
        assert accept.find('spy') == -1
        # check to_header method
        assert accept.to_header() == \
            'sailor,soldier;q=0.667,tailor;q=0.333,tinker;q=0'
        # check best_match method: picks the highest-quality candidate,
        # treating quality 0 as unacceptable
        assert accept.best_match(['tinker', 'tailor', 'soldier', 'sailor'],
                                 default=None) == 'sailor'
        assert accept.best_match(['tinker', 'tailor', 'soldier'],
                                 default=None) == 'soldier'
        assert accept.best_match(['tinker', 'tailor'], default=None) == \
            'tailor'
        assert accept.best_match(['tinker'], default=None) is None
        assert accept.best_match(['tinker'], default='x') == 'x'

    def test_accept_wildcard(self):
        # '*' matches anything but with quality 0 it never wins a best_match
        accept = self.storage_class([('*', 0), ('asterisk', 1)])
        assert '*' in accept
        assert accept.best_match(['asterisk', 'star'], default=None) == \
            'asterisk'
        assert accept.best_match(['star'], default=None) is None

    @pytest.mark.skipif(True, reason='Werkzeug doesn\'t respect specificity.')
    def test_accept_wildcard_specificity(self):
        # documents the RFC-desired behavior; skipped because Werkzeug
        # intentionally does not implement specificity ranking
        accept = self.storage_class([('asterisk', 0), ('star', 0.5), ('*', 1)])
        assert accept.best_match(['star', 'asterisk'], default=None) == 'star'
        assert accept.best_match(['asterisk', 'star'], default=None) == 'star'
        assert accept.best_match(['asterisk', 'times'], default=None) == \
            'times'
        assert accept.best_match(['asterisk'], default=None) is None
class TestFileStorage(object):
    """Tests for FileStorage, the uploaded-file wrapper."""

    storage_class = datastructures.FileStorage

    def test_mimetype_always_lowercase(self):
        # the mimetype property normalizes the declared content type
        file_storage = self.storage_class(content_type='APPLICATION/JSON')
        assert file_storage.mimetype == 'application/json'

    def test_bytes_proper_sentinel(self):
        # ensure we iterate over new lines and don't enter into an infinite loop
        # (iteration must use the right sentinel for both text and byte streams)
        import io

        unicode_storage = self.storage_class(io.StringIO(u"one\ntwo"))
        for idx, line in enumerate(unicode_storage):
            assert idx < 2
        assert idx == 1

        binary_storage = self.storage_class(io.BytesIO(b"one\ntwo"))
        for idx, line in enumerate(binary_storage):
            assert idx < 2
        assert idx == 1
|
magne4000/werkzeug
|
tests/test_datastructures.py
|
Python
|
bsd-3-clause
| 30,907
|
[
"TINKER"
] |
e2e2f697434be1c9e1cbe760100c6341758216d3201ba2eb6acfe48488c89911
|
#!/usr/bin/env python
"""
test.py
Author: Tony Papenfuss
Date: Mon Jun 23 22:35:08 EST 2008
"""
import os, sys
from sqlalchemy import MetaData, Table, Column, Integer, Float, Text
from sqlalchemy import create_engine
from sqlalchemy.orm import mapper, sessionmaker, relation
from sqlalchemy.sql.expression import join
from blast import HSP
# Schema setup: describe the "HSPs" table that blast.HSP maps onto.
metadata = MetaData()
hsp_table = Table("HSPs", metadata,
    Column('id', Integer, primary_key=True),
    Column('queryId', Text),
    Column('subjectId', Text),
    Column('pcId', Float),
    Column('alignLength', Float),
    Column('matches', Integer),
    Column('mismatches', Integer),
    Column('qStart', Integer),
    Column('qEnd', Integer),
    Column('sStart', Integer),
    Column('sEnd', Integer),
    Column('eValue', Float),
    Column('bitScore', Float)
)

# Classical (non-declarative) mapping of the HSP class onto the table.
mapper(HSP, hsp_table)

# Create the SQLite database file and any missing tables.
engine = create_engine("sqlite:///test.sqlite3")
metadata.create_all(engine)

# NOTE(review): ``transactional=True`` is the pre-0.5 SQLAlchemy spelling
# (later replaced by ``autocommit=False``) -- confirm the pinned SQLAlchemy
# version still accepts it.
Session = sessionmaker(autoflush=True, transactional=True)
Session.configure(bind=engine)
session = Session()

# Dump every stored HSP row (Python 2 print statements).
for h in session.query(HSP):
    print dir(h)
    print h
|
PapenfussLab/Mungo
|
mungo/alchemy/tests/query.py
|
Python
|
artistic-2.0
| 1,096
|
[
"BLAST"
] |
4d816658c66429671893fa02260604e9492f84e781e65d7341953636c3d0fd59
|
########################################################################
# $HeadURL$
########################################################################
""" DIRAC FileCatalog mix-in class to manage users and groups
"""
__RCSID__ = "$Id$"
import threading
import time
from types import IntType, LongType

from DIRAC import S_OK, S_ERROR, gConfig, gLogger
from DIRAC.Core.Security import Properties
class UserAndGroupManagerBase:
    """Base class for FileCatalog user/group managers.

    Declares the FC_Users/FC_Groups table schemas and the refresh hooks that
    concrete managers (DB- or CS-backed) must implement.
    """

    # Table definitions consumed by self.db._createTables in setDatabase().
    _tables = {}
    _tables["FC_Groups"] = {"Fields":
                            {
                                "GID": "INTEGER NOT NULL AUTO_INCREMENT",
                                "GroupName": "VARCHAR(127) NOT NULL"
                            },
                            "PrimaryKey": ['GID'],
                            "UniqueIndexes": {"GroupName": ["GroupName"]}
                            }
    _tables["FC_Users"] = {"Fields":
                           {
                               "UID": "INTEGER NOT NULL AUTO_INCREMENT",
                               "UserName": "VARCHAR(127) NOT NULL"
                           },
                           "PrimaryKey": ['UID'],
                           "UniqueIndexes": {"UserName": ["UserName"]}
                           }

    def __init__(self, database=None):
        """Optionally bind to *database* and prime the user/group caches.

        :param database: FileCatalog DB object; when given, the manager
                         tables are created immediately via setDatabase().
        """
        self.db = None
        if database is not None:
            self.setDatabase(database)
        # serializes cache refresh and add/remove operations in subclasses
        self.lock = threading.Lock()
        self._refreshUsers()
        self._refreshGroups()

    def _refreshUsers(self):
        # abstract hook: reload the user cache from the backing store
        return S_ERROR('Should be implemented in a derived class')

    def _refreshGroups(self):
        # abstract hook: reload the group cache from the backing store
        return S_ERROR('Should be implemented in a derived class')

    def setDatabase(self, database):
        """Attach the DB object and ensure the manager tables exist."""
        self.db = database
        result = self.db._createTables(self._tables)
        if not result['OK']:
            gLogger.error("Failed to create tables", str(self._tables.keys()))
        elif result['Value']:
            gLogger.info("Tables created: %s" % ','.join(result['Value']))
        return result

    def getUserAndGroupRight(self, credDict):
        """ Evaluate rights for user and group operations """
        # only credentials carrying FC_MANAGEMENT may administer users/groups
        if Properties.FC_MANAGEMENT in credDict['properties']:
            return S_OK(True)
        return S_OK(False)
class UserAndGroupManagerDB(UserAndGroupManagerBase):
    """Manager that persists users/groups in the FC_Users/FC_Groups tables.

    Keeps write-through in-memory caches on the DB object:
    ``self.db.users`` (name -> uid), ``self.db.uids`` (uid -> name),
    ``self.db.groups`` (name -> gid), ``self.db.gids`` (gid -> name).
    All cache mutations happen under ``self.lock``.
    """

    def getUserAndGroupID(self, credDict):
        """ Get a uid, gid tuple for the given Credentials """
        # Get the user (created on the fly if unknown)
        s_uid = credDict.get('username', 'anon')
        res = self.getUserID(s_uid)
        if not res['OK']:
            return res
        uid = res['Value']
        # Get the group (create it if it doesn't exist)
        s_gid = credDict.get('group', 'anon')
        res = self.getGroupID(s_gid)
        if not res['OK']:
            return res
        gid = res['Value']
        return S_OK((uid, gid))

    #####################################################################
    #
    # User related methods
    #
    #####################################################################
    def getUserID(self, user):
        """ Get ID for a user specified by its name """
        # numeric input is assumed to already be a uid
        if type(user) in [IntType, LongType]:
            return S_OK(user)
        if user in self.db.users.keys():
            return S_OK(self.db.users[user])
        # unknown user: insert it (auto-create semantics)
        return self.__addUser(user)

    def addUser(self, uname):
        """ Add a new user with a name 'uname' """
        # getUserID already inserts unknown users, so it doubles as add
        return self.getUserID(uname)

    def getUsers(self):
        # returns the cached name -> uid mapping
        # self.__refreshUsers()
        return S_OK(self.db.users)

    def findUser(self, user):
        return self.getUserID(user)

    def getUserName(self, uid):
        """ Get user name for the given id """
        if uid in self.db.uids.keys():
            return S_OK(self.db.uids[uid])
        return S_ERROR('User id %d not found' % uid)

    def deleteUser(self, uname, force=True):
        """ Delete a user specified by its name """
        # ToDo: Check first if there are files belonging to the user
        if not force:
            pass
        return self.__removeUser(uname)

    def __addUser(self, uname):
        """Insert *uname* into FC_Users and both caches (lock-protected)."""
        startTime = time.time()
        self.lock.acquire()
        waitTime = time.time()
        gLogger.debug("UserGroupManager AddUser lock created. Waited %.3f seconds. %s" % (waitTime-startTime, uname))
        # re-check under the lock: another thread may have added it meanwhile
        if uname in self.db.users.keys():
            uid = self.db.users[uname]
            gLogger.debug("UserGroupManager AddUser lock released. Used %.3f seconds. %s" % (time.time()-waitTime, uname))
            self.lock.release()
            return S_OK(uid)
        res = self.db._insert('FC_Users', ['UserName'], [uname])
        if not res['OK']:
            gLogger.debug("UserGroupManager AddUser lock released. Used %.3f seconds. %s" % (time.time()-waitTime, uname))
            self.lock.release()
            # duplicate key means another process inserted it first:
            # refresh the cache and return the existing uid
            if "Duplicate entry" in res['Message']:
                result = self._refreshUsers()
                if not result['OK']:
                    return result
                if uname in self.db.users.keys():
                    uid = self.db.users[uname]
                    return S_OK(uid)
            return res
        uid = res['lastRowId']
        self.db.uids[uid] = uname
        self.db.users[uname] = uid
        gLogger.debug("UserGroupManager AddUser lock released. Used %.3f seconds. %s" % (time.time()-waitTime, uname))
        self.lock.release()
        return S_OK(uid)

    def __removeUser(self, uname):
        """Delete *uname* from FC_Users and drop it from the caches."""
        startTime = time.time()
        self.lock.acquire()
        waitTime = time.time()
        gLogger.debug("UserGroupManager RemoveUser lock created. Waited %.3f seconds. %s" % (waitTime-startTime, uname))
        # 'Missing' sentinel: skip the cache cleanup if the user was unknown
        uid = self.db.users.get(uname, 'Missing')
        # NOTE(review): uname is interpolated straight into SQL; if user
        # names can come from untrusted input this should use an escaped /
        # parameterized query.
        req = "DELETE FROM FC_Users WHERE UserName='%s'" % uname
        res = self.db._update(req)
        if not res['OK']:
            gLogger.debug("UserGroupManager RemoveUser lock released. Used %.3f seconds. %s" % (time.time()-waitTime, uname))
            self.lock.release()
            return res
        if uid != 'Missing':
            self.db.users.pop(uname)
            self.db.uids.pop(uid)
        gLogger.debug("UserGroupManager RemoveUser lock released. Used %.3f seconds. %s" % (time.time()-waitTime, uname))
        self.lock.release()
        return S_OK()

    def _refreshUsers(self):
        """ Get the current user IDs and names """
        startTime = time.time()
        self.lock.acquire()
        waitTime = time.time()
        gLogger.debug("UserGroupManager RefreshUsers lock created. Waited %.3f seconds." % (waitTime-startTime))
        req = "SELECT UID,UserName from FC_Users"
        res = self.db._query(req)
        if not res['OK']:
            gLogger.debug("UserGroupManager RefreshUsers lock released. Used %.3f seconds." % (time.time()-waitTime))
            self.lock.release()
            return res
        # rebuild both direction caches from scratch
        self.db.users = {}
        self.db.uids = {}
        for uid, uname in res['Value']:
            self.db.users[uname] = uid
            self.db.uids[uid] = uname
        gLogger.debug("UserGroupManager RefreshUsers lock released. Used %.3f seconds." % (time.time()-waitTime))
        self.lock.release()
        return S_OK()

    #####################################################################
    #
    # Group related methods
    #
    def getGroupID(self, group):
        """ Get ID for a group specified by its name """
        # numeric input is assumed to already be a gid
        if type(group) in [IntType, LongType]:
            return S_OK(group)
        if group in self.db.groups.keys():
            return S_OK(self.db.groups[group])
        # unknown group: insert it (auto-create semantics)
        return self.__addGroup(group)

    def addGroup(self, gname):
        """ Add a new group with a name 'name' """
        return self.getGroupID(gname)

    def getGroups(self):
        # returns the cached name -> gid mapping
        # self.__refreshGroups()
        return S_OK(self.db.groups)

    def findGroup(self, group):
        return self.getGroupID(group)

    def getGroupName(self, gid):
        """ Get group name for the given id """
        if gid in self.db.gids.keys():
            return S_OK(self.db.gids[gid])
        return S_ERROR('Group id %d not found' % gid)

    def deleteGroup(self, gname, force=True):
        """ Delete a group specified by its name """
        if not force:
            # ToDo: Check first if there are files belonging to the group
            pass
        return self.__removeGroup(gname)

    def __addGroup(self, group):
        """Insert *group* into FC_Groups and both caches (lock-protected)."""
        startTime = time.time()
        self.lock.acquire()
        waitTime = time.time()
        gLogger.debug("UserGroupManager AddGroup lock created. Waited %.3f seconds. %s" % (waitTime-startTime, group))
        # re-check under the lock: another thread may have added it meanwhile
        if group in self.db.groups.keys():
            gid = self.db.groups[group]
            gLogger.debug("UserGroupManager AddGroup lock released. Used %.3f seconds. %s" % (time.time()-waitTime, group))
            self.lock.release()
            return S_OK(gid)
        res = self.db._insert('FC_Groups', ['GroupName'], [group])
        if not res['OK']:
            gLogger.debug("UserGroupManager AddGroup lock released. Used %.3f seconds. %s" % (time.time()-waitTime, group))
            self.lock.release()
            # duplicate key: another process inserted it first -- refresh
            if "Duplicate entry" in res['Message']:
                result = self._refreshGroups()
                if not result['OK']:
                    return result
                if group in self.db.groups.keys():
                    gid = self.db.groups[group]
                    return S_OK(gid)
            return res
        gid = res['lastRowId']
        self.db.gids[gid] = group
        self.db.groups[group] = gid
        gLogger.debug("UserGroupManager AddGroup lock released. Used %.3f seconds. %s" % (time.time()-waitTime, group))
        self.lock.release()
        return S_OK(gid)

    def __removeGroup(self, group):
        """Delete *group* from FC_Groups and drop it from the caches."""
        startTime = time.time()
        self.lock.acquire()
        waitTime = time.time()
        gLogger.debug("UserGroupManager RemoveGroup lock created. Waited %.3f seconds. %s" % (waitTime-startTime, group))
        gid = self.db.groups.get(group, 'Missing')
        # NOTE(review): group is interpolated straight into SQL; see
        # __removeUser for the same injection concern.
        req = "DELETE FROM FC_Groups WHERE GroupName='%s'" % group
        res = self.db._update(req)
        if not res['OK']:
            gLogger.debug("UserGroupManager RemoveGroup lock released. Used %.3f seconds. %s" % (time.time()-waitTime, group))
            self.lock.release()
            return res
        if gid != 'Missing':
            self.db.groups.pop(group)
            self.db.gids.pop(gid)
        gLogger.debug("UserGroupManager RemoveGroup lock released. Used %.3f seconds. %s" % (time.time()-waitTime, group))
        self.lock.release()
        return S_OK()

    def _refreshGroups(self):
        """ Get the current group IDs and names """
        req = "SELECT GID,GroupName from FC_Groups"
        startTime = time.time()
        self.lock.acquire()
        waitTime = time.time()
        gLogger.debug("UserGroupManager RefreshGroups lock created. Waited %.3f seconds." % (waitTime-startTime))
        res = self.db._query(req)
        if not res['OK']:
            gLogger.debug("UserGroupManager RefreshGroups lock released. Used %.3f seconds." % (time.time()-waitTime))
            self.lock.release()
            return res
        # rebuild both direction caches from scratch
        self.db.groups = {}
        self.db.gids = {}
        for gid, gname in res['Value']:
            self.db.groups[gname] = gid
            self.db.gids[gid] = gname
        gLogger.debug("UserGroupManager RefreshGroups lock released. Used %.3f seconds." % (time.time()-waitTime))
        self.lock.release()
        return S_OK()
class UserAndGroupManagerCS(UserAndGroupManagerBase):
    """Manager backed by the DIRAC Configuration Service (Registry section).

    Users and groups are identified by their names directly (no numeric
    ids), so most operations are pass-throughs.

    BUG FIX: this class uses ``gConfig``, which was never imported by this
    module; it is now imported at module level from DIRAC.
    """

    def getUserAndGroupID(self, credDict):
        """Return the (user, group) pair taken straight from the credentials."""
        user = credDict.get('username', 'anon')
        group = credDict.get('group', 'anon')
        return S_OK((user, group))

    #####################################################################
    #
    # User related methods
    #
    #####################################################################
    def addUser(self, name):
        # users are managed in the CS, not here: accept and echo the name
        return S_OK(name)

    def deleteUser(self, name, force=True):
        # deletion is a CS-side operation: nothing to do locally
        return S_OK()

    def getUsers(self):
        """Return {user_name: user_name} for all users registered in the CS."""
        res = gConfig.getSections('/Registry/Users')
        if not res['OK']:
            return res
        userDict = {}
        for user in res['Value']:
            userDict[user] = user
        return S_OK(userDict)

    def getUserName(self, uid):
        # names are their own ids in the CS backend
        return S_OK(uid)

    def findUser(self, user):
        return S_OK(user)

    #####################################################################
    #
    # Group related methods
    #
    #####################################################################
    def addGroup(self, gname):
        # groups are managed in the CS, not here: accept and echo the name
        return S_OK(gname)

    def deleteGroup(self, gname, force=True):
        # deletion is a CS-side operation: nothing to do locally
        return S_OK()

    def getGroups(self):
        """Return {group_name: group_name} for all groups registered in the CS."""
        res = gConfig.getSections('/Registry/Groups')
        if not res['OK']:
            return res
        groupDict = {}
        for group in res['Value']:
            groupDict[group] = group
        return S_OK(groupDict)

    def getGroupName(self, gid):
        # names are their own ids in the CS backend
        return S_OK(gid)

    def findGroup(self, group):
        return S_OK(group)
|
miloszz/DIRAC
|
DataManagementSystem/DB/FileCatalogComponents/UserAndGroupManager.py
|
Python
|
gpl-3.0
| 11,944
|
[
"DIRAC"
] |
5da4bd76b600ced0df50a103574f9f02096c17e4d9418f71e19fc7acca1a27f6
|
# -*- coding: utf-8 -*-
from collections import defaultdict
import numpy as np
from chemreac import ReactionDiffusion
from chempy import ReactionSystem
from chempy.units import to_unitless, SI_base_registry, get_derived_unit, allclose, default_units as u
# Closed-form solutions for the sequential first-order chain A -> B -> C
# with rate constants k[0] (A->B) and k[1] (B->C).  Each callable takes the
# initial concentrations y0 = [A0, B0, C0], the rate constants k and the
# time t (scalar or ndarray).  The B/C expressions assume k[0] != k[1].
analytic = [
    # A(t): plain exponential decay of the initial A
    lambda y0, k, t: (
        y0[0] * np.exp(-k[0]*t)),
    # B(t): decay of initial B plus in-growth from decaying A
    lambda y0, k, t: (
        y0[1] * np.exp(-k[1] * t) + y0[0] * k[0] / (k[1] - k[0]) *
        (np.exp(-k[0]*t) - np.exp(-k[1]*t))),
    # C(t): accumulation obtained by integrating the B(t) decay terms
    lambda y0, k, t: (
        y0[2] + y0[1] * k[1] / (-k[1]) *
        (np.exp(-k[1]*t) - 1) +
        k[1] * k[0] * y0[0] / (k[1] - k[0]) *
        (1 / (-k[0]) * (np.exp(-k[0]*t) - 1) -
         1 / (-k[1]) * (np.exp(-k[1]*t) - 1)))
]
def _get_odesys():
    """Build the A->B->C test system as a pyodesys-compatible ODE system.

    Rate constants are exposed as the free parameters 'kA' and 'kB'
    (k=[0, 0] placeholders are overridden via k_from_params at solve time).
    """
    names = ['A', 'B', 'C']
    pns = ['kA', 'kB']
    rd = ReactionDiffusion(len(names), [[0], [1]], [[1], [2]], k=[0, 0],
                           substance_names=names, param_names=pns)
    # map the named parameters onto the rate-constant vector in order
    return rd._as_odesys(k_from_params=lambda self, p: [p[k] for k in self.param_names])
def test_decay():
    """Integrate A->B (kB=0) and compare against the analytic exponential."""
    kA = 0.13
    odesys = _get_odesys()
    # pyodesys compliance:
    assert odesys.autonomous_interface
    assert callable(odesys.numpy.linspace)
    y0 = dict(A=3., B=1., C=0.)
    # non-zero start time checks that the system is treated as autonomous
    t0, tend, nt = 5.0, 17.0, 42
    tout = np.linspace(t0, tend, nt+1)
    params = dict(kA=kA, kB=0.0)
    result = odesys.integrate(tout, y0, params, atol=1e-8)
    # with kB=0: A decays exponentially, B accumulates what A loses
    yref = np.array([y0['A']*np.exp(-kA*(tout-t0)),
                     y0['B']+y0['A']*(1-np.exp(-kA*(tout-t0)))]).transpose()
    assert np.allclose(result.yout[:, :2], yref)
    # smoke-test continuing the integration past the original endpoint
    result.extend_by_integration(tend+1, params)
def test_decay_params():
    """Integrate the full A->B->C chain and compare with ``analytic``."""
    odesys = _get_odesys()
    y0 = 42., 7., 4.
    k = .7, .3
    ic = dict(zip(odesys.names, y0))
    p = dict(zip('kA kB'.split(), k))
    # per-species absolute tolerances exercise the dict form of atol
    tout, yout, info = odesys.integrate([0, 5], ic, p, atol={k: 1e-8 for k in odesys.names})
    yref = np.array([a(y0, k, tout) for a in analytic]).transpose()
    assert np.allclose(yout, yref)
def test_chained_parameter_variation():
    """Piecewise-constant doserate driving radiolytic production of B from A."""
    # A -> B
    names = ['A', 'B']
    # no ordinary reactions: B is produced radiolytically from parent A
    rd = ReactionDiffusion(len(names), [], [], k=[],
                           substance_names=names, g_value_parents=[0], g_values=[[0, 1]],
                           param_names=['doserate'])
    durations = [1., 3., 2.]
    y0 = [13., 7.]
    ic = dict(zip(names, y0))
    doserates = [.3, .11, .7]
    npoints = 3
    odesys = rd._as_odesys(variables_from_params=dict(
        density=lambda self, params: 1.0
    ))
    res = odesys.chained_parameter_variation(
        durations, ic, {'doserate': doserates}, npoints=npoints,
        integrate_kwargs=dict(atol={k: 1e-8 for k in odesys.names}))
    # npoints interior samples per stage plus the shared initial point
    assert res.xout.size == npoints*len(durations) + 1
    assert res.xout[0] == 0
    assert np.all(res.yout[0, :] == y0)
    # the recorded doserate must be piecewise constant over the stages
    expected = [.3]*npoints + [.11]*npoints + [.7]*(npoints+1)
    assert np.all(res.params[:, odesys.param_names.index('doserate')] == expected)
    # within each stage: A is conserved, B grows linearly with rate dr*A0
    cumulative = 0.0
    for dr, dur in zip(doserates, durations):
        mask = (cumulative <= res.xout) & (res.xout <= cumulative + dur)
        cumulative += dur
        t, y = res.xout[mask], res.yout[mask, :]
        a, b = y[:, 0], y[:, 1]
        refa = a[0]
        refb = b[0] + (t - t[0])*dr*a[0]
        assert np.allclose(refa, a)
        assert np.allclose(refb, b)
    # extend one more unit of time at the final doserate and check B's growth
    res.extend_by_integration(np.sum(durations)+1, dict(doserate=doserates[-1]), integrator='cvode')
    assert abs(res.yout[-1, 1] - (refb[-1] + doserates[-1]*a[0])) < 1e-8
def test_chained_parameter_variation_from_ReactionSystem():
    """Unit-aware variant: water radiolysis driven by staged doserates."""
    # G-value for radiolytic production (mol produced per joule absorbed)
    g_E_mol_J = 2.1e-7
    rsys = ReactionSystem.from_string(
        """
        (H2O) -> e-(aq) + H+ + OH; Radiolytic(%.2e*mol/J)
        2 OH -> H2O2; 3.6e9/M/s
        H+ + OH- -> H2O; 1.4e11/M/s
        H2O -> H+ + OH-; 1.4e-3/s
        N2O + e-(aq) -> N2 + O-; 9.6e9/M/s
        O- + H+ -> OH; 1e11/M/s
        """ % g_E_mol_J  # neglecting a large body of reactions (just a test-case after all)
    )
    ureg = SI_base_registry
    # the radiolytic source term scales with doserate * density
    field_u = get_derived_unit(ureg, 'doserate') * get_derived_unit(ureg, 'density')
    rd = ReactionDiffusion.from_ReactionSystem(rsys, fields=[[0*field_u]], unit_registry=ureg,
                                               param_names=['doserate'])
    dens_kg_dm3 = 0.998
    odesys = rd._as_odesys(
        variables_from_params=dict(
            density=lambda self, params: dens_kg_dm3*1e3*u.kg/u.m**3
        )
    )
    npoints = 5
    durations = [59*u.second, 42*u.minute, 2*u.hour]
    doserates = [135*u.Gy/u.s, 11*u.Gy/u.s, 180*u.Gy/u.minute]
    M = u.molar
    ic = defaultdict(lambda: 0*M, {'H2O': 55.4*M, 'H+': 1e-7*M, 'OH-': 1e-7*M, 'N2O': 20e-3*M})
    result = odesys.chained_parameter_variation(durations, ic, {'doserate': doserates}, npoints=npoints)
    # reconstruct the expected sample times (npoints per stage, no duplicates)
    ref_xout_s = [0]
    for dur in map(lambda dur: to_unitless(dur, u.s), durations):
        ref_xout_s += list(np.linspace(ref_xout_s[-1], ref_xout_s[-1] + dur, npoints+1)[1:])
    assert allclose(result.xout, ref_xout_s*u.s)
    N2_M = to_unitless(result.named_dep('N2'), u.M)
    H2O2_M = to_unitless(result.named_dep('H2O2'), u.M)
    # N2O scavenging converts every solvated electron into N2, and the OH
    # budget ends up as H2O2, so both should track the accumulated dose
    e_accum_molar = 0
    for i, (dur, dr) in enumerate(zip(durations, doserates)):
        dur_s = to_unitless(dur, u.s)
        dr_Gy_s = to_unitless(dr, u.Gy/u.s)
        local_ts = np.linspace(0, dur_s, npoints+1)
        # local_ic = {k: result.named_dep(k)[i*npoints] for k in odesys.names}
        for j, (lt, ld) in enumerate(zip(local_ts[1:], np.diff(local_ts))):
            e_accum_molar += ld*g_E_mol_J*dr_Gy_s*dens_kg_dm3
            assert abs(N2_M[i*npoints + j + 1] - e_accum_molar)/e_accum_molar < 1e-3
            assert abs(H2O2_M[i*npoints + j + 1] - e_accum_molar)/e_accum_molar < 1e-3
    # single-stage integrate() should agree with the first chained stage
    res2 = odesys.integrate(durations[0], ic, {'doserate': doserates[0]}, integrator='cvode')
    dr2 = res2.params[res2.odesys.param_names.index('doserate')]
    assert np.asarray(res2.params).shape[-1] == len(odesys.param_names)
    assert allclose(dr2, doserates[0])
    assert allclose(res2.xout[-1], durations[0])
    assert allclose(res2.named_dep('N2')[-1], durations[0]*doserates[0]*g_E_mol_J*u.mol/u.J*dens_kg_dm3*u.kg/u.dm3)
    # unit round-trips must not raise
    to_unitless(res2.xout, u.s)
    to_unitless(res2.yout, u.molar)
    to_unitless(dr2, u.Gy/u.s)
|
bjodah/chemreac
|
chemreac/tests/test_odesys.py
|
Python
|
bsd-2-clause
| 6,214
|
[
"ChemPy"
] |
6e4557c996c0482ca60cff8d1587fa823559b270ac8a483e862dd01598b794b2
|
import os
import time
import json
import numpy as np
from ..dftb import DFTBplus, read_detailed_out
from pychemia.crystal import KPoints
from pychemia import pcm_log, Structure
class KPointConvergence:
def __init__(self, structure, workdir='.', slater_path='.', waiting=False, energy_tolerance=1E-3,
output_file='results.json'):
self.structure = structure
self.workdir = workdir
self.slater_path = slater_path
self.waiting = waiting
self.energy_tolerance = energy_tolerance
if isinstance(slater_path, str):
self.slater_path = [slater_path]
self.results = []
self.output_file = output_file
dftb = DFTBplus(workdir=self.workdir)
kpoints = KPoints.optimized_grid(self.structure.lattice, kp_density=10000, force_odd=True)
dftb.initialize(structure=self.structure, kpoints=kpoints)
ans = dftb.set_slater_koster(search_paths=self.slater_path)
if not ans:
print('Slater-Koster files not complete')
def run(self):
n = 10
dftb = DFTBplus(workdir=self.workdir)
kpoints = KPoints.optimized_grid(self.structure.lattice, kp_density=10000, force_odd=True)
dftb.initialize(structure=self.structure, kpoints=kpoints)
ans = dftb.set_slater_koster(search_paths=self.slater_path)
if not ans:
print('Slater-Koster files not complete')
return
grid = None
energies = []
while True:
density = n ** 3
kpoints = KPoints.optimized_grid(self.structure.lattice, kp_density=density, force_odd=True)
if np.sum(grid) != np.sum(kpoints.grid):
pcm_log.debug('Trial density: %d Grid: %s' % (density, kpoints.grid))
grid = list(kpoints.grid)
dftb.kpoints = kpoints
dftb.basic_input()
dftb.hamiltonian['MaxSCCIterations'] = 50
if os.path.isfile('charges.bin'):
dftb.hamiltonian['ReadInitialCharges'] = True
dftb.hamiltonian['Mixer'] = {'name': 'DIIS'}
dftb.set_static()
dftb.set_inputs()
dftb.run()
if self.waiting:
dftb.runner.wait()
while True:
if dftb.runner is not None and dftb.runner.poll() is not None:
pcm_log.info('Execution completed. Return code %d' % dftb.runner.returncode)
filename = dftb.workdir + os.sep + 'detailed.out'
if os.path.exists(filename):
ret = read_detailed_out(filename)
line = 'KPoint_grid= %15s iSCC= %4d Total_energy= %10.4f SCC_error= %9.3E'
print(line % (grid, ret['SCC']['iSCC'], ret['total_energy'], ret['SCC']['SCC_error']))
else:
print('detailed.out could not be found, exiting...')
return
n += 2
energies.append(ret['total_energy'])
break
time.sleep(10)
self.results.append({'kp_grid': grid,
'iSCC': ret['SCC']['iSCC'],
'Total_energy': ret['total_energy'],
'SCC_error': ret['SCC']['SCC_error']})
else:
n += 2
if len(energies) > 2 and abs(max(energies[-3:]) - min(energies[-3:])) < self.energy_tolerance:
break
def save_json(self):
    """Write the accumulated convergence results to ``self.output_file`` as JSON.

    Uses a context manager so the file handle is closed even if
    serialization raises (the original left the handle open on error).
    """
    with open(self.output_file, 'w') as wf:
        json.dump(self.results, wf, sort_keys=True, separators=(',\n', ': '))
def kpoint_convergence():
    """Run a k-point convergence study for the structure stored in 'structure.json'."""
    structure = Structure.load_json('structure.json')
    task = KPointConvergence(structure)
    task.run()
    task.save_json()
|
MaterialsDiscovery/PyChemia
|
pychemia/code/dftb/task/kpconv.py
|
Python
|
mit
| 3,982
|
[
"CRYSTAL"
] |
f0a7cedbfa76246be45b6b7445ffe564393c140b47689ed59c1271f5b35ca198
|
## \file
## \ingroup tutorial_pyroot
## \notebook
## Example of function called when a mouse event occurs in a pad.
## When moving the mouse in the canvas, a second canvas shows the
## projection along X of the bin corresponding to the Y position
## of the mouse. The resulting histogram is fitted with a gaussian.
## A "dynamic" line shows the current bin position in Y.
## This more elaborated example can be used as a starting point
## to develop more powerful interactive applications exploiting CINT
## as a development engine.
##
## Note that a class is used to hold on to the canvas that display
## the selected slice.
##
## \macro_image
## \macro_code
##
## \author Rene Brun, Johann Cohen-Tanugi, Wim Lavrijsen, Enric Tejedor
import sys
import ctypes
from ROOT import gRandom, gPad, gROOT, gVirtualX
from ROOT import kTRUE, kRed
from ROOT import TCanvas, TH2, TH2F
class DynamicExec:
    """Mouse-event callback attached to a canvas via TExec.

    Tracks the cursor over a TH2, draws a crosshair at the current bin, and
    shows the X and Y projections of that bin in two auxiliary canvases,
    each fitted with a Gaussian.
    """

    def __init__( self ):
        # Projection canvases are created lazily on first use.
        self._cX   = None
        self._cY   = None
        # Last (px, py) pixel position, used to erase the previous crosshair.
        self._old  = None

    def __call__( self ):
        h = gPad.GetSelected();
        if not h:
            return

        # Only react when the object under the cursor is a 2-D histogram.
        if not isinstance( h, TH2 ):
            return

        gPad.GetCanvas().FeedbackMode( kTRUE )

        # erase old position and draw a line at current position
        px = gPad.GetEventX()
        py = gPad.GetEventY()

        uxmin, uxmax = gPad.GetUxmin(), gPad.GetUxmax()
        uymin, uymax = gPad.GetUymin(), gPad.GetUymax()
        pxmin, pxmax = gPad.XtoAbsPixel( uxmin ), gPad.XtoAbsPixel( uxmax )
        pymin, pymax = gPad.YtoAbsPixel( uymin ), gPad.YtoAbsPixel( uymax )

        # In feedback mode redrawing the previous lines erases them
        # (presumably XOR drawing -- ROOT feedback-mode semantics).
        if self._old != None:
            gVirtualX.DrawLine( pxmin, self._old[1], pxmax, self._old[1] )
            gVirtualX.DrawLine( self._old[0], pymin, self._old[0], pymax )
        gVirtualX.DrawLine( pxmin, py, pxmax, py )
        gVirtualX.DrawLine( px, pymin, px, pymax )

        self._old = px, py

        # Convert the pixel position back to histogram (user) coordinates.
        upx = gPad.AbsPixeltoX( px )
        x = gPad.PadtoX( upx )
        upy = gPad.AbsPixeltoY( py )
        y = gPad.PadtoY( upy )

        padsav = gPad

        # create or set the display canvases
        if not self._cX:
            self._cX = TCanvas( 'c2', 'Projection Canvas in X', 730, 10, 700, 500 )
        else:
            self._DestroyPrimitive( 'X' )

        if not self._cY:
            self._cY = TCanvas( 'c3', 'Projection Canvas in Y', 10, 550, 700, 500 )
        else:
            self._DestroyPrimitive( 'Y' )

        self.DrawSlice( h, y, 'Y' )
        self.DrawSlice( h, x, 'X' )

        # Restore the pad that was active before we switched canvases.
        padsav.cd()

    def _DestroyPrimitive( self, xy ):
        # Remove the previous projection histogram from canvas _cX/_cY
        # before a new one is drawn with the same name.
        proj = getattr( self, '_c'+xy ).GetPrimitive( 'Projection '+xy )
        if proj:
            proj.IsA().Destructor( proj )

    def DrawSlice( self, histo, value, xy ):
        # The projection axis is the opposite of the slicing axis.
        yx = xy == 'X' and 'Y' or 'X'

        # draw slice corresponding to mouse position
        canvas = getattr( self, '_c'+xy )
        canvas.SetGrid()
        canvas.cd()

        bin = getattr( histo, 'Get%saxis' % xy )().FindBin( value )
        hp = getattr( histo, 'Projection' + yx )( '', bin, bin )
        hp.SetFillColor( 38 )
        hp.SetName( 'Projection ' + xy )
        hp.SetTitle( xy + 'Projection of bin=%d' % bin )
        # Quiet, log-likelihood Gaussian fit; highlight the fit in red.
        hp.Fit( 'gaus', 'ql' )
        hp.GetFunction( 'gaus' ).SetLineColor( kRed )
        hp.GetFunction( 'gaus' ).SetLineWidth( 6 )

        canvas.Update()
if __name__ == '__main__':
    # create a new canvas.
    c1 = TCanvas('c1', 'Dynamic Slice Example', 10, 10, 700, 500 )
    c1.SetFillColor( 42 )
    c1.SetFrameFillColor( 33 )

    # create a 2-d histogram, fill and draw it
    hpxpy = TH2F( 'hpxpy', 'py vs px', 40, -4, 4, 40, -4, 4 )
    hpxpy.SetStats( 0 )
    x, y = ctypes.c_double( 0.1 ), ctypes.c_double( 0.101 )
    for i in range( 50000 ):
        # pass ctypes doubles by reference, then retrieve their modified values with .value
        gRandom.Rannor( x, y )
        hpxpy.Fill( x.value, y.value )
    hpxpy.Draw( 'COL' )

    # Add a TExec object to the canvas (explicit use of __main__ is for IPython)
    # The slicer must live on __main__ so the TPython::Exec string can find it.
    import __main__
    __main__.slicer = DynamicExec()
    c1.AddExec( 'dynamic', 'TPython::Exec( "slicer()" );' )
    c1.Update()
|
root-mirror/root
|
tutorials/pyroot/DynamicSlice.py
|
Python
|
lgpl-2.1
| 3,997
|
[
"Gaussian"
] |
c3e4398f3ab4260ad8ed853a069bcfb06ac648d446e2c5af6dfbb582112e823b
|
# Lint as: python2, python3
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Setup configuration for the python dsrf modules."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import distutils.spawn
import os
import subprocess
import sys
from setuptools import find_packages
from setuptools import findall
from setuptools import setup
from setuptools.command.build_py import build_py
import six
def _find_protoc_path():
"""Verifies whether the protocol buffer compiler is installed."""
if 'PROTOC' in os.environ and os.path.exists(os.environ['PROTOC']):
protoc_path = os.environ['PROTOC']
else:
protoc_path = distutils.spawn.find_executable('protoc')
if protoc_path is None:
sys.stderr.write(
'protoc not found. Is protobuf-compiler installed? \n'
'Please visit https://developers.google.com/protocol-buffers/ for '
'instructions.')
sys.exit(-1)
return protoc_path
def _generate_proto(source):
    """Invoke the protocol compiler to build <name>_pb2.py from *source*.

    Skips compilation when the generated module is newer than the .proto
    file; exits the process on any failure.
    """
    if not os.path.exists(source):
        sys.stderr.write('Cannot find required file: %s' % source)
        sys.exit(1)

    output_path = source.replace('.proto', '_pb2.py')
    up_to_date = (
        os.path.exists(output_path)
        and os.path.getmtime(source) < os.path.getmtime(output_path)
    )
    if up_to_date:
        # Proto files were generated since the source was last changed.
        return

    proto_dir = os.path.dirname(source)
    command = [_find_protoc_path(),
               '-I=%s' % proto_dir, '--python_out=%s' % proto_dir, source]
    sys.stdout.write('Running command: %s' % ' '.join(command))
    if subprocess.call(command) != 0:
        sys.stderr.write(
            'Error encountered while compiling proto file: %s\n'
            % source)
        sys.exit(1)
class MyBuild(build_py):
    """Custom build class that compiles the protobufs before the normal build."""

    def run(self):
        """Generate a _pb2 module for every .proto file, then delegate to build_py."""
        base_dir = os.path.dirname(os.path.join(os.path.realpath(__file__)))
        sys.stdout.write('Base dir: %s\n' % base_dir)
        proto_dir = os.path.join(base_dir, 'proto/')
        for entry in os.listdir(proto_dir):
            if not entry.endswith('.proto'):
                continue
            proto_file = os.path.join(proto_dir, entry)
            sys.stdout.write('Generating proto: %s\n' % proto_file)
            _generate_proto(proto_file)
        build_py.run(self)
def _find_data_files():
    """Collect every .xsd under the schema directory, grouped by install target.

    Returns:
      A list of (target_directory, [files_list]) tuples.
    """
    grouped = collections.defaultdict(list)
    for path in findall('schemas'):
        if six.ensure_str(path).endswith('.xsd'):
            directory = os.path.split(path)[0]
            grouped[os.path.join('dsrf', directory)].append(path)
    return [(target, files) for target, files in six.iteritems(grouped)]
def _find_dsrf_packages():
    """Traverse the source tree to find the packages, namespaced under 'dsrf'.

    A package is a directory containing the file __init__.py.

    Returns:
      A list of package names.
    """
    names = ['dsrf']
    names.extend('dsrf.%s' % pkg for pkg in find_packages('.'))
    return names
# Package metadata.  package_dir maps the 'dsrf' package one directory up,
# and the custom build_py command compiles the .proto files before building.
setup(name='dsrf',
      version='1.1.0d',
      license='Apache 2.0',
      packages=_find_dsrf_packages(),
      description='DSRF Parsing Library',
      author_email='',
      # NOTE(review): this URL looks truncated (organization page, not a
      # repository) -- confirm the intended repo URL.
      url='https://github.com/ddexnet',
      package_dir={'dsrf': '../dsrf'},
      cmdclass={'build_py': MyBuild},
      data_files=_find_data_files())
|
ddexnet/dsrf
|
setup.py
|
Python
|
apache-2.0
| 4,199
|
[
"VisIt"
] |
70d9c5a48832a28996bd3b50c384584212bf93e794e4f9053a33bf26b6dedb87
|
# Copyright (c) 2014, the GREAT3 executive committee (http://www.great3challenge.info/?q=contacts)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to
# endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This script `works like `training_galaxy_props.py`, and computes two additional quantities that were
later found to be necessary for using the real galaxy sample (as opposed to the parametric fits).
These are:
(a) the S/N within an elliptical Gaussian filter on the original image, and
(b) the minimum noise variance after noise whitening to eliminate correlated noise in the simulated
image.
If (a) is too low, that can indicate some issue with the galaxy image that makes it unusable. If
(b) is too high compared to the noise variance we want to add, then we cannot use a galaxy in our
simulation with our desired S/N limit. In practice, this script was also run by `run_props.py`
using the same command-line arguments, and the information in those outputs was combined using
`combine_image_info.py`.
"""
import galsim
import pyfits
import os
import numpy as np
import math
def training_galaxy_props_real(psf,
in_dir = '/home/rmandelb.proj/data-shared/great3_fit_data',
in_filename = 'real_galaxy_catalog_23.5.fits',
out_dir = '.',
out_filename = 'real_galaxy_catalog_23.5_real_props.fits',
pix_scale = 0.03,
size_factor = 0.6,
ps_size = 48,
do_orig = False,
n_use = None): # change to None!
"""
A routine for estimating the properties of the training sample galaxies in different imaging
conditions.
Given input PSF and pixel scale, we make a real galaxy image in the same way as for GREAT3, and check the
following:
* did the original image have reasonable S/N within an elliptical Gaussian filter? (Note:
result does not depend on final PSF / pixel scale.)
* what is the minimum noise variance in a whitened image?
@params psf GalSim object representing the PSF (without pixel convolution).
@params in_dir Directory containing the catalog of fit parameters for the sample.
@params in_filename Name of catalog of fit parameters for the sample.
@params out_dir Directory in which to put output file.
@params out_filename Name of output catalog.
@params pix_scale Pixel scale for images.
@params size_factor Multiplicative factor by which to modify galaxy sizes to represent a deeper
sample. See the GREAT3 handbook for more details.
@params ps_size Number of pixels per side for postage stamp into which to draw image.
@params do_orig Measure original PS? Or just simulated.
@params n_use Number of galaxies to use; =None for using whole catalog.
"""
# Define the effective PSF including the pixel convolution. Draw it into an image.
pix = galsim.Pixel(pix_scale)
epsf = galsim.Convolve(psf, pix)
im_epsf = epsf.draw(dx=pix_scale)
# Set up RealGalaxyCatalog object
rgc = galsim.RealGalaxyCatalog(in_filename, dir=in_dir)
n = rgc.nobjects
print "Read in ",n," from ",in_filename
# Select the requested subsample of galaxies.
if n_use is not None:
print "Using ",n_use
n = n_use
# Loop over objects.
sn_ellip_gauss = np.zeros(n)
min_var_white = np.zeros(n)
for i in range(n):
if i % 1000 == 0:
print "...",i
# Make the RealGalaxy object.
rg = galsim.RealGalaxy(rgc, index=i)
# First do the test of S/N for original image:
if do_orig:
orig_var = rgc.variance[i] / 0.316 # fudge factor for correlated noise, see Leauthaud et
# al. (2007)
# Try measuring moments to get a flux in the elliptical Gaussian. Then get the SNR.
try:
res = rg.original_image.draw(dx=0.03).FindAdaptiveMom()
aperture_noise = np.sqrt(orig_var*2.*np.pi*(res.moments_sigma**2))
sn_ellip_gauss[i] = res.moments_amp / aperture_noise
except:
sn_ellip_gauss[i] = -10.
# Now make the simulated object, and check the minimum noise variance after whitening.
# First we rescale the size of the object - could significantly change noise properties.
rg.applyDilation(size_factor)
# No need to apply shear/magnification since a small shear or magnification induces minimal
# changes in noise properties. So just convolve with target PSF and draw at target pixel
# scale.
obj = galsim.Convolve(rg, epsf)
im = galsim.ImageF(ps_size, ps_size)
try:
im = obj.draw(dx = pix_scale)
min_var_white[i] = obj.noise.applyWhiteningTo(im)
except:
min_var_white[i] = -10.
# Save results to file.
tbhdu = pyfits.new_table(pyfits.ColDefs([pyfits.Column(name='sn_ellip_gauss',
format='D',
array=sn_ellip_gauss),
pyfits.Column(name='min_var_white',
format='D',
array=min_var_white)]
))
# Carry out a sanity check.
print len(sn_ellip_gauss[sn_ellip_gauss<20.])," have S/N<20"
# Write outputs.
outfile = os.path.join(out_dir, out_filename)
print "Writing to file ",outfile
tbhdu.writeto(outfile, clobber=True)
|
barnabytprowe/great3-public
|
inputs/galdata/training_galaxy_props_real.py
|
Python
|
bsd-3-clause
| 7,274
|
[
"Galaxy",
"Gaussian"
] |
54e12b540dcb242e8bf91851a2909f992d7256bd2e841b808b6fe68ad9a6552b
|
"""Module for evaluating (PATH|MANNER|COMPOUND) classifier."""
__author__ = "Zachary Yocum"
__email__ = "zyocum@brandeis.edu"
import os, warnings
from corpus import *
import numpy as np
from collections import Counter
from scipy.stats.stats import pearsonr
from sklearn import svm, metrics, cross_validation
from sklearn.feature_extraction import DictVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.learning_curve import *
FEATURE_SETS = load(os.path.join('resources', 'features.json'))
SCORES = OrderedDict.fromkeys([
'precision_weighted',
'recall_weighted',
'f1_weighted'
])
WIDTH = max(map(len, SCORES.keys()))
HEADER = '{{:>{}}}'.format(WIDTH)
SCORE = '{{:>{}.3f}}'.format(WIDTH)
class Table(object):
    """A class for working with tabular data.

    The table is a list of rows; the first row is treated as the header.
    All rows must have the same number of columns (checked in dimensions()).
    """
    def __init__(
        self,
        data,
        align='>',
        rowdelim='\n',
        coldelim='\t'
    ):
        # Materialize rows/columns as lists: identical to Python 2 map/zip
        # behavior, but also correct under Python 3 where they are lazy.
        self.data = [list(row) for row in data]
        self.shape = self.dimensions()
        self.height, self.width = self.shape
        self.align = align
        self.rowdelim = rowdelim
        self.coldelim = coldelim
        self.header = self.data[0]
        self.rows = self.data[1:]
        self.columns = list(zip(*self.data))

    def __repr__(self):
        return '<Table rows={} x cols={}>'.format(*self.shape)

    def __str__(self):
        # Pad every cell to its column width using the configured alignment.
        table = []
        widths = self.widths()
        for row in self.data:
            entry = []
            for col, datum in enumerate(row):
                cell = u'{{:{}{}}}'.format(self.align, widths[col])
                entry.append(cell.format(datum))
            table.append(entry)
        return self.rowdelim.join((self.coldelim.join(r) for r in table))

    def dimensions(self):
        """Return (height, width); raise AssertionError on ragged rows."""
        width = max(len(row) for row in self.data)
        for i, row in enumerate(self.data):
            row_width = len(row)
            message = 'Row {} has length {} (should be {})'.format(
                i,
                row_width,
                width
            )
            assert(row_width == width), message
        height = len(self.data)
        return height, width

    def widths(self):
        """Return the maximum cell width of each column (cells must be strings)."""
        return [max(len(datum) for datum in column) for column in self.columns]
def tabulate(args, cell=SCORE, delimiter='\t'):
    """Format each value with *cell*, replace underscores, and join with *delimiter*."""
    formatted = (cell.format(value).replace('_', ' ') for value in args)
    return delimiter.join(formatted)
def scorer(*args):
    # Populate the module-level SCORES mapping using sklearn's named scorers,
    # then print one tabulated row of the current metric values.
    for key in SCORES:
        SCORES[key] = metrics.SCORERS[key](*args)
    print tabulate(SCORES.values())
    # cross_val_score expects a single float; return the last metric
    # (f1_weighted, given the ordered SCORES keys).
    return SCORES[SCORES.keys()[-1]]
# Etymological source languages used as binary features by the classifier.
langs = {
    u'American English',
    u'Anglo-French',
    u'Celtic',
    u'Danish',
    u'Dutch',
    u'English',
    u'Frankish',
    u'French',
    u'Frisian',
    u'Gaelic',
    u'Gaulish',
    u'German',
    u'Germanic',
    u'Gothic',
    u'Greek',
    u'I.E.',
    u'Irish',
    u'Italian',
    u'Late Latin',
    u'Latin',
    u'Lithuanian',
    u'Low German',
    u'Middle Dutch',
    u'Middle English',
    u'Middle French',
    u'Middle Low German',
    u'Modern English',
    u'Old Church Slavonic',
    u'Old English',
    u'Old French',
    u'Old Frisian',
    u'Old High German',
    u'Old Irish',
    u'Old Norse',
    u'Old Saxon',
    u'PIE',
    u'Proto-Germanic',
    u'Russian',
    u'Sanskrit',
    u'Scandinavian',
    u'Scot.',
    u'Scottish',
    u'Spanish',
    u'Swedish',
    u'Vulgar Latin',
    u'West Frisian',
    u'West Germanic'
}

# Motion-verb lemmas that appear in the corpus.
lemmas = {
    u'abandon',
    u'approach',
    u'arrive',
    u'avoid',
    u'bicycle',
    u'bike',
    u'biking',
    u'bring',
    u'clear',
    u'climb',
    u'come',
    u'connect',
    u'continue',
    u'cross',
    u'cycle',
    u'dance',
    u'descend',
    u'detour',
    u'drive',
    u'encounter',
    u'enter',
    u'entering',
    u'entry',
    u'find',
    u'follow',
    u'gather',
    u'go',
    u'head',
    u'hike',
    u'hiking',
    u'join',
    u'lead',
    u'leave',
    u'locate',
    u'loop',
    u'meet',
    u'move',
    u'park',
    u'pass',
    u'passing',
    u'pitch',
    u'reach',
    u'reduce',
    u'remove',
    u'return',
    u'rid',
    u'ride',
    u'rise',
    u'rout',
    u'run',
    u'rush',
    u'search',
    u'slow',
    u'split',
    u'stop',
    u'swimming',
    u'take',
    u'throw',
    u'travel',
    u'traverse',
    u'trip',
    u'turn',
    u'use',
    u'visit',
    u'walk',
    u'way'
}
def main(features):
    """Train and evaluate motion-type classifiers using the given feature prefixes.

    Loads the train/test corpora, keeps only features whose names start with
    one of the selected prefixes, vectorizes, fits each configured model, and
    prints a classification report.
    """
    train_data = load('train.motions.json')
    test_data = load('test.motions.json')
    all_data = train_data + test_data
    # Drop any feature not matching a selected prefix.  Safe to pop while
    # iterating here only because Python 2's dict.keys() returns a list copy.
    for datum in all_data:
        for feature in datum.keys():
            selected = False
            for selected_feature in features:
                if feature.startswith(selected_feature):
                    selected = True
            if not selected:
                datum.pop(feature)
    # The gold label is removed from the feature dicts before vectorization.
    all_samples = [td.pop('motion_type') for td in all_data]
    train, test = all_data[:len(train_data)], all_data[len(train_data):]
    train_samples = all_samples[:len(train_data)]
    test_samples = all_samples[len(train_data):]
    # Vectorize train+test together so both share one feature space.
    vectorizer = DictVectorizer()
    features = vectorizer.fit_transform(all_data).toarray()
    print 'Features vector shape (rows x columns):', '{} x {}'.format(*features.shape)
    train_features = features[:len(train_data)]
    test_features = features[len(train_data):]
    c = 1.0
    iterations = 10 ** 6
    # Equal class weights for the three motion types.
    weights = {
        'MANNER' : c / 3,
        'PATH' : c / 3,
        'COMPOUND' : c / 3
    }
    lin_svc = svm.LinearSVC(
        tol=10 ** -6,
        dual=False,
        multi_class='crammer_singer',
        max_iter=iterations,
        class_weight=weights,
        C=c
    )
    degree = 2
    poly_svc = svm.SVC(
        kernel='poly',
        degree=degree,
        C=c
    )
    logistic_regression = LogisticRegression(
        multi_class='multinomial',
        max_iter=iterations,
        solver='lbfgs',
        class_weight=weights,
        dual=False,
        C=c
    )
    # Only the linear SVM is currently enabled; the others are kept for
    # experimentation (see the commented-out entries).
    models = OrderedDict({
        #'Degree {} Polynomial SVM'.format(degree) : poly_svc,
        #'Logistic Regression' : logistic_regression,
        'Linear SVM' : lin_svc,
    })
    fold = 10
    for label, model in models.iteritems():
        print 'Model={}'.format(label)
        print 'Train:Test::{}:{}'.format(*map(len, (train, test)))
        model.fit(train_features, train_samples)
        reference = test_samples
        predicted = model.predict(test_features)
        labels = list(set(train_samples + test_samples))
        print metrics.classification_report(
            reference,
            predicted,
            target_names=labels
        )
        # The sections below are deliberately retained, disabled analyses.
        ################################################################################
        # Cross Validation
        ################################################################################
        #print 'Train:Test={}-fold cross-validation'.format(label, fold)
        #model.fit(features, all_samples)
        #print tabulate(SCORES.keys(), cell=HEADER)
        #scores = cross_validation.cross_val_score(
        #    model,
        #    features,
        #    all_samples,
        #    cv=fold,
        #    scoring=scorer
        #)
        ################################################################################
        # Error Analysis
        ################################################################################
        #motions = []
        #for datum in test_data:
        #    text = []
        #    text.append(datum.get('word[-5]', u''))
        #    text.append(datum.get('word[-4]', u''))
        #    text.append(datum.get('word[-3]', u''))
        #    text.append(datum.get('word[-2]', u''))
        #    text.append(datum.get('word[-1]', u''))
        #    text.append('[{}]'.format(datum.get('word[0]', u'')))
        #    text.append(datum.get('word[1]', u''))
        #    text.append(datum.get('word[2]', u''))
        #    text.append(datum.get('word[3]', u''))
        #    text.append(datum.get('word[4]', u''))
        #    text.append(datum.get('word[5]', u''))
        #    motions.append(u' '.join(filter(None, text)))
        #
        #header = ('text', 'reference', 'predicted')
        #comparisons = zip(motions, test_samples, predicted)
        #label_width = max(map(len, set(list(test_samples) + list(predicted))))
        #motion_width = max(map(len, motions))
        #for comparison in comparisons:
        #    motion, ref, pred = comparison
        #    motion = u'{{:<{}}}'.format(motion_width).format(motion)
        #    ref = u'{{:<{}}}'.format(label_width).format(ref)
        #    pred = u'{{:<{}}}'.format(label_width).format(pred)
        #    print u'\t'.join((motion, ref, pred))
        ################################################################################
        # Sparse Feature Table
        ################################################################################
        #label_dict = OrderedDict(
        #    (v,k) for k,v in enumerate(sorted(set(test_samples)))
        #)
        #lang_dict = OrderedDict(
        #    (v,k) for k,v in enumerate(sorted(langs))
        #)
        #
        #a = np.zeros((len(lang_dict), len(label_dict)))
        #lines = []
        #for datum, label in zip(test_data, test_samples):
        #    line = []
        #    line.append(datum.get('word[0]') or u'!!!')
        #    line.append(datum.get('pos[0]') or u'!!!')
        #    line.append(datum.get('lemma') or u'!!!')
        #    for lang in sorted(langs):
        #        if datum.get(lang):
        #            line.append(u'+')
        #            row = lang_dict[lang]
        #            col = label_dict[label]
        #            a[row,col] += 1
        #        else:
        #            line.append(u'')
        #    line.append(label)
        #    lines.append(line)
        #header = ['Word', 'POS', 'Lemma'] + sorted(langs) + ['Reference Label']
        #print u' & ' .join(header)
        #t = Table(lines, rowdelim='\\\\\n', coldelim=u' & ', align='')
        ##print repr(t)
        #print t
        #print u',' + u','.join(sorted(set(test_samples)))
        #for i, row in enumerate(a):
        #    print u','.join(map(str, [lang_dict.keys()[i]] + np.divide(a[i], a[i].sum()).tolist()))
        ################################################################################
        # Correlation Coefficients
        ################################################################################
        #label_dict = OrderedDict.fromkeys(sorted(set(test_samples)))
        #for key in label_dict:
        #    label_dict[key] = []
        #
        #lang_dict = OrderedDict.fromkeys(sorted(langs))
        #for key in lang_dict:
        #    lang_dict[key] = []
        #
        #for datum, sample in zip(test_data, test_samples):
        #    for lang in lang_dict.keys():
        #        lang_dict[lang].append(datum.get(lang, 0))
        #    for label in label_dict.keys():
        #        label_dict[label].append(sample==label)
        #
        #a = np.zeros((len(lang_dict), len(label_dict)))
        #for i, lang in enumerate(lang_dict.keys()):
        #    for j, label in enumerate(label_dict.keys()):
        #        x, y = (lang_dict[lang],label_dict[label])
        #        correlation = np.corrcoef(x, y)[0, 1]
        #        a[i,j] = correlation
        #print u',' + u','.join(sorted(set(test_samples)))
        #for i, row in enumerate(a):
        #    coefs = [u'{:1.2f}'.format(f) for f in a[i].tolist()]
        #    print u','.join(map(str, [lang_dict.keys()[i]] + coefs))
        ################################################################################
        # Motion-type Counts
        ################################################################################
        #all_counts = Counter(all_samples)
        #train_counts = Counter(train_samples)
        #test_counts = Counter(test_samples)
        #for counts in (train_counts, test_counts, all_counts):
        #    print counts, sum(counts.values())
        ################################################################################
        # Learning Curve
        ################################################################################
        #lc = learning_curve(model, features, all_samples)
        #train_sizes, train_scores, test_scores = lc
if __name__ == '__main__':
    # Evaluate each configured feature set, shortest label first.
    for label in sorted(FEATURE_SETS.keys(), key=len):
        features = FEATURE_SETS[label]
        print '-' * 80
        print 'Features={}'.format(label)
        # Suppress library warnings so the printed reports stay readable.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            main(features)
|
zyocum/motion-type-classifier
|
eval.py
|
Python
|
mit
| 12,561
|
[
"VisIt"
] |
14a74d0bca5348a027cf83997876aa5ef7aded36774a6f36bd7c3718ed7695f3
|
# -*- coding: utf-8 -*-
""" Tests for student account views. """
import logging
import re
from unittest import skipUnless
from urllib import urlencode
import mock
import ddt
from django.conf import settings
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.messages.middleware import MessageMiddleware
from django.test import TestCase
from django.test.utils import override_settings
from django.http import HttpRequest
from edx_oauth2_provider.tests.factories import ClientFactory, AccessTokenFactory, RefreshTokenFactory
from edx_rest_api_client import exceptions
from nose.plugins.attrib import attr
from oauth2_provider.models import (
AccessToken as dot_access_token,
RefreshToken as dot_refresh_token
)
from provider.oauth2.models import (
AccessToken as dop_access_token,
RefreshToken as dop_refresh_token
)
from testfixtures import LogCapture
from commerce.models import CommerceConfiguration
from commerce.tests import factories
from commerce.tests.mocks import mock_get_orders
from course_modes.models import CourseMode
from edxmako.shortcuts import render_to_response
from openedx.core.djangoapps.oauth_dispatch.tests import factories as dot_factories
from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin
from openedx.core.djangoapps.user_api.accounts.api import activate_account, create_account
from openedx.core.djangoapps.user_api.accounts import EMAIL_MAX_LENGTH
from openedx.core.djangolib.js_utils import dump_js_escaped_json
from openedx.core.djangoapps.site_configuration.tests.mixins import SiteMixin
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
from student.tests.factories import UserFactory
from student_account.views import account_settings_context, get_user_orders
from third_party_auth.tests.testutil import simulate_running_pipeline, ThirdPartyAuthTestMixin
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from openedx.core.djangoapps.theming.tests.test_util import with_comprehensive_theme_context
LOGGER_NAME = 'audit'
User = get_user_model() # pylint:disable=invalid-name
@ddt.ddt
class StudentAccountUpdateTest(CacheIsolationTestCase, UrlResetMixin):
    """ Tests for the student account views that update the user's account information. """

    # Credentials for the account created in setUp; the non-ASCII passwords
    # exercise unicode handling in the password-change flow.
    USERNAME = u"heisenberg"
    ALTERNATE_USERNAME = u"walt"
    OLD_PASSWORD = u"ḅḷüëṡḳÿ"
    NEW_PASSWORD = u"🄱🄸🄶🄱🄻🅄🄴"
    OLD_EMAIL = u"walter@graymattertech.com"
    NEW_EMAIL = u"walt@savewalterwhite.com"

    # Number of consecutive bad requests used to trip the rate limiter.
    INVALID_ATTEMPTS = 100

    INVALID_EMAILS = [
        None,
        u"",
        u"a",
        "no_domain",
        "no+domain",
        "@",
        "@domain.com",
        "test@no_extension",

        # Long email -- subtract the length of the @domain
        # except for one character (so we exceed the max length limit)
        u"{user}@example.com".format(
            user=(u'e' * (EMAIL_MAX_LENGTH - 11))
        )
    ]

    INVALID_KEY = u"123abc"

    URLCONF_MODULES = ['student_accounts.urls']

    ENABLED_CACHES = ['default']
def setUp(self):
    """Create, activate, and log in the test account used by every test."""
    super(StudentAccountUpdateTest, self).setUp()

    # Create/activate a new account
    activation_key = create_account(self.USERNAME, self.OLD_PASSWORD, self.OLD_EMAIL)
    activate_account(activation_key)

    # Login
    result = self.client.login(username=self.USERNAME, password=self.OLD_PASSWORD)
    self.assertTrue(result)
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in LMS')
def test_password_change(self):
    """End-to-end password reset: request, email link, confirm, re-login,
    and verify the reset link cannot be reused."""
    # Request a password change while logged in, simulating
    # use of the password reset link from the account page
    response = self._change_password()
    self.assertEqual(response.status_code, 200)

    # Check that an email was sent
    self.assertEqual(len(mail.outbox), 1)

    # Retrieve the activation link from the email body
    email_body = mail.outbox[0].body
    result = re.search(r'(?P<url>https?://[^\s]+)', email_body)
    self.assertIsNot(result, None)
    activation_link = result.group('url')

    # Visit the activation link
    response = self.client.get(activation_link)
    self.assertEqual(response.status_code, 200)

    # Submit a new password and follow the redirect to the success page
    response = self.client.post(
        activation_link,
        # These keys are from the form on the current password reset confirmation page.
        {'new_password1': self.NEW_PASSWORD, 'new_password2': self.NEW_PASSWORD},
        follow=True
    )
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, "Your password has been reset.")

    # Log the user out to clear session data
    self.client.logout()

    # Verify that the new password can be used to log in
    result = self.client.login(username=self.USERNAME, password=self.NEW_PASSWORD)
    self.assertTrue(result)

    # Try reusing the activation link to change the password again
    # Visit the activation link again.
    response = self.client.get(activation_link)
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, "This password reset link is invalid. It may have been used already.")

    self.client.logout()

    # Verify that the old password cannot be used to log in
    result = self.client.login(username=self.USERNAME, password=self.OLD_PASSWORD)
    self.assertFalse(result)

    # Verify that the new password continues to be valid
    result = self.client.login(username=self.USERNAME, password=self.NEW_PASSWORD)
    self.assertTrue(result)
@ddt.data(True, False)
def test_password_change_logged_out(self, send_email):
    """A logged-out reset request succeeds with an email and 400s without one."""
    # Log the user out
    self.client.logout()

    # Request a password change while logged out, simulating
    # use of the password reset link from the login page
    if send_email:
        response = self._change_password(email=self.OLD_EMAIL)
        self.assertEqual(response.status_code, 200)
    else:
        # Don't send an email in the POST data, simulating
        # its (potentially accidental) omission in the POST
        # data sent from the login page
        response = self._change_password()
        self.assertEqual(response.status_code, 400)
def test_access_token_invalidation_logged_out(self):
    """A password reset requested while logged out destroys OAuth tokens."""
    self.client.logout()
    user = User.objects.get(email=self.OLD_EMAIL)
    # Create both django-oauth-provider and django-oauth-toolkit tokens.
    self._create_dop_tokens(user)
    self._create_dot_tokens(user)
    response = self._change_password(email=self.OLD_EMAIL)
    self.assertEqual(response.status_code, 200)
    self.assert_access_token_destroyed(user)
def test_access_token_invalidation_logged_in(self):
    """A password reset requested while logged in destroys OAuth tokens."""
    user = User.objects.get(email=self.OLD_EMAIL)
    # Create both django-oauth-provider and django-oauth-toolkit tokens.
    self._create_dop_tokens(user)
    self._create_dot_tokens(user)
    response = self._change_password()
    self.assertEqual(response.status_code, 200)
    self.assert_access_token_destroyed(user)
def test_password_change_inactive_user(self):
    """A reset request for a not-yet-activated account still sends an email."""
    # Log out the user created during test setup
    self.client.logout()

    # Create a second user, but do not activate it
    create_account(self.ALTERNATE_USERNAME, self.OLD_PASSWORD, self.NEW_EMAIL)

    # Send the view the email address tied to the inactive user
    response = self._change_password(email=self.NEW_EMAIL)

    # Expect that the activation email is still sent,
    # since the user may have lost the original activation email.
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(mail.outbox), 1)
def test_password_change_no_user(self):
    """An unknown email still returns 200 (no account enumeration) but logs the attempt."""
    # Log out the user created during test setup
    self.client.logout()

    with LogCapture(LOGGER_NAME, level=logging.INFO) as logger:
        # Send the view an email address not tied to any user
        response = self._change_password(email=self.NEW_EMAIL)
        self.assertEqual(response.status_code, 200)

    logger.check((LOGGER_NAME, 'INFO', 'Invalid password reset attempt'))
    def test_password_change_rate_limited(self):
        """Repeated bad password-change requests trigger the rate limiter (HTTP 403)."""
        # Log out the user created during test setup, to prevent the view from
        # selecting the logged-in user's email address over the email provided
        # in the POST data
        self.client.logout()
        # Make many consecutive bad requests in an attempt to trigger the rate limiter
        for __ in xrange(self.INVALID_ATTEMPTS):
            self._change_password(email=self.NEW_EMAIL)
        # The next request past the threshold must be rejected.
        response = self._change_password(email=self.NEW_EMAIL)
        self.assertEqual(response.status_code, 403)
    @ddt.data(
        ('post', 'password_change_request', []),
    )
    @ddt.unpack
    def test_require_http_method(self, correct_method, url_name, args):
        """The endpoint rejects every common HTTP method except the designated one with 405."""
        # Build the complement of the allowed method within the common verbs,
        # then exercise each disallowed method against the URL.
        wrong_methods = {'get', 'put', 'post', 'head', 'options', 'delete'} - {correct_method}
        url = reverse(url_name, args=args)
        for method in wrong_methods:
            response = getattr(self.client, method)(url)
            self.assertEqual(response.status_code, 405)
def _change_password(self, email=None):
"""Request to change the user's password. """
data = {}
if email:
data['email'] = email
return self.client.post(path=reverse('password_change_request'), data=data)
    def _create_dop_tokens(self, user=None):
        """Create dop access token for given user if user provided else for default user."""
        if not user:
            user = User.objects.get(email=self.OLD_EMAIL)
        # django-oauth-provider (dop) tokens: an access token plus its paired
        # refresh token, both tied to the same OAuth client.
        client = ClientFactory()
        access_token = AccessTokenFactory(user=user, client=client)
        RefreshTokenFactory(user=user, client=client, access_token=access_token)
    def _create_dot_tokens(self, user=None):
        """Create dot access token for given user if user provided else for default user."""
        if not user:
            user = User.objects.get(email=self.OLD_EMAIL)
        # django-oauth-toolkit (dot) tokens: an application, an access token,
        # and the paired refresh token.
        application = dot_factories.ApplicationFactory(user=user)
        access_token = dot_factories.AccessTokenFactory(user=user, application=application)
        dot_factories.RefreshTokenFactory(user=user, application=application, access_token=access_token)
    def assert_access_token_destroyed(self, user):
        """Assert all access tokens are destroyed."""
        # Both django-oauth-toolkit (dot) and django-oauth-provider (dop)
        # access and refresh tokens must be gone for this user.
        self.assertFalse(dot_access_token.objects.filter(user=user).exists())
        self.assertFalse(dot_refresh_token.objects.filter(user=user).exists())
        self.assertFalse(dop_access_token.objects.filter(user=user).exists())
        self.assertFalse(dop_refresh_token.objects.filter(user=user).exists())
@attr(shard=3)
@ddt.ddt
class StudentAccountLoginAndRegistrationTest(ThirdPartyAuthTestMixin, UrlResetMixin, ModuleStoreTestCase):
    """ Tests for the student account views that update the user's account information. """

    USERNAME = "bob"
    EMAIL = "bob@example.com"
    PASSWORD = "password"
    URLCONF_MODULES = ['openedx.core.djangoapps.embargo']

    @mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
    def setUp(self):
        super(StudentAccountLoginAndRegistrationTest, self).setUp()

        # Several third party auth providers are created for these tests:
        self.google_provider = self.configure_google_provider(enabled=True, visible=True)
        self.configure_facebook_provider(enabled=True, visible=True)
        self.configure_dummy_provider(
            visible=True,
            enabled=True,
            icon_class='',
            icon_image=SimpleUploadedFile('icon.svg', '<svg><rect width="50" height="100"/></svg>'),
        )
        # Enabled-but-hidden and fully-disabled providers are used by the
        # tpa_hint tests below.
        self.hidden_enabled_provider = self.configure_linkedin_provider(
            visible=False,
            enabled=True,
        )
        self.hidden_disabled_provider = self.configure_azure_ad_provider()

    @ddt.data(
        ("signin_user", "login"),
        ("register_user", "register"),
    )
    @ddt.unpack
    def test_login_and_registration_form(self, url_name, initial_mode):
        """Each logistration URL renders the combined form in the expected initial mode."""
        response = self.client.get(reverse(url_name))
        expected_data = '"initial_mode": "{mode}"'.format(mode=initial_mode)
        self.assertContains(response, expected_data)

    @ddt.data("signin_user", "register_user")
    def test_login_and_registration_form_already_authenticated(self, url_name):
        """An already-authenticated user is redirected to the dashboard."""
        # Create/activate a new account and log in
        activation_key = create_account(self.USERNAME, self.PASSWORD, self.EMAIL)
        activate_account(activation_key)
        result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
        self.assertTrue(result)

        # Verify that we're redirected to the dashboard
        response = self.client.get(reverse(url_name))
        self.assertRedirects(response, reverse("dashboard"))

    @ddt.data(
        (None, "signin_user"),
        (None, "register_user"),
        ("edx.org", "signin_user"),
        ("edx.org", "register_user"),
    )
    @ddt.unpack
    def test_login_and_registration_form_signin_preserves_params(self, theme, url_name):
        """Querystring params (enrollment intent, next URL) survive into the sign-in link."""
        params = [
            ('course_id', 'edX/DemoX/Demo_Course'),
            ('enrollment_action', 'enroll'),
        ]

        # The response should have a "Sign In" button with the URL
        # that preserves the querystring params
        with with_comprehensive_theme_context(theme):
            response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")

        expected_url = '/login?{}'.format(self._finish_auth_url_param(params + [('next', '/dashboard')]))
        self.assertContains(response, expected_url)

        # Add additional parameters:
        params = [
            ('course_id', 'edX/DemoX/Demo_Course'),
            ('enrollment_action', 'enroll'),
            ('course_mode', CourseMode.DEFAULT_MODE_SLUG),
            ('email_opt_in', 'true'),
            ('next', '/custom/final/destination')
        ]

        # Verify that this parameter is also preserved
        with with_comprehensive_theme_context(theme):
            response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")

        expected_url = '/login?{}'.format(self._finish_auth_url_param(params))
        self.assertContains(response, expected_url)

    @mock.patch.dict(settings.FEATURES, {"ENABLE_THIRD_PARTY_AUTH": False})
    @ddt.data("signin_user", "register_user")
    def test_third_party_auth_disabled(self, url_name):
        """With the feature flag off, no provider data is rendered into the page."""
        response = self.client.get(reverse(url_name))
        self._assert_third_party_auth_data(response, None, None, [], None)

    @mock.patch('student_account.views.enterprise_customer_for_request')
    @ddt.data(
        ("signin_user", None, None, None),
        ("register_user", None, None, None),
        ("signin_user", "google-oauth2", "Google", None),
        ("register_user", "google-oauth2", "Google", None),
        ("signin_user", "facebook", "Facebook", None),
        ("register_user", "facebook", "Facebook", None),
        ("signin_user", "dummy", "Dummy", None),
        ("register_user", "dummy", "Dummy", None),
        (
            "signin_user",
            "google-oauth2",
            "Google",
            {
                'name': 'FakeName',
                'logo': 'https://host.com/logo.jpg',
                'welcome_msg': 'No message'
            }
        )
    )
    @ddt.unpack
    def test_third_party_auth(
            self,
            url_name,
            current_backend,
            current_provider,
            expected_enterprise_customer_mock_attrs,
            enterprise_customer_mock
    ):
        """Provider data rendered into the page reflects the running pipeline
        (if any) and is suppressed when an EnterpriseCustomer is present."""
        params = [
            ('course_id', 'course-v1:Org+Course+Run'),
            ('enrollment_action', 'enroll'),
            ('course_mode', CourseMode.DEFAULT_MODE_SLUG),
            ('email_opt_in', 'true'),
            ('next', '/custom/final/destination'),
        ]

        if expected_enterprise_customer_mock_attrs:
            expected_ec = mock.MagicMock(
                branding_configuration=mock.MagicMock(
                    logo=mock.MagicMock(
                        url=expected_enterprise_customer_mock_attrs['logo']
                    ),
                    welcome_message=expected_enterprise_customer_mock_attrs['welcome_msg']
                )
            )
            # 'name' is a reserved MagicMock constructor kwarg, so set it after.
            expected_ec.name = expected_enterprise_customer_mock_attrs['name']
        else:
            expected_ec = None

        enterprise_customer_mock.return_value = expected_ec

        # Simulate a running pipeline
        if current_backend is not None:
            pipeline_target = "student_account.views.third_party_auth.pipeline"
            with simulate_running_pipeline(pipeline_target, current_backend):
                response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")
        # Do NOT simulate a running pipeline
        else:
            response = self.client.get(reverse(url_name), params, HTTP_ACCEPT="text/html")

        # This relies on the THIRD_PARTY_AUTH configuration in the test settings
        expected_providers = [
            {
                "id": "oa2-dummy",
                "name": "Dummy",
                "iconClass": None,
                "iconImage": settings.MEDIA_URL + "icon.svg",
                "loginUrl": self._third_party_login_url("dummy", "login", params),
                "registerUrl": self._third_party_login_url("dummy", "register", params)
            },
            {
                "id": "oa2-facebook",
                "name": "Facebook",
                "iconClass": "fa-facebook",
                "iconImage": None,
                "loginUrl": self._third_party_login_url("facebook", "login", params),
                "registerUrl": self._third_party_login_url("facebook", "register", params)
            },
            {
                "id": "oa2-google-oauth2",
                "name": "Google",
                "iconClass": "fa-google-plus",
                "iconImage": None,
                "loginUrl": self._third_party_login_url("google-oauth2", "login", params),
                "registerUrl": self._third_party_login_url("google-oauth2", "register", params)
            },
        ]
        self._assert_third_party_auth_data(
            response,
            current_backend,
            current_provider,
            expected_providers,
            expected_ec
        )

    def test_hinted_login(self):
        """A tpa_hint in the next URL surfaces only for enabled providers."""
        params = [("next", "/courses/something/?tpa_hint=oa2-google-oauth2")]
        response = self.client.get(reverse('signin_user'), params, HTTP_ACCEPT="text/html")
        self.assertContains(response, '"third_party_auth_hint": "oa2-google-oauth2"')

        # A hidden but enabled provider can still be hinted explicitly.
        tpa_hint = self.hidden_enabled_provider.provider_id
        params = [("next", "/courses/something/?tpa_hint={0}".format(tpa_hint))]
        response = self.client.get(reverse('signin_user'), params, HTTP_ACCEPT="text/html")
        self.assertContains(response, '"third_party_auth_hint": "{0}"'.format(tpa_hint))

        # A disabled provider's hint must never leak into the page.
        # BUG FIX: assertNotIn takes (member, container); the original call had
        # the arguments swapped, which made the check vacuously pass.
        tpa_hint = self.hidden_disabled_provider.provider_id
        params = [("next", "/courses/something/?tpa_hint={0}".format(tpa_hint))]
        response = self.client.get(reverse('signin_user'), params, HTTP_ACCEPT="text/html")
        self.assertNotIn(tpa_hint, response.content)

    def test_hinted_login_dialog_disabled(self):
        """Test that the dialog doesn't show up for hinted logins when disabled. """
        self.google_provider.skip_hinted_login_dialog = True
        self.google_provider.save()
        params = [("next", "/courses/something/?tpa_hint=oa2-google-oauth2")]
        response = self.client.get(reverse('signin_user'), params, HTTP_ACCEPT="text/html")
        self.assertRedirects(
            response,
            'auth/login/google-oauth2/?auth_entry=login&next=%2Fcourses%2Fsomething%2F%3Ftpa_hint%3Doa2-google-oauth2',
            target_status_code=302
        )

    @mock.patch('student_account.views.enterprise_customer_for_request')
    @ddt.data(
        ('signin_user', False, None, None, None),
        ('register_user', False, None, None, None),
        ('signin_user', True, 'Fake EC', 'http://logo.com/logo.jpg', u'{enterprise_name} - {platform_name}'),
        ('register_user', True, 'Fake EC', 'http://logo.com/logo.jpg', u'{enterprise_name} - {platform_name}'),
        ('signin_user', True, 'Fake EC', None, u'{enterprise_name} - {platform_name}'),
        ('register_user', True, 'Fake EC', None, u'{enterprise_name} - {platform_name}'),
        ('signin_user', True, 'Fake EC', 'http://logo.com/logo.jpg', None),
        ('register_user', True, 'Fake EC', 'http://logo.com/logo.jpg', None),
        ('signin_user', True, 'Fake EC', None, None),
        ('register_user', True, 'Fake EC', None, None),
    )
    @ddt.unpack
    def test_enterprise_register(self, url_name, ec_present, ec_name, logo_url, welcome_message, mock_get_ec):
        """
        Verify that when an EnterpriseCustomer is received on the login and register views,
        the appropriate sidebar is rendered.
        """
        if ec_present:
            mock_ec = mock_get_ec.return_value
            mock_ec.name = ec_name
            if logo_url:
                mock_ec.branding_configuration.logo.url = logo_url
            else:
                mock_ec.branding_configuration.logo = None
            if welcome_message:
                mock_ec.branding_configuration.welcome_message = welcome_message
            else:
                # Deleting the attribute makes the MagicMock raise
                # AttributeError, simulating a missing welcome message.
                del mock_ec.branding_configuration.welcome_message
        else:
            mock_get_ec.return_value = None

        response = self.client.get(reverse(url_name), HTTP_ACCEPT="text/html")

        enterprise_sidebar_div_id = u'enterprise-content-container'

        if not ec_present:
            self.assertNotContains(response, text=enterprise_sidebar_div_id)
        else:
            self.assertContains(response, text=enterprise_sidebar_div_id)
            if not welcome_message:
                welcome_message = settings.ENTERPRISE_SPECIFIC_BRANDED_WELCOME_TEMPLATE
            expected_message = welcome_message.format(
                start_bold=u'<b>',
                end_bold=u'</b>',
                enterprise_name=ec_name,
                platform_name=settings.PLATFORM_NAME
            )
            self.assertContains(response, expected_message)
            if logo_url:
                self.assertContains(response, logo_url)

    @override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
    def test_microsite_uses_old_login_page(self):
        """A microsite domain is served the legacy login page."""
        # Retrieve the login page from a microsite domain
        # and verify that we're served the old page.
        resp = self.client.get(
            reverse("signin_user"),
            HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
        )
        self.assertContains(resp, "Log into your Test Site Account")
        self.assertContains(resp, "login-form")

    def test_microsite_uses_old_register_page(self):
        """A microsite domain is served the legacy registration page."""
        # Retrieve the register page from a microsite domain
        # and verify that we're served the old page.
        resp = self.client.get(
            reverse("register_user"),
            HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
        )
        self.assertContains(resp, "Register for Test Site")
        self.assertContains(resp, "register-form")

    def test_login_registration_xframe_protected(self):
        """Framing is denied by default and allowed once an LTI provider matches the referer host."""
        resp = self.client.get(
            reverse("register_user"),
            {},
            HTTP_REFERER="http://localhost/iframe"
        )
        self.assertEqual(resp['X-Frame-Options'], 'DENY')

        self.configure_lti_provider(name='Test', lti_hostname='localhost', lti_consumer_key='test_key', enabled=True)
        resp = self.client.get(
            reverse("register_user"),
            HTTP_REFERER="http://localhost/iframe"
        )
        self.assertEqual(resp['X-Frame-Options'], 'ALLOW')

    def _assert_third_party_auth_data(self, response, current_backend, current_provider, providers, expected_ec):
        """Verify that third party auth info is rendered correctly in a DOM data attribute. """
        finish_auth_url = None
        if current_backend:
            finish_auth_url = reverse("social:complete", kwargs={"backend": current_backend}) + "?"
        auth_info = {
            "currentProvider": current_provider,
            "providers": providers,
            "secondaryProviders": [],
            "finishAuthUrl": finish_auth_url,
            "errorMessage": None,
        }
        if expected_ec is not None:
            # If we set an EnterpriseCustomer, third-party auth providers ought to be hidden.
            auth_info['providers'] = []
        auth_info = dump_js_escaped_json(auth_info)

        expected_data = '"third_party_auth": {auth_info}'.format(
            auth_info=auth_info
        )
        self.assertContains(response, expected_data)

    def _third_party_login_url(self, backend_name, auth_entry, login_params):
        """Construct the login URL to start third party authentication. """
        return u"{url}?auth_entry={auth_entry}&{param_str}".format(
            url=reverse("social:begin", kwargs={"backend": backend_name}),
            auth_entry=auth_entry,
            param_str=self._finish_auth_url_param(login_params),
        )

    def _finish_auth_url_param(self, params):
        """
        Make the next=... URL parameter that indicates where the user should go next.

        >>> _finish_auth_url_param([('next', '/dashboard')])
        '/account/finish_auth?next=%2Fdashboard'
        """
        return urlencode({
            'next': '/account/finish_auth?{}'.format(urlencode(params))
        })

    def test_english_by_default(self):
        """With no Accept-Language header, content is served in English."""
        response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html")
        self.assertEqual(response['Content-Language'], 'en')

    def test_unsupported_language(self):
        """An unsupported Accept-Language falls back to English."""
        response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html", HTTP_ACCEPT_LANGUAGE="ts-zx")
        self.assertEqual(response['Content-Language'], 'en')

    def test_browser_language(self):
        """A bare 'es' request maps to the supported 'es-419' variant."""
        response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html", HTTP_ACCEPT_LANGUAGE="es")
        self.assertEqual(response['Content-Language'], 'es-419')

    def test_browser_language_dialent(self):
        """A specific dialect ('es-es') is honored as-is.
        (NOTE: "dialent" is a long-standing typo for "dialect"; the name is
        kept so test-selection by name keeps working.)"""
        response = self.client.get(reverse('signin_user'), [], HTTP_ACCEPT="text/html", HTTP_ACCEPT_LANGUAGE="es-es")
        self.assertEqual(response['Content-Language'], 'es-es')
class AccountSettingsViewTest(ThirdPartyAuthTestMixin, TestCase, ProgramsApiConfigMixin):
    """ Tests for the account settings view. """
    USERNAME = 'student'
    PASSWORD = 'password'
    # Field names that must be rendered by the account settings page/context.
    FIELDS = [
        'country',
        'gender',
        'language',
        'level_of_education',
        'password',
        'year_of_birth',
        'preferred_language',
        'time_zone',
    ]
    @mock.patch("django.conf.settings.MESSAGE_STORAGE", 'django.contrib.messages.storage.cookie.CookieStorage')
    def setUp(self):
        super(AccountSettingsViewTest, self).setUp()
        self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
        CommerceConfiguration.objects.create(cache_ttl=10, enabled=True)
        self.client.login(username=self.USERNAME, password=self.PASSWORD)
        # A bare HttpRequest is used for context-building tests that bypass
        # the test client.
        self.request = HttpRequest()
        self.request.user = self.user
        # For these tests, two third party auth providers are enabled by default:
        self.configure_google_provider(enabled=True, visible=True)
        self.configure_facebook_provider(enabled=True, visible=True)
        # Python-social saves auth failure notifications in Django messages.
        # See pipeline.get_duplicate_provider() for details.
        self.request.COOKIES = {}
        MessageMiddleware().process_request(self.request)
        messages.error(self.request, 'Facebook is already in use.', extra_tags='Auth facebook')
    def test_context(self):
        # Build the template context directly and verify its API URLs,
        # visible fields, and the duplicate-provider info seeded in setUp.
        context = account_settings_context(self.request)
        user_accounts_api_url = reverse("accounts_api", kwargs={'username': self.user.username})
        self.assertEqual(context['user_accounts_api_url'], user_accounts_api_url)
        user_preferences_api_url = reverse('preferences_api', kwargs={'username': self.user.username})
        self.assertEqual(context['user_preferences_api_url'], user_preferences_api_url)
        for attribute in self.FIELDS:
            self.assertIn(attribute, context['fields'])
        self.assertEqual(
            context['user_accounts_api_url'], reverse("accounts_api", kwargs={'username': self.user.username})
        )
        self.assertEqual(
            context['user_preferences_api_url'], reverse('preferences_api', kwargs={'username': self.user.username})
        )
        self.assertEqual(context['duplicate_provider'], 'facebook')
        self.assertEqual(context['auth']['providers'][0]['name'], 'Facebook')
        self.assertEqual(context['auth']['providers'][1]['name'], 'Google')
    def test_view(self):
        """
        Test that all fields are visible
        """
        view_path = reverse('account_settings')
        response = self.client.get(path=view_path)
        for attribute in self.FIELDS:
            self.assertIn(attribute, response.content)
    def test_header_with_programs_listing_enabled(self):
        """
        Verify that tabs header will be shown while program listing is enabled.
        """
        self.create_programs_config()
        view_path = reverse('account_settings')
        response = self.client.get(path=view_path)
        self.assertContains(response, '<li class="tab-nav-item">')
    def test_header_with_programs_listing_disabled(self):
        """
        Verify that nav header will be shown while program listing is disabled.
        """
        self.create_programs_config(enabled=False)
        view_path = reverse('account_settings')
        response = self.client.get(path=view_path)
        self.assertContains(response, '<li class="item nav-global-01">')
    def test_commerce_order_detail(self):
        """
        Verify that get_user_orders returns the correct order data.
        """
        with mock_get_orders():
            order_detail = get_user_orders(self.user)
        # Compare each returned order against the canned response shipped
        # with the mock_get_orders helper.
        for i, order in enumerate(mock_get_orders.default_response['results']):
            expected = {
                'number': order['number'],
                'price': order['total_excl_tax'],
                'order_date': 'Jan 01, 2016',
                'receipt_url': '/checkout/receipt/?order_number=' + order['number'],
                'lines': order['lines'],
            }
            self.assertEqual(order_detail[i], expected)
    def test_commerce_order_detail_exception(self):
        # An HTTP 404 from the commerce service yields an empty order list
        # rather than propagating the exception.
        with mock_get_orders(exception=exceptions.HttpNotFoundError):
            order_detail = get_user_orders(self.user)
        self.assertEqual(order_detail, [])
    def test_incomplete_order_detail(self):
        # Orders that are not complete must be filtered out of the history.
        response = {
            'results': [
                factories.OrderFactory(
                    status='Incomplete',
                    lines=[
                        factories.OrderLineFactory(
                            product=factories.ProductFactory(attribute_values=[factories.ProductAttributeFactory()])
                        )
                    ]
                )
            ]
        }
        with mock_get_orders(response=response):
            order_detail = get_user_orders(self.user)
        self.assertEqual(order_detail, [])
    def test_order_history_with_no_product(self):
        # Order lines with no product are skipped, but the order itself
        # still appears when another line has a product.
        response = {
            'results': [
                factories.OrderFactory(
                    lines=[
                        factories.OrderLineFactory(
                            product=None
                        ),
                        factories.OrderLineFactory(
                            product=factories.ProductFactory(attribute_values=[factories.ProductAttributeFactory(
                                name='certificate_type',
                                value='verified'
                            )])
                        )
                    ]
                )
            ]
        }
        with mock_get_orders(response=response):
            order_detail = get_user_orders(self.user)
        self.assertEqual(len(order_detail), 1)
@override_settings(SITE_NAME=settings.MICROSITE_LOGISTRATION_HOSTNAME)
class MicrositeLogistrationTests(TestCase):
    """
    Test to validate that microsites can display the logistration page
    """
    def test_login_page(self):
        """
        Make sure that we get the expected logistration page on our specialized
        microsite
        """
        resp = self.client.get(
            reverse('signin_user'),
            HTTP_HOST=settings.MICROSITE_LOGISTRATION_HOSTNAME
        )
        self.assertEqual(resp.status_code, 200)
        # The combined login/registration container marks the new-style page.
        self.assertIn('<div id="login-and-registration-container"', resp.content)
    def test_registration_page(self):
        """
        Make sure that we get the expected logistration page on our specialized
        microsite
        """
        resp = self.client.get(
            reverse('register_user'),
            HTTP_HOST=settings.MICROSITE_LOGISTRATION_HOSTNAME
        )
        self.assertEqual(resp.status_code, 200)
        self.assertIn('<div id="login-and-registration-container"', resp.content)
    @override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
    def test_no_override(self):
        """
        Make sure we get the old style login/registration if we don't override
        """
        # A microsite without the logistration override serves the legacy
        # pages: the new-style container must be absent from both views.
        resp = self.client.get(
            reverse('signin_user'),
            HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
        )
        self.assertEqual(resp.status_code, 200)
        self.assertNotIn('<div id="login-and-registration-container"', resp.content)
        resp = self.client.get(
            reverse('register_user'),
            HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
        )
        self.assertEqual(resp.status_code, 200)
        self.assertNotIn('<div id="login-and-registration-container"', resp.content)
class AccountCreationTestCaseWithSiteOverrides(SiteMixin, TestCase):
    """
    Test cases for Feature flag ALLOW_PUBLIC_ACCOUNT_CREATION which when
    turned off disables the account creation options in lms
    """
    def setUp(self):
        """Set up the tests"""
        super(AccountCreationTestCaseWithSiteOverrides, self).setUp()
        # Set the feature flag ALLOW_PUBLIC_ACCOUNT_CREATION to False
        self.site_configuration_values = {
            'ALLOW_PUBLIC_ACCOUNT_CREATION': False
        }
        # Bind the flag to a dedicated site so the override only applies there.
        self.site_domain = 'testserver1.com'
        self.set_up_site(self.site_domain, self.site_configuration_values)
    def test_register_option_login_page(self):
        """
        Navigate to the login page and check the Register option is hidden when
        ALLOW_PUBLIC_ACCOUNT_CREATION flag is turned off
        """
        response = self.client.get(reverse('signin_user'))
        self.assertNotIn('<a class="btn-neutral" href="/register?next=%2Fdashboard">Register</a>',
                         response.content)
|
fintech-circle/edx-platform
|
lms/djangoapps/student_account/test/test_views.py
|
Python
|
agpl-3.0
| 35,614
|
[
"VisIt"
] |
349ef73e28a09cc74b203e33b4d1d8fd82237a59cf4c79fc2a18096e764340cf
|
#!/usr/bin/env python
# ** The MIT License **
#
# Copyright (c) 2015 The Brookhaven Group, LLC
# Author: Marc Schwarzschild
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
__doc__ = '''
To list command line options:
$ gback.py -h
To list calendar names:
$ gback.py --list --oauthfn ~/gback.oauth
To add a whole day event:
$ gback.py --add <Calendar Name> -d <YYYYMMDD> -s "<summary>"
To export calendar to ical file:
$ gback.py --export <CALENDAR> [--path <PATH>]
'''
__program__ = 'gback'
__version__ = 'v0.52'
__author__ = 'Marc Schwarzschild'
import sys
import os
import datetime
import dateutil.parser
import httplib2
import json
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run_flow
from icalendar import Calendar, Event
def Version():
  '''
  Print the program name, version, and author, then terminate with
  exit status 1.
  '''
  banner = '{0} {1} ({2})\n'.format(__program__, __version__, __author__)
  sys.stdout.write(banner)
  sys.exit(1)
def s2d(s):
  '''
  Convert a YYYYMMDD formatted string to a datetime.date object.

  Arguments:
    s -- a 'YYYYMMDD' string, a datetime.date (returned unchanged), or a
         datetime.datetime (its date component is returned).

  Returns a datetime.date.
  '''
  # datetime.datetime is a subclass of datetime.date, so test for it first.
  # The previous `type(s) is datetime.date` check rejected datetime
  # instances and then crashed trying to slice them as a string.
  if isinstance(s, datetime.datetime): return s.date()
  if isinstance(s, datetime.date): return s
  return datetime.date(int(s[0:4]), int(s[4:6]), int(s[6:8]))
# Raised when a requested calendar name does not match the 'summary' of any
# calendar available to the authenticated account.
class GoogleCalendarNotFound(Exception): pass
class GCalSession(object):
  '''
  Object establishing a connection to the Google Calendar v3 API.

  After construction, the session caches the account's calendar list;
  individual calendars are accessed by name via indexing, e.g.
  session['Work'].
  '''

  class _GCalendar(object):
    '''
    Convenience wrapper around one cached calendar; used internally by
    GCalSession.__getitem__().
    '''
    def __init__(self, session, name):
      def get_calendar_ref():
        # Calendar names are matched against the 'summary' field returned
        # by the calendarList API.
        for cal in session.allCals:
          if cal['summary'] == name: return cal
        raise GoogleCalendarNotFound('Could not find ' + str(name))
      self.session = session
      self.cal = get_calendar_ref()

    def _get_events(self):
      '''
      Return every non-cancelled event of this calendar, serialized as an
      iCalendar-formatted string.
      '''
      session = self.session
      cal = self.cal
      def getDate(x):
        # Timed events carry 'dateTime'; whole-day events carry only 'date'.
        try:
          t = dateutil.parser.parse(x['dateTime'])
          return t.strftime('%Y%m%dT%H%M%S')
        except KeyError:
          d = dateutil.parser.parse(x['date'])
          return d.strftime('%Y%m%d')
      ical = Calendar()
      page_token = None
      # Walk the paginated events list until no nextPageToken is returned.
      while True:
        events = session.events().\
          list(calendarId=cal['id'], pageToken=page_token,
               orderBy='startTime', singleEvents=True).execute()
        for event in events['items']:
          if 'cancelled' == event['status']: continue
          e = Event()
          if 'start' in event: e['dtstart'] = getDate(event['start'])
          if 'end' in event: e['dtend'] = getDate(event['end'])
          for k in ['summary', 'location', 'description']:
            if k in event: e[k] = event[k]
          ical.add_component(e)
        page_token = events.get('nextPageToken')
        if not page_token: break
      return ical.to_ical()
    events = property(_get_events)

    def add(self, summary, ts, te=None, des=None, loc=None):
      '''
      Insert an event into this calendar and return its htmlLink URL.

      If te is not provided then it must be a whole day event.
      In that case ts can be a datetime or date object.

      Arguments:
        summary -- summary
        ts -- start time.
        te -- end time.
        des -- appointment description
        loc -- appointment location

      Example ts value:
        ts = dateutil.parser.parse('20150427 09:00:00')
        ts = datetime.date(2015, 4, 27)
      '''
      cal = self.cal
      e = {}
      e['summary'] = unicode(summary)
      tz = cal['timeZone']
      if te is None:
        # Must be a day event.
        # NOTE(review): Google treats 'end.date' as exclusive; using the
        # same date for start and end may be rejected — confirm against API.
        ts = te = s2d(ts).strftime('%Y-%m-%d')
        e['start'] = e['end'] = {'date':ts, 'timeZone':tz}
      else:
        ts = ts.strftime('%Y-%m-%dT%H:%M:%S')
        te = te.strftime('%Y-%m-%dT%H:%M:%S')
        e['start'] = {'dateTime':ts, 'timeZone':tz}
        e['end'] = {'dateTime':te, 'timeZone':tz}
      if des: e['description'] = unicode(des)
      if loc: e['location'] = unicode(loc)
      events = self.session.events()
      event = events.insert(calendarId=cal['id'], body=e)
      event = event.execute()
      return event['htmlLink']

  def __init__(self, oauthfn, clientfn=None):
    '''
    Create a connection to Google Calendars and store calendar objects.

    Arguments:
      oauthfn -- name of a file containing the oauth credentials.
      clientfn -- name of a file containing the client id and secret.

    The clientfn is only needed when the oauthfn file does not exist.
    If the oauthfn file does not exist then GCalSession() will use
    your browser to prompt for validation.  Once authenticated via
    the browser the oauthfn file will be written with stored
    credentials so subsequent connections can be made without browser
    prompting.
    '''
    if clientfn is None: clientfn = oauthfn
    try:
      json_data = open(os.path.expanduser(clientfn)).read()
      data = json.loads(json_data)
      # Google's downloaded JSON nests the keys under 'installed' for
      # native-application credentials.
      if 'installed' in data: data = data['installed']
      __API_CLIENT_ID__ = data['client_id']
      __API_CLIENT_SECRET__ = data['client_secret']
    except (IOError, OSError, ValueError, KeyError):
      # Narrowed from a bare except: only file/parse/missing-key failures
      # should abort here.  Exit nonzero so callers can detect the failure.
      sys.stdout.write('Could not read %s\n' % clientfn)
      sys.exit(1)
    storage = Storage(os.path.expanduser(oauthfn))
    credentials = storage.get()
    if credentials is None or credentials.invalid:
      # BUG FIX: the keyword was misspelled 'euser_agent', which raised a
      # TypeError whenever a new OAuth flow had to run.
      flow = OAuth2WebServerFlow(
               client_id=__API_CLIENT_ID__,
               client_secret=__API_CLIENT_SECRET__,
               scope=['https://www.googleapis.com/auth/calendar',
                      'https://www.googleapis.com/auth/urlshortener'],
               redirect_uri='urn:ietf:wg:oauth:2.0:oob',
               user_agent=__program__+'/'+__version__)
      credentials = run_flow(flow, storage)
    authHttp = credentials.authorize(httplib2.Http())
    self.service = build(serviceName='calendar', version='v3', http=authHttp)
    self._cacheCalendars()

  def _cacheCalendars(self):
    '''
    Cache all the calendar objects for the given service.
    '''
    service = self.service
    self.allCals = allCals = []
    calList = service.calendarList().list().execute()
    # Follow pagination until the API stops returning a nextPageToken.
    while True:
      for cal in calList['items']: allCals.append(cal)
      pageToken = calList.get('nextPageToken')
      if pageToken:
        calList = service.calendarList().list(pageToken=pageToken).execute()
      else:
        break

  def __iter__(self):
    '''
    Return an iterable list of calendar objects.
    '''
    return iter(self.allCals)

  def __getitem__(self, name):
    '''
    Return a calendar object for the named calendar.
    '''
    return GCalSession._GCalendar(self, name)

  def events(self): return self.service.events()

  def _names(self): return [cal['summary'] for cal in self]
  names = property(_names)
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser(description='Backup Google Calendar', usage=__doc__)
parser.add_argument('--list', '-l', action='store_true', dest='list_f',
help='List calendar names.')
parser.add_argument('-d', dest='d', metavar='YYYYMMDD')
parser.add_argument('-s', dest='s', help='Summary string.')
parser.add_argument('--descr', dest='descr', help='Description')
parser.add_argument('--add', '-a', dest='add', metavar='NAME',
help='Export calendar to ical file.')
parser.add_argument('--export', '-e', dest='export', metavar='NAME',
help='Export calendar to ical file.')
parser.add_argument('--path', '-p', dest='path', help='Set path.',
default='./')
parser.add_argument('--oauthfn', dest='oauthfn', default='~/gback.oauth',
metavar='FILENAME',
help='Set oauth credential file name.')
parser.add_argument('--clientfn', dest='clientfn', metavar='FILENAME',
help='Set client credential file name.')
args = parser.parse_args()
if args.list_f:
session = GCalSession(args.oauthfn, args.clientfn)
for c in session.names: print c
elif args.add is not None:
if args.d is None:
print 'You must use -d <YYYYMMDD>'
exit()
if args.s is None:
print 'You must use -s <event description>'
exit()
session = GCalSession(args.oauthfn, args.clientfn)
print session[args.add].add(args.s, args.d, des=args.descr)
elif args.export is not None:
session = GCalSession(args.oauthfn, args.clientfn)
if args.export in session.names:
with open(os.path.join(args.path, args.export + '.ical'), 'w') as fh:
fh.write(session[args.export].events)
else:
print args.export, 'is not a known calendar.'
else:
print __doc__
'''
Visit:
https://console.developers.google.com/project/
Choose "Create Project"
Enter a project name. This can be anything.
I used "gback" for google backup.
Read the agreements and agree to them if you wish to continue.
Wait while Activities windows works on setting up your project.
Select "APIs & auth" on the left pane to expand menu items.
select "Credentials"
select "Create new Client ID"
select "Installed application"
Answer consent screen information prompt.
Select your email address and enter your project name in the "Product
Name" field. I entered "gback".
Click on "Save"
If prompted to create another client id. Click on "Cancel".
You have to repeat the following steps. But this time "gback" should be
shown in the drop down box at the top of the web page.
select "Create new Client ID"
select "Installed application"
select "Other" for the installed application type.
Now you should have a "Client ID for native application shown".
Select "Download JSON"
That will save a JSON file with a client_id and client_secret among
other things. It will have a long file name but you can rename it to
anything you like, "gback.json", say.
Enable APIs:
Under "APIs & auth" select "APIs"
Select "Google+ API"
Then enable it.
Under "APIs & auth" select "APIs"
Select "Calendar API"
Then enable it.
The first time you run this program it will launch your browser to log
into your Google account. It will get a key and save it to your named
OAuth file using the --oauthfn arg. After that it will read your OAuth
file to get the key.
$ python gback.py -l --clientfn gback.json --oauthfn gback.oauth
After logging in using on your browser click on 'Accept' when prompted that
  gcalcback would like to "Manage your calendars".
From now on the gback.py program should work using the gback.json and
gback.oauth files without need for a browser.
Note:
Running this program requested permission which needed validation
using a browser which did not work with w3m because it doesn't support
Javascript. I had to run it the first time on ny in xwindows so it
could launch chrome for validation. It stored keys etc using
Storage() to a file so that it doesn't need to revalidate again.
'''
|
Schwarzschild/gback
|
gback/gback.py
|
Python
|
mit
| 11,964
|
[
"VisIt"
] |
1ae81f63391b4ad11de3728fe8a2af47a0d6366942df4c6714bf0958a2d37572
|
"""Copy number detection using read counts, with cn.mops.
http://www.bioconductor.org/packages/release/bioc/html/cn.mops.html
"""
import os
import re
import shutil
import subprocess
import pysam
import toolz as tz
from bcbio import bam, install, utils
from bcbio.distributed.multi import run_multicore, zeromq_aware_logging
from bcbio.distributed.transaction import file_transaction
from bcbio.log import logger
from bcbio.pipeline import config_utils, shared
from bcbio.provenance import do
from bcbio.structural import shared as sshared
from bcbio.variation import bedutils, vcfutils
def run(items, background=None):
    """Detect copy number variations from batched set of samples using cn.mops.

    :param items: list of bcbio sample dictionaries making up the batch.
    :param background: optional list of background sample dictionaries whose
        read counts are included in the cn.mops model (defaults to empty).
    :returns: ``items`` with a ``{"variantcaller": "cn_mops", "vrn_file": ...}``
        entry appended to each sample's ``sv`` list.
    :raises ValueError: if fewer than two total samples are supplied; cn.mops
        needs multiple samples to build its read-count model.
    """
    if not background: background = []
    names = [tz.get_in(["rgnames", "sample"], x) for x in items + background]
    work_bams = [x["align_bam"] for x in items + background]
    if len(items + background) < 2:
        raise ValueError("cn.mops only works on batches with multiple samples")
    data = items[0]
    # All batch output goes under the work directory of the first sample.
    work_dir = utils.safe_makedir(os.path.join(data["dirs"]["work"], "structural", names[0],
                                               "cn_mops"))
    # NOTE(review): "delly" in progs looks copy-pasted from the delly caller --
    # confirm the intended program list for resource lookup.
    parallel = {"type": "local", "cores": data["config"]["algorithm"].get("num_cores", 1),
                "progs": ["delly"]}
    with pysam.Samfile(work_bams[0], "rb") as pysam_work_bam:
        # With a regional BED file, run once over all regions (chrom=None);
        # otherwise parallelize per chromosome from the BAM header.
        chroms = [None] if _get_regional_bed_file(items[0]) else pysam_work_bam.references
        out_files = run_multicore(_run_on_chrom, [(chrom, work_bams, names, work_dir, items)
                                                  for chrom in chroms],
                                  data["config"], parallel)
    out_file = _combine_out_files(out_files, work_dir, data)
    out = []
    for data in items:
        if "sv" not in data:
            data["sv"] = []
        data["sv"].append({"variantcaller": "cn_mops",
                           "vrn_file": _prep_sample_cnvs(out_file, data)})
        out.append(data)
    return out
def _combine_out_files(chr_files, work_dir, data):
    """Merge per-chromosome CNV BED files into one combined BED file.

    Placeholder files produced when no CNVs were found (marked by a
    "track name=empty" header line) are skipped.
    """
    combined = "%s.bed" % sshared.outname_from_inputs(chr_files)
    if utils.file_exists(combined):
        return combined
    with file_transaction(data, combined) as tx_combined:
        with open(tx_combined, "w") as writer:
            for per_chrom in chr_files:
                # Peek at the first line to detect empty placeholder files.
                with open(per_chrom) as reader:
                    first_line = reader.readline()
                if first_line.startswith("track name=empty"):
                    continue
                with open(per_chrom) as reader:
                    shutil.copyfileobj(reader, writer)
    return combined
def _prep_sample_cnvs(cnv_file, data):
    """Convert a multiple sample CNV file into a single BED file for a sample.

    Handles matching and fixing names where R converts numerical IDs (1234) into
    strings by adding an X (X1234), and converts other characters into '.'s.
    http://stat.ethz.ch/R-manual/R-devel/library/base/html/make.names.html

    :param cnv_file: combined multi-sample CNV BED file.
    :param data: bcbio sample dictionary; sample name read from rgnames/sample.
    :returns: path to a BED file containing only this sample's calls.
    """
    import pybedtools
    sample_name = tz.get_in(["rgnames", "sample"], data)

    def make_names(name):
        # Mirror R's make.names: anything outside [A-Za-z0-9_.] becomes '.'
        # Raw string: "[^\w.]" without r-prefix is an invalid escape sequence
        # (DeprecationWarning, a SyntaxError in future Python versions).
        return re.sub(r"[^\w.]", '.', name)

    def matches_sample_name(feat):
        # Accept the exact name, R's X-prefixed numeric form, or the
        # make.names-mangled form.
        return (feat.name == sample_name or feat.name == "X%s" % sample_name or
                feat.name == make_names(sample_name))

    def update_sample_name(feat):
        feat.name = sample_name
        return feat
    sample_file = os.path.join(os.path.dirname(cnv_file), "%s-cnv.bed" % sample_name)
    if not utils.file_exists(sample_file):
        with file_transaction(data, sample_file) as tx_out_file:
            with shared.bedtools_tmpdir(data):
                pybedtools.BedTool(cnv_file).filter(matches_sample_name).each(update_sample_name).saveas(tx_out_file)
    return sample_file
@utils.map_wrap
@zeromq_aware_logging
def _run_on_chrom(chrom, work_bams, names, work_dir, items):
    """Run cn.mops on work BAMs for a specific chromosome.

    Generates and executes an R script. If cn.mops fails with one of its
    known "no CNVs found" error states, an empty placeholder BED file is
    written instead of raising.

    :returns: single-element list with the output BED path (flattened by
        ``map_wrap`` across chromosomes).
    """
    local_sitelib = os.path.join(install.get_defaults().get("tooldir", "/usr/local"),
                                 "lib", "R", "site-library")
    batch = sshared.get_cur_batch(items)
    ext = "-%s-cnv" % batch if batch else "-cnv"
    out_file = os.path.join(work_dir, "%s%s-%s.bed" % (os.path.splitext(os.path.basename(work_bams[0]))[0],
                                                       ext, chrom if chrom else "all"))
    if not utils.file_exists(out_file):
        with file_transaction(items[0], out_file) as tx_out_file:
            rcode = "%s-run.R" % os.path.splitext(out_file)[0]
            with open(rcode, "w") as out_handle:
                out_handle.write(_script.format(prep_str=_prep_load_script(work_bams, names, chrom, items),
                                                out_file=tx_out_file,
                                                local_sitelib=local_sitelib))
            rscript = utils.Rscript_cmd()
            try:
                do.run([rscript, rcode], "cn.mops CNV detection", items[0], log_error=False)
            except subprocess.CalledProcessError as msg:
                # cn.mops errors out if no CNVs found. Just write an empty file.
                if _allowed_cnmops_errorstates(str(msg)):
                    with open(tx_out_file, "w") as out_handle:
                        out_handle.write('track name=empty description="No CNVs found"\n')
                else:
                    # Fix: logger.exception() with no arguments raises
                    # TypeError (msg is required), masking the real error.
                    logger.exception("cn.mops failed on chromosome %s", chrom)
                    raise
    return [out_file]
def _allowed_cnmops_errorstates(msg):
return (msg.find("No CNV regions in result object. Rerun cn.mops with different parameters") >= 0
or msg.find("Normalization might not be applicable for this small number of segments") >= 0
or msg.find("Error in if (is.finite(mv2m)) { : argument is of length zero") >= 0
or msg.find("Some normalization factors are zero") >= 0)
def _prep_load_script(work_bams, names, chrom, items):
    """Build the R snippet that loads read counts for this batch.

    Two-sample batches with a tumor/normal phenotype use the reference-based
    (paired) loader; all other batches use the population loader.
    """
    if not chrom: chrom = ""
    # cn.mops read counting differs for paired-end vs single-end BAMs.
    pairmode = "paired" if bam.is_paired(work_bams[0]) else "unpaired"
    if len(items) == 2 and vcfutils.get_paired_phenotype(items[0]):
        load_script = _paired_load_script
    else:
        load_script = _population_load_script
    return load_script(work_bams, names, chrom, pairmode, items)
def _get_regional_bed_file(data):
    """Return the merged variant-regions BED file for targeted analyses.

    Returns None for whole-genome runs or when no usable regions file exists.
    """
    regions_input = tz.get_in(["config", "algorithm", "variant_regions"], data)
    merged_regions = bedutils.merge_overlaps(regions_input, data)
    coverage = data["config"]["algorithm"].get("coverage_interval", "exome")
    if coverage.lower() == "genome":
        return None
    if merged_regions and utils.file_exists(merged_regions):
        return merged_regions
def _population_load_script(work_bams, names, chrom, pairmode, items):
    """Fill in the R loader snippet for a population of samples.

    Uses the targeted (BED-driven) template when regional analysis applies,
    otherwise the whole-chromosome template.
    """
    bams_csv = ",".join(work_bams)
    names_csv = ",".join(names)
    bed_file = _get_regional_bed_file(items[0])
    if bed_file:
        return _population_prep_targeted.format(bam_file_str=bams_csv, names_str=names_csv,
                                                chrom=chrom, num_cores=0, pairmode=pairmode,
                                                bed_file=bed_file)
    return _population_prep.format(bam_file_str=bams_csv, names_str=names_csv,
                                   chrom=chrom, num_cores=0, pairmode=pairmode)
def _paired_load_script(work_bams, names, chrom, pairmode, items):
    """Fill in the R loader snippet for a tumor/normal paired analysis."""
    paired = vcfutils.get_paired_bams(work_bams, items)
    bed_file = _get_regional_bed_file(items[0])
    template = _paired_prep_targeted if bed_file else _paired_prep
    # str.format ignores unused keyword arguments, so bed_file can always
    # be supplied even though only the targeted template consumes it.
    return template.format(case_file=paired.tumor_bam, case_name=paired.tumor_name,
                           ctrl_file=paired.normal_bam, ctrl_name=paired.normal_name,
                           num_cores=0, chrom=chrom, pairmode=pairmode,
                           bed_file=bed_file)
_script = """
.libPaths(c("{local_sitelib}"))
library(cn.mops)
library(rtracklayer)
{prep_str}
calc_cnvs <- cnvs(cnv_out)
strcn_to_cn <- function(x) {{
as.numeric(substring(x, 3, 20))}}
calc_cnvs$score <- strcn_to_cn(calc_cnvs$CN)
calc_cnvs$name <- calc_cnvs$sampleName
export.bed(calc_cnvs, "{out_file}")
"""
_population_prep = """
bam_files <- strsplit("{bam_file_str}", ",")[[1]]
sample_names <- strsplit("{names_str}", ",")[[1]]
count_drs <- getReadCountsFromBAM(bam_files, sampleNames=sample_names, mode="{pairmode}",
refSeqName="{chrom}", parallel={num_cores})
prep_counts <- cn.mops(count_drs, parallel={num_cores})
cnv_out <- calcIntegerCopyNumbers(prep_counts)
"""
_paired_prep = """
case_count <- getReadCountsFromBAM(c("{case_file}"), sampleNames=c("{case_name}"), mode="{pairmode}",
refSeqName="{chrom}", parallel={num_cores})
ctrl_count <- getReadCountsFromBAM(c("{ctrl_file}"), sampleNames=c("{ctrl_name}"), mode="{pairmode}",
refSeqName="{chrom}", parallel={num_cores},
WL=width(case_count)[[1]])
prep_counts <- referencecn.mops(case_count, ctrl_count, parallel={num_cores})
cnv_out <- calcIntegerCopyNumbers(prep_counts)
"""
_population_prep_targeted = """
bam_files <- strsplit("{bam_file_str}", ",")[[1]]
sample_names <- strsplit("{names_str}", ",")[[1]]
my_gr <- import.bed(c("{bed_file}"), trackLine=FALSE, asRangedData=FALSE)
if ("{chrom}" != "") my_gr = subset(my_gr, seqnames(my_gr) == "{chrom}")
if (length(my_gr) < 1) stop("No CNV regions in result object. Rerun cn.mops with different parameters!")
count_drs <- getSegmentReadCountsFromBAM(bam_files, sampleNames=sample_names, mode="{pairmode}",
GR=my_gr, parallel={num_cores})
prep_counts <- cn.mops(count_drs, parallel={num_cores})
cnv_out <- calcIntegerCopyNumbers(prep_counts)
"""
_paired_prep_targeted = """
my_gr <- import.bed(c("{bed_file}"), trackLine=FALSE, asRangedData=FALSE)
if ("{chrom}" != "") my_gr = subset(my_gr, seqnames(my_gr) == "{chrom}")
if (length(my_gr) < 1) stop("No CNV regions in result object. Rerun cn.mops with different parameters!")
case_count <- getSegmentReadCountsFromBAM(c("{case_file}"), GR=my_gr,
sampleNames=c("{case_name}"),
mode="{pairmode}", parallel={num_cores})
ctrl_count <- getSegmentReadCountsFromBAM(c("{ctrl_file}"), GR=my_gr,
sampleNames=c("{case_name}"),
mode="{pairmode}", parallel={num_cores})
prep_counts <- referencecn.mops(case_count, ctrl_count, parallel={num_cores})
cnv_out <- calcIntegerCopyNumbers(prep_counts)
"""
|
brainstorm/bcbio-nextgen
|
bcbio/structural/cn_mops.py
|
Python
|
mit
| 11,115
|
[
"Bioconductor",
"pysam"
] |
71301c5bae2e3a7499c89ad307937da6ce045c5ab28adebae62005eaaf482daf
|
#!/usr/bin/env python
"""
CsPython Tutorial Example 3
By Mark Gossage (mark@gossage.cjb.net)
A pure-Python script to show the use of Crystal Space.
To use this, ensure that your PYTHONPATH, CRYSTAL, and LD_LIBRARY_PATH
(or DYLD_LIBRARY_PATH for MacOS/X; or PATH for Windows) variables are set
appropriately, and then run the script with the command:
python scripts/python/tutorial1.py
This performs the same features as the C++ tutorial 'simpmap',
loading a map with all the nice mesh effects.
===========================================================================
There are two ways to use the CsPython module.
Either as a plugin within CS (pysimp),
or as a pure Python module (this example).
This is just the CS tutorial 'simpmap' rewritten in Python.
Overall it looks quite like the original C++ code,
just with Python syntax; but the functions are all the same names and formats
(use None instead of NULL, and "" instead of (char*)NULL).
Please refer to the CS Tutorial 3 in the documentation
for detail on how the C++ works.
"""
import types, string, re, sys
import traceback
try: # get in CS
from cspace import *
except:
print "WARNING: Failed to import module cspace"
traceback.print_exc()
sys.exit(1) # die!!
# utils code
#############################
# Note: we are assuming a global 'object_reg'
# which will be defined later
def Report (severity, msg):
    """Send a message through the Crystal Space reporter plugin.

    Relies on the module-level 'object_reg' created in the startup code below.
    """
    csReport(object_reg, severity, "crystalspace.application.python", msg)
def Log(msg):
    """Report an informational (NOTIFY severity) message."""
    Report(CS_REPORTER_SEVERITY_NOTIFY, msg)
def FatalError(msg="FatalError"):
    """Report an error message and terminate the process immediately."""
    Report(CS_REPORTER_SEVERITY_ERROR,msg)
    sys.exit(1)
# Application
#############################
class MyCsApp:
    """Minimal Crystal Space application: loads a map and drives the camera.

    Python port of the C++ 'simpmap' tutorial. Assumes the module-level
    'object_reg' registry has been created before Init() is called.
    """
    def Init(self):
        """Query engine plugins, open the application window and load the map."""
        Log('MyCsApp.Init()...')
        # Fetch the standard plugin interfaces from the object registry.
        self.vc = object_reg.Get(iVirtualClock)
        self.engine = object_reg.Get(iEngine)
        self.g3d = object_reg.Get(iGraphics3D)
        self.loader = object_reg.Get(iLoader)
        self.keybd = object_reg.Get(iKeyboardDriver)
        if self.vc==None or self.engine==None or self.g3d==None or self.keybd==None or self.loader==None:
            FatalError("Error: in object registry query")
        if not csInitializer.OpenApplication(object_reg):
            FatalError("Could not open the application!")
        # Create a view covering the whole 2D canvas.
        self.view=csView(self.engine,self.g3d)
        g2d = self.g3d.GetDriver2D()
        self.view.SetRectangle(0, 0, g2d.GetWidth(), g2d.GetHeight ())
        self.LoadMap("world")
        Log('MyCsApp.Init() finished')

    def LoadMap(self,name):
        """Load the named map file from the VFS and position the camera."""
        # Set VFS current directory to the level we want to load.
        vfs=object_reg.Get(iVFS)
        vfs.ChDir("/lev/partsys");
        # Load the level file which is called 'world'.
        if not self.loader.LoadMapFile(name):
            FatalError("Couldn't load level!")
        self.engine.Prepare()
        # Find the starting position in this level.
        pos=csVector3(0,0,0)
        room=None
        if self.engine.GetCameraPositions().GetCount()>0:
            #// There is a valid starting position defined in the level file.
            campos = self.engine.GetCameraPositions().Get(0)
            room = self.engine.GetSectors().FindByName(campos.GetSector())
            pos = campos.GetPosition()
        else:
            #// We didn't find a valid starting position. So we default
            #// to going to room called 'room' at position (0,0,0).
            room = self.engine.GetSectors().FindByName("room")
        if room==None:
            FatalError("Can't find a valid starting position!")
        self.view.GetCamera().SetSector(room)
        self.view.GetCamera().GetTransform().SetOrigin(pos)

    def SetupFrame (self):
        """Per-frame update: apply keyboard camera movement, then draw the view."""
        #print 'SetupFrame called',
        elapsed_time = self.vc.GetElapsedTicks()
        # Now rotate the camera according to keyboard state
        speed = (elapsed_time / 1000.) * (0.03 * 20);
        if self.keybd.GetKeyState(CSKEY_RIGHT):
            self.view.GetCamera().GetTransform().RotateThis(CS_VEC_ROT_RIGHT, speed)
        if self.keybd.GetKeyState(CSKEY_LEFT):
            self.view.GetCamera().GetTransform().RotateThis(CS_VEC_ROT_LEFT, speed)
        if self.keybd.GetKeyState(CSKEY_PGUP):
            self.view.GetCamera().GetTransform().RotateThis(CS_VEC_TILT_UP, speed)
        if self.keybd.GetKeyState(CSKEY_PGDN):
            self.view.GetCamera().GetTransform().RotateThis(CS_VEC_TILT_DOWN, speed)
        if self.keybd.GetKeyState(CSKEY_UP):
            self.view.GetCamera().Move(CS_VEC_FORWARD * 4 * speed)
        if self.keybd.GetKeyState(CSKEY_DOWN):
            self.view.GetCamera().Move(CS_VEC_BACKWARD * 4 * speed)
        # Tell 3D driver we're going to display 3D things.
        if not self.g3d.BeginDraw(self.engine.GetBeginDrawFlags() | CSDRAW_3DGRAPHICS):
            FatalError()
        self.view.Draw()
        #print 'SetupFrame done'

    def FinishFrame(self):
        """Finish 3D drawing and present the completed frame on screen."""
        #print 'FinishFrame called'
        self.g3d.FinishDraw()
        self.g3d.Print(None)
        #print 'FinishFrame done'
# EventHandler
#############################
def EventHandler(ev):
    """Global event callback: handles ESC (quit) and per-frame drawing.

    Uses the module-level 'KeyboardDown', 'Frame' event IDs and the 'app'
    instance set up in the startup code. Returns 1 when the event was
    handled, 0 otherwise.
    """
    #print 'EventHandler called'
    if ((ev.Name == KeyboardDown) and
        (csKeyEventHelper.GetCookedCode(ev) == CSKEY_ESC)):
        # ESC pressed: broadcast the quit event to stop the run loop.
        q = object_reg.Get(iEventQueue)
        if q:
            q.GetEventOutlet().Broadcast(csevQuit(object_reg))
        return 1
    elif ev.Name == Frame:
        # Per-frame event: update and render the scene.
        app.SetupFrame()
        app.FinishFrame()
        return 1
    return 0
# startup code
#############################
# We could write a 'main' fn for this,
# but I decided to put it in the body of the app.
object_reg = csInitializer.CreateEnvironment(sys.argv)
if object_reg is None:
    FatalError("Couldn't create enviroment!")
# Handle --help on the command line before doing any real work.
if csCommandLineHelper.CheckHelp(object_reg):
    csCommandLineHelper.Help(object_reg)
    sys.exit(0)
if not csInitializer.SetupConfigManager(object_reg):
    FatalError("Couldn't init app!")
# Request the standard plugin set (VFS, renderer, engine, loaders).
plugin_requests = [
    CS_REQUEST_VFS, CS_REQUEST_OPENGL3D, CS_REQUEST_ENGINE,
    CS_REQUEST_FONTSERVER, CS_REQUEST_IMAGELOADER, CS_REQUEST_LEVELLOADER,
]
if not csInitializer.RequestPlugins(object_reg, plugin_requests):
    FatalError("Plugin requests failed!")
# setup the event handler:
# note: we need not even make EventHandler() a global fn
# python would accept it as a member fn of MyCsApp
if not csInitializer.SetupEventHandler(object_reg, EventHandler):
    FatalError("Could not initialize event handler!")
# Get some often used event IDs
KeyboardDown = csevKeyboardDown(object_reg)
Frame = csevFrame(object_reg)
app=MyCsApp()  # this is the one & only app
app.Init()     # turn on the app
               # this also now calls OpenApplication
csDefaultRunLoop(object_reg)
app=None  # need to do this or you get 'unreleased instances' warning
# See! CsPython manages the smart pointers correctly
csInitializer.DestroyApplication (object_reg)  # bye bye
object_reg=None  # just to be complete (not really needed)
|
baoboa/Crystal-Space
|
scripts/python/tutorial3.py
|
Python
|
lgpl-2.1
| 7,068
|
[
"CRYSTAL"
] |
931db963f8d83c109dd146b316de202d128bb1bdd28042fb6e0101d730bfdc04
|
# -*- coding: utf-8 -*-
# Copyright 2014-2016 The HyperSpyUI developers
#
# This file is part of HyperSpyUI.
#
# HyperSpyUI is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpyUI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpyUI. If not, see <http://www.gnu.org/licenses/>.
from hyperspyui.plugins.plugin import Plugin
import numpy as np
from qtpy import QtCore, QtWidgets
from qtpy.QtWidgets import QDialogButtonBox
from hyperspyui.tools import SelectionTool
from hyperspyui.util import SignalTypeFilter
from hyperspyui.widgets.extendedqwidgets import ExToolWindow
from hyperspy.roi import BaseInteractiveROI
import hyperspy.signals
class AlignPlugin(Plugin):
    """HyperSpyUI plugin providing 1D/2D stack alignment tools and actions."""
    name = "Align"

    def __init__(self, main_window):
        super(AlignPlugin, self).__init__(main_window)
        # Defaults for the alignment settings exposed through the UI.
        self.settings.set_default('sub_pixel_factor', 20)
        self.settings.set_default('1d_smooth_amount', 50)
        self.settings.set_default('2d_smooth_amount', 0.0)
        self.settings.set_default('sobel_2D', True)
        self.settings.set_default('median_2D', True)
        self.settings.set_default('hanning_2D', True)
        self.settings.set_default('alignment_reference', 'current')
        self.settings.set_default('expand', True)
        self.settings.set_default('crop', True)
        self.settings.set_default('plot', False)

    def create_actions(self):
        self.add_action('manual_align', "Manual align",
                        self.manual_align,
                        icon='align_manual.svg',
                        tip="Interactively align the signal",
                        selection_callback=SignalTypeFilter(
                            hyperspy.signals.Signal2D, self.ui))

    def create_menu(self):
        self.add_menuitem("Signal", self.ui.actions['manual_align'])

    def create_toolbars(self):
        self.add_toolbar_button("Signal", self.ui.actions['manual_align'])

    def create_tools(self):
        tools = []
        # General (1D or 2D) align tool
        self.tool_XD = SelectionTool(
            name='Align tool', icon="align2d.svg", category="Signal",
            description="Align images across the stack")
        self.tool_XD.accepted[BaseInteractiveROI].connect(
            self.align_XD)
        self.tool_XD.valid_dimensions = (1, 2)
        tools.append(self.tool_XD)
        # Vertical 2D align
        self.tool_vertical = SelectionTool(
            name='Align vertical tool', icon="align_vertical.svg",
            category="Signal",
            description="Align an image feature vertically across the stack")
        self.tool_vertical.accepted[BaseInteractiveROI].connect(
            self.align_vertical)
        self.tool_vertical.valid_dimensions = (2,)
        tools.append(self.tool_vertical)
        # Horizontal 2D align (comment fixed: previously duplicated "Vertical")
        self.tool_horizontal = SelectionTool(
            name='Align horizontal tool', icon="align_horizontal.svg",
            category="Signal",
            description="Align an image feature horizontally across the stack")
        self.tool_horizontal.accepted[BaseInteractiveROI].connect(
            self.align_horizontal)
        self.tool_horizontal.valid_dimensions = (2,)
        tools.append(self.tool_horizontal)
        for t in tools:
            t.cancel_on_accept = True
            self.add_tool(t, self.ui.select_signal)

    def manual_align(self, signal=None):
        """Open the interactive manual alignment dialog for the signal."""
        signal = self._get_signal(signal)
        if signal is None:
            return
        diag = ManualAlignDialog(signal, self.ui)
        diag.show()

    @staticmethod
    def _smooth(y, box_pts):
        # Box-filter smoothing. Cast to int: the setting is stored as a
        # float, and np.ones() requires an integer size on modern NumPy.
        box_pts = int(box_pts)
        box = np.ones(box_pts) / box_pts
        y_smooth = np.convolve(y, box, mode='valid')
        return y_smooth

    def _get_signal(self, signal):
        # Fall back to the UI's currently selected signal.
        if signal is None:
            return self.ui.get_selected_signal()
        return signal

    def align_XD(self, roi, signal=None):
        """Dispatch to 1D or 2D alignment depending on the ROI dimensionality."""
        signal = self._get_signal(signal)
        if signal is None:
            return
        if signal.axes_manager.signal_dimension != roi.ndim:
            return
        if roi.ndim == 1:
            return self.align_1D(roi, signal)
        elif roi.ndim == 2:
            return self.align_2D(roi, signal)
        else:
            raise ValueError("Cannot align a signal of %d dimensions" %
                             roi.ndim)

    def align_1D(self, roi, signal=None):
        """Align a 1D signal stack using shifts estimated within the ROI."""
        signal = self._get_signal(signal)
        if signal is None:
            return
        shifts = signal.estimate_shift1D(
            reference='current',
            roi=(roi.left, roi.right),
            show_progressbar=True)
        s_aligned = signal.deepcopy()
        s_aligned.align1D(shifts=shifts, expand=True)
        s_aligned.plot()
        self.record_code("signal = ui.get_selected_signal()")
        self.record_code("s_aligned = signal.deepcopy()")
        self.record_code("s_aligned.align1D(reference='current', "
                         "roi=(%f, %f), show_progressbar=True, expand=True)" %
                         (roi.left, roi.right))
        return s_aligned

    def align_2D(self, roi, signal=None):
        """Align a 2D image stack using shifts estimated within the ROI."""
        signal = self._get_signal(signal)
        if signal is None:
            return
        s = signal
        sobel = self.settings['sobel_2D', bool]
        hanning = self.settings['hanning_2D', bool]
        median = self.settings['median_2D', bool]
        sub_pixel_factor = self.settings['sub_pixel_factor', float]
        plot = self.settings['plot', bool]
        if plot:
            plot = 'reuse'
        ref = self.settings['alignment_reference'].lower()
        if not ref:
            ref = 'current'
        expand = self.settings['expand', bool]
        crop = self.settings['crop', bool]
        gauss = self.settings['2d_smooth_amount', float]
        # Optionally pre-smooth via the Gaussian Filter plugin; shifts are
        # estimated on the smoothed copy but applied to the original signal.
        if gauss > 0.0 and 'Gaussian Filter' in self.ui.plugins:
            p = self.ui.plugins['Gaussian Filter']
            s = p.gaussian(sigma=gauss, signal=signal, record=False)
            s.axes_manager.indices = signal.axes_manager.indices
        record_string = (
            "reference={0}, sobel={1}, hanning={2}, medfilter={3},"
            "roi=({4}, {5}, {6}, {7}), plot={8},"
            "show_progressbar=True").format(
                ref, sobel, hanning, median, roi.left, roi.right, roi.top,
                roi.bottom, plot)
        try:
            shifts = s.estimate_shift2D(
                reference=ref,
                roi=(roi.left, roi.right, roi.top, roi.bottom),
                sobel=sobel, hanning=hanning, medfilter=median,
                sub_pixel_factor=sub_pixel_factor,
                plot=plot,
                show_progressbar=True)
            record_string += ", sub_pixel_factor=" + str(sub_pixel_factor)
        except TypeError:
            # Hyperspy might not accept 'sub_pixel_factor'
            shifts = s.estimate_shift2D(
                reference=ref,
                roi=(roi.left, roi.right, roi.top, roi.bottom),
                sobel=sobel, hanning=hanning, medfilter=median,
                plot=plot,
                show_progressbar=True)
        s_aligned = signal.deepcopy()
        s_aligned.align2D(shifts=shifts, crop=crop, expand=expand)
        record_string += ", crop={0}, expand={1}".format(crop, expand)
        s_aligned.plot()
        self.record_code("signal = ui.get_selected_signal()")
        self.record_code("s_aligned = signal.deepcopy()")
        # Fix: this path performs a 2D alignment, but previously recorded
        # "align1D", producing non-reproducing recorded code.
        self.record_code("s_aligned.align2D(%s)" % record_string)
        return s_aligned

    def align_vertical(self, roi, signal=None):
        """Align a feature vertically (shift rows) across the stack."""
        signal = self._get_signal(signal)
        if signal is None:
            return
        self.record_code("<p>.align_vertical(roi=%s)" % repr(roi))
        return self._align_along_axis(roi, signal, axis=1)

    def align_horizontal(self, roi, signal=None):
        """Align a feature horizontally (shift columns) across the stack."""
        signal = self._get_signal(signal)
        if signal is None:
            return
        # Fix: previously recorded "align_vertical" for the horizontal action.
        self.record_code("<p>.align_horizontal(roi=%s)" % repr(roi))
        return self._align_along_axis(roi, signal, axis=0)

    def _align_along_axis(self, roi, signal, axis):
        """Cross-correlate ROI profiles to find per-frame shifts along one axis."""
        sumaxis = 1 if axis == 0 else 0
        daxis = signal.axes_manager.signal_axes[sumaxis]
        iref = daxis.index
        # NOTE(review): the "+ 3j" axis argument relies on hyperspy's complex
        # axis-index convention -- confirm against the hyperspy version in use.
        s_al = roi(signal).sum(axis=daxis.index_in_array+3j)
        s_al.change_dtype(float)
        s_al.unfold()  # Temp signal, so don't need to refold
        # Check that signal axis is last dimension
        if s_al.axes_manager.signal_axes[0].index_in_array < 1:
            s_al.data = s_al.data.T  # Unfolded, so simply transpose
        # From now on, navigation is in first dimension
        smooth = self.settings['1d_smooth_amount', float]
        d = np.array([self._smooth(s_al.data[i, :], smooth)
                      for i in range(s_al.data.shape[0])])
        d = np.diff(d, axis=1)  # Differentiate to highlight edges
        sz = d.shape  # Initial shape
        ref = d[iref, :]  # Reference row
        # Pad reference with +/- half size at each end (maximum shift allowed).
        # Integer division: sz[1] / 2 is a float on Python 3, which np.pad
        # and index arithmetic reject.
        half = sz[1] // 2
        ref = np.pad(ref, (half, half), 'edge')
        shifts = []
        # Find shifts for each row
        for row in range(sz[0]):
            if row == iref:
                # Set shift of reference to compensate for padding
                shifts.append(half)
            else:
                corr = np.correlate(ref, d[row, :], 'valid')
                shifts.append(corr.argmax())
        # Remove "padding" from found shifts
        shifts = np.array(shifts) - half
        # Pad for both x and y shifts, but zero unused one:
        shifts = np.tile(-shifts, (2, 1)).T
        shifts[:, axis] = 0.0
        # Apply shifts using hyperspy routine:
        s_aligned = signal.deepcopy()
        s_aligned.align2D(shifts=shifts, crop=False, expand=True)
        s_aligned.plot()
        return s_aligned
class ManualAlignDialog(ExToolWindow):
    """Dialog for manually shifting frames of an image stack in X/Y.

    Shifts are applied incrementally to the current and all following frames
    and accumulated in ``self.shifts``; on OK the original data is restored
    and the accumulated shifts are applied with ``align2D(expand=True)``.
    """

    def __init__(self, signal, parent=None):
        # Fix: previously called super(ExToolWindow, self).__init__, which
        # skips ExToolWindow.__init__ in the MRO and leaves the tool window
        # base class uninitialized.
        super(ManualAlignDialog, self).__init__(parent)
        self.ui = parent
        self.signal = signal
        self._orig_data = None   # pristine copy of signal.data while previewing
        self.shifts = None       # accumulated (nav_size, 2) shift array
        self._prev_x = 0
        self._prev_y = 0
        self.create_controls()
        self.accepted.connect(self.ok)
        self.rejected.connect(self.cancel)

    def ok(self):
        """
        Callback when dialog is closed by OK-button.
        """
        signal = self.signal
        # Restore the untouched data, then apply the accumulated shifts once.
        if self._orig_data is not None:
            signal.data = self._orig_data
        if self.shifts is not None:
            with signal.unfolded(unfold_signal=False):
                signal.align2D(shifts=self.shifts, expand=True)
            signal.get_dimensions_from_data()
            rc = self.ui.record_code
            rc("signal = ui.get_selected_signal()")
            rc("shifts = np.array(%s)" % str(self.shifts.tolist()))
            rc("with signal.unfolded(unfold_signal=False):")
            rc("    signal.align2D(shifts=shifts, expand=True)")
            rc("signal.get_dimensions_from_data()")

    def cancel(self):
        """Discard the preview shifts and restore the original data."""
        signal = self.signal
        if self._orig_data is not None:
            signal.data = self._orig_data
            signal.update_plot()
        self.close()

    def close(self):
        # NOTE(review): this override does not call the Qt base close() --
        # it only drops the data backup; confirm the window is closed by
        # the accept/reject machinery instead.
        self._orig_data = None

    def _backup_data(self, signal):
        # Snapshot the data and allocate the shift accumulator on first edit.
        if self._orig_data is None:
            with signal.unfolded(unfold_signal=False):
                self._orig_data = signal.data.copy()
        if self.shifts is None:
            # dtype=int: np.int was removed in NumPy 1.24.
            self.shifts = np.zeros((signal.axes_manager.navigation_size, 2),
                                   dtype=int)

    def update_x(self):
        """Preview a horizontal shift of the current and following frames."""
        signal = self.signal
        val = self.num_x.value() - self._prev_x
        self._prev_x = self.num_x.value()
        self._backup_data(signal)
        index = np.ravel_multi_index(signal.axes_manager.indices,
                                     signal.axes_manager.navigation_shape)
        with signal.unfolded(unfold_signal=False):
            axis = signal.axes_manager.signal_axes[0].index_in_array
            signal.data[index:, ...] = np.roll(
                signal.data[index:, ...], val, axis)
        self.shifts[index:, 1] -= val
        signal.update_plot()

    def update_y(self):
        """Preview a vertical shift of the current and following frames."""
        signal = self.signal
        val = self.num_y.value() - self._prev_y
        self._prev_y = self.num_y.value()
        self._backup_data(signal)
        index = np.ravel_multi_index(signal.axes_manager.indices,
                                     signal.axes_manager.navigation_shape)
        with signal.unfolded(unfold_signal=False):
            axis = signal.axes_manager.signal_axes[1].index_in_array
            signal.data[index:, ...] = np.roll(
                signal.data[index:, ...], val, axis)
        self.shifts[index:, 0] -= val
        signal.update_plot()

    def create_controls(self):
        """Build the X/Y spin boxes and OK/Cancel buttons."""
        self.setWindowTitle("Align signal")
        form = QtWidgets.QFormLayout()
        self.num_x = QtWidgets.QSpinBox()
        self.num_y = QtWidgets.QSpinBox()
        self.num_x.valueChanged.connect(self.update_x)
        self.num_y.valueChanged.connect(self.update_y)
        # Limit shifts to the signal's own extent in each direction.
        dims = self.signal.axes_manager.signal_shape
        self.num_x.setMaximum(dims[0])
        self.num_y.setMaximum(dims[1])
        self.num_x.setMinimum(-dims[0])
        self.num_y.setMinimum(-dims[1])
        form.addRow("X:", self.num_x)
        form.addRow("Y:", self.num_y)
        vbox = QtWidgets.QVBoxLayout()
        vbox.addLayout(form)
        btns = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel,
                                QtCore.Qt.Horizontal, self)
        btns.accepted.connect(self.accept)
        btns.rejected.connect(self.reject)
        vbox.addWidget(btns)
        self.setLayout(vbox)
|
hyperspy/hyperspyUI
|
hyperspyui/plugins/align.py
|
Python
|
gpl-3.0
| 14,382
|
[
"Gaussian"
] |
a2a7d80eefd4dc9cdfb659171b1781ddd7c681630d15c1d909931683a650b1c8
|
# Copyright (C) 2012,2013,2015,2016
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r"""
***************************************************
espressopp.interaction.CoulombTruncatedUniqueCharge
***************************************************
.. math::
U = \frac{Q}{d}
where :math:`Q` is the product of the charges of the two particles and :math:`d` is their distance from each other.
In this interaction potential, a unique :math:`Q = q_iq_j` value is specified per potential. For a more flexible truncated Coulomb interaction potential where each individual particle has its own charge :math:`q_i`, see CoulombTruncated.
.. function:: espressopp.interaction.CoulombTruncatedUniqueCharge(qq, cutoff, shift)
:param qq: (default: 1.0)
:param cutoff: (default: infinity)
:param shift: (default: "auto")
:type qq: real
:type cutoff:
:type shift:
.. function:: espressopp.interaction.VerletListCoulombTruncatedUniqueCharge(vl)
:param vl:
:type vl:
.. function:: espressopp.interaction.VerletListCoulombTruncatedUniqueCharge.getPotential(type1, type2)
:param type1:
:param type2:
:type type1:
:type type2:
:rtype:
.. function:: espressopp.interaction.VerletListCoulombTruncatedUniqueCharge.setPotential(type1, type2, potential)
:param type1:
:param type2:
:param potential:
:type type1:
:type type2:
:type potential:
.. function:: espressopp.interaction.CellListCoulombTruncatedUniqueCharge(stor)
:param stor:
:type stor:
.. function:: espressopp.interaction.CellListCoulombTruncatedUniqueCharge.setPotential(type1, type2, potential)
:param type1:
:param type2:
:param potential:
:type type1:
:type type2:
:type potential:
.. function:: espressopp.interaction.FixedPairListCoulombTruncatedUniqueCharge(system, vl, potential)
:param system:
:param vl:
:param potential:
:type system:
:type vl:
:type potential:
.. function:: espressopp.interaction.FixedPairListCoulombTruncatedUniqueCharge.setPotential(potential)
:param potential:
:type potential:
"""
from espressopp import pmi, infinity
from espressopp.esutil import *
from espressopp.interaction.Potential import *
from espressopp.interaction.Interaction import *
from _espressopp import interaction_CoulombTruncatedUniqueCharge, \
interaction_VerletListCoulombTruncatedUniqueCharge, \
interaction_CellListCoulombTruncatedUniqueCharge, \
interaction_FixedPairListCoulombTruncatedUniqueCharge
class CoulombTruncatedUniqueChargeLocal(PotentialLocal, interaction_CoulombTruncatedUniqueCharge):
    """Worker-side truncated Coulomb potential with a fixed charge product qq."""
    def __init__(self, qq=1.0,
                 cutoff=infinity, shift="auto"):
        """Initialize the local CoulombTruncatedUniqueCharge object."""
        # Only construct the C++ object on participating PMI workers (or when
        # running without an active PMI communicator).
        on_worker = (not (pmi._PMIComm and pmi._PMIComm.isActive())
                     or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup())
        if on_worker:
            if shift == "auto":
                # "auto" is the C++ default, so omit the argument entirely.
                cxxinit(self, interaction_CoulombTruncatedUniqueCharge,
                        qq, cutoff)
            else:
                cxxinit(self, interaction_CoulombTruncatedUniqueCharge,
                        qq, cutoff, shift)
class VerletListCoulombTruncatedUniqueChargeLocal(InteractionLocal, interaction_VerletListCoulombTruncatedUniqueCharge):
    """Worker-side Verlet-list interaction for CoulombTruncatedUniqueCharge."""
    def __init__(self, vl):
        on_worker = (not (pmi._PMIComm and pmi._PMIComm.isActive())
                     or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup())
        if on_worker:
            cxxinit(self, interaction_VerletListCoulombTruncatedUniqueCharge, vl)

    def setPotential(self, type1, type2, potential):
        """Assign the pair potential used for the given particle type pair."""
        on_worker = (not (pmi._PMIComm and pmi._PMIComm.isActive())
                     or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup())
        if on_worker:
            self.cxxclass.setPotential(self, type1, type2, potential)

    def getPotential(self, type1, type2):
        """Return the pair potential registered for the given particle type pair."""
        on_worker = (not (pmi._PMIComm and pmi._PMIComm.isActive())
                     or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup())
        if on_worker:
            return self.cxxclass.getPotential(self, type1, type2)
class CellListCoulombTruncatedUniqueChargeLocal(InteractionLocal, interaction_CellListCoulombTruncatedUniqueCharge):
    """Local cell-list interaction wrapping the CoulombTruncatedUniqueCharge potential."""

    @staticmethod
    def _worker_active():
        # True when PMI is inactive or this MPI rank belongs to the CPU group.
        comm = pmi._PMIComm
        return not (comm and comm.isActive()) or pmi._MPIcomm.rank in comm.getMPIcpugroup()

    def __init__(self, stor):
        if self._worker_active():
            cxxinit(self, interaction_CellListCoulombTruncatedUniqueCharge, stor)

    def setPotential(self, type1, type2, potential):
        """Register *potential* for the particle-type pair (type1, type2)."""
        if self._worker_active():
            self.cxxclass.setPotential(self, type1, type2, potential)
class FixedPairListCoulombTruncatedUniqueChargeLocal(InteractionLocal, interaction_FixedPairListCoulombTruncatedUniqueCharge):
    """Local fixed-pair-list interaction wrapping the CoulombTruncatedUniqueCharge potential."""

    @staticmethod
    def _worker_active():
        # True when PMI is inactive or this MPI rank belongs to the CPU group.
        comm = pmi._PMIComm
        return not (comm and comm.isActive()) or pmi._MPIcomm.rank in comm.getMPIcpugroup()

    def __init__(self, system, vl, potential):
        if self._worker_active():
            cxxinit(self, interaction_FixedPairListCoulombTruncatedUniqueCharge, system, vl, potential)

    def setPotential(self, potential):
        """Set the potential used for every pair in the fixed pair list."""
        if self._worker_active():
            self.cxxclass.setPotential(self, potential)
if pmi.isController:
    # Controller-side proxy classes: the calls/properties listed in
    # pmiproxydefs are forwarded by PMI to the *Local classes above,
    # which run on the workers.
    class CoulombTruncatedUniqueCharge(Potential):
        'The CoulombTruncatedUniqueCharge potential.'
        pmiproxydefs = dict(
            cls = 'espressopp.interaction.CoulombTruncatedUniqueChargeLocal',
            pmiproperty = ['qq']
        )
    class VerletListCoulombTruncatedUniqueCharge(Interaction):
        # NOTE(review): __metaclass__ is Python-2 syntax; confirm pmi.Proxy is
        # also applied under Python 3 (e.g. via the Interaction base class).
        __metaclass__ = pmi.Proxy
        pmiproxydefs = dict(
            cls = 'espressopp.interaction.VerletListCoulombTruncatedUniqueChargeLocal',
            pmicall = ['setPotential','getPotential']
        )
    class CellListCoulombTruncatedUniqueCharge(Interaction):
        __metaclass__ = pmi.Proxy
        pmiproxydefs = dict(
            cls = 'espressopp.interaction.CellListCoulombTruncatedUniqueChargeLocal',
            pmicall = ['setPotential']
        )
    class FixedPairListCoulombTruncatedUniqueCharge(Interaction):
        __metaclass__ = pmi.Proxy
        pmiproxydefs = dict(
            cls = 'espressopp.interaction.FixedPairListCoulombTruncatedUniqueChargeLocal',
            pmicall = ['setPotential']
        )
|
fedepad/espressopp
|
src/interaction/CoulombTruncatedUniqueCharge.py
|
Python
|
gpl-3.0
| 7,179
|
[
"ESPResSo"
] |
3c592b5950f7e9777963ded160c941dc2ce56e34a05c352e237109f2516e50a5
|
import abc
import array
import math
import os
import pickle
import re
import signal
import subprocess as sub
from datetime import datetime
from shutil import copy2, rmtree, which
from acpype import __version__ as version
from acpype.logger import set_logging_conf as logger
from acpype.mol import Angle, Atom, AtomType, Bond, Dihedral
from acpype.params import (
MAXTIME,
TLEAP_TEMPLATE,
binaries,
cal,
dictAtomTypeAmb2OplsGmxCode,
dictAtomTypeGaff2OplsGmxCode,
diffTol,
ionOrSolResNameList,
leapAmberFile,
maxDist,
maxDist2,
minDist,
minDist2,
oplsCode2AtomTypeDict,
outTopols,
qConv,
qDict,
radPi,
specialGaffAtoms,
)
from acpype.utils import (
_getoutput,
checkOpenBabelVersion,
distanceAA,
elapsedTime,
find_bin,
imprDihAngle,
job_pids_family,
parmMerge,
while_replace,
)
# --- module-level banner and metadata --------------------------------------
year = datetime.today().year  # copyright year shown in the banner
tag = version  # acpype release tag
# Banner line; the surrounding newlines are intentional (frameLine math below).
lineHeader = f"""
| ACPYPE: AnteChamber PYthon Parser interfacE v. {tag} (c) {year} AWSdS |
"""
frameLine = (len(lineHeader) - 2) * "="  # '=' ruler matching the banner width
header = f"{frameLine}{lineHeader}{frameLine}"
# TODO:
# Howto Charmm and Amber with NAMD
# Howto build topology for a modified amino acid
# CYANA topology files
head = "%s created by acpype (v: " + tag + ") on %s\n"  # per-output-file header template
date = datetime.now().ctime()  # timestamp stamped into generated files
# PID of the spawned antechamber process; set in execAntechamber,
# read by AbstractTopol.signal_handler on SIGALRM.
pid: int
class Topology_14:
    """
    AMBER topology abstraction for non-uniform 1-4 scale factors.

    The relevant prmtop ``%FLAG`` sections are parsed into ``array.array('d')``
    attributes and can be re-emitted as explicit GROMACS ``[ pairs_nb ]``
    entries (see ``print_gmx_pairs`` / ``patch_gmx_topol14``).
    """

    # (prmtop flag, attribute name) pairs; each flag's numbers are parsed
    # into the attribute on the same row.
    _SECTIONS = (
        ("%FLAG POINTERS", "pointers"),
        ("%FLAG CHARGE", "charge"),
        ("%FLAG ATOM_TYPE_INDEX", "atom_type_index"),
        ("%FLAG NONBONDED_PARM_INDEX", "nonbonded_parm_index"),
        ("%FLAG SCEE_SCALE_FACTOR", "scee_scale_factor"),
        ("%FLAG SCNB_SCALE_FACTOR", "scnb_scale_factor"),
        ("%FLAG DIHEDRAL_FORCE_CONSTANT", "dihedral_force_constants"),
        ("%FLAG DIHEDRAL_PERIODICITY", "dihedral_periodicity"),
        ("%FLAG DIHEDRAL_PHASE", "dihedral_phase"),
        ("%FLAG DIHEDRALS_INC_HYDROGEN", "dihedral_yes_H"),
        ("%FLAG DIHEDRALS_WITHOUT_HYDROGEN", "dihedral_no_H"),
        ("%FLAG LENNARD_JONES_ACOEF", "lennard_jones_acoef"),
        ("%FLAG LENNARD_JONES_BCOEF", "lennard_jones_bcoef"),
    )

    def __init__(self) -> None:
        # every section starts out as an empty array of C doubles
        for _flag, attr in self._SECTIONS:
            setattr(self, attr, array.array("d"))

    def read_amber_topology(self, buff):
        """Populate all section arrays from the prmtop text *buff*."""
        for flag, attr in self._SECTIONS:
            try:
                setattr(self, attr, self.p7_array_read(buff, flag))
            except Exception:
                logger().exception(f"Skipping non-existent attributes {attr} {flag}")

    @staticmethod
    def skipline(buff, index):
        """Return the index just past the next newline at or after *index*."""
        pos = index
        while buff[pos] != "\n":
            pos += 1
        return pos + 1

    def p7_array_read(self, buff, flag_string):
        """Parse the whitespace-separated numbers of one %FLAG section into doubles."""
        values = array.array("d")
        pos = buff.index(flag_string)
        pos = self.skipline(buff, pos)  # skip the %FLAG line itself
        pos = self.skipline(buff, pos)  # skip the %FORMAT line
        while True:
            # eat whitespace / newlines between tokens
            while buff[pos] in " \t\n":
                pos += 1
            if buff[pos] == "%":
                break  # next %FLAG section reached: done
            start = pos
            while buff[pos] not in " \t\n":
                pos += 1
            values.append(float(buff[start:pos]))
        return values

    def print_gmx_pairs(self):
        """Build the GROMACS ``[ pairs_nb ]`` section from the 1-4 dihedral pairs."""
        out = ["[ pairs_nb ]\n; ai aj funct qi qj sigma epsilon\n"]
        dihedrals = self.dihedral_yes_H + self.dihedral_no_H
        # dihedral records are 5 entries wide: atoms i, j, k, l (stored as
        # coordinate-index * 3; k may be negative to flag an excluded 1-4)
        # plus a 1-based parameter index
        for rec in range(0, len(dihedrals), 5):
            if dihedrals[rec + 2] <= 0:
                continue  # 1-4 interaction excluded for this record
            parm_idx = int(dihedrals[rec + 4]) - 1
            scee = self.scee_scale_factor[parm_idx] or 1.2  # 0 means "use default"
            ai = int(abs(dihedrals[rec]) / 3)
            al = int(abs(dihedrals[rec + 3]) / 3)
            qi = self.charge[ai] / qConv
            ql = self.charge[al] / qConv / scee
            ntypes = int(self.pointers[1])
            idx_i = int(self.atom_type_index[ai])
            idx_l = int(self.atom_type_index[al])
            nb_idx = int(self.nonbonded_parm_index[ntypes * (idx_i - 1) + idx_l - 1]) - 1
            scnb = self.scnb_scale_factor[parm_idx] or 2  # 0 means "use default"
            acoef = self.lennard_jones_acoef[nb_idx] / scnb
            bcoef = self.lennard_jones_bcoef[nb_idx] / scnb
            if bcoef != 0:
                sigma6 = acoef / bcoef
            else:
                sigma6 = 1  # arbitrary and doesn't matter (epsilon is 0)
            # recover sigma/epsilon from A/B coefficients; kcal->kJ, Ang->nm
            epsilon = bcoef / 4 / sigma6 * 4.184
            sigma = sigma6 ** (1 / 6) / 10
            out.append(
                f"{ai + 1:>10.0f} {al + 1:>10.0f} {1:>6.0f} "
                + f"{qi:>10.6f} {ql:>10.6f} "
                + f"{sigma:>15.5e} {epsilon:>15.5e}\n"
            )
        return "".join(out)

    def hasNondefault14(self):
        """Return True when any 1-4 scale factor differs from the AMBER defaults (1.2 / 2)."""
        if any(v not in (0, 1.2) for v in self.scee_scale_factor):
            return True
        return any(v not in (0, 2) for v in self.scnb_scale_factor)

    def patch_gmx_topol14(self, gmx_init_top):
        """Rewrite *gmx_init_top* with explicit nb pairs and gen-pairs disabled."""
        nb_pairs = self.print_gmx_pairs()
        i_defaults = gmx_init_top.index("\n[ atomtypes ]")
        i_pairs = gmx_init_top.index("[ pairs ]")
        i_angles = gmx_init_top.index("\n[ angles ]")
        defaults_block = (
            "\n\n[ defaults ]\n"
            + "; nbfunc comb-rule gen-pairs \n"
            + "1 2 no \n"
        )
        first_line = gmx_init_top.splitlines()[0]
        return first_line + defaults_block + gmx_init_top[i_defaults:i_pairs] + nb_pairs + gmx_init_top[i_angles:]
class AbstractTopol(abc.ABC):
"""
Abstract super class to build topologies
"""
    @abc.abstractmethod
    def __init__(self):
        # Canonical inventory of every attribute used by the topology-building
        # pipeline; concrete subclasses overwrite these in their own __init__
        # before the workflow methods run.
        # NOTE(review): self.level is read by the print*/logging helpers but is
        # never initialized here -- presumably set by subclasses; confirm.
        # --- run options / flags ---
        self.debug = None
        self.verbose = None
        self.chargeVal = None
        self.tmpDir = None
        self.absInputFile = None
        self.chargeType = None
        self.obabelExe = None
        self.baseName = None
        self.acExe = None
        self.force = None
        self.acBaseName = None
        self.atomType = None
        self.acMol2FileName = None
        self.multiplicity = None
        self.qFlag = None
        self.ekFlag = None
        self.timeTol = None
        # --- antechamber/tleap/parmchk inputs and outputs ---
        self.acXyzFileName = None
        self.acTopFileName = None
        self.acParDict = None
        self.tleapExe = None
        self.parmchkExe = None
        self.acFrcmodFileName = None
        self.gaffDatfile = None
        # --- working directories / conversion bookkeeping ---
        self.homeDir = None
        self.rootDir = None
        self.extOld = None
        self.direct = None
        self.merge = None
        self.gmx4 = None
        self.sorted = None
        self.chiral = None
        self.outTopols = None
        self.ext = None
        self.xyzFileData = None
        self.charmmBase = None
        self.allhdg = None
        self.topo14Data = None
        self.atomPairs = None
        self.properDihedralsGmx45 = None
        self.properDihedralsAlphaGamma = None
        self.properDihedralsCoefRB = None
        self.resName = None
        # --- captured tool logs ---
        self.acLog = None
        self.tleapLog = None
        self.parmchkLog = None
        self.inputFile = None
        self.obabelLog = None
        self.absHomeDir = None
        # --- parsed topology data ---
        self.molTopol = None
        self.topFileData = None
        self.residueLabel = None
        self._atomTypeNameList = None
        self.atomTypeSystem = None
        self.totalCharge = None
        self.atoms = None
        self.atomTypes = None
        self.pbc = None
        self.bonds = None
        self.angles = None
        self.properDihedrals = None
        self.improperDihedrals = None
        self.condensedProperDihedrals = None
        self.chiralGroups = None
        self.excludedAtoms = None
        self.atomsGromacs = None
        self.atomTypesGromacs = None
        # --- CNS output file names ---
        self.CnsTopFileName = None
        self.CnsInpFileName = None
        self.CnsParFileName = None
        self.CnsPdbFileName = None
        # --- SMILES input handling ---
        self.is_smiles = None
        self.smiles = None
        self.amb2gmx = None
def printDebug(self, text=""):
"""Debug log level."""
logger(self.level).debug(f"{while_replace(text)}")
def printWarn(self, text=""):
"""Warn log level."""
logger(self.level).warning(f"{while_replace(text)}")
def printError(self, text=""):
"""Error log level."""
logger(self.level).error(f"{while_replace(text)}")
def printMess(self, text=""):
"""Info log level."""
logger(self.level).info(f"==> {while_replace(text)}")
def printDebugQuoted(self, text=""):
"""Print quoted messages."""
logger(self.level).debug(10 * "+" + "start_quote" + 59 * "+")
logger(self.level).debug(while_replace(text))
logger(self.level).debug(10 * "+" + "end_quote" + 61 * "+")
def printErrorQuoted(self, text=""):
"""Print quoted messages."""
logger(self.level).error(10 * "+" + "start_quote" + 59 * "+")
logger(self.level).error(while_replace(text))
logger(self.level).error(10 * "+" + "end_quote" + 61 * "+")
def search(self, name=None, alist=False):
"""
Returns a list with all atomName matching 'name' or just the first case.
"""
ll = [x for x in self.atoms if x.atomName == name.upper()]
if ll and not alist:
ll = ll[0]
return ll
def checkSmiles(self):
"""
Check if input arg is a SMILES string.
Returns:
bool: True/False
"""
if find_bin(self.binaries["obabel_bin"]):
if checkOpenBabelVersion() >= 300:
from openbabel import openbabel as ob
from openbabel import pybel
ob.cvar.obErrorLog.StopLogging()
elif checkOpenBabelVersion() >= 200 and checkOpenBabelVersion() < 300:
import openbabel as ob
import pybel # type: ignore
ob.cvar.obErrorLog.StopLogging()
else:
logger(self.level).warning("WARNING: your input may be a SMILES but")
logger(self.level).warning(" without OpenBabel, this functionality won't work")
return False
# Check if input is a smiles string
try:
ob.obErrorLog.SetOutputLevel(0)
pybel.readstring("smi", self.smiles)
return True
except Exception:
ob.obErrorLog.SetOutputLevel(0)
return False
    def guessCharge(self):
        """
        Guess the charge of a system based on antechamber.

        Honors an explicit user value (``self.chargeVal``) or user charges in a
        mol2 file; otherwise runs ``antechamber -c gas`` inside ``self.tmpDir``
        and parses the result. On success sets ``self.chargeVal`` to the
        rounded net charge. Returns None in case of error.
        """
        done = False  # True once charges were read from the user's mol2
        error = False
        charge = self.chargeVal
        localDir = os.path.abspath(".")
        if not os.path.exists(self.tmpDir):
            os.mkdir(self.tmpDir)
        if not os.path.exists(os.path.join(self.tmpDir, self.inputFile)):
            copy2(self.absInputFile, self.tmpDir)
        os.chdir(self.tmpDir)
        if self.chargeType == "user":
            if self.ext == ".mol2":
                self.printMess("Reading user's charges from mol2 file...")
                charge = self.readMol2TotalCharge(self.inputFile)
                done = True
            else:
                # user charges only exist in mol2 input; fall back to bcc
                self.chargeType = "bcc"
                self.printWarn("cannot read charges from a PDB file")
                self.printWarn("using now 'bcc' method for charge")
        if self.chargeVal is None and not done:
            self.printWarn("no charge value given, trying to guess one...")
            mol2FileForGuessCharge = self.inputFile
            if self.ext == ".pdb":
                # antechamber's gas method needs mol2; convert via obabel first
                cmd = f"{self.obabelExe} -ipdb {self.inputFile} -omol2 -O {self.baseName}.mol2"
                self.printDebug(f"guessCharge: {cmd}")
                out = _getoutput(cmd)
                self.printDebug(out)
                mol2FileForGuessCharge = os.path.abspath(f"{self.baseName}.mol2")
                in_mol = "mol2"
            else:
                in_mol = self.ext[1:]
                if in_mol == "mol":
                    in_mol = "mdl"
            cmd = f"{self.acExe} -dr no -i {mol2FileForGuessCharge} -fi {in_mol} -o tmp -fo mol2 -c gas -pf n"
            logger(self.level).debug(while_replace(cmd))
            log = _getoutput(cmd).strip()
            if os.path.exists("tmp"):
                charge = self.readMol2TotalCharge("tmp")
            else:
                # no output written: scrape the charge from antechamber's log
                try:
                    charge = float(
                        log.strip()
                        .split("equal to the total charge (")[-1]
                        .split(") based on Gasteiger atom type, exit")[0]
                    )
                except Exception:
                    error = True
            # NOTE(review): a genuinely neutral molecule (charge == 0) also
            # lands in this error path and is retried with net charge 0.
            if not charge:
                error = True
                charge = 0
            if error:
                self.printError("guessCharge failed")
                os.chdir(localDir)
                rmtree(self.tmpDir)
                self.printErrorQuoted(log)
                self.printMess("Trying with net charge = 0 ...")
        charge = float(charge)
        charge2 = int(round(charge))  # net charge must be integral
        drift = abs(charge2 - charge)
        self.printDebug(f"Net charge drift '{drift:3.6f}'")
        if drift > diffTol:
            self.printError(f"Net charge drift '{drift:3.5f}' bigger than tolerance '{diffTol:3.5f}'")
            if not self.force:
                msg = "Error with calculated charge"
                logger(self.level).error(msg)
                rmtree(self.tmpDir)
                raise Exception(msg)
        self.chargeVal = str(charge2)
        self.printMess(f"... charge set to {charge2}")
        os.chdir(localDir)
    def setResNameCheckCoords(self):
        """
        Set a 3 letter residue name and check coords for issues
        like duplication, atoms too close or too sparse.
        """
        exit_ = False
        localDir = os.path.abspath(".")
        if not os.path.exists(self.tmpDir):
            os.mkdir(self.tmpDir)
        copy2(self.absInputFile, self.tmpDir)
        os.chdir(self.tmpDir)
        exten = self.ext[1:]
        if self.ext == ".pdb":
            tmpFile = open(self.inputFile)
        else:
            if exten == "mol":
                exten = "mdl"
            # convert non-pdb input to antechamber's 'ac' format for parsing
            cmd = f"{self.acExe} -dr no -i {self.inputFile} -fi {exten} -o tmp -fo ac -pf y"
            self.printDebug(cmd)
            out = _getoutput(cmd)
            if not out.isspace():
                self.printDebug(out)
            try:
                tmpFile = open("tmp")
            except Exception:
                rmtree(self.tmpDir)
                raise
        tmpData = tmpFile.readlines()
        residues = set()
        coords = {}
        # Collect residue names and group atom records by their coordinate
        # string (columns 30-54 of the PDB-like record).
        for line in tmpData:
            if "ATOM " in line or "HETATM" in line:
                residues.add(line[17:20])
                at = line[0:17]
                cs = line[30:54]
                if cs in coords:
                    coords[cs].append(at)
                else:
                    coords[cs] = [at]
        if len(residues) > 1:
            self.printError(f"more than one residue detected '{str(residues)}'")
            self.printError(f"verify your input file '{self.inputFile}'. Aborting ...")
            msg = "Only ONE Residue is allowed for ACPYPE to work"
            logger(self.level).error(msg)
            raise Exception(msg)
        dups = ""
        shortd = ""
        longd = ""
        longSet = set()
        id_ = 0
        items = list(coords.items())
        ll = len(items)
        # Pairwise distance scan: flag duplicated coordinates, atoms closer
        # than minDist and atoms with no neighbor within maxDist.
        for item in items:
            id_ += 1
            if len(item[1]) > 1:  # if True means atoms with same coordinates
                for i in item[1]:
                    dups += f"{i} {item[0]}\n"
            for id2 in range(id_, ll):
                item2 = items[id2]
                # coordinate string is three fixed-width (8 char) float fields
                c1 = list(map(float, [item[0][i : i + 8] for i in range(0, 24, 8)]))
                c2 = list(map(float, [item2[0][i : i + 8] for i in range(0, 24, 8)]))
                dist2 = distanceAA(c1, c2)
                if dist2 < minDist2:
                    dist = math.sqrt(dist2)
                    shortd += f"{dist:8.5f} {item[1]} {item2[1]}\n"
                if dist2 < maxDist2:  # and not longOK:
                    longSet.add(str(item[1]))
                    longSet.add(str(item2[1]))
            if str(item[1]) not in longSet and ll > 1:
                longd += f"{item[1]}\n"
        if dups:
            self.printError(f"Atoms with same coordinates in '{self.inputFile}'!")
            self.printErrorQuoted(dups[:-1])
            exit_ = True
        if shortd:
            self.printError(f"Atoms TOO close (< {minDist} Ang.)")
            self.printErrorQuoted(f"Dist (Ang.) Atoms\n{shortd[:-1]}")
            exit_ = True
        if longd:
            self.printError(f"Atoms TOO scattered (> {maxDist} Ang.)")
            self.printErrorQuoted(longd[:-1])
            exit_ = True
        if exit_:
            if self.force:
                self.printWarn("You chose to proceed anyway with '-f' option. GOOD LUCK!")
            else:
                self.printError("Use '-f' option if you want to proceed anyway. Aborting ...")
                if not self.debug:
                    rmtree(self.tmpDir)
                msg = "Coordinates issues with your system"
                logger(self.level).error(msg)
                # NOTE(review): when not in debug mode, tmpDir was already
                # removed above, so this second rmtree would raise -- confirm
                # whether it is intentional.
                rmtree(self.tmpDir)
                raise Exception(msg)
        # escape resname list index out of range: no RES name in pdb for example
        resname = list(residues)[0].strip()
        if not resname:
            resname = "LIG"
            self.printWarn("No residue name identified, using default resname: 'LIG'")
        newresname = resname
        # To avoid resname likes: 001 (all numbers), 1e2 (sci number), ADD : reserved terms for leap
        leapWords = [
            "_cmd_options_",
            "_types_",
            "add",
            "addAtomTypes",
            "addIons",
            "addIons2",
            "addPath",
            "addPdbAtomMap",
            "addPdbResMap",
            "alias",
            "alignAxes",
            "bond",
            "bondByDistance",
            "center",
            "charge",
            "check",
            "clearPdbAtomMap",
            "clearPdbResMap",
            "clearVariables",
            "combine",
            "copy",
            "createAtom",
            "createParmset",
            "createResidue",
            "createUnit",
            "crossLink",
            "debugOff",
            "debugOn",
            "debugStatus",
            "deleteBond",
            "deleteOffLibEntry",
            "deleteRestraint",
            "desc",
            "deSelect",
            "displayPdbAtomMap",
            "displayPdbResMap",
            "edit",
            "flip",
            "groupSelectedAtoms",
            "help",
            "impose",
            "list",
            "listOff",
            "loadAmberParams",
            "loadAmberPrep",
            "loadMol2",
            "loadOff",
            "loadPdb",
            "loadPdbUsingSeq",
            "logFile",
            "matchVariables",
            "measureGeom",
            "quit",
            "relax",
            "remove",
            "restrainAngle",
            "restrainBond",
            "restrainTorsion",
            "saveAmberParm",
            "saveAmberParmPert",
            "saveAmberParmPol",
            "saveAmberParmPolPert",
            "saveAmberPrep",
            "saveMol2",
            "saveOff",
            "saveOffParm",
            "savePdb",
            "scaleCharges",
            "select",
            "sequence",
            "set",
            "setBox",
            "solvateBox",
            "solvateCap",
            "solvateDontClip",
            "solvateOct",
            "solvateShell",
            "source",
            "transform",
            "translate",
            "verbosity",
            "zMatrix",
        ]
        isLeapWord = False
        for word in leapWords:
            if resname.upper().startswith(word.upper()):
                self.printDebug(f"Residue name is a reserved word: '{word.upper()}'")
                isLeapWord = True
        try:
            float(resname)
            self.printDebug(f"Residue name is a 'number': '{resname}'")
            isNumber = True
        except ValueError:
            isNumber = False
        # names starting with a digit, looking like numbers, or colliding with
        # leap commands get an 'R' prefix; non-alphanumeric names become 'MOL'
        if resname[0].isdigit() or isNumber or isLeapWord:
            newresname = "R" + resname
        if not resname.isalnum():
            newresname = "MOL"
        if newresname != resname:
            self.printWarn(
                f"In {self.acBaseName}.lib, residue name will be '{newresname}' instead of '{resname}' elsewhere"
            )
        self.resName = newresname
        os.chdir(localDir)
        self.printDebug("setResNameCheckCoords done")
def readMol2TotalCharge(self, mol2File):
"""Reads the charges in given mol2 file and returns the total."""
charge = 0.0
ll = []
cmd = f"{self.acExe} -dr no -i {mol2File} -fi mol2 -o tmp -fo mol2 -c wc -cf tmp.crg -pf n"
self.printDebug(cmd)
log = _getoutput(cmd)
if os.path.exists("tmp.crg"):
tmpFile = open("tmp.crg")
tmpData = tmpFile.readlines()
for line in tmpData:
ll += line.split()
charge = sum(map(float, ll))
if not log.isspace():
self.printDebugQuoted(log)
self.printDebug("readMol2TotalCharge: " + str(charge))
return charge
    def execAntechamber(self, chargeType=None, atomType=None) -> bool:
        """
        Run antechamber on the (possibly converted) input file.

        Builds the antechamber command line, arms a SIGALRM watchdog
        (``self.timeTol`` seconds, handled by ``signal_handler``) and spawns
        the process, capturing combined stdout/stderr into ``self.acLog``.

        Args:
            chargeType ([str], optional): bcc, gas or user.
                Defaults to None, i.e. ``self.chargeType``.
            atomType ([str], optional): gaff, amber, gaff2, amber2.
                Defaults to None, i.e. ``self.atomType``.

        Returns:
            bool: True if antechamber failed to produce the mol2 output,
            False on success.

        Run ``antechamber -help`` / ``antechamber -L`` for the full list of
        options, supported file formats and charge methods.
        """
        global pid
        self.printMess("Executing Antechamber...")
        self.makeDir()
        ct = chargeType or self.chargeType
        at = atomType or self.atomType
        if "amber2" in at:
            at = "amber"
        if ct == "user":
            ct = ""  # charges come from the input file itself; no -c flag
        else:
            ct = f"-c {ct}"
        exten = self.ext[1:]
        if exten == "mol":
            exten = "mdl"  # antechamber calls the MDL mol format 'mdl'
        cmd = "{} -dr no -i {} -fi {} -o {} -fo mol2 {} -nc {} -m {} -s 2 -df {} -at {} -pf n {}".format(
            self.acExe,
            self.inputFile,
            exten,
            self.acMol2FileName,
            ct,
            self.chargeVal,
            self.multiplicity,
            self.qFlag,
            at,
            self.ekFlag,
        )
        self.printDebug(cmd)
        if os.path.exists(self.acMol2FileName) and not self.force:
            self.printMess("AC output file present... doing nothing")
        else:
            try:
                os.remove(self.acMol2FileName)
            except Exception:
                self.printDebug("No file left to be removed")
            # watchdog: signal_handler kills the antechamber process family
            # when self.timeTol seconds elapse
            signal.signal(signal.SIGALRM, self.signal_handler)
            signal.alarm(self.timeTol)
            p = sub.Popen(cmd, shell=True, stderr=sub.STDOUT, stdout=sub.PIPE)
            pid = p.pid  # module-level: read by signal_handler
            out = str(p.communicate()[0].decode())  # p.stdout.read()
            self.acLog = out
        if os.path.exists(self.acMol2FileName):
            self.printMess("* Antechamber OK *")
        else:
            self.printErrorQuoted(self.acLog)
            return True
        return False
def signal_handler(self, _signum, _frame): # , pid = 0):
"""Signal handler."""
global pid
pids = job_pids_family(pid)
self.printDebug(f"PID: {pid}, PIDS: {pids}")
self.printMess(f"Timed out! Process {pids} killed, max exec time ({self.timeTol}s) exceeded")
# os.system('kill -15 %s' % pids)
for i in pids.split():
os.kill(int(i), 15)
msg = "Semi-QM taking too long to finish... aborting!"
logger(self.level).error(msg)
raise Exception(msg)
def delOutputFiles(self):
"""Delete temporary output files."""
delFiles = [
"mopac.in",
"tleap.in",
"fixbo.log",
"addhs.log",
"ac_tmp_ot.mol2",
"frcmod.ac_tmp",
"fragment.mol2",
self.tmpDir,
] # , 'divcon.pdb', 'mopac.pdb', 'mopac.out'] #'leap.log'
self.printMess("Removing temporary files...")
for file_ in delFiles:
file_ = os.path.join(self.absHomeDir, file_)
if os.path.exists(file_):
if os.path.isdir(file_):
rmtree(file_)
else:
os.remove(file_)
def checkXyzAndTopFiles(self):
"""Check XYZ and TOP files."""
fileXyz = self.acXyzFileName
fileTop = self.acTopFileName
if os.path.exists(fileXyz) and os.path.exists(fileTop):
return True
return False
def execTleap(self):
"""Execute tleap."""
fail = False
self.makeDir()
if self.ext == ".pdb":
self.printMess("... converting pdb input file to mol2 input file")
if self.convertPdbToMol2():
self.printError("convertPdbToMol2 failed")
if self.execAntechamber():
self.printError("Antechamber failed")
fail = True
if self.execParmchk():
self.printError("Parmchk failed")
fail = True
if fail:
return True
tleapScpt = TLEAP_TEMPLATE % self.acParDict
fp = open("tleap.in", "w")
fp.write(tleapScpt)
fp.close()
cmd = "%s -f tleap.in" % self.tleapExe
if self.checkXyzAndTopFiles() and not self.force:
self.printMess("Topologies files already present... doing nothing")
else:
try:
os.remove(self.acTopFileName)
os.remove(self.acXyzFileName)
except Exception:
self.printDebug("No crd or prm files left to be removed")
self.printMess("Executing Tleap...")
self.printDebug(cmd)
self.tleapLog = _getoutput(cmd)
self.checkLeapLog(self.tleapLog)
if self.checkXyzAndTopFiles():
self.printMess("* Tleap OK *")
else:
self.printErrorQuoted(self.tleapLog)
return True
return False
def checkLeapLog(self, log):
"""Check Leap log."""
log = log.splitlines(True)
check = ""
block = False
for line in log:
# print "*"+line+"*"
if "Checking '" in line:
# check += line
block = True
if "Checking Unit." in line:
block = False
if block:
check += line
self.printDebugQuoted(check[:-1])
def locateDat(self, aFile):
"""Locate a file pertinent to $AMBERHOME/dat/leap/parm/."""
amberhome = os.environ.get("AMBERHOME")
if amberhome:
aFileF = os.path.join(amberhome, "dat/leap/parm", aFile)
if os.path.exists(aFileF):
return aFileF
aFileF = os.path.join(os.path.dirname(self.acExe), "../dat/leap/parm", aFile)
if os.path.exists(aFileF):
return aFileF
return None
def execParmchk(self):
"""Execute parmchk."""
self.makeDir()
cmd = f"{self.parmchkExe} -i {self.acMol2FileName} -f mol2 -o {self.acFrcmodFileName}"
if "amber" in self.atomType:
gaffFile = self.locateDat(self.gaffDatfile)
parmfile = self.locateDat("parm10.dat")
frcmodffxxSB = self.locateDat("frcmod.ff14SB")
# frcmodparmbsc0 = self.locateDat('frcmod.parmbsc0')
parmGaffFile = parmMerge(parmfile, gaffFile)
parmGaffffxxSBFile = parmMerge(parmGaffFile, frcmodffxxSB, frcmod=True)
# parm99gaffff99SBparmbsc0File = parmMerge(parm99gaffff99SBFile, frcmodparmbsc0, frcmod = True)
# parm10file = self.locateDat('parm10.dat') # PARM99 + frcmod.ff99SB + frcmod.parmbsc0 in AmberTools 1.4
cmd += f" -p {parmGaffffxxSBFile}" # Ignoring BSC0
elif "gaff2" in self.atomType:
cmd += " -s 2"
self.printDebug(cmd)
self.parmchkLog = _getoutput(cmd)
if os.path.exists(self.acFrcmodFileName):
check = self.checkFrcmod()
if check:
self.printWarn("Couldn't determine all parameters:")
self.printMess(f"From file '{self.acFrcmodFileName + check}'\n")
else:
self.printMess("* Parmchk OK *")
else:
self.printErrorQuoted(self.parmchkLog)
return True
return False
def checkFrcmod(self):
"""Check FRCMOD file."""
check = ""
frcmodContent = open(self.acFrcmodFileName).readlines()
for line in frcmodContent:
if "ATTN, need revision" in line:
check += line
return check
def convertPdbToMol2(self):
"""Convert PDB to MOL2 by using obabel."""
if self.ext == ".pdb":
if self.execObabel():
self.printError(f"convert pdb to mol2 via {binaries['obabel_bin']} failed")
return True
return False
def convertSmilesToMol2(self):
"""Convert Smiles to MOL2 by using obabel."""
# if not self.obabelExe:
# msg = "SMILES needs OpenBabel python module"
# logger(self.level).error(msg)
# raise Exception(msg)
if checkOpenBabelVersion() >= 300:
from openbabel import pybel
elif checkOpenBabelVersion() >= 200 and checkOpenBabelVersion() < 300:
import pybel # type: ignore
try:
mymol = pybel.readstring("smi", str(self.smiles))
mymol.addh()
mymol.make3D()
mymol.write(self.ext.replace(".", ""), self.absInputFile, overwrite=True)
return True
except Exception:
return False
def execObabel(self):
"""Execute obabel."""
self.makeDir()
cmd = f"{self.obabelExe} -ipdb {self.inputFile} -omol2 -O {self.baseName}.mol2"
self.printDebug(cmd)
self.obabelLog = _getoutput(cmd)
self.ext = ".mol2"
self.inputFile = self.baseName + self.ext
self.acParDict["ext"] = "mol2"
if os.path.exists(self.inputFile):
self.printMess("* Babel OK *")
else:
self.printErrorQuoted(self.obabelLog)
return True
return False
def makeDir(self):
"""Make Dir."""
os.chdir(self.rootDir)
self.absHomeDir = os.path.abspath(self.homeDir)
if not os.path.exists(self.homeDir):
os.mkdir(self.homeDir)
os.chdir(self.homeDir)
if self.absInputFile:
copy2(self.absInputFile, ".")
return True
def createACTopol(self):
"""
If successful, Amber Top and Xyz files will be generated.
"""
if self.execTleap():
self.printError("Tleap failed")
if not self.debug:
self.delOutputFiles()
def createMolTopol(self):
"""
Create MolTopol obj.
"""
self.topFileData = open(self.acTopFileName).readlines()
self.molTopol = MolTopol(
self, # acTopolObj
verbose=self.verbose,
debug=self.debug,
gmx4=self.gmx4,
merge=self.merge,
direct=self.direct,
is_sorted=self.sorted,
chiral=self.chiral,
)
if self.outTopols:
if "cns" in self.outTopols:
self.molTopol.writeCnsTopolFiles()
if "gmx" in self.outTopols:
self.molTopol.writeGromacsTopolFiles()
if "charmm" in self.outTopols:
self.writeCharmmTopolFiles()
try: # scape the pickle save error
self.pickleSave()
except Exception:
self.printError("pickleSave failed")
if not self.debug:
self.delOutputFiles() # required to use on Jupyter Notebook
os.chdir(self.rootDir)
def pickleSave(self):
"""
Create portable serialized representations of System's Python objects.
Example:
to restore:
.. code-block:: python
from acpype import *
# import cPickle as pickle
import pickle
mol = pickle.load(open('DDD.pkl','rb'))
"""
pklFile = self.baseName + ".pkl"
dumpFlag = False
if not os.path.exists(pklFile):
mess = "Writing pickle file %s" % pklFile
dumpFlag = True
elif self.force:
mess = "Overwriting pickle file %s" % pklFile
dumpFlag = True
else:
mess = "Pickle file %s already present... doing nothing" % pklFile
self.printMess(mess)
if dumpFlag:
with open(pklFile, "wb") as f: # for python 3.3 or higher
pickle.dump(self, f)
def getFlagData(self, flag):
    """For a given acFileTop flag, return a list of the data related.

    Values are split in fixed-width fields (width read from the %FORMAT
    line) and converted to float or int when possible; RESIDUE_LABEL data
    is always kept as strings.  For AMBER_ATOM_TYPE, any type starting
    with a digit is prefixed with 'A' since GROMACS rejects such names.

    Args:
        flag (str): prmtop flag name, e.g. 'MASS' or 'CHARGE'

    Returns:
        list: the parsed values for that flag

    Raises:
        Exception: if self.topFileData is empty
    """

    def proc_line(line):
        # split the raw line in fields of fixed width f (set from %FORMAT)
        data = line.rstrip()
        sdata = [data[i : i + f].strip() for i in range(0, len(data), f)]
        # BUGFIX (behavior-preserving): the original read
        #   if "+" and "." in data ...
        # which, by operator precedence, is just `"." in data` -- the "+"
        # literal was dead code.  Made explicit here.
        if "." in data and flag != "RESIDUE_LABEL":  # it's a float
            ndata = list(map(float, sdata))
        elif flag != "RESIDUE_LABEL":
            try:  # try if it's integer
                ndata = list(map(int, sdata))
            except Exception:
                ndata = sdata
        else:
            ndata = sdata
        return ndata

    block = False
    tFlag = "%FLAG " + flag
    ndata = []
    if not self.topFileData:
        msg = "PRMTOP file empty?"
        logger(self.level).error(msg)
        raise Exception(msg)
    for rawLine in self.topFileData:
        line = rawLine.replace("\r", "").replace("\n", "")
        if tFlag in line:
            block = True
            continue
        if block and "%FLAG " in line:
            break
        if block:
            if "%FORMAT" in line:
                # e.g. '%FORMAT(5E16.8)' -> field width f = 16
                line = line.strip().strip("%FORMAT()").split(".")[0]
                for c in line:
                    if c.isalpha():
                        f = int(line.split(c)[1])
                        break
                continue
            ndata += proc_line(line)
    if flag == "AMBER_ATOM_TYPE":
        nn = []
        ll = set()
        for ii in ndata:
            if ii[0].isdigit():
                ll.add(ii)
                ii = "A" + ii
            nn.append(ii)
        # the original also kept a `prefixed` flag that was set on every
        # iteration, so `prefixed and ll` was equivalent to just `ll`
        if ll:
            self.printDebug("GMX does not like atomtype starting with Digit")
            self.printDebug("prefixing AtomType %s with 'A'." % list(ll))
        ndata = nn
    return ndata  # a list
def getResidueLabel(self):
    """
    Get a 3 capital letters code from acFileTop and store it in
    self.residueLabel (a list of strings).

    Warns when the first residue label is not fully uppercase, since
    some applications (e.g. CNS) may not accept it.
    """
    labels = [str(item) for item in self.getFlagData("RESIDUE_LABEL")]
    first = labels[0]
    if first != first.upper():
        self.printWarn(f"residue label '{first}' in '{self.inputFile}' is not all UPPERCASE")
        self.printWarn("this may raise problem with some applications like CNS")
    self.residueLabel = labels
def getCoords(self):
    """
    For a given acFileXyz file, return a list of coords as::

        [[x1,y1,z1],[x2,y2,z2], etc.]

    Raises:
        Exception: if self.xyzFileData is empty
    """
    if not self.xyzFileData:
        msg = "INPCRD file empty?"
        logger(self.level).error(msg)
        raise Exception(msg)
    # concatenate all coordinate lines, skipping the 2 header lines
    data = "".join(rawLine.replace("\r", "").replace("\n", "") for rawLine in self.xyzFileData[2:])
    # fixed-width fields of 12 characters each
    values = list(map(float, [data[i : i + 12] for i in range(0, len(data), 12)]))
    # group the flat value list in (x, y, z) triplets
    gdata = []
    for i in range(0, len(values), 3):
        gdata.append([values[i], values[i + 1], values[i + 2]])
    self.printDebug("getCoords done")
    return gdata
def getAtoms(self):
    """
    Set a list with all atoms objects built from dat in acFileTop.

    Set also atomType system is gaff or amber, list of atomTypes, resid
    and system's total charge.

    Attributes set: atoms, atomTypes, atomTypeSystem, totalCharge, pbc,
    _atomTypeNameList.
    """
    atomNameList = self.getFlagData("ATOM_NAME")
    atomTypeNameList = self.getFlagData("AMBER_ATOM_TYPE")
    self._atomTypeNameList = atomTypeNameList
    massList = self.getFlagData("MASS")
    chargeList = self.getFlagData("CHARGE")  # raw prmtop charges; divided by qConv below
    # RESIDUE_POINTER holds 1-based first-atom indices per residue; the
    # trailing 0 is a sentinel so indexing past the last residue never matches
    resIds = self.getFlagData("RESIDUE_POINTER") + [0]
    coords = self.getCoords()
    ACOEFs, BCOEFs = self.getABCOEFs()
    atoms = []
    atomTypes = []
    tmpList = []  # a list with unique atom types
    totalCharge = 0.0
    countRes = 0
    id_ = 0
    FirstNonSoluteId = None
    for atomName in atomNameList:
        if atomName != atomName.upper():
            self.printDebug("atom name '%s' HAS to be all UPPERCASE... Applying this here." % atomName)
            atomName = atomName.upper()
        atomTypeName = atomTypeNameList[id_]
        if id_ + 1 == resIds[countRes]:
            # first atom of the next residue reached
            resid = countRes
            countRes += 1
        resName = self.residueLabel[resid]
        # NOTE(review): `not FirstNonSoluteId` is also true when the stored
        # index is 0, so an ion/solvent starting at atom 0 would be
        # re-detected -- confirm whether `is None` was intended
        if resName in ionOrSolResNameList and not FirstNonSoluteId:
            FirstNonSoluteId = id_
        mass = massList[id_]
        charge = chargeList[id_]
        chargeConverted = charge / qConv
        totalCharge += charge
        coord = coords[id_]
        ACOEF = ACOEFs[id_]
        BCOEF = BCOEFs[id_]
        atomType = AtomType(atomTypeName, mass, ACOEF, BCOEF)
        if atomTypeName not in tmpList:
            # register each distinct atom type only once
            tmpList.append(atomTypeName)
            atomTypes.append(atomType)
        atom = Atom(atomName, atomType, id_ + 1, resid, mass, chargeConverted, coord)
        atoms.append(atom)
        id_ += 1
    # spread the rounding residue over the extreme-charge atoms
    balanceChargeList, balanceValue, balanceIds = self.balanceCharges(chargeList, FirstNonSoluteId)
    for id_ in balanceIds:
        atoms[id_].charge = balanceValue / qConv
    # NOTE(review): this inspects only the *last* atom's type name
    # (loop variable leaking out of the loop above) -- lowercase types
    # are taken to mean GAFF, uppercase classic Amber
    if atomTypeName[0].islower():
        self.atomTypeSystem = "gaff"
    else:
        self.atomTypeSystem = "amber"
    self.printDebug("Balanced TotalCharge %13.10f" % float(sum(balanceChargeList) / qConv))
    self.totalCharge = int(round(totalCharge / qConv))
    self.atoms = atoms
    self.atomTypes = atomTypes
    self.pbc = None
    # extra coordinate entries beyond the atom count hold the periodic box
    if len(coords) == len(atoms) + 2 or len(coords) == len(atoms) * 2 + 2:
        self.pbc = [coords[-2], coords[-1]]
    self.printDebug("PBC = %s" % self.pbc)
    self.printDebug("getAtoms done")
def getBonds(self):
    """Build Bond objects from the prmtop bond tables into self.bonds."""
    kList = self.getFlagData("BOND_FORCE_CONSTANT")
    rList = self.getFlagData("BOND_EQUIL_VALUE")
    codes = self.getFlagData("BONDS_INC_HYDROGEN") + self.getFlagData("BONDS_WITHOUT_HYDROGEN")
    bonds = []
    # entries come in triplets: atom1*3, atom2*3, 1-based bond type index
    for i in range(0, len(codes), 3):
        idAtom1 = codes[i] // 3  # remember python starts with id 0
        idAtom2 = codes[i + 1] // 3
        typeId = codes[i + 2] - 1
        pair = [self.atoms[idAtom1], self.atoms[idAtom2]]
        bonds.append(Bond(pair, kList[typeId], rList[typeId]))
    self.bonds = bonds
    self.printDebug("getBonds done")
def getAngles(self):
    """Build Angle objects from the prmtop angle tables into self.angles."""
    kList = self.getFlagData("ANGLE_FORCE_CONSTANT")
    tList = self.getFlagData("ANGLE_EQUIL_VALUE")
    # for the code lists below, true atom number = index/3 + 1
    codes = self.getFlagData("ANGLES_INC_HYDROGEN") + self.getFlagData("ANGLES_WITHOUT_HYDROGEN")
    angles = []
    # entries come in quadruplets: a1*3, a2*3, a3*3, 1-based angle type index
    for i in range(0, len(codes), 4):
        idAtom1 = codes[i] // 3  # remember python starts with id 0
        idAtom2 = codes[i + 1] // 3
        idAtom3 = codes[i + 2] // 3
        typeId = codes[i + 3] - 1
        triplet = [self.atoms[idAtom1], self.atoms[idAtom2], self.atoms[idAtom3]]
        # equilibrium angle is given in rad in the prmtop
        angles.append(Angle(triplet, kList[typeId], tList[typeId]))
    self.angles = angles
    self.printDebug("getAngles done")
def getDihedrals(self):
    """
    Get dihedrals (proper and imp), condensed list of prop dih and
    atomPairs.

    Attributes set: properDihedrals, improperDihedrals,
    condensedProperDihedrals (propers grouped per identical atom quartet)
    and atomPairs (the 1-4 pairs later used for the GMX [ pairs ] section).
    """
    uniqKpList = self.getFlagData("DIHEDRAL_FORCE_CONSTANT")
    uniqPeriodList = self.getFlagData("DIHEDRAL_PERIODICITY")
    uniqPhaseList = self.getFlagData("DIHEDRAL_PHASE")
    # for list below, true atom number = abs(index)/3 + 1
    dihCodeHList = self.getFlagData("DIHEDRALS_INC_HYDROGEN")
    dihCodeNonHList = self.getFlagData("DIHEDRALS_WITHOUT_HYDROGEN")
    dihCodeList = dihCodeHList + dihCodeNonHList
    properDih = []
    improperDih = []
    condProperDih = []  # list of dihedrals condensed by the same quartet
    # atomPairs = []
    atomPairs = set()
    # each dihedral entry is 5 ints: 4 (signed) atom codes + 1-based parameter id
    for i in range(0, len(dihCodeList), 5):
        idAtom1 = dihCodeList[i] // 3  # remember python starts with id 0
        idAtom2 = dihCodeList[i + 1] // 3
        # 3 and 4 indexes can be negative: if id3 < 0, end group interations
        # in amber are to be ignored; if id4 < 0, dihedral is improper
        idAtom3raw = dihCodeList[i + 2] // 3  # can be negative -> exclude from 1-4vdw
        idAtom4raw = dihCodeList[i + 3] // 3  # can be negative -> Improper
        idAtom3 = abs(idAtom3raw)
        idAtom4 = abs(idAtom4raw)
        dihTypeId = dihCodeList[i + 4] - 1
        atom1 = self.atoms[idAtom1]
        atom2 = self.atoms[idAtom2]
        atom3 = self.atoms[idAtom3]
        atom4 = self.atoms[idAtom4]
        kPhi = uniqKpList[dihTypeId]  # already divided by IDIVF
        period = int(uniqPeriodList[dihTypeId])  # integer
        phase = uniqPhaseList[dihTypeId]  # angle given in rad in prmtop
        if phase == kPhi == 0:
            period = 0  # period is set to 0
        atoms = [atom1, atom2, atom3, atom4]
        dihedral = Dihedral(atoms, kPhi, period, phase)
        if idAtom4raw > 0:
            # proper dihedral; multi-term torsions for the same quartet are
            # condensed by comparing with the previous proper entry (relies
            # on such terms being listed consecutively in the prmtop)
            try:
                atomsPrev = properDih[-1].atoms
            except Exception:
                atomsPrev = []
            properDih.append(dihedral)
            if idAtom3raw < 0 and atomsPrev == atoms:
                condProperDih[-1].append(dihedral)
            else:
                condProperDih.append([dihedral])
            pair = (atom1, atom4)
            # if atomPairs.count(pair) == 0 and idAtom3raw > 0:
            if idAtom3raw > 0:
                atomPairs.add(pair)
        else:
            improperDih.append(dihedral)
    if self.sorted:
        atomPairs = sorted(atomPairs, key=lambda x: (x[0].id, x[1].id))
    self.properDihedrals = properDih
    self.improperDihedrals = improperDih
    self.condensedProperDihedrals = condProperDih  # [[],[],...]
    self.atomPairs = atomPairs  # set((atom1, atom2), ...)
    self.printDebug("getDihedrals done")
def getChirals(self):
    """
    Get chiral atoms (for CNS only!).

    Plus its 4 neighbours and improper dihedral angles
    to store non-planar improper dihedrals.

    Sets self.chiralGroups to a list of
    (chiralAtom, [4 neighbours], improperDihedralAngle) tuples;
    empty when Open Babel is not available.
    """
    if not self._parent.obabelExe:
        self.printWarn("No Openbabel python module, no chiral groups")
        self.chiralGroups = []
        return
    # Version-gated import: from Open Babel 3.x the python bindings live
    # inside the 'openbabel' namespace package.
    if checkOpenBabelVersion() >= 300:
        from openbabel import openbabel as ob
        from openbabel import pybel
    elif checkOpenBabelVersion() >= 200 and checkOpenBabelVersion() < 300:
        import openbabel as ob
        import pybel  # type: ignore
    self.printMess("Using OpenBabel v." + ob.OBReleaseVersion() + "\n")
    " obchiral script - replace the obchiral executable"
    # (the bare strings above/below merely delimit the inlined obchiral logic)
    out = []
    _filename, file_extension = os.path.splitext(self.inputFile)
    mol = pybel.readfile(file_extension.replace(".", ""), self.inputFile)
    for ml in mol:
        for at in ml:
            # record indices of atoms perceived as tetrahedral stereo centres
            if ob.OBStereoFacade(ml.OBMol).HasTetrahedralStereo(at.idx):
                out.append(at.idx)
    " end of obchiral script "
    chiralGroups = []
    for id_ in out:
        # NOTE(review): pybel atom indices appear 1-based while self.atoms
        # is 0-based -- confirm this lookup is intentional
        atChi = self.atoms[id_]
        # collect the 4 atoms bonded to the chiral centre
        quad = []
        for bb in self.bonds:
            bAts = bb.atoms[:]
            if atChi in bAts:
                bAts.remove(atChi)
                quad.append(bAts[0])
        if len(quad) != 4:
            if self.chiral:
                self.printWarn(f"Atom {atChi} has less than 4 connections to 4 different atoms. It's NOT Chiral!")
            continue
        v1, v2, v3, v4 = (x.coords for x in quad)
        chiralGroups.append((atChi, quad, imprDihAngle(v1, v2, v3, v4)))
    self.chiralGroups = chiralGroups
def sortAtomsForGromacs(self):
    """
    Re-sort atoms for gromacs, which expects all hydrogens to immediately
    follow the heavy atom they are bonded to and belong to the same charge
    group.

    Currently, atom mass < 1.2 is taken to denote a proton.  This behaviour
    may be changed by modifying the 'is_hydrogen' function within.

    JDC 2011-02-03
    """
    # Build dictionary of bonded atoms.
    bonded_atoms = dict()
    for atom in self.atoms:
        bonded_atoms[atom] = list()
    for bond in self.bonds:
        [atom1, atom2] = bond.atoms
        bonded_atoms[atom1].append(atom2)
        bonded_atoms[atom2].append(atom1)

    # Define hydrogen and heavy atom classes.
    def is_hydrogen(atom):
        """Check for H."""
        return atom.mass < 1.2

    def is_heavy(atom):
        """Check for non H."""
        return not is_hydrogen(atom)

    # Build list of sorted atoms, assigning charge groups by heavy atom.
    sorted_atoms = list()
    # Track already-placed atoms by identity for O(1) membership instead of
    # the original O(n) `atom in sorted_atoms` list scans (atoms are unique
    # objects, so identity matches list `in` under default equality --
    # confirm Atom defines no custom __eq__).
    placed = set()
    cgnr = 1  # charge group number: each heavy atom is assigned its own charge group
    # First pass: add heavy atoms, followed by the hydrogens bonded to them.
    for atom in self.atoms:
        if is_heavy(atom):
            # Append heavy atom.
            atom.cgnr = cgnr
            sorted_atoms.append(atom)
            placed.add(id(atom))
            # Append all hydrogens bonded to it.
            for bonded_atom in bonded_atoms[atom]:
                if is_hydrogen(bonded_atom) and id(bonded_atom) not in placed:
                    # Append bonded hydrogen.
                    bonded_atom.cgnr = cgnr
                    sorted_atoms.append(bonded_atom)
                    placed.add(id(bonded_atom))
            cgnr += 1
    # Second pass: Add any remaining atoms.
    if len(sorted_atoms) < len(self.atoms):
        for atom in self.atoms:
            if id(atom) not in placed:
                atom.cgnr = cgnr
                sorted_atoms.append(atom)
                placed.add(id(atom))
                cgnr += 1
    # Replace current list of atoms with sorted list.
    self.atoms = sorted_atoms
    # Renumber atoms in sorted list, starting from 1.
    for (index, atom) in enumerate(self.atoms):
        atom.id = index + 1
def balanceCharges(self, chargeList, FirstNonSoluteId=None):
    """
    Spread charges fractions among atoms to balance system's total charge.

    The atoms carrying the extreme charge value within the solute region
    (up to FirstNonSoluteId, when given) absorb the rounding residue so
    that the total charge becomes an integer.  Note that python is very
    annoying about floating points: even after balance there will always
    be some residue of order :math:`e^{-12}` to :math:`e^{-16}`, which is
    believed to vanish once one writes a topology file, say, for CNS or
    GMX, where floats are represented with 4 or 5 maximum decimals.

    Args:
        chargeList (list): raw prmtop charges (modified in place)
        FirstNonSoluteId (int, optional): index of the first solvent/ion atom

    Returns:
        tuple: (chargeList, fixed value, list of ids of adjusted atoms)
    """
    total = sum(chargeList)
    totalConverted = total / qConv
    self.printDebug("charge to be balanced: total %13.10f" % (totalConverted))
    # extreme (largest magnitude) charge within the solute slice
    soluteCharges = chargeList[:FirstNonSoluteId]
    maxVal = max(soluteCharges)
    minVal = min(soluteCharges)
    lim = maxVal if abs(maxVal) >= abs(minVal) else minVal
    nLims = chargeList.count(lim)
    # residue of the rounding to an integer total charge
    diff = totalConverted - round(totalConverted)
    fix = lim - diff * qConv / nLims
    limIds = [idx for idx, c in enumerate(chargeList) if c == lim]
    for idx in limIds:
        chargeList[idx] = fix
    self.printDebug("balanceCharges done")
    return chargeList, fix, limIds
def getABCOEFs(self):
    """Get non-bonded coefficients.

    Maps every atom to the diagonal (self-pair) entry of the
    Lennard-Jones coefficient tables via ATOM_TYPE_INDEX and
    NONBONDED_PARM_INDEX.

    Returns:
        tuple: (ACOEFs, BCOEFs), one value per atom
    """
    typeIds = self.getFlagData("ATOM_TYPE_INDEX")
    nonBonIds = self.getFlagData("NONBONDED_PARM_INDEX")
    rawA = self.getFlagData("LENNARD_JONES_ACOEF")
    rawB = self.getFlagData("LENNARD_JONES_BCOEF")
    ntypes = max(typeIds)
    ACOEFs = []
    BCOEFs = []
    for id_ in range(len(self._atomTypeNameList)):
        atomTypeId = typeIds[id_]
        # 1-based diagonal index of this type paired with itself
        index = ntypes * (atomTypeId - 1) + atomTypeId
        nonBondId = nonBonIds[index - 1]
        ACOEFs.append(rawA[nonBondId - 1])
        BCOEFs.append(rawB[nonBondId - 1])
    self.printDebug("getABCOEFs done")
    return ACOEFs, BCOEFs
def setProperDihedralsCoef(self):
    """
    Create proper dihedrals list with Ryckaert-Bellemans coefficients.

    It takes self.condensedProperDihedrals and returns
    self.properDihedralsCoefRB, a reduced list of quartet atoms + RB.
    Coefficients ready for GMX (multiplied by 4.184)::

        self.properDihedralsCoefRB = [ [atom1,..., atom4], C[0:5] ]

    For proper dihedrals: a quartet of atoms may appear with more than
    one set of parameters and to convert to GMX they are treated as RBs.

    The resulting coefficients calculated here may look slighted different
    from the ones calculated by amb2gmx.pl because python is taken full float
    number from prmtop and not rounded numbers from rdparm.out as amb2gmx.pl does.
    """
    properDihedralsCoefRB = []
    properDihedralsAlphaGamma = []
    properDihedralsGmx45 = []
    for item in self.condensedProperDihedrals:
        # V: Fourier amplitudes per periodicity; C: resulting RB coefficients
        V = 6 * [0.0]
        C = 6 * [0.0]
        for dih in item:
            period = dih.period  # Pn
            kPhi = dih.kPhi  # in rad
            phaseRaw = dih.phase * radPi  # in degree
            phase = int(phaseRaw)  # in degree
            if period > 4 and self.gmx4:
                # RB (GMX4) conversion only covers periodicity <= 4
                rmtree(self.absHomeDir)
                msg = "Likely trying to convert ILDN to RB, DO NOT use option '-z'"
                logger(self.level).error(msg)
                raise Exception(msg)
            if phase in [0, 180]:
                # phase 0/180 terms can be written directly (funct 9 list)
                properDihedralsGmx45.append([item[0].atoms, phaseRaw, kPhi, period])
                if self.gmx4:
                    # accumulate RB coefficients from the Fourier terms,
                    # same recipe as amb2gmx.pl (see writeGromacsTopolFiles doc)
                    if kPhi != 0:
                        V[period] = 2 * kPhi * cal
                    if period == 1:
                        C[0] += 0.5 * V[period]
                        if phase == 0:
                            C[1] -= 0.5 * V[period]
                        else:
                            C[1] += 0.5 * V[period]
                    elif period == 2:
                        if phase == 180:
                            C[0] += V[period]
                            C[2] -= V[period]
                        else:
                            C[2] += V[period]
                    elif period == 3:
                        C[0] += 0.5 * V[period]
                        if phase == 0:
                            C[1] += 1.5 * V[period]
                            C[3] -= 2 * V[period]
                        else:
                            C[1] -= 1.5 * V[period]
                            C[3] += 2 * V[period]
                    elif period == 4:
                        if phase == 180:
                            C[2] += 4 * V[period]
                            C[4] -= 4 * V[period]
                        else:
                            C[0] += V[period]
                            C[2] -= 4 * V[period]
                            C[4] += 4 * V[period]
            else:
                # phases other than 0/180 cannot be expressed as RB;
                # kept apart and written as plain propers (funct 1)
                properDihedralsAlphaGamma.append([item[0].atoms, phaseRaw, kPhi, period])
                # print phaseRaw, kPhi, period
        # one condensed RB entry per quartet (uses the last term's phase)
        if phase in [0, 180]:
            properDihedralsCoefRB.append([item[0].atoms, C])
    self.printDebug("setProperDihedralsCoef done")
    self.properDihedralsCoefRB = properDihedralsCoefRB
    self.properDihedralsAlphaGamma = properDihedralsAlphaGamma
    self.properDihedralsGmx45 = properDihedralsGmx45
def writeCharmmTopolFiles(self):
    """Write CHARMM topology files by invoking antechamber (mol2 -> charmm)."""
    self.printMess("Writing CHARMM files\n")
    at = self.atomType
    self.getResidueLabel()
    res = self.resName
    # antechamber call keeping atom types (-at) and residue name (-rn);
    # NOTE(review): '-dr no' presumably disables antechamber's acdoctor
    # checks -- confirm against the antechamber documentation
    cmd = f"{self.acExe} -dr no -i {self.acMol2FileName} -fi mol2 -o {self.charmmBase} \
        -fo charmm -s 2 -at {at} -pf n -rn {res}"
    self.printDebug(cmd)
    log = _getoutput(cmd)
    self.printDebugQuoted(log)
def writePdb(self, afile):
    """
    Write a new PDB file with the atom names defined by Antechamber.

    The format generated here is slightly different from:
    old: http://www.wwpdb.org/documentation/file-format-content/format23/sect9.html
    latest: http://www.wwpdb.org/documentation/file-format-content/format33/sect9.html
    with respect to atom name. Using GAFF2 atom types::

        CU/Cu Copper, CL/cl Chlorine, BR/br Bromine

    Args:
        afile ([str]): file path name
    """
    # TODO: assuming only one residue ('1')
    fbase = os.path.basename(afile)
    # context manager so the handle is always closed (the original left
    # the file object open)
    with open(afile, "w") as pdbFile:
        pdbFile.write("REMARK " + head % (fbase, date))
        id_ = 1
        for atom in self.atoms:
            # id_ = self.atoms.index(atom) + 1
            aName = atom.atomName
            # element symbol: two-letter specials (e.g. CL, BR, CU) kept
            # whole, otherwise first letter of the atom type
            if atom.atomType.atomTypeName.upper() in specialGaffAtoms:
                s = atom.atomType.atomTypeName.upper()
            else:
                s = atom.atomType.atomTypeName[0].upper()
            rName = self.residueLabel[0]
            x = atom.coords[0]
            y = atom.coords[1]
            z = atom.coords[2]
            line = "%-6s%5d %4s %3s Z%4d%s%8.3f%8.3f%8.3f%6.2f%6.2f%s%2s\n" % (
                "ATOM",
                id_,
                aName,
                rName,
                1,
                4 * " ",
                x,
                y,
                z,
                1.0,
                0.0,
                10 * " ",
                s,
            )
            pdbFile.write(line)
            id_ += 1
        pdbFile.write("END\n")
def writeGromacsTopolFiles(self):
    """
    Write GMX topology Files.

    ::

        # from ~/Programmes/amber10/dat/leap/parm/gaff.dat
        #atom type        atomic mass        atomic polarizability        comments
        ca                12.01                 0.360                    Sp2 C in pure aromatic systems
        ha                1.008                 0.135                    H bonded to aromatic carbon

        #bonded atoms        harmonic force kcal/mol/A^2       eq. dist. Ang.  comments
        ca-ha                  344.3*                           1.087**         SOURCE3  1496    0.0024    0.0045
        * for gmx: 344.3 * 4.184 * 100 * 2 = 288110 kJ/mol/nm^2 (why factor 2?)
        ** convert Ang to nm ( div by 10) for gmx: 1.087 A = 0.1087 nm
        # CA HA 1 0.10800 307105.6 ; ged from 340. bsd on C6H6 nmodes; PHE,TRP,TYR (from ffamber99bon.itp)
        # CA-HA 367.0 1.080 changed from 340. bsd on C6H6 nmodes; PHE,TRP,TYR (from parm99.dat)

        # angle HF kcal/mol/rad^2 eq angle degrees comments
        ca-ca-ha 48.5* 120.01 SOURCE3 2980 0.1509 0.2511
        * to convert to gmx: 48.5 * 4.184 * 2 = 405.848 kJ/mol/rad^2 (why factor 2?)
        # CA CA HA 1 120.000 418.400 ; new99 (from ffamber99bon.itp)
        # CA-CA-HA 50.0 120.00 (from parm99.dat)

        # dihedral idivf barrier hight/2 kcal/mol phase degrees periodicity comments
        X -ca-ca-X 4 14.500* 180.000 2.000 intrpol.bsd.on C6H6
        *convert 2 gmx: 14.5/4 * 4.184 * 2 (?) (yes in amb2gmx, not in topolbuild, why?) = 30.334 or 15.167 kJ/mol
        # X -CA-CA-X 4 14.50 180.0 2. intrpol.bsd.on C6H6 (from parm99.dat)
        # X CA CA X 3 30.334 0.000 -30.33400 0.000 0.000 0.000 ; intrpol.bsd.on C6H6
        ;propers treated as RBs in GMX to use combine multiple AMBER torsions per quartet (from ffamber99bon.itp)

        # impr. dihedral barrier hight/2 phase degrees periodicity comments
        X -X -ca-ha 1.1* 180. 2. bsd.on C6H6 nmodes
        * to convert to gmx: 1.1 * 4.184 = 4.6024 kJ/mol/rad^2
        # X -X -CA-HA 1.1 180. 2. bsd.on C6H6 nmodes (from parm99.dat)
        # X X CA HA 1 180.00 4.60240 2 ; bsd.on C6H6 nmodes
        ;impropers treated as propers in GROMACS to use correct AMBER analytical function (from ffamber99bon.itp)

        # 6-12 parms sigma = 2 * r * 2^(-1/6) epsilon
        # atomtype radius Ang. pot. well depth kcal/mol comments
        ha 1.4590* 0.0150** Spellmeyer
        ca 1.9080 0.0860 OPLS
        * to convert to gmx:
        sigma = 1.4590 * 2^(-1/6) * 2 = 2 * 1.29982 Ang. = 2 * 0.129982 nm = 1.4590 * 2^(5/6)/10 = 0.259964 nm
        ** to convert to gmx: 0.0150 * 4.184 = 0.06276 kJ/mol
        # amber99_3 CA 0.0000 0.0000 A 3.39967e-01 3.59824e-01 (from ffamber99nb.itp)
        # amber99_22 HA 0.0000 0.0000 A 2.59964e-01 6.27600e-02 (from ffamber99nb.itp)
        # C* 1.9080 0.0860 Spellmeyer
        # HA 1.4590 0.0150 Spellmeyer (from parm99.dat)

        # to convert r and epsilon to ACOEF and BCOEF
        # ACOEF = sqrt(e1*e2) * (r1 + r2)^12 ; BCOEF = 2 * sqrt(e1*e2) * (r1 + r2)^6 = 2 * ACOEF/(r1+r2)^6
        # to convert ACOEF and BCOEF to r and epsilon
        # r = 0.5 * (2*ACOEF/BCOEF)^(1/6); ep = BCOEF^2/(4*ACOEF)
        # to convert ACOEF and BCOEF to sigma and epsilon (GMX)
        # sigma = (ACOEF/BCOEF)^(1/6) * 0.1 ; epsilon = 4.184 * BCOEF^2/(4*ACOEF)
        # ca ca 819971.66 531.10
        # ca ha 76245.15 104.66
        # ha ha 5716.30 18.52

    For proper dihedrals: a quartet of atoms may appear with more than
    one set of parameters and to convert to GMX they are treated as RBs;
    use the algorithm:

    .. code-block:: c++

        for(my $j=$i;$j<=$lines;$j++){
          my $period = $pn{$j};
          if($pk{$j}>0) {
            $V[$period] = 2*$pk{$j}*$cal;
          }
          # assign V values to C values as predefined #
          if($period==1){
            $C[0]+=0.5*$V[$period];
            if($phase{$j}==0){
              $C[1]-=0.5*$V[$period];
            }else{
              $C[1]+=0.5*$V[$period];
            }
          }elsif($period==2){
            if(($phase{$j}==180)||($phase{$j}==3.14)){
              $C[0]+=$V[$period];
              $C[2]-=$V[$period];
            }else{
              $C[2]+=$V[$period];
            }
          }elsif($period==3){
            $C[0]+=0.5*$V[$period];
            if($phase{$j}==0){
              $C[1]+=1.5*$V[$period];
              $C[3]-=2*$V[$period];
            }else{
              $C[1]-=1.5*$V[$period];
              $C[3]+=2*$V[$period];
            }
          }elsif($period==4){
            if(($phase{$j}==180)||($phase{$j}==3.14)){
              $C[2]+=4*$V[$period];
              $C[4]-=4*$V[$period];
            }else{
              $C[0]+=$V[$period];
              $C[2]-=4*$V[$period];
              $C[4]+=4*$V[$period];
            }
          }
        }
    """
    if self.amb2gmx:
        # direct amb2gmx conversion runs inside the job's home directory
        os.chdir(self.absHomeDir)
    self.printMess("Writing GROMACS files\n")
    self.setAtomType4Gromacs()
    self.writeGroFile()
    self.writePosreFile()
    self.writeGromacsTop()
    self.writeMdpFiles()
    if self.amb2gmx:
        # return to where we started from
        os.chdir(self.rootDir)
def setAtomType4Gromacs(self):
    """
    Set atom types for Gromacs.

    Atom types names in Gromacs TOP file are not case sensitive;
    this routine will append a '_' to lower case atom type.

    Example:
        >>> CA and ca -> CA and ca_
    """
    if not self.merge:
        self.printMess("Disambiguating lower and uppercase atomtypes in GMX top file, even if identical.\n")
        self.atomTypesGromacs = self.atomTypes
        self.atomsGromacs = self.atoms
        return
    self.printMess("Merging identical lower and uppercase atomtypes in GMX top file.\n")
    atNames = [at.atomTypeName for at in self.atomTypes]
    delAtomTypes = []
    modAtomTypes = []
    atomTypesGromacs = []
    dictAtomTypes = {}
    for at in self.atomTypes:
        atName = at.atomTypeName
        dictAtomTypes[atName] = at
        # lowercase type clashing with an existing uppercase twin?
        if atName.islower() and atName.upper() in atNames:
            atUpper = self.atomTypes[atNames.index(atName.upper())]
            sameParams = at.ACOEF == atUpper.ACOEF and at.BCOEF == atUpper.BCOEF and at.mass == atUpper.mass
            if sameParams:
                # identical parameters: the lowercase duplicate can be dropped
                delAtomTypes.append(atName)
            else:
                # different parameters: rename with a trailing underscore
                newAtName = atName + "_"
                modAtomTypes.append(atName)
                renamedType = AtomType(newAtName, at.mass, at.ACOEF, at.BCOEF)
                atomTypesGromacs.append(renamedType)
                dictAtomTypes[newAtName] = renamedType
        else:
            atomTypesGromacs.append(at)
    # rebuild the atom list, pointing atoms at the merged/renamed types
    atomsGromacs = []
    for a in self.atoms:
        atName = a.atomType.atomTypeName
        if atName in delAtomTypes:
            newAtom = Atom(a.atomName, dictAtomTypes[atName.upper()], a.id, a.resid, a.mass, a.charge, a.coords)
            newAtom.cgnr = a.cgnr
            atomsGromacs.append(newAtom)
        elif atName in modAtomTypes:
            newAtom = Atom(a.atomName, dictAtomTypes[atName + "_"], a.id, a.resid, a.mass, a.charge, a.coords)
            newAtom.cgnr = a.cgnr
            atomsGromacs.append(newAtom)
        else:
            atomsGromacs.append(a)
    self.atomTypesGromacs = atomTypesGromacs
    self.atomsGromacs = atomsGromacs
def writeGromacsTop(self):
"""Write GMX topology file."""
if self.atomTypeSystem == "amber":
d2opls = dictAtomTypeAmb2OplsGmxCode
else:
d2opls = dictAtomTypeGaff2OplsGmxCode
topText = []
itpText = []
oitpText = []
otopText = []
top = self.baseName + "_GMX.top"
itp = self.baseName + "_GMX.itp"
posre = "posre_" + self.baseName + ".itp"
otop = self.baseName + "_GMX_OPLS.top"
oitp = self.baseName + "_GMX_OPLS.itp"
headDefault = """
[ defaults ]
; nbfunc comb-rule gen-pairs fudgeLJ fudgeQQ
1 2 yes 0.5 0.8333333333
"""
headItp = """
; Include %s topology
#include "%s"
"""
headLigPosre = """
; Ligand position restraints
#ifdef POSRES_LIG
#include "%s"
#endif
"""
headOpls = """
; Include forcefield parameters
#include "ffoplsaa.itp"
"""
headSystem = """
[ system ]
%s
"""
headMols = """
[ molecules ]
; Compound nmols
"""
headAtomtypes = """
[ atomtypes ]
;name bond_type mass charge ptype sigma epsilon Amb
"""
headAtomtypesOpls = """
; For OPLS atomtypes manual fine tuning
; AC_at:OPLS_at:OPLScode: Possible_Alternatives (see ffoplsaa.atp and ffoplsaanb.itp)
"""
headMoleculetype = """
[ moleculetype ]
;name nrexcl
%-16s 3
"""
headAtoms = """
[ atoms ]
; nr type resi res atom cgnr charge mass ; qtot bond_type
"""
headBonds = """
[ bonds ]
; ai aj funct r k
"""
headPairs = """
[ pairs ]
; ai aj funct
"""
headAngles = """
[ angles ]
; ai aj ak funct theta cth
"""
headProDih = """
[ dihedrals ] ; propers
; treated as RBs in GROMACS to use combine multiple AMBER torsions per quartet
; i j k l func C0 C1 C2 C3 C4 C5
"""
headProDihAlphaGamma = """; treated as usual propers in GROMACS since Phase angle diff from 0 or 180 degrees
; i j k l func phase kd pn
"""
headProDihGmx45 = """
[ dihedrals ] ; propers
; for gromacs 4.5 or higher, using funct 9
; i j k l func phase kd pn
"""
headImpDih = """
[ dihedrals ] ; impropers
; treated as propers in GROMACS to use correct AMBER analytical function
; i j k l func phase kd pn
"""
# NOTE: headTopWaterTip3p and headTopWaterSpce actually do NOTHING
# ==============================================================================================================
# _headTopWaterTip3p = """
# [ bondtypes ]
# ; i j func b0 kb
# OW HW 1 0.09572 462750.4 ; TIP3P water
# HW HW 1 0.15139 462750.4 ; TIP3P water
#
# [ angletypes ]
# ; i j k func th0 cth
# HW OW HW 1 104.520 836.800 ; TIP3P water
# HW HW OW 1 127.740 0.000 ; (found in crystallographic water with 3 bonds)
# """
#
# _headTopWaterSpce = """
# [ bondtypes ]
# ; i j func b0 kb
# OW HW 1 0.1 462750.4 ; SPCE water
# HW HW 1 0.1633 462750.4 ; SPCE water
#
# [ angletypes ]
# ; i j k func th0 cth
# HW OW HW 1 109.47 836.800 ; SPCE water
# HW HW OW 1 125.265 0.000 ; SPCE water
# """
# ==============================================================================================================
headNa = """
[ moleculetype ]
; molname nrexcl
NA+ 1
[ atoms ]
; id_ at type res nr residue name at name cg nr charge mass
1 %s 1 NA+ NA+ 1 1 22.9898
"""
headCl = """
[ moleculetype ]
; molname nrexcl
CL- 1
[ atoms ]
; id_ at type res nr residue name at name cg nr charge mass
1 %s 1 CL- CL- 1 -1 35.45300
"""
headK = """
[ moleculetype ]
; molname nrexcl
K+ 1
[ atoms ]
; id_ at type res nr residue name at name cg nr charge mass
1 %s 1 K+ K+ 1 1 39.100
"""
headWaterTip3p = """
[ moleculetype ]
; molname nrexcl ; TIP3P model
WAT 2
[ atoms ]
; nr type resnr residue atom cgnr charge mass
1 OW 1 WAT O 1 -0.834 16.00000
2 HW 1 WAT H1 1 0.417 1.00800
3 HW 1 WAT H2 1 0.417 1.00800
#ifdef FLEXIBLE
[ bonds ]
; i j funct length force.c.
1 2 1 0.09572 462750.4 0.09572 462750.4
1 3 1 0.09572 462750.4 0.09572 462750.4
[ angles ]
; i j k funct angle force.c.
2 1 3 1 104.520 836.800 104.520 836.800
#else
[ settles ]
; i j funct length
1 1 0.09572 0.15139
[ exclusions ]
1 2 3
2 1 3
3 1 2
#endif
"""
headWaterSpce = """
[ moleculetype ]
; molname nrexcl ; SPCE model
WAT 2
[ atoms ]
; nr type resnr residue atom cgnr charge mass
1 OW 1 WAT O 1 -0.8476 15.99940
2 HW 1 WAT H1 1 0.4238 1.00800
3 HW 1 WAT H2 1 0.4238 1.00800
#ifdef FLEXIBLE
[ bonds ]
; i j funct length force.c.
1 2 1 0.1 462750.4 0.1 462750.4
1 3 1 0.1 462750.4 0.1 462750.4
[ angles ]
; i j k funct angle force.c.
2 1 3 1 109.47 836.800 109.47 836.800
#else
[ settles ]
; OW funct doh dhh
1 1 0.1 0.16330
[ exclusions ]
1 2 3
2 1 3
3 1 2
#endif
"""
if self.direct and self.amb2gmx:
self.printMess("Converting directly from AMBER to GROMACS (EXPERIMENTAL).\n")
# Dict of ions dealt by acpype emulating amb2gmx
ionsDict = {"Na+": headNa, "Cl-": headCl, "K+": headK}
ionsSorted = []
# NOTE: headWaterTip3p and headWaterSpce actually do the real thing
# so, skipping headTopWaterTip3p and headWaterTip3p
# headTopWater = headTopWaterTip3p
headWater = headWaterTip3p
nWat = 0
topText.append("; " + head % (top, date))
otopText.append("; " + head % (otop, date))
topText.append(headDefault)
nSolute = 0
if not self.amb2gmx:
topText.append(headItp % (itp, itp))
topText.append(headLigPosre % posre)
otopText.append(headOpls)
otopText.append(headItp % (itp, itp))
otopText.append(headLigPosre % posre)
itpText.append("; " + head % (itp, date))
oitpText.append("; " + head % (oitp, date))
self.printDebug("atomTypes %i" % len(self.atomTypesGromacs))
temp = []
otemp = []
for aType in self.atomTypesGromacs:
aTypeName = aType.atomTypeName
oaCode = d2opls.get(aTypeName, ["x", "0"])[:-1]
aTypeNameOpls = oplsCode2AtomTypeDict.get(oaCode[0], "x")
A = aType.ACOEF
B = aType.BCOEF
# one cannot infer sigma or epsilon for B = 0, assuming 0 for them
if B == 0.0:
sigma, epsilon, r0, epAmber = 0, 0, 0, 0
else:
r0 = 0.5 * math.pow((2 * A / B), (1.0 / 6))
epAmber = 0.25 * B * B / A
sigma = 0.1 * math.pow((A / B), (1.0 / 6))
epsilon = cal * epAmber
if aTypeName == "OW":
if A == 629362.166 and B == 625.267765:
# headTopWater = headTopWaterSpce
headWater = headWaterSpce
# OW 629362.166 625.267765 spce
# OW 581935.564 594.825035 tip3p
# print aTypeName, A, B
line = (
" %-8s %-11s %3.5f %3.5f A %13.5e %13.5e"
% (
aTypeName,
aTypeName,
0.0,
0.0,
sigma,
epsilon,
)
+ f" ; {r0:4.2f} {epAmber:1.4f}\n"
)
oline = f"; {aTypeName}:{aTypeNameOpls}:opls_{oaCode[0]}: {repr(oaCode[1:])}\n"
# tmpFile.write(line)
temp.append(line)
otemp.append(oline)
if self.amb2gmx:
topText.append(headAtomtypes)
topText += temp
nWat = self.residueLabel.count("WAT")
for ion in ionsDict:
nIon = self.residueLabel.count(ion)
if nIon > 0:
idIon = self.residueLabel.index(ion)
ionType = self.search(name=ion).atomType.atomTypeName
ionsSorted.append((idIon, nIon, ion, ionType))
ionsSorted.sort()
else:
itpText.append(headAtomtypes)
itpText += temp
oitpText.append(headAtomtypesOpls)
oitpText += otemp
self.printDebug("GMX atomtypes done")
if len(self.atoms) > 3 * nWat + sum(x[1] for x in ionsSorted):
nSolute = 1
if nWat:
# topText.append(headTopWater)
self.printDebug("type of water '%s'" % headWater[43:48].strip())
if nSolute:
if self.amb2gmx:
topText.append(headMoleculetype % self.baseName)
else:
itpText.append(headMoleculetype % self.baseName)
oitpText.append(headMoleculetype % self.baseName)
self.printDebug("atoms %i" % len(self.atoms))
qtot = 0.0
count = 1
temp = []
otemp = []
id2oplsATDict = {}
for atom in self.atomsGromacs:
resid = atom.resid
resname = self.residueLabel[resid]
if not self.direct:
if resname in list(ionsDict) + ["WAT"]:
break
aName = atom.atomName
aType = atom.atomType.atomTypeName
oItem = d2opls.get(aType, ["x", 0])
oplsAtName = oplsCode2AtomTypeDict.get(oItem[0], "x")
id_ = atom.id
id2oplsATDict[id_] = oplsAtName
oaCode = "opls_" + oItem[0]
cgnr = id_
if self.sorted:
cgnr = atom.cgnr # JDC
charge = atom.charge
mass = atom.mass
omass = float(oItem[-1])
qtot += charge
resnr = resid + 1
line = "%6d %4s %5d %5s %5s %4d %12.6f %12.5f ; qtot %1.3f\n" % (
id_,
aType,
resnr,
resname,
aName,
cgnr,
charge,
mass,
qtot,
) # JDC
oline = "%6d %4s %5d %5s %5s %4d %12.6f %12.5f ; qtot % 3.3f %-4s\n" % (
id_,
oaCode,
resnr,
resname,
aName,
cgnr,
charge,
omass,
qtot,
oplsAtName,
) # JDC
count += 1
temp.append(line)
otemp.append(oline)
if temp:
if self.amb2gmx:
topText.append(headAtoms)
topText += temp
else:
itpText.append(headAtoms)
itpText += temp
oitpText.append(headAtoms)
oitpText += otemp
self.printDebug("GMX atoms done")
# remove bond of water
self.printDebug("bonds %i" % len(self.bonds))
temp = []
otemp = []
for bond in self.bonds:
res1 = self.residueLabel[bond.atoms[0].resid]
res2 = self.residueLabel[bond.atoms[0].resid]
if "WAT" in [res1, res2]:
continue
a1Name = bond.atoms[0].atomName
a2Name = bond.atoms[1].atomName
id1 = bond.atoms[0].id
id2 = bond.atoms[1].id
oat1 = id2oplsATDict.get(id1)
oat2 = id2oplsATDict.get(id2)
line = "%6i %6i %3i %13.4e %13.4e ; %6s - %-6s\n" % (
id1,
id2,
1,
bond.rEq * 0.1,
bond.kBond * 200 * cal,
a1Name,
a2Name,
)
oline = "%6i %6i %3i ; %13.4e %13.4e ; %6s - %-6s %6s - %-6s\n" % (
id1,
id2,
1,
bond.rEq * 0.1,
bond.kBond * 200 * cal,
a1Name,
a2Name,
oat1,
oat2,
)
temp.append(line)
otemp.append(oline)
temp.sort()
otemp.sort()
if temp:
if self.amb2gmx:
topText.append(headBonds)
topText += temp
else:
itpText.append(headBonds)
itpText += temp
oitpText.append(headBonds)
oitpText += otemp
self.printDebug("GMX bonds done")
self.printDebug("atomPairs %i" % len(self.atomPairs))
temp = []
for pair in self.atomPairs:
# if not printed:
# tmpFile.write(headPairs)
# printed = True
a1Name = pair[0].atomName
a2Name = pair[1].atomName
id1 = pair[0].id
id2 = pair[1].id
# id1 = self.atoms.index(pair[0]) + 1
# id2 = self.atoms.index(pair[1]) + 1
line = "%6i %6i %6i ; %6s - %-6s\n" % (id1, id2, 1, a1Name, a2Name)
temp.append(line)
temp.sort()
if temp:
if self.amb2gmx:
topText.append(headPairs)
topText += temp
else:
itpText.append(headPairs)
itpText += temp
oitpText.append(headPairs)
oitpText += temp
self.printDebug("GMX pairs done")
self.printDebug("angles %i" % len(self.angles))
temp = []
otemp = []
for angle in self.angles:
a1 = angle.atoms[0].atomName
a2 = angle.atoms[1].atomName
a3 = angle.atoms[2].atomName
id1 = angle.atoms[0].id
id2 = angle.atoms[1].id
id3 = angle.atoms[2].id
oat1 = id2oplsATDict.get(id1)
oat2 = id2oplsATDict.get(id2)
oat3 = id2oplsATDict.get(id3)
line = "%6i %6i %6i %6i %13.4e %13.4e ; %6s - %-6s - %-6s\n" % (
id1,
id2,
id3,
1,
angle.thetaEq * radPi,
2 * cal * angle.kTheta,
a1,
a2,
a3,
)
oline = "%6i %6i %6i %6i ; %13.4e %13.4e ; %6s - %-4s - %-6s %4s - %+4s - %-4s\n" % (
id1,
id2,
id3,
1,
angle.thetaEq * radPi,
2 * cal * angle.kTheta,
a1,
a2,
a3,
oat1,
oat2,
oat3,
)
temp.append(line)
otemp.append(oline)
temp.sort()
otemp.sort()
if temp:
if self.amb2gmx:
topText.append(headAngles)
topText += temp
else:
itpText.append(headAngles)
itpText += temp
oitpText.append(headAngles)
oitpText += otemp
self.printDebug("GMX angles done")
self.setProperDihedralsCoef()
self.printDebug("properDihedralsCoefRB %i" % len(self.properDihedralsCoefRB))
self.printDebug("properDihedralsAlphaGamma %i" % len(self.properDihedralsAlphaGamma))
self.printDebug("properDihedralsGmx45 %i" % len(self.properDihedralsGmx45))
temp = []
otemp = []
if self.gmx4:
self.printMess("Writing RB dihedrals for old GMX 4.\n")
for dih in self.properDihedralsCoefRB:
a1 = dih[0][0].atomName
a2 = dih[0][1].atomName
a3 = dih[0][2].atomName
a4 = dih[0][3].atomName
id1 = dih[0][0].id
id2 = dih[0][1].id
id3 = dih[0][2].id
id4 = dih[0][3].id
oat1 = id2oplsATDict.get(id1)
oat2 = id2oplsATDict.get(id2)
oat3 = id2oplsATDict.get(id3)
oat4 = id2oplsATDict.get(id4)
c0, c1, c2, c3, c4, c5 = dih[1]
line = (
"%6i %6i %6i %6i %6i %10.5f %10.5f %10.5f %10.5f %10.5f %10.5f"
% (
id1,
id2,
id3,
id4,
3,
c0,
c1,
c2,
c3,
c4,
c5,
)
+ " ; %6s-%6s-%6s-%6s\n" % (a1, a2, a3, a4)
)
oline = (
"%6i %6i %6i %6i %6i ; %10.5f %10.5f %10.5f %10.5f %10.5f %10.5f"
% (
id1,
id2,
id3,
id4,
3,
c0,
c1,
c2,
c3,
c4,
c5,
)
+ " ; %6s-%6s-%6s-%6s %4s-%4s-%4s-%4s\n" % (a1, a2, a3, a4, oat1, oat2, oat3, oat4)
)
temp.append(line)
otemp.append(oline)
temp.sort()
otemp.sort()
if temp:
if self.amb2gmx:
topText.append(headProDih)
topText += temp
else:
itpText.append(headProDih)
itpText += temp
oitpText.append(headProDih)
oitpText += otemp
self.printDebug("GMX proper dihedrals done")
else:
self.printMess("Writing GMX dihedrals for GMX 4.5 and higher.\n")
funct = 9 # 9
for dih in self.properDihedralsGmx45:
a1 = dih[0][0].atomName
a2 = dih[0][1].atomName
a3 = dih[0][2].atomName
a4 = dih[0][3].atomName
id1 = dih[0][0].id
id2 = dih[0][1].id
id3 = dih[0][2].id
id4 = dih[0][3].id
ph = dih[1] # phase already in degree
kd = dih[2] * cal # kPhi PK
pn = dih[3] # .period
line = "%6i %6i %6i %6i %6i %8.2f %9.5f %3i ; %6s-%6s-%6s-%6s\n" % (
id1,
id2,
id3,
id4,
funct,
ph,
kd,
pn,
a1,
a2,
a3,
a4,
)
oline = "%6i %6i %6i %6i %6i ; %8.2f %9.5f %3i ; %6s-%6s-%6s-%6s\n" % (
id1,
id2,
id3,
id4,
funct,
ph,
kd,
pn,
a1,
a2,
a3,
a4,
)
temp.append(line)
otemp.append(oline)
temp.sort()
otemp.sort()
if temp:
if self.amb2gmx:
topText.append(headProDihGmx45)
topText += temp
else:
itpText.append(headProDihGmx45)
itpText += temp
oitpText.append(headProDihGmx45)
oitpText += otemp
# for properDihedralsAlphaGamma
if not self.gmx4:
funct = 4 # 4
else:
funct = 1
temp = []
otemp = []
for dih in self.properDihedralsAlphaGamma:
a1 = dih[0][0].atomName
a2 = dih[0][1].atomName
a3 = dih[0][2].atomName
a4 = dih[0][3].atomName
id1 = dih[0][0].id
id2 = dih[0][1].id
id3 = dih[0][2].id
id4 = dih[0][3].id
ph = dih[1] # phase already in degree
kd = dih[2] * cal # kPhi PK
pn = dih[3] # .period
line = "%6i %6i %6i %6i %6i %8.2f %9.5f %3i ; %6s-%6s-%6s-%6s\n" % (
id1,
id2,
id3,
id4,
funct,
ph,
kd,
pn,
a1,
a2,
a3,
a4,
)
oline = "%6i %6i %6i %6i %6i ; %8.2f %9.5f %3i ; %6s-%6s-%6s-%6s\n" % (
id1,
id2,
id3,
id4,
funct,
ph,
kd,
pn,
a1,
a2,
a3,
a4,
)
temp.append(line)
otemp.append(oline)
temp.sort()
otemp.sort()
if temp:
if self.amb2gmx:
topText.append(headProDihAlphaGamma)
topText += temp
else:
itpText.append(headProDihAlphaGamma)
itpText += temp
oitpText.append(headProDihAlphaGamma)
oitpText += otemp
self.printDebug("GMX special proper dihedrals done")
self.printDebug("improperDihedrals %i" % len(self.improperDihedrals))
temp = []
otemp = []
for dih in self.improperDihedrals:
a1 = dih.atoms[0].atomName
a2 = dih.atoms[1].atomName
a3 = dih.atoms[2].atomName
a4 = dih.atoms[3].atomName
id1 = dih.atoms[0].id
id2 = dih.atoms[1].id
id3 = dih.atoms[2].id
id4 = dih.atoms[3].id
kd = dih.kPhi * cal
pn = dih.period
ph = dih.phase * radPi
line = "%6i %6i %6i %6i %6i %8.2f %9.5f %3i ; %6s-%6s-%6s-%6s\n" % (
id1,
id2,
id3,
id4,
funct,
ph,
kd,
pn,
a1,
a2,
a3,
a4,
)
oline = "%6i %6i %6i %6i %6i ; %8.2f %9.5f %3i ; %6s-%6s-%6s-%6s\n" % (
id1,
id2,
id3,
id4,
funct,
ph,
kd,
pn,
a1,
a2,
a3,
a4,
)
temp.append(line)
otemp.append(oline)
temp.sort()
otemp.sort()
if temp:
if self.amb2gmx:
topText.append(headImpDih)
topText += temp
else:
itpText.append(headImpDih)
itpText += temp
oitpText.append(headImpDih)
oitpText += otemp
self.printDebug("GMX improper dihedrals done")
if not self.direct:
for ion in ionsSorted:
topText.append(ionsDict[ion[2]] % ion[3])
if nWat:
topText.append(headWater)
topText.append(headSystem % (self.baseName))
topText.append(headMols)
otopText.append(headSystem % (self.baseName))
otopText.append(headMols)
if nSolute > 0:
topText.append(" %-16s %-6i\n" % (self.baseName, nSolute))
otopText.append(" %-16s %-6i\n" % (self.baseName, nSolute))
if not self.direct:
for ion in ionsSorted:
topText.append(" %-16s %-6i\n" % (ion[2].upper(), ion[1]))
if nWat:
topText.append(" %-16s %-6i\n" % ("WAT", nWat))
if self.topo14Data.hasNondefault14():
citation = (
" BERNARDI, A., FALLER, R., REITH, D., and KIRSCHNER, K. N. ACPYPE update for\n"
+ " nonuniform 1-4 scale factors: Conversion of the GLYCAM06 force field from AMBER\n"
+ ' to GROMACS. SoftwareX 10 (2019), 100241. doi: 10.1016/j.softx.2019.100241"\n'
)
msg = "Non-default 1-4 scale parameters detected. Converting individually. Please cite:\n\n" + citation
self.printMess(msg)
topText = self.topo14Data.patch_gmx_topol14("".join(topText))
gmxDir = os.path.abspath(".")
topFileName = os.path.join(gmxDir, top)
topFile = open(topFileName, "w")
topFile.writelines(topText)
self.topText = topText
if not self.amb2gmx:
itpFileName = os.path.join(gmxDir, itp)
itpFile = open(itpFileName, "w")
itpFile.writelines(itpText)
oitpFileName = os.path.join(gmxDir, oitp)
oitpFile = open(oitpFileName, "w")
oitpFile.writelines(oitpText)
otopFileName = os.path.join(gmxDir, otop)
otopFile = open(otopFileName, "w")
otopFile.writelines(otopText)
def writeGroFile(self):
    """Write the GROMACS coordinate file '<base>_GMX.gro'.

    Coordinates are converted from Angstrom to nm (factor 0.1).  The final
    line holds the periodic box: taken from ``self.pbc`` when present
    (rectangular or truncated-octahedron, selected by the first box angle),
    otherwise estimated from the coordinate extrema.
    """
    # print "Writing GROMACS GRO file\n"
    self.printDebug("writing GRO file")
    gro = self.baseName + "_GMX.gro"
    gmxDir = os.path.abspath(".")
    groFileName = os.path.join(gmxDir, gro)
    # FIX: use a context manager so the handle is flushed and closed even on
    # error (the original handle was never closed).
    with open(groFileName, "w") as groFile:
        groFile.write(head % (gro, date))
        groFile.write(" %i\n" % len(self.atoms))
        count = 1
        for atom in self.atoms:
            coords = [c * 0.1 for c in atom.coords]  # Angstrom -> nm
            resid = atom.resid
            line = "%5d%5s%5s%5d%8.3f%8.3f%8.3f\n" % (
                resid + 1,
                self.residueLabel[resid],
                atom.atomName,
                count,
                coords[0],
                coords[1],
                coords[2],
            )
            count += 1
            if count == 100000:
                # GRO atom-number field is only 5 digits wide: wrap around.
                count = 0
            groFile.write(line)
        if self.pbc:
            boxX = self.pbc[0][0] * 0.1
            boxY = self.pbc[0][1] * 0.1
            boxZ = self.pbc[0][2] * 0.1
            vX = self.pbc[1][0]
            # vY = self.pbc[1][1]
            # vZ = self.pbc[1][2]
            if vX == 90.0:
                # NOTE(review): 90-degree angles describe a rectangular box;
                # the "triclinic" debug label looks misleading -- confirm.
                self.printDebug("PBC triclinic")
                text = f"{boxX:11.5f} {boxY:11.5f} {boxZ:11.5f}\n"
            elif round(vX, 2) == 109.47:
                self.printDebug("PBC octahedron")
                f1 = 0.471405  # 1/3 * sqrt(2)
                f2 = 0.333333 * boxX
                v22 = boxY * 2 * f1
                v33 = boxZ * f1 * 1.73205  # f1 * sqrt(3)
                v21 = v31 = v32 = 0.0
                v12 = f2
                v13 = -f2
                v23 = f1 * boxX
                text = "{:11.5f} {:11.5f} {:11.5f} {:11.5f} {:11.5f} {:11.5f} {:11.5f} {:11.5f} {:11.5f}\n".format(
                    boxX,
                    v22,
                    v33,
                    v21,
                    v31,
                    v12,
                    v32,
                    v13,
                    v23,
                )
            # NOTE(review): if self.pbc is set but vX is neither 90 nor
            # ~109.47, 'text' is left undefined and the write below raises
            # NameError -- confirm whether other angles can occur upstream.
        else:
            self.printDebug("Box size estimated")
            X = [a.coords[0] * 0.1 for a in self.atoms]
            Y = [a.coords[1] * 0.1 for a in self.atoms]
            Z = [a.coords[2] * 0.1 for a in self.atoms]
            boxX = max(X) - min(X)  # + 2.0 # 2.0 is double of rlist
            boxY = max(Y) - min(Y)  # + 2.0
            boxZ = max(Z) - min(Z)  # + 2.0
            text = f"{boxX * 20.0:11.5f} {boxY * 20.0:11.5f} {boxZ * 20.0:11.5f}\n"
        groFile.write(text)
def writePosreFile(self, fc=1000):
    """
    Write file with positional restraints for heavy atoms.

    http://www.mdtutorials.com/gmx/complex/06_equil.html

    Args:
        fc: force constant applied on x, y and z (written verbatim into
            the ``[ position_restraints ]`` entries).
    """
    self.printDebug("writing POSRE file")
    posre = "posre_" + self.baseName + ".itp"
    gmxDir = os.path.abspath(".")
    posreFileName = os.path.join(gmxDir, posre)
    # FIX: context manager guarantees flush/close (the original handle was
    # never closed).
    with open(posreFileName, "w") as posreFile:
        posreFile.write("; " + head % (posre, date))
        posreFile.write("\n[ position_restraints ]\n; atom type fx fy fz\n")
        for atom in self.atoms:
            # Restrain only heavy atoms: skip any atom whose *atom type*
            # name starts with 'H'.
            if not atom.atomType.atomTypeName.upper().startswith("H"):
                posreFile.write(f"{atom.id:>6d} 1 {fc:>5d} {fc:>5d} {fc:>5d}\n")
def writeMdpFiles(self):
    """Write MDP for test with GROMACS.

    Produces 'em.mdp' (energy minimisation), 'md.mdp' (short MD run) and an
    executable 'rungmx.sh' driver script in the current directory.
    """
    emMdp = f"""; to test
; echo 0 | gmx editconf -f {self.baseName}_GMX.gro -bt octahedron -d 1 -c -princ
; gmx grompp -f em.mdp -c out.gro -p {self.baseName}_GMX.top -o em.tpr -v
; gmx mdrun -ntmpi 1 -v -deffnm em
; Parameters describing what to do, when to stop and what to save
integrator = steep ; Algorithm (steep = steepest descent minimization)
nsteps = 500 ; Maximum number of (minimization) steps to perform
nstxout = 10
; Parameters describing how to find the neighbors of each atom and how to calculate the interactions
nstlist = 1 ; Frequency to update the neighbour list and long range forces
cutoff-scheme = Verlet
rlist = 1.2 ; Cut-off for making neighbour list (short range forces)
coulombtype = PME ; Treatment of long range electrostatic interactions
rcoulomb = 1.2 ; long range electrostatic cut-off
vdw-type = cutoff
vdw-modifier = force-switch
rvdw-switch = 1.0
rvdw = 1.2 ; long range Van der Waals cut-off
pbc = xyz ; Periodic Boundary Conditions
DispCorr = no
; vmd em.gro em.trr
"""
    mdMdp = f"""; to test
; gmx grompp -f md.mdp -c em.gro -p {self.baseName}_GMX.top -o md.tpr
; gmx mdrun -ntmpi 1 -v -deffnm md
; define = -DPOSRES_LIG
integrator = md
nsteps = 10000
nstxout = 10
cutoff-scheme = verlet
coulombtype = PME
constraints = h-bonds
vdwtype = cutoff
vdw-modifier = force-switch
rlist = 1.0
rvdw = 1.0
rvdw-switch = 0.9
rcoulomb = 1.1
DispCorr = EnerPres
lincs-iter = 2
fourierspacing = 0.25
gen-vel = yes
; vmd md.gro md.trr
"""
    rungmx = f"""
echo 0 | gmx editconf -f {self.baseName}_GMX.gro -bt octahedron -d 1 -c -princ
gmx grompp -f em.mdp -c out.gro -p {self.baseName}_GMX.top -o em.tpr -v
gmx mdrun -ntmpi 1 -v -deffnm em
gmx grompp -f md.mdp -c em.gro -p {self.baseName}_GMX.top -o md.tpr -r em.gro
gmx mdrun -ntmpi 1 -v -deffnm md
"""
    # FIX: write via context managers so every buffer is flushed and every
    # handle closed (the originals were never closed, and chmod ran while
    # 'rungmx.sh' was still open).
    with open("em.mdp", "w") as emMdpFile:
        emMdpFile.write(emMdp)
    with open("md.mdp", "w") as mdMdpFile:
        mdMdpFile.write(mdMdp)
    with open("rungmx.sh", "w") as runGmxFile:
        runGmxFile.write(rungmx)
    os.chmod("rungmx.sh", 0o744)  # owner rwx, group/other r
def writeCnsTopolFiles(self):
    """Write CNS/XPLOR files: parameters (.par), topology (.top) and input (.inp).

    Also writes a fresh PDB ('<base>_NEW.pdb') for CNS to read.  When
    ``self.allhdg`` is set, fixed ALLHDG-style force constants (1000/500/750)
    are written instead of the values parsed from the AMBER topology.

    NOTE(review): the three file handles opened here are never explicitly
    closed -- relies on interpreter cleanup; confirm acceptable.
    """
    if self.amb2gmx:
        os.chdir(self.absHomeDir)
    autoAngleFlag = True
    autoDihFlag = True
    cnsDir = os.path.abspath(".")
    # Output file names, all derived from the molecule base name.
    pdb = self.baseName + "_NEW.pdb"
    par = self.baseName + "_CNS.par"
    top = self.baseName + "_CNS.top"
    inp = self.baseName + "_CNS.inp"
    pdbFileName = os.path.join(cnsDir, pdb)
    parFileName = os.path.join(cnsDir, par)
    topFileName = os.path.join(cnsDir, top)
    inpFileName = os.path.join(cnsDir, inp)
    # Remember absolute paths on the object for later consumers.
    self.CnsTopFileName = topFileName
    self.CnsInpFileName = inpFileName
    self.CnsParFileName = parFileName
    self.CnsPdbFileName = pdbFileName
    parFile = open(parFileName, "w")
    topFile = open(topFileName, "w")
    inpFile = open(inpFileName, "w")
    self.printMess("Writing NEW PDB file\n")
    self.writePdb(pdbFileName)
    self.printMess("Writing CNS/XPLOR files\n")
    # print "Writing CNS PAR file\n"
    parFile.write("Remarks " + head % (par, date))
    parFile.write("\nset echo=false end\n")
    parFile.write("\n{ Bonds: atomType1 atomType2 kb r0 }\n")
    lineSet = []
    for bond in self.bonds:
        # Atom-type names get a trailing '_' suffix throughout to avoid
        # clashes with reserved CNS names.
        a1Type = bond.atoms[0].atomType.atomTypeName + "_"
        a2Type = bond.atoms[1].atomType.atomTypeName + "_"
        kb = 1000.0
        if not self.allhdg:
            kb = bond.kBond
        r0 = bond.rEq
        line = "BOND %5s %5s %8.1f %8.4f\n" % (a1Type, a2Type, kb, r0)
        lineRev = "BOND %5s %5s %8.1f %8.4f\n" % (a2Type, a1Type, kb, r0)
        # Deduplicate, treating A-B and B-A as the same bond type.
        if line not in lineSet:
            if lineRev not in lineSet:
                lineSet.append(line)
    for item in lineSet:
        parFile.write(item)
    parFile.write("\n{ Angles: aType1 aType2 aType3 kt t0 }\n")
    lineSet = []
    for angle in self.angles:
        a1 = angle.atoms[0].atomType.atomTypeName + "_"
        a2 = angle.atoms[1].atomType.atomTypeName + "_"
        a3 = angle.atoms[2].atomType.atomTypeName + "_"
        kt = 500.0
        if not self.allhdg:
            kt = angle.kTheta
        t0 = angle.thetaEq * radPi  # radians -> degrees
        line = "ANGLe %5s %5s %5s %8.1f %8.2f\n" % (a1, a2, a3, kt, t0)
        lineRev = "ANGLe %5s %5s %5s %8.1f %8.2f\n" % (a3, a2, a1, kt, t0)
        # Deduplicate, treating A-B-C and C-B-A as the same angle type.
        if line not in lineSet:
            if lineRev not in lineSet:
                lineSet.append(line)
    for item in lineSet:
        parFile.write(item)
    parFile.write(
        "\n{ Proper Dihedrals: aType1 aType2 aType3 aType4 kt per\
iod phase }\n"
    )
    lineSet = set()
    # Each 'item' is one condensed dihedral: multiple Fourier terms for the
    # same four atoms are emitted as one MULT entry plus continuation lines.
    for item in self.condensedProperDihedrals:
        seq = ""
        id_ = 0
        for dih in item:
            # id_ = item.index(dih)
            ll = len(item)
            a1 = dih.atoms[0].atomType.atomTypeName + "_"
            a2 = dih.atoms[1].atomType.atomTypeName + "_"
            a3 = dih.atoms[2].atomType.atomTypeName + "_"
            a4 = dih.atoms[3].atomType.atomTypeName + "_"
            kp = 750.0
            if not self.allhdg:
                kp = dih.kPhi
            p = dih.period
            ph = dih.phase * radPi  # radians -> degrees
            if ll > 1:
                if id_ == 0:
                    # First term carries the atom types and the MULT count.
                    line = (
                        "DIHEdral %5s %5s %5s %5s MULT %1i %7.3f %4i %8\
.2f\n"
                        % (a1, a2, a3, a4, ll, kp, p, ph)
                    )
                else:
                    # Subsequent terms: parameters only, aligned under MULT.
                    line = "%s %7.3f %4i %8.2f\n" % (40 * " ", kp, p, ph)
            else:
                line = "DIHEdral %5s %5s %5s %5s %15.3f %4i %8.2f\n" % (a1, a2, a3, a4, kp, p, ph)
            seq += line
            id_ += 1
        lineSet.add(seq)
    for item in lineSet:
        parFile.write(item)
    parFile.write(
        "\n{ Improper Dihedrals: aType1 aType2 aType3 aType4 kt p\
eriod phase }\n"
    )
    lineSet = set()
    for idh in self.improperDihedrals:
        a1 = idh.atoms[0].atomType.atomTypeName + "_"
        a2 = idh.atoms[1].atomType.atomTypeName + "_"
        a3 = idh.atoms[2].atomType.atomTypeName + "_"
        a4 = idh.atoms[3].atomType.atomTypeName + "_"
        kp = 750.0
        if not self.allhdg:
            kp = idh.kPhi
        p = idh.period
        ph = idh.phase * radPi
        line = "IMPRoper %5s %5s %5s %5s %13.1f %4i %8.2f\n" % (a1, a2, a3, a4, kp, p, ph)
        lineSet.add(line)
    if self.chiral:
        # Extra, very stiff impropers (kp=11000) that preserve the geometry
        # of each detected chiral centre.
        for idhc in self.chiralGroups:
            _atc, neig, angle = idhc
            a1 = neig[0].atomType.atomTypeName + "_"
            a2 = neig[1].atomType.atomTypeName + "_"
            a3 = neig[2].atomType.atomTypeName + "_"
            a4 = neig[3].atomType.atomTypeName + "_"
            kp = 11000.0
            p = 0
            ph = angle
            line = "IMPRoper %5s %5s %5s %5s %13.1f %4i %8.2f\n" % (a1, a2, a3, a4, kp, p, ph)
            lineSet.add(line)
    for item in lineSet:
        parFile.write(item)
    parFile.write("\n{ Nonbonded: Type Emin sigma; (1-4): Emin/2 sigma }\n")
    for at in self.atomTypes:
        A = at.ACOEF
        B = at.BCOEF
        atName = at.atomTypeName + "_"
        if B == 0.0:
            # Dummy/zero-LJ atom type: all nonbonded parameters are zero.
            sigma = epAmber = ep2 = sig2 = 0.0
        else:
            # Convert AMBER A/B (12-6) coefficients to epsilon/sigma.
            epAmber = 0.25 * B * B / A
            ep2 = epAmber / 2.0  # 1-4 interactions use half epsilon
            sigma = math.pow((A / B), (1.0 / 6))
            sig2 = sigma
        line = "NONBonded %5s %11.6f %11.6f %11.6f %11.6f\n" % (atName, epAmber, sigma, ep2, sig2)
        parFile.write(line)
    parFile.write("\nset echo=true end\n")
    # print "Writing CNS TOP file\n"
    topFile.write("Remarks " + head % (top, date))
    topFile.write("\nset echo=false end\n")
    topFile.write(f"\nautogenerate angles={autoAngleFlag} dihedrals={autoDihFlag} end\n")
    topFile.write("\n{ atomType mass }\n")
    for at in self.atomTypes:
        atType = at.atomTypeName + "_"
        mass = at.mass
        line = "MASS %-5s %8.3f\n" % (atType, mass)
        topFile.write(line)
    topFile.write("\nRESIdue %s\n" % self.residueLabel[0])
    topFile.write("\nGROUP\n")
    topFile.write("\n{ atomName atomType Charge }\n")
    for at in self.atoms:
        atName = at.atomName
        atType = at.atomType.atomTypeName + "_"
        charge = at.charge
        line = "ATOM %-5s TYPE= %-5s CHARGE= %8.4f END\n" % (atName, atType, charge)
        topFile.write(line)
    topFile.write("\n{ Bonds: atomName1 atomName2 }\n")
    for bond in self.bonds:
        a1Name = bond.atoms[0].atomName
        a2Name = bond.atoms[1].atomName
        line = "BOND %-5s %-5s\n" % (a1Name, a2Name)
        topFile.write(line)
    if not autoAngleFlag or 1:  # generating angles anyway
        topFile.write("\n{ Angles: atomName1 atomName2 atomName3}\n")
        for angle in self.angles:
            a1Name = angle.atoms[0].atomName
            a2Name = angle.atoms[1].atomName
            a3Name = angle.atoms[2].atomName
            line = "ANGLe %-5s %-5s %-5s\n" % (a1Name, a2Name, a3Name)
            topFile.write(line)
    if not autoDihFlag or 1:  # generating angles anyway
        topFile.write("\n{ Proper Dihedrals: name1 name2 name3 name4 }\n")
        for item in self.condensedProperDihedrals:
            # Only the first term of each condensed dihedral is listed in
            # the topology (hence the 'break' below).
            for dih in item:
                a1Name = dih.atoms[0].atomName
                a2Name = dih.atoms[1].atomName
                a3Name = dih.atoms[2].atomName
                a4Name = dih.atoms[3].atomName
                line = "DIHEdral %-5s %-5s %-5s %-5s\n" % (a1Name, a2Name, a3Name, a4Name)
                break
            topFile.write(line)
    topFile.write("\n{ Improper Dihedrals: aName1 aName2 aName3 aName4 }\n")
    for dih in self.improperDihedrals:
        a1Name = dih.atoms[0].atomName
        a2Name = dih.atoms[1].atomName
        a3Name = dih.atoms[2].atomName
        a4Name = dih.atoms[3].atomName
        line = "IMPRoper %-5s %-5s %-5s %-5s\n" % (a1Name, a2Name, a3Name, a4Name)
        topFile.write(line)
    if self.chiral:
        for idhc in self.chiralGroups:
            _atc, neig, angle = idhc
            a1Name = neig[0].atomName
            a2Name = neig[1].atomName
            a3Name = neig[2].atomName
            a4Name = neig[3].atomName
            line = "IMPRoper %-5s %-5s %-5s %-5s\n" % (a1Name, a2Name, a3Name, a4Name)
            topFile.write(line)
    topFile.write("\nEND {RESIdue %s}\n" % self.residueLabel[0])
    topFile.write("\nset echo=true end\n")
    # The .inp file is a CNS script template filled in below via %-substitution.
    inpFile.write("Remarks " + head % (inp, date))
    inpData = """
topology
@%(CNS_top)s
end
parameters
@%(CNS_par)s
nbonds
atom cdie shift eps=1.0 e14fac=0.4 tolerance=0.5
cutnb=9.0 ctonnb=7.5 ctofnb=8.0
nbxmod=5 vswitch wmin 1.0
end
remark dielectric constant eps set to 1.0
end
flags exclude elec ? end
segment name=" "
chain
coordinates @%(NEW_pdb)s
end
end
coordinates @%(NEW_pdb)s
coord copy end
! Remarks If you want to shake up the coordinates a bit ...
vector do (x=x+6*(rand()-0.5)) (all)
vector do (y=y+6*(rand()-0.5)) (all)
vector do (z=z+6*(rand()-0.5)) (all)
write coordinates output=%(CNS_ran)s end
! Remarks RMS diff after randomisation and before minimisation
coord rms sele=(known and not hydrogen) end
print threshold=0.02 bonds
print threshold=3.0 angles
print threshold=3.0 dihedrals
print threshold=3.0 impropers
! Remarks Do Powell energy minimisation
minimise powell
nstep=250 drop=40.0
end
write coordinates output=%(CNS_min)s end
write structure output=%(CNS_psf)s end
! constraints interaction (not hydro) (not hydro) end
print threshold=0.02 bonds
print threshold=3.0 angles
print threshold=3.0 dihedrals
print threshold=3.0 impropers
flags exclude * include vdw end energy end
distance from=(not hydro) to=(not hydro) cutoff=2.6 end
! Remarks RMS fit after minimisation
coord fit sele=(known and not hydrogen) end
stop
"""
    # Substitution values for the template placeholders above.
    dictInp = {}
    dictInp["CNS_top"] = top
    dictInp["CNS_par"] = par
    dictInp["NEW_pdb"] = pdb
    dictInp["CNS_min"] = self.baseName + "_NEW_min.pdb"
    dictInp["CNS_psf"] = self.baseName + "_CNS.psf"
    dictInp["CNS_ran"] = self.baseName + "_rand.pdb"
    line = inpData % dictInp
    inpFile.write(line)
    if not self.amb2gmx:
        self.printDebug("chiralGroups %i" % len(self.chiralGroups))
    else:
        # Restore the working directory changed at the top of this method.
        os.chdir(self.rootDir)
class ACTopol(AbstractTopol):
    """
    Class to build the AC topologies (Antechamber AmberTools).
    """

    def __init__(
        self,
        inputFile,
        binaries=binaries,
        chargeType="bcc",
        chargeVal=None,
        multiplicity="1",
        atomType="gaff2",
        force=False,
        basename=None,
        debug=False,
        outTopol="all",
        allhdg=False,
        timeTol=MAXTIME,
        qprog="sqm",
        ekFlag=None,
        verbose=True,
        gmx4=False,
        merge=False,
        direct=False,
        is_sorted=False,
        chiral=False,
        amb2gmx=False,
        level=20,
    ):
        """Locate the external tools, validate the input and set up all attributes.

        Args:
            inputFile: path to an existing PDB/MOL2/MDL file, or a SMILES string.
            binaries: mapping providing the 'ac_bin' (antechamber) and
                'obabel_bin' executable names.
            chargeType: charge method handed to antechamber (e.g. 'bcc').
            chargeVal: net charge; guessed later (guessCharge) when None.
            multiplicity: spin multiplicity as a string.
            atomType: atom-type set; any value containing '2' selects gaff2 files.
            force, basename, outTopol, allhdg, timeTol, qprog, ekFlag: see module docs.
            debug, verbose, level: logging controls (quiet -> 100, debug -> 10).
            gmx4, merge, direct, is_sorted, chiral, amb2gmx: feature flags.

        Raises:
            Exception: when antechamber is missing, when obabel is missing but
                required by the input format, or when the input file does not exist.
        """
        super().__init__()
        self.binaries = binaries
        self.amb2gmx = amb2gmx
        self.debug = debug
        self.verbose = verbose
        self.gmx4 = gmx4
        self.merge = merge
        self.direct = direct
        self.sorted = is_sorted
        self.chiral = chiral
        # Map the verbosity flags onto a logging level; debug wins over quiet.
        if not self.verbose:
            level = 100
        if self.debug:
            level = 10
        self.level = level or 20
        # Antechamber is mandatory: abort with hints when it cannot be found.
        self.acExe = find_bin(binaries["ac_bin"])
        if not os.path.exists(self.acExe):
            self.printError(f"no '{binaries['ac_bin']}' executable... aborting! ")
            hint1 = "HINT1: is 'AMBERHOME' environment variable set?"
            hint2 = (
                f"HINT2: is '{binaries['ac_bin']}' in your $PATH?"
                + f"\n What 'which {binaries['ac_bin']}' in your terminal says?"
                + "\n 'alias' doesn't work for ACPYPE."
            )
            self.printMess(hint1)
            self.printMess(hint2)
            msg = "Missing ANTECHAMBER"
            logger(self.level).error(msg)
            raise Exception(msg)
        self.inputFile = os.path.basename(inputFile)
        self.rootDir = os.path.abspath(".")
        self.absInputFile = os.path.abspath(inputFile)
        # Input that is not an existing file and lacks a known extension is
        # treated as a candidate SMILES string.
        if not os.path.exists(self.absInputFile) and not re.search(r"\.mol2$|\.mdl$|\.pdb$", self.inputFile):
            self.smiles = inputFile
            if self.checkSmiles():
                self.is_smiles = True
                if not basename:
                    self.inputFile = "smiles_molecule.mol2"
                else:
                    self.inputFile = f"{basename}.mol2"
                self.absInputFile = os.path.abspath(self.inputFile)
            else:
                self.is_smiles = False
                self.smiles = None
        elif not os.path.exists(self.absInputFile):
            msg = f"Input file {inputFile} DOES NOT EXIST"
            logger(self.level).error(msg)
            raise Exception(msg)
        baseOriginal, ext = os.path.splitext(self.inputFile)
        base = basename or baseOriginal
        self.baseOriginal = baseOriginal
        self.ext = ext
        self.baseName = base  # name of the input file without ext.
        # obabel is only mandatory when the input is PDB or SMILES.
        self.obabelExe = find_bin(binaries["obabel_bin"])
        if not os.path.exists(self.obabelExe):
            if self.ext != ".mol2" and self.ext != ".mdl":
                self.printError(f"no '{binaries['obabel_bin']}' executable; you need it if input is PDB or SMILES")
                self.printError("otherwise use only MOL2 or MDL file as input ... aborting!")
                msg = "Missing OBABEL"
                logger(self.level).error(msg)
                raise Exception(msg)
            else:
                self.printWarn(f"no '{binaries['obabel_bin']}' executable, no PDB file can be used as input!")
        # NOTE(review): self.is_smiles is only assigned in the SMILES branch
        # above; presumably AbstractTopol.__init__ presets it for plain-file
        # input -- confirm.
        if self.is_smiles:
            self.convertSmilesToMol2()
        self.timeTol = timeTol
        self.printDebug("Max execution time tolerance is %s" % elapsedTime(self.timeTol))
        # ekFlag e.g. (default used by sqm):
        # acpype -i cccc -k "qm_theory='AM1', grms_tol=0.0005, scfconv=1.d-10, ndiis_attempts=700, qmcharge=0"
        if ekFlag == '"None"' or ekFlag is None:
            self.ekFlag = ""
        else:
            self.ekFlag = "-ek %s" % ekFlag
        self.extOld = ext
        self.homeDir = self.baseName + ".acpype"
        self.chargeType = chargeType
        self.chargeVal = chargeVal
        self.multiplicity = multiplicity
        self.atomType = atomType
        # Pick gaff vs gaff2 data and leaprc files from the atom-type name.
        self.gaffDatfile = "gaff.dat"
        leapGaffFile = "leaprc.gaff"
        if "2" in self.atomType:
            leapGaffFile = "leaprc.gaff2"
            self.gaffDatfile = "gaff2.dat"
        self.force = force
        self.allhdg = allhdg
        self.tleapExe = which("tleap") or ""
        self.parmchkExe = which("parmchk2") or ""
        # Names for the intermediate Antechamber/LEaP outputs.
        acBase = base + "_AC"
        self.acBaseName = acBase
        self.acXyzFileName = acBase + ".inpcrd"
        self.acTopFileName = acBase + ".prmtop"
        self.acFrcmodFileName = acBase + ".frcmod"
        self.tmpDir = os.path.join(self.rootDir, ".acpype_tmp_%s" % os.path.basename(base))
        self.setResNameCheckCoords()
        self.guessCharge()
        acMol2FileName = f"{base}_{chargeType}_{atomType}.mol2"
        self.acMol2FileName = acMol2FileName
        self.charmmBase = "%s_CHARMM" % base
        self.qFlag = qDict[qprog]
        self.outTopols = [outTopol]
        if outTopol == "all":
            self.outTopols = outTopols
        # Substitution dict used later to render the tleap input template.
        self.acParDict = {
            "base": base,
            "ext": ext[1:],
            "acBase": acBase,
            "acMol2FileName": acMol2FileName,
            "res": self.resName,
            "leapAmberFile": leapAmberFile,
            "baseOrg": self.baseOriginal,
            "leapGaffFile": leapGaffFile,
        }
class MolTopol(AbstractTopol):
"""
Class to write topologies and parameters files for several applications.
https://ambermd.org/FileFormats.php
Parser, take information in AC xyz and top files and convert to objects.
Args:
acFileXyz
acFileTop
Returns:
molTopol obj or None
"""
def __init__(
    self,
    acTopolObj=None,
    acFileXyz=None,
    acFileTop=None,
    debug=False,
    basename=None,
    verbose=True,
    gmx4=False,
    merge=False,
    direct=False,
    is_sorted=False,
    chiral=False,
    amb2gmx=False,
    level=20,
):
    """Build a MolTopol either from an ACTopol object or from prmtop/inpcrd files.

    Args:
        acTopolObj: existing ACTopol whose files and settings are inherited.
        acFileXyz: AMBER coordinate file (.inpcrd); taken from acTopolObj if None.
        acFileTop: AMBER topology file (.prmtop); taken from acTopolObj if None.
        basename: override for the output base name.
        debug, verbose, level: logging controls (quiet -> 100, debug -> 10).
        gmx4, merge, direct, is_sorted, chiral, amb2gmx: feature flags.
    """
    super().__init__()
    self.amb2gmx = amb2gmx
    self.chiral = chiral
    self.allhdg = False
    self.debug = debug
    self.level = level
    self.gmx4 = gmx4
    self.merge = merge
    self.direct = direct
    self.sorted = is_sorted
    self.verbose = verbose
    self.inputFile = acFileTop
    # Map verbosity flags onto the logging level; debug wins over quiet.
    if not self.verbose:
        level = 100
    if self.debug:
        level = 10
    self.level = level
    if acTopolObj:
        # Inherit file names and relevant settings from the parent ACTopol.
        if not acFileXyz:
            acFileXyz = acTopolObj.acXyzFileName
        if not acFileTop:
            acFileTop = acTopolObj.acTopFileName
        self._parent = acTopolObj
        self.allhdg = self._parent.allhdg
        self.debug = self._parent.debug
        self.inputFile = self._parent.inputFile
    elif not self.amb2gmx:
        # Standalone prmtop/inpcrd input implies amb2gmx conversion mode.
        self.amb2gmx = True
    if not os.path.exists(acFileXyz) or not os.path.exists(acFileTop):
        self.printError(f"Files '{acFileXyz}' and/or '{acFileTop}' don't exist")
        self.printError("molTopol object won't be created")
    # NOTE(review): execution continues even when the files are missing, so
    # the open() calls below would raise IOError -- confirm this is intended.
    self.xyzFileData = open(acFileXyz).readlines()
    # Drop %COMMENT records from the prmtop before parsing.
    self.topFileData = [x for x in open(acFileTop).readlines() if not x.startswith("%COMMENT")]
    self.topo14Data = Topology_14()
    self.topo14Data.read_amber_topology("".join(self.topFileData))
    self.printDebug("prmtop and inpcrd files loaded")
    self.getResidueLabel()
    if len(self.residueLabel) > 1:
        self.baseName = basename or os.path.splitext(os.path.basename(acFileTop))[0]  # 'solute'
    else:
        self.baseName = basename or self.residueLabel[0]  # 3 caps letters
    if acTopolObj:
        self.baseName = basename or acTopolObj.baseName
    self.printDebug("basename defined = '%s'" % self.baseName)
    # Parse the topology sections into object lists.
    self.getAtoms()
    self.getBonds()
    self.getAngles()
    self.getDihedrals()
    if self.amb2gmx:
        self.rootDir = os.path.abspath(".")
        self.homeDir = f"{self.baseName}.amb2gmx"
        self.makeDir()
    else:
        self.getChirals()
    # Sort atoms for gromacs output. # JDC
    if self.sorted:
        self.printMess("Sorting atoms for gromacs ordering.\n")
        self.sortAtomsForGromacs()
|
alanwilter/acpype
|
acpype/topol.py
|
Python
|
gpl-3.0
| 125,476
|
[
"Amber",
"CHARMM",
"GAMESS",
"Gaussian",
"Gromacs",
"Jaguar",
"MOPAC",
"NAMD",
"ORCA",
"Pybel",
"VMD"
] |
e963ce2df0d238234c6d8ec945607d1b9babdb19c35aea7d6ab67bae74e85612
|
# -*- coding: utf-8 -*-
""" Tests for student account views. """
import re
from unittest import skipUnless
from urllib import urlencode
import json
import mock
import ddt
import markupsafe
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from django.contrib import messages
from django.contrib.messages.middleware import MessageMiddleware
from django.test import TestCase
from django.test.utils import override_settings
from django.test.client import RequestFactory
from embargo.test_utils import restrict_course
from openedx.core.djangoapps.user_api.accounts.api import activate_account, create_account
from openedx.core.djangoapps.user_api.accounts import EMAIL_MAX_LENGTH
from student.tests.factories import CourseModeFactory, UserFactory
from student_account.views import account_settings_context
from third_party_auth.tests.testutil import simulate_running_pipeline
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@ddt.ddt
class StudentAccountUpdateTest(UrlResetMixin, TestCase):
""" Tests for the student account views that update the user's account information. """
USERNAME = u"heisenberg"
ALTERNATE_USERNAME = u"walt"
OLD_PASSWORD = u"ḅḷüëṡḳÿ"
NEW_PASSWORD = u"🄱🄸🄶🄱🄻🅄🄴"
OLD_EMAIL = u"walter@graymattertech.com"
NEW_EMAIL = u"walt@savewalterwhite.com"
INVALID_ATTEMPTS = 100
INVALID_EMAILS = [
None,
u"",
u"a",
"no_domain",
"no+domain",
"@",
"@domain.com",
"test@no_extension",
# Long email -- subtract the length of the @domain
# except for one character (so we exceed the max length limit)
u"{user}@example.com".format(
user=(u'e' * (EMAIL_MAX_LENGTH - 11))
)
]
INVALID_KEY = u"123abc"
def setUp(self):
super(StudentAccountUpdateTest, self).setUp("student_account.urls")
# Create/activate a new account
activation_key = create_account(self.USERNAME, self.OLD_PASSWORD, self.OLD_EMAIL)
activate_account(activation_key)
# Login
result = self.client.login(username=self.USERNAME, password=self.OLD_PASSWORD)
self.assertTrue(result)
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in LMS')
def test_password_change(self):
# Request a password change while logged in, simulating
# use of the password reset link from the account page
response = self._change_password()
self.assertEqual(response.status_code, 200)
# Check that an email was sent
self.assertEqual(len(mail.outbox), 1)
# Retrieve the activation link from the email body
email_body = mail.outbox[0].body
result = re.search('(?P<url>https?://[^\s]+)', email_body)
self.assertIsNot(result, None)
activation_link = result.group('url')
# Visit the activation link
response = self.client.get(activation_link)
self.assertEqual(response.status_code, 200)
# Submit a new password and follow the redirect to the success page
response = self.client.post(
activation_link,
# These keys are from the form on the current password reset confirmation page.
{'new_password1': self.NEW_PASSWORD, 'new_password2': self.NEW_PASSWORD},
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Your password has been set.")
# Log the user out to clear session data
self.client.logout()
# Verify that the new password can be used to log in
result = self.client.login(username=self.USERNAME, password=self.NEW_PASSWORD)
self.assertTrue(result)
# Try reusing the activation link to change the password again
response = self.client.post(
activation_link,
{'new_password1': self.OLD_PASSWORD, 'new_password2': self.OLD_PASSWORD},
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "The password reset link was invalid, possibly because the link has already been used.")
self.client.logout()
# Verify that the old password cannot be used to log in
result = self.client.login(username=self.USERNAME, password=self.OLD_PASSWORD)
self.assertFalse(result)
# Verify that the new password continues to be valid
result = self.client.login(username=self.USERNAME, password=self.NEW_PASSWORD)
self.assertTrue(result)
@ddt.data(True, False)
def test_password_change_logged_out(self, send_email):
# Log the user out
self.client.logout()
# Request a password change while logged out, simulating
# use of the password reset link from the login page
if send_email:
response = self._change_password(email=self.OLD_EMAIL)
self.assertEqual(response.status_code, 200)
else:
# Don't send an email in the POST data, simulating
# its (potentially accidental) omission in the POST
# data sent from the login page
response = self._change_password()
self.assertEqual(response.status_code, 400)
def test_password_change_inactive_user(self):
# Log out the user created during test setup
self.client.logout()
# Create a second user, but do not activate it
create_account(self.ALTERNATE_USERNAME, self.OLD_PASSWORD, self.NEW_EMAIL)
# Send the view the email address tied to the inactive user
response = self._change_password(email=self.NEW_EMAIL)
# Expect that the activation email is still sent,
# since the user may have lost the original activation email.
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 1)
def test_password_change_no_user(self):
# Log out the user created during test setup
self.client.logout()
# Send the view an email address not tied to any user
response = self._change_password(email=self.NEW_EMAIL)
self.assertEqual(response.status_code, 400)
def test_password_change_rate_limited(self):
# Log out the user created during test setup, to prevent the view from
# selecting the logged-in user's email address over the email provided
# in the POST data
self.client.logout()
# Make many consecutive bad requests in an attempt to trigger the rate limiter
for attempt in xrange(self.INVALID_ATTEMPTS):
self._change_password(email=self.NEW_EMAIL)
response = self._change_password(email=self.NEW_EMAIL)
self.assertEqual(response.status_code, 403)
@ddt.data(
    ('post', 'password_change_request', []),
)
@ddt.unpack
def test_require_http_method(self, correct_method, url_name, args):
    """Every HTTP verb other than the expected one yields HTTP 405."""
    url = reverse(url_name, args=args)
    disallowed = {'get', 'put', 'post', 'head', 'options', 'delete'} - {correct_method}
    for verb in disallowed:
        resp = getattr(self.client, verb)(url)
        self.assertEqual(resp.status_code, 405)
def _change_password(self, email=None):
    """POST a password-change request, optionally including *email*."""
    payload = {'email': email} if email else {}
    return self.client.post(path=reverse('password_change_request'), data=payload)
@ddt.ddt
class StudentAccountLoginAndRegistrationTest(UrlResetMixin, ModuleStoreTestCase):
    """ Tests for the student account views that update the user's account information. """

    USERNAME = "bob"
    EMAIL = "bob@example.com"
    PASSWORD = "password"

    @mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
    def setUp(self):
        super(StudentAccountLoginAndRegistrationTest, self).setUp('embargo')

    @ddt.data(
        ("account_login", "login"),
        ("account_register", "register"),
    )
    @ddt.unpack
    def test_login_and_registration_form(self, url_name, initial_mode):
        # The combined login/registration page embeds the mode it was
        # requested in as a DOM data attribute.
        response = self.client.get(reverse(url_name))
        expected_data = u"data-initial-mode=\"{mode}\"".format(mode=initial_mode)
        self.assertContains(response, expected_data)

    @ddt.data("account_login", "account_register")
    def test_login_and_registration_form_already_authenticated(self, url_name):
        # Create/activate a new account and log in
        activation_key = create_account(self.USERNAME, self.PASSWORD, self.EMAIL)
        activate_account(activation_key)
        result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
        self.assertTrue(result)
        # Verify that we're redirected to the dashboard
        response = self.client.get(reverse(url_name))
        self.assertRedirects(response, reverse("dashboard"))

    # BUG FIX: the original data set listed (False, "account_login") twice,
    # so the register page was never exercised with IS_EDX_DOMAIN=False.
    # Cover all four (domain, page) combinations.
    @ddt.data(
        (False, "account_login"),
        (False, "account_register"),
        (True, "account_login"),
        (True, "account_register"),
    )
    @ddt.unpack
    def test_login_and_registration_form_signin_preserves_params(self, is_edx_domain, url_name):
        params = [
            ('course_id', 'edX/DemoX/Demo_Course'),
            ('enrollment_action', 'enroll'),
        ]
        # The response should have a "Sign In" button with the URL
        # that preserves the querystring params
        with mock.patch.dict(settings.FEATURES, {'IS_EDX_DOMAIN': is_edx_domain}):
            response = self.client.get(reverse(url_name), params)
        expected_url = '/login?{}'.format(self._finish_auth_url_param(params + [('next', '/dashboard')]))
        self.assertContains(response, expected_url)
        # Add additional parameters:
        params = [
            ('course_id', 'edX/DemoX/Demo_Course'),
            ('enrollment_action', 'enroll'),
            ('course_mode', 'honor'),
            ('email_opt_in', 'true'),
            ('next', '/custom/final/destination')
        ]
        # Verify that these parameters are also preserved
        with mock.patch.dict(settings.FEATURES, {'IS_EDX_DOMAIN': is_edx_domain}):
            response = self.client.get(reverse(url_name), params)
        expected_url = '/login?{}'.format(self._finish_auth_url_param(params))
        self.assertContains(response, expected_url)

    @mock.patch.dict(settings.FEATURES, {"ENABLE_THIRD_PARTY_AUTH": False})
    @ddt.data("account_login", "account_register")
    def test_third_party_auth_disabled(self, url_name):
        # With the feature off, no providers are rendered.
        response = self.client.get(reverse(url_name))
        self._assert_third_party_auth_data(response, None, None, [])

    @ddt.data(
        ("account_login", None, None),
        ("account_register", None, None),
        ("account_login", "google-oauth2", "Google"),
        ("account_register", "google-oauth2", "Google"),
        ("account_login", "facebook", "Facebook"),
        ("account_register", "facebook", "Facebook"),
    )
    @ddt.unpack
    def test_third_party_auth(self, url_name, current_backend, current_provider):
        params = [
            ('course_id', 'edX/DemoX/Demo_Course'),
            ('enrollment_action', 'enroll'),
            ('course_mode', 'honor'),
            ('email_opt_in', 'true'),
            ('next', '/custom/final/destination'),
        ]
        # Simulate a running pipeline
        if current_backend is not None:
            pipeline_target = "student_account.views.third_party_auth.pipeline"
            with simulate_running_pipeline(pipeline_target, current_backend):
                response = self.client.get(reverse(url_name), params)
        # Do NOT simulate a running pipeline
        else:
            response = self.client.get(reverse(url_name), params)
        # This relies on the THIRD_PARTY_AUTH configuration in the test settings
        expected_providers = [
            {
                "name": "Facebook",
                "iconClass": "fa-facebook",
                "loginUrl": self._third_party_login_url("facebook", "login", params),
                "registerUrl": self._third_party_login_url("facebook", "register", params)
            },
            {
                "name": "Google",
                "iconClass": "fa-google-plus",
                "loginUrl": self._third_party_login_url("google-oauth2", "login", params),
                "registerUrl": self._third_party_login_url("google-oauth2", "register", params)
            }
        ]
        self._assert_third_party_auth_data(response, current_backend, current_provider, expected_providers)

    @override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
    def test_microsite_uses_old_login_page(self):
        # Retrieve the login page from a microsite domain
        # and verify that we're served the old page.
        resp = self.client.get(
            reverse("account_login"),
            HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
        )
        self.assertContains(resp, "Log into your Test Microsite Account")
        self.assertContains(resp, "login-form")

    def test_microsite_uses_old_register_page(self):
        # Retrieve the register page from a microsite domain
        # and verify that we're served the old page.
        resp = self.client.get(
            reverse("account_register"),
            HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME
        )
        self.assertContains(resp, "Register for Test Microsite")
        self.assertContains(resp, "register-form")

    def _assert_third_party_auth_data(self, response, current_backend, current_provider, providers):
        """Verify that third party auth info is rendered correctly in a DOM data attribute. """
        auth_info = markupsafe.escape(
            json.dumps({
                "currentProvider": current_provider,
                "providers": providers,
                "finishAuthUrl": "/auth/complete/{}?".format(current_backend) if current_backend else None,
                "errorMessage": None,
            })
        )
        expected_data = u"data-third-party-auth='{auth_info}'".format(
            auth_info=auth_info
        )
        self.assertContains(response, expected_data)

    def _third_party_login_url(self, backend_name, auth_entry, login_params):
        """Construct the login URL to start third party authentication. """
        return u"{url}?auth_entry={auth_entry}&{param_str}".format(
            url=reverse("social:begin", kwargs={"backend": backend_name}),
            auth_entry=auth_entry,
            param_str=self._finish_auth_url_param(login_params),
        )

    def _finish_auth_url_param(self, params):
        """
        Make the next=... URL parameter that indicates where the user should go next.

        >>> _finish_auth_url_param([('next', '/dashboard')])
        '/account/finish_auth?next=%2Fdashboard'
        """
        return urlencode({
            'next': '/account/finish_auth?{}'.format(urlencode(params))
        })
class AccountSettingsViewTest(TestCase):
    """ Tests for the account settings view. """

    USERNAME = 'student'
    PASSWORD = 'password'
    # Field names that must appear in both the view context and the page.
    FIELDS = [
        'country',
        'gender',
        'language',
        'level_of_education',
        'password',
        'year_of_birth',
        'preferred_language',
    ]

    @mock.patch("django.conf.settings.MESSAGE_STORAGE", 'django.contrib.messages.storage.cookie.CookieStorage')
    def setUp(self):
        super(AccountSettingsViewTest, self).setUp()
        self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
        self.client.login(username=self.USERNAME, password=self.PASSWORD)
        self.request = RequestFactory()
        self.request.user = self.user
        # Python-social saves auth failure notifications in Django messages.
        # See pipeline.get_duplicate_provider() for details.
        self.request.COOKIES = {}
        MessageMiddleware().process_request(self.request)
        messages.error(self.request, 'Facebook is already in use.', extra_tags='Auth facebook')

    def test_context(self):
        context = account_settings_context(self.request)

        # The API URLs exposed to the template point at the logged-in user.
        # FIX: the original asserted each of these URLs twice, verbatim;
        # the redundant second pair of assertions was removed.
        user_accounts_api_url = reverse("accounts_api", kwargs={'username': self.user.username})
        self.assertEqual(context['user_accounts_api_url'], user_accounts_api_url)
        user_preferences_api_url = reverse('preferences_api', kwargs={'username': self.user.username})
        self.assertEqual(context['user_preferences_api_url'], user_preferences_api_url)

        for attribute in self.FIELDS:
            self.assertIn(attribute, context['fields'])

        # The duplicate-provider error queued in setUp surfaces here.
        self.assertEqual(context['duplicate_provider'].BACKEND_CLASS.name, 'facebook')
        self.assertEqual(context['auth']['providers'][0]['name'], 'Facebook')
        self.assertEqual(context['auth']['providers'][1]['name'], 'Google')

    def test_view(self):
        view_path = reverse('account_settings')
        response = self.client.get(path=view_path)
        # Every configurable field should be rendered on the page.
        for attribute in self.FIELDS:
            self.assertIn(attribute, response.content)
|
kamalx/edx-platform
|
lms/djangoapps/student_account/test/test_views.py
|
Python
|
agpl-3.0
| 17,533
|
[
"VisIt"
] |
42f71b0a464f369666215dd196139f162d48d0442516d110c59c8d5594bbaec0
|
import ocl
import camvtk
import time
import vtk
import datetime
import math
import random
import numpy as np
import gc
def drawVertex(myscreen, p, vertexColor, rad=1):
    """Add a sphere actor of radius *rad* at point *p* to the screen."""
    sphere = camvtk.Sphere(center=(p.x, p.y, p.z), radius=rad, color=vertexColor)
    myscreen.addActor(sphere)
def drawEdge(myscreen, e, edgeColor=camvtk.yellow):
    """Draw edge *e* (a pair of endpoints) as a line actor."""
    start, end = e[0], e[1]
    line = camvtk.Line(p1=(start.x, start.y, start.z),
                       p2=(end.x, end.y, end.z),
                       color=edgeColor)
    myscreen.addActor(line)
def drawFarCircle(myscreen, r, circleColor):
    """Draw the far-field boundary circle of radius *r*, centered at the origin."""
    circle = camvtk.Circle(center=(0, 0, 0), radius=r, color=circleColor)
    myscreen.addActor(circle)
def drawDiagram( myscreen, vd ):
    # Render a complete voronoi diagram: the far-field circle, the
    # generator points, the voronoi vertices (interior and far) and
    # the voronoi edges.
    drawFarCircle(myscreen, vd.getFarRadius(), camvtk.pink)
    for v in vd.getGenerators():
        drawVertex(myscreen, v, camvtk.green, 2)
    for v in vd.getVoronoiVertices():
        drawVertex(myscreen, v, camvtk.red, 1)
    for v in vd.getFarVoronoiVertices():
        # Far vertices drawn much larger so they stand out at the boundary.
        drawVertex(myscreen, v, camvtk.pink, 10)
    vde = vd.getVoronoiEdges()
    print " got ",len(vde)," Voronoi edges"
    for e in vde:
        drawEdge(myscreen,e, camvtk.cyan)
class VD:
    """Keeps VTK actors for an ocl.VoronoiDiagram in sync with the screen.

    Holds the actor lists for generators, vertices and edges so they can
    be removed and re-added as the diagram grows; `scale` multiplies all
    diagram coordinates before drawing.
    """

    def __init__(self, myscreen, vd, scale=1):
        self.myscreen = myscreen
        self.gen_pts = [ocl.Point(0, 0, 0)]
        self.generators = camvtk.PointCloud(pointlist=self.gen_pts)
        self.verts = []    # sphere actors for voronoi vertices
        self.far = []      # actors for far (boundary) vertices
        self.edges = []    # line actors for voronoi edges
        self.generatorColor = camvtk.green
        self.vertexColor = camvtk.red
        self.edgeColor = camvtk.cyan
        self.vdtext = camvtk.Text()
        self.vdtext.SetPos((50, myscreen.height - 50))
        self.Ngen = 0
        self.vdtext_text = ""
        self.scale = scale  # drawing scale applied to all coordinates
        self.setVDText(vd)
        myscreen.addActor(self.vdtext)

    def setVDText(self, vd):
        """Refresh the on-screen caption with the generator count and scale."""
        # The first 3 generators are internal bootstrap points, not user input.
        self.Ngen = len(vd.getGenerators()) - 3
        self.vdtext_text = "VD with " + str(self.Ngen) + " generators. SCALE= " + str(vd.getFarRadius())
        self.vdtext.SetText(self.vdtext_text)
        self.vdtext.SetSize(32)

    def setGenerators(self, vd):
        """Replace the generator point-cloud actor with the current set."""
        if len(self.gen_pts) > 0:
            self.myscreen.removeActor(self.generators)
        self.gen_pts = []
        for p in vd.getGenerators():
            self.gen_pts.append(self.scale * p)
        self.generators = camvtk.PointCloud(pointlist=self.gen_pts)
        self.generators.SetPoints()
        self.myscreen.addActor(self.generators)
        self.setVDText(vd)
        self.myscreen.render()

    def setFar(self, vd):
        """Draw the far (boundary) voronoi vertices as large pink spheres."""
        for p in vd.getFarVoronoiVertices():
            p = self.scale * p
            self.myscreen.addActor(camvtk.Sphere(center=(p.x, p.y, p.z), radius=4, color=camvtk.pink))
        self.myscreen.render()

    def setVertices(self, vd):
        """Replace the voronoi-vertex sphere actors with the current set."""
        for p in self.verts:
            self.myscreen.removeActor(p)
        self.verts = []
        for p in vd.getVoronoiVertices():
            p = self.scale * p
            actor = camvtk.Sphere(center=(p.x, p.y, p.z), radius=0.000005, color=self.vertexColor)
            self.verts.append(actor)
            self.myscreen.addActor(actor)
        self.myscreen.render()

    def setEdgesPolydata(self, vd):
        """Draw all edges as a single vtkPolyData actor (faster than one actor per edge)."""
        self.edges = []
        self.edges = vd.getEdgesGenerators()
        self.epts = vtk.vtkPoints()
        nid = 0
        lines = vtk.vtkCellArray()
        for e in self.edges:
            p1 = self.scale * e[0]
            p2 = self.scale * e[1]
            self.epts.InsertNextPoint(p1.x, p1.y, p1.z)
            self.epts.InsertNextPoint(p2.x, p2.y, p2.z)
            line = vtk.vtkLine()
            line.GetPointIds().SetId(0, nid)
            line.GetPointIds().SetId(1, nid + 1)
            nid = nid + 2
            lines.InsertNextCell(line)
        linePolyData = vtk.vtkPolyData()
        linePolyData.SetPoints(self.epts)
        linePolyData.SetLines(lines)
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInput(linePolyData)
        self.edge_actor = vtk.vtkActor()
        self.edge_actor.SetMapper(mapper)
        self.edge_actor.GetProperty().SetColor(camvtk.cyan)
        self.myscreen.addActor(self.edge_actor)
        self.myscreen.render()

    def setEdges(self, vd):
        """Replace the per-edge line actors with the current edge set."""
        # BUG FIX: the original called the global name `myscreen` here,
        # which raises NameError as soon as there are edge actors to
        # remove; use the screen stored on the instance instead.
        for e in self.edges:
            self.myscreen.removeActor(e)
        self.edges = []
        for e in vd.getEdgesGenerators():
            p1 = self.scale * e[0]
            p2 = self.scale * e[1]
            actor = camvtk.Line(p1=(p1.x, p1.y, p1.z), p2=(p2.x, p2.y, p2.z), color=self.edgeColor)
            self.myscreen.addActor(actor)
            self.edges.append(actor)
        self.myscreen.render()

    def setAll(self, vd):
        """Refresh everything that is currently enabled (generators and edges)."""
        self.setGenerators(vd)
        #self.setFar(vd)
        #self.setVertices(vd)
        self.setEdges(vd)
def addVertexSlow(myscreen, vd, vod, p):
    """Placeholder for animated, step-by-step vertex insertion (not implemented)."""
    pass
def drawDiag(far, framenr):
    # Build and render one voronoi-diagram frame at far-field radius `far`,
    # then write it out as frames/vd_v_<framenr>.png.
    myscreen = camvtk.VTKScreen()
    myscreen.camera.SetFocalPoint(0, 0, 0)
    camvtk.drawOCLtext(myscreen)
    # Pipeline for capturing the render window into a PNG file.
    w2if = vtk.vtkWindowToImageFilter()
    w2if.SetInput(myscreen.renWin)
    lwr = vtk.vtkPNGWriter()
    lwr.SetInput( w2if.GetOutput() )
    scale=10000
    #far = 0.00001
    vd = ocl.VoronoiDiagram(far,1200)
    # Camera distance proportional to `far` so the diagram stays in view
    # as the far radius shrinks across frames.
    camPos = 0.4* (far/0.00001)
    myscreen.camera.SetPosition(camPos/10000, 0, camPos)
    myscreen.camera.SetClippingRange(-2*camPos,2*camPos)
    random.seed(42)  # fixed seed: the same point layout in every frame
    vod = VD(myscreen,vd,scale)
    drawFarCircle(myscreen, scale*vd.getFarRadius(), camvtk.orange)
    # Generate Nmax random points in the [-far/2, far/2] square.
    Nmax = 300
    plist=[]
    for n in range(Nmax):
        x=-far/2+far*random.random()
        y=-far/2+far*random.random()
        plist.append( ocl.Point(x,y) )
    n=1
    #ren = [300]
    for p in plist:
        print "PYTHON: adding generator: ",n," at ",p
        #if n in ren:
        vd.addVertexSite( p )
        n=n+1
    vod.setAll(vd)
    myscreen.render()
    w2if.Modified()
    lwr.SetFileName("frames/vd_v_"+ ('%05d' % framenr)+".png")
    lwr.Write()
    print "PYTHON All DONE."
    myscreen.render()
    #myscreen.iren.Start()
if __name__ == "__main__":
    print ocl.revision()
    # Sweep the far-field radius logarithmically from maxf down to minf,
    # producing Nframes animation frames.
    maxf = 0.00001
    minf = 0.00000001
    lmaxf = math.log(maxf)
    lminf = math.log(minf)
    Nframes = 5
    lrange = np.arange(lmaxf,lminf, -(lmaxf-lminf)/Nframes)
    print lrange
    fars = []
    for l in lrange:
        f = math.exp(l)
        fars.append(f)
    print fars
    #exit()
    #farvals = [0.1 , 0.01]
    n=1
    for f in fars:
        print "****************"
        print "PYTHON diagram with f= ",f
        print "****************"
        drawDiag(f,n)
        n=n+1
        gc.collect()  # free VTK/ocl objects from the previous frame
|
AlanZatarain/opencamlib
|
scripts/voronoi/voronoi_8_scale-test.py
|
Python
|
gpl-3.0
| 6,627
|
[
"VTK"
] |
4ffde84519768c8a0898058ed3e061c521d8bf8ed4254ee5de59e8df3067a1bf
|
# rdesignerProtos.py ---
#
# Filename: rdesignerProtos.py
# Description:
# Author: Subhasis Ray, Upi Bhalla
# Maintainer:
# Created: Tue May 7 12:11:22 2013 (+0530)
# Version:
# Last-Updated: Wed Dec 30 13:01:00 2015 (+0530)
# By: Upi
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
import numpy as np
import moose
import math
from moose import utils
EREST_ACT = -70e-3  # resting membrane potential used by the HH channel builders (V)
per_ms = 1e3        # converts rate constants expressed per ms into per second
PI = 3.14159265359  # used for cylinder geometry in buildCompt
FaradayConst = 96485.3365 # Coulomb/mol
def make_HH_Na(name='HH_Na', parent='/library', vmin=-110e-3, vmax=50e-3, vdivs=3000):
    """Create a Hodhkin-Huxley Na channel under `parent`.

    vmin, vmax, vdivs: voltage range and number of divisions for gate tables
    """
    chan = moose.HHChannel(parent + '/' + name)
    chan.Ek = 50e-3
    chan.Xpower = 3  # m gate, cubed
    chan.Ypower = 1  # h gate
    # Voltage axis shifted relative to the resting potential.
    vm = np.linspace(vmin, vmax, vdivs + 1) - EREST_ACT
    # Activation (m) rate constants; the v*1e3 terms work in mV,
    # per_ms converts the rates from 1/ms to 1/s.
    alpha_m = per_ms * (25 - vm * 1e3) / (10 * (np.exp((25 - vm * 1e3) / 10) - 1))
    beta_m = per_ms * 4 * np.exp(-vm * 1e3 / 18)
    gate_m = moose.element(chan.path + '/gateX')
    gate_m.min = vmin
    gate_m.max = vmax
    gate_m.divs = vdivs
    # MOOSE gate tables store A = alpha and B = alpha + beta.
    gate_m.tableA = alpha_m
    gate_m.tableB = alpha_m + beta_m
    # Inactivation (h) rate constants.
    alpha_h = per_ms * 0.07 * np.exp(-vm / 20e-3)
    beta_h = per_ms * 1 / (np.exp((30e-3 - vm) / 10e-3) + 1)
    gate_h = moose.element(chan.path + '/gateY')
    gate_h.min = vmin
    gate_h.max = vmax
    gate_h.divs = vdivs
    gate_h.tableA = alpha_h
    gate_h.tableB = alpha_h + beta_h
    # tick = -1: presumably detaches the channel from default clock
    # scheduling — confirm against MOOSE docs.
    chan.tick = -1
    return chan
def make_HH_K(name='HH_K', parent='/library', vmin=-120e-3, vmax=40e-3, vdivs=3000):
    """Create a Hodhkin-Huxley K channel under `parent`.

    vmin, vmax, vdivs: voltage range and number of divisions for gate tables
    """
    chan = moose.HHChannel(parent + '/' + name)
    chan.Ek = -77e-3
    chan.Xpower = 4  # n gate, fourth power
    # Voltage axis shifted relative to the resting potential.
    vm = np.linspace(vmin, vmax, vdivs + 1) - EREST_ACT
    # Activation (n) rate constants; v*1e3 works in mV, per_ms
    # converts from 1/ms to 1/s.
    alpha_n = per_ms * (10 - vm * 1e3) / (100 * (np.exp((10 - vm * 1e3) / 10) - 1))
    beta_n = per_ms * 0.125 * np.exp(-vm * 1e3 / 80)
    gate_n = moose.element(chan.path + '/gateX')
    gate_n.min = vmin
    gate_n.max = vmax
    gate_n.divs = vdivs
    # MOOSE gate tables store A = alpha and B = alpha + beta.
    gate_n.tableA = alpha_n
    gate_n.tableB = alpha_n + beta_n
    # tick = -1: presumably detaches from default clock scheduling —
    # confirm against MOOSE docs.
    chan.tick = -1
    return chan
def makeChemOscillator( name = 'osc', parent = '/library' ):
    """
    Set up a simple oscillatory chemical system. The reaction system is::

        s ---a---> a    // s goes to a, catalyzed by a.
        s ---a---> b    // s goes to b, catalyzed by a.
        a ---b---> s    // a goes to s, catalyzed by b.
        b -------> s    // b is degraded irreversibly to s.

    In sum, **a** has a positive feedback onto itself and also forms **b**.
    **b** has a negative feedback onto **a**.
    Finally, the diffusion constant for **a** is 1/10 that of **b**.

    Returns the kinetics compartment (CubeMesh) containing the model.
    """
    # FIX: in the original this text sat *after* the first two statements,
    # making it a no-op string expression rather than a docstring; it was
    # moved to the top. Unused locals motorRate and concA were removed.
    # Create container for the model.
    model = moose.Neutral( parent + '/' + name )
    compt = moose.CubeMesh( model.path + '/kinetics' )
    diffConst = 10e-12 # m^2/sec
    # Create molecules and reactions.
    a = moose.Pool( compt.path + '/a' )
    b = moose.Pool( compt.path + '/b' )
    s = moose.Pool( compt.path + '/s' )
    e1 = moose.MMenz( compt.path + '/e1' )
    e2 = moose.MMenz( compt.path + '/e2' )
    e3 = moose.MMenz( compt.path + '/e3' )
    r1 = moose.Reac( compt.path + '/r1' )
    a.concInit = 0.1
    b.concInit = 0.1
    s.concInit = 1
    # e1: s -> a, catalyzed by a (autocatalytic positive feedback).
    moose.connect( e1, 'sub', s, 'reac' )
    moose.connect( e1, 'prd', a, 'reac' )
    moose.connect( a, 'nOut', e1, 'enzDest' )
    e1.Km = 1
    e1.kcat = 1
    # e2: s -> b, also catalyzed by a.
    moose.connect( e2, 'sub', s, 'reac' )
    moose.connect( e2, 'prd', b, 'reac' )
    moose.connect( a, 'nOut', e2, 'enzDest' )
    e2.Km = 1
    e2.kcat = 0.5
    # e3: a -> s, catalyzed by b (negative feedback on a).
    moose.connect( e3, 'sub', a, 'reac' )
    moose.connect( e3, 'prd', s, 'reac' )
    moose.connect( b, 'nOut', e3, 'enzDest' )
    e3.Km = 0.1
    e3.kcat = 1
    # r1: irreversible degradation b -> s.
    moose.connect( r1, 'sub', b, 'reac' )
    moose.connect( r1, 'prd', s, 'reac' )
    r1.Kf = 0.3 # 1/sec
    r1.Kb = 0 # 1/sec
    # Assign diffusion parameters: a diffuses 10x slower than b.
    a.diffConst = diffConst/10
    b.diffConst = diffConst
    s.diffConst = 0
    return compt
#################################################################
# Here we have a series of utility functions for building cell
# prototypes.
#################################################################
def transformNMDAR( path ):
    """Replace plain NMDA-named channels under `path` with NMDAChan objects.

    Finds every element whose name contains 'NMDA' but which is not already
    an NMDAChan, builds an NMDAChan at the same path with the same
    Ek/tau1/tau2/Gbar, wires in a SimpleSynHandler, deletes the old channel
    and reconnects the replacement to the parent compartment and (if
    present) its calcium pool.
    """
    for i in moose.wildcardFind( path + "/##/#NMDA#[ISA!=NMDAChan]" ):
        chanpath = i.path
        pa = i.parent
        i.name = '_temp'  # rename the old channel so its path is free for the replacement
        if ( chanpath[-3:] == "[0]" ):
            chanpath = chanpath[:-3]  # strip the index suffix from the path
        nmdar = moose.NMDAChan( chanpath )
        sh = moose.SimpleSynHandler( chanpath + '/sh' )
        moose.connect( sh, 'activationOut', nmdar, 'activation' )
        sh.numSynapses = 1
        sh.synapse[0].weight = 1
        # Carry over the electrical parameters of the old channel.
        nmdar.Ek = i.Ek
        nmdar.tau1 = i.tau1
        nmdar.tau2 = i.tau2
        nmdar.Gbar = i.Gbar
        # Magnesium-block and calcium parameters (hard-coded defaults;
        # units follow MOOSE NMDAChan conventions — confirm against docs).
        nmdar.CMg = 12
        nmdar.KMg_A = 1.0 / 0.28
        nmdar.KMg_B = 1.0 / 62
        nmdar.temperature = 300
        nmdar.extCa = 1.5
        nmdar.intCa = 0.00008
        nmdar.intCaScale = 1
        nmdar.intCaOffset = 0.00008
        nmdar.condFraction = 0.02
        moose.delete( i )
        moose.connect( pa, 'channel', nmdar, 'channel' )
        # Couple the NMDA calcium current to the compartment's Ca pool, if any.
        caconc = moose.wildcardFind( pa.path + '/#[ISA=CaConcBase]' )
        if ( len( caconc ) < 1 ):
            print('no caconcs found on ', pa.path)
        else:
            moose.connect( nmdar, 'ICaOut', caconc[0], 'current' )
            moose.connect( caconc[0], 'concOut', nmdar, 'assignIntCa' )
################################################################
# Utility function for building a compartment, used for spines.
def buildCompt( pa, name, length, dia, xoffset, RM, RA, CM ):
    """Create a cylindrical compartment oriented along the x axis.

    pa: parent moose element; xoffset: x coordinate of the proximal end.
    RM, RA, CM are specific passive properties; absolute Rm, Ra, Cm are
    derived from the cylinder geometry below.
    """
    c = moose.Compartment( pa.path + '/' + name )
    c.x0, c.y0, c.z0 = xoffset, 0, 0
    c.x, c.y, c.z = length + xoffset, 0, 0
    c.diameter = dia
    c.length = length
    cross_section = dia * dia * PI / 4.0   # axial cross-sectional area
    surface = length * dia * PI            # lateral (membrane) surface area
    c.Ra = length * RA / cross_section
    c.Rm = RM / surface
    c.Cm = CM * surface
    return c
################################################################
# Utility function for building a synapse, used for spines.
def buildSyn( name, compt, Ek, tau1, tau2, Gbar, CM ):
    """Attach a dual-exponential SynChan plus a SimpleSynHandler to `compt`.

    Gbar is scaled by compt.Cm / CM so conductance tracks membrane area.
    """
    syn = moose.SynChan( compt.path + '/' + name )
    syn.Ek = Ek
    syn.tau1 = tau1
    syn.tau2 = tau2
    syn.Gbar = Gbar * compt.Cm / CM
    moose.connect( compt, 'channel', syn, 'channel' )
    handler = moose.SimpleSynHandler( syn.path + '/sh' )
    moose.connect( handler, 'activationOut', syn, 'activation' )
    handler.numSynapses = 1
    handler.synapse[0].weight = 1
    return syn
######################################################################
# Utility function, borrowed from proto18.py, for making an LCa channel.
# Based on Traub's 91 model, I believe.
def make_LCa( name = 'LCa', parent = '/library' ):
    """Make an L-type Ca channel prototype under `parent`.

    Borrowed from proto18.py; based on Traub's 1991 model. Returns the
    channel, or None if it already exists at that path.
    """
    EREST_ACT = -0.060 #/* hippocampal cell resting potl */
    ECA = 0.140 + EREST_ACT #// 0.080
    if moose.exists( parent + '/' + name ):
        return
    Ca = moose.HHChannel( parent + '/' + name )
    Ca.Ek = ECA
    Ca.Gbar = 0
    Ca.Gk = 0
    Ca.Xpower = 2
    Ca.Ypower = 1
    Ca.Zpower = 0
    xgate = moose.element( parent + '/' + name + '/gateX' )
    # Packed HH rate parameters for the X gate: the entries are the
    # alpha then beta sigmoid coefficients followed by the table range
    # (per MOOSE's alphaParms convention — confirm against docs).
    xA = np.array( [ 1.6e3, 0, 1.0, -1.0 * (0.065 + EREST_ACT), -0.01389, -20e3 * (0.0511 + EREST_ACT), 20e3, -1.0, -1.0 * (0.0511 + EREST_ACT), 5.0e-3, 3000, -0.1, 0.05 ] )
    xgate.alphaParms = xA
    ygate = moose.element( parent + '/' + name + '/gateY' )
    ygate.min = -0.1
    ygate.max = 0.05
    ygate.divs = 3000
    yA = np.zeros( (ygate.divs + 1), dtype=float)
    yB = np.zeros( (ygate.divs + 1), dtype=float)
    #Fill the Y_A table with alpha values and the Y_B table with (alpha+beta)
    dx = (ygate.max - ygate.min)/ygate.divs
    x = ygate.min
    for i in range( ygate.divs + 1 ):
        # alpha decays exponentially above rest, is constant below;
        # beta is chosen so alpha+beta == 5.0 everywhere.
        if ( x > EREST_ACT):
            yA[i] = 5.0 * math.exp( -50 * (x - EREST_ACT) )
        else:
            yA[i] = 5.0
        yB[i] = 5.0
        x += dx
    ygate.tableA = yA
    ygate.tableB = yB
    return Ca
################################################################
# API function for building spine prototypes. Here we put in the
# spine dimensions, and options for standard channel types.
# The synList tells it to create dual alpha function synchans:
# [name, Erev, tau1, tau2, conductance_density, connectToCa]
# The chanList tells it to copy over channels defined in /library
# and assign the specified conductance density.
# If caTau <= zero then there is no caConc created, otherwise it
# creates one and assigns the desired tau in seconds.
# With the default arguments here it will create a glu, NMDA and LCa,
# and add a Ca_conc.
def addSpineProto( name = 'spine',
    parent = '/library',
    RM = 1.0, RA = 1.0, CM = 0.01,
    shaftLen = 1.e-6 , shaftDia = 0.2e-6,
    headLen = 0.5e-6, headDia = 0.5e-6,
    synList = (),
    chanList = (),
    caTau = 0.0
    ):
    """Build a two-compartment (shaft + head) spine prototype under `parent`.

    synList entries: [name, Erev, tau1, tau2, conductance_density, connectToCa]
    chanList entries: [name, conductance_density, connectToCa]; the named
    channel is copied from `parent` if it exists there, otherwise an LCa
    channel is created. If caTau > 0 a Ca_conc pool with that tau is added
    to the head and wired to any entries flagged connectToCa.
    Returns the Neutral element holding the spine.
    """
    assert( moose.exists( parent ) )
    spine = moose.Neutral( parent + '/' + name )
    shaft = buildCompt( spine, 'shaft', shaftLen, shaftDia, 0.0, RM, RA, CM )
    # Head is placed distal to the shaft along x.
    head = buildCompt( spine, 'head', headLen, headDia, shaftLen, RM, RA, CM )
    moose.connect( shaft, 'axial', head, 'raxial' )
    if caTau > 0.0:
        conc = moose.CaConc( head.path + '/Ca_conc' )
        conc.tau = caTau
        conc.length = head.length
        conc.diameter = head.diameter
        conc.thick = 0.0
        # The 'B' field is deprecated.
        # B = 1/(ion_charge * Faraday * volume)
        #vol = head.length * head.diameter * head.diameter * PI / 4.0
        #conc.B = 1.0 / ( 2.0 * FaradayConst * vol )
        conc.Ca_base = 0.0
    for i in synList:
        syn = buildSyn( i[0], head, i[1], i[2], i[3], i[4], CM )
        # Feed the synaptic current into the Ca pool when requested.
        if i[5] and caTau > 0.0:
            moose.connect( syn, 'IkOut', conc, 'current' )
    for i in chanList:
        if ( moose.exists( parent + '/' + i[0] ) ):
            chan = moose.copy( parent + '/' + i[0], head )
        else:
            # Fall back to a freshly built LCa channel with the given name.
            moose.setCwe( head )
            chan = make_LCa()
            chan.name = i[0]
            moose.setCwe( '/' )
        # Scale conductance with head membrane area (Cm proportional).
        chan.Gbar = i[1] * head.Cm / CM
        moose.connect( head, 'channel', chan, 'channel' )
        if i[2] and caTau > 0.0:
            moose.connect( chan, 'IkOut', conc, 'current' )
    # Upgrade any plain NMDA channels created above to NMDAChan objects.
    transformNMDAR( parent + '/' + name )
    return spine
#######################################################################
# Here are some compartment related prototyping functions
def makePassiveHHsoma(name='passiveHHsoma', parent='/library'):
    """Make a passive soma sized like the HH squid compartment.

    Length and diameter are 500 microns; the passive parameters passed
    to buildCompt are RM=0.33333333, RA=3000, CM=0.01.
    """
    elecpath = parent + '/' + name
    if moose.exists(elecpath):
        # Already built: reuse the existing prototype.
        return moose.element(elecpath)
    elecid = moose.Neuron(elecpath)
    dia = 500e-6
    soma = buildCompt(elecid, 'soma', dia, dia, 0.0,
                      0.33333333, 3000, 0.01)
    soma.initVm = -65e-3  # resting of -65, from HH
    soma.Em = -54.4e-3    # 10.6 mV above resting of -65, from HH
    return elecid
# Wrapper function. This is used by the proto builder from rdesigneur
def makeActiveSpine(name='active_spine', parent='/library'):
    """Spine prototype with glu + NMDA synapses, a Ca channel and a Ca pool.

    Wrapper used by the proto builder from rdesigneur.
    """
    synapses = (
        ['glu', 0.0, 2e-3, 9e-3, 200.0, False],
        ['NMDA', 0.0, 20e-3, 20e-3, 80.0, True],
    )
    channels = (['Ca', 10.0, True], )
    return addSpineProto(name=name, parent=parent,
                         synList=synapses, chanList=channels,
                         caTau=13.333e-3)
# Wrapper function. This is used by the proto builder from rdesigneur
def makeExcSpine(name='exc_spine', parent='/library'):
    """Spine prototype with glu + NMDA synapses and a Ca pool, no Ca channel.

    Wrapper used by the proto builder from rdesigneur.
    """
    synapses = (
        ['glu', 0.0, 2e-3, 9e-3, 200.0, False],
        ['NMDA', 0.0, 20e-3, 20e-3, 80.0, True],
    )
    return addSpineProto(name=name, parent=parent,
                         synList=synapses, caTau=13.333e-3)
# Wrapper function. This is used by the proto builder from rdesigneur
def makePassiveSpine(name='passive_spine', parent='/library'):
    """Bare spine prototype with no synapses, channels or Ca pool.

    Wrapper used by the proto builder from rdesigneur.
    """
    return addSpineProto(name=name, parent=parent)
# legacy function. This is used by the proto builder from rdesigneur
def makeSpineProto(name):
    """Legacy entry point used by the proto builder from rdesigneur."""
    addSpineProto(name=name, chanList=())
|
rahulgayatri23/moose-core
|
python/rdesigneur/rdesigneurProtos.py
|
Python
|
gpl-3.0
| 13,405
|
[
"MOOSE",
"NEURON"
] |
b38cbcd0e83adf804fbb801219fe4afacd0e9916a67ac9996df677a3e29ad973
|
import numpy as np
import scipy.ndimage as ndi
from .._shared.utils import warn
from . import _marching_cubes_cy
def marching_cubes(volume, level, spacing=(1., 1., 1.),
                   gradient_direction='descent'):
    """
    Marching cubes algorithm to find iso-valued surfaces in 3d volumetric data

    Parameters
    ----------
    volume : (M, N, P) array of doubles
        Input data volume to find isosurfaces. Will be cast to `np.float64`.
    level : float
        Contour value to search for isosurfaces in `volume`.
    spacing : length-3 tuple of floats
        Voxel spacing in spatial dimensions corresponding to numpy array
        indexing dimensions (M, N, P) as in `volume`.
    gradient_direction : string
        Controls if the mesh was generated from an isosurface with gradient
        descent toward objects of interest (the default), or the opposite.
        The two options are:
        * descent : Object was greater than exterior
        * ascent : Exterior was greater than object

    Returns
    -------
    verts : (V, 3) array
        Spatial coordinates for V unique mesh vertices. Coordinate order
        matches input `volume` (M, N, P).
    faces : (F, 3) array
        Define triangular faces via referencing vertex indices from ``verts``.
        This algorithm specifically outputs triangles, so each face has
        exactly three indices.

    Notes
    -----
    The marching cubes algorithm is implemented as described in [1]_.
    A simple explanation is available here::
        http://www.essi.fr/~lingrand/MarchingCubes/algo.html

    There are several known ambiguous cases in the marching cubes algorithm.
    Using point labeling as in [1]_, Figure 4, as shown::

            v8 ------ v7
           / |       / |        y
          /  |      /  |        ^  z
        v4 ------ v3   |        | /
         |  v5 ----|- v6        |/          (note: NOT right handed!)
         |  /      |  /          ----> x
         | /       | /
        v1 ------ v2

    Most notably, if v4, v8, v2, and v6 are all >= `level` (or any
    generalization of this case) two parallel planes are generated by this
    algorithm, separating v4 and v8 from v2 and v6. An equally valid
    interpretation would be a single connected thin surface enclosing all
    four points. This is the best known ambiguity, though there are others.

    This algorithm does not attempt to resolve such ambiguities; it is a naive
    implementation of marching cubes as in [1]_, but may be a good beginning
    for work with more recent techniques (Dual Marching Cubes, Extended
    Marching Cubes, Cubic Marching Squares, etc.).

    Because of interactions between neighboring cubes, the isosurface(s)
    generated by this algorithm are NOT guaranteed to be closed, particularly
    for complicated contours. Furthermore, this algorithm does not guarantee
    a single contour will be returned. Indeed, ALL isosurfaces which cross
    `level` will be found, regardless of connectivity.

    The output is a triangular mesh consisting of a set of unique vertices and
    connecting triangles. The order of these vertices and triangles in the
    output list is determined by the position of the smallest ``x,y,z`` (in
    lexicographical order) coordinate in the contour. This is a side-effect
    of how the input array is traversed, but can be relied upon.

    The generated mesh guarantees coherent orientation as of version 0.12.

    To quantify the area of an isosurface generated by this algorithm, pass
    outputs directly into `skimage.measure.mesh_surface_area`.

    Regarding visualization of algorithm output, the ``mayavi`` package
    is recommended. To contour a volume named `myvolume` about the level 0.0::

    >>> from mayavi import mlab # doctest: +SKIP
    >>> verts, faces = marching_cubes(myvolume, 0.0, (1., 1., 2.)) # doctest: +SKIP
    >>> mlab.triangular_mesh([vert[0] for vert in verts],
    ...                      [vert[1] for vert in verts],
    ...                      [vert[2] for vert in verts],
    ...                      faces) # doctest: +SKIP
    >>> mlab.show() # doctest: +SKIP

    References
    ----------
    .. [1] Lorensen, William and Harvey E. Cline. Marching Cubes: A High
           Resolution 3D Surface Construction Algorithm. Computer Graphics
           (SIGGRAPH 87 Proceedings) 21(4) July 1987, p. 163-170).

    See Also
    --------
    skimage.measure.correct_mesh_orientation
    skimage.measure.mesh_surface_area
    """
    # Check inputs and ensure `volume` is C-contiguous for memoryviews
    if volume.ndim != 3:
        raise ValueError("Input volume must have 3 dimensions.")
    if level < volume.min() or level > volume.max():
        raise ValueError("Contour level must be within volume data range.")
    if len(spacing) != 3:
        raise ValueError("`spacing` must consist of three floats.")
    volume = np.array(volume, dtype=np.float64, order="C")

    # Extract raw triangles using marching cubes in Cython
    # Returns a list of length-3 lists, each sub-list containing three
    # tuples. The tuples hold (x, y, z) coordinates for triangle vertices.
    # Note: this algorithm is fast, but returns degenerate "triangles" which
    # have repeated vertices - and equivalent vertices are redundantly
    # placed in every triangle they connect with.
    raw_faces = _marching_cubes_cy.iterate_and_store_3d(volume, float(level))

    # Find and collect unique vertices, storing triangle verts as indices.
    # Returns a true mesh with no degenerate faces.
    verts, faces = _marching_cubes_cy.unpack_unique_verts(raw_faces)
    verts = np.asarray(verts)
    faces = np.asarray(faces)

    # FIX: the original computed np.gradient(volume) into three unused
    # locals here — an O(M*N*P) waste of time and memory; removed.
    faces = _correct_mesh_orientation(volume, verts[faces], faces, spacing,
                                      gradient_direction)

    # Adjust for non-isotropic spacing in `verts` at time of return
    return verts * np.r_[spacing], faces
def mesh_surface_area(verts, faces):
    """
    Compute surface area, given vertices & triangular faces

    Parameters
    ----------
    verts : (V, 3) array of floats
        Array containing (x, y, z) coordinates for V unique mesh vertices.
    faces : (F, 3) array of ints
        List of length-3 lists of integers, referencing vertex coordinates as
        provided in `verts`

    Returns
    -------
    area : float
        Surface area of mesh. Units are [coordinate units] ** 2.

    Notes
    -----
    The arguments expected by this function are the exact outputs from
    `skimage.measure.marching_cubes`. For unit correct output, ensure correct
    `spacing` was passed to `skimage.measure.marching_cubes`.

    This algorithm works properly only if the ``faces`` provided are all
    triangles.

    See Also
    --------
    skimage.measure.marching_cubes
    skimage.measure.correct_mesh_orientation
    """
    # Gather the three corner coordinates of every triangle, then form two
    # edge vectors per triangle from the first corner.
    corners = verts[faces]
    edge1 = corners[:, 1, :] - corners[:, 0, :]
    edge2 = corners[:, 2, :] - corners[:, 0, :]
    del corners
    # Triangle area in 3D = 1/2 * Euclidean norm of the edge cross product.
    cross = np.cross(edge1, edge2)
    return 0.5 * np.sqrt((cross ** 2).sum(axis=1)).sum()
def correct_mesh_orientation(volume, verts, faces, spacing=(1., 1., 1.),
                             gradient_direction='descent'):
    """Correct orientations of mesh faces.

    Deprecated public wrapper: undoes the ``spacing`` scaling applied by
    `marching_cubes` and delegates to the private implementation.

    Parameters
    ----------
    volume : (M, N, P) array of doubles
        Input data volume to find isosurfaces. Will be cast to `np.float64`.
    verts : (V, 3) array of floats
        Array containing (x, y, z) coordinates for V unique mesh vertices.
    faces : (F, 3) array of ints
        List of length-3 lists of integers, referencing vertex coordinates as
        provided in `verts`.
    spacing : length-3 tuple of floats
        Voxel spacing in spatial dimensions corresponding to numpy array
        indexing dimensions (M, N, P) as in `volume`.
    gradient_direction : string
        Either 'descent' (object was greater than exterior, default) or
        'ascent' (exterior was greater than object).

    Returns
    -------
    faces_corrected : (F, 3) array of ints
        Corrected list of faces referencing vertex coordinates in `verts`.

    See Also
    --------
    skimage.measure.marching_cubes
    skimage.measure.mesh_surface_area
    """
    warn(DeprecationWarning("`correct_mesh_orientation` is deprecated for "
                            "removal as `marching_cubes` now guarantess "
                            "correct mesh orientation."))
    # Map the vertices back to voxel-index coordinates (the private helper
    # samples the volume gradient, which lives on the voxel grid).
    unscaled = verts / np.asarray(spacing, dtype=np.float64)
    return _correct_mesh_orientation(volume, unscaled[faces], faces, spacing,
                                     gradient_direction)
def _correct_mesh_orientation(volume, actual_verts, faces,
spacing=(1., 1., 1.),
gradient_direction='descent'):
"""
Correct orientations of mesh faces.
Parameters
----------
volume : (M, N, P) array of doubles
Input data volume to find isosurfaces. Will be cast to `np.float64`.
actual_verts : (F, 3, 3) array of floats
Array with (face, vertex, coords) index coordinates.
faces : (F, 3) array of ints
List of length-3 lists of integers, referencing vertex coordinates as
provided in `verts`.
spacing : length-3 tuple of floats
Voxel spacing in spatial dimensions corresponding to numpy array
indexing dimensions (M, N, P) as in `volume`.
gradient_direction : string
Controls if the mesh was generated from an isosurface with gradient
descent toward objects of interest (the default), or the opposite.
The two options are:
* descent : Object was greater than exterior
* ascent : Exterior was greater than object
Returns
-------
faces_corrected (F, 3) array of ints
Corrected list of faces referencing vertex coordinates in `verts`.
Notes
-----
Certain applications and mesh processing algorithms require all faces
to be oriented in a consistent way. Generally, this means a normal vector
points "out" of the meshed shapes. This algorithm corrects the output from
`skimage.measure.marching_cubes` by flipping the orientation of
mis-oriented faces.
Because marching cubes could be used to find isosurfaces either on
gradient descent (where the desired object has greater values than the
exterior) or ascent (where the desired object has lower values than the
exterior), the ``gradient_direction`` kwarg allows the user to inform this
algorithm which is correct. If the resulting mesh appears to be oriented
completely incorrectly, try changing this option.
The arguments expected by this function are the exact outputs from
`skimage.measure.marching_cubes` except `actual_verts`, which is an
uncorrected version of the fancy indexing operation `verts[faces]`.
Only `faces` is corrected and returned as the vertices do not change,
only the order in which they are referenced.
This algorithm assumes ``faces`` provided are exclusively triangles.
See Also
--------
skimage.measure.marching_cubes
skimage.measure.mesh_surface_area
"""
# Calculate gradient of `volume`, then interpolate to vertices in `verts`
grad_x, grad_y, grad_z = np.gradient(volume)
a = actual_verts[:, 0, :] - actual_verts[:, 1, :]
b = actual_verts[:, 0, :] - actual_verts[:, 2, :]
# Find triangle centroids
centroids = (actual_verts.sum(axis=1) / 3.).T
del actual_verts
# Interpolate face centroids into each gradient axis
grad_centroids_x = ndi.map_coordinates(grad_x, centroids)
grad_centroids_y = ndi.map_coordinates(grad_y, centroids)
grad_centroids_z = ndi.map_coordinates(grad_z, centroids)
# Combine and normalize interpolated gradients
grad_centroids = np.c_[grad_centroids_x, grad_centroids_y,
grad_centroids_z]
grad_centroids = (grad_centroids /
(np.sum(grad_centroids ** 2,
axis=1) ** 0.5)[:, np.newaxis])
# Find normal vectors for each face via cross product
crosses = np.cross(a, b)
crosses = crosses / (np.sum(crosses ** 2, axis=1) ** (0.5))[:, np.newaxis]
# Take dot product
dotproducts = (grad_centroids * crosses).sum(axis=1)
# Find mis-oriented faces
if 'descent' in gradient_direction:
# Faces with incorrect orientations have dot product < 0
indices = (dotproducts < 0).nonzero()[0]
elif 'ascent' in gradient_direction:
# Faces with incorrection orientation have dot product > 0
indices = (dotproducts > 0).nonzero()[0]
else:
raise ValueError("Incorrect input %s in `gradient_direction`, see "
"docstring." % (gradient_direction))
# Correct orientation and return, without modifying original data
faces_corrected = faces.copy()
faces_corrected[indices] = faces_corrected[indices, ::-1]
return faces_corrected
|
pratapvardhan/scikit-image
|
skimage/measure/_marching_cubes.py
|
Python
|
bsd-3-clause
| 14,778
|
[
"Mayavi"
] |
9004c626daa3202f5b350d4de7da969d970ff91ec0aea944366548b8555e3303
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
"""
This module contains an algorithm to solve the Linear Assignment Problem.
It has the same functionality as linear_assignment.pyx, but is much slower
as it is vectorized in numpy rather than cython
"""
__author__ = "Will Richards"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Will Richards"
__email__ = "wrichards@mit.edu"
__date__ = "Jan 28, 2013"
import numpy as np
from six.moves import range
class LinearAssignment(object):
    """
    This class finds the solution to the Linear Assignment Problem.
    It finds a minimum cost matching between two sets, given a cost
    matrix.

    This class is an implementation of the LAPJV algorithm described in:
    R. Jonker, A. Volgenant. A Shortest Augmenting Path Algorithm for
    Dense and Sparse Linear Assignment Problems. Computing 38, 325-340
    (1987)

    Args:
        costs: The cost matrix of the problem. cost[i,j] should be the
            cost of matching x[i] to y[j]. The cost matrix may be
            rectangular
        epsilon: Tolerance for determining if solution vector is < 0

    .. attribute: min_cost:

        The minimum cost of the matching

    .. attribute: solution:

        The matching of the rows to columns. i.e solution = [1, 2, 0]
        would match row 0 to column 1, row 1 to column 2 and row 2
        to column 0. Total cost would be c[0, 1] + c[1, 2] + c[2, 0]
    """

    def __init__(self, costs, epsilon=1e-6):
        self.orig_c = np.array(costs, dtype=np.float64)
        self.nx, self.ny = self.orig_c.shape
        self.n = self.ny
        self._inds = np.arange(self.n)
        self.epsilon = abs(epsilon)

        # check that cost matrix is square (pad rectangular problems below)
        if self.nx > self.ny:
            raise ValueError("cost matrix must have at least as many columns as rows")

        if self.nx == self.ny:
            self.c = self.orig_c
        else:
            # Can run into precision issues if np.max is used as the fill value (since a
            # value of this size doesn't necessarily end up in the solution). A value
            # at least as large as the maximin is, however, guaranteed to appear so it
            # is a safer choice. The fill value is not zero to avoid choosing the extra
            # rows in the initial column reduction step
            self.c = np.full((self.n, self.n), np.max(np.min(self.orig_c, axis=1)))
            self.c[:self.nx] = self.orig_c

        # initialize solution vectors: _x[i] is the column matched to row i,
        # _y[j] the row matched to column j; -1 marks "unassigned".
        # NOTE: np.int/np.bool were deprecated and removed in NumPy >= 1.24;
        # the builtin int/bool give identical dtypes on every NumPy version.
        self._x = np.zeros(self.n, dtype=int) - 1
        self._y = self._x.copy()

        # if column reduction doesn't find a solution, augment with shortest
        # paths until one is found
        if self._column_reduction():
            self._augmenting_row_reduction()
            # initialize the reduced costs
            self._update_cred()
            while -1 in self._x:
                self._augment()

        self.solution = self._x[:self.nx]
        self._min_cost = None

    @property
    def min_cost(self):
        """
        Returns the cost of the best assignment
        """
        # Compare against None explicitly: a legitimate minimum cost of 0
        # is falsy and would otherwise be recomputed on every access.
        if self._min_cost is None:
            self._min_cost = np.sum(self.c[np.arange(self.nx), self.solution])
        return self._min_cost

    def _column_reduction(self):
        """
        Column reduction and reduction transfer steps from LAPJV algorithm.

        Returns:
            True if unassigned rows remain (augmentation needed),
            False if the reduction alone solved the problem.
        """
        # assign each column to its lowest cost row, ensuring that only row
        # or column is assigned once
        i1, j = np.unique(np.argmin(self.c, axis=0), return_index=True)
        self._x[i1] = j

        # if problem is solved, return
        if len(i1) == self.n:
            return False
        self._y[j] = i1

        # reduction_transfer
        # tempc is array with previously assigned matchings masked
        self._v = np.min(self.c, axis=0)
        tempc = self.c.copy()
        tempc[i1, j] = np.inf
        mu = np.min(tempc[i1, :] - self._v[None, :], axis=1)
        self._v[j] -= mu
        return True

    def _augmenting_row_reduction(self):
        """
        Augmenting row reduction step from LAPJV algorithm
        """
        unassigned = np.where(self._x == -1)[0]
        for i in unassigned:
            for _ in range(self.c.size):
                # Time in this loop can be proportional to 1/epsilon
                # This step is not strictly necessary, so cutoff early
                # to avoid near-infinite loops

                # find smallest 2 values and indices
                temp = self.c[i] - self._v
                j1 = np.argmin(temp)
                u1 = temp[j1]
                temp[j1] = np.inf
                j2 = np.argmin(temp)
                u2 = temp[j2]

                if u1 < u2:
                    self._v[j1] -= u2 - u1
                elif self._y[j1] != -1:
                    j1 = j2
                k = self._y[j1]
                if k != -1:
                    self._x[k] = -1
                self._x[i] = j1
                self._y[j1] = i
                i = k
                if k == -1 or abs(u1 - u2) < self.epsilon:
                    break

    def _update_cred(self):
        """
        Updates the reduced costs with the values from the
        dual solution
        """
        ui = self.c[self._inds, self._x] - self._v[self._x]
        self.cred = self.c - ui[:, None] - self._v[None, :]

    def _augment(self):
        """
        Finds a minimum cost path and adds it to the matching
        """
        # build a minimum cost tree
        _pred, _ready, istar, j, mu = self._build_tree()

        # update prices
        self._v[_ready] += self._d[_ready] - mu

        # augment the solution with the minimum cost path from the
        # tree. Follows an alternating path along matched, unmatched
        # edges from X to Y
        while True:
            i = _pred[j]
            self._y[j] = i
            k = j
            j = self._x[i]
            self._x[i] = k
            if i == istar:
                break
        self._update_cred()

    def _build_tree(self):
        """
        Builds the tree finding an augmenting path. Alternates along
        matched and unmatched edges between X and Y. The paths are
        stored in _pred (new predecessor of nodes in Y), and
        self._x and self._y
        """
        # find unassigned i* (-1 sorts first, so argmin finds one)
        istar = np.argmin(self._x)

        # compute distances
        self._d = self.c[istar] - self._v
        _pred = np.zeros(self.n, dtype=int) + istar

        # initialize sets
        # READY: set of nodes visited and in the path (whose price gets
        # updated in augment)
        # SCAN: set of nodes at the bottom of the tree, which we need to
        # look at
        # TODO: unvisited nodes
        _ready = np.zeros(self.n, dtype=bool)
        _scan = np.zeros(self.n, dtype=bool)
        _todo = np.ones(self.n, dtype=bool)

        while True:
            # populate scan with minimum reduced distances
            if not _scan.any():
                mu = np.min(self._d[_todo])
                _scan[self._d == mu] = True
                _todo[_scan] = False
                j = np.argmin(self._y * _scan)
                if self._y[j] == -1 and _scan[j]:
                    return _pred, _ready, istar, j, mu

            # pick jstar from scan (scan always has at least 1)
            _jstar = np.argmax(_scan)

            # pick i associated with jstar
            i = self._y[_jstar]

            _scan[_jstar] = False
            _ready[_jstar] = True

            # find shorter distances
            newdists = mu + self.cred[i, :]
            shorter = np.logical_and(newdists < self._d, _todo)

            # update distances
            self._d[shorter] = newdists[shorter]

            # update predecessors
            _pred[shorter] = i

            for j in np.nonzero(np.logical_and(self._d == mu, _todo))[0]:
                if self._y[j] == -1:
                    return _pred, _ready, istar, j, mu
                _scan[j] = True
                _todo[j] = False
|
xhqu1981/pymatgen
|
pymatgen/optimization/linear_assignment_numpy.py
|
Python
|
mit
| 8,228
|
[
"pymatgen"
] |
6f91df2f7619cb1162b099704f6d2473f99ca8e8a480dd4de679ad7d096c5afa
|
# -*- coding: utf-8 -*-
from .. Error import RINGError
class Reader(object):
    """
    Reader reads the parsed RING input, and returns the RDkit wrapper objects
    in pgradd.RDkitWrapper.

    Attributes
    ----------
    ast : abstract syntax tree obtrained from parser
    """

    def __init__(self, ast):
        # Keep the abstract syntax tree produced by the RING parser.
        self.ast = ast

    def ReadRINGInput(self, tree):
        # Dispatch on which top-level RING construct was parsed.
        kind = tree[0][0].name
        assert kind in ('Fragment',
                        'ReactionRule',
                        'EnumerationQuery')
        if kind == 'Fragment':
            # A fragment description yields a molecule query.
            from . MolQueryRead import MolQueryReader
            self.type = 'MolQuery'
            return MolQueryReader(tree[0][1:]).Read()
        elif kind == 'ReactionRule':
            # A reaction rule yields a reaction query.
            from . ReactionQueryRead import ReactionQueryReader
            self.type = 'ReactionQuery'
            return ReactionQueryReader(tree[0][1:]).Read()
        elif kind == 'EnumerationQuery':
            # TODO enumeration query
            raise NotImplementedError('Coming soon')

    def Read(self):
        # Root tree reading. Check if the input is RINGinput
        assert self.ast[0].name == "RINGInput"
        return self.ReadRINGInput(self.ast[1:])
def Read(text, strict=False):
    """
    Return MolQuery, ReactionQuery, or ReactionNetworkEnumerationQuery by
    interpretting RING input string.

    Parameters
    ----------
    text : string
        Specify string describing chemical structure, elementary reaction, or
        reaction network enumeration rules in RING notation.
    strict : boolean, optional
        If True, then disable use of syntactic extensions such as support for
        "radical electrons".
        NOTE(review): this flag is currently accepted but never consulted
        here — confirm whether the parser should receive it.

    Returns
    -------
    Returns RDkit wrapped queries that extends RDkit's functionality:
    * MolQuery if fragment is given in string
    * ReactionQuery if reaction rule is given in string
    * ReactionNetworkEnumerationQuery if enumeration query is given in string

    Raises
    ------
    msr.error.RINGSyntaxError
        If `text` does not conform to RING syntax.
    msr.error.RINGReaderError
        If `text` is invalid RING for non-syntactic reasons.
    """
    from . import Parser
    # The previous ``except RINGError as exc: raise exc`` wrapper re-raised
    # the exception unchanged (while growing its traceback); letting it
    # propagate naturally is equivalent and preserves the original traceback.
    return Reader(Parser.parse(text)).Read()
|
VlachosGroup/VlachosGroupAdditivity
|
pgradd/RINGParser/Reader.py
|
Python
|
mit
| 2,491
|
[
"RDKit"
] |
2824fea4260dfd31243c41928a6c2f6ec310679f8b11aa06feea3b334b0c3883
|
#! /usr/bin/env python
# Python 2 benchmark script: times ASAP's EMT calculator by running
# Langevin dynamics on a 108000-atom Cu FCC crystal, with an optional
# threaded mode and an energy/force self-check against a cached reference.
#PBS -N Timing
#PBS -l nodes=1:ppn=4:opteron285
#PBS -q small
from numpy import *
from asap3 import *
from asap3.md.verlet import VelocityVerlet
from asap3.md.langevin import Langevin
from ase.lattice.cubic import FaceCenteredCubic
from asap3.Timing import report_timing
import sys, cPickle, time, commands, os, re
import numpy as np
from asap3.testtools import ReportTest
# cpu time: time.clock(). Wall clock time: time.time()
#set_verbose(1)
# "-t" requests the single-helper-thread mode, "-T" requests 4 threads.
usethread = (len(sys.argv) > 1 and
             (sys.argv[1] == "-t" or sys.argv[1] == "-T"))
if usethread:
    if sys.argv[1] == "-t":
        AsapThreads()
    else:
        AsapThreads(4)
host = commands.getoutput("hostname")
# Number of MD steps that are timed below.
timesteps = 100
# Threaded and serial runs record into separate files.
if usethread:
    dbfilename = "timing-thread.dat"
    logfilename = "timing-thread.log"
else:
    dbfilename = "timing.dat"
    logfilename = "timing.log"
selfcheckfilename = "timing-selfcheck.dat"
asapversion = get_version()
when = time.strftime("%a %d %b %Y %H:%M", time.localtime(time.time()))
# Persist the NumPy RNG state so repeated runs use identical random numbers.
randomstate = "randomstate.pickle"
if os.path.isfile(randomstate):
    np.random.set_state(cPickle.load(open(randomstate)))
else:
    print "Saving random state for next call."
    rndfile = open(randomstate, "w")
    cPickle.dump(np.random.get_state(), rndfile)
    rndfile.close()
#PrintVersion(1)
print "Running ASAP timing on "+host+"."
# Map known Niflheim cluster node names onto canonical host labels.
if re.match("^n\d\d\d.dcsc.fysik.dtu.dk$", host):
    print " This is a d512 node on Niflheim."
    fullhost = "niflheim-d512/%s" % (host.split(".")[0])
    host = "niflheim-d512"
elif re.match("^[stu]\d\d\d.dcsc.fysik.dtu.dk$", host):
    print " This is an s50 node on Niflheim."
    fullhost = "niflheim-s50/%s" % (host.split(".")[0])
    host = "niflheim-s50"
else:
    fullhost = host
print "Current time is "+when
print ""
print "Preparing system"
# 30x30x30 FCC copper unit cells -> 108000 atoms (4 atoms per cell).
initial = FaceCenteredCubic(directions=[[1,0,0],[0,1,0],[0,0,1]],
                            size=(30, 30, 30),
                            symbol="Cu")
ReportTest("Number of atoms", len(initial), 108000, 0)
# Perturb positions deterministically so forces are nonzero and reproducible.
r = initial.get_positions()
r.flat[:] += 0.14 * sin(arange(3*len(initial)))
initial.set_positions(r)
print "Running self-test."
atoms = Atoms(initial)
atoms.set_calculator(EMT())
e = atoms.get_potential_energies()
f = atoms.get_forces()
# Compare energies/forces against the cached reference; create the cache
# on the first run so later runs can detect numerical regressions.
if os.access(selfcheckfilename, os.F_OK):
    olde, oldf = cPickle.load(open(selfcheckfilename))
    de = max(fabs(e - olde))
    df = max(fabs(f.flat[:] - oldf.flat[:]))
    print "Maximal deviation: Energy", de, " Force", df
    ReportTest("Max force error", df, 0.0, 1e-11)
    ReportTest("Max energy error", de, 0.0, 1e-11)
    del olde, oldf
else:
    print "WARNING: No self-check database found, creating it."
    cPickle.dump((e, f), open(selfcheckfilename, "w"))
del e,f,atoms
ReportTest.Summary(exit=1)
print "Preparing to run Langevin dynamics."
atoms = Atoms(initial)
atoms.set_calculator(EMT())
# 5 fs timestep, 400 K target temperature, friction coefficient 0.001.
dynamics = Langevin(atoms, 5*units.fs, 400*units.kB, 0.001)
print "Running Langevin dynamics."
startcpu, startwall = time.clock(), time.time()
dynamics.run(timesteps)
lcpu, lwall = time.clock() - startcpu, time.time() - startwall
# Fraction of wall-clock time actually spent on the CPU.
lfraction = lcpu/lwall
sys.stderr.write("\n")
print "Langevin dynamics done."
print "Temperature:", atoms.get_temperature()
#del dynamics, atoms
print ""
print ""
print "TIMING RESULTS:"
print "Langevin: CPU time %.2fs Wall clock time %.2fs (%.0f%%)" % (lcpu, lwall, lfraction * 100)
print ""
report_timing()
|
from __future__ import division
import os.path
from subprocess import Popen, PIPE
from operator import itemgetter
from itertools import izip
from array import array
from numpy import histogram, zeros, median, sum as np_sum
import pysam
try:
from pysam.csamtools import Samfile
except ImportError:
from pysam import Samfile
from crumbs.statistics import (draw_histogram_ascii, IntCounter, LABELS,
BestItemsKeeper)
from bam_crumbs.settings import get_setting
from bam_crumbs.utils.flag import SAM_FLAG_BINARIES, SAM_FLAGS
from bam_crumbs.utils.bin import get_binary_path
from collections import Counter
# pylint: disable=C0111
# Histogram bin count and "top references" report size, both taken from
# the bam_crumbs settings module so they can be overridden centrally.
DEFAULT_N_BINS = get_setting('DEFAULT_N_BINS')
DEFAULT_N_MOST_ABUNDANT_REFERENCES = get_setting('DEFAULT_N_MOST_ABUNDANT_REFERENCES')
def count_reads(ref_name, bams, start=None, end=None):
    'It returns the count of aligned reads in the region'
    # Sum the per-file counts for the requested region over every BAM.
    return sum(bam.count(reference=ref_name, start=start, end=end)
               for bam in bams)
class ArrayWrapper(object):
    'A thin wrapper around numpy to have the same interface as IntCounter'

    def __init__(self, array, bins=DEFAULT_N_BINS):
        self.array = array
        self.labels = LABELS.copy()
        self._bins = bins

    @property
    def min(self):
        return self.array.min()

    @property
    def max(self):
        return self.array.max()

    @property
    def average(self):
        return self.array.mean()

    @property
    def median(self):
        return median(self.array)

    @property
    def variance(self):
        return self.array.var()

    @property
    def count(self):
        return len(self.array)

    @property
    def sum(self):
        return np_sum(self.array)

    def calculate_distribution(self, bins=None, min_=None, max_=None):
        # Fall back to the wrapped data's own bounds and the configured
        # bin count when the caller does not override them.
        min_ = self.min if min_ is None else min_
        max_ = self.max if max_ is None else max_
        bins = self._bins if bins is None else bins
        counts, bins = histogram(self.array, bins=bins, range=(min_, max_))
        return {'bin_limits': bins, 'counts': counts}

    def update_labels(self, labels):
        'It prepares the labels for output files'
        self.labels.update(labels)

    def __str__(self):
        return self.write()

    def write(self, max_in_distrib=None):
        'It writes some basic stats of the values'
        if self.count == 0:
            return ''
        labels = self.labels

        # Integers get thousands separators, everything else two decimals.
        def format_num(num):
            return '{:,d}'.format(num) if isinstance(num, int) else '%.2f' % num

        parts = ['{}: {}\n'.format(labels['minimum'], format_num(self.min)),
                 '{}: {}\n'.format(labels['maximum'], format_num(self.max)),
                 '{}: {}\n'.format(labels['average'],
                                   format_num(self.average))]
        # Optional sections are suppressed by setting their label to None.
        if labels['variance'] is not None:
            parts.append('{}: {}\n'.format(labels['variance'],
                                           format_num(self.variance)))
        if labels['sum'] is not None:
            parts.append('{}: {}\n'.format(labels['sum'],
                                           format_num(self.sum)))
        if labels['items'] is not None:
            parts.append('{}: {}\n'.format(labels['items'], self.count))
        parts.append('\n')
        distrib = self.calculate_distribution(max_=max_in_distrib,
                                              bins=self._bins)
        parts.append(draw_histogram_ascii(distrib['bin_limits'],
                                          distrib['counts']))
        return ''.join(parts)
class ReferenceStats(object):
    """Per-reference RPKM, most-abundant-reference and length statistics
    computed from one or more BAM files that share the same references."""

    def __init__(self, bams,
                 n_most_abundant_refs=DEFAULT_N_MOST_ABUNDANT_REFERENCES,
                 bins=DEFAULT_N_BINS):
        self._bams = bams
        self._bins = bins
        self._rpkms = None
        self._tot_reads = 0
        self._lengths = None
        self._n_most_expressed_reads = n_most_abundant_refs
        self._most_abundant_refs = None
        # All statistics are computed eagerly at construction time.
        self._count_reads()

    def _count_reads(self):
        # Accumulate reads-per-kilobase per reference over all BAMs, then
        # normalize to RPKM using the grand total of reads.
        nreferences = self._bams[0].nreferences
        rpks = zeros(nreferences)
        references = []
        length_counts = IntCounter()
        first_bam = True
        n_reads = 0
        for bam in self._bams:
            if bam.nreferences != nreferences:
                msg = 'BAM files should have the same references'
                raise ValueError(msg)
            for index, count in enumerate(get_reference_counts(bam.filename)):
                n_reads += count['unmapped_reads'] + count['mapped_reads']
                if count['reference'] is None:
                    # some non-mapped reads have reference = None
                    continue
                # reads-per-kilobase for this reference in this BAM
                kb_len = count['length'] / 1000
                rpk = count['mapped_reads'] / kb_len
                rpks[index] += rpk
                if first_bam:
                    # For the reference lengths we use the first BAM to make
                    references.append(count['reference'])
                    length_counts[count['length']] += 1
                else:
                    # the bams should be sorted with the references in the same
                    # order
                    if references[index] != count['reference']:
                        msg = 'The reference lengths do not match in the bams'
                        raise RuntimeError(msg)
            first_bam = False
        # RPK -> RPKM by dividing by total reads in millions.
        million_reads = n_reads / 1e6
        rpks /= million_reads # rpkms
        self._rpkms = ArrayWrapper(rpks, bins=self._bins)
        # Keep only the top-N references ranked by RPKM.
        abundant_refs = BestItemsKeeper(self._n_most_expressed_reads,
                                        izip(references, rpks),
                                        key=itemgetter(1))
        abundant_refs = [{'reference': i[0], 'rpkm': i[1]} for i in abundant_refs]
        self._most_abundant_refs = abundant_refs
        self._lengths = length_counts

    @property
    def lengths(self):
        # IntCounter of reference lengths (from the first BAM).
        return self._lengths

    @property
    def rpkms(self):
        # ArrayWrapper over the per-reference RPKM values.
        return self._rpkms

    @property
    def most_abundant_refs(self):
        # List of {'reference': name, 'rpkm': value} dicts, highest first.
        return self._most_abundant_refs

    def __str__(self):
        return self.write()

    def write(self, max_rpkm=None):
        # Human-readable report: RPKM stats, top references, length stats.
        result = 'RPKMs\n'
        result += '-----\n'
        result += self.rpkms.write(max_in_distrib=max_rpkm)
        result += '\n'
        result += 'Most represented references\n'
        result += '---------------------------\n'
        result += ''.join(['{reference:s}: {rpkm:.5f}\n'.format(**r) for r in self.most_abundant_refs])
        result += '\n'
        result += 'Lengths\n'
        result += '-----\n'
        result += str(self.lengths)
        return result
def _flag_to_binary(flag):
    'It returns the indexes of the bits sets to 1 in the given flag'
    set_bits = []
    for index, bitmask in enumerate(SAM_FLAG_BINARIES):
        if bitmask & flag:
            set_bits.append(index)
    return set_bits
class ReadStats(object):
    """Mapping-quality and SAM-flag counts over the mapped reads of BAMs."""

    def __init__(self, bams):
        # TODO flag, read_group
        self._bams = bams
        self._mapqs = IntCounter()
        self._flag_counts = {}
        self._count_mapqs()

    def _count_mapqs(self):
        # Tally mapq values and flag bits in one pass over the mapped reads.
        mapqs = self._mapqs
        bit_counts = [0] * len(SAM_FLAG_BINARIES)
        for bam in self._bams:
            for read in bam:
                if read.is_unmapped:
                    continue
                mapqs[read.mapq] += 1
                for bit_index in _flag_to_binary(read.flag):
                    bit_counts[bit_index] += 1
        # Expose the counts keyed by the human-readable flag names.
        for flag_bin, count in zip(SAM_FLAG_BINARIES, bit_counts):
            self._flag_counts[SAM_FLAGS[flag_bin]] = count

    @property
    def mapqs(self):
        return self._mapqs

    @property
    def flag_counts(self):
        return self._flag_counts
class CoverageCounter(IntCounter):
    """Histogram of per-position coverage across one or more BAM files."""

    def __init__(self, bams):
        self._bams = bams
        self._count_cov()

    def _count_cov(self):
        # Each pileup column is one reference position; the number of reads
        # piled up there is the coverage value being histogrammed.
        for bam in self._bams:
            for col in bam.pileup():
                self[len(col.pileups)] += 1
def get_reference_counts_dict(bam_fpaths):
    'It gets a list of bams and returns a dict indexed by reference'
    counts = {}
    for bam_fpath in bam_fpaths:
        for entry in get_reference_counts(bam_fpath):
            ref_name = entry['reference']
            if ref_name not in counts:
                # First sighting of this reference: start its accumulators.
                counts[ref_name] = {'mapped_reads': 0, 'unmapped_reads': 0,
                                    'length': entry['length']}
            # Every BAM must agree on the length of each reference.
            assert entry['length'] == counts[ref_name]['length']
            counts[ref_name]['mapped_reads'] += entry['mapped_reads']
            counts[ref_name]['unmapped_reads'] += entry['unmapped_reads']
    return counts
def get_reference_counts(bam_fpath):
    'Using samtools idxstats it generates dictionaries with read counts'
    cmd = [get_binary_path('samtools'), 'idxstats', bam_fpath]
    idx_process = Popen(cmd, stdout=PIPE)
    # we're not using pysam.idxstats here because the stdout differed
    # depending on how the tests were run
    for line in idx_process.stdout:
        ref_name, ref_length, mapped_reads, unmapped_reads = line.split()
        if ref_name == '*':
            # The '*' record gathers reads with no reference assigned.
            ref_name, ref_length = None, None
        else:
            ref_length = int(ref_length)
        yield {'reference': ref_name, 'length': ref_length,
               'mapped_reads': int(mapped_reads),
               'unmapped_reads': int(unmapped_reads)}
# Mapping-quality thresholds for which GenomeCoverages builds histograms.
MAPQS_TO_CALCULATE = (0, 20, 30, 40)
class GenomeCoverages(object):
    """Coverage histograms, one per mapping-quality threshold."""

    def __init__(self, bam_fhands, mapqs=MAPQS_TO_CALCULATE):
        self._bam_fhands = bam_fhands
        self.mapqs_to_calculate = mapqs
        self._counters = {mapq: IntCounter() for mapq in mapqs}
        self._calculate()

    def __len__(self):
        return len(self._counters)

    def _calculate(self):
        # Walk every pileup column of every BAM and histogram its depth.
        for bam_fhand in self._bam_fhands:
            samfile = Samfile(bam_fhand.name)
            for column in samfile.pileup(stepper='all', max_depth=100000):
                self._add(column)

    def _add(self, column):
        # For every threshold, count the reads in this column whose mapq
        # strictly exceeds it, then record that depth in the histogram.
        depth_per_threshold = Counter()
        for pileup_read in column.pileups:
            read_mapq = pileup_read.alignment.mapq
            for threshold in self.mapqs_to_calculate:
                if read_mapq > threshold:
                    depth_per_threshold[threshold] += 1
        for threshold, depth in depth_per_threshold.items():
            self._counters[threshold][depth] += 1

    def get_mapq_counter(self, mapq):
        return self._counters.get(mapq, None)
def get_genome_coverage(bam_fhands):
    'It builds a genome-wide coverage histogram using bedtools genomecov'
    coverage_hist = IntCounter()
    for bam_fhand in bam_fhands:
        cmd = [get_binary_path('bedtools'), 'genomecov', '-ibam',
               bam_fhand.name]
        cover_process = Popen(cmd, stdout=PIPE)
        for line in cover_process.stdout:
            # Only the genome-wide summary rows are of interest.
            if not line.startswith('genome'):
                continue
            cov, value = line.split('\t')[1: 3]
            coverage_hist[int(cov)] += int(value)
    return coverage_hist
def counter_to_scatter_group(coverage_hist):
    'It converts the histogram into the format scatter_draw understands'
    # One (x, y) pair per integer coverage value from 0 up to the maximum,
    # including values with zero counts.
    xs, ys = array('l'), array('l')
    for value in range(coverage_hist.max + 1):
        xs.append(value)
        ys.append(coverage_hist[value])
    return {'x': xs, 'y': ys}
def get_bam_readgroups(bam):
    'It returns the read groups of the BAM header, or None when absent'
    header = bam.header
    if 'RG' not in header:
        return None
    # Return a fresh list so callers cannot mutate the header's own list.
    return list(header['RG'])
def get_rg_from_alignedread(read):
    'It returns the read group id of the aligned read, or None if untagged'
    # Scan the read's tags for the first RG entry.
    for tag, value in read.tags:
        if tag == 'RG':
            return value
    return None
def mapped_count_by_rg(bam_fpaths, mapqx=None):
    '''It counts mapped and unmapped reads per read group over BAM files.

    Returns a dict mapping read-group id (or the BAM file's basename when no
    read groups are declared) to an IntCounter with 'mapped'/'unmapped'
    counts, plus 'bigger_mapqx' when ``mapqx`` is given.
    '''
    do_mapqx = mapqx is not None
    counter_by_rg = {}
    for bam_fpath in bam_fpaths:
        bam = pysam.Samfile(bam_fpath, 'rb')
        # BUG FIX: the basename must be computed for every file. Previously
        # it was only bound when the header declared no read groups, so a
        # read without an RG tag in a BAM *with* read groups raised
        # NameError (or silently used a stale value from a previous file).
        bam_basename = os.path.splitext(os.path.basename(bam_fpath))[0]
        readgroups = get_bam_readgroups(bam)
        if readgroups is None:
            readgroups = [bam_basename]
        else:
            readgroups = [rg['ID'] for rg in readgroups]
        for readgroup in readgroups:
            counter = IntCounter({'unmapped': 0, 'mapped': 0})
            if do_mapqx:
                counter['bigger_mapqx'] = 0
            counter_by_rg[readgroup] = counter
        for read in bam:
            rg = get_rg_from_alignedread(read)
            if rg is None:
                rg = bam_basename
            counter = counter_by_rg.get(rg)
            if counter is None:
                # Read carries an RG id not declared in the header (or fell
                # back to the basename): create its counter lazily instead
                # of raising KeyError.
                counter = IntCounter({'unmapped': 0, 'mapped': 0})
                if do_mapqx:
                    counter['bigger_mapqx'] = 0
                counter_by_rg[rg] = counter
            if do_mapqx and read.mapq >= mapqx:
                counter['bigger_mapqx'] += 1
            if read.is_unmapped:
                counter['unmapped'] += 1
            else:
                counter['mapped'] += 1
    return counter_by_rg
|
pziarsolo/bam_crumbs
|
bam_crumbs/statistics.py
|
Python
|
gpl-3.0
| 12,923
|
[
"pysam"
] |
9f610f6a58dcf4b6f66168a04a1bcb6c14e34ffe38e71b76926065d2260903e8
|
import io
import os
import sys
from configparser import ConfigParser
import fsbc.application
from fsgamesys.application import ApplicationMixin
from fsgamesys.context import fsgs
from fsgamesys.Database import Database
from fsgamesys.util.gamenameutil import GameNameUtil
class Application(ApplicationMixin, fsbc.application.Application):
    # Thin composition of the shared ApplicationMixin with the fsbc base
    # application class; no extra behavior is added here.
    pass
def main():
    # Entry point for the ``fsgs run <game>`` command-line tool.
    Application("fs-uae-game-system")
    # Hidden/unsupported mode: serve the HTTP interface instead of running.
    if "--unsupported" in sys.argv:
        if "--http-server" in sys.argv:
            from fsgamesys.http.server import http_server_main
            return http_server_main()
    if len(sys.argv) < 3:
        print("")
        print("usage: fsgs run <game>")
        print("")
        print("game:")
        print(" - search term(s) identifying a single game")
        print(" - path to a .fsgs file")
        print(" - path to a recognized cartridge ROM or disk file format")
        print("")
        sys.exit(1)
    assert sys.argv[1] == "run"
    game_arg = " ".join(sys.argv[2:])
    print(game_arg)
    if os.path.exists(game_arg):
        # Argument is an existing file: load it directly.
        load_file(game_arg)
    else:
        # Otherwise treat the argument as a database search query.
        search = game_arg.lower()
        database = Database.instance()
        # cursor.execute("SELECT id FROM game WHERE name like")
        terms = GameNameUtil.extract_search_terms(search)
        found_games = database.find_games_new(" ".join(terms))
        games = []
        for game in found_games:
            print(list(game))
            if game[0]:
                # only process entries with a game uuid
                games.append(game)
        game_uuid = None
        if len(games) == 0:
            print("no games found")
            sys.exit(2)
        if len(games) > 1:
            # With several hits, require exactly one exact title match;
            # otherwise list the candidates and abort.
            matches = 0
            for row in games:
                if row[1].lower() == search:
                    if game_uuid is None:
                        game_uuid = row[0]
                    matches += 1
            if matches != 1:
                print("")
                print("More than one game matches:")
                print("")
                for row in games:
                    print(" {0} ({1})".format(row[1], row[2]))
                    print(" {0}".format(row[0]))
                print("")
                sys.exit(3)
        # NOTE(review): this unconditionally takes the first result, which
        # overwrites any exact-title match selected above when several games
        # were found — confirm whether that is intended.
        game_uuid = games[0][0]
        assert game_uuid
        variant_uuid = find_preferred_variant(game_uuid)
        load_game_variant(variant_uuid)
    fsgs.run_game()
def find_preferred_variant(game_uuid):
    # Delegate to the global fsgs context to pick the best variant UUID
    # for the given game UUID.
    return fsgs.find_preferred_game_variant(game_uuid)
def load_game_variant(variant_uuid):
    # Delegate to the global fsgs context to load the chosen variant.
    return fsgs.load_game_variant(variant_uuid)
def load_file(path):
    """Load a game from *path*, dispatching on the file extension.

    ``.fs-uae``/``.fsgs`` files are parsed as config files; recognized
    disk/tape image extensions get a minimal platform config. Paths with
    an unknown extension are silently ignored.
    """
    config = {}
    # Only the extension matters for dispatch; the stem was previously
    # bound to an unused local.
    ext = os.path.splitext(path)[1]
    if ext in [".fs-uae", ".fsgs"]:
        return load_config_file(path)
    elif ext == ".st":
        # Atari ST floppy image.
        config["platform"] = "atari-st"
        config["floppy_drive_0"] = path
    elif ext in [".adf", ".dms", ".ipf"]:
        # Amiga floppy image formats.
        config["platform"] = "amiga"
        config["floppy_drive_0"] = path
    elif ext in [".tap"]:
        # Commodore 64 tape image.
        config["platform"] = "commodore-64"
        config["tape_drive"] = path
    if config:
        load_config(config)
def load_config_file(fsgs_file):
    """Parse a .fsgs/.fs-uae config file and load its [fsgs] section."""
    parser = ConfigParser()
    with io.open(fsgs_file, "r", encoding="UTF-8") as f:
        parser.read_file(f)
    config = {}
    if parser.has_section("fsgs"):
        # Normalize option names to lower-case snake_case keys.
        for key in parser.options("fsgs"):
            config[key.lower().replace("-", "_")] = parser.get("fsgs", key)
    load_config(config)
def load_config(config):
    # Push the parsed options into the global fsgs context; "platform" is
    # a required key here.
    fsgs.config.load(config)
    fsgs.game.platform.id = config["platform"]
    # NOTE(review): game/variant identity is hard-coded to placeholder
    # values ("Default Game"/"Default Variant") — confirm this is the
    # intended behavior for ad-hoc file loading.
    fsgs.game.uuid = "7bc9ae8b-e454-4108-87fe-6aac09cfb1e9"
    fsgs.game.name = "Default Game"
    fsgs.game.variant.uuid = "973d787f-2cc4-4d8d-b0c1-1bd911ef407a"
    fsgs.game.variant.name = "Default Variant"
|
FrodeSolheim/fs-uae-launcher
|
fsgamesys/main.py
|
Python
|
gpl-2.0
| 3,786
|
[
"ADF"
] |
79ee3194ac2b6ccb52b3c4193919576e1ed99a8d98c1629936ff60ac82f5416d
|
# -*- Mode: Python; coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2013 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
import gobject
import gtk
from stoqlib.gui.base.dialogs import run_dialog
from stoqlib.gui.dialogs.feedbackdialog import FeedbackDialog
from stoqlib.lib.translation import stoqlib_gettext as _
class ShellStatusbar(gtk.Statusbar):
    """Status bar for the Stoq shell window.

    Hides the stock gtk.Statusbar content and installs a custom message
    area holding the status text and a 'Feedback' button.
    """
    __gtype_name__ = 'ShellStatusbar'
    def __init__(self, window):
        # window: the shell window owning this bar; used to tell which
        # application screen a feedback report refers to.
        gtk.Statusbar.__init__(self)
        self._disable_border()
        self.message_area = self._create_message_area()
        self._create_default_widgets()
        self.shell_window = window
    def _disable_border(self):
        # Disable border on statusbar
        children = self.get_children()
        if children and isinstance(children[0], gtk.Frame):
            frame = children[0]
            frame.set_shadow_type(gtk.SHADOW_NONE)
    def _create_message_area(self):
        # Hide the stock statusbar children and install an HBox that
        # will hold our own widgets instead.
        for child in self.get_children():
            child.hide()
        area = gtk.HBox(False, 4)
        self.add(area)
        area.show()
        return area
    def _create_default_widgets(self):
        # Build the text label, separators and feedback button inside
        # the message area.
        alignment = gtk.Alignment(0.0, 0.0, 1.0, 1.0)
        # FIXME: These looks good on Mac, might need to tweak
        # on Linux to look good
        alignment.set_padding(2, 3, 5, 5)
        self.message_area.pack_start(alignment, True, True)
        alignment.show()
        widget_area = gtk.HBox(False, 0)
        alignment.add(widget_area)
        widget_area.show()
        self._text_label = gtk.Label()
        self._text_label.set_alignment(0.0, 0.5)
        widget_area.pack_start(self._text_label, True, True)
        self._text_label.show()
        vsep = gtk.VSeparator()
        widget_area.pack_start(vsep, False, False, 0)
        vsep.show()
        # Imported here rather than at module level -- presumably to
        # avoid an import cycle; TODO confirm.
        from stoqlib.gui.stockicons import STOQ_FEEDBACK
        self._feedback_button = gtk.Button(_('Feedback'))
        image = gtk.Image()
        image.set_from_stock(STOQ_FEEDBACK, gtk.ICON_SIZE_MENU)
        self._feedback_button.set_image(image)
        image.show()
        # Keep keyboard focus inside the main application widgets.
        self._feedback_button.set_can_focus(False)
        self._feedback_button.connect('clicked',
                                      self._on_feedback__clicked)
        self._feedback_button.set_relief(gtk.RELIEF_NONE)
        widget_area.pack_start(self._feedback_button, False, False, 0)
        self._feedback_button.show()
        vsep = gtk.VSeparator()
        widget_area.pack_start(vsep, False, False, 0)
        vsep.show()
    def do_text_popped(self, ctx, text):
        # gtk.Statusbar virtual method: mirror popped messages into our
        # custom label (the stock label is hidden).
        self._text_label.set_label(text)
    def do_text_pushed(self, ctx, text):
        # gtk.Statusbar virtual method: mirror pushed messages likewise.
        self._text_label.set_label(text)
    #
    # Callbacks
    #
    def _on_feedback__clicked(self, button):
        # Report feedback against the current application, or the
        # launcher when no application is open.
        if self.shell_window.current_app:
            screen = self.shell_window.current_app.app_name + ' application'
        else:
            screen = 'launcher'
        run_dialog(FeedbackDialog, self.get_toplevel(), screen)
gobject.type_register(ShellStatusbar)
|
andrebellafronte/stoq
|
stoq/gui/shell/statusbar.py
|
Python
|
gpl-2.0
| 3,793
|
[
"VisIt"
] |
61491a658c6419d53c698d2a3254c43d19e71e5b538b0548f68e7e7a6da9c1db
|
"""Classes of random variables."""
import numpy as np
import numpy.random as rnd
from numpy import sqrt
from struct_tools import NicePrint
from dapper.tools.matrices import CovMat
class RV(NicePrint):
    """A generic random variable, configured via keyword options.

    Exactly one sampling mechanism should be supplied through the
    keyword arguments (``is0``, ``func``, ``file``, ``icdf``, ``cdf``
    or ``pdf``); see ``__init__``.
    """
    printopts = NicePrint.printopts.copy()
    printopts["ordering"] = "linenumber"
    printopts["reverse"] = True
    def __init__(self, M, **kwargs):
        """Create an M-dimensional random variable.

        Parameters
        ----------
        M: int
            Dimensionality (ndim).
        **kwargs
            One of the following sampling specifications:

            - ``is0`` (bool): identically-zero variable.
            - ``func`` (callable): ``func(N)`` returns an (N, M) sample.
              Example: ``RV(M=4, func=lambda N: rand(N, 4))``.
            - ``file`` (str): draw from an ``.npz`` file containing
              ``sample`` (and, optionally, weights ``w``).
            - ``icdf`` (callable): marginal/independent inverse-transform
              sampling.  Example: ``RV(M=4, icdf=scipy.stats.norm.ppf)``.
            - ``cdf`` (callable): like ``icdf`` but the inverse cdf is
              approximated by interpolation.
            - ``pdf`` (callable): acceptance-rejection sampling
              (not implemented).

            The raw options are for experimentation only; prefer
            subclasses such as ``GaussRV`` for anything serious.
        """
        self.M = M
        for name in kwargs:
            setattr(self, name, kwargs[name])
    def sample(self, N):
        """Draw N realizations; returns an (N, M) array."""
        if getattr(self, 'is0', False):
            # The identically-zero random variable.
            E = np.zeros((N, self.M))
        elif hasattr(self, 'func'):
            # Delegate entirely to the user-supplied sampler.
            E = self.func(N)
        elif hasattr(self, 'file'):
            # Resample (with replacement) from a stored, possibly
            # weighted, ensemble.
            data = np.load(self.file)
            stored = data['sample']
            n_stored = len(stored)
            weights = data['w'] if 'w' in data else np.ones(n_stored)/n_stored
            E = stored[rnd.choice(n_stored, N, replace=True, p=weights)]
        elif hasattr(self, 'icdf'):
            # Inverse-transform sampling, independently per dimension.
            E = np.vectorize(self.icdf)(rnd.rand(N, self.M))
        elif hasattr(self, 'cdf'):
            # As above, but invert the cdf numerically (once) and cache
            # the interpolated inverse on the instance.
            if not hasattr(self, 'icdf_interp'):
                from scipy.interpolate import interp1d
                from scipy.optimize import fsolve
                cdf = self.cdf
                Left, = fsolve(lambda x: cdf(x) - 1e-9, 0.1)  # noqa
                Right, = fsolve(lambda x: cdf(x) - (1-1e-9), 0.1)  # noqa
                grid = np.linspace(Left, Right, 1001)
                cum = np.vectorize(cdf)(grid)
                self.icdf_interp = np.vectorize(interp1d(cum, grid))
            E = self.icdf_interp(rnd.rand(N, self.M))
        elif hasattr(self, 'pdf'):
            # Acceptance-rejection sampling: not implemented.
            raise NotImplementedError
        else:
            raise KeyError
        assert self.M == E.shape[1]
        return E
# TODO 4: improve constructor (treatment of arg cases is too fragile).
class RV_with_mean_and_cov(RV):
    """Generic multivariate random variable characterized by mean and cov.
    This class must be subclassed to provide sample(),
    i.e. its main purpose is provide a common convenience constructor.
    """
    def __init__(self, mu=0, C=0, M=None):
        """Init allowing for shortcut notation.

        Parameters
        ----------
        mu: scalar or 1d array-like
            Mean.  A scalar is broadcast to length M when M is known.
        C: 0, scalar, array-like, or CovMat
            Covariance.  The scalar 0 means exactly zero spread.
        M: int, optional
            Dimensionality; deduced from mu or C when not given.
        """
        if isinstance(mu, CovMat):
            raise TypeError("Got a covariance paramter as mu. "
                            + "Use kword syntax (C=...) ?")
        # Set mu
        mu = np.atleast_1d(mu)
        assert mu.ndim == 1
        if len(mu) > 1:
            # Vector mu fixes M, or must agree with an explicit M.
            if M is None:
                M = len(mu)
            else:
                assert len(mu) == M
        else:
            # Scalar mu: broadcast to length M when M is known.
            if M is not None:
                mu = np.ones(M)*mu
        # Set C
        if isinstance(C, CovMat):
            if M is None:
                M = C.M
        else:
            if np.isscalar(C) and C == 0:
                pass  # Assign as pure 0!
            else:
                if np.isscalar(C):
                    # Scalar C: diagonal covariance C*I, sized from mu.
                    M = len(mu)
                    C = CovMat(C*np.ones(M), 'diag')
                else:
                    C = CovMat(C)
                    if M is None:
                        M = C.M
        # Validation
        if len(mu) not in (1, M):
            raise TypeError("Inconsistent shapes of (M,mu,C)")
        if M is None:
            raise TypeError("Could not deduce the value of M")
        try:
            if M != C.M:
                raise TypeError("Inconsistent shapes of (M,mu,C)")
        except AttributeError:
            # C is the scalar 0 (no .M attribute): nothing to check.
            pass
        # Assign
        self.M = M    # ndim
        self.mu = mu  # mean (length 1 or M)
        self.C = C    # covariance (CovMat, or the scalar 0)
    def sample(self, N):
        """Sample N realizations. Returns N-by-M (ndim) sample matrix.
        Example
        -------
        >>> plt.scatter(*(UniRV(C=randcov(2)).sample(10**4).T)) # doctest: +SKIP
        """
        if self.C == 0:
            # Degenerate case: no spread at all.
            D = np.zeros((N, self.M))
        else:
            D = self._sample(N)
        return self.mu + D
    def _sample(self, N):
        # Subclasses generate the N-by-M zero-mean anomalies here.
        raise NotImplementedError("Must be implemented in subclass")
class GaussRV(RV_with_mean_and_cov):
    """Gaussian (Normal) multivariate random variable."""
    def _sample(self, N):
        # Transform iid standard normals by a right square-root of C.
        sqrt_right = self.C.Right
        return rnd.randn(N, len(sqrt_right)) @ sqrt_right
class LaplaceRV(RV_with_mean_and_cov):
    """Laplace (double exponential) multivariate random variable.
    This is an elliptical generalization. Ref:
    Eltoft (2006) "On the Multivariate Laplace Distribution".
    """
    def _sample(self, N):
        root = self.C.Right
        # Scale each Gaussian draw by an exponential mixing variable,
        # yielding the elliptical Laplace distribution; sqrt(2)
        # normalizes the covariance to C.
        mix = rnd.exponential(1, N)
        gauss = rnd.randn(N, len(root))
        return (mix[:, None] * gauss) @ root / sqrt(2)
class LaplaceParallelRV(RV_with_mean_and_cov):
    """A NON-elliptical multivariate version of Laplace (double exponential) RV."""
    def _sample(self, N):
        # R = self.C.Right     # contour: sheared rectangle
        root = self.C.sym_sqrt  # contour: rotated rectangle
        draws = rnd.laplace(0, 1, (N, len(root)))
        return draws @ root / sqrt(2)
class StudRV(RV_with_mean_and_cov):
    """Student-t multivariate random variable.

    Assumes the covariance exists, which requires
    degree-of-freedom (dof) > 1 + ndim.  The dof must also be an
    integer, since the chi2 variable is sampled via Gaussians.
    """
    def __init__(self, dof, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dof = dof
    def _sample(self, N):
        root = self.C.Right
        nu = self.dof
        # Inverse-chi2 scaling variable, built from nu standard normals.
        scale = nu/np.sum(rnd.randn(N, nu)**2, axis=1)
        draws = sqrt(scale)[:, None]*rnd.randn(N, len(root))
        # The (nu-2)/nu factor normalizes the covariance to C.
        return draws @ root * sqrt((nu-2)/nu)
class UniRV(RV_with_mean_and_cov):
    """Uniform multivariate random variable.
    Has an elliptic-shape support.
    Ref: Voelker et al. (2017) "Efficiently sampling
    vectors and coordinates from the n-sphere and n-ball"
    """
    def _sample(self, N):
        root = self.C.Right
        dirs = rnd.randn(N, len(root))
        # Normalize onto the unit sphere, then pull inwards with radius
        # distributed as U**(1/d) to fill the ball uniformly.
        radius = rnd.rand(N)**(1/len(root)) / np.sqrt(np.sum(dirs**2, axis=1))
        return (radius[:, None]*dirs) @ root * 2
class UniParallelRV(RV_with_mean_and_cov):
    """Uniform multivariate random variable.
    Has a parallelogram-shaped support, as determined by the cholesky factor
    applied to the (corners of) the hypercube.
    """
    def _sample(self, N):
        root = self.C.Right
        cube = rnd.rand(N, len(root)) - 0.5
        # sqrt(12) rescales the unit-interval variance (1/12) to 1.
        return cube @ root * sqrt(12)
|
nansencenter/DAPPER
|
dapper/tools/randvars.py
|
Python
|
mit
| 7,783
|
[
"Gaussian"
] |
0f243550c4659986f674e5adeb80b1ae80d531df34b79a322a73f1884d2d39a7
|
#############################################################################
## Pipeline is now on github: https://github.com/pminguez/geneticaPipeline ##
#############################################################################
import sys
from glob import glob
from subprocess import call
import argparse
import time
def countdown(t):
    """Block for t seconds, showing a live countdown on stdout.

    The remaining time is rewritten in place (via '\r') once per second,
    counting t, t-1, ..., 0.  Fixes two defects of the original:
    it slept once more *after* displaying 0 (total t+1 seconds), and it
    discarded the minutes from divmod(), so t >= 60 displayed only the
    seconds remainder.
    """
    for remaining in range(t, -1, -1):
        mins, secs = divmod(remaining, 60)
        if mins:
            timeformat = '{:02d}:{:02d}'.format(mins, secs)
        else:
            timeformat = '{:02d}'.format(secs)
        sys.stdout.write('\rRunning in ' + timeformat + ' secs')
        sys.stdout.flush()
        # Sleep only between displays, not after the final "0".
        if remaining > 0:
            time.sleep(1)
# ---------------------------------------------------------------------------
# Command-line interface.  -I (input folder) is effectively mandatory; the
# remaining options tune threading/parallelism and optional pipeline steps.
# NOTE: this script is Python 2 (print statements throughout).
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description="Process Fastq files for getting variants")
parser.add_argument("-u", action="store", dest='user',
                    help="user name to look/store in the correct dir")
parser.add_argument("-I", action="store",dest='input',
                    help="path to input folder")
parser.add_argument("-T", action="store",dest='threads', type = int, default = 16,
                    help="specify number of threads to use")
parser.add_argument("-J", action="store",dest='parallelization', type = int, default = 5,
                    help="specify number of samples to run in parallel")
parser.add_argument("-duplicates", action="store_true",
                    help="set this flag to markduplicates with picardtools")
parser.add_argument("-local", action="store_true",
                    help="set this flag to run the pipeline using local paths")
args = parser.parse_args()
# Abort early, with usage information, when no input folder was given.
if args.input == None:
    print ''
    print 'ERROR: An input folder containing fastq files is needed'
    print ''
    parser.print_help()
    exit()
#Importing samples
# Collect the paired-end fastq files.  sorted() keeps the R1 and R2 lists
# aligned index-by-index.  NOTE(review): args.input must end with a path
# separator for these globs to work -- confirm callers pass "dir/".
forward_paths = sorted(glob(args.input + '*_R1.fastq.gz'))
reverse_paths = sorted(glob(args.input + '*_R2.fastq.gz'))
if forward_paths == []:
    print ''
    print 'ERROR: No fastq files detected in ' + args.input + '.\nFastq files names should be named: name_R1.fastq.gz and name_R2.fastq.gz'
    print ''
    exit()
# Paired-end data: every R1 file needs exactly one matching R2 file.
if len(forward_paths) != len(reverse_paths):
    print ''
    print 'ERROR: Different number of forward and reverse fastq files detected. PLEASE CHECK.'
    print ''
    exit()
# Echo the run configuration and give the operator a 15 second window to
# review it and abort (Ctrl-C) before any work starts.
print '---------------------------------------------------------------------------------------------'
print '                               *****Running DAguilera Pipeline*****                          '
print '---------------------------------------------------------------------------------------------'
print ''
print 'Number of samples to analyze: ' + str(len(forward_paths))
print ''
print 'ARGUMENTS:'
print ''
print '          -User: ' + str(args.user)
print '          -Input: ' + str(args.input)
print '          -Threads: ' + str(args.threads)
print '          -Sample to parallelizate: ' + str(args.parallelization)
print '          -MarkDuplicates: ' + str(args.duplicates)
print '          -Running local: ' + str(args.local)
print ''
print '---------------------------------------------------------------------------------------------'
print 'Please review the arguments and number of samples to process...'
print ''
countdown(15)
print ''
print '---------------------------------------------------------------------------------------------'
# ---------------------------------------------------------------------------
# Locations of the reference genome and third-party tools.  The commented
# block below held the older local-workstation paths; the active paths point
# at the shared /mnt/genetica installation.  NOTE(review): the -local flag no
# longer switches paths here (only the final formatting step uses it) --
# confirm this is intentional.
# ---------------------------------------------------------------------------
#if args.local:
#    genome_ref = "/home/"+ user +"/Documents/genome_data/hg19/ucsc.hg19.fasta"
#    picardtools = "/mnt/datos1/GeneticaPipeDB/software/picard-tools-2.1.1/picard.jar"
#    gatk = '/mnt/datos1/GeneticaPipeDB/software/GenomeAnalysisTK-3.5/GenomeAnalysisTK.jar'
#    hg19_path = "/home/daguilera/Documents/genome_data/hg19/"
#    annovar = "/mnt/datos1/GeneticaPipeDB/software/annovar/table_annovar.pl"
#    annovarDB = "/mnt/datos1/GeneticaPipeDB/software/annovar/humandb"
#    genome_fai = '/mnt/datos2/d.aguilera/CNVs_analysis/CoNVaDING-1.1.6/ucsc.hg19_convading.fasta.fai'
#else:
genome_ref = "/mnt/genetica/GeneticaPipeDB/genome_data/hg19/ucsc.hg19.fasta"
picardtools = "/mnt/genetica/GeneticaPipeDB/software/picard-tools-2.1.1/picard.jar"
gatk = "/mnt/genetica/GeneticaPipeDB/software/GenomeAnalysisTK-3.5/GenomeAnalysisTK.jar"
hg19_path = "/mnt/genetica/GeneticaPipeDB/genome_data/hg19/"
annovar = "/mnt/genetica/GeneticaPipeDB/software/annovar/table_annovar.pl"
annovarDB = "/mnt/genetica/GeneticaPipeDB/software/annovar/humandb"
#genome_fai = "/home/"+ user +"/genetica2/d.aguilera/CNVs_analysis/CoNVaDING-1.1.6/ucsc.hg19_convading.fasta.fai"
print '                                   Mapping fastq files (BWA)                                  '
print '----------------------------------------------------------------------------------------------'
#Load genome reference to memory
call('bwa shm ' + genome_ref,shell = True)
#Loop samples for BWA
for i in range(0,len(forward_paths)):
    # sample_path: directory of the fastq; sample_name: basename up to "_R".
    sample_path = forward_paths[i][:forward_paths[i].rfind('/')+1]
    sample_name = forward_paths[i][forward_paths[i].rfind('/')+1:forward_paths[i].rfind('_R')]
    # bwa mem with an @RG read-group header (required later by GATK).
    call('bwa mem -t' + str(args.threads) + ' -R "@RG\tID:' + sample_name + '\tLB:library\tPL:illumina\tPU:library\tSM:' + sample_name + '" ' + genome_ref + ' ' + forward_paths[i] + ' ' + reverse_paths[i] + ' > ' + sample_path + '/' + sample_name + '_bwa.sam',shell = True)
#Unload genome reference
call('bwa shm -d',shell = True)
print '----------------------------------------------------------------------------------------------'
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Sorting and creating bam and bai files of samples...'
print '----------------------------------------------------------------------------------------------'
# Sort and index every SAM with samtools, fanned out via GNU parallel.
# NOTE(review): sample_path here is whatever the last loop iteration left
# behind -- this assumes all fastqs live in the same folder; confirm.
call("find " + sample_path + "*.sam | parallel --no-notice -j" + str(args.parallelization) + " 'samtools sort {} -O BAM -@ " + str(args.threads / 2) + " -o {}_sorted.bam && samtools index {}_sorted.bam'", shell = True)
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] OK!                                                                      '
print '----------------------------------------------------------------------------------------------'
# Downstream input pattern: plain sorted BAMs, unless duplicate marking
# (below) replaces it with the de-duplicated BAMs.
indelrealigner_input = '*_sorted.bam'
#MarkDuplicates with picardtools
if args.duplicates:
    print '----------------------------------------------------------------------------------------------'
    print '[DAguilera_Pipeline] Marking Duplicates...                                                    '
    print '----------------------------------------------------------------------------------------------'
    call("find " + sample_path + "*_sorted.bam | parallel --no-notice -j" + str(args.parallelization) + " 'java -Xmx9g -jar " + picardtools + " \
    MarkDuplicates \
    I= {} \
    O= {}_dedupped.bam \
    CREATE_INDEX=true \
    VALIDATION_STRINGENCY=SILENT \
    TMP_DIR= " + sample_path + "working_temp \
    M= {}_duplicate_metrics.txt'",shell = True)
    print '----------------------------------------------------------------------------------------------'
    print '[DAguilera_Pipeline] Marking Duplicates...OK!                                                 '
    print '----------------------------------------------------------------------------------------------'
    # IndelRealigner now consumes the de-duplicated BAMs instead.
    indelrealigner_input = '*_dedupped.bam'
#Empieza GATK
#Indel Realignment with GATK
# Realign reads around known indel sites (1000G + Mills), using the
# precomputed target intervals in hg19_path.
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Doing IndelRealigment...                                                 '
print '---------------------------------------------------------------------------------------------'
call("find " + sample_path + indelrealigner_input + " | parallel --no-notice -j" + str(args.parallelization) + " 'java -Xmx9g -jar " + gatk + " \
    -T IndelRealigner \
    -R " + genome_ref + " \
    -I {} \
    -o {}_indelrealigned.bam \
    -targetIntervals " + hg19_path + "hg19_indels_output.intervals \
    -known " + hg19_path + "1000G_phase1.indels.hg19.sites.vcf \
    -known " + hg19_path + "Mills_and_1000G_gold_standard.indels.hg19.sites.vcf \
    -LOD 0.4 \
    --consensusDeterminationModel KNOWNS_ONLY'",shell=True)
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Doing IndelRealigment...OK!'
print '----------------------------------------------------------------------------------------------'
#Quality
# BQSR step 1: model systematic base-quality errors against known sites.
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Doing Base Quality Score Recalibration (Step1)...'
print '----------------------------------------------------------------------------------------------'
call("find " + sample_path + "*_indelrealigned.bam | parallel -j" + str(args.parallelization) + " 'java -Xmx9g -jar " + gatk + " \
    -T BaseRecalibrator \
    -R " + genome_ref + " \
    -I {} \
    -knownSites " + hg19_path + "1000G_phase1.indels.hg19.sites.vcf \
    -knownSites " + hg19_path + "Mills_and_1000G_gold_standard.indels.hg19.sites.vcf \
    -knownSites " + hg19_path + "dbsnp_138.hg19.vcf \
    -o {}_recal_data.table'",shell=True)
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Doing Base Quality Score Recalibration...(Step1)OK!'
print '----------------------------------------------------------------------------------------------'
# BQSR step 2: write recalibrated BAMs using the tables from step 1.
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Doing Base Quality Score Recalibration (Step2)...'
print '----------------------------------------------------------------------------------------------'
call("find " + sample_path + "*_indelrealigned.bam | parallel -j" + str(args.parallelization) + " 'java -Xmx9g -jar " + gatk + " \
    -T PrintReads \
    -R " + genome_ref + " \
    -I {} \
    -BQSR {}_recal_data.table \
    -o {}_bqsr.bam'",shell=True)
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Doing Base Quality Score Recalibration (Step2)...OK!'
print '----------------------------------------------------------------------------------------------'
#Crea archivo gVCF desde BAM
# Per-sample variant calling: HaplotypeCaller in GVCF mode.
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Calling the variants...'
print '----------------------------------------------------------------------------------------------'
call("find " + sample_path + "*_bqsr.bam | parallel -j" + str(args.parallelization) + " 'java -Xmx9g -jar " + gatk + " \
    -T HaplotypeCaller \
    -R " + genome_ref + " \
    -I {} \
    --emitRefConfidence GVCF \
    --variant_index_type LINEAR \
    --variant_index_parameter 128000 \
    -o {}.g.vcf'",shell=True)
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Calling the variants...OK!'
print '----------------------------------------------------------------------------------------------'
#Crea VCF desde gVCF
# Genotype each gVCF individually (single-sample mode); -j1 because
# GenotypeGVCFs itself multithreads via -nt.
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Genotyping in single mode...'
print '----------------------------------------------------------------------------------------------'
call("find " + sample_path + "*.g.vcf | parallel -j1 'java -Xmx28g -jar " + gatk + " \
    -T GenotypeGVCFs \
    -nt " + str(args.threads) + " \
    -R " + genome_ref + " \
    -V {} \
    -o {}_singleGT_raw.vcf'",shell = True)
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Genotyping in single mode...OK!'
print '----------------------------------------------------------------------------------------------'
#Empieza annovar
# Annotate every raw single-genotype VCF with ANNOVAR, then reformat and
# filter the resulting multianno table.
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Annotating Variants...'
print '----------------------------------------------------------------------------------------------'
variants = sorted(glob(args.input + '*singleGT_raw.vcf'))
for vcffile in variants:
    sample_path = vcffile[:vcffile.rfind('/')+1]
    sample_name = vcffile[vcffile.rfind('/')+1:vcffile.rfind('singleGT_raw.vcf')]
    output = sample_path + sample_name
    call(annovar + ' ' + vcffile + ' ' + annovarDB + ' -buildver hg19 \
    -out ' + output + ' \
    --remove \
    --otherinfo \
    --protocol refGene,cytoBand,genomicSuperDups,esp6500siv2_all,1000g2015aug_eur,exac03,gnomad_exome,gnomad_genome,hrcr1,kaviar_20150923,popfreq_max_20150413,avsnp147,intervar_20170202,spidex,dbscsnv11,dbnsfp33a,revel,gwava,clinvar_20170130\
    --operation g,r,r,f,f,f,f,f,f,f,f,f,f,f,f,f,f,f,f \
    --nastring . \
    --vcfinput \
    --thread ' + str(args.threads),shell = True)
    # Dead code kept for reference: an alternative, larger protocol set
    # (including cosmic/icgc and conservation tracks).
    '''
    call(annovar + ' ' + vcffile + ' ' + annovarDB + ' -buildver hg19 \
    -out ' + output + ' \
    --remove \
    --otherinfo \
    --protocol refGene,cytoBand,genomicSuperDups,esp6500siv2_all,1000g2015aug_eur,exac03,hrcr1,kaviar_20150923,popfreq_max_20150413,avsnp147,intervar_20170202,cosmic70,icgc21,spidex,dbscsnv11,dbnsfp33a,revel,gwava,clinvar_20170130,phastConsElements46way,tfbsConsSites,wgRna,targetScanS,gwasCatalog \
    --operation g,r,r,f,f,f,f,f,f,f,f,f,f,f,f,f,f,f,f,r,r,r,r,r \
    --nastring . \
    --vcfinput \
    --thread ' + str(args.threads),shell = True)
    '''
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Annotating Variants...OK!'
print '----------------------------------------------------------------------------------------------'
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Formating and Filtering Variants...'
print '----------------------------------------------------------------------------------------------'
# NOTE(review): sample_path/sample_name here come from the LAST loop
# iteration, so only the final sample gets formatted when several are
# processed -- confirm whether this should live inside the loop.
if args.local:
    call('python /mnt/genetica/GeneticaPipeDB/pipeline/annotation_scripts/vcf_processing_step1_v2.py ' + sample_path + '/' + sample_name + '.hg19_multianno.txt' ,shell = True)
    call('Rscript /mnt/genetica/GeneticaPipeDB/pipeline/annotation_scripts/vcf_processing_step2.R ' + sample_path + '/' + sample_name + '_annotated_formatted.txt',shell = True)
    #call('python /mnt/datos1/GeneticaPipeDB/pipeline/annotation_scripts/vcf_processing_step1_cancer.py ' + sample_path + '/' + sample_name + '.hg19_multianno.txt' ,shell = True)
    #call('Rscript /mnt/datos1/GeneticaPipeDB/pipeline/annotation_scripts/vcf_processing_step2.R ' + sample_path + '/' + sample_name + '_annotated_formatted.txt',shell = True)
else:
    call('python /mnt/genetica/GeneticaPipeDB/pipeline/annotation_scripts/vcf_processing_step1_v2.py ' + sample_path + '/' + sample_name + '.hg19_multianno.txt' ,shell = True)
    call('Rscript /mnt/genetica/GeneticaPipeDB/pipeline/annotation_scripts/vcf_processing_step2_server.R ' + sample_path + '/' + sample_name + '_annotated_formatted.txt',shell = True)
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] Formating and Filtering Variants...OK!'
print '----------------------------------------------------------------------------------------------'
print '----------------------------------------------------------------------------------------------'
print '[DAguilera_Pipeline] #VARIANTS READY FOR ANALYSIS#'
print '----------------------------------------------------------------------------------------------'
|
pminguez/geneticaPipeline
|
pipeline_v4.py
|
Python
|
gpl-3.0
| 15,242
|
[
"BWA"
] |
50a4582647df6ce00d1c47e91ee2a62493f24faba950baa6c9a873f7de989ac4
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from datetime import datetime
from unittest import TestCase
from django.test import ignore_warnings
from django.utils import html, safestring
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
class TestUtilsHtml(TestCase):
def check_output(self, function, value, output=None):
"""
Check that function(value) equals output. If output is None,
check that function(value) equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_escape(self):
f = html.escape
items = (
('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''),
)
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
for pattern in patterns:
self.check_output(f, pattern % value, pattern % output)
# Check repeated values.
self.check_output(f, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(f, '<&', '<&')
def test_format_html(self):
self.assertEqual(
html.format_html("{} {} {third} {fourth}",
"< Dangerous >",
html.mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=html.mark_safe("<i>safe again</i>")
),
"< Dangerous > <b>safe</b> < dangerous again <i>safe again</i>"
)
def test_linebreaks(self):
f = html.linebreaks
items = (
("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
("para1\nsub1\rsub2\n\npara2", "<p>para1<br />sub1<br />sub2</p>\n\n<p>para2</p>"),
("para1\r\n\r\npara2\rsub1\r\rpara4", "<p>para1</p>\n\n<p>para2<br />sub1</p>\n\n<p>para4</p>"),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
self.check_output(f, value, output)
def test_strip_tags(self):
f = html.strip_tags
items = (
('<p>See: 'é is an apostrophe followed by e acute</p>',
'See: 'é is an apostrophe followed by e acute'),
('<adf>a', 'a'),
('</adf>a', 'a'),
('<asdf><asdf>e', 'e'),
('hi, <f x', 'hi, <f x'),
('234<235, right?', '234<235, right?'),
('a4<a5 right?', 'a4<a5 right?'),
('b7>b2!', 'b7>b2!'),
('</fe', '</fe'),
('<x>b<y>', 'b'),
('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'),
('a<p a >b</p>c', 'abc'),
('d<a:b c:d>e</p>f', 'def'),
('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'),
)
for value, output in items:
self.check_output(f, value, output)
# Some convoluted syntax for which parsing may differ between python versions
output = html.strip_tags('<sc<!-- -->ript>test<<!-- -->/script>')
self.assertNotIn('<script>', output)
self.assertIn('test', output)
output = html.strip_tags('<script>alert()</script>&h')
self.assertNotIn('<script>', output)
self.assertIn('alert()', output)
# Test with more lengthy content (also catching performance regressions)
for filename in ('strip_tags1.html', 'strip_tags2.txt'):
path = os.path.join(os.path.dirname(upath(__file__)), 'files', filename)
with open(path, 'r') as fp:
content = force_text(fp.read())
start = datetime.now()
stripped = html.strip_tags(content)
elapsed = datetime.now() - start
self.assertEqual(elapsed.seconds, 0)
self.assertIn("Please try again.", stripped)
self.assertNotIn('<', stripped)
def test_strip_spaces_between_tags(self):
f = html.strip_spaces_between_tags
# Strings that should come out untouched.
items = (' <adf>', '<adf> ', ' </adf> ', ' <f> x</f>')
for value in items:
self.check_output(f, value)
# Strings that have spaces to strip.
items = (
('<d> </d>', '<d></d>'),
('<p>hello </p>\n<p> world</p>', '<p>hello </p><p> world</p>'),
('\n<p>\t</p>\n<p> </p>\n', '\n<p></p><p></p>\n'),
)
for value, output in items:
self.check_output(f, value, output)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_strip_entities(self):
f = html.strip_entities
# Strings that should come out untouched.
values = ("&", "&a", "&a", "a&#a")
for value in values:
self.check_output(f, value)
# Valid entities that should be stripped from the patterns.
entities = ("", "", "&a;", "&fdasdfasdfasdf;")
patterns = (
("asdf %(entity)s ", "asdf "),
("%(entity)s%(entity)s", ""),
("&%(entity)s%(entity)s", "&"),
("%(entity)s3", "3"),
)
for entity in entities:
for in_pattern, output in patterns:
self.check_output(f, in_pattern % {'entity': entity}, output)
def test_escapejs(self):
f = html.escapejs
items = (
('"double quotes" and \'single quotes\'', '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'),
(r'\ : backslashes, too', '\\u005C : backslashes, too'),
('and lots of whitespace: \r\n\t\v\f\b', 'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'),
(r'<script>and this</script>', '\\u003Cscript\\u003Eand this\\u003C/script\\u003E'),
('paragraph separator:\u2029and line separator:\u2028', 'paragraph separator:\\u2029and line separator:\\u2028'),
)
for value, output in items:
self.check_output(f, value, output)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_remove_tags(self):
f = html.remove_tags
items = (
("<b><i>Yes</i></b>", "b i", "Yes"),
("<a>x</a> <p><b>y</b></p>", "a b", "x <p>y</p>"),
)
for value, tags, output in items:
self.assertEqual(f(value, tags), output)
def test_smart_urlquote(self):
quote = html.smart_urlquote
# Ensure that IDNs are properly quoted
self.assertEqual(quote('http://öäü.com/'), 'http://xn--4ca9at.com/')
self.assertEqual(quote('http://öäü.com/öäü/'), 'http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/')
# Ensure that everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered safe as per RFC
self.assertEqual(quote('http://example.com/path/öäü/'), 'http://example.com/path/%C3%B6%C3%A4%C3%BC/')
self.assertEqual(quote('http://example.com/%C3%B6/ä/'), 'http://example.com/%C3%B6/%C3%A4/')
self.assertEqual(quote('http://example.com/?x=1&y=2+3&z='), 'http://example.com/?x=1&y=2+3&z=')
self.assertEqual(quote('http://example.com/?x=<>"\''), 'http://example.com/?x=%3C%3E%22%27')
self.assertEqual(quote('http://example.com/?q=http://example.com/?x=1%26q=django'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
self.assertEqual(quote('http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
def test_conditional_escape(self):
s = '<h1>interop</h1>'
self.assertEqual(html.conditional_escape(s),
'<h1>interop</h1>')
self.assertEqual(html.conditional_escape(safestring.mark_safe(s)), s)
|
maxsocl/django
|
tests/utils_tests/test_html.py
|
Python
|
bsd-3-clause
| 8,173
|
[
"ADF"
] |
bc8c147f1a27b5ea43a1a462d641561f75d9fefafb96c28b3affa9837b4d928a
|
#!/usr/bin/python
# Copyright 2012, SIL International
# All rights reserved.
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should also have received a copy of the GNU Lesser General Public
# License along with this library in the file named "LICENSE".
# If not, write to the Free Software Foundation, 51 Franklin Street,
# suite 500, Boston, MA 02110-1335, USA or visit their web page on the
# internet at http://www.fsf.org/licenses/lgpl.html.
# A RunView consists of two sub-views: a display of the current glyphs (QGraphicsView)
# and a list of corresponding glyph names (QPlainTextEdit).
from qtpy import QtCore, QtGui, QtWidgets
from graide.utils import ModelSuper, DataObj
from graide.layout import Layout
import sys, os, time, traceback
class GlyphPixmapItem(QtWidgets.QGraphicsPixmapItem):
    """One glyph image in the run scene.

    Tracks selection/highlight state and paints the matching background
    colour behind the glyph pixmap.
    """
    def __init__(self, index, px, model = None, parent = None, scene = None):
        # The Python-2 era binding took the scene in the constructor;
        # newer bindings require adding the item to the scene explicitly.
        if sys.version_info[0] < 3:
            super(GlyphPixmapItem, self).__init__(px, parent, scene)
        else:
            super(GlyphPixmapItem, self).__init__(px, parent)
        self.scene = scene
        if self.scene:
            self.scene.addItem(self)
        self.selected = False
        self.index = index
        self.highlighted = False
        self.highlightType = ""
        self.model = model
        self.highlightColours = Layout.slotColours
    def mousePressEvent(self, mouseEvent):
        # Notify the owning model which glyph was clicked.
        if self.model:
            self.model.glyphClicked(self, self.index, False)
    def mouseDoubleClickEvent(self, mouseEvent):
        # Let the scene handle double-clicks.
        if self.scene:
            self.scene.mouseDoubleClickEvent(mouseEvent)
    def select(self, state):
        self.selected = state
        self.update()
    def highlight(self, type = 'default'):
        self.highlighted = True
        self.highlightType = type
    def paint(self, painter, option, widget):
        """Fill the glyph's rect per its state, then draw the pixmap."""
        topleft = QtCore.QPoint(self.offset().x(), self.offset().y())
        rect = QtCore.QRect(topleft, self.pixmap().size())
        if self.selected:
            painter.fillRect(rect, option.palette.highlight())
        elif self.highlighted and self.highlightType in self.highlightColours:
            painter.fillRect(rect, self.highlightColours[self.highlightType])
        super(GlyphPixmapItem, self).paint(painter, option, widget)  # the foreground
# Apparently not used
class RunTextView(QtWidgets.QPlainTextEdit):
    """Plain-text list of glyph names; forwards mouse presses to its creator.

    (Apparently not used.)
    """

    def __init__(self, creator, parent=None):
        super(RunTextView, self).__init__(parent=parent)
        self.creator = creator
        self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)

    def viewportEvent(self, event):
        # Only mouse presses are intercepted; everything else is unhandled.
        if event.type() != QtCore.QEvent.MouseButtonPress:
            return False
        return self.creator.tMousePress(event)
# Used for both the output pane in the bottom left corner of the window
# and for the Passes and Rules tabs.
class RunView(QtCore.QObject, ModelSuper) :
    """Displays one run (a sequence of positioned glyphs) in two linked sub-views:
    a QGraphicsView of the glyph images (gview) and a QPlainTextEdit of the
    corresponding glyph names (tview).

    Used for both the output pane in the bottom left corner of the window
    and for the Passes and Rules tabs.
    """

    MinHt = 70  # minimum pixel height of the graphics view

    # Emitted when a slot/glyph becomes selected: (data object, model, was double-click)
    slotSelected = QtCore.Signal(DataObj, ModelSuper, bool)
    glyphSelected = QtCore.Signal(DataObj, ModelSuper, bool)

    def __init__(self, font = None, run = None, parent = None, collision = False) : # parent = PassesView, Matcher, or none
        super(RunView, self).__init__()
        self.parent = parent
        self.gview = QtWidgets.QGraphicsView(parent) # graphics view - glyphs
        self.gview.setAlignment(QtCore.Qt.AlignLeft)
        self.gview.mouseDoubleClickEvent = self.sEvent
        if font :
            self.gview.resize(self.gview.size().width(), max(font.pixrect.height(), RunView.MinHt))
        else :
            self.gview.resize(200, RunView.MinHt)
        self._scene = QtWidgets.QGraphicsScene(self.gview) # the scene contains the pixmaps
        # Route scene events back to this object.
        self._scene.keyPressEvent = self.keyPressEvent
        self._scene.mouseDoubleClickEvent = self.sEvent
        self.tview = QtWidgets.QPlainTextEdit(parent) # text view - glyph names
        self.tview.setReadOnly(True)
        self.tview.mousePressEvent = self.tEvent
        self.tview.mouseDoubleClickEvent = self.tEvent
        # Character format used for the selected glyph name.
        self._fSelect = QtGui.QTextCharFormat()
        self._fSelect.setBackground(QtWidgets.QApplication.palette().highlight())
        # One character format per highlight type, matching the glyph colours.
        self._fHighlights = {}
        for c in Layout.slotColours.keys() :
            self._fHighlights[c] = QtGui.QTextCharFormat()
            self._fHighlights[c].setBackground(Layout.slotColours[c])
        self.collision = collision
        if run and font :
            self.loadRun(run, font)
        self.gview.setScene(self._scene)

    def loadRun(self, run, font, resize = True) :
        """Populate both sub-views from *run* using glyph images from *font*.

        Builds one pixmap per slot (plus extra pixmaps for collision exclude
        glyphs), fills the text view with glyph names, applies highlights,
        and optionally draws kerning edge lines and resizes the view.
        """
        self.run = run
        self._font = font
        self.currselection = -1  # index of the currently selected slot, -1 = none
        self._scene.clear()
        self._pixmaps = []
        # There might not be a 1-to-1 correspondence between slots and pixmaps -
        # slots with exclude glyphs create an extra pixmap:
        self._slotToPixmap = {}
        # Character offsets of each glyph name in the text view (one sentinel at 0).
        self._gindices = [0]
        scale = font.size * 1. / font.upem
        res = QtCore.QRect()  # running union of all pixmap bounding rects
        sels = []             # extra selections (highlights) for the text view
        self.tview.setExtraSelections([])
        self.tview.setPlainText("")
        self.updateData(run)
        for i, s in enumerate(run) :
            g = font[s.gid]
            # Is this a pseudo-glyph?
            try :
                gidActual = int(g.getGdlProperty("*actualForPseudo*"))
                #print s.gid," actual=",gidActual
            except :
                gidActual = 0
            gActual = font[gidActual] if gidActual != 0 else g
            self._slotToPixmap[i] = len(self._pixmaps)
            if gActual and gActual.item and gActual.item.pixmap :
                res = self.createPixmap(s, gActual, i, res, scale, model = self, scene = self._scene)
            else :
                #print "no GraideGlyph for",s.gid
                self._pixmaps.append(None)
            if g :
                glyphName = g.GDLName() or g.psname
                self.tview.moveCursor(QtGui.QTextCursor.End)
                self.tview.insertPlainText(glyphName + "  ") # 2 spaces between glyph names
                self._gindices.append(self._gindices[-1] + len(glyphName) + 2)
                if s.highlighted :
                    # Highlight this glyph's name in the text view.
                    hselect = QtWidgets.QTextEdit.ExtraSelection()
                    if s.highlightType in self._fHighlights :
                        hselect.format = self._fHighlights[s.highlightType]
                    else :
                        hselect.format = self._fHighlights['default']
                    hselect.cursor = QtGui.QTextCursor(self.tview.document())
                    hselect.cursor.movePosition(QtGui.QTextCursor.NextCharacter, n=self._gindices[-2])
                    hselect.cursor.movePosition(QtGui.QTextCursor.NextCharacter,
                            QtGui.QTextCursor.KeepAnchor, self._gindices[-1] - 2 - self._gindices[-2])
                    sels.append(hselect)
            if self.collision and s.getColExclGlyph() :
                # Draw the collision exclude glyph as an extra pixmap.
                gExclude = s.getColExclGlyph()
                gExclude = font[gExclude]
                exclOff = s.getColExclOffsetSize()
                # NOTE(review): resExcl is never used - the exclude pixmap does
                # not contribute to the scene bounding rect; confirm intended.
                resExcl = self.createPixmap(s, gExclude, i, res, scale, model = self, scene = self._scene, exclOff = exclOff)
        if run.kernEdges is not None :
            def doEdge(lastx, curry, edgex, slicey, scale, pen) : # local function
                if edgex > 1e+37 or edgex < -1e+37 : return None # invalid
                if lastx is not None :
                    # Draw a horizontal line connecting the previous vertical line to the next.
                    t = QtWidgets.QGraphicsLineItem(lastx * scale, -curry * scale, edgex * scale, -curry * scale, scene = self._scene)
                    t.setParentItem(self._pixmaps[-1])
                    t.setPen(pen)
                    self.kernLines.append(t)
                # Draw a vertical line.
                t = QtWidgets.QGraphicsLineItem(edgex * scale, -curry * scale, edgex * scale, -(curry + slicey) * scale, scene = self._scene)
                t.setParentItem(self._pixmaps[-1])
                t.setPen(pen)
                self.kernLines.append(t)
                return edgex

            self.kernLines = []
            pene = QtGui.QPen('green') # ending segment
            peno = QtGui.QPen('blue') # opening segment
            slicey = run.kernEdges[3] # slice y-width
            # pen.setWidth(2) # pixels
            # Show the margin of the glyph(s) on the ending segment.
            curry = run.kernEdges[2] # y-offset
            lastx = None
            for edgex in run.kernEdges[0] : # x-offsets
                lastx = doEdge(lastx, curry, edgex, slicey, scale, pene)
                curry += slicey
            # Now do the margin of the following glyph(s) on the opening segment.
            curry = run.kernEdges[2]
            lastx = None
            for edgex in run.kernEdges[1] : # x-offsets
                lastx = doEdge(lastx, curry, edgex, slicey, scale, peno)
                curry += slicey
        self.tview.moveCursor(QtGui.QTextCursor.Start) # scroll to top
        if len(sels) :
            self.tview.setExtraSelections(sels)
        self.boundingRect = res
        self._scene.setSceneRect(res)
        if resize :
            ht = max(res.height() - res.top() + 2, RunView.MinHt)
            self.gview.setFixedSize(res.left() + res.width() + 2, ht)
            self.gview.resize(res.left() + res.width() + 2, ht)
        self.gview.updateScene([])

    # Overridden for TweakableRunView.
    def createPixmap(self, slot, glyph, index, res, scale, model = None, parent = None, scene = None, exclOff = None) :
        """Create a GlyphPixmapItem for *slot*/*glyph* and return *res* grown
        to include the new pixmap's rectangle.

        exclOff, when given, marks this as a collision-exclude glyph drawn at
        the given offset from the slot position.
        """
        exclude = (exclOff != None) # is this a collision.exclude.glyph?
        if not exclOff :
            exclOff = QtCore.QSize(0, 0)
        px = GlyphPixmapItem(index, glyph.item.pixmap, model, parent, scene)
        # Scene position: scaled design-space position plus the bitmap's bearings.
        ppos = (((slot.drawPosX() + exclOff.width()) * scale) + glyph.item.left,
                ((-slot.drawPosY() - exclOff.height()) * scale) - glyph.item.top)
        px.setOffset(*ppos)
        self._pixmaps.append(px)
        if slot :
            if exclude :
                slot.setExclPixmap(px)
            else :
                slot.setPixmap(px)
        sz = glyph.item.pixmap.size()
        r = QtCore.QRect(ppos[0], ppos[1], sz.width(), sz.height())
        res = res.united(r)
        return res

    def updateData(self, run) :
        """Hook called at the start of loadRun."""
        pass # overridden by TweakableRunView

    def glyphClicked(self, gitem, index, doubleClick) :
        """Handle a click on slot *index*: change selection, or on a repeated
        double-click force the Glyph tab to show the glyph."""
        if index != self.currselection :
            self.changeSelection(index, doubleClick)
        elif doubleClick :
            # Force the Glyph tab to be current:
            self.glyphSelected.emit(self._font[self.run[self.currselection].gid], self, doubleClick)

    def keyPressEvent(self, event) :
        """Move the selection with the Left/Right arrow keys, honouring the
        run's text direction (visual order in an RTL run is reversed)."""
        if self.currselection < 0 : return # no selection to move
        newSel = -1
        if event.key() == QtCore.Qt.Key_Left or event.key() == QtCore.Qt.Key_Right :
            # Figure out the new selection.
            if self.run.rtl :
                forward = True if event.key() == QtCore.Qt.Key_Left else False
            else :
                forward = True if event.key() == QtCore.Qt.Key_Right else False
            if forward :
                newSel = self.currselection + 1
                if newSel >= len(self._pixmaps) :
                    newSel = len(self._pixmaps) - 1
            else :
                newSel = self.currselection - 1
                if newSel < 0 :
                    newSel = 0
        if newSel >= 0 and newSel != self.currselection :
            self.changeSelection(newSel, False)

    def changeSelection(self, newSel, doubleClick) :
        """Deselect the current slot (if any) and select slot *newSel*,
        updating both views and emitting the selection signals."""
        s = self.tview.extraSelections()
        if self.currselection >= 0 :
            self.selectPixmapForSlot(self.currselection, False)
            s.pop()  # remove the old selection highlight from the text view
        if newSel >= 0 and self.currselection != newSel :
            self.currselection = newSel
            self.selectPixmapForSlot(newSel, True)
            # Highlight the name of the selected glyph in the text view.
            tselect = QtWidgets.QTextEdit.ExtraSelection()
            tselect.format = self._fSelect
            tselect.cursor = QtGui.QTextCursor(self.tview.document())
            tselect.cursor.movePosition(QtGui.QTextCursor.NextCharacter, n=self._gindices[newSel])
            tselect.cursor.movePosition(QtGui.QTextCursor.NextCharacter,
                    QtGui.QTextCursor.KeepAnchor, self._gindices[newSel + 1] - self._gindices[newSel] - 2 )
            s.append(tselect)
            selectedSlot = self.run[self.currselection]
            self.slotSelected.emit(selectedSlot, self, doubleClick)
            self.glyphSelected.emit(self._font[selectedSlot.gid], self, doubleClick)
        else :
            self.currselection = -1
        self.tview.setExtraSelections(s)

    def clearSelected(self) :
        """Remove any current selection from both views."""
        if self.currselection >= 0 :
            self.selectPixmapForSlot(self.currselection, False)
            s = self.tview.extraSelections()
            s.pop()
            self.tview.setExtraSelections(s)
        self.currselection = -1

    # There is not necessarily a one-to-one correspondence between slots and pixmaps
    # (due to exclude glyphs), so this method maps from one to the other.
    def selectPixmapForSlot(self, i, selectValue) :
        """Set the selection state of the pixmap corresponding to slot *i*."""
        if i >= 0 :
            try :
                try :
                    pixmap = self._pixmaps[self._slotToPixmap[i]]
                except :
                    pixmap = self._pixmaps[i]
            except :
                pixmap = None
            if pixmap :
                pixmap.select(selectValue)

    def tEvent(self, event) :
        """Mouse handler for the text view: map the click position to a glyph
        index via _gindices and treat it as a click on that glyph."""
        doubleClick = (event.type() == QtCore.QEvent.MouseButtonDblClick)
        c = self.tview.cursorForPosition(event.pos()).position()
        for (i, g) in enumerate(self._gindices) :
            if c < g :
                self.glyphClicked(None, i - 1, doubleClick)
                return True
        return False

    def sEvent(self, event) :
        """Double-click handler for the scene: render it to a PNG file named
        graide_image_<n>.png (first unused n) in the current directory."""
        if event.type() == QtCore.QEvent.MouseButtonDblClick :
            image = QtGui.QImage(self._scene.width(), self._scene.height(), QtGui.QImage.Format_ARGB32)
            image.fill(0xFFFFFFFF)  # white background
            painter = QtGui.QPainter()
            painter.begin(image)
            self._scene.render(painter)
            #time.sleep(3)
            painter.end()
            count = 1
            fname = ''
            while True :
                fname = 'graide_image_{}.png'.format(str(count))
                if not os.path.exists(fname) :
                    break
                count += 1
            image.save(fname)
            #time.sleep(3)
            print("Saved image to " + fname)

    def clear(self) :
        """Empty both sub-views."""
        self._scene.clear()
        self.tview.setPlainText("")
        self.gview.update()
if __name__ == "__main__" :
    # Manual test harness: load a test font and a pre-captured run from a
    # JSON dump and display it in a RunView.
    import json, sys, os
    from graide.font import GraideFont
    from graide.run import Run
    app = QtWidgets.QApplication(sys.argv)
    # print(app.desktop().logicalDpiY())
    tpath = os.path.join(os.path.dirname(sys.argv[0]), '../../tests')
    # Use a with-block so the JSON file handle is not leaked.
    with open(os.path.join(tpath, "padauk3.json")) as jf :
        jinfo = json.load(jf)
    font = GraideFont()
    font.loadFont(os.path.join(tpath, "fonts/Padauk/Padauk.ttf"))
    font.makebitmaps(40)
    rinfo = jinfo['passes'][0]['slots']
    run = Run(font, False)
    run.addSlots(rinfo)
    # BUG FIX: RunView.__init__ takes (font, run, ...); the arguments were
    # previously passed in the wrong order, so nothing was displayed.
    view = RunView(font, run).gview
    print("Padauk RunView?") ###
    view.show()
    sys.exit(app.exec_())
|
silnrsi/graide
|
lib/graide/runview.py
|
Python
|
lgpl-2.1
| 16,301
|
[
"VisIt"
] |
69692404a664b53b2d4fe02b2917b61e8751d56a879f51fce828d05303172abb
|
import json
from CommonServerPython import *
from FindSimilarIncidentsByText import main
import random
# Vocabulary used to build pseudo-random incident "details" text, so incident
# bodies look like natural-language prose to the similarity algorithm.
nouns = ['people', 'history', 'way', 'art', 'world', 'information', 'map', 'two', 'family', 'government', 'health',
         'system', 'computer', 'meat', 'year', 'thanks', 'music', 'person', 'reading', 'method', 'data', 'food',
         'understanding', 'theory', 'law', 'bird', 'literature', 'problem', 'software', 'control', 'knowledge', 'power',
         'ability', 'economics', 'love', 'internet', 'television', 'science', 'library', 'nature', 'fact', 'product',
         'idea', 'temperature', 'investment', 'area', 'society', 'activity', 'story', 'industry', 'media', 'thing',
         'oven', 'community', 'definition', 'safety', 'quality', 'development', 'language', 'management', 'player',
         'variety', 'video', 'week', 'security', 'country', 'exam', 'movie', 'organization', 'equipment', 'physics',
         'analysis', 'policy', 'series', 'thought', 'basis', 'boyfriend', 'direction', 'strategy', 'technology', 'army',
         'camera', 'freedom', 'paper', 'environment', 'child', 'instance', 'month', 'truth', 'marketing', 'university',
         'writing', 'article', 'department', 'difference', 'goal', 'news', 'audience', 'fishing', 'growth', 'income',
         'marriage', 'user', 'combination', 'failure', 'meaning', 'medicine', 'philosophy', 'teacher', 'communication',
         'night', 'chemistry', 'disease', 'disk', 'energy', 'nation', 'road', 'role', 'soup', 'advertising', 'location',
         'success', 'addition', 'apartment', 'education', 'math', 'moment', 'painting', 'politics', 'attention',
         'decision', 'event', 'property', 'shopping', 'student', 'wood', 'competition', 'distribution', 'entertainment',
         'office', 'population', 'president', 'unit', 'category', 'cigarette', 'context', 'introduction', 'opportunity',
         'performance', 'driver', 'flight', 'length', 'magazine', 'newspaper', 'relationship', 'teaching', 'cell',
         'dealer', 'debate', 'finding', 'lake', 'member', 'message', 'phone', 'scene', 'appearance', 'association',
         'concept', 'customer', 'death', 'discussion', 'housing', 'inflation', 'insurance', 'mood', 'woman', 'advice',
         'blood', 'effort', 'expression', 'importance', 'opinion', 'payment', 'reality', 'responsibility', 'situation',
         'skill', 'statement', 'wealth', 'application', 'city', 'county', 'depth', 'estate', 'foundation',
         'grandmother', 'heart', 'perspective', 'photo', 'recipe', 'studio', 'topic', 'collection', 'depression',
         'imagination', 'passion', 'percentage', 'resource', 'setting', 'ad', 'agency', 'college', 'connection',
         'criticism', 'debt', 'description', 'memory', 'patience', 'secretary', 'solution', 'administration', 'aspect',
         'attitude', 'director', 'personality', 'psychology', 'recommendation', 'response', 'selection', 'storage',
         'version', 'alcohol', 'argument', 'complaint', 'contract', 'emphasis', 'highway', 'loss', 'membership',
         'possession', 'preparation', 'steak', 'union', 'agreement', 'cancer', 'currency', 'employment', 'engineering',
         'entry', 'interaction', 'limit', 'mixture', 'preference', 'region', 'republic', 'seat', 'tradition', 'virus',
         'actor', 'classroom', 'delivery', 'device', 'difficulty', 'drama', 'election', 'engine', 'football',
         'guidance', 'hotel', 'match', 'owner', 'priority', 'protection', 'suggestion', 'tension', 'variation',
         'anxiety', 'atmosphere', 'awareness', 'bread', 'climate', 'comparison', 'confusion', 'construction',
         'elevator', 'emotion', 'employee', 'employer', 'guest', 'height', 'leadership', 'mall', 'manager', 'operation',
         'recording', 'respect', 'sample', 'transportation', 'boring', 'charity', 'cousin', 'disaster', 'editor',
         'efficiency', 'excitement', 'extent', 'feedback', 'guitar', 'homework', 'leader', 'mom', 'outcome',
         'permission', 'presentation', 'promotion', 'reflection', 'refrigerator', 'resolution', 'revenue', 'session',
         'singer', 'tennis', 'basket', 'bonus', 'cabinet', 'childhood', 'church', 'clothes', 'coffee', 'dinner',
         'drawing', 'hair', 'hearing', 'initiative', 'judgment', 'lab', 'measurement', 'mode', 'mud', 'orange',
         'poetry', 'police', 'possibility', 'procedure', 'queen', 'ratio', 'relation', 'restaurant', 'satisfaction',
         'sector', 'signature', 'significance', 'song', 'tooth', 'town', 'vehicle', 'volume', 'wife', 'accident',
         'airport', 'appointment', 'arrival', 'assumption', 'baseball', 'chapter', 'committee', 'conversation',
         'database', 'enthusiasm', 'error', 'explanation', 'farmer', 'gate', 'girl', 'hall', 'historian', 'hospital',
         'injury', 'instruction', 'maintenance', 'manufacturer', 'meal', 'perception', 'pie', 'poem', 'presence',
         'proposal', 'reception', 'replacement', 'revolution', 'river', 'son', 'speech', 'tea', 'village', 'warning',
         'winner', 'worker', 'writer', 'assistance', 'breath', 'buyer', 'chest', 'chocolate', 'conclusion',
         'contribution', 'cookie', 'courage', 'dad', 'desk', 'drawer', 'establishment', 'examination', 'garbage',
         'grocery', 'honey', 'impression', 'improvement', 'independence', 'insect', 'inspection', 'inspector', 'king',
         'ladder', 'menu', 'penalty', 'piano', 'potato', 'profession', 'professor', 'quantity', 'reaction',
         'requirement', 'salad', 'sister', 'supermarket', 'tongue', 'weakness', 'wedding', 'affair', 'ambition',
         'analyst', 'apple', 'assignment', 'assistant', 'bathroom', 'bedroom', 'beer', 'birthday', 'celebration',
         'championship', 'cheek', 'client', 'consequence', 'departure', 'diamond', 'dirt', 'ear', 'fortune',
         'friendship', 'funeral', 'gene', 'girlfriend', 'hat', 'indication', 'intention', 'lady', 'midnight',
         'negotiation', 'obligation', 'passenger', 'pizza', 'platform', 'poet', 'pollution', 'recognition',
         'reputation', 'shirt', 'sir', 'speaker', 'stranger', 'surgery', 'sympathy', 'tale', 'throat', 'trainer',
         'uncle', 'youth', 'time', 'work', 'film', 'water', 'money', 'example', 'while', 'business', 'study', 'game',
         'life', 'form', 'air', 'day', 'place', 'number', 'part', 'field', 'fish', 'back', 'process', 'heat', 'hand',
         'experience', 'job', 'book', 'end', 'point', 'type', 'home', 'economy', 'value', 'body', 'market', 'guide',
         'interest', 'state', 'radio', 'course', 'company', 'price', 'size', 'card', 'list', 'mind', 'trade', 'line',
         'care', 'group', 'risk', 'word', 'fat', 'force', 'key', 'light', 'training', 'name', 'school', 'top', 'amount',
         'level', 'order', 'practice', 'research', 'sense', 'service', 'piece', 'web', 'boss', 'sport', 'fun', 'house',
         'page', 'term', 'test', 'answer', 'sound', 'focus', 'matter', 'kind', 'soil', 'board', 'oil', 'picture',
         'access', 'garden', 'range', 'rate', 'reason', 'future', 'site', 'demand', 'exercise', 'image', 'case',
         'cause', 'coast', 'action', 'age', 'bad', 'boat', 'record', 'result', 'section', 'building', 'mouse', 'cash',
         'class', 'nothing', 'period', 'plan', 'store', 'tax', 'side', 'subject', 'space', 'rule', 'stock', 'weather',
         'chance', 'figure', 'man', 'model', 'source', 'beginning', 'earth', 'program', 'chicken', 'design', 'feature',
         'head', 'material', 'purpose', 'question', 'rock', 'salt', 'act', 'birth', 'car', 'dog', 'object', 'scale',
         'sun', 'note', 'profit', 'rent', 'speed', 'style', 'war', 'bank', 'craft', 'half', 'inside', 'outside',
         'standard', 'bus', 'exchange', 'eye', 'fire', 'position', 'pressure', 'stress', 'advantage', 'benefit', 'box',
         'frame', 'issue', 'step', 'cycle', 'face', 'item', 'metal', 'paint', 'review', 'room', 'screen', 'structure',
         'view', 'account', 'ball', 'discipline', 'medium', 'share', 'balance', 'bit', 'black', 'bottom', 'choice',
         'gift', 'impact', 'machine', 'shape', 'tool', 'wind', 'address', 'average', 'career', 'culture', 'morning',
         'pot', 'sign', 'table', 'task', 'condition', 'contact', 'credit', 'egg', 'hope', 'ice', 'network', 'north',
         'square', 'attempt', 'date', 'effect', 'link', 'post', 'star', 'voice', 'capital', 'challenge', 'friend',
         'self', 'shot', 'brush', 'couple', 'exit', 'front', 'function', 'lack', 'living', 'plant', 'plastic', 'spot',
         'summer', 'taste', 'theme', 'track', 'wing', 'brain', 'button', 'click', 'desire', 'foot', 'gas', 'influence',
         'notice', 'rain', 'wall', 'base', 'damage', 'distance', 'feeling', 'pair', 'savings', 'staff', 'sugar',
         'target', 'text', 'animal', 'author', 'budget', 'discount', 'file', 'ground', 'lesson', 'minute', 'officer',
         'phase', 'reference', 'register', 'sky', 'stage', 'stick', 'title', 'trouble', 'bowl', 'bridge', 'campaign',
         'character', 'club', 'edge', 'evidence', 'fan', 'letter', 'lock', 'maximum', 'novel', 'option', 'pack', 'park',
         'plenty', 'quarter', 'skin', 'sort', 'weight', 'baby', 'background', 'carry', 'dish', 'factor', 'fruit',
         'glass', 'joint', 'master', 'muscle', 'red', 'strength', 'traffic', 'trip', 'vegetable', 'appeal', 'chart',
         'gear', 'ideal', 'kitchen', 'land', 'log', 'mother', 'net', 'party', 'principle', 'relative', 'sale', 'season',
         'signal', 'spirit', 'street', 'tree', 'wave', 'belt', 'bench', 'commission', 'copy', 'drop', 'minimum', 'path',
         'progress', 'project', 'sea', 'south', 'status', 'stuff', 'ticket', 'tour', 'angle', 'blue', 'breakfast',
         'confidence', 'daughter', 'degree', 'doctor', 'dot', 'dream', 'duty', 'essay', 'father', 'fee', 'finance',
         'hour', 'juice', 'luck', 'milk', 'mouth', 'peace', 'pipe', 'stable', 'storm', 'substance', 'team', 'trick',
         'afternoon', 'bat', 'beach', 'blank', 'catch', 'chain', 'consideration', 'cream', 'crew', 'detail', 'gold',
         'interview', 'kid', 'mark', 'mission', 'pain', 'pleasure', 'score', 'screw', 'sex', 'shop', 'shower', 'suit',
         'tone', 'window', 'agent', 'band', 'bath', 'block', 'bone', 'calendar', 'candidate', 'cap', 'coat', 'contest',
         'corner', 'court', 'cup', 'district', 'door', 'east', 'finger', 'garage', 'guarantee', 'hole', 'hook',
         'implement', 'layer', 'lecture', 'lie', 'manner', 'meeting', 'nose', 'parking', 'partner', 'profile', 'rice',
         'routine', 'schedule', 'swimming', 'telephone', 'tip', 'winter', 'airline', 'bag', 'battle', 'bed', 'bill',
         'bother', 'cake', 'code', 'curve', 'designer', 'dimension', 'dress', 'ease', 'emergency', 'evening',
         'extension', 'farm', 'fight', 'gap', 'grade', 'holiday', 'horror', 'horse', 'host', 'husband', 'loan',
         'mistake', 'mountain', 'nail', 'noise', 'occasion', 'package', 'patient', 'pause', 'phrase', 'proof', 'race',
         'relief', 'sand', 'sentence', 'shoulder', 'smoke', 'stomach', 'string', 'tourist', 'towel', 'vacation', 'west',
         'wheel', 'wine', 'arm', 'aside', 'associate', 'bet', 'blow', 'border', 'branch', 'breast', 'brother', 'buddy',
         'bunch', 'chip', 'coach', 'cross', 'document', 'draft', 'dust', 'expert', 'floor', 'god', 'golf', 'habit',
         'iron', 'judge', 'knife', 'landscape', 'league', 'mail', 'mess', 'native', 'opening', 'parent', 'pattern',
         'pin', 'pool', 'pound', 'request', 'salary', 'shame', 'shelter', 'shoe', 'silver', 'tackle', 'tank', 'trust',
         'assist', 'bake', 'bar', 'bell', 'bike', 'blame', 'boy', 'brick', 'chair', 'closet', 'clue', 'collar',
         'comment', 'conference', 'devil', 'diet', 'fear', 'fuel', 'glove', 'jacket', 'lunch', 'monitor', 'mortgage',
         'nurse', 'pace', 'panic', 'peak', 'plane', 'reward', 'row', 'sandwich', 'shock', 'spite', 'spray', 'surprise',
         'till', 'transition', 'weekend', 'welcome', 'yard', 'alarm', 'bend', 'bicycle', 'bite', 'blind', 'bottle',
         'cable', 'candle', 'clerk', 'cloud', 'concert', 'counter', 'flower', 'grandfather', 'harm', 'knee', 'lawyer',
         'leather', 'load', 'mirror', 'neck', 'pension', 'plate', 'purple', 'ruin', 'ship', 'skirt', 'slice', 'snow',
         'specialist', 'stroke', 'switch', 'trash', 'tune', 'zone', 'anger', 'award', 'bid', 'bitter', 'boot', 'bug',
         'camp', 'candy', 'carpet', 'cat', 'champion', 'channel', 'clock', 'comfort', 'cow', 'crack', 'engineer',
         'entrance', 'fault', 'grass', 'guy', 'hell', 'highlight', 'incident', 'island', 'joke', 'jury', 'leg', 'lip',
         'mate', 'motor', 'nerve', 'passage', 'pen', 'pride', 'priest', 'prize', 'promise', 'resident', 'resort',
         'ring', 'roof', 'rope', 'sail', 'scheme', 'script', 'sock', 'station', 'toe', 'tower', 'truck', 'witness', 'a',
         'you', 'it', 'can', 'will', 'if', 'one', 'many', 'most', 'other', 'use', 'make', 'good', 'look', 'help', 'go',
         'great', 'being', 'few', 'might', 'still', 'public', 'read', 'keep', 'start', 'give', 'human', 'local',
         'general', 'she', 'specific', 'long', 'play', 'feel', 'high', 'tonight', 'put', 'common', 'set', 'change',
         'simple', 'past', 'big', 'possible', 'particular', 'today', 'major', 'personal', 'current', 'national', 'cut',
         'natural', 'physical', 'show', 'try', 'check', 'second', 'call', 'move', 'pay', 'let', 'increase', 'single',
         'individual', 'turn', 'ask', 'buy', 'guard', 'hold', 'main', 'offer', 'potential', 'professional',
         'international', 'travel', 'cook', 'alternative', 'following', 'special', 'working', 'whole', 'dance',
         'excuse', 'cold', 'commercial', 'low', 'purchase', 'deal', 'primary', 'worth', 'fall', 'necessary', 'positive',
         'produce', 'search', 'present', 'spend', 'talk', 'creative', 'tell', 'cost', 'drive', 'green', 'support',
         'glad', 'remove', 'return', 'run', 'complex', 'due', 'effective', 'middle', 'regular', 'reserve',
         'independent', 'leave', 'original', 'reach', 'rest', 'serve', 'watch', 'beautiful', 'charge', 'active',
         'break', 'negative', 'safe', 'stay', 'visit', 'visual', 'affect', 'cover', 'report', 'rise', 'walk', 'white',
         'beyond', 'junior', 'pick', 'unique', 'anything', 'classic', 'final', 'lift', 'mix', 'private', 'stop',
         'teach', 'western', 'concern', 'familiar', 'fly', 'official', 'broad', 'comfortable', 'gain', 'maybe', 'rich',
         'save', 'stand', 'young', 'heavy', 'hello', 'lead', 'listen', 'valuable', 'worry', 'handle', 'leading', 'meet',
         'release', 'sell', 'finish', 'normal', 'press', 'ride', 'secret', 'spread', 'spring', 'tough', 'wait', 'brown',
         'deep', 'display', 'flow', 'hit', 'objective', 'shoot', 'touch', 'cancel', 'chemical', 'cry', 'dump',
         'extreme', 'push', 'conflict', 'eat', 'fill', 'formal', 'jump', 'kick', 'opposite', 'pass', 'pitch', 'remote',
         'total', 'treat', 'vast', 'abuse', 'beat', 'burn', 'deposit', 'print', 'raise', 'sleep', 'somewhere',
         'advance', 'anywhere', 'consist', 'dark', 'double', 'draw', 'equal', 'fix', 'hire', 'internal', 'join', 'kill',
         'sensitive', 'tap', 'win', 'attack', 'claim', 'constant', 'drag', 'drink', 'guess', 'minor', 'pull', 'raw',
         'soft', 'solid', 'wear', 'weird', 'wonder', 'annual', 'count', 'dead', 'doubt', 'feed', 'forever', 'impress',
         'nobody', 'repeat', 'round', 'sing', 'slide', 'strip', 'whereas', 'wish', 'combine', 'command', 'dig',
         'divide', 'equivalent', 'hang', 'hunt', 'initial', 'march', 'mention', 'spiritual', 'survey', 'tie', 'adult',
         'brief', 'crazy', 'escape', 'gather', 'hate', 'prior', 'repair', 'rough', 'sad', 'scratch', 'sick', 'strike',
         'employ', 'external', 'hurt', 'illegal', 'laugh', 'lay', 'mobile', 'nasty', 'ordinary', 'respond', 'royal',
         'senior', 'split', 'strain', 'struggle', 'swim', 'train', 'upper', 'wash', 'yellow', 'convert', 'crash',
         'dependent', 'fold', 'funny', 'grab', 'hide', 'miss', 'permit', 'quote', 'recover', 'resolve', 'roll', 'sink',
         'slip', 'spare', 'suspect', 'sweet', 'swing', 'twist', 'upstairs', 'usual', 'abroad', 'brave', 'calm',
         'concentrate', 'estimate', 'grand', 'male', 'mine', 'prompt', 'quiet', 'refuse', 'regret', 'reveal', 'rush',
         'shake', 'shift', 'shine', 'steal', 'suck', 'surround', 'anybody', 'bear', 'brilliant', 'dare', 'dear',
         'delay', 'drunk', 'female', 'hurry', 'inevitable', 'invite', 'kiss', 'neat', 'pop', 'punch', 'quit', 'reply',
         'representative', 'resist', 'rip', 'rub', 'silly', 'smile', 'spell', 'stretch', 'stupid', 'tear', 'temporary',
         'tomorrow', 'wake', 'wrap', 'yesterday', ]

# Default script arguments shared by every test; individual tests copy and
# override entries as needed.
default_args = {
    'timeFrameHours': 5,
    'threshold': 0.5,
    'textFields': 'name,details',
    'ignoreClosedIncidents': 'yes',
    'maximumNumberOfIncidents': 10,
    'minTextLength': 0,
    'maxResults': 5,
    'timeField': 'created',
    'preProcessText': False
}

# incident1 is the "current" incident handed to the script under test; its
# details are 50 random words drawn from the vocabulary above.
incident1 = {
    'id': 1,
    'name': 'This is incident1 bla',
    'type': 'Phishing',
    'details': " ".join([nouns[random.randrange(0, len(nouns))] for i in range(50)]),
    'created': '2019-01-01',
    'closed': '2019-01-01',
}

# incident3 and incident4 have independently generated details, so they should
# NOT be reported as similar to incident1.
incident3 = {
    'id': 3,
    'name': 'This is incident3',
    'type': 'Phishing',
    'details': " ".join([nouns[random.randrange(0, len(nouns))] for i in range(50)]),
    'created': '2019-01-01',
    'closed': '2019-01-01',
}

incident4 = {
    'id': 4,
    'name': 'This is incident4',
    'type': 'Phishing',
    'details': " ".join([nouns[random.randrange(0, len(nouns))] for i in range(50)]),
    'created': '2019-01-01',
    'closed': '2019-01-01',
}

# incident1_dup shares its details text with incident1 verbatim, so the script
# should report it as a near-duplicate (similarity ~1.0).
incident1_dup = {
    'id': 2,
    'name': 'This is incident2',
    'type': 'Phishing',
    'details': incident1['details'],
    'created': '2019-01-01',
    'closed': '2019-01-01',
}
def execute_command(command, args=None):
    """Stand-in for demisto.executeCommand used by these tests.

    'GetIncidentsByQuery' returns the candidate incidents as a JSON entry;
    'WordTokenizerNLP' echoes the tokenized value back (unwrapping a
    single-element list); anything else returns an empty result.
    """
    if command == 'GetIncidentsByQuery':
        return [{'Contents': json.dumps([incident1_dup, incident3, incident4]), 'Type': 'note'}]
    if command == 'WordTokenizerNLP':
        values = json.loads(args['value'])
        if len(values) == 1:
            # BUG FIX: previously returned values[1], which raises IndexError
            # for a single-element list; the only valid element is values[0].
            return values[0]
        return values
    else:
        return []
def test_similar_context(mocker):
    """A duplicate-text incident must be found as similar with score > 0.9."""
    script_args = dict(default_args)
    script_args['similarIncidentFields'] = 'name'
    script_args['similarContextKeys'] = 'simpleValue'
    mocker.patch.object(demisto, 'args', return_value=script_args)
    mocker.patch.object(demisto, 'incidents', return_value=[incident1])
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    result = main()
    matches = result['EntryContext']['similarIncidentList']
    assert len(matches) == 1
    assert matches[0]['rawId'] == 2
    assert float(result['EntryContext']['similarIncident']['similarity']) > 0.9
def test_similar_context_with_pre_process(mocker):
    """Same as test_similar_context, but with text pre-processing enabled."""
    script_args = dict(default_args)
    script_args['similarIncidentFields'] = 'name'
    script_args['similarContextKeys'] = 'simpleValue'
    script_args['preProcessText'] = True
    mocker.patch.object(demisto, 'args', return_value=script_args)
    mocker.patch.object(demisto, 'incidents', return_value=[incident1])
    mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command)
    result = main()
    matches = result['EntryContext']['similarIncidentList']
    assert len(matches) == 1
    assert matches[0]['rawId'] == 2
    assert float(result['EntryContext']['similarIncident']['similarity']) > 0.9
|
demisto/content
|
Packs/Base/Scripts/FindSimilarIncidentsByText/FindSimilarIncidentsByText_test.py
|
Python
|
mit
| 19,458
|
[
"VisIt"
] |
6e7ef50a5259df7fb20e0bae98e24ae68252fe141e05ec180d0885049e1a4a4a
|
#
# This file is part of the CCP1 Graphical User Interface (ccp1gui)
#
# (C) 2002-2006 CCLRC Daresbury Laboratory
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""Implements the SMEAGOL specific calculation (Calc) and
calculation editor (CalcEd) classes
"""
# import python modules
import os
import sys
import socket
import glob
# import external modules
import Tkinter
import Pmw
import tkFileDialog
# import internal modules
import calc, calced
import jobmanager
from jobmanager.job import LOCALHOST
import tools
import viewer.help
from viewer.defaults import defaults
from viewer.paths import paths
#from interfaces import smeagolreader
from smeagolio import SmeagolIO
class SMEAGOLCalc(calc.Calc):
"""GAMESS-UK specifics."""
def __init__(self, **kw):
calc.Calc.__init__(self,**kw)
self.debug = None
self.set_program('SMEAGOL')
self.set_title('This file was generated by the CCP1 GUI')
smeagol_input = defaults.get_value( 'smeagol_input' )
if smeagol_input:
# User has opened an fdf file so assume this the input
self.set_parameter("input_files",[smeagol_input])
directory,filename = os.path.split( smeagol_input )
self.set_parameter("local_directory",directory)
name = os.path.splitext( filename )[0]
self.set_name(name)
else:
# Just use some sensible defaults
self.set_name("unknown")
self.set_parameter("input_files",[])
self.set_parameter("local_directory",paths['user'])
def get_editor_class(self):
return SMEAGOLCalcEd
def WriteInput(self,filename=None):
"""Write the SMEAGOL input file"""
return None
    def makejob(self,writeinput=None,graph=None):
        """
        Construct the sequence of job steps.

        Builds (or reuses) a job object, copies out every file listed in the
        "input_files" parameter, runs the SMEAGOL application with the first
        input file on stdin, and copies the remote directory back. The tidy
        step (endjob) loads any resulting trajectory files into the GUI.
        Returns the configured job object.
        """
        #Get an editor instance to pop up tk-error widgets
        ed = self.get_editor()
        #
        # Need to decide what kind of job run
        #
        job = self.get_job()
        if not job:
            job = self.create_job()
        jobtype = job.jobtype
        # Run job from the specified directory
        directory = self.get_parameter('local_directory')
        os.chdir(directory)
        print "job running from directory: %s" % directory
        # Default values for all jobs - may be changed below
        # NOTE(review): get_name is usually argument-free in this framework;
        # confirm that passing "name" here is intended and accepted.
        job_name = self.get_name("name")
        job.name = job_name
        # Block of code to tweak the job depending on how it is being run
        # NOTE(review): the editor also offers "SSH" and "Loadleveler"
        # jobtypes, for which no branch exists here -- job_desc would then
        # be unbound and the RUN_APP step below would raise NameError.
        if jobtype == LOCALHOST:
            job_desc = 'Running SMEAGOL on %s' % LOCALHOST
        elif jobtype == 'RMCS':
            job_desc = 'Running SMEAGOL with RMCS'
        elif jobtype == 'Nordugrid':
            job_desc = 'Running SMEAGOL on Nordugrid'
        elif jobtype == 'Globus':
            job_desc = 'Running SMEAGOL with Globus'
        job.clear_steps()
        # Copy out all the input files
        files = self.get_parameter("input_files" )
        for f in files:
            directory,filename = os.path.split( f )
            print "got dir,file %s,%s" % (directory,filename)
            job.add_step( jobmanager.COPY_OUT_FILE,
                          'transfer input: %s' % f,
                          local_filename=f,
                          remote_filename=filename)
        # First input file feeds SMEAGOL's stdin.
        # NOTE(review): raises IndexError if "input_files" is empty.
        stdin_file = files[0]
        # hack - nuke the defaults
        defaults.set_value('smeagol_input',None)
        job.add_step( jobmanager.RUN_APP,
                      job_desc,
                      stdin_file=stdin_file)
        directory = job.get_parameter("directory")
        job.add_step(jobmanager.COPY_BACK_DIRECTORY,'recover outputs',remote_directory=directory)
        job.add_tidy(self.endjob)
        #jmht - hack
        job.calc = self
        return job
def set_job_defaults(self,job):
"""Set any default parameters for calculations with this type of job
This method should be overwritten (if need be) in any derived class.
"""
if job.jobtype == LOCALHOST:
pass
else:
pass
return None
def endjob(self,code=0):
"""This function is executed in the main thread"""
directory = self.get_parameter("local_directory")
if self.debug:
print 'running endjob in directory: %s - code=' % (directory,code)
# Get a reader objectt
#reader = smeagolreader.SmeagolReader()
reader = SmeagolIO()
# Get any trajectory files
anifiles = glob.glob("*.ANI")
for f in anifiles:
print "trying to read file ",f
#reader.read( f )
reader.ReadFile( f )
#results = reader.get_objects()
results = reader.GetObjects()
if not results:
return None
ed = self.get_editor()
if ed:
if ed.graph:
ed.graph.import_objects(results)
txt = "Objects loaded from punchfile:"
if code > 1:
txt = txt + "Structure update" + '\n'
else:
txt = txt + '\n'
for r in results:
txt = txt + r.title + '\n'
ed.Info(txt)
# Update
if ed.update_func and code > 0:
o = self.get_input("mol_obj")
#name = self.get_input("mol_name")
ed.update_func(o)
# jmht - is this a hack?
self.job = None
class SMEAGOLCalcEd(calced.CalcEd):

    """Tkinter editor window for a SMEAGOL calculation.

    Provides a job name field, a job-submission selector and a widget for
    managing the list of files to transfer to the compute resource.
    """

    def __init__(self,root,calc,graph,**kw):
        calced.CalcEd.__init__(self,root,calc,graph,**kw)
        # Job name entry (prefix for output files)
        self.jobname_tool = tools.TextFieldTool(self,'name','Job Name')
        self.balloon.bind( self.jobname_tool.widget, 'Specify the prefix for all output files' )
        # Available job submission mechanisms
        self.submission_policies = [ LOCALHOST, "SSH", "Loadleveler", "RMCS", "Nordugrid", "Globus"]
        self.submission_tool = tools.SelectOptionTool(self,'submission','Job Submission',
                                                      self.submission_policies)
        # File-transfer list widget plus Add/Del buttons; seeded from the
        # calculation's current "input_files" parameter.
        self.fileFrame = Pmw.Group( self.interior(), tag_text='Files to transfer' )
        self.fileList = Pmw.ScrolledListBox(
            self.fileFrame.interior(),
            listbox_selectmode='extended',
            items=self.calc.get_parameter("input_files")
            )
        self.addFileButton = Tkinter.Button( self.fileFrame.interior(),
                                             text = 'Add',
                                             command = self.AddFile)
        self.delFileButton = Tkinter.Button( self.fileFrame.interior(),
                                             text = 'Del',
                                             command = self.DelFile)
        self.LayoutToolsTk()
        # initialise tools
        for tool in self.tools:
            tool.UpdateWidget()

    def AddFile(self):
        """Add a file to the list to be exported
        """
        oldfile = None
        olddir = None
        filepath = tkFileDialog.askopenfilename(initialfile=oldfile,
                                                initialdir=olddir)
        if len(filepath) == 0:
            # User didn't select owt
            return None
        else:
            filepath=str(filepath)
        all = self.fileList.get()
        # convert from tuple to list
        files = []
        for f in all:
            files.append(f)
        # Now add the new file
        files.append( filepath )
        self.fileList.setlist( files )
        # Update the calculation file list
        self.calc.set_parameter("input_files", files )
        return None

    def DelFile(self):
        """ Remove a file from the list to be exported
        """
        # NOTE(review): unlike AddFile, this does not push the shortened
        # list back into the calculation's "input_files" parameter --
        # confirm whether that is intentional.
        toRemove = self.fileList.getcurselection()
        all = self.fileList.get()
        files = [] # need to convert from tuple to a list
        for m in all:
            files.append( m )
        for f in toRemove:
            files.remove( f )
        self.fileList.setlist( files )
        return

    def LayoutToolsTk(self):
        """Place the widgets belonging to the tools (ChargeTool etc)
        This will generally be replaced by a more specific function
        for a particular code interface.
        """
        #Add Job tab
        page = self.notebook.add('Job',tab_text='Job')
        # Associate helpfile with notebook frame
        tab = self.notebook.tab('Job')
        viewer.help.sethelp(tab,'Job Tab')
        page.jobgroup = Pmw.Group(page,tag_text="Job Group")
        page.jobgroup.pack(side='top',expand='yes',fill='both')
        self.jobname_tool.widget.pack(in_=page.jobgroup.interior())
        # self.workingdirectory_tool.widget.pack(in_=page.jobgroup.interior())
        # self.submission_frame = Tkinter.Frame(page.jobgroup.interior(),relief=Tkinter.RAISED,borderwidth=2)
        self.submission_frame = Tkinter.Frame(page.jobgroup.interior())
        self.submission_frame.pack()
        self.submission_tool.widget.pack(in_=self.submission_frame,side='left')
        self.submission_config_button = Tkinter.Button(self.submission_frame,
                                                       text='Configure...',
                                                       command=self.open_jobsub_editor)
        self.submission_config_button.pack(side='left')
        # Layout the file chooser
        self.fileFrame.pack(in_=page.jobgroup.interior(),fill='both',expand=0)
        self.fileList.pack( side='left' )
        self.addFileButton.pack( side='left' )
        self.delFileButton.pack( side='left' )
if __name__ == "__main__":
    # Manual smoke test: build a one-atom hydrogen molecule, attach it to a
    # SMEAGOL calculation and pop up the calculation editor GUI.
    from objects.zmatrix import *
    from interfaces.smeagol import *
    from jobmanager import *
    # Create a dummy object for the calculation
    model = Zmatrix()
    atom = ZAtom()
    atom.symbol = 'H'
    atom.name = 'H'
    model.insert_atom(0,atom)
    atom.coord = [ 0.,0.,0. ]
    calc = SMEAGOLCalc()
    calc.set_input('mol_obj',model)
    root=Tkinter.Tk()
    jm = JobManager()
    je = JobEditor(root,jm)
    smged = SMEAGOLCalcEd(root,calc,None,job_editor=je)
    root.mainloop()
|
alexei-matveev/ccp1gui
|
interfaces/smeagol.py
|
Python
|
gpl-2.0
| 11,038
|
[
"GAMESS"
] |
b72f22b427ec6d1eab822c2dfb341ba6bd757d3da722ce2ea66835baa2fe938e
|
# vim: set expandtab ts=4 sw=4 filetype=python fileencoding=utf8:
import copy
import logging
import textwrap
import psycopg2.extras
from profiles.pg import RelationWrapper
log = logging.getLogger(__name__)
class IndicatorsFactory(psycopg2.extras.CompositeCaster):

    """Casts rows of the composite "indicators" type to Indicator objects."""

    def make(self, values):
        """Pair the composite attribute names with values and build an Indicator."""
        attributes = dict(zip(self.attnames, values))
        return Indicator(**attributes)
class Indicator(RelationWrapper):

    """One row of the "indicators" table.

    Instances are normally built by IndicatorsFactory when psycopg2 casts
    the composite "indicators" type returned by the queries below.
    """

    # NOTE(review): __eq__ is defined without __hash__, which leaves
    # instances unhashable on Python 3 unless RelationWrapper supplies a
    # __hash__ -- confirm before using Indicator objects in sets/dict keys.

    def __init__(self, indicator_uuid, title, description,
        pretty_label, indicator_value_format, indicator_category, source_document,
        sas_variable, formula, extra_notes,
        definition, universe, limitations, note, data_source,
        data_as_of, numerator_tables, denominator_tables,
        chart_label,
        inserted, updated):
        self.indicator_uuid = indicator_uuid
        self.title = title
        self.description = description
        self.pretty_label = pretty_label
        self.indicator_value_format = indicator_value_format
        self.indicator_category = indicator_category
        self.source_document = source_document
        self.sas_variable = sas_variable
        self.formula = formula
        self.extra_notes = extra_notes
        self.definition = definition
        self.universe = universe
        self.limitations = limitations
        self.note = note
        self.data_source = data_source
        self.data_as_of = data_as_of
        self.numerator_tables = numerator_tables
        self.denominator_tables = denominator_tables
        self.chart_label = chart_label
        self.inserted = inserted
        self.updated = updated
        # Maybe set this
        # (populated later by lookup_my_racial_split / lookup_cv_and_moe)
        self.racial_split = []
        self.indicator_CV = None
        self.indicator_moe = None

    @property
    def __jsondata__(self):
        """Shallow copy of the instance dict, used for JSON serialization."""
        d = copy.copy(self.__dict__)
        return d

    def __eq__(self, other):
        # NOTE(review): unlike __ne__, this raises AttributeError when
        # other is None -- confirm no caller compares against None with ==.
        return self.indicator_uuid == other.indicator_uuid

    def __ne__(self, other):
        if other:
            return self.indicator_uuid != other.indicator_uuid
        else:
            # Anything falsy (None in practice) counts as "not equal".
            return True

    @classmethod
    def by_indicator_uuid(cls, pgconn, indicator_uuid):
        """Return the Indicator with this UUID.

        NOTE: when no row matches, fetchone() returns None and this raises
        AttributeError rather than a KeyError.
        """
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select (indicators.*)::indicators as indicator
            from indicators
            where indicator_uuid = %(indicator_uuid)s
            """), dict(indicator_uuid=indicator_uuid))
        return cursor.fetchone().indicator

    @classmethod
    def select_all(cls, pgconn):
        """Yield every Indicator in the table."""
        qry = textwrap.dedent("""
            select (indicators.*)::indicators as x
            from indicators
            """)
        cursor = pgconn.cursor()
        cursor.execute(qry)
        for row in cursor:
            yield row.x

    @classmethod
    def insert(cls, pgconn, title, description,
        indicator_value_format, indicator_category,
        source_document, sas_variable, chart_label):
        """Insert a new indicator row and return it.

        The transaction is not committed here; callers commit.
        """
        cursor = pgconn.cursor()
        # Convention: titles starting with "_" default to percent format
        # when no explicit value format was supplied.
        if indicator_value_format is None and '_' == title[0]:
            indicator_value_format = 'percent'
        cursor.execute(textwrap.dedent("""
            insert into indicators
            (title, description, indicator_value_format,
            indicator_category, source_document, sas_variable,
            chart_label)
            values
            (%s, %s, %s, %s, %s, %s, %s)
            returning (indicators.*)::indicators as ind
            """),
            [title, description, indicator_value_format,
            indicator_category, source_document, sas_variable,
            chart_label])
        return cursor.fetchone().ind

    @classmethod
    def by_title(cls, pgconn, title):
        """Return the Indicator with this title, or raise KeyError."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select (indicators.*)::indicators ind
            from indicators
            where title = %s
            """), [title])
        if cursor.rowcount:
            return cursor.fetchone().ind
        else:
            raise KeyError(
                "Sorry, no indicator with title {0} found!".format(
                    title))

    @classmethod
    def by_sas_variable(cls, pgconn, sas_variable):
        """Yield every Indicator with this sas_variable."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select indicators.*::indicators as ind
            from indicators
            where sas_variable = %s
            """), [sas_variable])
        for row in cursor:
            yield row.ind

    def set_all_visible(self, pgconn, visible=False):
        """
        Set all values for this indicator to visible (true / false)
        """
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicator_location_values
            set visible = %(visible)s
            where indicator_uuid = %(indicator_uuid)s
            """), dict(visible=visible, indicator_uuid=self.indicator_uuid))
        return self

    def set_visible_years(self, pgconn, start_year, end_year, visible=False):
        """
        Set the visibility flag on this indicator's values whose
        observation year falls within [start_year, end_year] inclusive.
        """
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicator_location_values
            set visible = %(visible)s
            where indicator_uuid = %(indicator_uuid)s
            and date_part('year', observation_timestamp) >= %(start_year)s
            and date_part('year', observation_timestamp) <= %(end_year)s
            """), dict(visible=visible, indicator_uuid=self.indicator_uuid,
                end_year=end_year, start_year=start_year))
        return self

    def set_visible_year(self, pgconn, year, visible=False):
        """
        Set the visibility flag on this indicator's values observed in
        the given year (rows with NULL timestamps are ignored).
        """
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicator_location_values
            set visible = %(visible)s
            where indicator_uuid = %(indicator_uuid)s
            and observation_timestamp is not null
            and date_part('year', observation_timestamp) = %(year)s
            """), dict(visible=visible, indicator_uuid=self.indicator_uuid, year=year))
        return self

    def update_description(self, pgconn, new_description, chart_label):
        """Update this indicator's description and chart label; return the
        refreshed Indicator or raise KeyError if the row is gone."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicators
            set description = %s,
            chart_label = %s
            where indicator_uuid = %s
            returning indicators.*::indicators as updated_ind
            """), [new_description, chart_label, self.indicator_uuid])
        if cursor.rowcount:
            updated_ind = cursor.fetchone().updated_ind
            log.info("Updated description, chart_label on {0} to {1}, {2}".format(
                updated_ind,
                new_description,
                chart_label))
            return updated_ind
        else:
            raise KeyError("Could not find indicator {0}!".format(self))

    def update_pretty_label(self, pgconn, new_pretty_label):
        """Update this indicator's pretty_label; return the refreshed
        Indicator or raise KeyError if the row is gone."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicators
            set pretty_label = %s
            where indicator_uuid = %s
            returning indicators.*::indicators as updated_ind
            """), [new_pretty_label, self.indicator_uuid])
        if cursor.rowcount:
            updated_ind = cursor.fetchone().updated_ind
            log.info("Updated pretty_label on {0} to {1}".format(
                updated_ind,
                updated_ind.pretty_label))
            return updated_ind
        else:
            raise KeyError("Could not find indicator {0}!".format(self))

    @classmethod
    def update_description_by_title(cls, pgconn, title, description,
        chart_label):
        """
        Use the title to find this indicator.  Then update the
        description.  Then return the updated indicator.
        """
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicators
            set description = %s,
            chart_label = %s
            where title = %s
            returning indicators.*::indicators as updated_ind
            """), [description, chart_label, title])
        if cursor.rowcount:
            updated_ind = cursor.fetchone().updated_ind
            log.info("Updated description on {0} to {1}".format(
                updated_ind,
                description))
            return updated_ind
        else:
            raise KeyError("Could not find indicator {0}!".format(title))

    @classmethod
    def update_extra_details_by_title(cls, pgconn, title, description,
        definition,
        universe, limitations, note, data_source, data_as_of,
        numerator_tables, denominator_tables, chart_label):
        """
        Use the title to find this indicator.  Then update
        with extra information.  Then return the updated indicator.

        Note: the query parameters are supplied via locals(), so every
        argument name must match a %(name)s placeholder; pretty_label is
        deliberately set to the description here.
        """
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicators
            set description = %(description)s,
            pretty_label = %(description)s,
            definition = %(definition)s,
            universe = %(universe)s,
            limitations = %(limitations)s,
            note = %(note)s,
            data_source = %(data_source)s,
            data_as_of = %(data_as_of)s,
            numerator_tables = %(numerator_tables)s,
            denominator_tables = %(denominator_tables)s,
            chart_label = %(chart_label)s
            where title = %(title)s
            returning (indicators.*)::indicators as updated_ind
            """), locals())
        if cursor.rowcount:
            updated_ind = cursor.fetchone().updated_ind
            log.info("Updated extra details on {0} {1}".format(
                updated_ind, updated_ind.universe))
            return updated_ind
        else:
            raise KeyError("Could not find indicator {0}!".format(title))

    def __repr__(self):
        return """<{0}.{1} (title="{2}")>""".format(
            self.__class__.__module__,
            self.__class__.__name__,
            self.title)

    def lookup_my_racial_split(self, pgconn, location_uuid):
        """
        Look up the racial-split companion values for this indicator at
        the given location and store them on self.racial_split.
        """
        racial_indicators= IndicatorLocationValue.find_racial_sub_indicators(
            self.title)
        cursor = pgconn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cursor.execute(textwrap.dedent("""
            with indicator_value_location as
            (
            select
            i.indicator_uuid, i.title, i.indicator_value_format,
            l.title as location_title,
            ilv.value, ilv.observation_timestamp, i.indicator_category
            from indicator_location_values ilv
            join indicators i on i.indicator_uuid = ilv.indicator_uuid
            join locations l on l.location_uuid = ilv.location_uuid
            where l.location_uuid = %(location_uuid)s
            and i.title = any(%(indicators)s)
            and ilv.visible = true
            --and ilv.value != 999999
            order by ilv.observation_timestamp asc
            )
            select (i.*)::indicators as indicator,
            array_to_json(array_agg(ilv.*)) as indicator_values
            from indicator_value_location ilv
            join indicators i on ilv.indicator_uuid = i.indicator_uuid
            group by (i.*)
            """), dict(location_uuid=location_uuid,
                indicators=racial_indicators))
        self.racial_split = [row for row in cursor.fetchall()]
        return self

    def lookup_cv_and_moe(self, pgconn, location_uuid):
        """
        Look up this indicator's coefficient-of-variation ("cv" + title)
        and margin-of-error ("m" + title) companion indicators at the given
        location and store them on indicator_CV / indicator_moe.

        NOTE(review): the unpack below assumes the cv row is returned
        before the moe row; the query has no ORDER BY to guarantee that --
        confirm ordering, and that exactly two rows come back.
        """
        cv_ind = 'cv' + self.title
        m_ind = 'm' + self.title
        cursor = pgconn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cursor.execute(textwrap.dedent("""
            with indicator_value_location as
            (
            select
            i.indicator_uuid, i.title, i.indicator_value_format,
            l.title as location_title,
            ilv.value, ilv.observation_timestamp, i.indicator_category
            from indicator_location_values ilv
            join indicators i on i.indicator_uuid = ilv.indicator_uuid
            join locations l on l.location_uuid = ilv.location_uuid
            where l.location_uuid = %(location_uuid)s
            and i.title = any(%(indicators)s)
            and ilv.visible = true
            --and ilv.value != 999999
            order by ilv.observation_timestamp asc
            )
            select (i.*)::indicators as indicator,
            array_to_json(array_agg(ilv.*)) as indicator_values
            from indicator_value_location ilv
            join indicators i on ilv.indicator_uuid = i.indicator_uuid
            group by (i.*)
            """), dict(location_uuid=location_uuid,
                indicators=[cv_ind, m_ind]))
        if cursor.rowcount > 1:
            self.indicator_CV, self.indicator_moe = cursor.fetchall()
        return self

    def distinct_observation_timestamps(self, pgconn):
        """
        Give us the distinct observable_timestamps for a given
        indicator
        """
        cursor = pgconn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cursor.execute(textwrap.dedent("""
            select distinct observation_timestamp,
            observation_timestamp_label
            from indicator_location_values
            where indicator_uuid = %(indicator_uuid)s
            and visible = True
            order by observation_timestamp asc;
            """), dict(indicator_uuid=self.indicator_uuid))
        for row in cursor.fetchall():
            yield row

    def all_indicator_location_values(self, pgconn, order_by_area=False):
        """
        Give us all the values for a given indicator
        across all times and locations

        order_by_area - sort locations by shape area (largest first)
                        instead of alphabetically by title.
        """
        cursor = pgconn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        if order_by_area:
            order_by_clause = "order by st_area(l.location_shape) desc"
        else:
            order_by_clause = "order by l.title asc"
        qry = textwrap.dedent("""
            select (l.*)::locations as location,
            st_area(l.location_shape) as location_area,
            array_to_json(array_agg((ilv.*)::indicator_location_values))
            as indicator_location_values
            from indicator_location_values ilv
            join locations l on l.location_uuid = ilv.location_uuid
            where indicator_uuid = %(indicator_uuid)s
            and l.display_me = true
            group by l.location_uuid
            {order_by_clause}
            """)
        qry = qry.format(order_by_clause=order_by_clause)
        cursor.execute(qry, dict(indicator_uuid=self.indicator_uuid))
        for row in cursor.fetchall():
            yield row
def all_indicator_categories(pgconn):
    """Return a list of every category string in indicator_categories."""
    cursor = pgconn.cursor()
    cursor.execute(textwrap.dedent("""
        select category from indicator_categories
        """))
    categories = []
    for row in cursor:
        categories.append(row.category)
    return categories
class IndicatorCategoryFactory(psycopg2.extras.CompositeCaster):

    """Casts the composite "indicator_categories" type to IndicatorCategory."""

    def make(self, values):
        """Pair attribute names with values and build an IndicatorCategory."""
        attributes = dict(zip(self.attnames, values))
        return IndicatorCategory(**attributes)
class IndicatorCategory(RelationWrapper):

    """One row of the "indicator_categories" table."""

    def __init__(self, category, description, inserted, updated):
        self.category = category
        self.description = description
        self.inserted = inserted
        self.updated = updated

    @classmethod
    def all(cls, pgconn):
        """Yield every IndicatorCategory in the table."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select (indicator_categories.*)::indicator_categories ic
            from indicator_categories
            """))
        for row in cursor:
            yield row.ic

    @classmethod
    def insert(cls, pgconn, category, description):
        """Insert a new category row and return it (no commit here)."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            insert into indicator_categories
            (category, description)
            values
            (%s, %s)
            returning indicator_categories.*::indicator_categories as ic
            """), [category, description])
        return cursor.fetchone().ic

    @classmethod
    def by_category(cls, pgconn, category):
        """Return the IndicatorCategory with this name, or raise KeyError."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select indicator_categories.*::indicator_categories as ic
            from indicator_categories
            where category = %s
            """), [category])
        if cursor.rowcount:
            return cursor.fetchone().ic
        else:
            raise KeyError("No indicator_category {0}!".format(
                category))
class IndicatorLocationValueFactory(psycopg2.extras.CompositeCaster):

    """Casts the composite "indicator_location_values" type to IndicatorLocationValue."""

    def make(self, values):
        """Pair attribute names with values and build an IndicatorLocationValue."""
        attributes = dict(zip(self.attnames, values))
        return IndicatorLocationValue(**attributes)
class IndicatorLocationValue(RelationWrapper):

    """One observation of an indicator at a location and time.

    Wraps a row of the "indicator_location_values" table.
    """

    def __init__(self, indicator_uuid, location_uuid,
        observation_timestamp, observation_range,
        value, observation_timestamp_label, visible, inserted, updated):
        self.indicator_uuid = indicator_uuid
        self.location_uuid = location_uuid
        self.observation_timestamp = observation_timestamp
        self.observation_range = observation_range
        self.observation_timestamp_label = observation_timestamp_label
        self.value = value
        self.visible = visible
        self.inserted = inserted
        self.updated = updated

    @classmethod
    def insert(cls, pgconn, indicator_uuid, location_uuid,
        observation_timestamp, observation_range, value):
        """Insert a new observation and return it (no commit here)."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            insert into indicator_location_values
            (
            indicator_uuid, location_uuid, observation_timestamp,
            observation_range, value
            )
            values
            (%s, %s, %s, %s, %s)
            returning
            (indicator_location_values.*)::indicator_location_values as indlocval
            """), [indicator_uuid, location_uuid, observation_timestamp,
                observation_range, value])
        return cursor.fetchone().indlocval

    @classmethod
    def by_ilo(cls, pgconn, indicator, location, observation_timestamp):
        """Return the observation for this (indicator, location, timestamp)
        triple, or raise KeyError when none exists."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select indicator_location_values.*::indicator_location_values as ilv
            from indicator_location_values
            where (indicator_uuid, location_uuid, observation_timestamp)
            = (%s, %s, %s)
            """), [indicator, location, observation_timestamp])
        if cursor.rowcount:
            return cursor.fetchone().ilv
        else:
            raise KeyError("Sorry, no ILV with {0}, {1}, {2} found!".format(
                indicator,
                location,
                observation_timestamp))

    @classmethod
    def update_value(cls, pgconn, indicator, location,
        observation_timestamp, value, visible=True):
        """Set the value (and visibility) of one observation; return the
        refreshed row or raise KeyError when none matches."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            update indicator_location_values
            set value = %s, visible = %s
            where (indicator_uuid, location_uuid, observation_timestamp)
            = (%s, %s, %s)
            returning indicator_location_values.*::indicator_location_values as ilv
            """), [value, visible, indicator, location, observation_timestamp])
        if cursor.rowcount:
            ilv = cursor.fetchone().ilv
            log.info("Updated {0}'s value to {1}.".format(ilv, value))
            return ilv
        else:
            raise KeyError("Sorry, no ILV with {0}, {1}, {2} found!".format(
                indicator,
                location,
                observation_timestamp))

    def update_my_value(self, pgconn, new_value, visible=True):
        """Write new_value for this observation, skipping the UPDATE when
        nothing changed.

        NOTE(review): the change check uses exact float equality --
        near-equal floats will still trigger an update; confirm that's
        acceptable.  Returns None when no update was needed.
        """
        if float(new_value) != self.value or self.visible != visible:
            return self.update_value(
                pgconn,
                self.indicator_uuid,
                self.location_uuid,
                self.observation_timestamp,
                float(new_value),
                visible=visible)

    @staticmethod
    def look_up_racial_split(pgconn, indicator_title,
        location_uuid, dt):
        """Yield, as dicts, the racial sub-indicator values for this
        indicator at one location and timestamp, each with a floor/ceiling
        computed from the matching margin-of-error ("m" + title) indicator
        when one exists."""
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select
            (indicators.*)::indicators as i,
            (ilv.*) as indicator_value,
            indicators.chart_label,
            round(ilv.value - ilv_moe.value) as floor,
            round(ilv.value + ilv_moe.value) as ceiling
            from indicator_location_values ilv
            join indicators
            on ilv.indicator_uuid = indicators.indicator_uuid
            left join indicators moe
            on 'm' || indicators.title = moe.title
            left join indicator_location_values ilv_moe
            on moe.indicator_uuid = ilv_moe.indicator_uuid
            and ilv_moe.location_uuid = %(location_uuid)s
            and ilv_moe .observation_timestamp = %(dt)s
            where indicators.title = any (%(race_indicator_titles)s)
            and ilv.location_uuid = %(location_uuid)s
            and ilv.observation_timestamp = %(dt)s
            order by indicators.pretty_label
            """), dict(
                race_indicator_titles=IndicatorLocationValue.find_racial_sub_indicators(indicator_title),
                location_uuid=location_uuid,
                dt=dt))
        for row in cursor:
            yield row._asdict()

    @staticmethod
    def find_racial_sub_indicators(indicator_title):
        """
        The CWRU folks have no single pattern for how they racial splits
        on statistics.

        Maps an indicator title to the list of titles of its per-race
        companion indicators ('a'/'b'/'h'/'o'/'w' variants), by matching
        the title against the known naming patterns in order.
        """
        # This one is my favorite -- it is completely unlike the other
        # patterns.
        if indicator_title == "pop":
            return ["nhw", "nhb", "nhapi", "nho", "hisp"]
        # rpass50 => w_rpass50
        if indicator_title in set([
            "rpassed3", "rpassed4", "rpassed6", "rpassed10",
            "mpassed3", "mpassed4", "mpassed6", "mpassed10"
            ]):
            return ["{0}_{1}".format(c, indicator_title) for c in 'abhow']
        # _rpass50 => _w_rpass50
        elif indicator_title in set([
            "_rpassed50", "_rpassed20", "_rpassed10", "_rpassed41",
            "_mpassed50", "_mpassed20", "_mpassed10", "_mpassed41",
            ]):
            return ["_{0}{1}".format(c, indicator_title) for c in 'abhow']
        # _attend => w_attend
        elif indicator_title in set(["_attend"]):
            return ["{0}{1}".format(c, indicator_title) for c in 'abhow']
        # _emp => _wemp
        elif indicator_title in set(["_emp", "_lf", "_lshs", "_hsgrad",
            "_somecollassoc", "_bsp", "_bpv", "_native", "_foreign",
            "_samehse1y", "_diffhs1y","_drove", "_walk", "_public_tran",
            "_other_tran", "_workathome"
            ]):
            log.info(indicator_title)
            return ["_{0}{1}".format(c, indicator_title[1:]) for c in 'abhow']
        # t_cburden50p => w_cburden50p
        elif indicator_title.startswith("t_"):
            return ["{0}{1}".format(c, indicator_title[1:]) for c in 'abhow']
        elif indicator_title.startswith("_hh"):
            return ["_{0}{1}".format(c, indicator_title[1:]) for c in 'abhow']
        # _t_cburden50p => _w_cburden50p
        elif indicator_title.startswith("_t_c"):
            return ["_{0}_{1}".format(c, indicator_title[3:]) for c in 'abhow']
        # _pa_snap => _wpa_snap
        elif indicator_title.startswith("_pa_snap"):
            return ["_{0}{1}".format(c, indicator_title[1:]) for c in 'abhow']
        elif indicator_title.startswith("_"):
            return ["_{0}{1}".format(c, indicator_title) for c in 'abhow']
        # _t_cburden50p => _w_cburden50p
        # NOTE(review): this branch is unreachable -- the identical
        # startswith("_") test above already catches every such title.
        elif indicator_title.startswith("_"):
            return ["_{0}_{1}".format(c, indicator_title[3:]) for c in 'abhow']
        # xyz => wxyz
        else:
            return ["{0}{1}".format(c, indicator_title) for c in 'abhow']

    @staticmethod
    def find_available_observation_timestamps(pgconn, indicator_uuid,
        location_uuid):
        """Yield, as dicts, every observation timestamp (with a label,
        defaulting to the year) recorded for this indicator at this
        location, in chronological order.

        Note: query parameters are supplied via locals(), so the argument
        names must match the %(name)s placeholders.
        """
        cursor = pgconn.cursor()
        cursor.execute(textwrap.dedent("""
            select ilv.observation_timestamp,
            coalesce(
            ilv.observation_timestamp_label,
            to_char(
            ilv.observation_timestamp,
            'YYYY')) as observation_timestamp_label
            from indicator_location_values ilv
            where ilv.indicator_uuid = %(indicator_uuid)s
            and ilv.location_uuid = %(location_uuid)s
            order by ilv.observation_timestamp
            """), locals())
        for row in cursor:
            yield row._asdict()
|
216software/Profiles
|
profiles/profiles/pg/indicators.py
|
Python
|
mit
| 25,205
|
[
"MOE"
] |
ae1f20a43a6b044adbc5458d74cf915e7543cb41ac8597d689356b051e393021
|
COLORS = [
{
"x": 93,
"y": 138,
"z": 168,
"label": "Air Force blue"
},
{
"x": 240,
"y": 248,
"z": 255,
"label": "Alice blue"
},
{
"x": 227,
"y": 38,
"z": 54,
"label": "Alizarin crimson"
},
{
"x": 239,
"y": 222,
"z": 205,
"label": "Almond"
},
{
"x": 229,
"y": 43,
"z": 80,
"label": "Amaranth"
},
{
"x": 255,
"y": 191,
"z": 0,
"label": "Amber"
},
{
"x": 255,
"y": 126,
"z": 0,
"label": "Amber"
},
{
"x": 255,
"y": 3,
"z": 62,
"label": "American rose"
},
{
"x": 153,
"y": 102,
"z": 204,
"label": "Amethyst"
},
{
"x": 164,
"y": 198,
"z": 57,
"label": "Android Green"
},
{
"x": 242,
"y": 243,
"z": 244,
"label": "Anti-flash white"
},
{
"x": 205,
"y": 149,
"z": 117,
"label": "Antique brass"
},
{
"x": 145,
"y": 92,
"z": 131,
"label": "Antique fuchsia"
},
{
"x": 250,
"y": 235,
"z": 215,
"label": "Antique white"
},
{
"x": 0,
"y": 128,
"z": 0,
"label": "Ao"
},
{
"x": 141,
"y": 182,
"z": 0,
"label": "Apple green"
},
{
"x": 251,
"y": 206,
"z": 177,
"label": "Apricot"
},
{
"x": 0,
"y": 255,
"z": 255,
"label": "Aqua"
},
{
"x": 127,
"y": 255,
"z": 212,
"label": "Aquamarine"
},
{
"x": 75,
"y": 83,
"z": 32,
"label": "Army green"
},
{
"x": 233,
"y": 214,
"z": 107,
"label": "Arylide yellow"
},
{
"x": 178,
"y": 190,
"z": 181,
"label": "Ash grey"
},
{
"x": 135,
"y": 169,
"z": 107,
"label": "Asparagus"
},
{
"x": 255,
"y": 153,
"z": 102,
"label": "Atomic tangerine"
},
{
"x": 165,
"y": 42,
"z": 42,
"label": "Auburn"
},
{
"x": 253,
"y": 238,
"z": 0,
"label": "Aureolin"
},
{
"x": 110,
"y": 127,
"z": 128,
"label": "AuroMetalSaurus"
},
{
"x": 255,
"y": 32,
"z": 82,
"label": "Awesome"
},
{
"x": 0,
"y": 127,
"z": 255,
"label": "Azure"
},
{
"x": 240,
"y": 255,
"z": 255,
"label": "Azure mist/web"
},
{
"x": 137,
"y": 207,
"z": 240,
"label": "Baby blue"
},
{
"x": 161,
"y": 202,
"z": 241,
"label": "Baby blue eyes"
},
{
"x": 244,
"y": 194,
"z": 194,
"label": "Baby pink"
},
{
"x": 33,
"y": 171,
"z": 205,
"label": "Ball Blue"
},
{
"x": 250,
"y": 231,
"z": 181,
"label": "Banana Mania"
},
{
"x": 255,
"y": 225,
"z": 53,
"label": "Banana yellow"
},
{
"x": 132,
"y": 132,
"z": 130,
"label": "Battleship grey"
},
{
"x": 152,
"y": 119,
"z": 123,
"label": "Bazaar"
},
{
"x": 188,
"y": 212,
"z": 230,
"label": "Beau blue"
},
{
"x": 159,
"y": 129,
"z": 112,
"label": "Beaver"
},
{
"x": 245,
"y": 245,
"z": 220,
"label": "Beige"
},
{
"x": 255,
"y": 228,
"z": 196,
"label": "Bisque"
},
{
"x": 61,
"y": 43,
"z": 31,
"label": "Bistre"
},
{
"x": 254,
"y": 111,
"z": 94,
"label": "Bittersweet"
},
{
"x": 0,
"y": 0,
"z": 0,
"label": "Black"
},
{
"x": 255,
"y": 235,
"z": 205,
"label": "Blanched Almond"
},
{
"x": 49,
"y": 140,
"z": 231,
"label": "Bleu de France"
},
{
"x": 172,
"y": 229,
"z": 238,
"label": "Blizzard Blue"
},
{
"x": 250,
"y": 240,
"z": 190,
"label": "Blond"
},
{
"x": 0,
"y": 0,
"z": 255,
"label": "Blue"
},
{
"x": 0,
"y": 147,
"z": 175,
"label": "Blue"
},
{
"x": 0,
"y": 135,
"z": 189,
"label": "Blue"
},
{
"x": 51,
"y": 51,
"z": 153,
"label": "Blue"
},
{
"x": 2,
"y": 71,
"z": 254,
"label": "Blue"
},
{
"x": 162,
"y": 162,
"z": 208,
"label": "Blue Bell"
},
{
"x": 102,
"y": 153,
"z": 204,
"label": "Blue Gray"
},
{
"x": 13,
"y": 152,
"z": 186,
"label": "Blue-green"
},
{
"x": 138,
"y": 43,
"z": 226,
"label": "Blue-violet"
},
{
"x": 222,
"y": 93,
"z": 131,
"label": "Blush"
},
{
"x": 121,
"y": 68,
"z": 59,
"label": "Bole"
},
{
"x": 0,
"y": 149,
"z": 182,
"label": "Bondi blue"
},
{
"x": 204,
"y": 0,
"z": 0,
"label": "Boston University Red"
},
{
"x": 135,
"y": 50,
"z": 96,
"label": "Boysenberry"
},
{
"x": 0,
"y": 112,
"z": 255,
"label": "Brandeis blue"
},
{
"x": 181,
"y": 166,
"z": 66,
"label": "Brass"
},
{
"x": 203,
"y": 65,
"z": 84,
"label": "Brick red"
},
{
"x": 29,
"y": 172,
"z": 214,
"label": "Bright cerulean"
},
{
"x": 102,
"y": 255,
"z": 0,
"label": "Bright green"
},
{
"x": 191,
"y": 148,
"z": 228,
"label": "Bright lavender"
},
{
"x": 195,
"y": 33,
"z": 72,
"label": "Bright maroon"
},
{
"x": 255,
"y": 0,
"z": 127,
"label": "Bright pink"
},
{
"x": 8,
"y": 232,
"z": 222,
"label": "Bright turquoise"
},
{
"x": 209,
"y": 159,
"z": 232,
"label": "Bright ube"
},
{
"x": 244,
"y": 187,
"z": 255,
"label": "Brilliant lavender"
},
{
"x": 255,
"y": 85,
"z": 163,
"label": "Brilliant rose"
},
{
"x": 251,
"y": 96,
"z": 127,
"label": "Brink pink"
},
{
"x": 0,
"y": 66,
"z": 37,
"label": "British racing green"
},
{
"x": 205,
"y": 127,
"z": 50,
"label": "Bronze"
},
{
"x": 150,
"y": 75,
"z": 0,
"label": "Traditional Brown"
},
{
"x": 165,
"y": 42,
"z": 42,
"label": "Brown"
},
{
"x": 255,
"y": 193,
"z": 204,
"label": "Bubble gum"
},
{
"x": 231,
"y": 254,
"z": 255,
"label": "Bubbles"
},
{
"x": 240,
"y": 220,
"z": 130,
"label": "Buff"
},
{
"x": 72,
"y": 6,
"z": 7,
"label": "Bulgarian rose"
},
{
"x": 128,
"y": 0,
"z": 32,
"label": "Burgundy"
},
{
"x": 222,
"y": 184,
"z": 135,
"label": "Burlywood"
},
{
"x": 204,
"y": 85,
"z": 0,
"label": "Burnt orange"
},
{
"x": 233,
"y": 116,
"z": 81,
"label": "Burnt sienna"
},
{
"x": 138,
"y": 51,
"z": 36,
"label": "Burnt umber"
},
{
"x": 189,
"y": 51,
"z": 164,
"label": "Byzantine"
},
{
"x": 112,
"y": 41,
"z": 99,
"label": "Byzantium"
},
{
"x": 83,
"y": 104,
"z": 114,
"label": "Cadet"
},
{
"x": 95,
"y": 158,
"z": 160,
"label": "Cadet blue"
},
{
"x": 145,
"y": 163,
"z": 176,
"label": "Cadet grey"
},
{
"x": 0,
"y": 107,
"z": 60,
"label": "Cadmium green"
},
{
"x": 237,
"y": 135,
"z": 45,
"label": "Cadmium orange"
},
{
"x": 227,
"y": 0,
"z": 34,
"label": "Cadmium red"
},
{
"x": 255,
"y": 246,
"z": 0,
"label": "Cadmium yellow"
},
{
"x": 166,
"y": 123,
"z": 91,
"label": "Café au lait"
},
{
"x": 75,
"y": 54,
"z": 33,
"label": "Café noir"
},
{
"x": 30,
"y": 77,
"z": 43,
"label": "Cal Poly Pomona green"
},
{
"x": 163,
"y": 193,
"z": 173,
"label": "Cambridge Blue"
},
{
"x": 193,
"y": 154,
"z": 107,
"label": "Camel"
},
{
"x": 120,
"y": 134,
"z": 107,
"label": "Camouflage green"
},
{
"x": 255,
"y": 239,
"z": 0,
"label": "Canary yellow"
},
{
"x": 255,
"y": 8,
"z": 0,
"label": "Candy apple red"
},
{
"x": 228,
"y": 113,
"z": 122,
"label": "Candy pink"
},
{
"x": 0,
"y": 191,
"z": 255,
"label": "Capri"
},
{
"x": 89,
"y": 39,
"z": 32,
"label": "Caput mortuum"
},
{
"x": 196,
"y": 30,
"z": 58,
"label": "Cardinal"
},
{
"x": 0,
"y": 204,
"z": 153,
"label": "Caribbean green"
},
{
"x": 255,
"y": 0,
"z": 64,
"label": "Carmine"
},
{
"x": 235,
"y": 76,
"z": 66,
"label": "Carmine pink"
},
{
"x": 255,
"y": 0,
"z": 56,
"label": "Carmine red"
},
{
"x": 255,
"y": 166,
"z": 201,
"label": "Carnation pink"
},
{
"x": 179,
"y": 27,
"z": 27,
"label": "Carnelian"
},
{
"x": 153,
"y": 186,
"z": 221,
"label": "Carolina blue"
},
{
"x": 237,
"y": 145,
"z": 33,
"label": "Carrot orange"
},
{
"x": 172,
"y": 225,
"z": 175,
"label": "Celadon"
},
{
"x": 178,
"y": 255,
"z": 255,
"label": "Celeste"
},
{
"x": 73,
"y": 151,
"z": 208,
"label": "Celestial blue"
},
{
"x": 222,
"y": 49,
"z": 99,
"label": "Cerise"
},
{
"x": 236,
"y": 59,
"z": 131,
"label": "Cerise pink"
},
{
"x": 0,
"y": 123,
"z": 167,
"label": "Cerulean"
},
{
"x": 42,
"y": 82,
"z": 190,
"label": "Cerulean blue"
},
{
"x": 0,
"y": 122,
"z": 165,
"label": "CG Blue"
},
{
"x": 224,
"y": 60,
"z": 49,
"label": "CG Red"
},
{
"x": 160,
"y": 120,
"z": 90,
"label": "Chamoisee"
},
{
"x": 250,
"y": 214,
"z": 165,
"label": "Champagne"
},
{
"x": 54,
"y": 69,
"z": 79,
"label": "Charcoal"
},
{
"x": 223,
"y": 255,
"z": 0,
"label": "Traditional Chartreuse"
},
{
"x": 127,
"y": 255,
"z": 0,
"label": "Chartreuse"
},
{
"x": 255,
"y": 183,
"z": 197,
"label": "Cherry blossom pink"
},
{
"x": 205,
"y": 92,
"z": 92,
"label": "Chestnut"
},
{
"x": 123,
"y": 63,
"z": 0,
"label": "Traditional Chocolate"
},
{
"x": 210,
"y": 105,
"z": 30,
"label": "Chocolate"
},
{
"x": 255,
"y": 167,
"z": 0,
"label": "Chrome yellow"
},
{
"x": 152,
"y": 129,
"z": 123,
"label": "Cinereous"
},
{
"x": 227,
"y": 66,
"z": 52,
"label": "Cinnabar"
},
{
"x": 210,
"y": 105,
"z": 30,
"label": "Cinnamon"
},
{
"x": 228,
"y": 208,
"z": 10,
"label": "Citrine"
},
{
"x": 251,
"y": 204,
"z": 231,
"label": "Classic rose"
},
{
"x": 0,
"y": 71,
"z": 171,
"label": "Cobalt"
},
{
"x": 210,
"y": 105,
"z": 30,
"label": "Cocoa brown"
},
{
"x": 111,
"y": 78,
"z": 55,
"label": "Coffee"
},
{
"x": 155,
"y": 221,
"z": 255,
"label": "Columbia blue"
},
{
"x": 0,
"y": 46,
"z": 99,
"label": "Cool black"
},
{
"x": 140,
"y": 146,
"z": 172,
"label": "Cool grey"
},
{
"x": 184,
"y": 115,
"z": 51,
"label": "Copper"
},
{
"x": 153,
"y": 102,
"z": 102,
"label": "Copper rose"
},
{
"x": 255,
"y": 56,
"z": 0,
"label": "Coquelicot"
},
{
"x": 255,
"y": 127,
"z": 80,
"label": "Coral"
},
{
"x": 248,
"y": 131,
"z": 121,
"label": "Coral pink"
},
{
"x": 255,
"y": 64,
"z": 64,
"label": "Coral red"
},
{
"x": 137,
"y": 63,
"z": 69,
"label": "Cordovan"
},
{
"x": 251,
"y": 236,
"z": 93,
"label": "Corn"
},
{
"x": 179,
"y": 27,
"z": 27,
"label": "Cornell Red"
},
{
"x": 100,
"y": 149,
"z": 237,
"label": "Cornflower blue"
},
{
"x": 255,
"y": 248,
"z": 220,
"label": "Cornsilk"
},
{
"x": 255,
"y": 248,
"z": 231,
"label": "Cosmic latte"
},
{
"x": 255,
"y": 188,
"z": 217,
"label": "Cotton candy"
},
{
"x": 255,
"y": 253,
"z": 208,
"label": "Cream"
},
{
"x": 220,
"y": 20,
"z": 60,
"label": "Crimson"
},
{
"x": 190,
"y": 0,
"z": 50,
"label": "Crimson glory"
},
{
"x": 0,
"y": 255,
"z": 255,
"label": "Cyan"
},
{
"x": 0,
"y": 183,
"z": 235,
"label": "Cyan"
},
{
"x": 255,
"y": 255,
"z": 49,
"label": "Daffodil"
},
{
"x": 240,
"y": 225,
"z": 48,
"label": "Dandelion"
},
{
"x": 0,
"y": 0,
"z": 139,
"label": "Dark blue"
},
{
"x": 101,
"y": 67,
"z": 33,
"label": "Dark brown"
},
{
"x": 93,
"y": 57,
"z": 84,
"label": "Dark byzantium"
},
{
"x": 164,
"y": 0,
"z": 0,
"label": "Dark candy apple red"
},
{
"x": 8,
"y": 69,
"z": 126,
"label": "Dark cerulean"
},
{
"x": 152,
"y": 105,
"z": 96,
"label": "Dark chestnut"
},
{
"x": 205,
"y": 91,
"z": 69,
"label": "Dark coral"
},
{
"x": 0,
"y": 139,
"z": 139,
"label": "Dark cyan"
},
{
"x": 83,
"y": 104,
"z": 120,
"label": "Dark electric blue"
},
{
"x": 184,
"y": 134,
"z": 11,
"label": "Dark goldenrod"
},
{
"x": 169,
"y": 169,
"z": 169,
"label": "Dark gray"
},
{
"x": 1,
"y": 50,
"z": 32,
"label": "Dark green"
},
{
"x": 26,
"y": 36,
"z": 33,
"label": "Dark jungle green"
},
{
"x": 189,
"y": 183,
"z": 107,
"label": "Dark khaki"
},
{
"x": 72,
"y": 60,
"z": 50,
"label": "Dark lava"
},
{
"x": 115,
"y": 79,
"z": 150,
"label": "Dark lavender"
},
{
"x": 139,
"y": 0,
"z": 139,
"label": "Dark magenta"
},
{
"x": 0,
"y": 51,
"z": 102,
"label": "Dark midnight blue"
},
{
"x": 85,
"y": 107,
"z": 47,
"label": "Dark olive green"
},
{
"x": 255,
"y": 140,
"z": 0,
"label": "Dark orange"
},
{
"x": 153,
"y": 50,
"z": 204,
"label": "Dark orchid"
},
{
"x": 119,
"y": 158,
"z": 203,
"label": "Dark pastel blue"
},
{
"x": 3,
"y": 192,
"z": 60,
"label": "Dark pastel green"
},
{
"x": 150,
"y": 111,
"z": 214,
"label": "Dark pastel purple"
},
{
"x": 194,
"y": 59,
"z": 34,
"label": "Dark pastel red"
},
{
"x": 231,
"y": 84,
"z": 128,
"label": "Dark pink"
},
{
"x": 0,
"y": 51,
"z": 153,
"label": "Dark powder blue"
},
{
"x": 135,
"y": 38,
"z": 87,
"label": "Dark raspberry"
},
{
"x": 139,
"y": 0,
"z": 0,
"label": "Dark red"
},
{
"x": 233,
"y": 150,
"z": 122,
"label": "Dark salmon"
},
{
"x": 86,
"y": 3,
"z": 25,
"label": "Dark scarlet"
},
{
"x": 143,
"y": 188,
"z": 143,
"label": "Dark sea green"
},
{
"x": 60,
"y": 20,
"z": 20,
"label": "Dark sienna"
},
{
"x": 72,
"y": 61,
"z": 139,
"label": "Dark slate blue"
},
{
"x": 47,
"y": 79,
"z": 79,
"label": "Dark slate gray"
},
{
"x": 23,
"y": 114,
"z": 69,
"label": "Dark spring green"
},
{
"x": 145,
"y": 129,
"z": 81,
"label": "Dark tan"
},
{
"x": 255,
"y": 168,
"z": 18,
"label": "Dark tangerine"
},
{
"x": 72,
"y": 60,
"z": 50,
"label": "Dark taupe"
},
{
"x": 204,
"y": 78,
"z": 92,
"label": "Dark terra cotta"
},
{
"x": 0,
"y": 206,
"z": 209,
"label": "Dark turquoise"
},
{
"x": 148,
"y": 0,
"z": 211,
"label": "Dark violet"
},
{
"x": 0,
"y": 105,
"z": 62,
"label": "Dartmouth green"
},
{
"x": 85,
"y": 85,
"z": 85,
"label": "Davy's grey"
},
{
"x": 215,
"y": 10,
"z": 83,
"label": "Debian red"
},
{
"x": 169,
"y": 32,
"z": 62,
"label": "Deep carmine"
},
{
"x": 239,
"y": 48,
"z": 56,
"label": "Deep carmine pink"
},
{
"x": 233,
"y": 105,
"z": 44,
"label": "Deep carrot orange"
},
{
"x": 218,
"y": 50,
"z": 135,
"label": "Deep cerise"
},
{
"x": 250,
"y": 214,
"z": 165,
"label": "Deep champagne"
},
{
"x": 185,
"y": 78,
"z": 72,
"label": "Deep chestnut"
},
{
"x": 112,
"y": 66,
"z": 65,
"label": "Deep coffee"
},
{
"x": 193,
"y": 84,
"z": 193,
"label": "Deep fuchsia"
},
{
"x": 0,
"y": 75,
"z": 73,
"label": "Deep jungle green"
},
{
"x": 153,
"y": 85,
"z": 187,
"label": "Deep lilac"
},
{
"x": 204,
"y": 0,
"z": 204,
"label": "Deep magenta"
},
{
"x": 255,
"y": 203,
"z": 164,
"label": "Deep peach"
},
{
"x": 255,
"y": 20,
"z": 147,
"label": "Deep pink"
},
{
"x": 255,
"y": 153,
"z": 51,
"label": "Deep saffron"
},
{
"x": 0,
"y": 191,
"z": 255,
"label": "Deep sky blue"
},
{
"x": 21,
"y": 96,
"z": 189,
"label": "Denim"
},
{
"x": 193,
"y": 154,
"z": 107,
"label": "Desert"
},
{
"x": 237,
"y": 201,
"z": 175,
"label": "Desert sand"
},
{
"x": 105,
"y": 105,
"z": 105,
"label": "Dim gray"
},
{
"x": 30,
"y": 144,
"z": 255,
"label": "Dodger blue"
},
{
"x": 215,
"y": 24,
"z": 104,
"label": "Dogwood rose"
},
{
"x": 133,
"y": 187,
"z": 101,
"label": "Dollar bill"
},
{
"x": 150,
"y": 113,
"z": 23,
"label": "Drab"
},
{
"x": 0,
"y": 0,
"z": 156,
"label": "Duke blue"
},
{
"x": 225,
"y": 169,
"z": 95,
"label": "Earth yellow"
},
{
"x": 194,
"y": 178,
"z": 128,
"label": "Ecru"
},
{
"x": 97,
"y": 64,
"z": 81,
"label": "Eggplant"
},
{
"x": 240,
"y": 234,
"z": 214,
"label": "Eggshell"
},
{
"x": 16,
"y": 52,
"z": 166,
"label": "Egyptian blue"
},
{
"x": 125,
"y": 249,
"z": 255,
"label": "Electric blue"
},
{
"x": 255,
"y": 0,
"z": 63,
"label": "Electric crimson"
},
{
"x": 0,
"y": 255,
"z": 255,
"label": "Electric cyan"
},
{
"x": 0,
"y": 255,
"z": 0,
"label": "Electric green"
},
{
"x": 111,
"y": 0,
"z": 255,
"label": "Electric indigo"
},
{
"x": 244,
"y": 187,
"z": 255,
"label": "Electric lavender"
},
{
"x": 204,
"y": 255,
"z": 0,
"label": "Electric lime"
},
{
"x": 191,
"y": 0,
"z": 255,
"label": "Electric purple"
},
{
"x": 63,
"y": 0,
"z": 255,
"label": "Electric ultramarine"
},
{
"x": 143,
"y": 0,
"z": 255,
"label": "Electric violet"
},
{
"x": 255,
"y": 255,
"z": 0,
"label": "Electric yellow"
},
{
"x": 80,
"y": 200,
"z": 120,
"label": "Emerald"
},
{
"x": 150,
"y": 200,
"z": 162,
"label": "Eton blue"
},
{
"x": 193,
"y": 154,
"z": 107,
"label": "Fallow"
},
{
"x": 128,
"y": 24,
"z": 24,
"label": "Falu red"
},
{
"x": 181,
"y": 51,
"z": 137,
"label": "Fandango"
},
{
"x": 244,
"y": 0,
"z": 161,
"label": "Fashion fuchsia"
},
{
"x": 229,
"y": 170,
"z": 112,
"label": "Fawn"
},
{
"x": 77,
"y": 93,
"z": 83,
"label": "Feldgrau"
},
{
"x": 79,
"y": 121,
"z": 66,
"label": "Fern green"
},
{
"x": 255,
"y": 40,
"z": 0,
"label": "Ferrari Red"
},
{
"x": 108,
"y": 84,
"z": 30,
"label": "Field drab"
},
{
"x": 178,
"y": 34,
"z": 34,
"label": "Firebrick"
},
{
"x": 206,
"y": 32,
"z": 41,
"label": "Fire engine red"
},
{
"x": 226,
"y": 88,
"z": 34,
"label": "Flame"
},
{
"x": 252,
"y": 142,
"z": 172,
"label": "Flamingo pink"
},
{
"x": 247,
"y": 233,
"z": 142,
"label": "Flavescent"
},
{
"x": 238,
"y": 220,
"z": 130,
"label": "Flax"
},
{
"x": 255,
"y": 250,
"z": 240,
"label": "Floral white"
},
{
"x": 255,
"y": 191,
"z": 0,
"label": "Fluorescent orange"
},
{
"x": 255,
"y": 20,
"z": 147,
"label": "Fluorescent pink"
},
{
"x": 204,
"y": 255,
"z": 0,
"label": "Fluorescent yellow"
},
{
"x": 255,
"y": 0,
"z": 79,
"label": "Folly"
},
{
"x": 1,
"y": 68,
"z": 33,
"label": "Traditional Forest green"
},
{
"x": 34,
"y": 139,
"z": 34,
"label": "Forest green"
},
{
"x": 166,
"y": 123,
"z": 91,
"label": "French beige"
},
{
"x": 0,
"y": 114,
"z": 187,
"label": "French blue"
},
{
"x": 134,
"y": 96,
"z": 142,
"label": "French lilac"
},
{
"x": 246,
"y": 74,
"z": 138,
"label": "French rose"
},
{
"x": 255,
"y": 0,
"z": 255,
"label": "Fuchsia"
},
{
"x": 255,
"y": 119,
"z": 255,
"label": "Fuchsia pink"
},
{
"x": 228,
"y": 132,
"z": 0,
"label": "Fulvous"
},
{
"x": 204,
"y": 102,
"z": 102,
"label": "Fuzzy Wuzzy"
},
{
"x": 220,
"y": 220,
"z": 220,
"label": "Gainsboro"
},
{
"x": 228,
"y": 155,
"z": 15,
"label": "Gamboge"
},
{
"x": 248,
"y": 248,
"z": 255,
"label": "Ghost white"
},
{
"x": 176,
"y": 101,
"z": 0,
"label": "Ginger"
},
{
"x": 96,
"y": 130,
"z": 182,
"label": "Glaucous"
},
{
"x": 230,
"y": 232,
"z": 250,
"label": "Glitter"
},
{
"x": 212,
"y": 175,
"z": 55,
"label": "Metallic Gold"
},
{
"x": 255,
"y": 215,
"z": 0,
"label": "Gold"
},
{
"x": 153,
"y": 101,
"z": 21,
"label": "Golden brown"
},
{
"x": 252,
"y": 194,
"z": 0,
"label": "Golden poppy"
},
{
"x": 255,
"y": 223,
"z": 0,
"label": "Golden yellow"
},
{
"x": 218,
"y": 165,
"z": 32,
"label": "Goldenrod"
},
{
"x": 168,
"y": 228,
"z": 160,
"label": "Granny Smith Apple"
},
{
"x": 128,
"y": 128,
"z": 128,
"label": "Gray"
},
{
"x": 127,
"y": 127,
"z": 127,
"label": "Gray"
},
{
"x": 190,
"y": 190,
"z": 190,
"label": "Gray"
},
{
"x": 70,
"y": 89,
"z": 69,
"label": "Gray-asparagus"
},
{
"x": 0,
"y": 255,
"z": 0,
"label": "Green"
},
{
"x": 0,
"y": 128,
"z": 0,
"label": "Green"
},
{
"x": 0,
"y": 168,
"z": 119,
"label": "Green"
},
{
"x": 0,
"y": 159,
"z": 107,
"label": "Green"
},
{
"x": 0,
"y": 165,
"z": 80,
"label": "Green"
},
{
"x": 102,
"y": 176,
"z": 50,
"label": "Green"
},
{
"x": 173,
"y": 255,
"z": 47,
"label": "Green-yellow"
},
{
"x": 169,
"y": 154,
"z": 134,
"label": "Grullo"
},
{
"x": 0,
"y": 255,
"z": 127,
"label": "Guppie green"
},
{
"x": 102,
"y": 56,
"z": 84,
"label": "Halaya ube"
},
{
"x": 68,
"y": 108,
"z": 207,
"label": "Han blue"
},
{
"x": 82,
"y": 24,
"z": 250,
"label": "Han purple"
},
{
"x": 233,
"y": 214,
"z": 107,
"label": "Hansa yellow"
},
{
"x": 63,
"y": 255,
"z": 0,
"label": "Harlequin"
},
{
"x": 201,
"y": 0,
"z": 22,
"label": "Harvard crimson"
},
{
"x": 218,
"y": 145,
"z": 0,
"label": "Harvest Gold"
},
{
"x": 128,
"y": 128,
"z": 0,
"label": "Heart Gold"
},
{
"x": 223,
"y": 115,
"z": 255,
"label": "Heliotrope"
},
{
"x": 244,
"y": 0,
"z": 161,
"label": "Hollywood cerise"
},
{
"x": 240,
"y": 255,
"z": 240,
"label": "Honeydew"
},
{
"x": 0,
"y": 112,
"z": 0,
"label": "Hooker's green"
},
{
"x": 255,
"y": 29,
"z": 206,
"label": "Hot magenta"
},
{
"x": 255,
"y": 105,
"z": 180,
"label": "Hot pink"
},
{
"x": 53,
"y": 94,
"z": 59,
"label": "Hunter green"
},
{
"x": 252,
"y": 247,
"z": 94,
"label": "Icterine"
},
{
"x": 178,
"y": 236,
"z": 93,
"label": "Inchworm"
},
{
"x": 19,
"y": 136,
"z": 8,
"label": "India green"
},
{
"x": 205,
"y": 92,
"z": 92,
"label": "Indian red"
},
{
"x": 227,
"y": 168,
"z": 87,
"label": "Indian yellow"
},
{
"x": 0,
"y": 65,
"z": 106,
"label": "Indigo Dye"
},
{
"x": 75,
"y": 0,
"z": 130,
"label": "Indigo"
},
{
"x": 0,
"y": 47,
"z": 167,
"label": "International Klein Blue"
},
{
"x": 255,
"y": 79,
"z": 0,
"label": "International orange"
},
{
"x": 90,
"y": 79,
"z": 207,
"label": "Iris"
},
{
"x": 244,
"y": 240,
"z": 236,
"label": "Isabelline"
},
{
"x": 0,
"y": 144,
"z": 0,
"label": "Islamic green"
},
{
"x": 255,
"y": 255,
"z": 240,
"label": "Ivory"
},
{
"x": 0,
"y": 168,
"z": 107,
"label": "Jade"
},
{
"x": 248,
"y": 222,
"z": 126,
"label": "Jasmine"
},
{
"x": 215,
"y": 59,
"z": 62,
"label": "Jasper"
},
{
"x": 165,
"y": 11,
"z": 94,
"label": "Jazzberry jam"
},
{
"x": 250,
"y": 218,
"z": 94,
"label": "Jonquil"
},
{
"x": 189,
"y": 218,
"z": 87,
"label": "June bud"
},
{
"x": 41,
"y": 171,
"z": 135,
"label": "Jungle green"
},
{
"x": 76,
"y": 187,
"z": 23,
"label": "Kelly green"
},
{
"x": 195,
"y": 176,
"z": 145,
"label": "Khaki"
},
{
"x": 240,
"y": 230,
"z": 140,
"label": "Light khaki"
},
{
"x": 232,
"y": 0,
"z": 13,
"label": "KU Crimson"
},
{
"x": 8,
"y": 120,
"z": 48,
"label": "La Salle Green"
},
{
"x": 214,
"y": 202,
"z": 221,
"label": "Languid lavender"
},
{
"x": 38,
"y": 97,
"z": 156,
"label": "Lapis lazuli"
},
{
"x": 254,
"y": 254,
"z": 34,
"label": "Laser Lemon"
},
{
"x": 169,
"y": 186,
"z": 157,
"label": "Laurel green"
},
{
"x": 207,
"y": 16,
"z": 32,
"label": "Lava"
},
{
"x": 181,
"y": 126,
"z": 220,
"label": "Floral Lavender"
},
{
"x": 230,
"y": 230,
"z": 250,
"label": "Lavender"
},
{
"x": 204,
"y": 204,
"z": 255,
"label": "Lavender blue"
},
{
"x": 255,
"y": 240,
"z": 245,
"label": "Lavender blush"
},
{
"x": 196,
"y": 195,
"z": 208,
"label": "Lavender gray"
},
{
"x": 148,
"y": 87,
"z": 235,
"label": "Lavender indigo"
},
{
"x": 238,
"y": 130,
"z": 238,
"label": "Lavender magenta"
},
{
"x": 230,
"y": 230,
"z": 250,
"label": "Lavender mist"
},
{
"x": 251,
"y": 174,
"z": 210,
"label": "Lavender pink"
},
{
"x": 150,
"y": 123,
"z": 182,
"label": "Lavender purple"
},
{
"x": 251,
"y": 160,
"z": 227,
"label": "Lavender rose"
},
{
"x": 124,
"y": 252,
"z": 0,
"label": "Lawn green"
},
{
"x": 255,
"y": 247,
"z": 0,
"label": "Lemon"
},
{
"x": 255,
"y": 250,
"z": 205,
"label": "Lemon chiffon"
},
{
"x": 253,
"y": 213,
"z": 177,
"label": "Light apricot"
},
{
"x": 173,
"y": 216,
"z": 230,
"label": "Light blue"
},
{
"x": 181,
"y": 101,
"z": 29,
"label": "Light brown"
},
{
"x": 230,
"y": 103,
"z": 113,
"label": "Light carmine pink"
},
{
"x": 240,
"y": 128,
"z": 128,
"label": "Light coral"
},
{
"x": 147,
"y": 204,
"z": 234,
"label": "Light cornflower blue"
},
{
"x": 245,
"y": 105,
"z": 145,
"label": "Light Crimson"
},
{
"x": 224,
"y": 255,
"z": 255,
"label": "Light cyan"
},
{
"x": 249,
"y": 132,
"z": 239,
"label": "Light fuchsia pink"
},
{
"x": 250,
"y": 250,
"z": 210,
"label": "Light goldenrod yellow"
},
{
"x": 211,
"y": 211,
"z": 211,
"label": "Light gray"
},
{
"x": 144,
"y": 238,
"z": 144,
"label": "Light green"
},
{
"x": 240,
"y": 230,
"z": 140,
"label": "Light khaki"
},
{
"x": 177,
"y": 156,
"z": 217,
"label": "Light pastel purple"
},
{
"x": 255,
"y": 182,
"z": 193,
"label": "Light pink"
},
{
"x": 255,
"y": 160,
"z": 122,
"label": "Light salmon"
},
{
"x": 255,
"y": 153,
"z": 153,
"label": "Light salmon pink"
},
{
"x": 32,
"y": 178,
"z": 170,
"label": "Light sea green"
},
{
"x": 135,
"y": 206,
"z": 250,
"label": "Light sky blue"
},
{
"x": 119,
"y": 136,
"z": 153,
"label": "Light slate gray"
},
{
"x": 179,
"y": 139,
"z": 109,
"label": "Light taupe"
},
{
"x": 230,
"y": 143,
"z": 172,
"label": "Light Thulian pink"
},
{
"x": 255,
"y": 255,
"z": 237,
"label": "Light yellow"
},
{
"x": 200,
"y": 162,
"z": 200,
"label": "Lilac"
},
{
"x": 191,
"y": 255,
"z": 0,
"label": "Lime"
},
{
"x": 0,
"y": 255,
"z": 0,
"label": "Lime"
},
{
"x": 50,
"y": 205,
"z": 50,
"label": "Lime green"
},
{
"x": 25,
"y": 89,
"z": 5,
"label": "Lincoln green"
},
{
"x": 250,
"y": 240,
"z": 230,
"label": "Linen"
},
{
"x": 193,
"y": 154,
"z": 107,
"label": "Lion"
},
{
"x": 83,
"y": 75,
"z": 79,
"label": "Liver"
},
{
"x": 230,
"y": 32,
"z": 32,
"label": "Lust"
},
{
"x": 255,
"y": 0,
"z": 255,
"label": "Magenta"
},
{
"x": 202,
"y": 31,
"z": 123,
"label": "Magenta Dye"
},
{
"x": 255,
"y": 0,
"z": 144,
"label": "Magenta"
},
{
"x": 170,
"y": 240,
"z": 209,
"label": "Magic mint"
},
{
"x": 248,
"y": 244,
"z": 255,
"label": "Magnolia"
},
{
"x": 192,
"y": 64,
"z": 0,
"label": "Mahogany"
},
{
"x": 251,
"y": 236,
"z": 93,
"label": "Maize"
},
{
"x": 96,
"y": 80,
"z": 220,
"label": "Majorelle Blue"
},
{
"x": 11,
"y": 218,
"z": 81,
"label": "Malachite"
},
{
"x": 151,
"y": 154,
"z": 170,
"label": "Manatee"
},
{
"x": 255,
"y": 130,
"z": 67,
"label": "Mango Tango"
},
{
"x": 116,
"y": 195,
"z": 101,
"label": "Mantis"
},
{
"x": 128,
"y": 0,
"z": 0,
"label": "Maroon"
},
{
"x": 176,
"y": 48,
"z": 96,
"label": "Maroon"
},
{
"x": 224,
"y": 176,
"z": 255,
"label": "Mauve"
},
{
"x": 145,
"y": 95,
"z": 109,
"label": "Mauve taupe"
},
{
"x": 239,
"y": 152,
"z": 170,
"label": "Mauvelous"
},
{
"x": 115,
"y": 194,
"z": 251,
"label": "Maya blue"
},
{
"x": 229,
"y": 183,
"z": 59,
"label": "Meat brown"
},
{
"x": 102,
"y": 221,
"z": 170,
"label": "Medium aquamarine"
},
{
"x": 0,
"y": 0,
"z": 205,
"label": "Medium blue"
},
{
"x": 226,
"y": 6,
"z": 44,
"label": "Medium candy apple red"
},
{
"x": 175,
"y": 64,
"z": 53,
"label": "Medium carmine"
},
{
"x": 243,
"y": 229,
"z": 171,
"label": "Medium champagne"
},
{
"x": 3,
"y": 80,
"z": 150,
"label": "Medium electric blue"
},
{
"x": 28,
"y": 53,
"z": 45,
"label": "Medium jungle green"
},
{
"x": 221,
"y": 160,
"z": 221,
"label": "Medium lavender magenta"
},
{
"x": 186,
"y": 85,
"z": 211,
"label": "Medium orchid"
},
{
"x": 0,
"y": 103,
"z": 165,
"label": "Medium Persian blue"
},
{
"x": 147,
"y": 112,
"z": 219,
"label": "Medium purple"
},
{
"x": 187,
"y": 51,
"z": 133,
"label": "Medium red-violet"
},
{
"x": 60,
"y": 179,
"z": 113,
"label": "Medium sea green"
},
{
"x": 123,
"y": 104,
"z": 238,
"label": "Medium slate blue"
},
{
"x": 201,
"y": 220,
"z": 135,
"label": "Medium spring bud"
},
{
"x": 0,
"y": 250,
"z": 154,
"label": "Medium spring green"
},
{
"x": 103,
"y": 76,
"z": 71,
"label": "Medium taupe"
},
{
"x": 0,
"y": 84,
"z": 180,
"label": "Medium teal blue"
},
{
"x": 72,
"y": 209,
"z": 204,
"label": "Medium turquoise"
},
{
"x": 199,
"y": 21,
"z": 133,
"label": "Medium violet-red"
},
{
"x": 253,
"y": 188,
"z": 180,
"label": "Melon"
},
{
"x": 25,
"y": 25,
"z": 112,
"label": "Midnight blue"
},
{
"x": 0,
"y": 73,
"z": 83,
"label": "Eagle Green"
},
{
"x": 255,
"y": 196,
"z": 12,
"label": "Mikado yellow"
},
{
"x": 62,
"y": 180,
"z": 137,
"label": "Mint"
},
{
"x": 245,
"y": 255,
"z": 250,
"label": "Mint cream"
},
{
"x": 152,
"y": 255,
"z": 152,
"label": "Mint green"
},
{
"x": 255,
"y": 228,
"z": 225,
"label": "Misty rose"
},
{
"x": 250,
"y": 235,
"z": 215,
"label": "Moccasin"
},
{
"x": 150,
"y": 113,
"z": 23,
"label": "Mode beige"
},
{
"x": 115,
"y": 169,
"z": 194,
"label": "Moonstone blue"
},
{
"x": 174,
"y": 12,
"z": 0,
"label": "Mordant red 19"
},
{
"x": 173,
"y": 223,
"z": 173,
"label": "Moss green"
},
{
"x": 48,
"y": 186,
"z": 143,
"label": "Mountain Meadow"
},
{
"x": 153,
"y": 122,
"z": 141,
"label": "Mountbatten pink"
},
{
"x": 197,
"y": 75,
"z": 140,
"label": "Mulberry"
},
{
"x": 242,
"y": 243,
"z": 244,
"label": "Munsell"
},
{
"x": 255,
"y": 219,
"z": 88,
"label": "Mustard"
},
{
"x": 33,
"y": 66,
"z": 30,
"label": "Myrtle"
},
{
"x": 24,
"y": 69,
"z": 59,
"label": "MSU Green"
},
{
"x": 246,
"y": 173,
"z": 198,
"label": "Nadeshiko pink"
},
{
"x": 42,
"y": 128,
"z": 0,
"label": "Napier green"
},
{
"x": 250,
"y": 218,
"z": 94,
"label": "Naples yellow"
},
{
"x": 255,
"y": 222,
"z": 173,
"label": "Navajo white"
},
{
"x": 0,
"y": 0,
"z": 128,
"label": "Navy blue"
},
{
"x": 255,
"y": 163,
"z": 67,
"label": "Neon Carrot"
},
{
"x": 254,
"y": 89,
"z": 194,
"label": "Neon fuchsia"
},
{
"x": 57,
"y": 255,
"z": 20,
"label": "Neon green"
},
{
"x": 164,
"y": 221,
"z": 237,
"label": "Non-photo blue"
},
{
"x": 5,
"y": 144,
"z": 51,
"label": "North Texas Green"
},
{
"x": 0,
"y": 119,
"z": 190,
"label": "Ocean Boat Blue"
},
{
"x": 204,
"y": 119,
"z": 34,
"label": "Ochre"
},
{
"x": 0,
"y": 128,
"z": 0,
"label": "Office green"
},
{
"x": 207,
"y": 181,
"z": 59,
"label": "Old gold"
},
{
"x": 253,
"y": 245,
"z": 230,
"label": "Old lace"
},
{
"x": 121,
"y": 104,
"z": 120,
"label": "Old lavender"
},
{
"x": 103,
"y": 49,
"z": 71,
"label": "Old mauve"
},
{
"x": 192,
"y": 128,
"z": 129,
"label": "Old rose"
},
{
"x": 128,
"y": 128,
"z": 0,
"label": "Olive"
},
{
"x": 107,
"y": 142,
"z": 35,
"label": "Olive Drab #3"
},
{
"x": 60,
"y": 52,
"z": 31,
"label": "Olive Drab #7"
},
{
"x": 154,
"y": 185,
"z": 115,
"label": "Olivine"
},
{
"x": 15,
"y": 15,
"z": 15,
"label": "Onyx"
},
{
"x": 183,
"y": 132,
"z": 167,
"label": "Opera mauve"
},
{
"x": 255,
"y": 127,
"z": 0,
"label": "Orange"
},
{
"x": 251,
"y": 153,
"z": 2,
"label": "Orange"
},
{
"x": 255,
"y": 165,
"z": 0,
"label": "Orange"
},
{
"x": 255,
"y": 159,
"z": 0,
"label": "Orange peel"
},
{
"x": 255,
"y": 69,
"z": 0,
"label": "Orange-red"
},
{
"x": 218,
"y": 112,
"z": 214,
"label": "Orchid"
},
{
"x": 101,
"y": 67,
"z": 33,
"label": "Otter brown"
},
{
"x": 65,
"y": 74,
"z": 76,
"label": "Outer Space"
},
{
"x": 255,
"y": 110,
"z": 74,
"label": "Outrageous Orange"
},
{
"x": 0,
"y": 33,
"z": 71,
"label": "Oxford Blue"
},
{
"x": 153,
"y": 0,
"z": 0,
"label": "OU Crimson Red"
},
{
"x": 0,
"y": 102,
"z": 0,
"label": "Pakistan green"
},
{
"x": 39,
"y": 59,
"z": 226,
"label": "Palatinate blue"
},
{
"x": 104,
"y": 40,
"z": 96,
"label": "Palatinate purple"
},
{
"x": 188,
"y": 212,
"z": 230,
"label": "Pale aqua"
},
{
"x": 175,
"y": 238,
"z": 238,
"label": "Pale blue"
},
{
"x": 152,
"y": 118,
"z": 84,
"label": "Pale brown"
},
{
"x": 175,
"y": 64,
"z": 53,
"label": "Pale carmine"
},
{
"x": 155,
"y": 196,
"z": 226,
"label": "Pale cerulean"
},
{
"x": 221,
"y": 173,
"z": 175,
"label": "Pale chestnut"
},
{
"x": 218,
"y": 138,
"z": 103,
"label": "Pale copper"
},
{
"x": 171,
"y": 205,
"z": 239,
"label": "Pale cornflower blue"
},
{
"x": 230,
"y": 190,
"z": 138,
"label": "Pale gold"
},
{
"x": 238,
"y": 232,
"z": 170,
"label": "Pale goldenrod"
},
{
"x": 152,
"y": 251,
"z": 152,
"label": "Pale green"
},
{
"x": 220,
"y": 208,
"z": 255,
"label": "Pale lavender"
},
{
"x": 249,
"y": 132,
"z": 229,
"label": "Pale magenta"
},
{
"x": 250,
"y": 218,
"z": 221,
"label": "Pale pink"
},
{
"x": 221,
"y": 160,
"z": 221,
"label": "Pale plum"
},
{
"x": 219,
"y": 112,
"z": 147,
"label": "Pale red-violet"
},
{
"x": 150,
"y": 222,
"z": 209,
"label": "Pale robin egg blue"
},
{
"x": 201,
"y": 192,
"z": 187,
"label": "Pale silver"
},
{
"x": 236,
"y": 235,
"z": 189,
"label": "Pale spring bud"
},
{
"x": 188,
"y": 152,
"z": 126,
"label": "Pale taupe"
},
{
"x": 219,
"y": 112,
"z": 147,
"label": "Pale violet-red"
},
{
"x": 120,
"y": 24,
"z": 74,
"label": "Pansy purple"
},
{
"x": 255,
"y": 239,
"z": 213,
"label": "Papaya whip"
},
{
"x": 80,
"y": 200,
"z": 120,
"label": "Paris Green"
},
{
"x": 174,
"y": 198,
"z": 207,
"label": "Pastel blue"
},
{
"x": 131,
"y": 105,
"z": 83,
"label": "Pastel brown"
},
{
"x": 207,
"y": 207,
"z": 196,
"label": "Pastel gray"
},
{
"x": 119,
"y": 221,
"z": 119,
"label": "Pastel green"
},
{
"x": 244,
"y": 154,
"z": 194,
"label": "Pastel magenta"
},
{
"x": 255,
"y": 179,
"z": 71,
"label": "Pastel orange"
},
{
"x": 255,
"y": 209,
"z": 220,
"label": "Pastel pink"
},
{
"x": 179,
"y": 158,
"z": 181,
"label": "Pastel purple"
},
{
"x": 255,
"y": 105,
"z": 97,
"label": "Pastel red"
},
{
"x": 203,
"y": 153,
"z": 201,
"label": "Pastel violet"
},
{
"x": 253,
"y": 253,
"z": 150,
"label": "Pastel yellow"
},
{
"x": 128,
"y": 0,
"z": 128,
"label": "Patriarch"
},
{
"x": 64,
"y": 64,
"z": 79,
"label": "Payne's grey"
},
{
"x": 255,
"y": 229,
"z": 180,
"label": "Peach"
},
{
"x": 255,
"y": 204,
"z": 153,
"label": "Peach-orange"
},
{
"x": 255,
"y": 218,
"z": 185,
"label": "Peach puff"
},
{
"x": 250,
"y": 223,
"z": 173,
"label": "Peach-yellow"
},
{
"x": 209,
"y": 226,
"z": 49,
"label": "Pear"
},
{
"x": 234,
"y": 224,
"z": 200,
"label": "Pearl"
},
{
"x": 136,
"y": 216,
"z": 192,
"label": "Pearl Aqua"
},
{
"x": 230,
"y": 226,
"z": 0,
"label": "Peridot"
},
{
"x": 204,
"y": 204,
"z": 255,
"label": "Periwinkle"
},
{
"x": 28,
"y": 57,
"z": 187,
"label": "Persian blue"
},
{
"x": 0,
"y": 166,
"z": 147,
"label": "Persian green"
},
{
"x": 50,
"y": 18,
"z": 122,
"label": "Persian indigo"
},
{
"x": 217,
"y": 144,
"z": 88,
"label": "Persian orange"
},
{
"x": 247,
"y": 127,
"z": 190,
"label": "Persian pink"
},
{
"x": 112,
"y": 28,
"z": 28,
"label": "Persian plum"
},
{
"x": 204,
"y": 51,
"z": 51,
"label": "Persian red"
},
{
"x": 254,
"y": 40,
"z": 162,
"label": "Persian rose"
},
{
"x": 223,
"y": 0,
"z": 255,
"label": "Phlox"
},
{
"x": 0,
"y": 15,
"z": 137,
"label": "Phthalo blue"
},
{
"x": 18,
"y": 53,
"z": 36,
"label": "Phthalo green"
},
{
"x": 253,
"y": 221,
"z": 230,
"label": "Piggy pink"
},
{
"x": 1,
"y": 121,
"z": 111,
"label": "Pine green"
},
{
"x": 255,
"y": 192,
"z": 203,
"label": "Pink"
},
{
"x": 255,
"y": 153,
"z": 102,
"label": "Pink-orange"
},
{
"x": 231,
"y": 172,
"z": 207,
"label": "Pink pearl"
},
{
"x": 247,
"y": 143,
"z": 167,
"label": "Pink Sherbet"
},
{
"x": 147,
"y": 197,
"z": 114,
"label": "Pistachio"
},
{
"x": 229,
"y": 228,
"z": 226,
"label": "Platinum"
},
{
"x": 142,
"y": 69,
"z": 133,
"label": "Traditional Plum"
},
{
"x": 221,
"y": 160,
"z": 221,
"label": "Plum"
},
{
"x": 255,
"y": 90,
"z": 54,
"label": "Portland Orange"
},
{
"x": 176,
"y": 224,
"z": 230,
"label": "Powder blue"
},
{
"x": 255,
"y": 143,
"z": 0,
"label": "Princeton orange"
},
{
"x": 112,
"y": 28,
"z": 28,
"label": "Prune"
},
{
"x": 0,
"y": 49,
"z": 83,
"label": "Prussian blue"
},
{
"x": 223,
"y": 0,
"z": 255,
"label": "Psychedelic purple"
},
{
"x": 204,
"y": 136,
"z": 153,
"label": "Puce"
},
{
"x": 255,
"y": 117,
"z": 24,
"label": "Pumpkin"
},
{
"x": 128,
"y": 0,
"z": 128,
"label": "Purple"
},
{
"x": 159,
"y": 0,
"z": 197,
"label": "Purple"
},
{
"x": 160,
"y": 32,
"z": 240,
"label": "Purple"
},
{
"x": 105,
"y": 53,
"z": 156,
"label": "Purple Heart"
},
{
"x": 150,
"y": 120,
"z": 182,
"label": "Purple mountain majesty"
},
{
"x": 254,
"y": 78,
"z": 218,
"label": "Purple pizzazz"
},
{
"x": 80,
"y": 64,
"z": 77,
"label": "Purple taupe"
},
{
"x": 81,
"y": 72,
"z": 79,
"label": "Quartz"
},
{
"x": 255,
"y": 53,
"z": 94,
"label": "Radical Red"
},
{
"x": 227,
"y": 11,
"z": 93,
"label": "Raspberry"
},
{
"x": 145,
"y": 95,
"z": 109,
"label": "Raspberry glace"
},
{
"x": 226,
"y": 80,
"z": 152,
"label": "Raspberry pink"
},
{
"x": 179,
"y": 68,
"z": 108,
"label": "Raspberry rose"
},
{
"x": 130,
"y": 102,
"z": 68,
"label": "Raw umber"
},
{
"x": 255,
"y": 51,
"z": 204,
"label": "Razzle dazzle rose"
},
{
"x": 227,
"y": 37,
"z": 107,
"label": "Razzmatazz"
},
{
"x": 255,
"y": 0,
"z": 0,
"label": "Red"
},
{
"x": 242,
"y": 0,
"z": 60,
"label": "Red"
},
{
"x": 196,
"y": 2,
"z": 51,
"label": "Red"
},
{
"x": 237,
"y": 28,
"z": 36,
"label": "Red"
},
{
"x": 254,
"y": 39,
"z": 18,
"label": "Red"
},
{
"x": 165,
"y": 42,
"z": 42,
"label": "Red-brown"
},
{
"x": 199,
"y": 21,
"z": 133,
"label": "Red-violet"
},
{
"x": 171,
"y": 78,
"z": 82,
"label": "Redwood"
},
{
"x": 0,
"y": 64,
"z": 64,
"label": "Rich black"
},
{
"x": 241,
"y": 167,
"z": 254,
"label": "Rich brilliant lavender"
},
{
"x": 215,
"y": 0,
"z": 64,
"label": "Rich carmine"
},
{
"x": 8,
"y": 146,
"z": 208,
"label": "Rich electric blue"
},
{
"x": 167,
"y": 107,
"z": 207,
"label": "Rich lavender"
},
{
"x": 182,
"y": 102,
"z": 210,
"label": "Rich lilac"
},
{
"x": 176,
"y": 48,
"z": 96,
"label": "Rich maroon"
},
{
"x": 65,
"y": 72,
"z": 51,
"label": "Rifle green"
},
{
"x": 0,
"y": 204,
"z": 204,
"label": "Robin egg blue"
},
{
"x": 255,
"y": 0,
"z": 127,
"label": "Rose"
},
{
"x": 249,
"y": 66,
"z": 158,
"label": "Rose bonbon"
},
{
"x": 103,
"y": 72,
"z": 70,
"label": "Rose ebony"
},
{
"x": 183,
"y": 110,
"z": 121,
"label": "Rose gold"
},
{
"x": 227,
"y": 38,
"z": 54,
"label": "Rose madder"
},
{
"x": 255,
"y": 102,
"z": 204,
"label": "Rose pink"
},
{
"x": 170,
"y": 152,
"z": 169,
"label": "Rose quartz"
},
{
"x": 144,
"y": 93,
"z": 93,
"label": "Rose taupe"
},
{
"x": 171,
"y": 78,
"z": 82,
"label": "Rose vale"
},
{
"x": 101,
"y": 0,
"z": 11,
"label": "Rosewood"
},
{
"x": 212,
"y": 0,
"z": 0,
"label": "Rosso corsa"
},
{
"x": 188,
"y": 143,
"z": 143,
"label": "Rosy brown"
},
{
"x": 0,
"y": 56,
"z": 168,
"label": "Royal azure"
},
{
"x": 0,
"y": 35,
"z": 102,
"label": "Traditional Royal blue"
},
{
"x": 65,
"y": 105,
"z": 225,
"label": "Royal blue"
},
{
"x": 202,
"y": 44,
"z": 146,
"label": "Royal fuchsia"
},
{
"x": 120,
"y": 81,
"z": 169,
"label": "Royal purple"
},
{
"x": 224,
"y": 17,
"z": 95,
"label": "Ruby"
},
{
"x": 255,
"y": 0,
"z": 40,
"label": "Ruddy"
},
{
"x": 187,
"y": 101,
"z": 40,
"label": "Ruddy brown"
},
{
"x": 225,
"y": 142,
"z": 150,
"label": "Ruddy pink"
},
{
"x": 168,
"y": 28,
"z": 7,
"label": "Rufous"
},
{
"x": 128,
"y": 70,
"z": 27,
"label": "Russet"
},
{
"x": 183,
"y": 65,
"z": 14,
"label": "Rust"
},
{
"x": 0,
"y": 86,
"z": 63,
"label": "Sacramento State green"
},
{
"x": 139,
"y": 69,
"z": 19,
"label": "Saddle brown"
},
{
"x": 255,
"y": 103,
"z": 0,
"label": "Safety orange (blaze orange)"
},
{
"x": 244,
"y": 196,
"z": 48,
"label": "Saffron"
},
{
"x": 35,
"y": 41,
"z": 122,
"label": "St. Patrick's blue"
},
{
"x": 255,
"y": 140,
"z": 105,
"label": "Salmon"
},
{
"x": 255,
"y": 145,
"z": 164,
"label": "Salmon pink"
},
{
"x": 194,
"y": 178,
"z": 128,
"label": "Sand"
},
{
"x": 150,
"y": 113,
"z": 23,
"label": "Sand dune"
},
{
"x": 236,
"y": 213,
"z": 64,
"label": "Sandstorm"
},
{
"x": 244,
"y": 164,
"z": 96,
"label": "Sandy brown"
},
{
"x": 150,
"y": 113,
"z": 23,
"label": "Sandy taupe"
},
{
"x": 80,
"y": 125,
"z": 42,
"label": "Sap green"
},
{
"x": 15,
"y": 82,
"z": 186,
"label": "Sapphire"
},
{
"x": 203,
"y": 161,
"z": 53,
"label": "Satin sheen gold"
},
{
"x": 255,
"y": 36,
"z": 0,
"label": "Scarlet"
},
{
"x": 255,
"y": 36,
"z": 0,
"label": "Crayola Scarlet"
},
{
"x": 255,
"y": 216,
"z": 0,
"label": "School bus yellow"
},
{
"x": 118,
"y": 255,
"z": 122,
"label": "Screamin' Green"
},
{
"x": 46,
"y": 139,
"z": 87,
"label": "Sea green"
},
{
"x": 50,
"y": 20,
"z": 20,
"label": "Seal brown"
},
{
"x": 255,
"y": 245,
"z": 238,
"label": "Seashell"
},
{
"x": 255,
"y": 186,
"z": 0,
"label": "Selective yellow"
},
{
"x": 112,
"y": 66,
"z": 20,
"label": "Sepia"
},
{
"x": 138,
"y": 121,
"z": 93,
"label": "Shadow"
},
{
"x": 0,
"y": 158,
"z": 96,
"label": "Shamrock green"
},
{
"x": 252,
"y": 15,
"z": 192,
"label": "Shocking pink"
},
{
"x": 136,
"y": 45,
"z": 23,
"label": "Sienna"
},
{
"x": 192,
"y": 192,
"z": 192,
"label": "Silver"
},
{
"x": 203,
"y": 65,
"z": 11,
"label": "Sinopia"
},
{
"x": 0,
"y": 116,
"z": 116,
"label": "Skobeloff"
},
{
"x": 135,
"y": 206,
"z": 235,
"label": "Sky blue"
},
{
"x": 207,
"y": 113,
"z": 175,
"label": "Sky magenta"
},
{
"x": 106,
"y": 90,
"z": 205,
"label": "Slate blue"
},
{
"x": 112,
"y": 128,
"z": 144,
"label": "Slate gray"
},
{
"x": 0,
"y": 51,
"z": 153,
"label": "Smalt (Dark powder blue)"
},
{
"x": 147,
"y": 61,
"z": 65,
"label": "Smokey topaz"
},
{
"x": 16,
"y": 12,
"z": 8,
"label": "Smoky black"
},
{
"x": 255,
"y": 250,
"z": 250,
"label": "Snow"
},
{
"x": 15,
"y": 192,
"z": 252,
"label": "Spiro Disco Ball"
},
{
"x": 254,
"y": 253,
"z": 255,
"label": "Splashed white"
},
{
"x": 167,
"y": 252,
"z": 0,
"label": "Spring bud"
},
{
"x": 0,
"y": 255,
"z": 127,
"label": "Spring green"
},
{
"x": 70,
"y": 130,
"z": 180,
"label": "Steel blue"
},
{
"x": 250,
"y": 218,
"z": 94,
"label": "Stil de grain yellow"
},
{
"x": 153,
"y": 0,
"z": 0,
"label": "Stizza"
},
{
"x": 228,
"y": 217,
"z": 111,
"label": "Straw"
},
{
"x": 255,
"y": 204,
"z": 51,
"label": "Sunglow"
},
{
"x": 250,
"y": 214,
"z": 165,
"label": "Sunset"
},
{
"x": 210,
"y": 180,
"z": 140,
"label": "Tan"
},
{
"x": 249,
"y": 77,
"z": 0,
"label": "Tangelo"
},
{
"x": 242,
"y": 133,
"z": 0,
"label": "Tangerine"
},
{
"x": 255,
"y": 204,
"z": 0,
"label": "Tangerine yellow"
},
{
"x": 72,
"y": 60,
"z": 50,
"label": "Taupe"
},
{
"x": 139,
"y": 133,
"z": 137,
"label": "Taupe gray"
},
{
"x": 208,
"y": 240,
"z": 192,
"label": "Tea green"
},
{
"x": 248,
"y": 131,
"z": 121,
"label": "Orange Tea rose"
},
{
"x": 244,
"y": 194,
"z": 194,
"label": "Tea rose"
},
{
"x": 0,
"y": 128,
"z": 128,
"label": "Teal"
},
{
"x": 54,
"y": 117,
"z": 136,
"label": "Teal blue"
},
{
"x": 0,
"y": 109,
"z": 91,
"label": "Teal green"
},
{
"x": 205,
"y": 87,
"z": 0,
"label": "Tenné (Tawny)"
},
{
"x": 226,
"y": 114,
"z": 91,
"label": "Terra cotta"
},
{
"x": 216,
"y": 191,
"z": 216,
"label": "Thistle"
},
{
"x": 222,
"y": 111,
"z": 161,
"label": "Thulian pink"
},
{
"x": 252,
"y": 137,
"z": 172,
"label": "Tickle Me Pink"
},
{
"x": 10,
"y": 186,
"z": 181,
"label": "Tiffany Blue"
},
{
"x": 224,
"y": 141,
"z": 60,
"label": "Tiger's eye"
},
{
"x": 219,
"y": 215,
"z": 210,
"label": "Timberwolf"
},
{
"x": 238,
"y": 230,
"z": 0,
"label": "Titanium yellow"
},
{
"x": 255,
"y": 99,
"z": 71,
"label": "Tomato"
},
{
"x": 116,
"y": 108,
"z": 192,
"label": "Toolbox"
},
{
"x": 255,
"y": 200,
"z": 124,
"label": "Topaz"
},
{
"x": 253,
"y": 14,
"z": 53,
"label": "Tractor red"
},
{
"x": 128,
"y": 128,
"z": 128,
"label": "Trolley Grey"
},
{
"x": 0,
"y": 117,
"z": 94,
"label": "Tropical rain forest"
},
{
"x": 0,
"y": 115,
"z": 207,
"label": "True Blue"
},
{
"x": 65,
"y": 125,
"z": 193,
"label": "Tufts Blue"
},
{
"x": 222,
"y": 170,
"z": 136,
"label": "Tumbleweed"
},
{
"x": 181,
"y": 114,
"z": 129,
"label": "Turkish rose"
},
{
"x": 48,
"y": 213,
"z": 200,
"label": "Turquoise"
},
{
"x": 0,
"y": 255,
"z": 239,
"label": "Turquoise blue"
},
{
"x": 160,
"y": 214,
"z": 180,
"label": "Turquoise green"
},
{
"x": 102,
"y": 66,
"z": 77,
"label": "Tuscan red"
},
{
"x": 138,
"y": 73,
"z": 107,
"label": "Twilight lavender"
},
{
"x": 102,
"y": 2,
"z": 60,
"label": "Tyrian purple"
},
{
"x": 0,
"y": 51,
"z": 170,
"label": "UA blue"
},
{
"x": 217,
"y": 0,
"z": 76,
"label": "UA red"
},
{
"x": 136,
"y": 120,
"z": 195,
"label": "Ube"
},
{
"x": 83,
"y": 104,
"z": 149,
"label": "UCLA Blue"
},
{
"x": 255,
"y": 179,
"z": 0,
"label": "UCLA Gold"
},
{
"x": 60,
"y": 208,
"z": 112,
"label": "UFO Green"
},
{
"x": 18,
"y": 10,
"z": 143,
"label": "Ultramarine"
},
{
"x": 65,
"y": 102,
"z": 245,
"label": "Ultramarine blue"
},
{
"x": 255,
"y": 111,
"z": 255,
"label": "Ultra pink"
},
{
"x": 99,
"y": 81,
"z": 71,
"label": "Umber"
},
{
"x": 91,
"y": 146,
"z": 229,
"label": "United Nations blue"
},
{
"x": 183,
"y": 135,
"z": 39,
"label": "University of California Gold"
},
{
"x": 255,
"y": 255,
"z": 102,
"label": "Unmellow Yellow"
},
{
"x": 1,
"y": 68,
"z": 33,
"label": "UP Forest green"
},
{
"x": 123,
"y": 17,
"z": 19,
"label": "UP Maroon"
},
{
"x": 174,
"y": 32,
"z": 41,
"label": "Upsdell red"
},
{
"x": 225,
"y": 173,
"z": 33,
"label": "Urobilin"
},
{
"x": 153,
"y": 0,
"z": 0,
"label": "USC Cardinal"
},
{
"x": 255,
"y": 204,
"z": 0,
"label": "USC Gold"
},
{
"x": 211,
"y": 0,
"z": 63,
"label": "Utah Crimson"
},
{
"x": 243,
"y": 229,
"z": 171,
"label": "Vanilla"
},
{
"x": 197,
"y": 179,
"z": 88,
"label": "Vegas gold"
},
{
"x": 200,
"y": 8,
"z": 21,
"label": "Venetian red"
},
{
"x": 67,
"y": 179,
"z": 174,
"label": "Verdigris"
},
{
"x": 227,
"y": 66,
"z": 52,
"label": "Vermilion"
},
{
"x": 160,
"y": 32,
"z": 240,
"label": "Veronica"
},
{
"x": 143,
"y": 0,
"z": 255,
"label": "Violet"
},
{
"x": 127,
"y": 0,
"z": 255,
"label": "Violet"
},
{
"x": 134,
"y": 1,
"z": 175,
"label": "Violet"
},
{
"x": 238,
"y": 130,
"z": 238,
"label": "Violet"
},
{
"x": 64,
"y": 130,
"z": 109,
"label": "Viridian"
},
{
"x": 146,
"y": 39,
"z": 36,
"label": "Vivid auburn"
},
{
"x": 159,
"y": 29,
"z": 53,
"label": "Vivid burgundy"
},
{
"x": 218,
"y": 29,
"z": 129,
"label": "Vivid cerise"
},
{
"x": 255,
"y": 160,
"z": 137,
"label": "Vivid tangerine"
},
{
"x": 159,
"y": 0,
"z": 255,
"label": "Vivid violet"
},
{
"x": 0,
"y": 66,
"z": 66,
"label": "Warm black"
},
{
"x": 100,
"y": 84,
"z": 82,
"label": "Wenge"
},
{
"x": 245,
"y": 222,
"z": 179,
"label": "Wheat"
},
{
"x": 255,
"y": 255,
"z": 255,
"label": "White"
},
{
"x": 245,
"y": 245,
"z": 245,
"label": "White smoke"
},
{
"x": 162,
"y": 173,
"z": 208,
"label": "Wild blue yonder"
},
{
"x": 255,
"y": 67,
"z": 164,
"label": "Wild Strawberry"
},
{
"x": 252,
"y": 108,
"z": 133,
"label": "Wild Watermelon"
},
{
"x": 114,
"y": 47,
"z": 55,
"label": "Wine"
},
{
"x": 201,
"y": 160,
"z": 220,
"label": "Wisteria"
},
{
"x": 115,
"y": 134,
"z": 120,
"label": "Xanadu"
},
{
"x": 15,
"y": 77,
"z": 146,
"label": "Yale Blue"
},
{
"x": 255,
"y": 255,
"z": 0,
"label": "Yellow"
},
{
"x": 239,
"y": 204,
"z": 0,
"label": "Yellow"
},
{
"x": 255,
"y": 211,
"z": 0,
"label": "Yellow"
},
{
"x": 255,
"y": 239,
"z": 0,
"label": "Yellow"
},
{
"x": 254,
"y": 254,
"z": 51,
"label": "Yellow"
},
{
"x": 154,
"y": 205,
"z": 50,
"label": "Yellow-green"
},
{
"x": 255,
"y": 174,
"z": 66,
"label": "Yellow Orange"
},
{
"x": 0,
"y": 20,
"z": 168,
"label": "Zaffre"
},
{
"x": 44,
"y": 22,
"z": 8,
"label": "Zinnwaldite brown"
}
]
|
adregan/pnger
|
colors/colors.py
|
Python
|
mit
| 59,169
|
[
"Amber"
] |
98055cc4d72545d8c8ae733eaf891f001cedf2ec69341d6fbd65e1a445985aad
|
"""Regression checks for ase.dft.get_distribution_moment."""
from ase.dft import get_distribution_moment
import numpy as np

tol = 1E-8

# Unit Gaussian: area sqrt(2*pi), centre 0, unit second central moment.
grid = np.linspace(-50., 50., 1000)
gauss = np.exp(-grid**2 / 2.)
area, center, mom2 = get_distribution_moment(grid, gauss, (0, 1, 2))
errors = (abs(area - np.sqrt(2. * np.pi)), abs(center), abs(mom2 - 1.))
assert sum(errors) < tol

# Monomials x**n on [-1, 1]: analytic integral is (1 - (-1)**(n+1)) / (n+1).
grid = np.linspace(-1., 1., 100000)
for order in range(9):
    area = get_distribution_moment(grid, grid**order)
    assert abs(area - (1. - (-1.)**(order + 1)) / (order + 1.)) < tol

# Two equal Gaussians peaked at +7 and -5: combined first moment sits at +1.
grid = np.linspace(-50., 50., 100)
double = np.exp(-2. * (grid - 7.)**2 / 10.) + np.exp(-2. * (grid + 5.)**2 / 10.)
center = get_distribution_moment(grid, double, 1)
assert abs(center - 1.) < tol
|
grhawk/ASE
|
tools/ase/test/distmom.py
|
Python
|
gpl-2.0
| 661
|
[
"ASE"
] |
7267918a5170f0deb2220dc0d29cf6a1f5cbb6481efee28c94fb5d9b61d8d529
|
# materials.py is a subroutine of NumBAT that defines Material objects,
# these represent dispersive lossy refractive indices and possess
# methods to interpolate n from tabulated data.
# Copyright (C) 2017 Bjorn Sturmberg, Kokou Dossou.
# NumBAT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import numpy as np
from scipy.interpolate import interp1d
import matplotlib
matplotlib.use('pdf')
import matplotlib.pyplot as plt
import json
import re
# Resolve the bundled material_data directory relative to this file so
# data files load regardless of the caller's working directory.  The
# trailing "" keeps a path separator on the end of data_location.
this_directory = os.path.dirname(os.path.realpath(__file__))
data_location = os.path.join(this_directory, "material_data", "")
class Material(object):
    """ Represents a material with:
            Refractive index []
            Density [kg/m3]
            Stiffness tensor component [Pa]
            Photoelastic tensor component []
            Acoustic loss tensor component [Pa s]

        Tensor components are exposed as individual attributes named
        ``c_ij`` / ``p_ij`` / ``eta_ij`` with Voigt indices i, j in 1..6,
        matching the original interface.
    """

    # Tensor-family attribute prefixes: stiffness, photoelastic, acoustic loss.
    _TENSOR_PREFIXES = ('c', 'p', 'eta')

    def __init__(self, data_file):
        """ Load the named material data file, warning if it is missing.

            Args:
                data_file  (str): name of data file located in
                    NumBAT/backend/material_data (no extension).
        """
        try:
            self.load_data_file(data_file)
        except FileNotFoundError:
            print('Material data file not found.')

    def load_data_file(self, data_file, alt_path=''):
        """
        Load data from json file.

        Args:
            data_file  (str): name of data file located in
                NumBAT/backend/material_data

            alt_path  (str): non standard path to data_file
                (currently unused; kept for interface compatibility)
        """
        with open(data_location + data_file + '.json', 'r') as fin:
            s_in = ''.join(fin.readlines())
            # Strip '//' comments, which are not legal json.
            s_in = re.sub(r'//.*\n', '\n', s_in)
            self._params = json.loads(s_in)
            self.file_name = self._params['file_name']  # Name of this file, will be used as identifier
            self.chemical = self._params['chemical']  # Chemical composition
            self.author = self._params['author']  # Author of data
            self.date = self._params['date']  # Year of data publication/measurement
            self.institution = self._params['institution']  # Source institution
            self.doi = self._params['doi']  # doi or, failing that, the http address
            Re_n = self._params['Re_n']  # Real part of refractive index []
            Im_n = self._params['Im_n']  # Imaginary part of refractive index []
            self.n = (Re_n + 1j*Im_n)  # Complex refractive index []
            self.s = self._params['s']  # Density [kg/m3]
            # Reduced (isotropic) tensor components, always present:
            # c_* [Pa], p_* [], eta_* [Pa s].
            for pre in self._TENSOR_PREFIXES:
                for ij in ('11', '12', '44'):
                    key = '%s_%s' % (pre, ij)
                    setattr(self, key, self._params[key])
            # Full anisotropic 6x6 tensor components, if present.  Any
            # missing key marks the material as isotropic (same fallback
            # behaviour as the original hand-written assignments).
            try:
                for pre in self._TENSOR_PREFIXES:
                    for i in range(1, 7):
                        for j in range(1, 7):
                            key = '%s_%d%d' % (pre, i, j)
                            setattr(self, key, self._params[key])
                self.anisotropic = True
            except KeyError:
                self.anisotropic = False

    def _fill_isotropic_tensor(self, pre):
        """ Populate the full 6x6 Voigt tensor for family ``pre`` from its
            reduced isotropic components (``pre``_11, _12, _44), zeroing
            every component that is zero by isotropic symmetry. """
        zero_keys = ('14', '15', '16', '24', '25', '26',
                     '34', '35', '36',
                     '41', '42', '43', '45', '46',
                     '51', '52', '53', '54', '56',
                     '61', '62', '63', '64', '65')
        for ij in zero_keys:
            setattr(self, '%s_%s' % (pre, ij), 0)
        # Isotropic symmetry relations between the diagonal blocks.
        for ij in ('22', '33'):
            setattr(self, '%s_%s' % (pre, ij), getattr(self, '%s_11' % pre))
        for ij in ('13', '21', '23', '31', '32'):
            setattr(self, '%s_%s' % (pre, ij), getattr(self, '%s_12' % pre))
        for ij in ('55', '66'):
            setattr(self, '%s_%s' % (pre, ij), getattr(self, '%s_44' % pre))

    def rotate_axis(self, theta, rotate_axis, save_rotated_tensors=False):
        """ Rotate crystal axis by theta radians.

            The c, p and eta tensors are rotated IN PLACE (the previous
            docstring incorrectly claimed a Material was returned).

            Args:
                theta  (float): Angle to rotate by in radians.

                rotate_axis  (str): Axis around which to rotate.

            Keyword Args:
                save_rotated_tensors  (bool): Save rotated tensors to csv.
        """
        for pre in self._TENSOR_PREFIXES:
            if self.anisotropic == False:
                # Expand the reduced isotropic representation to a full
                # 6x6 tensor before rotating.
                self._fill_isotropic_tensor(pre)
            tensor = np.array(
                [[getattr(self, '%s_%d%d' % (pre, i, j)) for j in range(1, 7)]
                 for i in range(1, 7)])
            tensor_rotated = rotate_tensor(tensor, theta, rotate_axis)
            # Write the rotated components back onto the named attributes.
            for i in range(1, 7):
                for j in range(1, 7):
                    setattr(self, '%s_%d%d' % (pre, i, j),
                            tensor_rotated[i - 1, j - 1])
            if save_rotated_tensors:
                np.savetxt('rotated_%s_tensor.csv' % pre, tensor_rotated,
                           delimiter=',')
# Array that converts between 4th rank tensors in terms of x,y,z and Voigt notation
# [[xx,xy,xz], [yx,yy,yz], [zx,zy,zz]]
to_Voigt = np.array([[0, 5, 4], [5, 1, 3], [4, 3, 2]])


def rotation_matrix_sum(i, j, k, l, tensor_orig, mat_R):
    """
    Inner loop of rotation matrix summation.

    Contracts the rotation matrix ``mat_R`` four times against the
    Voigt-indexed tensor to give the (i, j, k, l) component of the
    rotated 4th-rank tensor.

    Args:
        i, j, k, l (int): Cartesian indices (0..2) of the target component.
        tensor_orig (array): 6x6 tensor in Voigt notation.
        mat_R (array): 3x3 rotation matrix.
    """
    tensor_prime_comp = 0
    for q in range(3):
        for r in range(3):
            V1 = to_Voigt[q, r]
            for s in range(3):
                for t in range(3):
                    V2 = to_Voigt[s, t]
                    tensor_prime_comp += (mat_R[i, q] * mat_R[j, r]
                                          * mat_R[k, s] * mat_R[l, t]
                                          * tensor_orig[V1, V2])
    return tensor_prime_comp


def rotate_tensor(tensor_orig, theta, rotation_axis):
    """
    Rotate an acoustic material tensor by theta radians around the chosen
    rotation_axis.

    Args:
        tensor_orig (array): 6x6 Voigt-notation tensor to be rotated.
        theta (float): Angle to rotate by in radians.
        rotation_axis (str): Axis around which to rotate:
            'x-axis', 'y-axis' or 'z-axis'.

    Returns:
        array: the rotated 6x6 tensor.

    Raises:
        ValueError: if rotation_axis is not a recognised axis name.
            (Previously an unrecognised name crashed later with an
            obscure NameError on the unbound rotation matrix.)
    """
    if rotation_axis == 'x-axis':
        mat_R = np.array([[1, 0, 0],
                          [0, np.cos(theta), -np.sin(theta)],
                          [0, np.sin(theta), np.cos(theta)]])
    elif rotation_axis == 'y-axis':
        mat_R = np.array([[np.cos(theta), 0, np.sin(theta)],
                          [0, 1, 0],
                          [-np.sin(theta), 0, np.cos(theta)]])
    elif rotation_axis == 'z-axis':
        mat_R = np.array([[np.cos(theta), -np.sin(theta), 0],
                          [np.sin(theta), np.cos(theta), 0],
                          [0, 0, 1]])
    else:
        raise ValueError("Unknown rotation_axis %r; expected 'x-axis', "
                         "'y-axis' or 'z-axis'" % (rotation_axis,))
    tensor_prime = np.zeros((6, 6))
    for i in range(3):
        for j in range(3):
            V1 = to_Voigt[i, j]
            for k in range(3):
                for l in range(3):
                    V2 = to_Voigt[k, l]
                    tensor_prime[V1, V2] = rotation_matrix_sum(
                        i, j, k, l, tensor_orig, mat_R)
    return tensor_prime
def isotropic_stiffness(E, v):
    """
    Calculate the stiffness matrix components of isotropic
    materials, given the two free parameters.

    Ref: www.efunda.com/formulae/solid_mechanics/mat_mechanics/hooke_isotropic.cfm

    Args:
        E  (float): Youngs modulus

        v  (float): Poisson ratio

    Returns:
        tuple: (c_11, c_12, c_44) stiffness components.
    """
    # All three components share the same denominator.
    denominator = (1 + v) * (1 - 2 * v)
    c_11 = E * (1 - v) / denominator
    c_12 = E * (v) / denominator
    c_44 = (E * (1 - 2 * v) / denominator) / 2
    return c_11, c_12, c_44
# Registry of every bundled material, keyed by the json file's basename.
materials_dict = {
    fname[:-5]: Material(fname[:-5])
    for fname in os.listdir(data_location)
    if fname.endswith(".json")
}
|
bjornsturmberg/NumBAT
|
backend/materials.py
|
Python
|
gpl-3.0
| 18,724
|
[
"CRYSTAL"
] |
09fbef764f105529ae1c28be380d42a2d9f19a127bf8fbb4edc3248a264ccb23
|
'''
Given a TableDataSet and an output file name, write a netCDF file.
Created on Feb 28, 2017
@author: cyoung
'''
class NetCDFWriter:
    """Writes a TableDataSet out to a netCDF file."""

    def __init__(self, tableDataSet, outputFile):
        # Keep the source table and target path for use by write().
        self.tableDataSet = tableDataSet
        self.outputFile = outputFile

    def write(self):
        """Serialize the table to the output file (not yet implemented)."""
        pass
|
charles-g-young/Table2NetCDF
|
gov/noaa/gmd/table_2_netcdf/NetCDFWriter.py
|
Python
|
apache-2.0
| 306
|
[
"NetCDF"
] |
03115d283b8962aa72cdec6590a69317a270a0fced101375050cf05f56074fe0
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.

from __future__ import unicode_literals

# NOTE(review): this module previously relied on ``os``, ``np``,
# ``warnings`` and ``shutil`` leaking into the namespace via the star
# import of pymatgen.io.vasp.sets.  They are imported explicitly here so
# the file keeps working if that module ever defines __all__.
import os
import shutil
import tempfile
import unittest
import warnings

import numpy as np

from monty.json import MontyDecoder
from monty.serialization import loadfn

from pymatgen.io.vasp.sets import *
from pymatgen.io.vasp.inputs import Poscar, Kpoints
from pymatgen.core import Specie, Lattice, Structure
from pymatgen.core.surface import SlabGenerator
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.vasp.outputs import Vasprun

# Shared fixture directory holding the POSCAR / relaxation test files.
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
                        'test_files')

dec = MontyDecoder()
class MITMPRelaxSetTest(unittest.TestCase):
    """Tests for the MITRelaxSet / MPRelaxSet VASP input-set generators
    (INCAR, KPOINTS, POSCAR, POTCAR)."""
    @classmethod
    def setUpClass(cls):
        # Point POTCAR lookups at the bundled fixtures if not configured.
        if "PMG_VASP_PSP_DIR" not in os.environ:
            os.environ["PMG_VASP_PSP_DIR"] = test_dir
        filepath = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(filepath)
        cls.structure = poscar.structure
        cls.coords = [[0, 0, 0], [0.75, 0.5, 0.75]]
        cls.lattice = Lattice(
            [[3.8401979337, 0.00, 0.00],
             [1.9200989668, 3.3257101909, 0.00],
             [0.00, -2.2171384943, 3.1355090603]])
        cls.mitset = MITRelaxSet(cls.structure)
        cls.mitset_unsorted = MITRelaxSet(cls.structure, sort_structure=False)
        cls.mpset = MPRelaxSet(cls.structure)
    def setUp(self):
        warnings.simplefilter("ignore")
    def tearDown(self):
        warnings.resetwarnings()
    def test_poscar(self):
        """sort_structure should control the species order in the POSCAR."""
        structure = Structure(self.lattice, ["Fe", "Mn"], self.coords)
        mitparamset = MITRelaxSet(structure, sort_structure=False)
        s_unsorted = mitparamset.poscar.structure
        mitparamset = MITRelaxSet(structure, sort_structure=True)
        s_sorted = mitparamset.poscar.structure
        self.assertEqual(s_unsorted[0].specie.symbol, 'Fe')
        self.assertEqual(s_sorted[0].specie.symbol, 'Mn')
    def test_potcar_symbols(self):
        """POTCAR symbols should follow the (possibly sorted) structure."""
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        coords.append([0.75, 0.25, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        structure = Structure(lattice, ["P", "Fe", "O"], coords)
        mitparamset = MITRelaxSet(structure)
        syms = mitparamset.potcar_symbols
        self.assertEqual(syms, ['Fe', 'P', 'O'])
        paramset = MPRelaxSet(structure, sort_structure=False)
        syms = paramset.potcar_symbols
        self.assertEqual(syms, ['P', 'Fe_pv', 'O'])
    def test_lda_potcar(self):
        """potcar_functional should propagate to the generated POTCAR."""
        structure = Structure(self.lattice, ["P", "Fe"], self.coords)
        p = MITRelaxSet(structure, potcar_functional="LDA").potcar
        self.assertEqual(p.functional, 'LDA')
    def test_nelect(self):
        """NELECT should be computed from the species, with or without
        oxidation-state decorations."""
        coords = [[0] * 3, [0.5] * 3, [0.75] * 3]
        lattice = Lattice.cubic(4)
        s = Structure(lattice, ['Si', 'Si', 'Fe'], coords)
        self.assertAlmostEqual(MITRelaxSet(s).nelect, 16)
        # Check that it works even when oxidation states are present. Was a bug
        # previously.
        s = Structure(lattice, ['Si4+', 'Si4+', 'Fe2+'], coords)
        self.assertAlmostEqual(MITRelaxSet(s).nelect, 16)
        self.assertAlmostEqual(MPRelaxSet(s).nelect, 22)
    def test_get_incar(self):
        """INCAR generation: LDAU(U), MAGMOM, vdW, and NELECT handling
        across MIT/MP parameter sets and user overrides."""
        incar = self.mpset.incar
        self.assertEqual(incar['LDAUU'], [5.3, 0, 0])
        self.assertAlmostEqual(incar['EDIFF'], 0.0012)
        incar = self.mitset.incar
        self.assertEqual(incar['LDAUU'], [4.0, 0, 0])
        self.assertAlmostEqual(incar['EDIFF'], 1e-5)
        si = 14  # atomic number used directly as the species
        coords = list()
        coords.append(np.array([0, 0, 0]))
        coords.append(np.array([0.75, 0.5, 0.75]))
        # Silicon structure for testing.
        latt = Lattice(np.array([[3.8401979337, 0.00, 0.00],
                                 [1.9200989668, 3.3257101909, 0.00],
                                 [0.00, -2.2171384943, 3.1355090603]]))
        struct = Structure(latt, [si, si], coords)
        incar = MPRelaxSet(struct).incar
        self.assertNotIn("LDAU", incar)
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Fe", "Mn"], coords)
        incar = MPRelaxSet(struct).incar
        self.assertNotIn('LDAU', incar)
        # check fluorides
        struct = Structure(lattice, ["Fe", "F"], coords)
        incar = MPRelaxSet(struct).incar
        self.assertEqual(incar['LDAUU'], [5.3, 0])
        self.assertEqual(incar['MAGMOM'], [5, 0.6])
        struct = Structure(lattice, ["Fe", "F"], coords)
        incar = MITRelaxSet(struct).incar
        self.assertEqual(incar['LDAUU'], [4.0, 0])
        # Make sure this works with species.
        struct = Structure(lattice, ["Fe2+", "O2-"], coords)
        incar = MPRelaxSet(struct).incar
        self.assertEqual(incar['LDAUU'], [5.3, 0])
        struct = Structure(lattice, ["Fe", "Mn"], coords,
                           site_properties={'magmom': (5.2, -4.5)})
        incar = MPRelaxSet(struct).incar
        self.assertEqual(incar['MAGMOM'], [-4.5, 5.2])
        incar = MITRelaxSet(struct, sort_structure=False).incar
        self.assertEqual(incar['MAGMOM'], [5.2, -4.5])
        struct = Structure(lattice, [Specie("Fe", 2, {'spin': 4.1}), "Mn"],
                           coords)
        incar = MPRelaxSet(struct).incar
        self.assertEqual(incar['MAGMOM'], [5, 4.1])
        struct = Structure(lattice, ["Mn3+", "Mn4+"], coords)
        incar = MITRelaxSet(struct).incar
        self.assertEqual(incar['MAGMOM'], [4, 3])
        userset = MPRelaxSet(struct,
                             user_incar_settings={
                                 'MAGMOM': {"Fe": 10, "S": -5, "Mn3+": 100}}
                             )
        self.assertEqual(userset.incar['MAGMOM'], [100, 0.6])
        # sulfide vs sulfate test
        coords = list()
        coords.append([0, 0, 0])
        coords.append([0.75, 0.5, 0.75])
        coords.append([0.25, 0.5, 0])
        struct = Structure(lattice, ["Fe", "Fe", "S"], coords)
        incar = MITRelaxSet(struct).incar
        self.assertEqual(incar['LDAUU'], [1.9, 0])
        # Make sure Matproject sulfides are ok.
        self.assertNotIn('LDAUU', MPRelaxSet(struct).incar)
        struct = Structure(lattice, ["Fe", "S", "O"], coords)
        incar = MITRelaxSet(struct).incar
        self.assertEqual(incar['LDAUU'], [4.0, 0, 0])
        # Make sure Matproject sulfates are ok.
        self.assertEqual(MPRelaxSet(struct).incar['LDAUU'], [5.3, 0, 0])
        # test for default LDAUU value
        userset_ldauu_fallback = MPRelaxSet(struct,
                                            user_incar_settings={
                                                'LDAUU': {'Fe': 5.0, 'S': 0}}
                                            )
        self.assertEqual(userset_ldauu_fallback.incar['LDAUU'], [5.0, 0, 0])
        # test that van-der-Waals parameters are parsed correctly
        incar = MITRelaxSet(struct, vdw='optB86b').incar
        self.assertEqual(incar['GGA'], 'Mk')
        self.assertEqual(incar['LUSE_VDW'], True)
        self.assertEqual(incar['PARAM1'], 0.1234)
        # Test that NELECT is updated when a charge is present
        si = 14
        coords = list()
        coords.append(np.array([0, 0, 0]))
        coords.append(np.array([0.75, 0.5, 0.75]))
        # Silicon structure for testing.
        latt = Lattice(np.array([[3.8401979337, 0.00, 0.00],
                                 [1.9200989668, 3.3257101909, 0.00],
                                 [0.00, -2.2171384943, 3.1355090603]]))
        struct = Structure(latt, [si, si], coords,charge=1)
        mpr = MPRelaxSet(struct)
        self.assertEqual(mpr.incar["NELECT"],mpr.nelect+1,"NELECT not properly set for nonzero charge")
    def test_get_kpoints(self):
        """KPOINTS generation: defaults, reciprocal_density overrides, and
        explicit Kpoints objects passed through user_kpoints_settings."""
        kpoints = MPRelaxSet(self.structure).kpoints
        self.assertEqual(kpoints.kpts, [[2, 4, 5]])
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Gamma)
        kpoints = MPRelaxSet(self.structure, user_kpoints_settings={
            "reciprocal_density": 1000}).kpoints
        self.assertEqual(kpoints.kpts, [[6, 10, 13]])
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Gamma)
        kpoints_obj = Kpoints(kpts=[[3, 3, 3]])
        kpoints_return = MPRelaxSet(self.structure,
                                    user_kpoints_settings=kpoints_obj).kpoints
        self.assertEqual(kpoints_return.kpts, [[3, 3, 3]])
        kpoints = self.mitset.kpoints
        self.assertEqual(kpoints.kpts, [[25]])
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Automatic)
        recip_paramset = MPRelaxSet(self.structure, force_gamma=True)
        recip_paramset.kpoints_settings = {"reciprocal_density": 40}
        kpoints = recip_paramset.kpoints
        self.assertEqual(kpoints.kpts, [[2, 4, 5]])
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Gamma)
    def test_all_input(self):
        """ISMEAR should switch with system size in the assembled input."""
        d = self.mitset.all_input
        self.assertEqual(d["INCAR"]["ISMEAR"], -5)
        s = self.structure.copy()
        s.make_supercell(4)
        paramset = MPRelaxSet(s)
        d = paramset.all_input
        self.assertEqual(d["INCAR"]["ISMEAR"], 0)
    def test_as_from_dict(self):
        """Round-trip through as_dict/MontyDecoder should preserve config
        and user settings."""
        mitset = MITRelaxSet(self.structure)
        mpset = MPRelaxSet(self.structure)
        mpuserset = MPRelaxSet(self.structure,
                               user_incar_settings={
                                   'MAGMOM': {"Fe": 10, "S": -5, "Mn3+": 100}}
                               )
        d = mitset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v._config_dict["INCAR"]["LDAUU"]["O"]["Fe"], 4)
        d = mpset.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(v._config_dict["INCAR"]["LDAUU"]["O"]["Fe"], 5.3)
        d = mpuserset.as_dict()
        v = dec.process_decoded(d)
        # self.assertEqual(type(v), MPVaspInputSet)
        self.assertEqual(v.user_incar_settings["MAGMOM"],
                         {"Fe": 10, "S": -5, "Mn3+": 100})
    def test_hubbard_off_and_ediff_override(self):
        """User settings should be able to disable LDAU and override EDIFF."""
        p = MPRelaxSet(self.structure, user_incar_settings={"LDAU": False,
                                                            "EDIFF": 1e-10})
        self.assertNotIn("LDAUU", p.incar)
        self.assertEqual(p.incar["EDIFF"], 1e-10)
    def test_write_input(self):
        """write_input should emit the four VASP files, plus a cif only
        when include_cif is requested; clean up afterwards."""
        self.mitset.write_input(".", make_dir_if_not_present=True)
        for f in ["INCAR", "KPOINTS", "POSCAR", "POTCAR"]:
            self.assertTrue(os.path.exists(f))
        self.assertFalse(os.path.exists("Fe4P4O16.cif"))
        self.mitset.write_input(".", make_dir_if_not_present=True,
                                include_cif=True)
        self.assertTrue(os.path.exists("Fe4P4O16.cif"))
        for f in ["INCAR", "KPOINTS", "POSCAR", "POTCAR", "Fe4P4O16.cif"]:
            os.remove(f)
    def test_user_potcar_settings(self):
        """user_potcar_settings should override the default POTCAR variant."""
        vis = MPRelaxSet(self.structure, user_potcar_settings={"Fe": "Fe"})
        potcar = vis.potcar
        self.assertEqual(potcar.symbols, ["Fe", "P", "O"])
class MPStaticSetTest(PymatgenTest):
    """Tests for MPStaticSet, particularly parameter inheritance from a
    previous calculation directory via from_prev_calc."""
    def setUp(self):
        self.tmp = tempfile.mkdtemp()
        warnings.simplefilter("ignore")
    def test_init(self):
        """from_prev_calc should inherit ENCUT/kpoint style; direct
        construction should not; lepsilon/lcalcpol set their flags."""
        prev_run = os.path.join(test_dir, "relaxation")
        vis = MPStaticSet.from_prev_calc(prev_calc_dir=prev_run)
        self.assertEqual(vis.incar["NSW"], 0)
        # Check that the ENCUT has been inherited.
        self.assertEqual(vis.incar["ENCUT"], 600)
        self.assertEqual(vis.kpoints.style, Kpoints.supported_modes.Monkhorst)
        # Check as from dict.
        vis = MPStaticSet.from_dict(vis.as_dict())
        self.assertEqual(vis.incar["NSW"], 0)
        # Check that the ENCUT has been inherited.
        self.assertEqual(vis.incar["ENCUT"], 600)
        self.assertEqual(vis.kpoints.style, Kpoints.supported_modes.Monkhorst)
        non_prev_vis = MPStaticSet(vis.structure,
                                   user_incar_settings={"LORBIT": 12,
                                                        "LWAVE": True})
        self.assertEqual(non_prev_vis.incar["NSW"], 0)
        # Check that the ENCUT and Kpoints style has NOT been inherited.
        self.assertEqual(non_prev_vis.incar["ENCUT"], 520)
        # Check that user incar settings are applied.
        self.assertEqual(non_prev_vis.incar["LORBIT"], 12)
        self.assertTrue(non_prev_vis.incar["LWAVE"])
        self.assertEqual(non_prev_vis.kpoints.style,
                         Kpoints.supported_modes.Gamma)
        v2 = MPStaticSet.from_dict(non_prev_vis.as_dict())
        self.assertEqual(v2.incar["ENCUT"], 520)
        # Check that user incar settings are applied.
        self.assertEqual(v2.incar["LORBIT"], 12)
        leps_vis = MPStaticSet.from_prev_calc(prev_calc_dir=prev_run,
                                              lepsilon=True)
        self.assertTrue(leps_vis.incar["LEPSILON"])
        self.assertEqual(leps_vis.incar["IBRION"], 8)
        self.assertNotIn("NPAR", leps_vis.incar)
        self.assertNotIn("NSW", leps_vis.incar)
        self.assertEqual(non_prev_vis.kpoints.kpts, [[11, 10, 10]])
        non_prev_vis = MPStaticSet(vis.structure, reciprocal_density=200)
        self.assertEqual(non_prev_vis.kpoints.kpts, [[14, 12, 12]])
        # Check LCALCPOL flag
        lcalcpol_vis = MPStaticSet.from_prev_calc(prev_calc_dir=prev_run,
                                                  lcalcpol=True)
        self.assertTrue(lcalcpol_vis.incar["LCALCPOL"])
    def tearDown(self):
        shutil.rmtree(self.tmp)
        warnings.resetwarnings()
class MPNonSCFSetTest(PymatgenTest):
    """Tests for MPNonSCFSet (band-structure and optics follow-up runs)."""
    def setUp(self):
        self.tmp = tempfile.mkdtemp()
        warnings.simplefilter("ignore")
    def test_init(self):
        """Line-mode set should inherit ENCUT, honour user settings, and
        copy CHGCAR only when copy_chgcar is True."""
        prev_run = os.path.join(test_dir, "relaxation")
        vis = MPNonSCFSet.from_prev_calc(
            prev_calc_dir=prev_run, mode="Line", copy_chgcar=False,
            user_incar_settings={"SIGMA": 0.025})
        self.assertEqual(vis.incar["NSW"], 0)
        # Check that the ENCUT has been inherited.
        self.assertEqual(vis.incar["ENCUT"], 600)
        # Check that the user_incar_settings works
        self.assertEqual(vis.incar["SIGMA"], 0.025)
        self.assertEqual(vis.kpoints.style, Kpoints.supported_modes.Reciprocal)
        # Check as from dict.
        vis = MPNonSCFSet.from_dict(vis.as_dict())
        self.assertEqual(vis.incar["NSW"], 0)
        # Check that the ENCUT has been inherited.
        self.assertEqual(vis.incar["ENCUT"], 600)
        self.assertEqual(vis.kpoints.style, Kpoints.supported_modes.Reciprocal)
        vis.write_input(self.tmp)
        self.assertFalse(os.path.exists(os.path.join(self.tmp, "CHGCAR")))
        vis = MPNonSCFSet.from_prev_calc(prev_calc_dir=prev_run,
                                         mode="Line", copy_chgcar=True)
        vis.write_input(self.tmp)
        self.assertTrue(os.path.exists(os.path.join(self.tmp, "CHGCAR")))
    def test_optics(self):
        """Uniform-mode optics run should enable LOPTICS."""
        prev_run = os.path.join(test_dir, "relaxation")
        vis = MPNonSCFSet.from_prev_calc(
            prev_calc_dir=prev_run, copy_chgcar=False, optics=True,
            mode="Uniform", nedos=2001)
        self.assertEqual(vis.incar["NSW"], 0)
        # Check that the ENCUT has been inherited.
        self.assertEqual(vis.incar["ENCUT"], 600)
        self.assertTrue(vis.incar["LOPTICS"])
        self.assertEqual(vis.kpoints.style, Kpoints.supported_modes.Reciprocal)
    def tearDown(self):
        shutil.rmtree(self.tmp)
        warnings.resetwarnings()
class MagmomLdauTest(PymatgenTest):
    """Tests for MAGMOM / LDAU site decoration carried over from prior runs."""

    def setUp(self):
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_structure_from_prev_run(self):
        vrun = Vasprun(os.path.join(test_dir, "vasprun.xml.magmom_ldau"))
        structure = vrun.final_structure
        poscar = Poscar(structure)
        structure_decorated = get_structure_from_prev_run(vrun, sym_prec=0)
        ldau_ans = {'LDAUU': [5.3, 0.0], 'LDAUL': [2, 0], 'LDAUJ': [0.0, 0.0]}
        magmom_ans = [5.0, 5.0, 5.0, 5.0, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6]
        ldau_dict = {}
        # Rebuild the per-species LDAU lists from the site decorations,
        # ordered by the POSCAR species ordering.
        for key in ('LDAUU', 'LDAUJ', 'LDAUL'):
            if hasattr(structure_decorated[0], key.lower()):
                m = dict(
                    [(site.specie.symbol, getattr(site, key.lower()))
                     for site in structure_decorated])
                ldau_dict[key] = [m[sym] for sym in poscar.site_symbols]
        magmom = [site.magmom for site in structure_decorated]
        self.assertEqual(ldau_dict, ldau_ans)
        self.assertEqual(magmom, magmom_ans)

    def test_ln_magmom(self):
        # Package-shipped default MAGMOM table.
        YAML_PATH = os.path.join(os.path.dirname(__file__), "../VASPIncarBase.yaml")
        MAGMOM_SETTING = loadfn(YAML_PATH)["MAGMOM"]
        structure = Structure.from_file(os.path.join(test_dir, "La4Fe4O12.cif"))
        structure.add_oxidation_state_by_element({"La": +3, "Fe": +3, "O": -2})
        for ion in MAGMOM_SETTING:
            s = structure.copy()
            s.replace_species({"La3+": ion})
            vis = MPRelaxSet(s)
            # Species order in the POSCAR is not fixed: build the expected
            # MAGMOM list according to where Fe ends up in the comment line.
            fe_pos = vis.poscar.comment.index("Fe")
            if fe_pos == 0:
                magmom_ans = [5] * 4 + [MAGMOM_SETTING[ion]] * 4 + [0.6] * 12
            else:
                magmom_ans = [MAGMOM_SETTING[ion]] * 4 + [5] * 4 + [0.6] * 12
            self.assertEqual(vis.incar["MAGMOM"], magmom_ans)
class MITMDSetTest(unittest.TestCase):
    """Tests for MITMDSet, the MIT-project molecular-dynamics input set."""

    def setUp(self):
        filepath = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(filepath)
        self.struct = poscar.structure
        # 300 K -> 1200 K temperature ramp over 10000 MD steps.
        self.mitmdparam = MITMDSet(self.struct, 300, 1200, 10000)
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_params(self):
        param = self.mitmdparam
        syms = param.potcar_symbols
        self.assertEqual(syms, ['Fe', 'P', 'O'])
        incar = param.incar
        # MD sets drop the +U parameters and use a looser EDIFF.
        self.assertNotIn("LDAUU", incar)
        self.assertAlmostEqual(incar['EDIFF'], 1e-5)
        kpoints = param.kpoints
        # Gamma-only sampling.
        self.assertEqual(kpoints.kpts, [(1, 1, 1)])
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Gamma)

    def test_as_from_dict(self):
        # Round-trip through the MSONable dict representation.
        d = self.mitmdparam.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(type(v), MITMDSet)
        self.assertEqual(v._config_dict["INCAR"]["TEBEG"], 300)
class MVLNPTMDSetTest(unittest.TestCase):
    """Tests for MVLNPTMDSet (NPT-ensemble molecular dynamics input set)."""

    def setUp(self):
        file_path = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(file_path)
        self.struct = poscar.structure
        self.mvl_npt_set = MVLNPTMDSet(self.struct, start_temp=0,
                                       end_temp=300, nsteps=1000)
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_incar(self):
        npt_set = self.mvl_npt_set
        syms = npt_set.potcar_symbols
        self.assertEqual(syms, ['Fe', 'P', 'O'])
        incar = npt_set.incar
        self.assertNotIn("LDAUU", incar)
        self.assertAlmostEqual(incar['EDIFF'], 1e-5)
        # Langevin thermostat parameters for the MDALGO=3 run.
        self.assertEqual(incar["LANGEVIN_GAMMA_L"], 1)
        self.assertEqual(incar["LANGEVIN_GAMMA"], [10, 10, 10])
        # ENCUT is pinned to 1.5x the largest ENMAX among the POTCARs.
        enmax = max([npt_set.potcar[i].keywords["ENMAX"] for i in
                     range(self.struct.ntypesp)])
        self.assertAlmostEqual(incar["ENCUT"], 1.5 * enmax)
        self.assertEqual(incar["IALGO"], 48)
        self.assertEqual(incar["ISIF"], 3)
        self.assertEqual(incar["MDALGO"], 3)
        self.assertEqual(incar["SMASS"], 0)
        kpoints = npt_set.kpoints
        self.assertEqual(kpoints.kpts, [(1, 1, 1)])
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Gamma)

    def test_as_from_dict(self):
        d = self.mvl_npt_set.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(type(v), MVLNPTMDSet)
        self.assertEqual(v._config_dict["INCAR"]["NSW"], 1000)
class MITNEBSetTest(unittest.TestCase):
    """Tests for MITNEBSet (nudged-elastic-band input set)."""

    def setUp(self):
        # Two endpoint structures differing by one site, interpolated into a
        # 4-image path (2 endpoints + 2 interior images).
        c1 = [[0.5] * 3, [0.9] * 3]
        c2 = [[0.5] * 3, [0.9, 0.1, 0.1]]
        s1 = Structure(Lattice.cubic(5), ['Si', 'Si'], c1)
        s2 = Structure(Lattice.cubic(5), ['Si', 'Si'], c2)
        structs = []
        for s in s1.interpolate(s2, 3, pbc=True):
            structs.append(Structure.from_sites(s.sites, to_unit_cell=True))
        self.structures = structs
        self.vis = MITNEBSet(self.structures)
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_potcar_symbols(self):
        syms = self.vis.potcar_symbols
        self.assertEqual(syms, ['Si'])

    def test_incar(self):
        incar = self.vis.incar
        self.assertNotIn("LDAUU", incar)
        self.assertAlmostEqual(incar['EDIFF'], 0.00001)

    def test_kpoints(self):
        kpoints = self.vis.kpoints
        self.assertEqual(kpoints.kpts, [[25]])
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Automatic)

    def test_as_from_dict(self):
        d = self.vis.as_dict()
        v = dec.process_decoded(d)
        # IMAGES counts only the interior images, not the endpoints.
        self.assertEqual(v._config_dict["INCAR"]["IMAGES"], 2)

    def test_write_input(self):
        self.vis.write_input(".", write_cif=True,
                             write_endpoint_inputs=True,
                             write_path_cif=True)
        self.assertTrue(os.path.exists("INCAR"))
        self.assertTrue(os.path.exists("KPOINTS"))
        self.assertTrue(os.path.exists("POTCAR"))
        self.assertTrue(os.path.exists("00/POSCAR"))
        self.assertTrue(os.path.exists("01/POSCAR"))
        self.assertTrue(os.path.exists("02/POSCAR"))
        self.assertTrue(os.path.exists("03/POSCAR"))
        self.assertFalse(os.path.exists("04/POSCAR"))
        self.assertTrue(os.path.exists("00/INCAR"))
        self.assertTrue(os.path.exists("path.cif"))
        # Clean up everything written into the working directory.
        for d in ["00", "01", "02", "03"]:
            shutil.rmtree(d)
        for f in ["INCAR", "KPOINTS", "POTCAR", "path.cif"]:
            os.remove(f)
class MPSOCSetTest(PymatgenTest):
    """Tests for MPSOCSet (spin-orbit-coupling input set)."""

    def setUp(self):
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_from_prev_calc(self):
        prev_run = os.path.join(test_dir, "fe_monomer")
        vis = MPSOCSet.from_prev_calc(prev_calc_dir=prev_run, magmom=[3],
                                      saxis=(1, 0, 0),
                                      user_incar_settings={"SIGMA": 0.025})
        # SOC run: symmetry off, fixed charge density, LSORBIT on.
        self.assertEqual(vis.incar["ISYM"], -1)
        self.assertTrue(vis.incar["LSORBIT"])
        self.assertEqual(vis.incar["ICHARG"], 11)
        self.assertEqual(vis.incar["SAXIS"], [1, 0, 0])
        # The scalar magmom=[3] is expanded to a 3-vector per site.
        self.assertEqual(vis.incar["MAGMOM"], [[0, 0, 3]])
        self.assertEqual(vis.incar['SIGMA'], 0.025)
class MVLSlabSetTest(PymatgenTest):
    """Tests for MVLSlabSet in bulk, slab and auto-dipole configurations."""

    def setUp(self):
        if "PMG_VASP_PSP_DIR" not in os.environ:
            os.environ["PMG_VASP_PSP_DIR"] = test_dir
        s = PymatgenTest.get_structure("Li2O")
        gen = SlabGenerator(s, (1, 0, 0), 10, 10)
        self.slab = gen.get_slab()
        self.bulk = self.slab.oriented_unit_cell

        vis_bulk = MVLSlabSet(self.bulk, bulk=True)
        vis = MVLSlabSet(self.slab)
        vis_dipole = MVLSlabSet(self.slab, auto_dipole=True)

        self.d_bulk = vis_bulk.all_input
        self.d_slab = vis.all_input
        self.d_dipole = vis_dipole.all_input
        self.vis = vis
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_user_incar_settings(self):
        # Make sure user incar settings properly override AMIX.
        si = self.get_structure('Si')
        vis = MVLSlabSet(si, user_incar_settings={"AMIX": 0.1})
        self.assertEqual(vis.incar["AMIX"], 0.1)

    def test_bulk(self):
        incar_bulk = self.d_bulk["INCAR"]
        poscar_bulk = self.d_bulk["POSCAR"]
        self.assertEqual(incar_bulk["ISIF"], 3)
        self.assertEqual(poscar_bulk.structure.formula,
                         self.bulk.formula)

    def test_slab(self):
        incar_slab = self.d_slab["INCAR"]
        poscar_slab = self.d_slab["POSCAR"]
        potcar_slab = self.d_slab["POTCAR"]

        self.assertEqual(incar_slab["AMIN"], 0.01)
        self.assertEqual(incar_slab["AMIX"], 0.2)
        self.assertEqual(incar_slab["BMIX"], 0.001)
        self.assertEqual(incar_slab["NELMIN"], 8)
        # No volume relaxation during slab calculations
        self.assertEqual(incar_slab["ISIF"], 2)
        self.assertEqual(potcar_slab.functional, 'PBE')
        self.assertEqual(potcar_slab.symbols[1], u'Li_sv')
        self.assertEqual(potcar_slab.symbols[0], u'O')
        self.assertEqual(poscar_slab.structure.formula,
                         self.slab.formula)

        # Test auto-dipole
        dipole_incar = self.d_dipole["INCAR"]
        self.assertTrue(dipole_incar["LDIPOL"])
        self.assertArrayAlmostEqual(dipole_incar["DIPOL"],
                                    [0.2323, 0.2323, 0.2165], decimal=4)
        self.assertEqual(dipole_incar["IDIPOL"], 3)

    def test_kpoints(self):
        kpoints_slab = self.d_slab["KPOINTS"].kpts[0]
        kpoints_bulk = self.d_bulk["KPOINTS"].kpts[0]

        # In-plane sampling is shared between bulk and slab.
        self.assertEqual(kpoints_bulk[0], kpoints_slab[0])
        self.assertEqual(kpoints_bulk[1], kpoints_slab[1])
        self.assertEqual(kpoints_bulk[0], 15)
        self.assertEqual(kpoints_bulk[1], 15)
        self.assertEqual(kpoints_bulk[2], 15)
        # The last kpoint in a slab should always be 1
        self.assertEqual(kpoints_slab[2], 1)

    def test_as_dict(self):
        # BUG FIX: the reconstructed object was previously created but never
        # checked, so this test could not fail.  Verify the round trip.
        vis_dict = self.vis.as_dict()
        new = MVLSlabSet.from_dict(vis_dict)
        self.assertIsInstance(new, MVLSlabSet)
class MVLElasticSetTest(PymatgenTest):
    """Checks the INCAR generated by MVLElasticSet (finite-difference
    elastic-moduli calculations)."""

    def setUp(self):
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_incar(self):
        elastic_set = MVLElasticSet(self.get_structure("Graphite"))
        generated = elastic_set.incar
        self.assertEqual(generated["IBRION"], 6)
        self.assertEqual(generated["NFREE"], 2)
        self.assertEqual(generated["POTIM"], 0.015)
        # NPAR must be absent from the generated INCAR.
        self.assertNotIn("NPAR", generated)
class MVLGWSetTest(PymatgenTest):
    """Tests for MVLGWSet: static, diagonalization, GW and BSE modes."""

    def setUp(self):
        self.tmp = tempfile.mkdtemp()
        if "PMG_VASP_PSP_DIR" not in os.environ:
            os.environ["PMG_VASP_PSP_DIR"] = test_dir
        self.s = PymatgenTest.get_structure("Li2O")
        warnings.simplefilter("ignore")

    def tearDown(self):
        # BUG FIX: tearDown was defined twice in this class; the second
        # definition (rmtree only) silently shadowed the first
        # (resetwarnings only), so the warning filters were never restored.
        # A single tearDown now performs both cleanups.
        shutil.rmtree(self.tmp)
        warnings.resetwarnings()

    def test_static(self):
        mvlgwsc = MVLGWSet(self.s)
        incar = mvlgwsc.incar
        self.assertEqual(incar["SIGMA"], 0.01)
        kpoints = mvlgwsc.kpoints
        self.assertEqual(kpoints.style, Kpoints.supported_modes.Gamma)
        # GW-specific POTCAR variants are selected.
        symbols = mvlgwsc.potcar.symbols
        self.assertEqual(symbols, ["Li_sv_GW", "O_GW"])

    def test_diag(self):
        prev_run = os.path.join(test_dir, "relaxation")
        mvlgwdiag = MVLGWSet.from_prev_calc(prev_run, copy_wavecar=True,
                                            mode="diag")
        mvlgwdiag.write_input(self.tmp)
        self.assertTrue(os.path.exists(os.path.join(self.tmp, "WAVECAR")))
        self.assertEqual(mvlgwdiag.incar["NBANDS"], 32)
        self.assertEqual(mvlgwdiag.incar["ALGO"], "Exact")
        self.assertTrue(mvlgwdiag.incar["LOPTICS"])

    def test_bse(self):
        prev_run = os.path.join(test_dir, "relaxation")
        mvlgwgbse = MVLGWSet.from_prev_calc(prev_run, copy_wavecar=True,
                                            mode="BSE")
        mvlgwgbse.write_input(self.tmp)
        self.assertTrue(os.path.exists(os.path.join(self.tmp, "WAVECAR")))
        self.assertTrue(os.path.exists(os.path.join(self.tmp, "WAVEDER")))

        prev_run = os.path.join(test_dir, "relaxation")
        mvlgwgbse = MVLGWSet.from_prev_calc(prev_run, copy_wavecar=False,
                                            mode="GW")
        self.assertEqual(mvlgwgbse.incar["NOMEGA"], 80)
        self.assertEqual(mvlgwgbse.incar["ENCUTGW"], 250)
        self.assertEqual(mvlgwgbse.incar["ALGO"], "GW0")

        mvlgwgbse1 = MVLGWSet.from_prev_calc(prev_run, copy_wavecar=False,
                                             mode="BSE")
        self.assertEqual(mvlgwgbse1.incar["ANTIRES"], 0)
        self.assertEqual(mvlgwgbse1.incar["NBANDSO"], 20)
        self.assertEqual(mvlgwgbse1.incar["ALGO"], "BSE")
class MPHSEBSTest(PymatgenTest):
    """Tests for MPHSEBSSet (HSE band-structure input sets)."""

    def setUp(self):
        self.tmp = tempfile.mkdtemp()
        warnings.simplefilter("ignore")

    def tearDown(self):
        # BUG FIX: the scratch directory created in setUp was never removed,
        # leaking one temp directory per test run.
        shutil.rmtree(self.tmp)
        warnings.resetwarnings()

    def test_init(self):
        prev_run = os.path.join(test_dir, "static_silicon")
        vis = MPHSEBSSet.from_prev_calc(prev_calc_dir=prev_run, mode="uniform")
        self.assertTrue(vis.incar["LHFCALC"])
        self.assertEqual(len(vis.kpoints.kpts), 16)

        vis = MPHSEBSSet.from_prev_calc(prev_calc_dir=prev_run, mode="gap")
        self.assertTrue(vis.incar["LHFCALC"])
        self.assertEqual(len(vis.kpoints.kpts), 18)

        vis = MPHSEBSSet.from_prev_calc(prev_calc_dir=prev_run, mode="line")
        self.assertTrue(vis.incar["LHFCALC"])
        self.assertEqual(vis.incar['HFSCREEN'], 0.2)
        self.assertEqual(vis.incar['NSW'], 0)
        self.assertEqual(vis.incar['ISYM'], 3)
        self.assertEqual(len(vis.kpoints.kpts), 180)
class MVLScanRelaxSetTest(PymatgenTest):
    """Tests for MVLScanRelaxSet (SCAN meta-GGA relaxation set)."""

    def setUp(self):
        file_path = os.path.join(test_dir, 'POSCAR')
        poscar = Poscar.from_file(file_path)
        self.struct = poscar.structure
        self.mvl_scan_set = MVLScanRelaxSet(self.struct,
                                            potcar_functional="PBE_52",
                                            user_incar_settings={"NSW": 500})
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_incar(self):
        incar = self.mvl_scan_set.incar
        # SCAN-specific INCAR tags.
        self.assertIn("METAGGA", incar)
        self.assertIn("LASPH", incar)
        self.assertIn("ADDGRID", incar)
        # user_incar_settings must override the default NSW.
        self.assertEqual(incar["NSW"], 500)

        # Test SCAN+rVV10
        scan_rvv10_set = MVLScanRelaxSet(self.struct, vdw="rVV10")
        self.assertEqual(scan_rvv10_set.incar["BPARAM"], 15.7)

    def test_potcar(self):
        self.assertEqual(self.mvl_scan_set.potcar.functional, "PBE_52")
        test_potcar_set_1 = MVLScanRelaxSet(
            self.struct, potcar_functional="PBE_54")
        self.assertEqual(test_potcar_set_1.potcar.functional, "PBE_54")
        # Only the PBE_52/PBE_54 POTCAR releases are accepted.
        self.assertRaises(ValueError, MVLScanRelaxSet,
                          self.struct, potcar_functional="PBE")

    def test_as_from_dict(self):
        d = self.mvl_scan_set.as_dict()
        v = dec.process_decoded(d)
        self.assertEqual(type(v), MVLScanRelaxSet)
        self.assertEqual(v._config_dict["INCAR"]["METAGGA"], "SCAN")
        self.assertEqual(v.user_incar_settings["NSW"], 500)
class FuncTest(PymatgenTest):
    """Tests for the module-level batch_write_input helper."""

    def test_batch_write_input(self):
        structures = [PymatgenTest.get_structure("Li2O"),
                      PymatgenTest.get_structure("LiFePO4")]
        batch_write_input(structures)
        expected_dirs = ('Li4Fe4P4O16_1', 'Li2O1_0')
        expected_files = ("INCAR", "KPOINTS", "POSCAR", "POTCAR")
        # One directory per structure, each with a full input set.
        for dirname in expected_dirs:
            for fname in expected_files:
                self.assertTrue(os.path.exists(os.path.join(dirname, fname)))
        # Clean up the generated directories.
        for dirname in expected_dirs:
            shutil.rmtree(dirname)
class MVLGBSetTest(unittest.TestCase):
    """Tests for MVLGBSet in both bulk and slab modes."""

    def setUp(self):
        structure_path = os.path.join(test_dir, 'Li.cif')
        self.s = Structure.from_file(structure_path)
        self.bulk = MVLGBSet(self.s)
        self.slab = MVLGBSet(self.s, slab_mode=True)
        self.d_bulk = self.bulk.all_input
        self.d_slab = self.slab.all_input
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_bulk(self):
        # Bulk mode relaxes cell shape/volume (ISIF=3).
        self.assertEqual(self.d_bulk["INCAR"]["ISIF"], 3)

    def test_slab(self):
        # Slab mode keeps the cell fixed (ISIF=2).
        self.assertEqual(self.d_slab["INCAR"]["ISIF"], 2)

    def test_kpoints(self):
        slab_kpoints = self.d_slab["KPOINTS"]
        # In-plane divisions follow a length-40 density; one point normal to
        # the slab plane.
        abc = self.s.lattice.abc
        k_a = int(40 / abc[0] + 0.5)
        k_b = int(40 / abc[1] + 0.5)
        self.assertEqual(slab_kpoints.kpts, [[k_a, k_b, 1]])
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
johnson1228/pymatgen
|
pymatgen/io/vasp/tests/test_sets.py
|
Python
|
mit
| 32,924
|
[
"VASP",
"pymatgen"
] |
f97fcfa23476659282c1b683d00d068eedc168263b918ed7c13df35a7b2750a4
|
import gp_controller as gpc
import iCubInterface
import numpy as np
import yarp
import time
import math
import matplotlib.pylab
import random
import os
import sys
#import as find_lines
def exitModule(resetProbability):
    """Bernoulli draw: return True with probability *resetProbability*.

    A value of 0.0 never requests an exit; 1.0 always does (random.random()
    is uniform on [0, 1)).
    """
    return random.random() < resetProbability
def logArray(array, fd):
    """Append every element of *array* to the open file object *fd*.

    Each element is written via str() followed by a single space; no newline
    is emitted, so successive calls extend the same log line.
    """
    for element in array:
        fd.write(str(element))
        fd.write(" ")
def readValueFromFile(fileName):
    """Return the leading integer of the first line of *fileName*.

    Used for the persistent experiment / iteration counters.  The file is
    now opened with a context manager so the handle is closed even when
    parsing raises (the original open/close pair leaked the handle on
    error).
    """
    with open(fileName, "r") as fd:
        tokens = fd.readline().split()
    return int(tokens[0])
def writeIntoFile(fileName, string):
    """Overwrite *fileName* with *string*.

    Uses a context manager so the handle is closed (and the data flushed)
    even if the write raises, unlike the original open/close pair.
    """
    with open(fileName, "w") as fd:
        fd.write(string)
def addDescriptionData(dataString, parameter, value):
    """Return *dataString* with a "parameter value" line appended.

    BUG FIX: the original rebound the local name and returned nothing, so
    (strings being immutable) the caller's string was never updated and the
    function was a no-op.  The combined string is now returned; callers must
    use the return value.
    """
    return dataString + parameter + " " + value + "\n"
def readImage(cameraPort, yarp_image):
    """Blocking read of one camera frame from *cameraPort* into *yarp_image*."""
    cameraPort.read(yarp_image)
def getFeedbackAngle(yarp_image, img_array):
    # Estimate the visual feedback angle (returned as a float, presumably
    # radians — TODO confirm against find_lines) from the camera frame.
    # NOTE(review): `find_lines` is only available through the commented-out
    # import at the top of this file; as the module stands, calling this
    # function raises NameError.  Confirm the intended import before use.
    # The numpy frame arrives as RGB; swap channels to BGR for the detector.
    img_bgr = img_array[:,:,[2,1,0]]
    t = find_lines.load_t_matrix()
    theta = find_lines.run_system(img_bgr, t)
    return float(theta)
def calculateFeedbackAngleDifference(previousFbAngle, currentFbAngle, fbAngleRange):
    """Shortest signed difference between two angles on a circle.

    The angles live on a circle of circumference *fbAngleRange*.  When the
    raw difference exceeds half the range, the motion is assumed to have
    wrapped around, and the complementary (shorter) arc is returned with the
    opposite sign.
    """
    rawDelta = currentFbAngle - previousFbAngle
    if abs(rawDelta) < fbAngleRange / 2.0:
        return rawDelta
    return np.sign(-rawDelta) * (fbAngleRange - abs(rawDelta))
def main():
    """Run the Gaussian-process finger controller on the iCub (Python 2).

    Loads a GP controller, connects to the robot through iCubInterface and a
    YARP camera port, then repeatedly: reads tactile and encoder state, asks
    the GP for an action, maps it to open-loop joint voltages, applies them,
    and logs every step both via the iCub data dumper and per-experiment
    text files.  Pass 'new' as the first CLI argument to start a fresh
    experiment (increments the persistent experiment ID and creates a new
    output folder).
    """
    # module parameters
    expID = 39
    maxIterations = [ 77, 14, 134, 66, 34, 81, 52, 31, 48, 66]
    proximalJointStartPos = 40
    distalJointStartPos = 0
    joint1StartPos = 18
    #                   0               1   2   3   4   5   6   7   8   9  10  11  12  13  14  15
    startingPosEncs = [-44, joint1StartPos, -4, 39,-14, 2, 2, 18, 10, 0,163, 0, 0,proximalJointStartPos,distalJointStartPos, 0]
    #                      0   1   2   3   4   5
    headStartingPosEncs = [-29, 0, 18, 0, 0, 0]
    actionEnabled = True
    rolloutsNumFirst = 30
    rolloutsNumStd = 10
    finger = 1
    proximalJoint = 13
    distalJoint = 14
    proximalJointEnc = 6
    distalJointEnc_1 = 7
    distalJointEnc_2 = 8
    resetProbability = 0.02
    actionDuration = 0.25
    pauseDuration = 0.0
    maxFbAngle = math.pi
    minFbAngle = -math.pi
    maxFbAngleDifference = math.pi/3.0
    fbAngleRange = maxFbAngle - minFbAngle
    normalizedMaxVoltageY = 1.0
    maxVoltageProxJointY = 250.0
    maxVoltageDistJointY = 800.0
    slopeAtMaxVoltageY = 1.0
    waitTimeForFingersRepositioning = 7.0
    dataDumperPortName = "/gpc/log:i"
    iCubIconfigFileName = "iCubInterface.txt"
    inputFilePath = "./"
    initInputFileName = "controller_init_roll_fing.txt"
    standardInputFileName = "controller_input.txt"
    outputFilePath = "./"
    outputFileName = "controller_output.txt"
    dataPath = "./data/experiments/"
    jointsToActuate = [proximalJoint,distalJoint]
    fileNameIterID = "iterationID.txt"
    fileNameExperimentID = "experimentID.txt"
    fileNameExpParams = "parameters.txt"

    # 'new' on the command line starts a new experiment.
    isNewExperiment = False
    if len(sys.argv) > 1:
        if sys.argv[1] == 'new':
            isNewExperiment = True

    expID = readValueFromFile(fileNameExperimentID)
    if isNewExperiment:
        expID = expID + 1
        writeIntoFile(fileNameExperimentID,str(expID))

    # create output folder name
    experimentFolderName = dataPath + "exp_" + str(expID) + "/" # could be changed adding more information about the experiment
    print expID,isNewExperiment

    if os.path.exists(experimentFolderName):
        # get iteration ID
        iterID = readValueFromFile(fileNameIterID)
        writeIntoFile(fileNameIterID,str(iterID+1))
        inputFileFullName = inputFilePath + standardInputFileName
        rolloutsNum = rolloutsNumStd
    else:
        # create directory, create an experiment descrition file and reset iteration ID
        os.mkdir(experimentFolderName)
        # NOTE(review): this inlines what addDescriptionData was meant to do
        # (that helper never returned its result).
        descriptionData = ""
        descriptionData = descriptionData + "proximalJointMaxVoltage " + str(maxVoltageProxJointY) + "\n"
        descriptionData = descriptionData + "distalJointMaxVoltage " + str(maxVoltageDistJointY) + "\n"
        descriptionData = descriptionData + "slopeAtMaxVoltage " + str(slopeAtMaxVoltageY) + "\n"
        descriptionData = descriptionData + "actionDuration " + str(actionDuration) + "\n"
        descriptionData = descriptionData + "pauseDuration " + str(pauseDuration) + "\n"
        descriptionData = descriptionData + "finger " + str(finger) + "\n"
        descriptionData = descriptionData + "jointActuated " + str(proximalJoint) + " " + str(distalJoint) + "\n"
        descriptionData = descriptionData + "jointStartingPositions " + str(proximalJointStartPos) + " " + str(distalJointStartPos) + "\n"
        descriptionData = descriptionData + "resetProbabilty " + str(resetProbability) + "\n"
        descriptionData = descriptionData + "additionaNotes " + "" + "\n"
        writeIntoFile(experimentFolderName + fileNameExpParams,descriptionData)
        iterID = 0
        writeIntoFile(fileNameIterID,"1")
        inputFileFullName = inputFilePath + initInputFileName
        rolloutsNum = rolloutsNumFirst

    outputInputFileSuffix = str(expID) + "_" + str(iterID);
    backupOutputFileFullName = experimentFolderName + "contr_out_" + outputInputFileSuffix + ".txt"
    backupInputFileFullName = experimentFolderName + "contr_in_" + outputInputFileSuffix + ".txt"
    outputFileFullName = outputFilePath + outputFileName

    # calculate voltageX-voltageY mapping parameters (voltageY = k*(voltageX^(1/3)))
    k = pow(3*slopeAtMaxVoltageY*(pow(normalizedMaxVoltageY,2)),(1/3.0))
    maxVoltageX = pow(normalizedMaxVoltageY/k,3)

    # load gaussian process controller
    gp = gpc.GPController(inputFileFullName)
    gp.load_controller()

    # load iCub interface
    iCubI = iCubInterface.ICubInterface(dataDumperPortName,iCubIconfigFileName)
    iCubI.loadInterfaces()

    # cameras port
    cameraPort = yarp.Port()
    cameraPortName = "/gpc/leftEye"
    cameraPort.open(cameraPortName)
    yarp.Network.connect("/icub/cam/left",cameraPortName)

    # image settings
    width = 640
    height = 480

    # Create numpy array to receive the image and the YARP image wrapped around it
    img_array = np.zeros((height, width, 3), dtype=np.uint8)
    yarp_image = yarp.ImageRgb()
    yarp_image.resize(width, height)
    yarp_image.setExternal(img_array, img_array.shape[1], img_array.shape[0])

    # set start position
    if actionEnabled:
        iCubI.setArmPosition(startingPosEncs)
        iCubI.setHeadPosition(headStartingPosEncs)
        #iCubI.setRefVelocity(jointsToActuate,100)

    # wait for the user
    raw_input("- press enter to start the controller -")

    # Output file header: number of rollouts in this session.
    fd = open(outputFileFullName,"w")
    fd.write("nrollouts: ")
    fd.write(str(rolloutsNum))
    fd.write("\n")
    fd.close()

    # initialize velocity mode
    if actionEnabled:
        iCubI.setOpenLoopMode(jointsToActuate)

    rolloutsCounter = 0
    while rolloutsCounter < rolloutsNum:
        print "starting iteration n. ",rolloutsCounter + 1
        fd = open(outputFileFullName,"a")
        fd.write("# HEADER ")
        fd.write(str(rolloutsCounter + 1))
        fd.write("\n")
        iterCounter = 0
        exit = False
        voltage = [0,0]
        oldVoltage = [0,0]
        realVoltage = [0,0]
        readImage(cameraPort,yarp_image)
        # currentFbAngle = getFeedbackAngle(yarp_image,img_array)

        # main loop
        while iterCounter < maxIterations[rolloutsCounter%10] and not exit:
            # read tactile data (12 taxels of the selected finger)
            fullTactileData = iCubI.readTactileData()
            tactileData = []
            for j in range(12):
                tactileData.append(fullTactileData.get(12*finger+j).asDouble())
            #print np.sum(tactileData[0:12])

            # read encoders data from port
            fullEncodersData = iCubI.readEncodersDataFromPort()
            encodersData = []
            encodersData.append(fullEncodersData.get(proximalJointEnc).asDouble())
            encodersData.append(fullEncodersData.get(distalJointEnc_1).asDouble())
            encodersData.append(fullEncodersData.get(distalJointEnc_2).asDouble())

            state = [tactileData,encodersData,voltage]

            # store image to be processed while action is applied
            readImage(cameraPort,yarp_image)

            # choose action
            action = gp.get_control(state)

            # update and cut voltage
            oldVoltage[0] = voltage[0]
            oldVoltage[1] = voltage[1]
            voltage[0] = action[0] #voltage[0] + action[0];
            voltage[1] = action[1] #voltage[1] + action[1];
            #if abs(voltage[0]) > maxVoltageX:
            #    voltage[0] = maxVoltageX*np.sign(voltage[0])
            #if abs(voltage[1]) > maxVoltageX:
            #    voltage[1] = maxVoltageX*np.sign(voltage[1])

            # calculate real applied voltage: cube-root mapping from the
            # normalized action to the per-joint voltage scale.
            realVoltage[0] = maxVoltageProxJointY*k*pow(abs(voltage[0]),1/3.0)*np.sign(voltage[0])
            realVoltage[1] = maxVoltageDistJointY*k*pow(abs(voltage[1]),1/3.0)*np.sign(voltage[1])

            # voltage safety check (it should never happen!)
            if abs(realVoltage[0]) > maxVoltageProxJointY:
                realVoltage[0] = maxVoltageProxJointY*np.sign(realVoltage[0])
                print 'warning, voltage out of bounds!'
            if abs(realVoltage[1]) > maxVoltageDistJointY:
                realVoltage[1] = maxVoltageDistJointY*np.sign(realVoltage[1])
                print 'warning, voltage out of bounds!'

            # apply action
            if actionEnabled:
                iCubI.openLoopCommand(proximalJoint,realVoltage[0])
                iCubI.openLoopCommand(distalJoint,realVoltage[1])

            # get feedback angle (visual feedback currently disabled)
            # previousFbAngle = currentFbAngle
            beforeTS = time.time()
            # if rolloutsCounter == 0 and iterCounter < 50:
            #     matplotlib.image.imsave('images/test_'+ str(rolloutsCounter) + '_' + str(iterCounter) +'.tiff', img_array, format='tiff')
            # currentFbAngle = getFeedbackAngle(yarp_image,img_array)
            # fbAngleDifference = calculateFeedbackAngleDifference(previousFbAngle,currentFbAngle,fbAngleRange)
            # if abs(fbAngleDifference) > maxFbAngleDifference:
            #     currentFbAngle = previousFbAngle
            #     fbAngleDifference = 0.0
            # print fbAngleDifference
            afterTS = time.time()
            # Keep the control period at actionDuration seconds.
            timeToSleep = max(actionDuration-(afterTS-beforeTS),0)
            time.sleep(timeToSleep)
            #print "curr ",previousFbAngle*180/3.1415,"diff ",fbAngleDifference*180/3.1415,afterTS - beforeTS,timeToSleep

            # wait for stabilization
            time.sleep(pauseDuration)

            # log data
            iCubI.logData(tactileData + encodersData + oldVoltage + voltage)#[action[0],action[1]])
            logArray(tactileData,fd)
            logArray(encodersData,fd)
            logArray(oldVoltage,fd)
            logArray(action,fd)
            fbAngleDifference = 0; # TODO TO REMOVE
            logArray([fbAngleDifference],fd)
            fd.write("\n")
            #print 'prev ',previousFbAngle*100/3.1415,'curr ',currentFbAngle*100/3.1415,'diff ',fbAngleDifference*100/3.1415
            iterCounter = iterCounter + 1
            exit = False #exitModule(resetProbability)

        fd.close()

        if actionEnabled:
            print "finger ripositioning..."
            # finger repositioning
            iCubI.setPositionMode(jointsToActuate)
            iCubI.setJointPosition(1,joint1StartPos + 12)
            time.sleep(1)
            iCubI.setJointPosition(proximalJoint,proximalJointStartPos)
            iCubI.setJointPosition(distalJoint,distalJointStartPos)
            time.sleep(3)
            iCubI.setJointPosition(1,joint1StartPos)
            time.sleep(2)
            iCubI.setOpenLoopMode(jointsToActuate)
            # iCubI.setPositionMode(jointsToActuate)
            # iCubI.setJointPosition(proximalJoint,0.0)
            # iCubI.setJointPosition(distalJoint,0.0)
            # time.sleep(waitTimeForFingersRepositioning)
            # iCubI.setJointPosition(proximalJoint,proximalJointStartPos)
            # iCubI.setJointPosition(distalJoint,distalJointStartPos)
            # time.sleep(waitTimeForFingersRepositioning)
            # iCubI.setOpenLoopMode(jointsToActuate)
            print "...done"

        rolloutsCounter = rolloutsCounter + 1

    # copy input and output file
    # NOTE(review): original indentation was lost; these backups are placed
    # after the rollout loop (the backup file names do not depend on the
    # rollout counter) — confirm against the project history.
    os.system("cp " + inputFileFullName + " " + backupInputFileFullName)
    os.system("cp " + outputFileFullName + " " + backupOutputFileFullName)

    # restore position mode and close iCubInterface
    if actionEnabled:
        iCubI.setPositionMode(jointsToActuate)
    cameraPort.close()
    iCubI.closeInterface()

if __name__ == "__main__":
    main()
|
tacman-fp7/tactile-control
|
python/iCubControllerOld.py
|
Python
|
gpl-2.0
| 13,070
|
[
"Gaussian"
] |
77c56d5bda89b893b73c01bb0727fcd20ed8c335a62a80abf72d9c7805201dd5
|
# noqa
import pytest
from pathlib import Path
from birdy import utils
from .common import resource_file
def test_is_url():  # noqa: D103
    # Both http and file schemes count as URLs; bare paths do not.
    for url in ("http://localhost:5000/wps", "file:///path/to/my/file.txt"):
        assert utils.is_url(url)
    assert not utils.is_url("myfile.txt")
def test_is_file():  # noqa: D103
    assert not utils.is_file(None)
    path = resource_file("dummy.nc")
    assert utils.is_file(path)
    assert utils.is_file(Path(path))
    # A 260-character string exceeds typical path-length limits.
    assert not utils.is_file("a" * 260)
def test_sanitize():  # noqa: D103
    # Lowercasing, separator normalization, and keyword suffixing.
    cases = {
        "output": "output",
        "My Output 1": "my_output_1",
        "a.b": "a_b",
        "a-b": "a_b",
        "return": "return_",
        "Finally": "finally_",
    }
    for raw, expected in cases.items():
        assert utils.sanitize(raw) == expected
def test_delist():  # noqa: D103
    # Multi-element lists and plain values pass through unchanged;
    # single-element lists collapse to the element.
    assert utils.delist(["one", "two"]) == ["one", "two"]
    assert utils.delist(["one"]) == "one"
    assert utils.delist("one") == "one"
class TestEncode:  # noqa: D101
    """Tests for utils.embed: embedding local resources for WPS requests."""

    nc = resource_file("dummy.nc")
    xml = resource_file("wps_emu_caps.xml")

    def test_str(self):  # noqa: D102
        # Plain strings pass through with utf-8 encoding.
        s = "just a string"
        assert utils.embed(s) == (s, "utf-8")

    def test_local_fn(self):  # noqa: D102
        # Binary content is returned as base64-encoded bytes; text stays utf-8.
        nc, enc = utils.embed(self.nc, "application/x-netcdf")
        assert isinstance(nc, bytes)
        assert enc == "base64"
        xml, enc = utils.embed(self.xml, "text/xml")
        assert isinstance(xml, str)
        assert enc == "utf-8"

    def test_local_uri(self):  # noqa: D102
        # file:// URIs resolve to the local file.
        xml, enc = utils.embed("file://" + self.xml, "text/xml")
        assert isinstance(xml, str)

    def test_path(self):  # noqa: D102
        # pathlib.Path inputs behave like string paths.
        p = Path(self.nc)
        nc, enc = utils.embed(p, "application/x-netcdf")
        assert isinstance(nc, bytes)

    def test_file(self):  # noqa: D102
        # Open file objects are read directly.
        with open(self.nc, "rb") as fp:
            nc, enc = utils.embed(fp, "application/x-netcdf")
            assert isinstance(nc, bytes)
class TestGuessType:  # noqa: D101
    """Tests for utils.guess_type: choosing a mime-type for a resource."""

    def test_zip(self):  # noqa: D102
        # When application/zip is among the candidates it is chosen...
        mime, enc = utils.guess_type(
            "LSJ_LL.zip",
            ["application/gml+xml", "application/zip", "application/x-zipped-shp"],
        )
        assert mime == "application/zip"
        # ...otherwise the zipped-shapefile type matches the extension.
        mime, enc = utils.guess_type(
            "LSJ_LL.zip",
            ["application/gml+xml", "application/x-zipped-shp"],
        )
        assert mime == "application/x-zipped-shp"

    def test_nc(self):  # noqa: D102
        # THREDDS "dodsC" URLs resolve to OPeNDAP; "file" URLs to netCDF.
        mime, enc = utils.guess_type(
            "https://remote.org/thredds/dodsC/a.nc",
            ["application/x-netcdf", "application/x-ogc-dods"],
        )
        assert mime == "application/x-ogc-dods"
        mime, enc = utils.guess_type(
            "https://remote.org/thredds/file/a.nc",
            ["application/x-ogc-dods", "application/x-netcdf"],
        )
        assert mime == "application/x-netcdf"

    def test_path(self):  # noqa: D102
        # pathlib.Path inputs are accepted as well as strings.
        from pathlib import Path

        mime, enc = utils.guess_type(
            Path("shape.json"), ["wrong", "application/geo+json"]
        )
        assert mime == "application/geo+json"
        mime, enc = utils.guess_type(
            Path("data.nc"), ["application/x-ogc-dods", "application/x-netcdf"]
        )
        assert mime == "application/x-netcdf"
        mime, enc = utils.guess_type(
            Path("file:///dodsC/data.nc"),
            ["application/x-netcdf", "application/x-ogc-dods"],
        )
        assert mime == "application/x-ogc-dods"
@pytest.mark.online
def test_is_opendap_url():
    """Check OPeNDAP URL detection against a live THREDDS server."""
    # This test uses online requests, and the servers are not as stable as hoped.
    # We should record these requests so that the tests don't break when the servers are down.
    url = (
        "https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/"
        "birdhouse/nrcan/nrcan_canada_daily_v2/tasmin/nrcan_canada_daily_tasmin_2017.nc"
    )
    assert utils.is_opendap_url(url)

    # The same dataset served via plain HTTP download is not OPeNDAP.
    url = url.replace("dodsC", "fileServer")
    assert not utils.is_opendap_url(url)

    # no Content-Description header
    # url = "http://test.opendap.org/opendap/netcdf/examples/tos_O1_2001-2002.nc"
    # assert is_opendap_url(url)

    # Invalid schemas, non-OPeNDAP hosts, and schema-less paths are rejected.
    url = "invalid_schema://something"
    assert not utils.is_opendap_url(url)

    url = "https://www.example.com"
    assert not utils.is_opendap_url(url)

    url = "/missing_schema"
    assert not utils.is_opendap_url(url)
|
bird-house/birdy
|
tests/test_utils.py
|
Python
|
apache-2.0
| 4,521
|
[
"NetCDF"
] |
4d9656c6593e146e35babf7fe976f04d9d685e8f64574265a054b41f0f3e3491
|
# -*- coding: utf-8 -*-
#
# MNE documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 11 10:45:48 2010.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import os.path as op
from datetime import date
import sphinxgallery
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Make the in-tree mne package and the bundled Sphinx extensions importable
# before they are imported below.
curdir = op.dirname(__file__)
sys.path.append(op.abspath(op.join(curdir, '..', 'mne')))
sys.path.append(op.abspath(op.join(curdir, 'sphinxext')))
import mne
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
import numpy_ext.numpydoc
# NOTE(review): both 'sphinx.ext.pngmath' and 'sphinx.ext.mathjax' are
# listed; Sphinx uses a single math renderer — confirm which one is intended.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.autosummary',
              'sphinx.ext.pngmath',
              'sphinx.ext.mathjax',
              'numpy_ext.numpydoc',
              # 'sphinx.ext.intersphinx',
              # 'flow_diagram',
              'sphinxgallery.gen_gallery']
autosummary_generate = True
autodoc_default_flags = ['inherited-members']
# extensions = ['sphinx.ext.autodoc',
#               'sphinx.ext.doctest',
#               'sphinx.ext.todo',
#               'sphinx.ext.pngmath',
#               'sphinx.ext.inheritance_diagram',
#               'numpydoc',
#               'ipython_console_highlighting',
#               'only_directives']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MNE'
# Copyright year updates automatically at build time.
copyright = u'2012-%s, MNE Developers' % date.today().year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = mne.__version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
unused_docs = ['config_doc.rst']
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
exclude_patterns = ['source/generated']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['mne.']
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    'navbar_title': ' ',
    'source_link_position': "footer",
    'bootswatch_theme': "flatly",
    'navbar_sidebarrel': False,
    'bootstrap_version': "3",
    'navbar_links': [("Tutorials", "tutorials"),
                     ("Gallery", "auto_examples/index"),
                     ("Manual", "manual/index"),
                     ("API", "python_reference"),
                     ("FAQ", "faq"),
                     ("Cite", "cite"),
                     ],
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "_static/mne_logo_small.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "favicon.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static', '_images', sphinxgallery.glr_path_static()]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# variables to pass to HTML templating engine
# NOTE: the False default relies on int(False) == 0; any non-numeric string in
# the BUILD_DEV_HTML environment variable would raise ValueError here.
build_dev_html = bool(int(os.environ.get('BUILD_DEV_HTML', False)))
html_context = {'use_google_analytics': True, 'use_twitter': True,
                'use_media_buttons': True, 'build_dev_html': build_dev_html}
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'mne-doc'
# -- Options for LaTeX output ------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    # ('index', 'MNE.tex', u'MNE Manual',
    #  u'MNE Contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = "_static/logo.png"
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
latex_use_parts = True
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
latex_use_modindex = True
trim_doctests_flags = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
# Configuration for sphinx-gallery: where examples live, where generated
# galleries go, and where cross-referenced APIs are documented.
sphinxgallery_conf = {
    'examples_dir': ['../examples', '../tutorials'],
    'gallery_dir': ['auto_examples', 'auto_tutorials'],
    'doc_module': ('sphinxgallery', 'numpy'),
    'reference_url': {
        'mne': None,
        'matplotlib': 'http://matplotlib.org',
        'numpy': 'http://docs.scipy.org/doc/numpy-1.9.1',
        'scipy': 'http://docs.scipy.org/doc/scipy-0.11.0/reference',
        'mayavi': 'http://docs.enthought.com/mayavi/mayavi'},
    'use_mayavi': True,
}
|
antiface/mne-python
|
doc/conf.py
|
Python
|
bsd-3-clause
| 9,345
|
[
"Mayavi"
] |
654433d8eaa180bde2cf00a1c0d7a5b08de60c11a869e0e68e48dcc4f3b045d2
|
#! /usr/bin/env python
#
# Copyright 2000-2001, GMD, Sankt Augustin
# -- German National Research Center for Information Technology
#
# Copyright 2010, LIFIA - Facultad de Informatica - Univ. Nacional de La Plata
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
import os
import re
import getopt
import pprint
import string
from stat import ST_SIZE
from dvbobjects.utils import *
from dvbobjects.utils.SpecFile import *
from dvbobjects.DSMCC import BIOP
from dvbobjects.DVB.DataCarousel import TransactionId
######################################################################
# Constants
TIMEOUT = 0xFFFFFFFF            # BIOP IOR timeout field (maximum value)
BLOCK_SIZE = 4066               # DDB block size passed to the DII spec
UPDATE_FLAG = 0                 # default transaction update flag
TABLE_ID = 0x3C                 # section table_id written to DII.spec
                                # (presumably the DSM-CC DDB table id — confirm)
DEBUG = 0                       # set nonzero for verbose module tracing
######################################################################
# Tunables
MAX_MODULE_SIZE = 1024 * 64     # containing > 1 BIOP Message
OPT_MODULE_SIZE = MAX_MODULE_SIZE  # Optimal ??? cf. MHP
assert OPT_MODULE_SIZE <= MAX_MODULE_SIZE
######################################################################
class ModuleBuilder:
    """Accumulate packed BIOP messages into one carousel module file.

    The module is written to <OUTPUT_DIR>/<module_id as %04d>.mod and
    its running byte count is tracked in self.size.
    """

    def __init__(self, OUTPUT_DIR, MODULE_ID):
        self.module_id = MODULE_ID
        filename = "%s/%04d.mod" % (OUTPUT_DIR, self.module_id)
        self.name = filename  # needed for debug
        if DEBUG:
            print "NEW MODULE: %s" % filename
        self.__file = open(filename, "wb")
        # Bytes written to this module so far.
        self.size = 0

    def hasRoom(self, requestedSize):
        # Return 1 if requestedSize more bytes should go into this module.
        if self.size == 0:
            # Anything goes... an empty module accepts a message of any
            # size, even one larger than OPT_MODULE_SIZE.
            return 1
        elif self.size + requestedSize <= OPT_MODULE_SIZE:
            # Module isn't empty, but still has space
            return 1
        else:
            # Start new module
            return 0

    def write(self, bytes):
        # Append the packed message and update the size accounting.
        msgSize = len(bytes)
        if DEBUG:
            print "ADD (mod %d, size %d+%d=%d)" % (self.module_id, self.size, msgSize, self.size + msgSize)
        self.__file.write(bytes)
        self.size = self.size + msgSize
######################################################################
class ObjectCarouselBuilder:
    """Build an object carousel with 1 DII.

    Using a single DII limits the number of modules, but to a number
    never reached in practice.
    """

    def __init__(self, OUTPUT_DIR, CAROUSEL_ID, DOWNLOAD_ID, ASSOC_TAG, MODULE_VERSION, UPDATE_FLAG):
        # Next free module id; ids start at 1.
        self.MODULE_ID = 1
        # The builder generates modules and specification files for
        # information not included in the modules themselves.
        self.__SPECdii = open("%s/DII.spec" % OUTPUT_DIR, "w")
        self.DSI_TransactionId = TransactionId(
            version = MODULE_VERSION,
            identification = 0,
            updateFlag = UPDATE_FLAG,
        )
        self.DII_TransactionId = TransactionId(
            version = MODULE_VERSION,
            identification = 1,
            updateFlag = UPDATE_FLAG,
        )
        self.__spec = SuperGroupSpec(
            transactionId = self.DSI_TransactionId,
            version = MODULE_VERSION,
            srg_ior = "%s/SRG_IOR" % OUTPUT_DIR,
        )
        self.__spec.addGroup(
            transactionId = self.DII_TransactionId,
            version = MODULE_VERSION,
            downloadId = DOWNLOAD_ID,
            assocTag = ASSOC_TAG,
            blockSize = BLOCK_SIZE,
        )
        # Output variables
        self.OUTPUT_DIR = OUTPUT_DIR
        self.DOWNLOAD_ID = DOWNLOAD_ID
        self.CAROUSEL_ID = CAROUSEL_ID
        self.ASSOC_TAG = ASSOC_TAG
        self.MODULE_VERSION = MODULE_VERSION
        # currently open modules indexed by type
        # ('dir' = directories, 'fil' = files, 'ste' = stream events)
        self.__ModByType = {
            'dir': 0,
            'fil': 0,
            'ste': 0,
        }
        self.__TypeInUse = {
            'dir': 0,
            'fil': 0,
            'ste': 0,
        }
        # Modules opened for explicitly reserved ids (read from
        # '<file>.descriptors'), keyed by the id as a string.
        self.__ModById = {}
        # Table of Contents, for debugging
        self.TOC = []

    def genSpec(self):
        # Write the accumulated DSI/DII specification files.
        self.__spec.write(self.OUTPUT_DIR)

    def addDirectory(self, node):
        self.__addNode(node, "dir")

    def addFile(self, node):
        self.__addNode(node, "fil")

    def addStreamEvent(self, node):
        self.__addNode(node, "ste")

    def __addNode(self, node, type):
        # Pack the node's BIOP message, place it in a module (honouring a
        # reserved module id from a '.descriptors' file when valid), then
        # bind the node's IOR to the chosen module and record it in the TOC.
        msg = node.message()
        msgBytes = msg.pack()
        msgSize = len(msgBytes)
        modid = ""
        # Check for additional file descriptors, like suggested module id
        if os.path.exists(msg.PATH + '.descriptors'):
            modid = open(msg.PATH + '.descriptors', 'rt').read()
            if not modid in self.__ModById:
                if int(modid) > self.MODULE_ID:
                    self.__ModById[modid] = self.__nextModule(modid)
                    self.__ModById[modid].write(msgBytes)
                else:
                    # Suggested id is at or below the running counter, so it
                    # has already been handed out to an ordinary module.
                    print "WARNING: Module Id for file " + msg.PATH + " already in use, moduleid ignored"
                    modid = ""
            else:
                if self.__ModById[modid].hasRoom(msgSize):
                    self.__ModById[modid].write(msgBytes)
                else:
                    print "WARNING: Can't add file " + msg.PATH + " to module id " + modid + " because modules it is full"
                    modid = ""
        if modid == "":
            # file has not reserved a module id, or its reservation is invalid
            if not self.__TypeInUse[type]:
                self.__ModByType[type] = self.__nextModule(self.MODULE_ID)
                self.__TypeInUse[type] = 1
            if self.__ModByType[type].hasRoom(msgSize):
                self.__ModByType[type].write(msgBytes)
            else:
                self.__ModByType[type] = self.__nextModule(self.MODULE_ID)
                self.__ModByType[type].write(msgBytes)
            modid = str(self.__ModByType[type].module_id)
        node.bind(
            carouselId = self.CAROUSEL_ID,
            moduleId = int(modid),
            assoc_tag = self.ASSOC_TAG,
            DII_TransactionId = self.DII_TransactionId,
        )
        self.TOC.append((
            int(modid),
            msg.objectKey,
            os.path.basename(msg.PATH),
            msg.objectKind,
            msgSize,
        ))

    def __nextModule(self, modid):
        # Open a new ModuleBuilder for id modid, register it with the DII
        # spec files, and advance MODULE_ID past any reserved ids.
        mod = ModuleBuilder(self.OUTPUT_DIR, int(modid))
        self.MODULE_ID = mod.module_id + 1
        while str(self.MODULE_ID) in self.__ModById:  # check if module id was reserved
            self.MODULE_ID = self.MODULE_ID + 1
        self.__SPECdii.write("%s 0x%02X 0x%04X 0x%02X\n" % (
            mod.name,
            TABLE_ID,
            mod.module_id,
            self.MODULE_VERSION))
        self.__spec.addModule(
            tableId = TABLE_ID,
            moduleId = mod.module_id,
            moduleVersion = self.MODULE_VERSION,
        )
        return mod
######################################################################
class FSNode(DVBobject):
    """Superclass for FSDir, FSStreamEvent and FSFile.

    A node carries the BIOP object key assigned to it (self.KEY) and can
    build the IOP IOR through which other carousel objects reference it.
    Subclasses set self.PATH and MessageClass before IOR() is used.
    """

    def __init__(self, KEY_SERIAL_NUMBER):
        # BIOP object key identifying this node within its module.
        self.KEY = KEY_SERIAL_NUMBER

    def IOR(self, carouselId, moduleId, key, assoc_tag, DII_TransactionId):
        """Return the BIOP IOP.IOR locating this node in the carousel."""
        iop = BIOP.IOP.IOR(
            PATH = self.PATH,  # for debugging
            type_id = self.MessageClass.objectKind,
            carouselId = carouselId,
            moduleId = moduleId,
            objectKey = key,
            assocTag = assoc_tag,
            transactionId = DII_TransactionId,
            timeout = TIMEOUT,
        )
        return iop

    def _checkBinding(self):
        """Raise RuntimeError if bind() was already called on this node.

        The previous implementation raised a string exception
        (``raise "Already Bound", self._binding``) which is a TypeError on
        Python >= 2.6 and a SyntaxError on Python 3; a real exception class
        preserves the intent portably.
        """
        if hasattr(self, '_binding'):
            raise RuntimeError("Already Bound: %r" % (self._binding,))
######################################################################
class FSFile(FSNode):
    """A File in a File System destined for an Object Carousel."""
    MessageClass = BIOP.FileMessage
    BindingClass = BIOP.ObjectFileBinding

    def __init__(self, path, KEY_SERIAL_NUMBER):
        FSNode.__init__(self, KEY_SERIAL_NUMBER)
        assert(len(path) > 0)
        self.PATH = path
        # File size in bytes, carried in both the binding and the message.
        self.contentSize = os.stat(path)[ST_SIZE]

    def bind(self, carouselId, moduleId, assoc_tag, DII_TransactionId):
        # Create the binding that names this file inside its parent
        # directory; may only be called once per node.
        self._checkBinding()
        filename = os.path.basename(self.PATH)
        self._binding = self.BindingClass(
            nameId = filename + "\x00",  # name is NUL-terminated
            IOR = self.IOR(carouselId, moduleId, self.KEY, assoc_tag, DII_TransactionId),
            contentSize = self.contentSize,
        )

    def binding(self):
        # Only valid after bind() has been called.
        return self._binding

    def message(self):
        # Build the BIOP File message for this file.
        msg = self.MessageClass(
            PATH = self.PATH,
            objectKey = self.KEY,
            contentSize = self.contentSize,
        )
        return msg

    def shipMessage(self, theObjectCarouselBuilder):
        # Hand this file to the builder, which packs it into a module.
        theObjectCarouselBuilder.addFile(self)
######################################################################
class FSStreamEvent(FSNode):
    """A directory in a file system destined to generate a StreamEvent
    object for the Object Carousel (directories named *.event)."""
    MessageClass = BIOP.StreamEventMessage
    BindingClass = BIOP.ObjectStreamEventBinding

    def __init__(self, path, KEY_SERIAL_NUMBER):
        FSNode.__init__(self, KEY_SERIAL_NUMBER)
        assert(len(path) > 0)
        self.PATH = path

    def bind(self, carouselId, moduleId, assoc_tag, DII_TransactionId):
        # Create the binding that names this stream event inside its
        # parent directory; may only be called once per node.
        self._checkBinding()
        filename = os.path.basename(self.PATH)
        self._binding = self.BindingClass(
            nameId = filename + "\x00",  # name is NUL-terminated
            IOR = self.IOR(carouselId, moduleId, self.KEY, assoc_tag, DII_TransactionId),
        )

    def binding(self):
        # Only valid after bind() has been called.
        return self._binding

    def message(self):
        # Build the BIOP StreamEvent message for this node.
        msg = self.MessageClass(
            PATH = self.PATH,
            objectKey = self.KEY,
        )
        return msg

    def shipMessage(self, theObjectCarouselBuilder):
        theObjectCarouselBuilder.addStreamEvent(self)
######################################################################
class FSDir(FSNode, ObjectCarouselBuilder):
    """A Directory in a File System destined for an Object Carousel."""
    # NOTE(review): ObjectCarouselBuilder is listed as a base class but its
    # __init__ is never invoked here and no builder state is used; confirm
    # the second base class is intentional.
    MessageClass = BIOP.DirectoryMessage
    BindingClass = BIOP.ContextBinding

    def __init__(self, path, KEY_SERIAL_NUMBER):
        FSNode.__init__(self, KEY_SERIAL_NUMBER)
        assert(len(path) > 0)
        self.PATH = path
        # Child bindings, filled in by visit().
        self.bindings = []
        # Running object-key counter threaded through the depth-first walk.
        self.visitKEY = KEY_SERIAL_NUMBER

    def bind(self, carouselId, moduleId, assoc_tag, DII_TransactionId):
        # Create the context binding that names this directory inside its
        # parent; may only be called once per node.
        self._checkBinding()
        filename = os.path.basename(self.PATH)
        self._binding = self.BindingClass(
            nameId = filename + "\x00",  # name is NUL-terminated
            IOR = self.IOR(carouselId, moduleId, self.KEY, assoc_tag, DII_TransactionId),
        )

    def binding(self):
        # Only valid after bind() has been called.
        return self._binding

    def message(self):
        # Build the BIOP Directory message carrying the child bindings.
        msg = self.MessageClass(
            PATH = self.PATH,
            objectKey = self.KEY,
            bindings = self.bindings,
        )
        return msg

    def visit(self, theObjectCarouselBuilder):
        # Depth first visit: ship every child (file, stream-event
        # directory, or subdirectory), collect their bindings, then ship
        # this directory itself.
        #REJECT_EXT = ['.pyc', '.o', '.so']
        REJECT_EXT = ['.descriptors']
        #REJECT_FN = ['x', 'tmp']
        #REJECT_FN = []
        EVENT_EXT = ['.event']
        assert os.path.isdir(self.PATH), self.PATH
        try:
            ls = os.listdir(self.PATH)
        except:
            print self.PATH
            raise
        ls.sort()
        # todo: order by decreasing size
        for filename in ls:
            path = os.path.join(self.PATH, filename)
            if os.path.splitext(filename)[1] in REJECT_EXT:
                continue
            #if filename in REJECT_FN:
            #    continue
            #elif os.path.splitext(filename)[1] in REJECT_EXT:
            #    continue
            #elif os.path.islink(path):
            #    continue
            if os.path.isfile(path):
                self.visitKEY = self.visitKEY + 1
                obj = FSFile(path, self.visitKEY)
                obj.shipMessage(theObjectCarouselBuilder)
                if DEBUG:
                    print obj.message()
                    print
            elif os.path.isdir(path):
                if os.path.splitext(filename)[1] in EVENT_EXT:
                    # Directories named *.event become StreamEvent objects.
                    self.visitKEY = self.visitKEY + 1
                    obj = FSStreamEvent(path, self.visitKEY)
                    obj.shipMessage(theObjectCarouselBuilder)
                    if DEBUG:
                        print obj.message()
                        print
                else:
                    self.visitKEY = self.visitKEY + 1
                    obj = FSDir(path, self.visitKEY)
                    obj.visit(theObjectCarouselBuilder)
                    # Propagate the child's key counter back up.
                    self.visitKEY = obj.visitKEY
                    if DEBUG:
                        print obj.message()
                        print
            else:
                continue
            # shipMessage() bound the child, so its binding is available.
            self.bindings.append(obj.binding())
        # THIS directory (i.e. self) is complete, so...
        self.shipMessage(theObjectCarouselBuilder)
        if DEBUG:
            print self.message()
            print

    def shipMessage(self, theObjectCarouselBuilder):
        theObjectCarouselBuilder.addDirectory(self)
######################################################################
class FSRoot(FSDir):
    """The root directory of the carousel: a directory in a file system
    destined to be the Service Gateway of an Object Carousel."""
    MessageClass = BIOP.ServiceGatewayMessage
######################################################################
def GenModules(INPUT_DIR, OUTPUT_DIR, CAROUSEL_ID, DOWNLOAD_ID, ASSOC_TAG, MODULE_VERSION, UPDATE_FLAG):
    # Walk INPUT_DIR as the carousel's Service Gateway, packing every
    # object into module files under OUTPUT_DIR, then write the Service
    # Gateway IOR (referenced from the DSI) and the DSI/DII spec files.
    root = FSRoot(INPUT_DIR, 0)
    theObjectCarouselBuilder = ObjectCarouselBuilder(OUTPUT_DIR, CAROUSEL_ID, DOWNLOAD_ID, ASSOC_TAG, MODULE_VERSION, UPDATE_FLAG)
    root.visit(theObjectCarouselBuilder)
    # root was bound by the visit, so its IOR can be serialized now.
    out = open("%s/SRG_IOR" % OUTPUT_DIR, "wb")
    out.write(root.binding().IOR.pack())
    out.close()
    theObjectCarouselBuilder.genSpec()
    if DEBUG:
        print root.binding().IOR
        pprint.pprint(theObjectCarouselBuilder.TOC)
######################################################################
# getopt option definitions: only -h/--help is recognised; everything
# else is positional.
OPTIONS = "h"
LONG_OPTIONS = [
    "help",
]
def Usage(return_code = 1):
    """Print a usage summary to stdout and exit with return_code.

    Formats the message before printing: the previous code applied the
    '%' operator to a Python 2 print statement's argument in a way that
    is not portable; building the string first works on Python 2 and 3.
    The optional trailing [blocksize] and [updateflag] arguments accepted
    by CheckArgs() are now documented as well.
    """
    message = ("Usage: %s"
               " <InputDirectory>"
               " <OutputModulesDirectory>"
               " download_id"
               " carousel_id"
               " association_tag"
               " version"
               " [blocksize]"
               " [updateflag]") % (sys.argv[0],)
    print(message)
    sys.exit(return_code)
def CheckArgs():
    """Parse the command line and run GenModules.

    Expects six mandatory positional arguments (input dir, output dir,
    carousel id, download id, association tag in hex, version in hex)
    optionally followed by a block size and an update flag.

    Fixes over the previous version:
    - UPDATE_FLAG and BLOCK_SIZE are declared global.  Before, assigning
      them made both function-local, so a 6- or 7-argument invocation hit
      UnboundLocalError on UPDATE_FLAG at the GenModules call, and a
      custom BLOCK_SIZE never reached ObjectCarouselBuilder at all.
    - '<>' replaced by '!=' (valid on Python 2 and 3).
    - prints made portable (single formatted string per line).
    """
    global BLOCK_SIZE, UPDATE_FLAG
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], OPTIONS, LONG_OPTIONS)
    except getopt.error:
        Usage()
    for opt_name, opt_val in opts:
        if opt_name in ['-h', '--help']:
            Usage(0)
    if len(args) == 8:
        # Optional eighth argument overrides the module-level UPDATE_FLAG.
        UPDATE_FLAG = int(args[7])
        print("Setting update flag to %d" % UPDATE_FLAG)
        args = args[:-1]
    if len(args) == 7:
        # Optional seventh argument overrides the module-level BLOCK_SIZE,
        # which ObjectCarouselBuilder reads when creating the DII group.
        BLOCK_SIZE = int(args[6])
        print("Using custom blocksize of %d" % BLOCK_SIZE)
        args = args[:-1]
    if len(args) != 6:
        Usage()
    INPUT_DIR, OUTPUT_DIR, CAROUSEL_ID, DOWNLOAD_ID, ASSOC_TAG, MODULE_VERSION = args
    # Association tag and module version are given in hexadecimal.
    GenModules(INPUT_DIR, OUTPUT_DIR, int(CAROUSEL_ID), int(DOWNLOAD_ID),
               int(ASSOC_TAG, 16), int(MODULE_VERSION, 16), int(UPDATE_FLAG))
######################################################################
# Script entry point: parse arguments and build the carousel.
if __name__ == '__main__':
    CheckArgs()
|
0xalen/opencaster_isdb-tb
|
tools/oc2sec/file2mod.py
|
Python
|
gpl-2.0
| 15,593
|
[
"VisIt"
] |
64b96f23a189b134f2f7762b23c8b918454db00a2ca97a54b2a996046df3ddee
|
"""
DIRAC.StorageManagementSystem package
"""
|
DIRACGrid/DIRAC
|
src/DIRAC/StorageManagementSystem/__init__.py
|
Python
|
gpl-3.0
| 49
|
[
"DIRAC"
] |
d08d8d7218f0b1ca98e276017c1745e35271d7728c027544227891ab57911175
|
from unittest import TestCase
from dark.blast.score import bitScoreToEValue, eValueToBitScore
class TestBitScoreToEValue(TestCase):
    """
    Tests for the bitScoreToEValue function.
    """

    def testTrivial(self):
        """
        A bit score of 1.0 against a database of size 1 holding a single
        sequence, with a query of length 1 and no length adjustment, must
        convert to an e-value of 0.5
        """
        result = bitScoreToEValue(
            bitScore=1.0, dbSize=1, dbSequenceCount=1, queryLength=1,
            lengthAdjustment=0)
        self.assertEqual(0.5, result)

    def testErwiniaPhage(self):
        """
        Check conversion using values from an observed BLAST match against
        Erwinia phage phiEaH2, complete genome: a query of length 111
        matched with bit score 37.3537 and e-value 0.0813089, where the
        BLAST iteration statistics gave db-num 5660, db-len 168142520 and
        hsp-len (the length adjustment) 26.
        """
        result = bitScoreToEValue(
            bitScore=37.3537, dbSize=168142520, dbSequenceCount=5660,
            queryLength=111, lengthAdjustment=26)
        self.assertAlmostEqual(0.0813089, result, places=4)

    def testParameciumBursaria(self):
        """
        Check conversion using values from an observed BLAST match against
        Paramecium bursaria Chlorella virus NE-JV-1, partial genome: a
        query of length 172 matched with bit score 42.7638 and e-value
        0.0359052, where the BLAST iteration statistics gave db-num
        1456080, db-len 1931895878 and hsp-len (the length adjustment) 30.
        """
        result = bitScoreToEValue(
            bitScore=42.7638, dbSize=1931895878, dbSequenceCount=1456080,
            queryLength=172, lengthAdjustment=30)
        self.assertAlmostEqual(0.0359052, result, places=4)
class TestEValueToBitScore(TestCase):
    """
    Tests for the eValueToBitScore function.
    """

    def testTrivial(self):
        """
        An e-value of 1.0 against a database of size 1 holding a single
        sequence, with a query of length 1 and no length adjustment, must
        convert to a bit score of 0.0
        """
        result = eValueToBitScore(
            eValue=1.0, dbSize=1, dbSequenceCount=1, queryLength=1,
            lengthAdjustment=0)
        self.assertEqual(0.0, result)

    def testErwiniaPhage(self):
        """
        Check conversion using values from an observed BLAST match against
        Erwinia phage phiEaH2, complete genome: a query of length 111
        matched with bit score 37.3537 and e-value 0.0813089, where the
        BLAST iteration statistics gave db-num 5660, db-len 168142520 and
        hsp-len (the length adjustment) 26.
        """
        result = eValueToBitScore(
            eValue=0.0813089, dbSize=168142520, dbSequenceCount=5660,
            queryLength=111, lengthAdjustment=26)
        self.assertAlmostEqual(37.3537, result, places=4)

    def testParameciumBursaria(self):
        """
        Check conversion using values from an observed BLAST match against
        Paramecium bursaria Chlorella virus NE-JV-1, partial genome: a
        query of length 172 matched with bit score 42.7638 and e-value
        0.0359052, where the BLAST iteration statistics gave db-num
        1456080, db-len 1931895878 and hsp-len (the length adjustment) 30.
        """
        result = eValueToBitScore(
            eValue=0.0359052, dbSize=1931895878, dbSequenceCount=1456080,
            queryLength=172, lengthAdjustment=30)
        self.assertAlmostEqual(42.7638, result, places=4)
|
terrycojones/dark-matter
|
test/blast/test_score.py
|
Python
|
mit
| 12,605
|
[
"BLAST"
] |
697a5c41b86c16767476a3f7975db18fe09d89137444d6736ebba7d7a78b14ab
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flags used by smurf training and evaluation."""
from absl import flags
FLAGS = flags.FLAGS
# General flags.
flags.DEFINE_bool(
    'no_tf_function', False, 'If True, run without'
    ' tf functions. This incurs a performance hit, but can'
    ' make debugging easier.')
# Dataset specs are semicolon-separated "<format>:<path>" pairs.
flags.DEFINE_string('train_on', '',
                    '"format0:path0;format1:path1", e.g. "kitti:/tmp/..."')
flags.DEFINE_string('eval_on', '',
                    '"format0:path0;format1:path1", e.g. "kitti:/tmp/..."')
flags.DEFINE_string('plot_dir', '', 'Path to directory where plots are saved.')
flags.DEFINE_string('checkpoint_dir', '',
                    'Path to directory for saving and restoring checkpoints.')
flags.DEFINE_string('init_checkpoint_dir', '',
                    'Path to directory for initializing from a checkpoint.')
flags.DEFINE_bool('check_data', False,
                  'Flag to indicate whether to check the dataset.')
flags.DEFINE_bool(
    'plot_debug_info', False,
    'Flag to indicate whether to plot debug info during training.')
flags.DEFINE_bool(
    'reset_global_step', True, 'Reset global step to 0 after '
    'loading from init_checkpoint')
flags.DEFINE_bool(
    'reset_optimizer', True, 'Reset optimizer internals after '
    'loading from init_checkpoint')
# Training flags.
flags.DEFINE_bool('evaluate_during_train', False,
                  'Whether or not to have the GPU train job perform evaluation '
                  'between epochs.')
flags.DEFINE_bool('from_scratch', False,
                  'Train from scratch. Do not restore the last checkpoint.')
flags.DEFINE_bool('no_checkpointing', False,
                  'Do not save model checkpoints during training.')
flags.DEFINE_integer('epoch_length', 1000,
                     'Number of gradient steps per epoch.')
flags.DEFINE_integer('num_train_steps', int(75000),
                     'Number of gradient steps to train for.')
# Self-supervision is off until 'selfsup_after_num_steps' steps, then ramped
# up over 'selfsup_ramp_up_steps' steps (see the flag descriptions below).
flags.DEFINE_integer('selfsup_after_num_steps', int(31250),
                     'Number of gradient steps before self-supervision.')
flags.DEFINE_integer('selfsup_ramp_up_steps', int(6250),
                     'Number of gradient steps for ramping up self-sup.')
flags.DEFINE_integer('shuffle_buffer_size', 1024,
                     'Shuffle buffer size for training.')
flags.DEFINE_integer('height', 296, 'Image height for training and evaluation.')
# Bug fix: the help text for 'width' previously said "Image height"
# (copy/paste error from the flag above).
flags.DEFINE_integer('width', 696, 'Image width for training and evaluation.')
flags.DEFINE_bool('crop_instead_of_resize', True, 'Crops images for training '
                  'instead of resizing the images.')
flags.DEFINE_integer('seq_len', 2, 'Sequence length for training flow.')
flags.DEFINE_integer('virtual_gpus', 1, 'How many virtual GPUs to run with.')
flags.DEFINE_integer('global_gpu_batch_size', 1, 'Batch size for training flow '
                     'on gpu. If using multiple GPUs, this is the sum of the '
                     'batch size across all GPU replicas.')
flags.DEFINE_integer('num_gpus', 1, '')
# Optimization flags.
flags.DEFINE_string('optimizer', 'adam', 'One of "adam", "sgd"')
flags.DEFINE_bool('gradient_clipping', True, 'Apply gradient clipping.')
flags.DEFINE_float('gradient_clipping_max_value', 1.0, 'Maximum value used '
                   'for the gradient clipping if active.')
flags.DEFINE_float('start_learning_rate', 1e-4, 'The initial learning rate '
                   'which will be warmed up into the final learning rate.')
flags.DEFINE_float('warm_up_steps', 0, 'Number of steps to warm up into the '
                   'final learning rate.')
flags.DEFINE_float('gpu_learning_rate', 2e-4, 'Learning rate for training '
                   'SMURF on GPU.')
flags.DEFINE_integer('lr_decay_after_num_steps', 62500, '')
flags.DEFINE_integer('lr_decay_steps', 2500, '')
flags.DEFINE_string('lr_decay_type', 'exponential',
                    'One of ["none", "exponential"]')
flags.DEFINE_bool(
    'stop_gradient_mask', True, 'Whether or not to stop the '
    'gradient propagation through the occlusion mask.')
flags.DEFINE_bool(
    'full_size_warp', True, 'Whether or not to perform the warp '
    'at full resolution.')
flags.DEFINE_integer('num_occlusion_iterations', 1,
                     'If occlusion estimation is "iterative"')
flags.DEFINE_bool('only_forward', False, 'Only compute loss in the forward '
                  'temporal direction.')
flags.DEFINE_string('teacher_image_version', 'original',
                    'one of original, augmented')
flags.DEFINE_bool('log_per_replica_values', False, 'Whether or not to log per '
                  'replica info.')
flags.DEFINE_float('dropout_rate', 0.1, 'Amount of level dropout.')
# Self-supervision flags.
flags.DEFINE_bool(
    'resize_selfsup', True, 'Bilinearly resize the cropped image'
    'during self-supervision.')
flags.DEFINE_integer(
    'selfsup_crop_height', 64,
    'Number of pixels removed from the image at top and bottom'
    'for self-supervision.')
flags.DEFINE_integer(
    'selfsup_crop_width', 64,
    'Number of pixels removed from the image left and right'
    'for self-supervision.')
flags.DEFINE_float(
    'fb_sigma_teacher', 0.003,
    'Forward-backward consistency scaling constant used for self-supervision.')
flags.DEFINE_float(
    'fb_sigma_student', 0.03,
    'Forward-backward consistency scaling constant used for self-supervision.')
flags.DEFINE_string('selfsup_mask', 'gaussian',
                    'One of [gaussian, ddflow, advection, none]')
# Loss-weight flags.
flags.DEFINE_float('weight_supervision', 0.1, 'Weight for the supervised-loss.')
flags.DEFINE_float('weight_census', 1.0, 'Weight for census loss.')
flags.DEFINE_float('weight_smooth1', 0.0, 'Weight for smoothness loss.')
flags.DEFINE_float('weight_smooth2', 2.0, 'Weight for smoothness loss.')
flags.DEFINE_float('smoothness_edge_constant', 150.,
                   'Edge constant for smoothness loss.')
flags.DEFINE_string('smoothness_edge_weighting', 'exponential',
                    'One of: gaussian, exponential')
flags.DEFINE_integer('smoothness_at_level', 2, 'Resolution level at which the '
                     'smoothness loss will be applied if active.')
flags.DEFINE_integer('smoothness_after_num_steps', -1,
                     'Number of steps to take before turning on smoothness '
                     'loss.')
flags.DEFINE_float('weight_selfsup', 0.3, 'Weight for self-supervision loss.')
# Occlusion estimation parameters
flags.DEFINE_string('occlusion_estimation', 'wang',
                    'One of: none, brox, wang')
flags.DEFINE_integer('occ_after_num_steps_brox', 25000, '')
flags.DEFINE_integer('occ_after_num_steps_wang', 0, '')
flags.DEFINE_integer('occ_after_num_steps_forward_collision', 0, '')
flags.DEFINE_string(
    'distance_census', 'ddflow', 'Which type of distance '
    'metric to use when computing loss.')
# Architecture flags.
flags.DEFINE_string(
    'feature_architecture', 'raft',
    'Which type of feature architecture to use. '
    'Supported values are pwc or raft.')
flags.DEFINE_string(
    'flow_architecture', 'raft', 'Which type of flow architecture to use. '
    'Supported values are pwc or raft.')
flags.DEFINE_string(
    'train_mode', 'sequence-unsupervised',
    'Controls what kind of training loss '
    'should be used. Can be one of the following options: '
    'unsupervised, supervised, sequence-supervised, sequence-unsupervised.')
flags.DEFINE_bool(
    'resize_gt_flow_supervision', True, 'Whether or not to '
    'resize ground truth flow for the supervised loss.')
# Gin configuration flags.
flags.DEFINE_multi_string(
    'config_file', None,
    'Path to a Gin config file. Can be specified multiple times. '
    'Order matters, later config files override former ones.')
flags.DEFINE_multi_string(
    'gin_bindings', None,
    'Newline separated list of Gin parameter bindings. Can be specified '
    'multiple times. Overrides config from --config_file.')
flags.DEFINE_bool('run_eval_once', False, 'If True, run the evaluator only one '
                  'time.')
|
google-research/google-research
|
smurf/smurf_flags.py
|
Python
|
apache-2.0
| 8,532
|
[
"Gaussian"
] |
2fae5e8eb701cace514a05d15e8dcb288f85b9bc3b3fec5462e8ca9626b8e6a3
|
""" DIRAC FileCatalog Security Manager mix-in class
"""
__RCSID__ = "$Id$"
import os
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Security.Properties import FC_MANAGEMENT
_readMethods = ['exists', 'isFile', 'getFileSize', 'getFileMetadata',
'getReplicas', 'getReplicaStatus', 'getFileAncestors',
'getFileDescendents', 'listDirectory', 'isDirectory',
'getDirectoryReplicas', 'getDirectorySize', 'getDirectoryMetadata']
_writeMethods = ['changePathOwner', 'changePathGroup', 'changePathMode',
'addFile', 'setFileStatus', 'removeFile', 'addReplica',
'removeReplica', 'setReplicaStatus', 'setReplicaHost',
'addFileAncestors', 'createDirectory', 'removeDirectory',
'setMetadata', '__removeMetadata']
class SecurityManagerBase(object):
  """Base class for FileCatalog security managers.

  Subclasses implement getPathPermissions(); hasAccess() translates catalog
  method names into Read/Write operations and applies the common rules
  (admin short-circuit, global read access, per-path permission lookup).
  """

  def __init__(self, database=None):
    self.db = database

  def setDatabase(self, database):
    self.db = database

  def getPathPermissions(self, paths, credDict):
    """ Get path permissions according to the policy
    """
    return S_ERROR('The getPathPermissions method must be implemented in the inheriting class')

  def hasAccess(self, opType, paths, credDict):
    """Check whether credDict may perform opType on every path in paths."""
    # Translate a catalog method name into a generic operation type.
    if opType in _readMethods:
      opType = 'Read'
    elif opType in _writeMethods:
      opType = 'Write'
    # Administrators are allowed everything.
    adminResult = self.hasAdminAccess(credDict)
    if not adminResult['OK']:
      return adminResult
    if adminResult['Value']:
      return S_OK({'Successful': dict.fromkeys(paths, True), 'Failed': {}})
    if opType.lower() not in ['read', 'write', 'execute']:
      return S_ERROR("Operation type not known")
    # A catalog with global read access grants any read without a lookup.
    if self.db.globalReadAccess and (opType.lower() == 'read'):
      return S_OK({'Successful': dict.fromkeys(paths, True), 'Failed': {}})
    # Delegate to the policy implemented by the subclass.
    permResult = self.getPathPermissions(paths, credDict)
    if not permResult['OK']:
      return permResult
    granted = permResult['Value']['Successful']
    successful = dict((path, bool(permDict[opType]))
                      for path, permDict in granted.items())
    failed = dict(permResult['Value']['Failed'])
    return S_OK({'Successful': successful, 'Failed': failed})

  def hasAdminAccess(self, credDict):
    """True when the credentials carry the FC_MANAGEMENT property."""
    return S_OK(FC_MANAGEMENT in credDict['properties'])
class NoSecurityManager(SecurityManagerBase):
  """Permissive security manager: every operation is granted to everyone."""

  def getPathPermissions(self, paths, credDict):
    """ Get path permissions according to the policy
    """
    # Give each path its own permission dict so callers can mutate safely.
    granted = dict((path, {'Read': True, 'Write': True, 'Execute': True})
                   for path in paths)
    return S_OK({'Successful': granted, 'Failed': {}})

  def hasAccess(self, opType, paths, credDict):
    """Grant any operation on every requested path."""
    return S_OK({'Successful': dict.fromkeys(paths, True), 'Failed': {}})

  def hasAdminAccess(self, credDict):
    """Everyone is an administrator under this policy."""
    return S_OK(True)
class DirectorySecurityManager(SecurityManagerBase):
def getPathPermissions(self, paths, credDict):
""" Get path permissions according to the policy
"""
toGet = dict(zip(paths, [[path] for path in paths]))
permissions = {}
failed = {}
while toGet:
res = self.db.dtree.getPathPermissions(toGet.keys(), credDict)
if not res['OK']:
return res
for path, mode in res['Value']['Successful'].items():
for resolvedPath in toGet[path]:
permissions[resolvedPath] = mode
toGet.pop(path)
for path, error in res['Value']['Failed'].items():
if error != 'No such file or directory':
for resolvedPath in toGet[path]:
failed[resolvedPath] = error
toGet.pop(path)
for path, resolvedPaths in toGet.items():
if path == '/':
for resolvedPath in resolvedPaths:
permissions[path] = {'Read': True, 'Write': True, 'Execute': True}
if os.path.dirname(path) not in toGet:
toGet[os.path.dirname(path)] = []
toGet[os.path.dirname(path)] += resolvedPaths
toGet.pop(path)
if self.db.globalReadAccess:
for path in permissions:
permissions[path]['Read'] = True
return S_OK({'Successful': permissions, 'Failed': failed})
class FullSecurityManager(SecurityManagerBase):
  """Security manager consulting the file manager first, then the tree.

  File-level permissions (db.fileManager) take precedence; anything not
  resolved there falls back to the directory tree, walking up to '/'.
  """

  def getPathPermissions(self, paths, credDict):
    """ Get path permissions according to the policy
    """
    # toGet maps a path still to be resolved -> list of originally
    # requested paths that resolve through it.
    toGet = dict(zip(paths, [[path] for path in paths]))
    permissions = {}
    failed = {}
    # First pass: ask the file manager for file-level permissions.
    res = self.db.fileManager.getPathPermissions(paths, credDict)
    if not res['OK']:
      return res
    for path, mode in res['Value']['Successful'].items():
      for resolvedPath in toGet[path]:
        permissions[resolvedPath] = mode
      toGet.pop(path)
    # Unresolved paths: queue their parent directory for the tree lookup.
    # NOTE(review): mutating toGet while iterating items() relies on
    # Python 2's items() returning a list snapshot.
    for path, resolvedPaths in toGet.items():
      if path == '/':
        # Root fallback: grant everything.
        # NOTE(review): writes permissions[path], not
        # permissions[resolvedPath] -- same suspicious pattern as in
        # DirectorySecurityManager; confirm against upstream DIRAC.
        for resolvedPath in resolvedPaths:
          permissions[path] = {'Read': True, 'Write': True, 'Execute': True}
      if os.path.dirname(path) not in toGet:
        toGet[os.path.dirname(path)] = []
      toGet[os.path.dirname(path)] += resolvedPaths
      toGet.pop(path)
    # Second pass: walk up the directory tree until everything resolves.
    while toGet:
      # NOTE(review): rebinding the 'paths' parameter here shadows the
      # original argument; harmless but easy to misread.
      paths = toGet.keys()
      res = self.db.dtree.getPathPermissions(paths, credDict)
      if not res['OK']:
        return res
      for path, mode in res['Value']['Successful'].items():
        for resolvedPath in toGet[path]:
          permissions[resolvedPath] = mode
        toGet.pop(path)
      for path, error in res['Value']['Failed'].items():
        # 'No such file or directory' just means: retry with the parent.
        if error != 'No such file or directory':
          for resolvedPath in toGet[path]:
            failed[resolvedPath] = error
          toGet.pop(path)
      for path, resolvedPaths in toGet.items():
        if path == '/':
          for resolvedPath in resolvedPaths:
            permissions[path] = {'Read': True, 'Write': True, 'Execute': True}
        if os.path.dirname(path) not in toGet:
          toGet[os.path.dirname(path)] = []
        toGet[os.path.dirname(path)] += resolvedPaths
        toGet.pop(path)
    # Global read access overrides each path's Read permission.
    if self.db.globalReadAccess:
      for path in permissions:
        permissions[path]['Read'] = True
    return S_OK({'Successful': permissions, 'Failed': failed})
class DirectorySecurityManagerWithDelete(DirectorySecurityManager):
  """ This security manager implements a Delete operation.
      For Read, Write, Execute, it's behavior is the one of DirectorySecurityManager.
      For Delete, if the directory does not exist, we return True.
      If the directory exists, then we test the Write permission
  """

  def hasAccess(self, opType, paths, credDict):
    """Resolve access, treating removal methods as a distinct Delete op."""
    # The other SecurityManager do not support the Delete operation,
    # and it is transformed in Write
    # so we keep the original one
    # NOTE(review): the resolved operation is stashed on self, so concurrent
    # calls on a shared instance would race -- confirm instances are not
    # shared across threads.
    if opType in ['removeFile', 'removeReplica', 'removeDirectory']:
      self.opType = 'Delete'
    elif opType in _readMethods:
      self.opType = 'Read'
    elif opType in _writeMethods:
      self.opType = 'Write'
    # The parent hasAccess() calls back into our getPathPermissions(),
    # which reads self.opType set just above.
    res = super(DirectorySecurityManagerWithDelete, self).hasAccess(opType, paths, credDict)
    # We reinitialize self.opType in case someone would call getPathPermissions directly
    self.opType = ''
    return res

  def getPathPermissions(self, paths, credDict):
    """ Get path permissions according to the policy
    """
    # If we are testing in anything else than a Delete, just return the parent methods
    if hasattr(self, 'opType') and self.opType.lower() != 'delete':
      return super(DirectorySecurityManagerWithDelete, self).getPathPermissions(paths, credDict)
    # If the object (file or dir) does not exist, we grant the permission
    res = self.db.dtree.exists(paths)
    if not res['OK']:
      return res
    nonExistingDirectories = set(path for path in res['Value']['Successful'] if not res['Value']['Successful'][path])
    res = self.db.fileManager.exists(paths)
    if not res['OK']:
      return res
    nonExistingFiles = set(path for path in res['Value']['Successful'] if not res['Value']['Successful'][path])
    # A path may be deleted freely when it is neither a known directory
    # nor a known file.
    nonExistingObjects = nonExistingDirectories & nonExistingFiles
    permissions = {}
    failed = {}
    for path in nonExistingObjects:
      permissions[path] = {'Read': True, 'Write': True, 'Execute': True}
      # The try catch is just to protect in case there are duplicate in the paths
      try:
        paths.remove(path)
      except Exception as _e:
        try:
          paths.pop(path)
        except Exception as _ee:
          pass
    # For all the paths that exist, check the write permission
    if paths:
      res = super(DirectorySecurityManagerWithDelete, self).getPathPermissions(paths, credDict)
      if not res['OK']:
        return res
      failed = res['Value']['Failed']
      permissions.update(res['Value']['Successful'])
    return S_OK({'Successful': permissions, 'Failed': failed})
class PolicyBasedSecurityManager(SecurityManagerBase):
  """ This security manager loads a python plugin and forwards the
      calls to it. The python plugin has to be defined in the CS under
      /Systems/DataManagement/YourSetup/FileCatalog/SecurityPolicy
  """

  def __init__(self, database=False):
    super(PolicyBasedSecurityManager, self).__init__(database)
    from DIRAC.ConfigurationSystem.Client.PathFinder import getServiceSection
    from DIRAC import gConfig
    from DIRAC.ConfigurationSystem.Client.Helpers.Path import cfgPath
    serviceSection = getServiceSection('DataManagement/FileCatalog')
    pluginPath = gConfig.getValue(cfgPath(serviceSection, 'SecurityPolicy'))
    if not pluginPath:
      raise Exception("SecurityPolicy not defined in service options")
    pluginCls = self.__loadPlugin(pluginPath)
    self.policyObj = pluginCls(database=database)
    # For the old clients to work with the new policy (since getPathPermissions is meant to disappear...)
    # we fetch the old SecurityManager, and we call it if needed in the plugin.
    oldSecurityManagerName = gConfig.getValue(cfgPath(serviceSection, 'OldSecurityManager'), '')
    self.policyObj.oldSecurityManager = None
    if oldSecurityManagerName:
      # Security fix: this value used to be passed to eval(), which would
      # execute arbitrary code taken from the configuration. Resolve the
      # class name in this module's namespace instead, and make sure it
      # really is a security manager before instantiating it.
      oldManagerCls = globals().get(oldSecurityManagerName)
      if oldManagerCls is None or not (isinstance(oldManagerCls, type) and
                                       issubclass(oldManagerCls, SecurityManagerBase)):
        raise Exception("Unknown OldSecurityManager '%s'" % oldSecurityManagerName)
      self.policyObj.oldSecurityManager = oldManagerCls(self.db)

  @staticmethod
  def __loadPlugin(pluginPath):
    """ Create an instance of requested plugin class, loading and importing it when needed.
    This function could raise ImportError when plugin cannot be found or TypeError when
    loaded class object isn't inherited from SecurityManagerBase class.
    :param str pluginName: dotted path to plugin, specified as in import statement, i.e.
    "DIRAC.CheesShopSystem.private.Cheddar" or alternatively in 'normal' path format
    "DIRAC/CheesShopSystem/private/Cheddar"
    :return: object instance
    This function try to load and instantiate an object from given path. It is assumed that:
    - :pluginPath: is pointing to module directory "importable" by python interpreter, i.e.: it's
    package's top level directory is in $PYTHONPATH env variable,
    - the module should consist a class definition following module name,
    - the class itself is inherited from SecurityManagerBase
    If above conditions aren't meet, function is throwing exceptions:
    - ImportError when class cannot be imported
    - TypeError when class isn't inherited from SecurityManagerBase
    """
    # Accept "a/b/c" as well as "a.b.c".
    if "/" in pluginPath:
      pluginPath = ".".join([chunk for chunk in pluginPath.split("/") if chunk])
    pluginName = pluginPath.split(".")[-1]
    if pluginName not in globals():
      mod = __import__(pluginPath, globals(), fromlist=[pluginName])
      pluginClassObj = getattr(mod, pluginName)
    else:
      pluginClassObj = globals()[pluginName]
    if not issubclass(pluginClassObj, SecurityManagerBase):
      raise TypeError("Security policy '%s' isn't inherited from SecurityManagerBase class" % pluginName)
    return pluginClassObj

  def hasAccess(self, opType, paths, credDict):
    """Forward the access check to the loaded policy plugin."""
    return self.policyObj.hasAccess(opType, paths, credDict)

  def getPathPermissions(self, paths, credDict):
    """Forward the permission lookup to the loaded policy plugin."""
    return self.policyObj.getPathPermissions(paths, credDict)
|
fstagni/DIRAC
|
DataManagementSystem/DB/FileCatalogComponents/SecurityManager.py
|
Python
|
gpl-3.0
| 12,375
|
[
"DIRAC"
] |
2f6204f9fe6acf78d78f2055be322209f97eb97fa4df3ffc75dc76ff2d3e75c9
|
#!/usr/bin/env python
"""
ADS to BibDesk -- frictionless import of ADS publications into BibDesk
Copyright (C) 2013 Rui Pereira <rui.pereira@gmail.com> and
Jonathan Sick <jonathansick@mac.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Based on ADS to Bibdesk automator action by
Jonathan Sick, jonathansick@mac.com, August 2007
Input may be one of the following:
- ADS abstract page URL
- ADS bibcode
- arXiv abstract page
- arXiv identifier
"""
import datetime
import difflib
import fnmatch
import glob
import logging
import math
import optparse
import os
import pprint
import re
import socket
import sys
import tempfile
import time
# cgi.parse_qs is deprecated since 2.6
# but OS X 10.5 only has 2.5
import cgi
import urllib2
import urlparse
import subprocess as sp
# Bootstrap PyObjC's AppKit: if this interpreter cannot import it, try to
# borrow it from the system python's sys.path; failing that, tell the user
# to install PyObjC and exit.
try:
    import AppKit
except ImportError:
    # is this not the system python?
    # NOTE(review): eval() of another interpreter's stdout is fragile -- it
    # trusts whatever `/usr/bin/env python` prints. Kept as-is.
    syspath = eval(sp.Popen('/usr/bin/env python -c "import sys; print(sys.path)"',
                            shell=True, stdout=sp.PIPE).stdout.read())
    for p in syspath:
        # use the first path entry that contains an AppKit build
        if os.path.isdir(p) and glob.glob(os.path.join(p, '*AppKit*')):
            sys.path.insert(0, p)
            break
    # retry
    try:
        import AppKit
    except ImportError:
        # PyObjC is missing entirely: show a dialog, open the install page,
        # and quit.
        import webbrowser
        url = 'http://packages.python.org/pyobjc/install.html'
        msg = 'Please install PyObjC...'
        print msg
        sp.call('osascript -e "tell application \\"System Events\\" to display dialog \\"%s\\" buttons {\\"OK\\"} default button \\"OK\\""' % msg,
                shell=True, stdout=open('/dev/null', 'w'))
        # open browser in PyObjC install page
        webbrowser.open(url)
        sys.exit()
# Python 2 stdlib HTML helpers (renamed in Python 3).
from HTMLParser import HTMLParser, HTMLParseError
from htmlentitydefs import name2codepoint
# default timeout for url calls
socket.setdefaulttimeout(240)
def main():
    """Parse options and launch main loop"""
    usage = """Usage: %prog [options] [article_token or pdf_directory]
adsbibdesk helps you add astrophysics articles listed on NASA/ADS
and arXiv.org to your BibDesk database. There are three modes
in this command line interface:
1. Article mode, for adding single papers to BibDesk given tokens.
2. PDF Ingest mode, where PDFs in a directory are analyzed and added to
BibDesk with ADS meta data.
3. Pre-print Update mode, for updating arXiv pre-prints automatically
with newer bibcodes.
In article mode, adsbibdesk accepts many kinds of article tokens:
- the URL of an ADS or arXiv article page,
- the ADS bibcode of an article (e.g. 1998ApJ...500..525S), or
- the arXiv identifier of an article (e.g. 0911.4956).
(Example: `adsbibdesk 1998ApJ...500..525S`)
In PDF Ingest mode, you specify a directory containing PDFs instead of
an article token (Example: `adsbibdesk -p pdfs` will ingest PDFs from
the pdfs/ directory).
In Pre-print Update mode, every article with an arXiv bibcode will be
updated if it has a new bibcode."""
    version = "3.1"
    epilog = "For more information, visit www.jonathansick.ca/adsbibdesk" \
        + " email jonathansick at mac.com or tweet @jonathansick"
    # Build the CLI: global options plus one option group per mode.
    parser = optparse.OptionParser(usage=usage, version=version,
                                   epilog=epilog)
    parser.add_option('-d', '--debug',
                      dest="debug", default=False, action="store_true",
                      help="Debug mode; prints extra statements")
    parser.add_option('-o', '--only_pdf',
                      default=False, action='store_true',
                      help="Download and open PDF for the selected [article_token].")
    pdfIngestGroup = optparse.OptionGroup(parser, "PDF Ingest Mode",
                                          description=None)
    pdfIngestGroup.add_option('-p', '--ingest_pdfs',
                              dest="ingestPdfs", default=False, action="store_true",
                              help="Ingest a folder of PDFs."
                              " Positional argument should be directory"
                              " containing PDFs."
                              " e.g., `adsbibdesk -p .` for the current directory")
    # NOTE(review): -r uses action="store_false" with default=True, so
    # recursion is ON by default and -r turns it OFF -- confirm intended.
    pdfIngestGroup.add_option('-r', '--recursive',
                              dest='recursive', default=True, action="store_false",
                              help="Search for PDFs recursively in the directory tree.")
    parser.add_option_group(pdfIngestGroup)
    arXivUpdateGroup = optparse.OptionGroup(parser, "Pre-print Update Mode",
                                            description=None)
    arXivUpdateGroup.add_option('-u', '--update_arxiv',
                                default=False, action="store_true",
                                help='Check arXiv pre-prints for updated bibcodes')
    arXivUpdateGroup.add_option('-f', '--from_date',
                                help='MM/YY date of publication from which to start updating arXiv')
    arXivUpdateGroup.add_option('-t', '--to_date',
                                help='MM/YY date of publication up to which update arXiv')
    parser.add_option_group(arXivUpdateGroup)
    options, args = parser.parse_args()
    # Get preferences from (optional) config file
    prefs = Preferences()
    # inject options into preferences for later reference
    prefs['options'] = options.__dict__
    if options.debug:
        prefs['debug'] = True
    # Logging saves to log file only when in DEBUG mode.
    # Always prints to STDOUT as well.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(name)s %(levelname)s %(message)s',
                        filename=prefs['log_path'])
    if not prefs['debug']:
        logging.getLogger('').setLevel(logging.INFO)
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    logging.getLogger('').addHandler(ch)
    logging.info("Starting ADS to BibDesk")
    logging.debug("ADS to BibDesk version %s", version)
    logging.debug("Python: %s", sys.version)
    # Dispatch to the selected mode; default is article mode.
    if options.ingestPdfs:
        ingest_pdfs(options, args, prefs)
    elif options.only_pdf:
        # short-circuit process_articles
        # since BibDesk is not needed
        process_token(args[0], prefs, None)
    elif options.update_arxiv:
        update_arxiv(options, prefs)
    else:
        process_articles(args, prefs)
def mkMNRAS(author, year):
    """Build the MNRAS-style cite key for a publication.

    Adapted from Natalia's original version.

    :param author: list of author name strings (BibTeX "Last, F." style).
    :param year: publication year as a string.
    :return: dict with a single ``'MNRAScitekey'`` entry. Up to three
        authors are joined with dots (``Smith.Jones.2010``); four or more
        collapse to ``First.etal.year``.
    """
    # Strip a BibTeX '},'-terminated brace group and any non-word
    # characters, leaving just the bare surname. The pattern is a raw
    # string: '\W' in a plain literal is an invalid escape in Python 3,
    # and compiling once avoids recompiling per author.
    clean = re.compile(r'},.*|\W')
    names = [clean.sub('', a) for a in author[:3]]
    if len(author) >= 4:
        citekey = names[0] + '.etal.' + year
    else:
        # Covers 1-3 authors; an empty author list yields '.' + year
        # instead of the IndexError the previous version raised.
        citekey = '.'.join(names) + '.' + year
    return {'MNRAScitekey': citekey}
def process_articles(args, prefs, delay=15):
    """Workflow for processing article tokens"""
    # Fall back to standard input when no tokens were given on the
    # command line; blank lines are ignored.
    if args:
        tokens = list(args)
    else:
        tokens = [line.strip()
                  for line in sys.stdin.readlines()
                  if line.strip()]
    # AppKit hook for BibDesk
    bibdesk = BibDesk()
    last = tokens[-1] if tokens else None
    for token in tokens:
        process_token(token, prefs, bibdesk)
        # Pause between requests, but not after the final token.
        if len(tokens) > 1 and token != last:
            time.sleep(delay)
    bibdesk.app.dealloc()
def process_token(articleToken, prefs, bibdesk):
    """Process a single article token from the user.
    :param articleToken: Any user-supplied `str` token.
    :param prefs: A `Preferences` instance.
    :param bibdesk: A `BibDesk` AppKit hook instance (may be None when
        only the PDF is wanted -- see the only_pdf branch below).
    :return: False when the token could not be resolved or the PDF could
        not be opened; True after opening a PDF in only_pdf mode; None
        after a normal BibDesk import.
    """
    # Determine what we're dealing with
    # The goal is to get a URL into ADS
    logging.debug("process_token found article token %s", articleToken)
    connector = ADSConnector(articleToken, prefs)
    # parse the ADS HTML file
    ads = ADSHTMLParser(prefs=prefs)
    if isinstance(connector.adsRead, basestring):
        ads.parse(connector.adsRead)
    # parsed from arXiv - dummy ads info
    elif connector.adsRead and getattr(connector, 'bibtex') is not None:
        # Populate the parser fields by hand from the arXiv BibTeX record.
        ads.bibtex = connector.bibtex
        ads.arxivid = ads.bibtex.Eprint
        ads.author = ads.bibtex.Author.split(' and ')
        ads.year = ads.bibtex.Year
        ads.title = ads.bibtex.Title
        ads.abstract = ads.bibtex.Abstract
        ads.comment = ads.bibtex.AdsComment
        # original URL where we *should* have gotten the info
        ads.bibtex.AdsURL = connector.adsURL
        # inject arXiv mirror into ArXivURL
        if 'arxiv_mirror' in prefs and prefs['arxiv_mirror']:
            tmpurl = urlparse.urlsplit(ads.bibtex.ArXivURL)
            ads.bibtex.ArXivURL = urlparse.urlunsplit((tmpurl.scheme,
                                                       prefs['arxiv_mirror'],
                                                       tmpurl.path, tmpurl.query,
                                                       tmpurl.fragment))
        # link for PDF download
        try:
            ads.links = {'preprint': [l.get('href', '')
                                      for l in ads.bibtex.info['link']
                                      if l.get('title') == 'pdf'][0]}
        except IndexError:
            # no PDF link advertised for this record
            pass
    elif connector.adsRead is None:
        logging.debug("process_token skipping %s", articleToken)
        return False
    # Get MnrasCitekey
    if connector.adsRead is not None:
        ads.bibtex.info.update(mkMNRAS(ads.author, ads.year))
        if not isinstance(connector.adsRead, basestring):
            if getattr(connector, 'bibtex') is not None:
                ads.bibtex.MNRAScitekey = ads.bibtex.info['MNRAScitekey']
    # Remove ADS keywords?
    if not ads.prefs['ads_keywords']:
        if 'keywords' in ads.bibtex.info.keys():
            del ads.bibtex.info['keywords']
    # get PDF first
    pdf = ads.getPDF()
    if prefs['options'].get('only_pdf'):
        if not pdf.endswith('.pdf'):
            return False
        # just open PDF
        # (Python 2 "and-or" idiom: prefs['pdf_reader'] if set, else Finder)
        reader = ('pdf_reader' in prefs and
                  prefs['pdf_reader'] is not None) and\
            prefs['pdf_reader'] or 'Finder'
        app = AppKit.NSAppleScript.alloc()
        app.initWithSource_('tell application "%s" '
                            'to open ("%s" as POSIX file)' % (reader, pdf)
                            ).executeAndReturnError_(None)
        # get name of the used viewer
        # (Finder may be defaulted to something else than Preview)
        if reader == 'Finder':
            reader = app.initWithSource_(
                'return name of (info for (path to frontmost application))'
            ).executeAndReturnError_(None)[0].stringValue()
        logging.debug('opening %s with %s' % (pdf, reader))
        if 'skim' in reader.lower():
            time.sleep(1)  # give it time to open
            app.initWithSource_('tell application "%s" to set view settings '
                                'of first document to {auto scales:true}'
                                % reader).executeAndReturnError_(None)
        app.dealloc()
        return True
    # search for already existing publication
    # with exactly the same title and first author
    # match title and first author using fuzzy string comparison
    found = difflib.get_close_matches(ads.title, bibdesk.titles,
                                      n=1, cutoff=.7)
    keptPDFs = []
    # first author is the same
    if found and difflib.SequenceMatcher(None,
                                         bibdesk.authors(bibdesk.pid(found[0]))[0],
                                         ads.author[0]).ratio() > .6:
        delete_pubs = True
        # in debug mode, ask before removing the suspected duplicate
        if prefs['debug']:
            pubs = bibdesk.citekeys(bibdesk.pid(found[0]))
            print "@@> Found repeated publications: %s" % pubs
            answer = raw_input(" Delete them [y/N]? ")
            if ((answer == '') | (answer == 'n') | (answer == 'N')):
                delete_pubs = False
                print "@@> Not deleting %s" % pubs
            else:
                print "@@> Deleting %s" % pubs
        if delete_pubs:
            # Copy old keywords
            if not ads.prefs['ads_keywords']:
                ads.bibtex.info['keywords'] = bibdesk.keywords(bibdesk.pid(found[0]))
            # Deal with pdf
            keptPDFs += bibdesk.safe_delete(bibdesk.pid(found[0]))
            notify('Duplicate publication removed',
                   articleToken, ads.title)
            bibdesk.refresh()
    # add new entry
    # (backslashes and quotes must be escaped for the AppleScript string)
    pub = bibdesk('import from "%s"' % ads.bibtex.__str__().replace('\\', r'\\').replace('"', r'\"'))
    pub = pub.descriptorAtIndex_(1).descriptorAtIndex_(3).stringValue()  # pub id
    # automatic cite key
    bibdesk('set cite key to generated cite key', pub)
    # abstract
    if ads.abstract.startswith('http://'):
        # old scanned articles
        bibdesk('make new linked URL at end of linked URLs '
                'with data "%s"' % ads.abstract, pub)
    else:
        bibdesk('set abstract to "%s"'
                % balance_brackets(ads.abstract.replace('\\', r'\\').replace('"', r'\"')), pub)
    if pdf.endswith('.pdf'):
        # register PDF into BibDesk
        bibdesk('add POSIX file "%s" to beginning of linked files' % pdf, pub)
    # URL for electronic version - only add it if no DOI link present
    # (they are very probably the same)
    elif 'http' in pdf and not bibdesk('value of field "doi"', pub).stringValue():
        bibdesk('make new linked URL at end of linked URLs with data "%s"' % pdf, pub)
    # add URLs as linked URL if not there yet
    urls = bibdesk('value of fields whose name ends with "url"', pub, strlist=True)
    urlspub = bibdesk('linked URLs', pub, strlist=True)
    for u in [u for u in urls if u not in urlspub]:
        bibdesk('make new linked URL at end of linked URLs with data "%s"' % u, pub)
    # add old annotated files
    for keptPDF in keptPDFs:
        bibdesk('add POSIX file "%s" to end of linked files' % keptPDF, pub)
    # automatic file name
    if 'auto_file' in prefs:
        if prefs['auto_file']:
            bibdesk('auto file', pub)
    # save bibdesk file
    bibdesk('save')
    notify('New publication added',
           bibdesk('cite key', pub).stringValue(),
           ads.title)
def balance_brackets(instr):
    '''
    Balance unpaired brackets in abstracts, which cause errors in Bibdesk.
    Code based on
    https://stackoverflow.com/questions/6701853/parentheses-pairing-issue
    The difference is that it balances out the string.
    Commented by Natalia@UFSC - 14/Sep/2017.

    Fix: ``dict.iteritems()`` was Python-2-only; ``items()`` behaves
    identically there and keeps the function usable on Python 3.
    '''
    # opener -> expected closer for each bracket type we care about
    iparens = iter('(){}[]<>')
    parens = dict(zip(iparens, iparens))
    # closer -> opener, for patching a closer that never had an opener
    inv_parens = dict((v, k) for k, v in parens.items())
    # set membership is O(1); the keys of inv_parens are all closers
    closing = set(inv_parens)
    # Stack of closing brackets still expected, innermost last.
    stack = []
    # The balanced output string built up character by character.
    outstr = ''
    for c in instr:
        # If c is an opener, remember which closer we now expect.
        expected = parens.get(c, None)
        outstr += c
        if expected is not None:
            stack.append(expected)
        elif c in closing:
            if stack:
                popped = stack.pop()
                # Mismatched pair: replace this closer with the expected one.
                if c != popped:
                    outstr = outstr[:-1] + popped
            else:
                # Closer with no opener: insert the matching opener before it.
                outstr = outstr[:-1] + inv_parens[c] + c
    # Close any openers still waiting for their partner.
    if stack:
        outstr += ''.join(stack)
    return outstr
def ingest_pdfs(options, args, prefs):
    """Workflow for attempting to ingest a directory of PDFs into BibDesk.

    This workflow attempts to scrape DOIs from the PDF text, which are then
    added to BibDesk using the usual `process_token` function.

    :param options: parsed command-line options (uses ``options.recursive``)
    :param args: positional arguments; ``args[0]`` is the PDF directory
    :param prefs: preferences passed through to `process_articles`
    """
    assert len(args) == 1, "Please pass a path to a directory"
    pdfDir = args[0]
    assert os.path.exists(pdfDir) is True, "%s does not exist" % pdfDir
    print "Searching", pdfDir
    if options.recursive:
        # Recursive glob solution from
        # http://stackoverflow.com/questions/2186525/use-a-glob-to-find-files-recursively-in-python
        pdfPaths = []
        for root, dirnames, filenames in os.walk(pdfDir):
            for filename in fnmatch.filter(filenames, '*.pdf'):
                pdfPaths.append(os.path.join(root, filename))
    else:
        # non-recursive: only PDFs directly inside pdfDir
        pdfPaths = glob.glob(os.path.join(pdfDir, "*.pdf"))
    # Process each PDF, looking for a DOI
    grabber = PDFDOIGrabber()
    found = []
    for i, pdfPath in enumerate(pdfPaths):
        dois = grabber.search(pdfPath)
        if len(dois) == 0:
            logging.info("%i of %i: no DOIs for %s" % (i + 1, len(pdfPaths), pdfPath))
        else:
            found.extend(list(dois))
            for doi in dois:
                logging.info("%i of %i: %s = %s" % (i + 1, len(pdfPaths),
                             os.path.basename(pdfPath), doi))
    # let process_articles inject everything
    if found:
        logging.info('Adding %i articles to BibDesk...' % len(found))
        process_articles(found, prefs)
def update_arxiv(options, prefs):
    """
    Workflow for updating arXiv pre-prints automatically with newer bibcodes
    (replaces update_bibdesk_arxiv.sh)

    :param options: parsed command-line options; ``from_date``/``to_date``
        restrict the update to publications in that MM/YY range
    :param prefs: preferences, used for the ADS mirror and parser settings
    """
    assert options.from_date is None or \
        re.match('^\d{2}/\d{2}$', options.from_date) is not None, \
        '--from_date needs to be in MM/YY format'
    assert options.to_date is None or \
        re.match('^\d{2}/\d{2}$', options.to_date) is not None, \
        '--to_date needs to be in MM/YY format'

    def b2d(bibtex):
        """BibTex -> publication date"""
        m = re.search('Month = \{?(\w*)\}?', bibtex).group(1)
        y = re.search('Year = \{?(\d{4})\}?', bibtex).group(1)
        return datetime.datetime.strptime(m+y, '%b%Y')

    def recent(added, fdate, tdate):
        # "cond and X or Y" idiom: fall back to a wide-open bound
        # (year 1900 / 3000) when a date option is unset
        fromdate = fdate is not None and\
            datetime.datetime.strptime(fdate, '%m/%y')\
            or datetime.datetime(1900, 1, 1)
        todate = tdate is not None and\
            datetime.datetime.strptime(tdate, '%m/%y')\
            or datetime.datetime(3000, 1, 1)
        return fromdate <= added <= todate

    # frontmost opened BibDesk document
    bibdesk = BibDesk()
    ids = []
    # check for adsurl containing arxiv or astro.ph bibcodes
    arxiv = bibdesk('return publications whose '
                    '(value of field "Adsurl" contains "arXiv") or '
                    '(value of field "Adsurl" contains "astro.ph")')
    if arxiv.numberOfItems():
        # extract arxiv id from the ADS url
        ids = [u.split('bib_query?')[-1].split('abs/')[-1] for u in
               bibdesk('tell publications whose '
                       '(value of field "Adsurl" contains "arXiv") or '
                       '(value of field "Adsurl" contains "astro.ph") '
                       'to return value of field "Adsurl"', strlist=True)]
        dates = [b2d(b) for b in
                 bibdesk('tell publications whose '
                         '(value of field "Adsurl" contains "arXiv") or '
                         '(value of field "Adsurl" contains "astro.ph") '
                         'to return bibtex string', strlist=True)]
        # arxiv ids to search, restricted to the requested date window
        ids = [b for d, b in zip(dates, ids)
               if recent(d, options.from_date, options.to_date)]
    bibdesk.app.dealloc()
    if not ids:
        print 'Nothing to update!'
        sys.exit()
    else:
        n = len(ids)
        # estimated run time in minutes: one 15 s sleep per entry (see below)
        t = math.ceil(n * 15. / 60.)
        logging.info('Checking %i arXiv entries for changes...' % n)
        logging.info('(to prevent ADS flooding this will take a while, check back '
                     'in around %i %s)' % (t, t > 1 and 'minutes' or 'minute'))
    changed = []
    for n, i in enumerate(ids):
        # sleep for 15 seconds, to prevent ADS flooding
        time.sleep(15)
        logging.debug("arxiv id %s" % i)
        # these are ADS bibcodes by default
        adsURL = urlparse.urlunsplit(('http', prefs['ads_mirror'],
                                      'cgi-bin/bib_query', i, ''))
        logging.debug("adsURL %s" % adsURL)
        # parse the ADS HTML file
        ads = ADSHTMLParser(prefs=prefs)
        try:
            ads.parse_at_url(adsURL)
        except ADSException, err:
            logging.debug('%s update failed: %s' % (i, err))
            continue
        logging.debug("ads.bibtex %s" % ads.bibtex)
        if ads.bibtex is None:  # ADSHTMLParser failed
            logging.debug("FAILURE: ads.bibtex is None!")
            continue
        if ads.bibtex.bibcode != i:
            # bibcode changed: the pre-print was published
            logging.info('%i. %s has become %s' % (n + 1, i, ads.bibtex.bibcode))
            changed.append(i)
        else:
            logging.info('%i. %s has not changed' % (n + 1, i))
            continue
    # run changed entries through the main loop
    if changed and raw_input('Updating %i entries, continue? (y/[n]) '
                             % len(changed)) in ('Y', 'y'):
        logging.info('(to prevent ADS flooding, we will wait for a while between '
                     'each update, so go grab a coffee)')
        process_articles(changed, prefs)
    elif not changed:
        logging.info('Nothing to update!')
# Adapted from the original by Moises Aranas
# https://github.com/maranas/pyNotificationCenter
def notify(title, subtitle, desc, sticky=False):
    """Post a notification via NSUserNotificationCenter, falling back to
    Growl when PyObjC / NSUserNotification is not available.

    :param sticky: forwarded to `growlNotify` (fallback path only)
    """
    try:
        import objc
        notification = objc.lookUpClass('NSUserNotification').alloc().init()
        notification.setTitle_(title)
        notification.setInformativeText_(desc)
        notification.setSubtitle_(subtitle)
        objc.lookUpClass('NSUserNotificationCenter').defaultUserNotificationCenter().scheduleNotification_(notification)
        notification.dealloc()
    # this will be either ImportError or objc.nosuchclass_error
    except Exception:
        # revert to growl
        if subtitle:
            desc = subtitle + ': ' + desc
        growlNotify(title, desc, sticky)
def growlNotify(title, desc, sticky=False):
    """Display a Growl notification through AppleScript (fallback used by
    `notify` when Notification Center is unavailable)."""
    # escape double quotes so the text can be embedded in AppleScript source
    title = title.replace('"', r'\"')
    desc = desc.replace('"', r'\"')
    # http://bylr.net/3/2011/09/applescript-and-growl/
    # is growl running?
    app = AppKit.NSAppleScript.alloc()
    growl = app.initWithSource_('tell application "System Events" to return '
                                'processes whose creator type contains "GRRR"'
                                ).executeAndReturnError_(None)[0]
    if growl.numberOfItems():
        # extract the name of the running Growl process from the descriptor
        growlapp = growl.descriptorAtIndex_(1).descriptorAtIndex_(3).stringValue()
        # register
        app.initWithSource_('tell application "%s" to register as '
                            'application "BibDesk" '
                            'all notifications {"BibDesk notification"} '
                            'default notifications {"BibDesk notification"}'
                            % growlapp).executeAndReturnError_(None)
        # and notify
        app.initWithSource_('tell application "%s" to notify with name '
                            '"BibDesk notification" application name "BibDesk" '
                            'priority 0 title "%s" description "%s" %s'
                            % (growlapp, title, desc, "with sticky" if sticky else '')
                            ).executeAndReturnError_(None)
    app.dealloc()
def hasAnnotations(f):
    """Return True if the PDF file *f* appears to contain annotations.

    Heuristic: run strings(1) over the raw file and grep for the
    ``Contents (`` marker that PDF annotation objects carry.

    :param f: path to the PDF file
    :return: bool

    Fixes over the original: the path is quoted, so file names containing
    spaces or shell metacharacters no longer break the pipeline, and the
    output is compared against ``b''`` so the test is also correct where
    ``stdout.read()`` yields bytes (``b'' == ''`` on Python 2).
    """
    out = sp.Popen("strings \"%s\" | grep -E 'Contents[ ]{0,1}\\('" % f,
                   shell=True, stdout=sp.PIPE,
                   stderr=open('/dev/null', 'w')).stdout.read()
    return out != b''
def getRedirect(url):
    """Utility function to intercept the final URL of an HTTP redirection.

    Issues a single GET and returns the ``Location`` response header
    (None if the server did not redirect).
    """
    import httplib  # Python 2 stdlib (http.client on Python 3)
    url = urlparse.urlsplit(url)
    conn = httplib.HTTPConnection(url.netloc)
    conn.request('GET', url.path + '?' + url.query)
    return conn.getresponse().getheader('Location')
class PDFDOIGrabber(object):
    """Converts PDFs to text and attempts to match all DOIs."""

    def __init__(self):
        super(PDFDOIGrabber, self).__init__()
        # DOI syntax: "10.<registrant>/<suffix>". The negative lookahead
        # stops the match at quotes/brackets that typically delimit a DOI
        # embedded in text or markup.
        regstr = r'(10[.][0-9]{4,}(?:[.][0-9]+)*/(?:(?!["&\'<>\)])\S)+)'
        self.pattern = re.compile(regstr)

    def search(self, pdfPath):
        """Return the set of DOIs found in the text of the PDF at `pdfPath`.

        Converts the PDF with the external ``pdf2json`` tool, scans its
        output, then falls back to strings(1) when nothing matched.

        Fixes over the original: the JSON side file is read with a context
        manager (the original leaked the file handle) and the path passed
        to ``strings`` is quoted so paths with spaces work.
        """
        jsonPath = os.path.splitext(pdfPath)[0] + ".json"
        if os.path.exists(jsonPath):
            os.remove(jsonPath)
        sp.call('pdf2json -q "%s" "%s"' % (pdfPath, jsonPath), shell=True)
        with open(jsonPath, 'r') as jsonFile:
            data = jsonFile.read()
        doiMatches = self.pattern.findall(data)
        # clean up the temporary JSON conversion
        if os.path.exists(jsonPath):
            os.remove(jsonPath)
        # strings can find some stuff that pdf2json does not
        if not doiMatches:
            data = sp.Popen('strings "%s"' % pdfPath,
                            shell=True, stdout=sp.PIPE,
                            stderr=open('/dev/null', 'w')).stdout.read()
            doiMatches = self.pattern.findall(data)
        return set(doiMatches)
class ADSConnector(object):
    """Receives input (token), derives an ADS url, and attempts to connect
    to the corresponding ADS abstract page with urllib2.urlopen().

    Tokens are tested in order of:

    - arxiv identifiers
    - bibcodes / digital object identifier (DOI)
    - ADS urls
    - arxiv urls
    """
    def __init__(self, token, prefs):
        super(ADSConnector, self).__init__()
        self.token = str(token)
        self.prefs = prefs
        self.adsURL = None  # string URL to ADS
        self.adsRead = None  # HTML read from the ADS page (set by _read)
        self.urlParts = urlparse.urlsplit(token)  # supposing it is a URL

        # An arXiv identifier or URL?
        if self._is_arxiv():
            logging.debug("ADSConnector found arXiv ID %s", self.token)
            # Try to open the ADS page
            if not self._read(self.adsURL):
                # ADS failed; parse the arXiv page instead
                logging.debug('ADS page (%s) not found for %s' %
                              (self.adsURL, self.token))
                notify('ADS page not found', self.token,
                       'Parsing the arXiv page...')
                arxivBib = ArXivParser()
                try:
                    arxivBib.parse_at_id(self.arxivID)
                    logging.debug("arXiv page (%s) parsed for %s" % (arxivBib.url, self.token))
                except ArXivException, err:
                    logging.debug("ADS and arXiv failed, you're in trouble...")
                    raise ADSException(err)
                # dummy adsRead and bibtex so callers see a "successful" read
                self.adsRead = True
                self.bibtex = arxivBib
        # A bibcode from ADS?
        elif not self.urlParts.scheme and self._is_bibcode():
            logging.debug("ADSConnector found bibcode/DOI %s", self.token)
        else:
            # If the path lacks http://, tack it on because the token *must* be a URL now
            if not self.token.startswith("http://"):
                self.token = 'http://' + self.token
            # supposing it is a URL
            self.urlParts = urlparse.urlsplit(self.token)
            # An abstract page at any ADS mirror site?
            if self.urlParts.netloc in self.prefs.adsmirrors and self._is_ads_page():
                logging.debug("ADSConnector found ADS page %s", self.token)

    def _is_arxiv(self):
        """Try to classify the token as an arxiv article, either:

        - new style (YYMM.NNNN), or
        - old style (astro-ph/YYMMNNN)

        :return: True if the token matches an arXiv identifier pattern;
            also sets ``self.arxivID`` and ``self.adsURL`` (the page itself
            is not fetched here)
        """
        arxivPattern = re.compile('(\d{4,6}.\d{4,6}|astro\-ph/\d{7})')
        arxivMatches = arxivPattern.findall(self.token)
        if len(arxivMatches) == 1:
            self.arxivID = arxivMatches[0]
            self.adsURL = urlparse.urlunsplit(('http',
                                               self.prefs['ads_mirror'],
                                               'cgi-bin/bib_query',
                                               'arXiv:%s' % self.arxivID, ''))
            return True
        else:
            self.arxivID = None
            return False

    def _is_bibcode(self):
        """Test if the token corresponds to an ADS bibcode or DOI.

        Tries the ``doi/`` endpoint first, then falls back to ``abs/``.
        """
        self.adsURL = urlparse.urlunsplit(('http', self.prefs['ads_mirror'],
                                           'doi/%s' % self.token, '', ''))
        read = self._read(self.adsURL)
        if read:
            return read
        else:
            self.adsURL = urlparse.urlunsplit(('http',
                                               self.prefs['ads_mirror'],
                                               'abs/%s' % self.token, '', ''))
            read = self._read(self.adsURL)
            return read

    def _is_ads_page(self):
        """Test if the token is a url to an ADS abstract page"""
        # use our ADS mirror instead of whichever mirror the URL pointed to
        url = self.urlParts
        self.adsURL = urlparse.urlunsplit((url.scheme,
                                           self.prefs['ads_mirror'],
                                           url.path, url.query, url.fragment))
        return self._read(self.adsURL)

    def _read(self, adsURL):
        """Attempt a connection to adsURL, saving the read to
        ``self.adsRead``.

        :return: True if successful, False otherwise
        """
        try:
            # remove <head>...</head> - often broken HTML
            self.adsRead = re.sub(r'<head>[\s\S]*</head>', '',
                                  urllib2.urlopen(adsURL).read())
            return True
        except urllib2.HTTPError:
            return False
class Preferences(object):
    """Manages the preferences on disk and in memory. Preferences are
    accessed through a dictionary-like interface.
    """
    def __init__(self):
        # preferences are persisted in ~/.adsbibdesk
        self.prefsPath = os.path.join(os.getenv('HOME'), '.adsbibdesk')
        # known ADS mirror hostnames, used to recognize ADS abstract URLs
        self._adsmirrors = ['adsabs.harvard.edu',
                            'cdsads.u-strasbg.fr',
                            'ukads.nottingham.ac.uk',
                            'esoads.eso.org',
                            'ads.ari.uni-heidelberg.de',
                            'ads.inasan.ru',
                            'ads.mao.kiev.ua',
                            'ads.astro.puc.cl',
                            'ads.on.br',
                            'ads.nao.ac.jp',
                            'ads.bao.ac.cn',
                            'ads.iucaa.ernet.in',
                            'www.ads.lipi.go.id']

        self.prefs = self._getDefaultPrefs()  # Hard coded defaults dictionary
        newPrefs = self._getPrefs()  # load user prefs from disk
        self.prefs.update(newPrefs)  # override defaults with user prefs
        self._keys = self.prefs.keys()
        self._iterIndex = -1

    def __getitem__(self, key):
        return self.prefs[key]

    def __setitem__(self, key, value):
        self.prefs[key] = value
        self._keys = self.prefs.keys()

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol: iterate over preference keys,
        # resetting the index when exhausted so iteration can restart
        if self._iterIndex == len(self._keys) - 1:
            self._iterIndex = -1
            raise StopIteration
        self._iterIndex += 1
        return self._keys[self._iterIndex]

    def _getDefaultPrefs(self):
        """:return: a dictionary of the full set of default preferences. This
        is done in case the user's preference file is missing a key-value pair.
        """
        return {"ads_mirror": "adsabs.harvard.edu",
                "arxiv_mirror": None,
                "download_pdf": True,
                "pdf_reader": None,
                "auto_file": True,
                "ssh_user": None,
                "ssh_server": None,
                "ads_keywords": True,
                "debug": False,
                "log_path": os.path.expanduser("~/.adsbibdesk.log")}

    def _getPrefs(self):
        """Read preferences files from `self.prefsPath`, creates one otherwise."""
        prefs = {}
        # create a default preference file if non existing
        if not os.path.exists(self.prefsPath):
            self._writeDefaultPrefs()
        for l in open(self.prefsPath):
            if l.strip() and not l.strip().startswith('#'):
                if '=' not in l:
                    # badly formed setting
                    continue
                # NOTE(review): split('=') raises on values that themselves
                # contain '=' -- verify settings never contain one
                k, v = l.strip().split('=')
                if not v:
                    v = None
                elif v.strip().lower() in ('true', 'yes'):
                    v = True
                elif v.strip().lower() in ('false', 'no'):
                    v = False
                elif v.strip().lower() == 'none':
                    v = None
                prefs[k] = v
        return prefs

    def _writeDefaultPrefs(self):
        """
        Set a default preferences file (~/.adsbibdesk)
        """
        prefs = open(self.prefsPath, 'w')
        print >> prefs, """# ADS mirror
ads_mirror=%s
# arXiv mirror
# (leave it unset to use the arXiv mirror pointed by your ADS mirror)
arxiv_mirror=%s
# download PDFs?
download_pdf=%s
# set these to use your account on a remote machine for fetching
# (refereed) PDF's you have no access locally
ssh_user=%s
ssh_server=%s""" % (self.prefs['ads_mirror'], self.prefs['arxiv_mirror'],
                    self.prefs['download_pdf'], self.prefs['ssh_user'],
                    self.prefs['ssh_server'])
        prefs.close()

    @property
    def adsmirrors(self):
        # read-only list of recognized ADS mirror hostnames
        return self._adsmirrors
class BibTex(object):
    """A single BibTex entry fetched from an ADS BibTex URL."""

    def __init__(self, url):
        """
        Create BibTex instance from ADS BibTex URL
        """
        bibtex = urllib2.urlopen(url).readlines()
        bibtex = ' '.join([l.strip() for l in bibtex]).strip()
        # drop everything before the first "@TYPE{" marker
        bibtex = bibtex[re.search('@[A-Z]+\{', bibtex).start():]
        self.type, self.bibcode, self.info = self.parsebib(bibtex)

    def __str__(self):
        # reassemble "@TYPE{bibcode,key=value,...}" encoded as UTF-8
        return (','.join(['@' + self.type + '{' + self.bibcode] +
                         ['%s=%s' % (i, j) for i, j in self.info.items()]) + '}'
                ).encode('utf-8')

    def parsebib(self, bibtex):
        """
        Parse bibtex code into dictionary

        :return: (entry type, bibcode, {field: value})
        """
        r = re.search('(?<=^@)(?P<type>[A-Z]+){(?P<bibcode>\S+)(?P<info>,.+)}$', bibtex)
        # split on ", Key = " separators; odd slots are keys, even are values
        s = re.split('(,\s\w+\s=\s)', r.group('info'))
        info = dict([(i[1:].replace('=', '').strip(), j.strip())
                     for i, j in zip(s[1::2], s[2::2])])
        return r.group('type'), r.group('bibcode'), info
class BibDesk(object):
    def __init__(self):
        """
        Manage BibDesk publications using AppKit
        """
        self.app = AppKit.NSAppleScript.alloc()
        self.refresh()

    def __call__(self, cmd, pid=None, strlist=False, error=False):
        """
        Run AppleScript command on first document of BibDesk

        :param cmd: AppleScript command string
        :param pid: address call to first/last publication of document
        :param strlist: return output as list of string
        :param error: return full output of call, including error
        """
        if pid is None:
            # address all publications
            cmd = 'tell first document of application "BibDesk" to %s' % cmd
        else:
            # address a single publication
            cmd = 'tell first document of application "BibDesk" to '\
                  'tell first publication whose id is "%s" to %s' % (pid, cmd)
        output = self.app.initWithSource_(cmd).executeAndReturnError_(None)
        if not error:
            output = output[0]
            if strlist:
                # objective C nuisances...
                output = [output.descriptorAtIndex_(i + 1).stringValue()
                          for i in range(output.numberOfItems())]
        return output

    def refresh(self):
        # cache titles, ids, cite keys and keywords of all publications;
        # the lists are index-aligned (entry i describes publication i)
        self.titles = self('return title of publications', strlist=True)
        self.ids = self('return id of publications', strlist=True)
        self.keys = self('return cite key of publications', strlist=True)
        self.kwords = self('return keywords of publications', strlist=True)

    def pid(self, title):
        """Get the id of the publication with the given title"""
        return self.ids[self.titles.index(title)]

    def authors(self, pid):
        """
        Get name of authors of publication
        """
        return self('name of authors', pid, strlist=True)

    def citekeys(self, pid):
        """
        Get cite key of publication

        Bug fix: the original indexed with ``self.ids == pid``, which
        compares a *list* with a string; that is always False (== 0), so the
        first cite key was returned no matter which pid was asked for. Look
        up the id's position instead.
        """
        return self.keys[self.ids.index(pid)]

    def keywords(self, pid):
        """
        Get keywords of publication (same ``ids == pid`` index fix as
        `citekeys`)
        """
        return '{' + self.kwords[self.ids.index(pid)] + '}'

    def safe_delete(self, pid):
        """
        Safely delete publication + PDFs, taking into account
        the existence of PDFs with Skim notes

        :return: list of PDF paths that were kept (renamed) because they
            carry annotations or Skim notes
        """
        keptPDFs = []
        files = self('POSIX path of linked files', pid, strlist=True)
        notes = self('text Skim notes of linked files', pid, strlist=True)
        for f, n in zip([f for f in files if f is not None],
                        [n for n in notes if n is not None]):
            if f.lower().endswith('pdf'):
                if '_notes_' in f:
                    # already a preserved annotated copy
                    keptPDFs.append(f)
                else:
                    # check for annotations
                    if n or hasAnnotations(f):
                        # find a free "<name>_notes_<i>.pdf" backup name
                        suffix = 1
                        path, ext = os.path.splitext(f)
                        backup = path + '_notes_%i.pdf' % suffix
                        while os.path.exists(backup):
                            suffix += 1
                            backup = path + '_notes_%i.pdf' % suffix
                        # rename instead of deleting, keeping the notes
                        os.rename(f, backup)
                        keptPDFs.append(backup)
                        if os.path.exists(path + '.skim'):
                            os.rename(path + '.skim',
                                      path + '_notes_%i.skim' % suffix)
                    else:
                        # remove file
                        os.remove(f)
        # delete publication
        self('delete', pid)
        return keptPDFs
class ADSException(Exception):
    """Raised when fetching or parsing an ADS page fails."""
    pass
class ADSHTMLParser(HTMLParser):
    """Parse an ADS abstract page, collecting the page links, the BibTex
    entry, the abstract, the ADS comment and the arXiv identifier."""

    def __init__(self, *args, **kwargs):
        HTMLParser.__init__(self)
        self.links = {}        # link_type/data_type -> href found on the page
        self.tag = ''          # text accumulator used while parsing
        self.get_abs = False   # True while inside the abstract section
        # None = not seen yet, False = seen but do not store yet, True = store
        self.get_comment = None
        self.entities = {}     # cached MathML entity -> codepoint map
        self.bibtex = None
        self.abstract = None
        self.comment = None
        self.title = ''
        self.author = []
        self.arxivid = None
        self.year = ''
        self.prefs = kwargs.get('prefs', Preferences()).prefs

    def mathml(self):
        """
        Generate dictionary with MathML -> unicode conversion from
        http://www.w3.org/Math/characters/byalpha.html
        """
        w3 = 'http://www.w3.org/Math/characters/byalpha.html'
        mathml = re.search('(?<=<pre>).+(?=</pre>)',
                           urllib2.urlopen(w3).read(), re.DOTALL).group()
        entities = {}
        for l in mathml[:-1].splitlines():
            s = l.split(',')
            # ignore double hex values like 'U02266-00338'
            if '-' not in s[1]:
                # hexadecimal -> int values, for unichr
                entities[s[0].strip()] = int(s[1].strip()[1:], 16)
        return entities

    def parse_at_url(self, url):
        """Helper method to read data from URL, and passes on to parse()."""
        try:
            htmlData = urllib2.urlopen(url).read()
        except urllib2.URLError, err:
            logging.debug("ADSHTMLParser timed out on URL: %s", url)
            raise ADSException(err)
        self.parse(htmlData)

    def parse(self, htmlData):
        """
        Feed url into our own HTMLParser and parse found bibtex

        htmlData is a string containing HTML data from ADS page.
        """
        self.feed(htmlData)

        logging.debug("ADSHTMLParser found links: %s",
                      pprint.pformat(self.links))

        if 'bibtex' in self.links:
            self.bibtex = BibTex(self.links['bibtex'])
            # strip the outer braces of the BibTex title/author fields
            self.title = re.search('(?<={).+(?=})',
                                   self.bibtex.info['title']).group()\
                .replace('{', '').replace('}', '').encode('utf-8')
            self.author = [a.strip().encode('utf-8') for a in
                           re.search('(?<={).+(?=})', self.bibtex.info['author']).group().split(' and ')]
            self.year = self.bibtex.info['year']
            # bibtex do not have the comment from ADS
            if self.comment:
                self.comment = '"' + self.comment.replace('"', "'") + '"'
            # construct ArXivURL from arXiv identifier
            if self.arxivid:
                if 'arxiv_mirror' not in self.prefs or not self.prefs['arxiv_mirror']:
                    # test HTTP redirect to get the arXiv mirror used by ADS
                    try:
                        mirror = urlparse.urlsplit(getRedirect(self.links['preprint'])).netloc
                    except KeyError:
                        mirror = 'arxiv.org'  # this should not happen
                else:
                    mirror = self.prefs['arxiv_mirror']
                url = urlparse.urlunsplit(('http', mirror, 'abs/'+self.arxivid, None, None))
                self.bibtex.info.update({'arxivurl': '"' + url + '"'})

    def handle_starttag(self, tag, attrs):
        """Dispatch on interesting start tags: <hr> ends the abstract,
        <img> inside the abstract is an old scanned-abstract image,
        <a> carries the ADS links table, <td> marks the comment cell."""
        # abstract
        if tag.lower() == 'hr' and self.get_abs:
            self.abstract = self.tag.strip().decode('utf-8')
            self.get_abs = False
            self.tag = ''
        # handle old scanned articles abstracts
        elif tag.lower() == 'img' and self.get_abs:
            self.tag += dict(attrs)['src'].replace('&', unichr(38))
        # links
        elif tag.lower() == 'a':
            if 'href' in dict(attrs):
                href = dict(attrs)['href'].replace('&', unichr(38))
                query = cgi.parse_qs(urlparse.urlsplit(href).query)
                if 'bibcode' in query:
                    if 'link_type' in query:
                        self.links[query['link_type'][0].lower()] = href
                    elif 'data_type' in query:
                        self.links[query['data_type'][0].lower()] = href
        # comment
        elif tag.lower() == 'td' and self.get_comment is False and 'valign' in dict(attrs):
            self.get_comment = True

    def handle_endtag(self, tag):
        # </td> closes the comment cell: store the accumulated text
        if self.get_comment and tag.lower() == 'td':
            self.comment = self.tag.strip().decode('utf-8')
            self.get_comment = None
            self.tag = ''

    def handle_data(self, data):
        """Accumulate text for the abstract/comment and detect the
        'Abstract'/'Comment:' section markers and the arXiv id."""
        if self.get_abs:
            self.tag += data.replace('\n', ' ')
        if self.get_comment:
            self.tag += data

        # beginning of abstract found
        if data.strip() == 'Abstract':
            self.get_abs = True

        if data.strip() == 'Comment:':
            self.get_comment = False

        # store arXiv identifier
        if re.search('arXiv:(\d{4,6}.\d{4,6}|astro\-ph/\d{7})', data) is not None:
            self.arxivid = re.search('arXiv:(\d{4,6}.\d{4,6}|astro\-ph/\d{7})', data).group(1)

    # handle html entities
    def handle_entityref(self, name):
        if self.get_abs:
            if name in name2codepoint:
                c = name2codepoint[name]
                self.tag += unichr(c).encode('utf-8')
            else:
                # fetch mathml
                if not self.entities:
                    # cache dict
                    self.entities = self.mathml()
                if name in self.entities:
                    c = self.entities[name]
                    self.tag += unichr(c).encode('utf-8')
                else:
                    # nothing worked, leave it as-is
                    self.tag += '&' + name + ';'

    # handle unicode chars in utf-8
    def handle_charref(self, name):
        if self.get_abs:
            self.tag += unichr(int(name)).encode('utf-8')

    def getPDF(self):
        """
        Fetch PDF and save it locally in a temporary file.
        Tries by order:

        - refereed article
        - refereed article using another machine (set ssh_user & ssh_server)
        - arXiv preprint
        - electronic journal link

        :return: local path of the downloaded PDF, or a URL/'failed'/
            'not downloaded' string when no PDF could be saved locally
        """
        if not self.links:
            return 'failed'
        elif 'download_pdf' in self.prefs and not self.prefs['download_pdf']:
            return 'not downloaded'

        def filetype(filename):
            # file(1) output, used to confirm we really got a PDF
            return sp.Popen('file %s' % filename, shell=True,
                            stdout=sp.PIPE,
                            stderr=sp.PIPE).stdout.read()

        # refereed
        if 'article' in self.links:
            url = self.links['article']
            if "MNRAS" in url:  # Special case for MNRAS URLs to deal with iframe
                parser = MNRASParser(self.prefs)
                try:
                    parser.parse(url)
                except MNRASException:
                    # this probably means we have a PDF directly from ADS, just continue
                    print 'excepct'
                    pass
                if parser.pdfURL is not None:
                    url = parser.pdfURL

            # try locally
            fd, pdf = tempfile.mkstemp(suffix='.pdf')
            # test for HTTP auth need
            try:
                os.fdopen(fd, 'wb').write(urllib2.urlopen(url).read())
            except urllib2.HTTPError:
                # dummy file
                open(pdf, 'w').write('dummy')
            except urllib2.URLError:
                logging.debug('%s timed out' % url)
                pass
            if 'PDF document' in filetype(pdf):
                return pdf
            # try in remote server
            # you need to set SSH public key authentication for this to work!
            elif 'ssh_user' in self.prefs and self.prefs['ssh_user'] is not None:
                fd, pdf = tempfile.mkstemp(suffix='.pdf')
                cmd = 'ssh %s@%s \"touch adsbibdesk.pdf; wget -O adsbibdesk.pdf \\"%s\\"\"' % (self.prefs['ssh_user'],
                                                                                               self.prefs['ssh_server'],
                                                                                               url)
                cmd2 = 'scp -q %s@%s:adsbibdesk.pdf %s' % (self.prefs['ssh_user'],
                                                           self.prefs['ssh_server'],
                                                           pdf)
                sp.Popen(cmd, shell=True,
                         stdout=sp.PIPE, stderr=sp.PIPE).communicate()
                sp.Popen(cmd2, shell=True,
                         stdout=sp.PIPE, stderr=sp.PIPE).communicate()
                if 'PDF document' in filetype(pdf):
                    return pdf

        # arXiv
        if 'preprint' in self.links:
            # arXiv page
            url = self.links['preprint']
            mirror = None
            # fetch PDF directly without parsing the arXiv page
            if self.arxivid is not None:
                # user defined mirror?
                if 'arxiv_mirror' not in self.prefs or not self.prefs['arxiv_mirror']:
                    # test HTTP redirect to get the arXiv mirror used by ADS
                    mirror = urlparse.urlsplit(getRedirect(url)).netloc
                else:
                    mirror = self.prefs['arxiv_mirror']
                url = urlparse.urlunsplit(('http', mirror, 'pdf/'+self.arxivid, None, None))
                logging.debug('arXiv PDF (%s)' % url)
            else:
                # search for PDF link in the arXiv page
                # this should be *deprecated*
                for line in urllib2.urlopen(url):
                    if '<h1><a href="/">' in line:
                        mirror = re.search('<h1><a href="/">(.*ar[xX]iv.org)', line)
                    elif 'dc:identifier' in line:
                        begin = re.search('dc:identifier="', line).end()
                        url = urlparse.urlsplit(line[begin:-2].replace('&', unichr(38)).lower())
                        # use automatic mirror chosen by the ADS mirror
                        if ('arxiv_mirror' not in self.prefs or not self.prefs['arxiv_mirror']) \
                                and mirror is not None:
                            url = urlparse.urlunsplit((url.scheme, mirror.group(1),
                                                       url.path, url.query,
                                                       url.fragment))
                            break
                        elif self.prefs['arxiv_mirror']:
                            url = urlparse.urlunsplit((url.scheme,
                                                       self.prefs['arxiv_mirror'],
                                                       url.path, url.query,
                                                       url.fragment))
                            break
                logging.debug('arXiv PDF url (*should be DEPRECATED!*): %s' % url)

            # get arXiv PDF
            fd, pdf = tempfile.mkstemp(suffix='.pdf')
            os.fdopen(fd, 'wb').write(urllib2.urlopen(url.replace('abs', 'pdf')).read())
            if 'PDF document' in filetype(pdf):
                return pdf
            # PDF was not yet generated in the mirror?
            elif '...processing...' in open(pdf).read():
                while '...processing...' in open(pdf).read():
                    logging.debug('waiting 30s for PDF regeneration')
                    notify('Waiting for arXiv...', '',
                           'PDF is being generated, retrying in 30s...')
                    time.sleep(30)
                    open(pdf, 'wb').write(urllib2.urlopen(url.replace('abs', 'pdf')).read())
                if 'PDF document' in filetype(pdf):
                    return pdf
                else:
                    return url
            else:
                return url

        # electronic journal
        if 'ejournal' in self.links:
            return self.links['ejournal']

        return 'failed'
class ArXivException(Exception):
    """Raised when fetching or parsing the arXiv API response fails."""
    pass
class ArXivParser(object):
    def __init__(self):
        """
        Parse arXiv information for a *single* arxiv_id

        :param arxiv_id: arXiv identifier
        """
        pass

    def parse_at_id(self, arxiv_id):
        """Helper method to read data from URL, and passes on to parse()."""
        from xml.etree import ElementTree
        self.url = 'http://export.arxiv.org/api/query?id_list=' + arxiv_id
        try:
            self.xml = ElementTree.fromstring(urllib2.urlopen(self.url).read())
        except (urllib2.HTTPError, urllib2.URLError), err:
            logging.debug("ArXivParser failed on URL: %s", self.url)
            raise ArXivException(err)
        self.info = self.parse(self.xml)
        self.bib = self.bibtex(self.info)

    def parse(self, xml):
        """Flatten the Atom XML response into a plain info dict."""
        # recursive xml -> list of (tag, info); leaves become whitespace-
        # normalized text (or the attribute dict when there is no text)
        getc = lambda e: [(c.tag.split('}')[-1], c.getchildren() and
                           dict(getc(c)) or
                           (c.text is not None and re.sub('\s+', ' ', c.text.strip()) or c.attrib))
                          for c in e.getchildren()]

        # article info
        info = {}
        for k, v in getc(xml.getchildren()[-1]):  # the last item is the article
            if isinstance(v, dict):
                # repeated structured tags (e.g. author) collect into a list
                info.setdefault(k, []).append(v)
            else:
                info[k] = v
        return info

    def bibtex(self, info):
        """
        Create BibTex entry. Sets a bunch of "attributes" that are used
        explicitly on __str__ as BibTex entries

        :param info: parsed info dict from arXiv
        """
        # author names become "{Last}, F.~M." joined with " and "
        self.Author = ' and '.join(['{%s}, %s' % (a['name'].split()[-1],
                                                  '~'.join(a['name'].split()[:-1]))
                                    for a in info['author']
                                    if len(a['name'].strip()) > 1]).encode('utf-8')
        self.Title = info['title'].encode('utf-8')
        self.Abstract = info['summary'].encode('utf-8')
        try:
            self.AdsComment = info['comment'].replace('"', "'").encode('utf-8')
        except:
            self.AdsComment = ''
        # NOTE(review): attribute name "Jornal" (sic) becomes the emitted
        # BibTeX field name via __str__ -- verify before renaming
        self.Jornal = 'ArXiv e-prints'
        self.ArchivePrefix = 'arXiv'
        self.ArXivURL = info['id']
        self.Eprint = info['id'].split('abs/')[-1]
        self.PrimaryClass = info['primary_category'][0]['term']
        self.Year, self.Month = datetime.datetime.strptime(info['published'],
                                                           '%Y-%m-%dT%H:%M:%SZ').strftime('%Y %b').split()

    def __str__(self):
        # emit "@article{eprint, Field = {value}, ...}" from every attribute
        # whose name starts with an uppercase letter
        import string
        return '@article{%s,\n' % self.Eprint +\
            '\n'.join(['%s = {%s},' % (k, v)
                       for k, v in
                       sorted([(k, v.decode('utf-8'))
                               for k, v in self.__dict__.iteritems()
                               if k[0] in string.uppercase])]) +\
            '}'
class MNRASException(Exception):
    """Raised when resolving the PDF URL of a MNRAS article fails."""
    pass
class MNRASParser(HTMLParser):
    """Handle MNRAS refereed article PDFs.

    Unlike other journals, the ADS "Full Refereed Journal Article" URL for a
    MNRAS article points to a PDF embedded in an iframe. This class extracts
    the PDF url given the ADS link.
    """
    def __init__(self, prefs):
        HTMLParser.__init__(self)
        self.prefs = prefs
        self.pdfURL = None  # set by parse()

    def parse(self, url):
        """Parse URL to MNRAS PDF page"""
        try:
            # open the page and derive the PDF URL from the final
            # (post-redirect) URL
            # NOTE(review): assigning to self.feed shadows HTMLParser.feed
            # -- verify the feed() method is never needed on this instance
            self.feed = urllib2.urlopen(url)
            self.pdfURL = self.feed.url.split('+')[0]
        except urllib2.URLError, err:  # HTTP timeout
            logging.debug("MNRASParser timed out: %s", url)
            raise MNRASException(err)
        except HTMLParseError, err:
            raise MNRASException(err)
# Script entry point: delegate to main() (defined earlier in this file)
if __name__ == '__main__':
    main()
|
wschoenell/ads_bibdesk
|
adsbibdesk.py
|
Python
|
gpl-3.0
| 55,654
|
[
"VisIt"
] |
88bc38ec33448ee862009b14f0d4616a4b1bf0813e88a1f58b80737e5942290f
|
import unittest
from mock import MagicMock, patch
from ncclient.transport.ssh import SSHSession
from ncclient.transport import AuthenticationError, SessionCloseError
import paramiko
from ncclient.devices.junos import JunosDeviceHandler
import sys
try:
import selectors
except ImportError:
import selectors2 as selectors
# Canned NETCONF <rpc-reply> payload used as the first complete message in
# the parser tests below.
reply_data = """<rpc-reply xmlns:junos="http://xml.juniper.net/junos/12.1X46/junos" attrib1 = "test">
<software-information>
<host-name>R1</host-name>
<product-model>firefly-perimeter</product-model>
<product-name>firefly-perimeter</product-name>
<package-information>
<name>junos</name>
<comment>JUNOS Software Release [12.1X46-D10.2]</comment>
</package-information>
</software-information>
<cli>
<banner></banner>
</cli>
</rpc-reply>"""

# Minimal <ok/> reply.
# NOTE(review): the last line reads '<rpc-reply/>' rather than '</rpc-reply>'
# -- presumably intentional (the parsers only look at framing); verify.
reply_ok = """<rpc-reply>
<ok/>
<rpc-reply/>"""

# A buffer of data with two complete messages and an incomplete message
# (NETCONF 1.0 ]]>]]> end-of-message framing)
rpc_reply = reply_data + "\n]]>]]>\n" + reply_ok + "\n]]>]]>\n" + reply_ok

# reply_ok wrapped in NETCONF 1.1 chunked framing ("\n#<len>\n...\n##\n")
reply_ok_chunk = "\n#%d\n%s\n##\n" % (len(reply_ok), reply_ok)

# einarnn: this test message had to be reduced in size as the improved
# 1.1 parsing finds a whole fragment in it, so needed to have less
# data in it than the terminating '>'
reply_ok_partial_chunk = "\n#%d\n%s" % (len(reply_ok), reply_ok[:-1])

# A buffer of data with two complete messages and an incomplete message:
# reply_data split across two 1.1 chunks, a complete reply_ok chunk, then
# the truncated chunk above
rpc_reply11 = "\n#%d\n%s\n#%d\n%s\n##\n%s%s" % (
    30, reply_data[:30], len(reply_data[30:]), reply_data[30:],
    reply_ok_chunk, reply_ok_partial_chunk)

# First half of a 1.0-framed exchange, cut in the middle of the ]]>]]>
# delimiter (used by the incomplete-delimiter test)
rpc_reply_part_1 = """<rpc-reply xmlns:junos="http://xml.juniper.net/junos/12.1X46/junos" attrib1 = "test">
<software-information>
<host-name>R1</host-name>
<product-model>firefly-perimeter</product-model>
<product-name>firefly-perimeter</product-name>
<package-information>
<name>junos</name>
<comment>JUNOS Software Release [12.1X46-D10.2]</comment>
</package-information>
</software-information>
<cli>
<banner></banner>
</cli>
</rpc-reply>
]]>]]"""

# Remainder of the delimiter plus the start of the next message
rpc_reply_part_2 = """>
<rpc-reply>
<ok/>
<rpc-reply/>"""
class TestSSH(unittest.TestCase):
    def _test_parsemethod(self, mock_dispatch, parsemethod, reply, ok_chunk,
                          expected_messages):
        """Feed *reply* into a fresh session buffer, run *parsemethod* and
        check that the expected complete messages were dispatched (in order)
        and that *ok_chunk* -- the incomplete trailing data -- is what
        remains in the buffer afterwards.
        """
        device_handler = JunosDeviceHandler({'name': 'junos'})
        obj = SSHSession(device_handler)
        if sys.version >= "3.0":
            # Python 3: the session buffer holds bytes
            obj._buffer.write(bytes(reply, "utf-8"))
            remainder = bytes(ok_chunk, "utf-8")
        else:
            obj._buffer.write(reply)
            remainder = ok_chunk
        parsemethod(obj)
        for i in range(0, len(expected_messages)):
            call = mock_dispatch.call_args_list[i][0][0]
            self.assertEqual(call, expected_messages[i])
        self.assertEqual(obj._buffer.getvalue(), remainder)
    @patch('ncclient.transport.ssh.Session._dispatch_message')
    def test_parse(self, mock_dispatch):
        """NETCONF 1.0 (]]>]]>) framing: complete messages are dispatched
        and the incomplete trailing reply_ok stays in the buffer."""
        self._test_parsemethod(mock_dispatch, SSHSession._parse, rpc_reply,
                               "\n" + reply_ok, [reply_data])
    @patch('ncclient.transport.ssh.Session._dispatch_message')
    def test_parse11(self, mock_dispatch):
        """NETCONF 1.1 chunked framing: both complete messages are
        dispatched and the truncated chunk stays in the buffer."""
        self._test_parsemethod(mock_dispatch, SSHSession._parse11, rpc_reply11,
                               reply_ok_partial_chunk, [reply_data, reply_ok])
@patch('ncclient.transport.ssh.Session._dispatch_message')
def test_parse_incomplete_delimiter(self, mock_dispatch):
device_handler = JunosDeviceHandler({'name': 'junos'})
obj = SSHSession(device_handler)
if sys.version >= "3.0":
b = bytes(rpc_reply_part_1, "utf-8")
obj._buffer.write(b)
obj._parse()
self.assertFalse(mock_dispatch.called)
b = bytes(rpc_reply_part_2, "utf-8")
obj._buffer.write(b)
obj._parse()
self.assertTrue(mock_dispatch.called)
else:
obj._buffer.write(rpc_reply_part_1)
obj._parse()
self.assertFalse(mock_dispatch.called)
obj._buffer.write(rpc_reply_part_2)
obj._parse()
self.assertTrue(mock_dispatch.called)
    @patch('paramiko.transport.Transport.auth_publickey')
    @patch('paramiko.agent.AgentSSH.get_keys')
    def test_auth_agent(self, mock_get_key, mock_auth_public_key):
        """A key obtained from the SSH agent is offered to auth_publickey."""
        key = paramiko.PKey(msg="hello")
        mock_get_key.return_value = [key]
        device_handler = JunosDeviceHandler({'name': 'junos'})
        obj = SSHSession(device_handler)
        obj._transport = paramiko.Transport(MagicMock())
        obj._auth('user', 'password', [], True, True)
        # second positional argument of auth_publickey is the offered key
        self.assertEqual(
            (mock_auth_public_key.call_args_list[0][0][1]).__repr__(),
            key.__repr__())
@patch('paramiko.transport.Transport.auth_publickey')
@patch('paramiko.agent.AgentSSH.get_keys')
def test_auth_agent_exception(self, mock_get_key, mock_auth_public_key):
    """If the agent key is rejected and no password is available,
    _auth must raise AuthenticationError."""
    key = paramiko.PKey()
    mock_get_key.return_value = [key]
    mock_auth_public_key.side_effect = paramiko.ssh_exception.AuthenticationException
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    self.assertRaises(AuthenticationError,
                      obj._auth,'user', None, [], True, False)
@patch('paramiko.transport.Transport.auth_publickey')
@patch('paramiko.pkey.PKey.from_private_key_file')
def test_auth_keyfiles(self, mock_get_key, mock_auth_public_key):
    """A key loaded from an explicitly supplied key file should be
    offered to Transport.auth_publickey."""
    key = paramiko.PKey()
    mock_get_key.return_value = key
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    obj._auth('user', 'password', ["key_file_name"], False, True)
    # Use the repr() builtin rather than calling __repr__ directly.
    self.assertEqual(
        repr(mock_auth_public_key.call_args_list[0][0][1]),
        repr(key))
@patch('paramiko.transport.Transport.auth_publickey')
@patch('paramiko.pkey.PKey.from_private_key_file')
def test_auth_keyfiles_exception(self, mock_get_key, mock_auth_public_key):
    """A password-protected key file with no password supplied must
    surface as AuthenticationError."""
    key = paramiko.PKey()
    mock_get_key.side_effect = paramiko.ssh_exception.PasswordRequiredException
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    self.assertRaises(AuthenticationError,
                      obj._auth,'user', None, ["key_file_name"], False, True)
@patch('os.path.isfile')
@patch('paramiko.transport.Transport.auth_publickey')
@patch('paramiko.pkey.PKey.from_private_key_file')
def test_auth_default_keyfiles(self, mock_get_key, mock_auth_public_key,
                               mock_is_file):
    """With no explicit key files, _auth should fall back to the default
    key locations (os.path.isfile is mocked to pretend they exist)."""
    key = paramiko.PKey()
    mock_get_key.return_value = key
    mock_is_file.return_value = True
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    obj._auth('user', 'password', [], False, True)
    # Use the repr() builtin rather than calling __repr__ directly.
    self.assertEqual(
        repr(mock_auth_public_key.call_args_list[0][0][1]),
        repr(key))
@patch('os.path.isfile')
@patch('paramiko.transport.Transport.auth_publickey')
@patch('paramiko.pkey.PKey.from_private_key_file')
def test_auth_default_keyfiles_exception(self, mock_get_key,
                                         mock_auth_public_key, mock_is_file):
    """A password-protected default key with no password must surface
    as AuthenticationError."""
    key = paramiko.PKey()
    mock_is_file.return_value = True
    mock_get_key.side_effect = paramiko.ssh_exception.PasswordRequiredException
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    self.assertRaises(AuthenticationError,
                      obj._auth,'user', None, [], False, True)
@patch('paramiko.transport.Transport.auth_password')
def test_auth_password(self, mock_auth_password):
    """When a password is given, _auth should call
    Transport.auth_password with the username and password."""
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    obj._auth('user', 'password', [], False, True)
    self.assertEqual(
        mock_auth_password.call_args_list[0][0],
        ('user',
         'password'))
@patch('paramiko.transport.Transport.auth_password')
def test_auth_exception(self, mock_auth_password):
    """Any failure during password auth is wrapped in AuthenticationError."""
    mock_auth_password.side_effect = Exception
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    self.assertRaises(AuthenticationError,
                      obj._auth, 'user', 'password', [], False, True)
def test_auth_no_methods_exception(self):
    """With no password, no key files, agent and default keys disabled,
    _auth has nothing to try and must raise AuthenticationError."""
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    self.assertRaises(AuthenticationError,
                      obj._auth,'user', None, [], False, False)
@patch('paramiko.transport.Transport.close')
def test_close(self, mock_close):
    """close() on a connected session must close the transport exactly
    once and clear the connected flag."""
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._transport = paramiko.Transport(MagicMock())
    obj._transport.active = True
    obj._connected = True
    obj.close()
    mock_close.assert_called_once_with()
    self.assertFalse(obj._connected)
@patch('paramiko.hostkeys.HostKeys.load')
def test_load_host_key(self, mock_load):
    """load_known_hosts with an explicit filename forwards it verbatim."""
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj.load_known_hosts("file_name")
    mock_load.assert_called_once_with("file_name")
@patch('os.path.expanduser')
@patch('paramiko.hostkeys.HostKeys.load')
def test_load_host_key_2(self, mock_load, mock_os):
    """load_known_hosts with no filename falls back to the expanded
    user default path (expanduser is mocked)."""
    mock_os.return_value = "file_name"
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj.load_known_hosts()
    mock_load.assert_called_once_with("file_name")
@unittest.skipIf(sys.version_info.major == 2, "test not supported < Python3")
@patch('ncclient.transport.ssh.SSHSession.close')
@patch('paramiko.channel.Channel.recv')
@patch('selectors.DefaultSelector.select')
@patch('ncclient.transport.ssh.Session._dispatch_error')
def test_run_receive_py3(self, mock_error, mock_selector, mock_recv, mock_close):
    """recv() returning 0 means the peer closed the channel: run() must
    dispatch a SessionCloseError."""
    mock_selector.return_value = True
    mock_recv.return_value = 0
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._channel = paramiko.Channel("c100")
    obj.run()
    self.assertTrue(
        isinstance(
            mock_error.call_args_list[0][0][0],
            SessionCloseError))
@unittest.skipIf(sys.version_info.major == 2, "test not supported < Python3")
@patch('ncclient.transport.ssh.SSHSession.close')
@patch('paramiko.channel.Channel.send_ready')
@patch('paramiko.channel.Channel.send')
@patch('selectors.DefaultSelector.select')
@patch('ncclient.transport.ssh.Session._dispatch_error')
def test_run_send_py3(self, mock_error, mock_selector, mock_send, mock_ready, mock_close):
    """A queued message is sent with the 1.0 message delimiter appended;
    send() returning -1 must be reported as SessionCloseError."""
    mock_selector.return_value = False
    mock_ready.return_value = True
    mock_send.return_value = -1
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._channel = paramiko.Channel("c100")
    obj._q.put("rpc")
    obj.run()
    self.assertEqual(mock_send.call_args_list[0][0][0], "rpc]]>]]>")
    self.assertTrue(
        isinstance(
            mock_error.call_args_list[0][0][0],
            SessionCloseError))
@unittest.skipIf(sys.version_info.major >= 3, "test not supported >= Python3")
@patch('ncclient.transport.ssh.SSHSession.close')
@patch('paramiko.channel.Channel.recv')
@patch('selectors2.DefaultSelector')
@patch('ncclient.transport.ssh.Session._dispatch_error')
def test_run_receive_py2(self, mock_error, mock_selector, mock_recv, mock_close):
    """Python 2 variant of the receive-side close test (selectors2)."""
    # NOTE(review): unlike the py3 test this patches the DefaultSelector
    # class itself and sets .select on the class mock — verify this stubs
    # the instance-level select() as intended.
    mock_selector.select.return_value = True
    mock_recv.return_value = 0
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._channel = paramiko.Channel("c100")
    obj.run()
    self.assertTrue(
        isinstance(
            mock_error.call_args_list[0][0][0],
            SessionCloseError))
@unittest.skip("test currently non-functional")
@patch('ncclient.transport.ssh.SSHSession.close')
@patch('paramiko.channel.Channel.send_ready')
@patch('paramiko.channel.Channel.send')
@patch('selectors2.DefaultSelector')
@patch('ncclient.transport.ssh.Session._dispatch_error')
def test_run_send_py2(self, mock_error, mock_selector, mock_send, mock_ready, mock_close):
    """Python 2 variant of the send-side close test (selectors2).
    Currently skipped as non-functional."""
    mock_selector.select.return_value = False
    mock_ready.return_value = True
    mock_send.return_value = -1
    device_handler = JunosDeviceHandler({'name': 'junos'})
    obj = SSHSession(device_handler)
    obj._channel = paramiko.Channel("c100")
    obj._q.put("rpc")
    obj.run()
    self.assertEqual(mock_send.call_args_list[0][0][0], "rpc]]>]]>")
    self.assertTrue(
        isinstance(
            mock_error.call_args_list[0][0][0],
            SessionCloseError))
|
einarnn/ncclient
|
test/unit/transport/test_ssh.py
|
Python
|
apache-2.0
| 13,804
|
[
"Firefly"
] |
eb31fdf38d5102d40212b634f7fce51852b70e809d1b39c22732ac425b975950
|
"""Acceptance tests for LMS-hosted Programs pages"""
from nose.plugins.attrib import attr
from ...fixtures.programs import FakeProgram, ProgramsFixture, ProgramsConfigMixin
from ...fixtures.course import CourseFixture
from ..helpers import UniqueCourseTest
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.programs import ProgramListingPage
class ProgramListingPageBase(ProgramsConfigMixin, UniqueCourseTest):
    """Base class used for program listing page tests."""

    def setUp(self):
        """Enable the programs API configuration and build the page object."""
        super(ProgramListingPageBase, self).setUp()
        self.set_programs_api_configuration(is_enabled=True)
        self.listing_page = ProgramListingPage(self.browser)

    def stub_api(self, course_id=None):
        """Stub out the programs API with fake data."""
        name = 'Fake Program'
        status = 'active'
        org_key = self.course_info['org']
        # Default to the course generated for this test run.
        course_id = course_id if course_id else self.course_id
        ProgramsFixture().install_programs([
            FakeProgram(name=name, status=status, org_key=org_key, course_id=course_id),
        ])

    def auth(self, enroll=True):
        """Authenticate, enrolling the user in the configured course if requested."""
        CourseFixture(**self.course_info).install()
        course_id = self.course_id if enroll else None
        AutoAuthPage(self.browser, course_id=course_id).visit()
class ProgramListingPageTest(ProgramListingPageBase):
    """Verify user-facing behavior of the program listing page."""

    def _visit_and_check_cards(self, cards_present):
        """Load the listing page and check sidebar/card visibility."""
        self.listing_page.visit()
        self.assertTrue(self.listing_page.is_sidebar_present)
        if cards_present:
            self.assertTrue(self.listing_page.are_cards_present)
        else:
            self.assertFalse(self.listing_page.are_cards_present)

    def test_no_enrollments(self):
        """Verify that no cards appear when the user has no enrollments."""
        self.stub_api()
        self.auth(enroll=False)
        self._visit_and_check_cards(cards_present=False)

    def test_no_programs(self):
        """
        Verify that no cards appear when the user has enrollments
        but none are included in an active program.
        """
        # Point the stubbed program at a different course run so the
        # user's enrollment does not match it.
        other_course_id = self.course_id.replace(
            self.course_info['run'],
            'other_run'
        )
        self.stub_api(course_id=other_course_id)
        self.auth()
        self._visit_and_check_cards(cards_present=False)

    def test_enrollments_and_programs(self):
        """
        Verify that cards appear when the user has enrollments
        which are included in at least one active program.
        """
        self.stub_api()
        self.auth()
        self._visit_and_check_cards(cards_present=True)
@attr('a11y')
class ProgramListingPageA11yTest(ProgramListingPageBase):
    """Test program listing page accessibility."""

    def _audit_page(self):
        """Run the accessibility audit against the current page state."""
        self.listing_page.a11y_audit.check_for_accessibility_errors()

    def test_empty_a11y(self):
        """Test a11y of the page's empty state."""
        self.stub_api()
        self.auth(enroll=False)
        self.listing_page.visit()
        self.assertTrue(self.listing_page.is_sidebar_present)
        self.assertFalse(self.listing_page.are_cards_present)
        self._audit_page()

    def test_cards_a11y(self):
        """Test a11y when program cards are present."""
        self.stub_api()
        self.auth()
        self.listing_page.visit()
        self.assertTrue(self.listing_page.is_sidebar_present)
        self.assertTrue(self.listing_page.are_cards_present)
        self._audit_page()
|
devs1991/test_edx_docmode
|
common/test/acceptance/tests/lms/test_programs.py
|
Python
|
agpl-3.0
| 3,590
|
[
"VisIt"
] |
d07811190a59865c5e8608106dbc6ce19a4c15d4e5b28c97aee1d2a4d82666c5
|
"""
Utilities for handling the Graphic Unit Interface.
.. todo::
- Switch to Ttk instead of Tk for a better look of the GUI.
- Insert a button to decide if plot or not the singular values.
- Insert a button to decide the path where to save the structures at the end of the procedure.
- Use grid instead of pack
"""
import Tkinter
from tkFileDialog import askopenfilename
import ezyrb as ez
import numpy as np
import sys
import os
import webbrowser
class Gui(object):
    """
    The class for the Graphic Unit Interface.

    :cvar string output_name: name of the variable (or output) we want to extract from the solution file.
    :cvar string weights_name: name of the weights to be extracted from the solution file for the computation
        of the errors. If the solution files does not contain any weight (like volume or area of the cells)
        the weight is set to 1 for all the cells.
    :cvar string namefile_prefix: path and prefix of the solution files. The files are supposed to be named with
        the same prefix, plus an increasing numeration (from 0) in the same order as the parameter points.
    :cvar string file_format: format of the solution files.
    :cvar string url: url of the github page of EZyRB.
    :cvar string tria_path: path of the triangulation file.
    :cvar string basis_path: path of the pod_basis file.
    :cvar string parsing_file_path: path of the file to be parsed in order to allow to write the new output.
    :cvar string new_mu: new parameter values. The values must be separated by a comma.
    :cvar string outfilename: name of the new output file.
    :cvar string finish_label: string that says when the online step is done.
    :cvar Tkinter.Label label_new_mu: label where to print the new parameter value.
    :cvar Tkinter.Label label_error: label where to print the maximum error on the tesselation.
    :cvar Tkinter.Label label_tria: label where to print the triangulation file path.
    :cvar Tkinter.Label label_basis: label where to print the pod basis file path.
    :cvar Tkinter.Label label_parsing_file: label where to print the parsing file path.
    :cvar Tkinter.Label label_finish_online: label where to print the finish message.
    :cvar Pod/Interp ezyrb_handler: class for the model reduction. It can be both a Pod and a
        Interp class (it depends on the is_scalar_switch boolean).
    :cvar bool is_scalar_switch: switch to decide is the output of interest is a scalar or a field.
    """

    def __init__(self):
        self.root = Tkinter.Tk()
        self.root.title('EZyRB')
        self.output_name = Tkinter.StringVar()
        self.weights_name = Tkinter.StringVar()
        self.namefile_prefix = Tkinter.StringVar()
        self.file_format = Tkinter.StringVar()
        self.url = 'https://github.com/mathLab/EZyRB'
        self.tria_path = Tkinter.StringVar()
        self.basis_path = Tkinter.StringVar()
        self.parsing_file_path = Tkinter.StringVar()
        self.new_mu = Tkinter.StringVar()
        self.outfilename = Tkinter.StringVar()
        self.finish_label = Tkinter.StringVar()
        self.label_new_mu = None
        self.label_error = None
        self.label_tria = None
        self.label_basis = None
        self.label_parsing_file = None
        self.label_finish_online = None
        self.ezyrb_handler = None
        self.is_scalar_switch = Tkinter.BooleanVar()
        self.logo_label = None
        # Initialized here so main() does not create the attribute
        # outside __init__ (it builds the canvas holding the logo).
        self.logo_panel = None
        self.img = None

    def _start_ezyrb_offline(self):
        """
        The private method starts the ezyrb algorithm. Offline Step.
        """
        # A field output is reduced with POD; a scalar output is simply
        # interpolated.
        if not self.is_scalar_switch.get():
            self.ezyrb_handler = ez.pod.Pod(self.output_name.get(), self.weights_name.get(), self.namefile_prefix.get(), self.file_format.get())
        else:
            self.ezyrb_handler = ez.interp.Interp(self.output_name.get(), self.namefile_prefix.get(), self.file_format.get())
        self.ezyrb_handler.start()
        self.label_new_mu.configure(text='New parameter value ' + str(self.ezyrb_handler.cvt_handler.mu_values[:,-1]))
        self.label_error.configure(text='Error ' + str(self.ezyrb_handler.cvt_handler.max_error))

    def _start_ezyrb_online(self):
        """
        The private method starts the ezyrb algorithm. Online Step.
        """
        mu_value = np.fromstring(self.new_mu.get(), dtype=float, sep=',')
        # The online structures are read from the directory containing the
        # selected pod basis file.
        directory = (os.path.dirname(self.basis_path.get()) + '/')
        online_handler = ez.online.Online(mu_value, self.output_name.get(), directory=directory, is_scalar=self.is_scalar_switch.get())
        online_handler.run()
        online_handler.write_file(self.outfilename.get(), self.parsing_file_path.get())
        self.finish_label.set('Online step ended. New output file saved.')

    def _chose_tria_file(self):
        """
        The private method explores the file system and allows to select the wanted triangulation file.
        Up to now, you can select only .npy file.
        """
        filename_tria = askopenfilename(filetypes=[("Python file",('*.npy'))])
        self.tria_path.set(filename_tria)
        self.label_tria.configure(fg='green')

    def _chose_basis_file(self):
        """
        The private method explores the file system and allows to select the wanted pod basis file.
        Up to now, you can select only .npy file.
        """
        filename_basis = askopenfilename(filetypes=[("Python file",('*.npy'))])
        self.basis_path.set(filename_basis)
        self.label_basis.configure(fg='green')

    def _chose_parsing_file(self):
        """
        The private method explores the file system and allows to select the file
        to be parsed for writing the new output (.vtk, .mat or any file).
        """
        filename_parsing_file = askopenfilename(filetypes=[("VTK file",'*.vtk'), ("Matlab file",'*.mat'), ('All','*')])
        self.parsing_file_path.set(filename_parsing_file)
        self.label_parsing_file.configure(fg='green')

    def _add_snapshot(self):
        """
        The private method adds a snapshot to the database.
        """
        self.ezyrb_handler.add_snapshot()
        self.label_new_mu.configure(text='New parameter value' + str(self.ezyrb_handler.cvt_handler.mu_values[:,-1]))
        self.label_error.configure(text='Error ' + str(self.ezyrb_handler.cvt_handler.max_error))

    def _finish(self):
        """
        The private method to stop the iterations and save the structures necessary for the online step.
        """
        self.ezyrb_handler.write_structures()

    def _quit(self):
        """
        The private method close the program.
        """
        self.root.destroy()

    def _goto_website(self):
        """
        The private method opens the EZyRB main page on github.
        It is used for info about EZyRB in the menu.
        """
        webbrowser.open(self.url)

    def main(self):
        """
        The method inizializes and visualizes the window.
        """
        # Logo in the bottom-right corner.
        # NOTE(review): the gif path is relative to the working directory —
        # running from elsewhere will fail to load the image.
        self.logo_panel = Tkinter.Canvas(self.root, height=60, width=60)
        self.logo_panel.pack(side="bottom", padx=5, pady=5, anchor=Tkinter.SE)
        self.img = Tkinter.PhotoImage(master=self.logo_panel, file='readme/logo_EZyRB_gui.gif')
        self.logo_panel.create_image(35, 35, image=self.img)

        online_offline_frame = Tkinter.Frame(self.root)
        online_offline_frame.pack()

        ## OFFLINE
        offline_frame = Tkinter.Frame(online_offline_frame, relief=Tkinter.GROOVE, borderwidth=1, bg='#c1d0f0')
        offline_frame.grid(row=0, column=0, padx=5, pady=5)
        Tkinter.Label(offline_frame, text="OFFLINE", bg='#c1d0f0', font=("Arial", 20)).pack()
        text_input_frame = Tkinter.Frame(offline_frame, relief=Tkinter.GROOVE, borderwidth=1)
        text_input_frame.pack(padx=5, pady=5, anchor=Tkinter.W)
        # Buttons 1
        Tkinter.Label(text_input_frame, text="Path and prefix").grid(row=0, column=0)
        Tkinter.Entry(text_input_frame, bd=5, textvariable=self.namefile_prefix).grid(row=0, column=1)
        # Button 2
        Tkinter.Label(text_input_frame, text="Output of interest").grid(row=2, column=0)
        Tkinter.Entry(text_input_frame, bd=5, textvariable=self.output_name).grid(row=2, column=1)
        Tkinter.Label(text_input_frame, text="Output is a").grid(row=3, column=0, pady=2)
        switch_frame = Tkinter.Frame(text_input_frame)
        switch_frame.grid(row=3, column=1, pady=2)
        Tkinter.Radiobutton(switch_frame, text="Scalar", variable=self.is_scalar_switch, value=True).pack(side=Tkinter.LEFT)
        Tkinter.Radiobutton(switch_frame, text="Field", variable=self.is_scalar_switch, value=False).pack(side=Tkinter.RIGHT)
        # Button 3
        Tkinter.Label(text_input_frame, text="Weight name").grid(row=1, column=0)
        Tkinter.Entry(text_input_frame, bd=5, textvariable=self.weights_name).grid(row=1, column=1)
        # Button 4
        format_frame = Tkinter.Frame(text_input_frame)
        format_frame.grid(row=4, column=1, pady=2)
        Tkinter.Label(text_input_frame, text="Select file format").grid(row=4, column=0, pady=2)
        Tkinter.Radiobutton(format_frame, text=".vtk", variable=self.file_format, value='.vtk').pack(side=Tkinter.LEFT)
        Tkinter.Radiobutton(format_frame, text=".mat", variable=self.file_format, value='.mat').pack(side=Tkinter.RIGHT)
        # Start button
        start_frame_offline = Tkinter.Frame(offline_frame)
        start_frame_offline.pack(padx=10, pady=10)
        Tkinter.Button(start_frame_offline, text="Start EZyRB", command=self._start_ezyrb_offline, bg='#065893', fg='#f19625', font='bold').pack(padx=5, pady=5)
        display_frame = Tkinter.Frame(offline_frame, relief=Tkinter.GROOVE, borderwidth=1)
        display_frame.pack()
        self.label_new_mu = Tkinter.Label(display_frame, text='Start EZyRB to find the new parameter value')
        self.label_new_mu.pack(padx=0, pady=2, anchor=Tkinter.W)
        self.label_error = Tkinter.Label(display_frame, text='Start EZyRB to find the maximum error')
        self.label_error.pack(padx=0, pady=0, anchor=Tkinter.W)
        # Enrich database button
        chose_frame = Tkinter.Frame(offline_frame)
        chose_frame.pack(padx=5, pady=5)
        Tkinter.Button(chose_frame, text="Enrich", command=self._add_snapshot, bg='green', fg='white', font='bold').pack(side=Tkinter.LEFT, padx=5, pady=5)
        # Finish button
        Tkinter.Button(chose_frame, text="Finish", command=self._finish, bg='red', fg='white', font='bold').pack(side=Tkinter.RIGHT, padx=5, pady=5)

        ## ONLINE
        online_frame = Tkinter.Frame(online_offline_frame, relief=Tkinter.GROOVE, borderwidth=1, bg='#80ff80')
        online_frame.grid(row=0, column=1, padx=5, pady=5)
        Tkinter.Label(online_frame, text="ONLINE", bg='#80ff80', font=("Arial", 20)).pack()
        text_input_online_frame = Tkinter.Frame(online_frame, relief=Tkinter.GROOVE, borderwidth=1)
        text_input_online_frame.pack(padx=5, pady=5, anchor=Tkinter.W)
        Tkinter.Button(text_input_online_frame, text="Pick triangulation", command=self._chose_tria_file).grid(row=0, column=0)
        self.label_tria = Tkinter.Label(text_input_online_frame, textvariable=self.tria_path, fg='red')
        self.tria_path.set("No triangulation chosen!")
        self.label_tria.grid(row=0, column=1)
        Tkinter.Button(text_input_online_frame, text="Pick pod basis", command=self._chose_basis_file).grid(row=1, column=0)
        self.label_basis = Tkinter.Label(text_input_online_frame, textvariable=self.basis_path, fg='red')
        self.basis_path.set("No basis chosen!")
        self.label_basis.grid(row=1, column=1)
        Tkinter.Label(text_input_online_frame, text="Output of interest").grid(row=2, column=0)
        Tkinter.Entry(text_input_online_frame, bd=5, textvariable=self.output_name).grid(row=2, column=1)
        Tkinter.Label(text_input_online_frame, text="Output is a").grid(row=3, column=0, pady=2)
        switch_frame_online = Tkinter.Frame(text_input_online_frame)
        switch_frame_online.grid(row=3, column=1, pady=2)
        Tkinter.Radiobutton(switch_frame_online, text="Scalar", variable=self.is_scalar_switch, value=True).pack(side=Tkinter.LEFT)
        Tkinter.Radiobutton(switch_frame_online, text="Field", variable=self.is_scalar_switch, value=False).pack(side=Tkinter.RIGHT)
        format_frame_online = Tkinter.Frame(text_input_online_frame)
        format_frame_online.grid(row=4, column=1, pady=2)
        Tkinter.Label(text_input_online_frame, text="Select file format").grid(row=4, column=0, pady=2)
        Tkinter.Radiobutton(format_frame_online, text=".vtk", variable=self.file_format, value='.vtk').pack(side=Tkinter.LEFT)
        Tkinter.Radiobutton(format_frame_online, text=".mat", variable=self.file_format, value='.mat').pack(side=Tkinter.RIGHT)
        Tkinter.Button(text_input_online_frame, text="File for parsing", command=self._chose_parsing_file).grid(row=5, column=0)
        self.label_parsing_file = Tkinter.Label(text_input_online_frame, textvariable=self.parsing_file_path, fg='red')
        self.parsing_file_path.set("No parsing file chosen!")
        self.label_parsing_file.grid(row=5, column=1)
        Tkinter.Label(text_input_online_frame, text="New parameter").grid(row=6, column=0)
        Tkinter.Entry(text_input_online_frame, bd=5, textvariable=self.new_mu).grid(row=6, column=1)
        Tkinter.Label(text_input_online_frame, text="Output file name").grid(row=7, column=0)
        Tkinter.Entry(text_input_online_frame, bd=5, textvariable=self.outfilename).grid(row=7, column=1)
        start_frame_online = Tkinter.Frame(online_frame)
        start_frame_online.pack(padx=10, pady=10)
        Tkinter.Button(start_frame_online, text="Start EZyRB", command=self._start_ezyrb_online, bg='#065893', fg='#f19625', font='bold').pack(padx=5, pady=5)
        self.label_finish_online = Tkinter.Label(online_frame, textvariable=self.finish_label, bg='#80ff80')
        self.finish_label.set("")
        self.label_finish_online.pack()

        # Menu
        menubar = Tkinter.Menu(self.root)
        mainmenu = Tkinter.Menu(menubar, tearoff=0)
        mainmenu.add_command(label="Quit", command=self._quit)
        menubar.add_cascade(label="EZyRB", menu=mainmenu)
        helpmenu = Tkinter.Menu(menubar, tearoff=0)
        helpmenu.add_command(label="About...", command=self._goto_website)
        menubar.add_cascade(label="Help", menu=helpmenu)
        self.root.config(menu=menubar)

    def start(self):
        """
        The method starts the Tk event loop (blocks until the window closes).
        """
        self.root.mainloop()
|
fsalmoir/EZyRB
|
ezyrb/gui.py
|
Python
|
mit
| 13,946
|
[
"VTK"
] |
c7a599edc3f919f8095c77b8c33d7e56b138b72319eefe6a4b8579c81f10f03c
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Gaussian and planewaves mixed density fitting
Ref:
J. Chem. Phys. 147, 164119 (2017)
'''
import os
import time
import tempfile
import numpy
import h5py
import scipy.linalg
from pyscf import lib
from pyscf.lib import logger
from pyscf.df.outcore import _guess_shell_ranges
from pyscf.pbc import tools
from pyscf.pbc import gto
from pyscf.pbc.df import outcore
from pyscf.pbc.df import ft_ao
from pyscf.pbc.df import df
from pyscf.pbc.df import aft
from pyscf.pbc.df.df import fuse_auxcell, _round_off_to_odd_mesh
from pyscf.pbc.df.df_jk import zdotNN, zdotCN, zdotNC
from pyscf.pbc.lib.kpts_helper import (is_zero, gamma_point, member, unique,
KPT_DIFF_TOL)
from pyscf.pbc.df import mdf_jk
from pyscf.pbc.df import mdf_ao2mo
from pyscf.pbc.df.aft import _sub_df_jk_
from pyscf import __config__
# kpti == kptj: s2 symmetry
# kpti == kptj == 0 (gamma point): real
def _make_j3c(mydf, cell, auxcell, kptij_lst, cderi_file):
    # Build the 3-center (ij|L) integrals for mixed Gaussian/plane-wave
    # density fitting and store them, contracted with the (decomposed)
    # 2-center metric, into cderi_file under 'j3c'.
    t1 = (time.clock(), time.time())
    log = logger.Logger(mydf.stdout, mydf.verbose)
    max_memory = max(2000, mydf.max_memory-lib.current_memory()[0])
    fused_cell, fuse = fuse_auxcell(mydf, auxcell)

    # Create swap file to avoid huge cderi_file. see also function
    # pyscf.pbc.df.df._make_j3c
    swapfile = tempfile.NamedTemporaryFile(dir=os.path.dirname(cderi_file))
    fswap = lib.H5TmpFile(swapfile.name)
    # Unlink swapfile to avoid trash
    swapfile = None

    # Raw analytic 3-center integrals, stored as 'j3c-junk' in the swap file.
    outcore._aux_e2(cell, fused_cell, fswap, 'int3c2e', aosym='s2',
                    kptij_lst=kptij_lst, dataname='j3c-junk', max_memory=max_memory)
    t1 = log.timer_debug1('3c2e', *t1)

    nao = cell.nao_nr()
    naux = auxcell.nao_nr()
    mesh = mydf.mesh
    Gv, Gvbase, kws = cell.get_Gv_weights(mesh)
    b = cell.reciprocal_vectors()
    gxyz = lib.cartesian_prod([numpy.arange(len(x)) for x in Gvbase])
    ngrids = gxyz.shape[0]

    kptis = kptij_lst[:,0]
    kptjs = kptij_lst[:,1]
    kpt_ji = kptjs - kptis
    uniq_kpts, uniq_index, uniq_inverse = unique(kpt_ji)
    log.debug('Num uniq kpts %d', len(uniq_kpts))
    log.debug2('uniq_kpts %s', uniq_kpts)
    # j2c ~ (-kpt_ji | kpt_ji)
    j2c = fused_cell.pbc_intor('int2c2e', hermi=1, kpts=uniq_kpts)
    # Subtract the plane-wave (Coulomb-weighted) part from the analytic
    # 2-center metric for each unique k-point; result cached in the swap file.
    for k, kpt in enumerate(uniq_kpts):
        aoaux = ft_ao.ft_ao(fused_cell, Gv, None, b, gxyz, Gvbase, kpt).T
        aoaux = fuse(aoaux)
        coulG = mydf.weighted_coulG(kpt, False, mesh)
        LkR = numpy.asarray(aoaux.real, order='C')
        LkI = numpy.asarray(aoaux.imag, order='C')

        j2c_k = fuse(fuse(j2c[k]).T).T.copy()
        if is_zero(kpt):  # kpti == kptj
            j2c_k -= lib.dot(LkR*coulG, LkR.T)
            j2c_k -= lib.dot(LkI*coulG, LkI.T)
        else:
            # aoaux ~ kpt_ij, aoaux.conj() ~ kpt_kl
            j2cR, j2cI = zdotCN(LkR*coulG, LkI*coulG, LkR.T, LkI.T)
            j2c_k -= j2cR + j2cI * 1j
        fswap['j2c/%d'%k] = j2c_k
        aoaux = LkR = LkI = j2cR = j2cI = coulG = None
    j2c = None

    def cholesky_decomposed_metric(uniq_kptji_id):
        # Decompose the cached 2-center metric for one unique k-point.
        j2c = numpy.asarray(fswap['j2c/%d'%uniq_kptji_id])
        j2c_negative = None
        # Note large difference may be found in results between the CD/eig treatments.
        # In some systems, small integral errors can lead to different treatments of
        # linear dependency which can be observed in the total energy/orbital energy
        # around 4th decimal place.
        # try:
        #     j2c = scipy.linalg.cholesky(j2c, lower=True)
        #     j2ctag = 'CD'
        # except scipy.linalg.LinAlgError as e:
        #
        # Abandon CD treatment for better numerical stability
        w, v = scipy.linalg.eigh(j2c)
        log.debug('MDF metric for kpt %s cond = %.4g, drop %d bfns',
                  uniq_kptji_id, w[-1]/w[0], numpy.count_nonzero(w<mydf.linear_dep_threshold))
        v1 = v[:,w>mydf.linear_dep_threshold].T.conj()
        v1 /= numpy.sqrt(w[w>mydf.linear_dep_threshold]).reshape(-1,1)
        j2c = v1
        if cell.dimension == 2 and cell.low_dim_ft_type != 'inf_vacuum':
            # Keep the negative-eigenvalue branch separately for 2D systems.
            idx = numpy.where(w < -mydf.linear_dep_threshold)[0]
            if len(idx) > 0:
                j2c_negative = (v[:,idx]/numpy.sqrt(-w[idx])).conj().T
        j2ctag = 'eig'
        return j2c, j2c_negative, j2ctag

    feri = h5py.File(cderi_file, 'a')
    feri['j3c-kptij'] = kptij_lst
    nsegs = len(fswap['j3c-junk/0'])

    def make_kpt(uniq_kptji_id, cholesky_j2c):  # kpt = kptj - kpti
        # Assemble the metric-contracted 3-center tensor for all (ki,kj)
        # pairs sharing this unique kpt difference.
        kpt = uniq_kpts[uniq_kptji_id]
        log.debug1('kpt = %s', kpt)
        adapted_ji_idx = numpy.where(uniq_inverse == uniq_kptji_id)[0]
        adapted_kptjs = kptjs[adapted_ji_idx]
        nkptj = len(adapted_kptjs)
        log.debug1('adapted_ji_idx = %s', adapted_ji_idx)

        j2c, j2c_negative, j2ctag = cholesky_j2c
        Gaux = ft_ao.ft_ao(fused_cell, Gv, None, b, gxyz, Gvbase, kpt).T
        Gaux = fuse(Gaux)
        Gaux *= mydf.weighted_coulG(kpt, False, mesh)
        kLR = Gaux.T.real.copy('C')
        kLI = Gaux.T.imag.copy('C')

        if is_zero(kpt):  # kpti == kptj
            aosym = 's2'
            nao_pair = nao*(nao+1)//2

            if cell.dimension == 3:
                vbar = fuse(mydf.auxbar(fused_cell))
                ovlp = cell.pbc_intor('int1e_ovlp', hermi=1, kpts=adapted_kptjs)
                ovlp = [lib.pack_tril(s) for s in ovlp]
        else:
            aosym = 's1'
            nao_pair = nao**2

        mem_now = lib.current_memory()[0]
        log.debug2('memory = %s', mem_now)
        max_memory = max(2000, mydf.max_memory-mem_now)
        # nkptj for 3c-coulomb arrays plus 1 Lpq array
        buflen = min(max(int(max_memory*.38e6/16/naux/(nkptj+1)), 1), nao_pair)
        shranges = _guess_shell_ranges(cell, buflen, aosym)
        buflen = max([x[2] for x in shranges])
        # +1 for a pqkbuf
        if aosym == 's2':
            Gblksize = max(16, int(max_memory*.1e6/16/buflen/(nkptj+1)))
        else:
            Gblksize = max(16, int(max_memory*.2e6/16/buflen/(nkptj+1)))
        Gblksize = min(Gblksize, ngrids, 16384)
        pqkRbuf = numpy.empty(buflen*Gblksize)
        pqkIbuf = numpy.empty(buflen*Gblksize)
        # buf for ft_aopair
        buf = numpy.empty((nkptj,buflen*Gblksize), dtype=numpy.complex128)

        def pw_contract(istep, sh_range, j3cR, j3cI):
            # Subtract the plane-wave contribution from one shell-range
            # slab, contract with the decomposed metric, write to feri.
            bstart, bend, ncol = sh_range
            if aosym == 's2':
                shls_slice = (bstart, bend, 0, bend)
            else:
                shls_slice = (bstart, bend, 0, cell.nbas)

            for p0, p1 in lib.prange(0, ngrids, Gblksize):
                dat = ft_ao._ft_aopair_kpts(cell, Gv[p0:p1], shls_slice, aosym,
                                            b, gxyz[p0:p1], Gvbase, kpt,
                                            adapted_kptjs, out=buf)
                nG = p1 - p0
                for k, ji in enumerate(adapted_ji_idx):
                    aoao = dat[k].reshape(nG,ncol)
                    pqkR = numpy.ndarray((ncol,nG), buffer=pqkRbuf)
                    pqkI = numpy.ndarray((ncol,nG), buffer=pqkIbuf)
                    pqkR[:] = aoao.real.T
                    pqkI[:] = aoao.imag.T

                    lib.dot(kLR[p0:p1].T, pqkR.T, -1, j3cR[k], 1)
                    lib.dot(kLI[p0:p1].T, pqkI.T, -1, j3cR[k], 1)
                    if not (is_zero(kpt) and gamma_point(adapted_kptjs[k])):
                        lib.dot(kLR[p0:p1].T, pqkI.T, -1, j3cI[k], 1)
                        lib.dot(kLI[p0:p1].T, pqkR.T,  1, j3cI[k], 1)

            for k, ji in enumerate(adapted_ji_idx):
                if is_zero(kpt) and gamma_point(adapted_kptjs[k]):
                    v = j3cR[k]
                else:
                    v = j3cR[k] + j3cI[k] * 1j
                if j2ctag == 'CD':
                    v = scipy.linalg.solve_triangular(j2c, v, lower=True, overwrite_b=True)
                    feri['j3c/%d/%d'%(ji,istep)] = v
                else:
                    feri['j3c/%d/%d'%(ji,istep)] = lib.dot(j2c, v)

                # low-dimension systems
                if j2c_negative is not None:
                    feri['j3c-/%d/%d'%(ji,istep)] = lib.dot(j2c_negative, v)

        with lib.call_in_background(pw_contract) as compute:
            col1 = 0
            for istep, sh_range in enumerate(shranges):
                log.debug1('int3c2e [%d/%d], AO [%d:%d], ncol = %d', \
                           istep+1, len(shranges), *sh_range)
                bstart, bend, ncol = sh_range
                col0, col1 = col1, col1+ncol
                j3cR = []
                j3cI = []
                for k, idx in enumerate(adapted_ji_idx):
                    v = [fswap['j3c-junk/%d/%d'%(idx,i)][0,col0:col1].T for i in range(nsegs)]
                    v = fuse(numpy.vstack(v))
                    if is_zero(kpt) and cell.dimension == 3:
                        # G=0 (vbar) correction for 3D gamma-difference case.
                        for i in numpy.where(vbar != 0)[0]:
                            v[i] -= vbar[i] * ovlp[k][col0:col1]
                    j3cR.append(numpy.asarray(v.real, order='C'))
                    if is_zero(kpt) and gamma_point(adapted_kptjs[k]):
                        j3cI.append(None)
                    else:
                        j3cI.append(numpy.asarray(v.imag, order='C'))
                v = None
                compute(istep, sh_range, j3cR, j3cI)
        for ji in adapted_ji_idx:
            del(fswap['j3c-junk/%d'%ji])

    # Wrapped around boundary and symmetry between k and -k can be used
    # explicitly for the metric integrals.  We consider this symmetry
    # because it is used in the df_ao2mo module when contracting two 3-index
    # integral tensors to the 4-index 2e integral tensor. If the symmetry
    # related k-points are treated separately, the resultant 3-index tensors
    # may have inconsistent dimension due to the numerial noise when handling
    # linear dependency of j2c.
    def conj_j2c(cholesky_j2c):
        j2c, j2c_negative, j2ctag = cholesky_j2c
        if j2c_negative is None:
            return j2c.conj(), None, j2ctag
        else:
            return j2c.conj(), j2c_negative.conj(), j2ctag

    a = cell.lattice_vectors() / (2*numpy.pi)
    def kconserve_indices(kpt):
        '''search which (kpts+kpt) satisfies momentum conservation'''
        kdif = numpy.einsum('wx,ix->wi', a, uniq_kpts + kpt)
        kdif_int = numpy.rint(kdif)
        mask = numpy.einsum('wi->i', abs(kdif - kdif_int)) < KPT_DIFF_TOL
        uniq_kptji_ids = numpy.where(mask)[0]
        return uniq_kptji_ids

    done = numpy.zeros(len(uniq_kpts), dtype=bool)
    for k, kpt in enumerate(uniq_kpts):
        if done[k]:
            continue

        log.debug1('Cholesky decomposition for j2c at kpt %s', k)
        cholesky_j2c = cholesky_decomposed_metric(k)

        # The k-point k' which has (k - k') * a = 2n pi. Metric integrals have the
        # symmetry S = S
        uniq_kptji_ids = kconserve_indices(-kpt)
        log.debug1("Symmetry pattern (k - %s)*a= 2n pi", kpt)
        log.debug1("    make_kpt for uniq_kptji_ids %s", uniq_kptji_ids)
        for uniq_kptji_id in uniq_kptji_ids:
            if not done[uniq_kptji_id]:
                make_kpt(uniq_kptji_id, cholesky_j2c)
        done[uniq_kptji_ids] = True

        # The k-point k' which has (k + k') * a = 2n pi. Metric integrals have the
        # symmetry S = S*
        uniq_kptji_ids = kconserve_indices(kpt)
        log.debug1("Symmetry pattern (k + %s)*a= 2n pi", kpt)
        log.debug1("    make_kpt for %s", uniq_kptji_ids)
        cholesky_j2c = conj_j2c(cholesky_j2c)
        for uniq_kptji_id in uniq_kptji_ids:
            if not done[uniq_kptji_id]:
                make_kpt(uniq_kptji_id, cholesky_j2c)
        done[uniq_kptji_ids] = True

    feri.close()
# valence_exp = 1. are typically the Gaussians in the valence
VALENCE_EXP = getattr(__config__, 'pbc_df_mdf_valence_exp', 1.0)

def _mesh_for_valence(cell, valence_exp=VALENCE_EXP):
    '''Energy cutoff estimation

    Estimate the FFT mesh needed to represent only the valence (diffuse)
    part of the basis: exponents steeper than ``valence_exp`` are capped so
    they do not inflate the kinetic-energy cutoff.
    '''
    # Looser precision than cell.precision; the PW part only needs to resolve
    # the smooth functions.
    precision = cell.precision * 10
    Ecut_max = 0
    for i in range(cell.nbas):
        l = cell.bas_angular(i)
        es = cell.bas_exp(i).copy()
        # Cap steep exponents at the valence exponent before estimating Ecut.
        es[es>valence_exp] = valence_exp
        cs = abs(cell.bas_ctr_coeff(i)).max(axis=1)
        ke_guess = gto.cell._estimate_ke_cutoff(es, l, cs, precision)
        Ecut_max = max(Ecut_max, ke_guess.max())
    mesh = tools.cutoff_to_mesh(cell.lattice_vectors(), Ecut_max)
    # Never exceed the cell's own mesh.
    mesh = numpy.min((mesh, cell.mesh), axis=0)
    if cell.dimension < 2 or cell.low_dim_ft_type == 'inf_vacuum':
        # Non-periodic directions keep the full cell mesh.
        mesh[cell.dimension:] = cell.mesh[cell.dimension:]
    return _round_off_to_odd_mesh(mesh)
del(VALENCE_EXP)
class MDF(df.DF):
    '''Gaussian and planewaves mixed density fitting
    '''
    def __init__(self, cell, kpts=numpy.zeros((1,3))):
        self.cell = cell
        self.stdout = cell.stdout
        self.verbose = cell.verbose
        self.max_memory = cell.max_memory

        self.kpts = kpts  # default is gamma point
        self.kpts_band = None
        self._auxbasis = None
        # PW mesh sized for the valence (smooth) functions only.
        self.mesh = _mesh_for_valence(cell)

        # In MDF, fitting PWs (self.mesh), and parameters eta and exp_to_discard
        # are related to each other. The compensated function does not need to
        # be very smooth. It just needs to be expanded by the specified PWs
        # (self.mesh). self.eta is estimated on the fly based on the value of
        # self.mesh.
        self.eta = None

        # Any functions which are more diffused than the compensated Gaussian
        # are linearly dependent to the PWs. They can be removed from the
        # auxiliary set without affecting the accuracy of MDF. exp_to_discard
        # can be set to the value of self.eta
        self.exp_to_discard = None

        # The following attributes are not input options.
        self.exxdiv = None  # to mimic KRHF/KUHF object in function get_coulG
        self.auxcell = None
        self.blockdim = getattr(__config__, 'df_df_DF_blockdim', 240)
        self.linear_dep_threshold = df.LINEAR_DEP_THR
        self._j_only = False
        # If _cderi_to_save is specified, the 3C-integral tensor will be saved in this file.
        self._cderi_to_save = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
        # If _cderi is specified, the 3C-integral tensor will be read from this file
        self._cderi = None
        self._rsh_df = {}  # Range separated Coulomb DF objects
        self._keys = set(self.__dict__.keys())

    @property
    def eta(self):
        # Compensating-Gaussian exponent. If not set explicitly, it is derived
        # from the kinetic-energy cutoff implied by self.mesh.
        if self._eta is not None:
            return self._eta
        else:
            cell = self.cell
            if cell.dimension == 0:
                return 0.2
            ke_cutoff = tools.mesh_to_cutoff(cell.lattice_vectors(), self.mesh)
            ke_cutoff = ke_cutoff[:cell.dimension].min()
            return aft.estimate_eta_for_ke_cutoff(cell, ke_cutoff, cell.precision)
    @eta.setter
    def eta(self, x):
        self._eta = x

    @property
    def exp_to_discard(self):
        # Threshold below which diffuse auxiliary exponents are dropped;
        # defaults to eta when not set explicitly.
        if self._exp_to_discard is not None:
            return self._exp_to_discard
        else:
            return self.eta
    @exp_to_discard.setter
    def exp_to_discard(self, x):
        self._exp_to_discard = x

    # Bound from the module-level 3-center integral builder.
    _make_j3c = _make_j3c

    # Note: Special exxdiv by default should not be used for an arbitrary
    # input density matrix. When the df object was used with the molecular
    # post-HF code, get_jk was often called with an incomplete DM (e.g. the
    # core DM in CASCI). An SCF level exxdiv treatment is inadequate for
    # post-HF methods.
    def get_jk(self, dm, hermi=1, kpts=None, kpts_band=None,
               with_j=True, with_k=True, omega=None, exxdiv=None):
        """Build Coulomb (J) and exchange (K) matrices for the given DM.

        Dispatches to the RSH helper when omega is given, to the single
        k-point routine when kpts has shape (3,), otherwise to the k-point
        sampled routines.
        """
        if omega is not None:  # J/K for RSH functionals
            return _sub_df_jk_(self, dm, hermi, kpts, kpts_band,
                               with_j, with_k, omega, exxdiv)

        if kpts is None:
            if numpy.all(self.kpts == 0):
                # Gamma-point calculation by default
                kpts = numpy.zeros(3)
            else:
                kpts = self.kpts
        kpts = numpy.asarray(kpts)

        if kpts.shape == (3,):
            return mdf_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j,
                                 with_k, exxdiv)

        vj = vk = None
        if with_k:
            vk = mdf_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
        if with_j:
            vj = mdf_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
        return vj, vk

    get_eri = get_ao_eri = mdf_ao2mo.get_eri
    ao2mo = get_mo_eri = mdf_ao2mo.general
    ao2mo_7d = mdf_ao2mo.ao2mo_7d

    def update_mp(self):
        pass

    def update_cc(self):
        pass

    def update(self):
        pass

################################################################################
# With this function to mimic the molecular DF.loop function, the pbc gamma
# point DF object can be used in the molecular code
    def loop(self, blksize=None):
        # Yield PW (AFTDF) blocks first, then Gaussian (df.DF) blocks.
        for dat in aft.AFTDF.loop(self, blksize):
            yield dat
        for dat in df.DF.loop(self, blksize):
            yield dat

    def get_naoaux(self):
        # Total auxiliary size is the sum of the Gaussian (df.DF) and
        # plane-wave (aft.AFTDF) auxiliary dimensions.
        return df.DF.get_naoaux(self) + aft.AFTDF.get_naoaux(self)
|
gkc1000/pyscf
|
pyscf/pbc/df/mdf.py
|
Python
|
apache-2.0
| 17,797
|
[
"Gaussian",
"PySCF"
] |
ba6c6a9a9e534045b3c3f1dc0a4f22f115215ef119f232869a526d0f00037df2
|
#!/usr/bin/python
########################################################################
# 15 May 2014
# Patrick Lombard, Centre for Stem Stem Research
# Core Bioinformatics Group
# University of Cambridge
# All right reserved.
########################################################################
import subprocess
import sys, os, re
import pybedtools
import pysam
import argparse
import operator
import pkg_resources
import pychiptools
##Must include scaling!
def genomeCoverage(name, genome, house=None, deseq=None, split=False):
print "==> Converting bed to bedGraph...\n"
inbed = pybedtools.BedTool(name+"_ucsc.BED")
if split:
if house:
outcov1 = inbed.genome_coverage(bg=True, strand="+", genome=genome, scale=house)
output1 = name+"_pos_house.bedGraph"
outcov2 = inbed.genome_coverage(bg=True, strand="-", genome=genome, scale=house)
output2 = name+"_neg_house.bedGraph"
output = [output1, output2]
outcov1.saveas(output1)
outcov2.saveas(output2)
elif deseq:
outcov1 = inbed.genome_coverage(bg=True, strand="+", genome=genome, scale=deseq)
output1 = name+"_pos_deseq.bedGraph"
outcov2 = inbed.genome_coverage(bg=True, strand="-", genome=genome, scale=deseq)
output2 = name+"_neg_deseq.bedGraph"
output = [output1, output2]
outcov1.saveas(output1)
outcov2.saveas(output2)
else:
if house:
outcov = inbed.genome_coverage(bg=True, genome=genome, scale=house)
output = name+"_house.bedGraph"
elif deseq:
outcov = inbed.genome_coverage(bg=True, genome=genome, scale=deseq)
output = name+"_deseq2.bedGraph"
outcov.saveas(output)
return output
def bedgraphtobigwig(bedgraph, chrom, split=False):
if split:
for bedg in bedgraph:
bw = re.sub(".bedGraph$", ".bw", bedg)
print "==> Converting bedGraph to bigWig...\n"
command = ["bedGraphToBigWig", bedg, chrom, bw]
subprocess.call(command)
else:
bw = re.sub(".bedGraph$", ".bw", bedgraph)
print "==> Converting bedGraph to bigWig...\n"
command = ["bedGraphToBigWig", bedgraph, chrom, bw]
subprocess.call(command)
def normalise_to_housekeeper(count_file, gene_id="ENSMUSG00000057666"):
    """Return the integer count of the housekeeper gene from an HTSeq-count file.

    `count_file` is a tab-separated HTSeq-count output (gene_id<TAB>count).
    `gene_id` defaults to Gapdh (mouse) for backward compatibility but is
    now a parameter so other genes/species can be used.

    Bug fix: previously, if the gene was absent the function fell off the
    loop and referenced an unassigned variable (UnboundLocalError). It now
    returns as soon as the gene is found and raises ValueError otherwise.
    """
    with open(count_file) as f:
        for line in f:
            word = line.rstrip().split("\t")
            if word[0] == gene_id:
                return int(word[1])
    raise ValueError("Housekeeper gene %s not found in %s" % (gene_id, count_file))
if __name__ == "__main__":
    # CLI entry point: converts a UCSC-format BED to normalised bigWig tracks.
    parser = argparse.ArgumentParser(description="Processes RNA-seq samples to bigWig tracks")
    parser.add_argument('-i', '--input', help='BED file in UCSC format', required=True)
    parser.add_argument('-g', '--genome', help='Genome the samples are aligned to, options include mm10/mm9/hg19', required=True)
    parser.add_argument('-a', '--house', help='Housekeeper normalisation. Input file is HTSEQ-count file containing gene for normalisation on first line', required=False)
    parser.add_argument('-d', '--deseq2', help='DESEQ2 sizeFactor normalisation')
    parser.add_argument('-s', '--split', help='Splits the bigwig by strand', action='store_true')
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    args = vars(parser.parse_args())
    # NOTE(review): chrom.sizes is looked up in package 'pyrnatools' although
    # this module imports pychiptools -- confirm which package ships the data.
    chrom = pkg_resources.resource_filename('pyrnatools', 'data/{}.chrom.sizes'.format(args["genome"]))
    if not os.path.isfile(chrom):
        raise Exception("Unsupported Genome!")
    name = re.sub("_ucsc.BED$", "", args["input"])
    name = re.sub(".BED$", "", name)
    if args["house"]:
        house = normalise_to_housekeeper(args["house"])
        scale = float(1000)/int(house)  # reads per 1000 housekeeper counts
        bedgraph = genomeCoverage(name, args["genome"], house=scale, split=args["split"])
    elif args["deseq2"]:
        sizeF = 1/float(args["deseq2"])
        bedgraph = genomeCoverage(name, args["genome"], deseq=sizeF, split=args["split"])
    else:
        # Bug fix: previously fell through with 'bedgraph' undefined (NameError).
        parser.error("Provide either --house or --deseq2 for normalisation")
    bedgraphtobigwig(bedgraph, chrom, args["split"])
|
pdl30/rnaseq_misc
|
rna_track_norm.py
|
Python
|
gpl-2.0
| 3,947
|
[
"HTSeq",
"pysam"
] |
e53c9eff3cdd62ee7e91eee3285423a88fb4363e5e98e449bd5d045991679dc5
|
from .. import node
import pytest
from ..util.dispatch import method_store, multimethod
from .. import special
class DummyNode(node.Node):
    """Minimal concrete Node with three arbitrary fields, used by the tests."""

    def __init__(self, a, b, c):
        node.Node.__init__(self)
        self.a, self.b, self.c = a, b, c
def test_node_shared_methods():
    """repr, equality and _attrs come for free from the Node base class."""
    left = DummyNode('qua', None, 0)
    right = DummyNode('qua', None, 0)
    assert repr(left) == "DummyNode(a='qua', b=None, c=0)"
    assert left == right
    assert left._attrs
def test_compoment_equality():
    """Leaf nodes, lists of nodes, and BinOp trees compare field-by-field."""
    assert node.ValueId(value='x') == node.ValueId(value='x')
    assert node.Int(value='1') == node.Int(value='1')

    pair = [node.ValueId(value='x'), node.Int(value='1')]
    assert pair == [node.ValueId(value='x'), node.Int(value='1')]

    assert node.SymbolId(value='*') == node.SymbolId(value='*')

    def mul_op():
        return node.BinOp(
            args=[node.ValueId(value='x'), node.Int(value='1')],
            func=node.SymbolId(value='*'),
        )

    assert mul_op() == mul_op()
def test_complex_equality():
    """Whole modules containing identical expression trees compare equal."""
    def build_module():
        return node.Module(
            exprs=[
                node.BinOp(
                    args=[node.ValueId(value='x'), node.Int(value='1')],
                    func=node.SymbolId(value='*'),
                )
            ],
            name='_anon_module_0',
        )

    assert build_module() == build_module()
def test_node_attributes():
    """Exercise the Node attribute store: [] access, path keys, soft/hard set."""
    # pylint: disable=W0212
    # pylint: disable=W0104
    given = node.Module(name='test', exprs=[])

    # Unset keys raise KeyError on read.
    with pytest.raises(KeyError):
        given['foo']

    given['foo'] = 0
    assert given['foo'] == 0

    # Re-assigning an existing key via [] raises (write-once semantics).
    with pytest.raises(KeyError):
        given['foo'] = 0

    # 'qua/bar' is a path key; it is also reachable as nested lookups.
    given['qua/bar'] = 0
    assert given['qua/bar'] == 0
    assert given['qua']['bar'] == 0

    # set_soft with the same value succeeds, but changing the value raises;
    # presumably soft-set refuses to overwrite -- confirm in node.Node.
    given.set_soft('foo', 0)
    with pytest.raises(KeyError):
        given.set_soft('foo', 1)

    # set_hard overwrites unconditionally.
    given.set_hard('foo', 1)
    assert given._attrs['foo'] == 1
class SpyVisitor(object):
    """Visitor that records every node it sees, for walk-order assertions."""
    # Dispatch table for the multimethod-based visitor protocol.
    _store = method_store()

    def __init__(self):
        self.nodes = []  # visited nodes, in visitation order

    def validate(self, node_names):
        """Assert the recorded nodes' classes match node_names, in order."""
        expected_classes = [
            getattr(node, name)
            for name in node_names
        ]
        result_classes = [
            n.__class__
            for n in self.nodes
        ]
        # Debug aid when the assertion below fails.
        for cls in result_classes:
            print cls
        assert result_classes == expected_classes

    @multimethod(_store)
    def visit(self, n):
        # Catch-all case: record every node regardless of type.
        self.nodes.append(n)
@pytest.fixture
def spy():
    """Provide a fresh SpyVisitor for each test."""
    visitor = SpyVisitor()
    return visitor
def test_walk_down(spy):
    """walk_down visits parents before children (pre-order traversal)."""
    given = node.Module(name='test', exprs=[
        node.Int('10'),
        node.Val(
            name=node.ValueId('x'),
            value=node.Block(exprs=[
                node.ValueId('x'),
                node.TypeId('T'),
            ])
        ),
        node.If(
            pred=node.BinOp(func='<', args=[
                node.Int('0'),
                node.Real('1.0'),
            ]),
            if_body=node.Assign(
                name=node.ValueId('x'),
                value=node.KV(
                    key='z',
                    value=node.Int('10')
                ),
            ),
            else_body=node.Unit(),
        ),
    ])

    # Expected class names, in pre-order.
    expected = [
        'Module',
        'Int',
        'Val',
        'Block',
        'ValueId',
        'TypeId',
        'If',
        'BinOp',
        'Int',
        'Real',
        'Assign',
        'KV',
        'Int',
        'Unit',
    ]
    given.walk_down(spy)
    spy.validate(expected)
def test_walk_up(spy):
    """walk_up visits children before parents (post-order traversal)."""
    given = node.Module(name='test', exprs=[
        node.Int('10'),
        node.Val(
            name=node.ValueId('x'),
            value=node.Block(exprs=[
                node.ValueId('x'),
                node.TypeId('T'),
            ])
        ),
        node.If(
            pred=node.BinOp(func='<', args=[
                node.Int('0'),
                node.Real('1.0'),
            ]),
            if_body=node.Assign(
                name=node.ValueId('x'),
                value=node.KV(
                    key='z',
                    value=node.Int('10')
                ),
            ),
            else_body=node.Unit(),
        ),
    ])

    # Expected class names, in post-order (same tree as test_walk_down).
    expected = [
        'Int',
        'ValueId',
        'TypeId',
        'Block',
        'Val',
        'Int',
        'Real',
        'BinOp',
        'Int',
        'KV',
        'Assign',
        'Unit',
        'If',
        'Module',
    ]
    given.walk_up(spy)
    spy.validate(expected)
|
dacjames/mara-lang
|
bootstrap/mara/test/test_node.py
|
Python
|
mit
| 4,741
|
[
"VisIt"
] |
66be9b7ff73b11be75c663372a9b2eaddb9adbd11d25ce9980af499fb1879a29
|
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import sys, os, json, re, subprocess
class Jobs:
    """ Class to manage I/O to the supplied json file """

    def __init__(self, json_file):
        """Load job metadata from json_file; raise if the file is missing."""
        if not os.path.exists(json_file):
            raise Exception('File does not exist: %s' % (json_file))
        with open(json_file, 'r') as handle:
            self.__job_data = json.load(handle)

    def yieldJobsResultPath(self):
        """Yield (result_path, meta) pairs for every queued job."""
        for result_path, meta in self.__job_data.items():
            yield result_path, meta
def hasExited(meta):
    """
    determine which scheduler plugin was used to launch jobs, and query that
    system for current status on job
    """
    # Only the PBS plugin is recognized; anything else reports "not exited".
    if meta.get('QUEUEING', '') != 'RunPBS':
        return None
    job_id = meta['RunPBS']['ID'].split('.')[0]
    proc = subprocess.Popen(
        ['qstat', '-xf', job_id],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
    )
    qstat_output = proc.communicate()[0].decode('utf-8')
    # qstat -xf prints "Exit_status = N" once the job has finished.
    if re.findall(r'Exit_status = (\d+)', qstat_output):
        return True
def isNotFinished(jobs):
    """Return True if any queued job lacks results and has not exited."""
    for path, meta in jobs.yieldJobsResultPath():
        if not isinstance(meta, dict) or not meta.get('QUEUEING', {}):
            continue
        results_json = os.path.join(path, '.previous_test_results.json')
        if not os.path.exists(results_json) and not hasExited(meta):
            return True
def usage():
    """Print usage information and exit with status 1.

    Bug fix: the two adjacent string literals previously concatenated with no
    separator, printing "...in which case alltests in all json files...".
    """
    print('Supply a path to json queue file. Multiple files are supported, in which case all '
          'tests in all json files must be finished for this script to exit with 0.')
    sys.exit(1)
if __name__ == '__main__':
    # One or more queue files may be given; exit 1 (not finished) as soon as
    # any file still has unfinished jobs, otherwise fall through to exit 0.
    args = sys.argv[1:]
    if len(args) == 0:
        usage()
    for queue_file in args:
        jobs = Jobs(queue_file)
        if isNotFinished(jobs):
            sys.exit(1)
|
harterj/moose
|
scripts/are_queued_jobs_finished.py
|
Python
|
lgpl-2.1
| 2,058
|
[
"MOOSE"
] |
d6cd718b80b04bcb54418450137950ba5c99b5a1df2ed993d3e1e22560a07ee2
|
import numpy as np
import os
import pickle
import pytest
import re
import time
import shutil
from copy import deepcopy
from numpy import allclose, isclose
from flare import struc, env, gp
from flare.parameters import Parameters
from flare.mgp import MappedGaussianProcess
from flare.lammps import lammps_calculator
from flare.utils.element_coder import _Z_to_mass, _Z_to_element, _element_to_Z
from flare.ase.calculator import FLARE_Calculator
from flare.ase.atoms import FLARE_Atoms
from ase.calculators.lammpsrun import LAMMPS
from .fake_gp import get_gp, get_random_structure
from .mgp_test import clean, compare_triplet, predict_atom_diag_var
# Parametrization axes shared by every test in this module.
body_list = ["2", "3"]       # two-body and three-body kernels
multi_list = [True, False]   # with / without multi-hyperparameter masks
force_block_only = False     # passed to get_gp as force_only
curr_path = os.getcwd()      # remembered so LAMMPS tests can chdir back
@pytest.mark.skipif(
    not os.environ.get("lmp", False),
    reason=(
        "lmp not found "
        "in environment: Please install LAMMPS "
        "and set the $lmp env. "
        "variable to point to the executatble."
    ),
)
@pytest.fixture(scope="module")
def all_gp():
    """Module-scoped dict of trained GP models keyed by f"{bodies}{multihyps}"."""
    allgp_dict = {}
    np.random.seed(123)  # deterministic random training structures
    for bodies in body_list:
        for multihyps in multi_list:
            gp_model = get_gp(
                bodies,
                "mc",
                multihyps,
                cellabc=[1.5, 1, 2],
                force_only=force_block_only,
                noa=5,
            )
            gp_model.parallel = True
            gp_model.n_cpus = 2

            allgp_dict[f"{bodies}{multihyps}"] = gp_model

    yield allgp_dict
    del allgp_dict
@pytest.fixture(scope="module")
def all_mgp():
    """Module-scoped dict of MGP placeholders, populated later by test_init."""
    placeholders = {
        f"{bodies}{multihyps}": None
        for bodies in ["2", "3", "2+3"]
        for multihyps in [False, True]
    }
    yield placeholders
    del placeholders
@pytest.fixture(scope="module")
def all_lmp():
    """Module-scoped dict of ASE LAMMPS calculators, keyed like all_gp/all_mgp."""
    all_lmp_dict = {}
    species = ["H", "He"]
    specie_symbol_list = " ".join(species)
    masses = [
        f"{i} {_Z_to_mass[_element_to_Z[species[i]]]}" for i in range(len(species))
    ]
    parameters = {
        "command": os.environ.get("lmp"),  # set up executable for ASE
        "newton": "off",
        "pair_style": "mgp",
        "mass": masses,
    }

    # set up input params
    for bodies in body_list:
        for multihyps in multi_list:
            # create ASE calc
            label = f"{bodies}{multihyps}"
            files = [f"{label}.mgp"]  # coefficient file written by earlier tests
            by = "yes" if bodies == "2" else "no"
            ty = "yes" if bodies == "3" else "no"
            parameters["pair_coeff"] = [
                f"* * {label}.mgp {specie_symbol_list} {by} {ty}"
            ]

            lmp_calc = LAMMPS(
                label=label,
                keep_tmp_files=True,
                tmp_dir="./tmp/",
                parameters=parameters,
                files=files,
                specorder=species,
            )
            all_lmp_dict[f"{bodies}{multihyps}"] = lmp_calc

    yield all_lmp_dict
    del all_lmp_dict
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_init(bodies, multihyps, all_mgp, all_gp):
    """
    test the init function
    """
    clean()
    gp_model = all_gp[f"{bodies}{multihyps}"]

    # grid parameters
    grid_params = {}
    if "2" in bodies:
        grid_params["twobody"] = {"grid_num": [160], "lower_bound": [0.02]}
    if "3" in bodies:
        grid_params["threebody"] = {"grid_num": [31, 32, 33], "lower_bound": [0.02] * 3}

    lammps_location = f"{bodies}{multihyps}"
    data = gp_model.training_statistics

    # Prefer var_map="simple"; presumably some kernel/hyperparameter
    # combinations do not support it, so fall back to var_map=None.
    # NOTE(review): the bare except also hides unrelated errors -- confirm
    # which exception type is actually expected here.
    try:
        mgp_model = MappedGaussianProcess(
            grid_params=grid_params,
            unique_species=data["species"],
            n_cpus=1,
            lmp_file_name=lammps_location,
            var_map="simple",
        )
    except:
        mgp_model = MappedGaussianProcess(
            grid_params=grid_params,
            unique_species=data["species"],
            n_cpus=1,
            lmp_file_name=lammps_location,
            var_map=None,
        )

    all_mgp[f"{bodies}{multihyps}"] = mgp_model
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_build_map(all_gp, all_mgp, bodies, multihyps):
    """
    test the mapping for mc_simple kernel
    """
    gp_model = all_gp[f"{bodies}{multihyps}"]
    mgp_model = all_mgp[f"{bodies}{multihyps}"]
    # Build the spline maps from the trained GP; later tests compare mapped
    # predictions against the GP directly.
    mgp_model.build_map(gp_model)
    # with open(f'grid_{bodies}_{multihyps}.pickle', 'wb') as f:
    #     pickle.dump(mgp_model, f)
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_write_model(all_mgp, bodies, multihyps):
    """
    test the mapping for mc_simple kernel
    """
    mgp_model = all_mgp[f"{bodies}{multihyps}"]
    # Serialize in both supported formats; test_load_model reads these back.
    mgp_model.write_model(f"my_mgp_{bodies}_{multihyps}")

    mgp_model.write_model(f"my_mgp_{bodies}_{multihyps}", format="pickle")

    # Ensure that user is warned when a non-mean_only
    # model is serialized into a Dictionary
    with pytest.warns(Warning):
        mgp_model.var_map = "pca"
        mgp_model.as_dict()

    mgp_model.var_map = "simple"
    mgp_model.as_dict()
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_load_model(all_mgp, bodies, multihyps):
    """
    test the mapping for mc_simple kernel
    """
    # Round-trip both serialized formats written by test_write_model,
    # removing each file after a successful load.
    key = f"{bodies}{multihyps}"
    for extension in ("json", "pickle"):
        filename = f"my_mgp_{bodies}_{multihyps}.{extension}"
        all_mgp[key] = MappedGaussianProcess.from_file(filename)
        os.remove(filename)
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_cubic_spline(all_gp, all_mgp, bodies, multihyps):
    """
    test the predict for mc_simple kernel

    Checks the spline's analytic derivatives against a central finite
    difference of its mean values.
    """
    mgp_model = all_mgp[f"{bodies}{multihyps}"]
    delta = 1e-4  # finite-difference step

    if "3" in bodies:
        body_name = "threebody"
    elif "2" in bodies:
        body_name = "twobody"

    nmap = len(mgp_model.maps[body_name].maps)
    print("nmap", nmap)
    for i in range(nmap):
        maxvalue = np.max(np.abs(mgp_model.maps[body_name].maps[i].mean.__coeffs__))
        # Skip maps whose coefficients are identically zero.
        if maxvalue > 0:
            comp_code = mgp_model.maps[body_name].maps[i].species_code

            if "3" in bodies:
                # 3-D spline: check each of the three coordinates at a fixed point.
                c_pt = np.array([[0.3, 0.4, 0.5]])
                c, cderv = (
                    mgp_model.maps[body_name].maps[i].mean(c_pt, with_derivatives=True)
                )
                cderv = cderv.reshape([-1])

                for j in range(3):
                    a_pt = deepcopy(c_pt)
                    b_pt = deepcopy(c_pt)
                    a_pt[0][j] += delta
                    b_pt[0][j] -= delta
                    a = mgp_model.maps[body_name].maps[i].mean(a_pt)[0]
                    b = mgp_model.maps[body_name].maps[i].mean(b_pt)[0]
                    num_derv = (a - b) / (2 * delta)
                    print("spline", comp_code, num_derv, cderv[j])
                    assert np.isclose(num_derv, cderv[j], rtol=1e-2)

            elif "2" in bodies:
                # 1-D spline: check at the midpoint of the map's bounds.
                center = np.sum(mgp_model.maps[body_name].maps[i].bounds) / 2.0
                a_pt = np.array([[center + delta]])
                b_pt = np.array([[center - delta]])
                c_pt = np.array([[center]])
                a = mgp_model.maps[body_name].maps[i].mean(a_pt)[0]
                b = mgp_model.maps[body_name].maps[i].mean(b_pt)[0]
                c, cderv = (
                    mgp_model.maps[body_name].maps[i].mean(c_pt, with_derivatives=True)
                )
                cderv = cderv.reshape([-1])[0]
                num_derv = (a - b) / (2 * delta)
                print("spline", num_derv, cderv)
                assert np.isclose(num_derv, cderv, rtol=1e-2)
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_predict(all_gp, all_mgp, bodies, multihyps):
    """
    test the predict for mc_simple kernel

    Compares MGP predictions (energy, forces, stress, variance) against the
    exact GP on a random structure.
    """
    gp_model = all_gp[f"{bodies}{multihyps}"]
    mgp_model = all_mgp[f"{bodies}{multihyps}"]

    # # debug
    # filename = f'grid_{bodies}_{multihyps}.pickle'
    # with open(filename, 'rb') as f:
    #     mgp_model = pickle.load(f)

    nenv = 6
    cell = 1.0 * np.eye(3)
    cutoffs = gp_model.cutoffs
    unique_species = gp_model.training_statistics["species"]
    struc_test, f = get_random_structure(cell, unique_species, nenv)
    test_envi = env.AtomicEnvironment(
        struc_test, 0, cutoffs, cutoffs_mask=gp_model.hyps_mask
    )

    if "2" in bodies:
        kernel_name = "twobody"
    elif "3" in bodies:
        kernel_name = "threebody"
        # compare_triplet(mgp_model.maps['threebody'], gp_model, test_envi)

    mgp_f, mgp_e_var, mgp_s, mgp_e = mgp_model.predict(test_envi)

    assert Parameters.compare_dict(
        gp_model.hyps_mask, mgp_model.maps[kernel_name].hyps_mask
    )

    if multihyps:
        gp_e, gp_e_var = gp_model.predict_local_energy_and_var(test_envi)
        gp_f, gp_f_var = gp_model.predict_force_xyz(test_envi)
    else:
        gp_e, gp_f, gp_s, gp_e_var, _, _ = gp_model.predict_efs(test_envi)
        # Reorder and negate the GP stress components, presumably to match
        # the MGP's Voigt/sign convention -- TODO confirm.
        gp_s = -gp_s[[0, 3, 5, 4, 2, 1]]

    # check stress
    # NOTE(review): gp_s is only assigned in the non-multihyps branch above;
    # this line looks like it would raise NameError when multihyps=True --
    # confirm against upstream.
    assert np.allclose(mgp_s, gp_s, rtol=1e-2)

    # check mgp is within 2 meV/A of the gp
    print("mgp_en, gp_en", mgp_e, gp_e)
    assert np.allclose(mgp_e, gp_e, rtol=2e-3), (
        f"{bodies} body" f" energy mapping is wrong"
    )

    # check forces
    print("isclose?", mgp_f - gp_f, gp_f)
    assert np.allclose(mgp_f, gp_f, atol=1e-3), f"{bodies} body force mapping is wrong"

    if mgp_model.var_map == "simple":
        print(bodies, multihyps)
        # Per-atom variance must also agree with the GP's diagonal variance.
        for i in range(struc_test.nat):
            test_envi = env.AtomicEnvironment(
                struc_test, i, cutoffs, cutoffs_mask=gp_model.hyps_mask
            )
            mgp_pred = mgp_model.predict(test_envi)
            mgp_var = mgp_pred[1]
            gp_var = predict_atom_diag_var(test_envi, gp_model, kernel_name)
            print("mgp_var, gp_var", mgp_var, gp_var)
            assert np.allclose(mgp_var, gp_var, rtol=1e-2)

    print("struc_test positions", struc_test.positions, struc_test.species_labels)
@pytest.mark.skipif(
    not os.environ.get("lmp", False),
    reason=(
        "lmp not found "
        "in environment: Please install LAMMPS "
        "and set the $lmp env. "
        "variable to point to the executatble."
    ),
)
@pytest.mark.parametrize("bodies", body_list)
@pytest.mark.parametrize("multihyps", multi_list)
def test_lmp_predict(all_lmp, all_gp, all_mgp, bodies, multihyps):
    """
    test the lammps implementation

    Runs the same random structure through the LAMMPS mgp pair style and
    the FLARE ASE calculator and checks energy/forces/stress agree.
    """
    # pytest.skip()

    prefix = f"{bodies}{multihyps}"

    mgp_model = all_mgp[prefix]
    gp_model = all_gp[prefix]
    lmp_calculator = all_lmp[prefix]
    ase_calculator = FLARE_Calculator(gp_model, mgp_model, par=False, use_mapping=True)

    # create test structure
    np.random.seed(1)
    cell = np.diag(np.array([1, 1, 1])) * 4
    nenv = 10
    unique_species = gp_model.training_statistics["species"]
    cutoffs = gp_model.cutoffs
    struc_test, f = get_random_structure(cell, unique_species, nenv)

    # build ase atom from struc
    ase_atoms_flare = struc_test.to_ase_atoms()
    ase_atoms_flare = FLARE_Atoms.from_ase_atoms(ase_atoms_flare)
    ase_atoms_flare.calc = ase_calculator

    ase_atoms_lmp = deepcopy(struc_test).to_ase_atoms()
    ase_atoms_lmp.calc = lmp_calculator

    try:
        lmp_en = ase_atoms_lmp.get_potential_energy()
        flare_en = ase_atoms_flare.get_potential_energy()

        lmp_stress = ase_atoms_lmp.get_stress()
        flare_stress = ase_atoms_flare.get_stress()

        lmp_forces = ase_atoms_lmp.get_forces()
        flare_forces = ase_atoms_flare.get_forces()
    except Exception as e:
        # Restore the working directory even when the LAMMPS run fails,
        # since the calculator chdirs into its tmp dir.
        os.chdir(curr_path)
        print(e)
        raise e

    os.chdir(curr_path)

    # check that lammps agrees with mgp to within 1 meV/A
    print("energy", lmp_en - flare_en, flare_en)
    assert np.isclose(lmp_en, flare_en, atol=1e-3)
    print("force", lmp_forces - flare_forces, flare_forces)
    assert np.isclose(lmp_forces, flare_forces, atol=1e-3).all()
    print("stress", lmp_stress - flare_stress, flare_stress)
    assert np.isclose(lmp_stress, flare_stress, atol=1e-3).all()

    # check the lmp var
    # mgp_std = np.sqrt(mgp_pred[1])
    # print("isclose? diff:", lammps_stds[atom_num]-mgp_std, "mgp value", mgp_std)
    # assert np.isclose(lammps_stds[atom_num], mgp_std, rtol=1e-2)

    clean(prefix=prefix)
|
mir-group/flare
|
tests/test_mgp.py
|
Python
|
mit
| 12,643
|
[
"ASE",
"LAMMPS"
] |
d7dd81604769e8b6207537b6eb13a1285839a6b08058106129df2d7e290cccc6
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.talent_v4beta1.services.event_service import EventServiceAsyncClient
from google.cloud.talent_v4beta1.services.event_service import EventServiceClient
from google.cloud.talent_v4beta1.services.event_service import transports
from google.cloud.talent_v4beta1.types import event
from google.cloud.talent_v4beta1.types import event_service
from google.oauth2 import service_account
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
    """Static stand-in for a real mTLS client certificate source."""
    cert, key = b"cert bytes", b"key bytes"
    return cert, key
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a distinct endpoint when the client's default is localhost.

    Used so endpoint-selection tests can tell the default and the derived
    mTLS endpoint apart.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps *.googleapis.com hosts to their .mtls.
    variants and passes through None and non-Google hosts unchanged."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert EventServiceClient._get_default_mtls_endpoint(None) is None
    assert (
        EventServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    )
    # Already-mtls endpoints are idempotent.
    assert (
        EventServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
        == api_mtls_endpoint
    )
    assert (
        EventServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
        == sandbox_mtls_endpoint
    )
    assert (
        EventServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
        == sandbox_mtls_endpoint
    )
    assert EventServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [EventServiceClient, EventServiceAsyncClient,])
def test_event_service_client_from_service_account_info(client_class):
    """from_service_account_info builds a client using the credentials factory."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == "jobs.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.EventServiceGrpcTransport, "grpc"),
        (transports.EventServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    ],
)
def test_event_service_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """The transport toggles self-signed JWT only when always_use_jwt_access=True."""
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class", [EventServiceClient, EventServiceAsyncClient,])
def test_event_service_client_from_service_account_file(client_class):
    """Both from_service_account_file and its _json alias use the factory."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == "jobs.googleapis.com:443"
def test_event_service_client_get_transport_class():
    """get_transport_class returns the default gRPC transport, by name or not."""
    transport = EventServiceClient.get_transport_class()
    available_transports = [
        transports.EventServiceGrpcTransport,
    ]
    assert transport in available_transports

    transport = EventServiceClient.get_transport_class("grpc")
    assert transport == transports.EventServiceGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (EventServiceClient, transports.EventServiceGrpcTransport, "grpc"),
        (
            EventServiceAsyncClient,
            transports.EventServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    EventServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventServiceClient)
)
@mock.patch.object(
    EventServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(EventServiceAsyncClient),
)
def test_event_service_client_client_options(
    client_class, transport_class, transport_name
):
    """Verify how transport and ClientOptions choices drive construction.

    Scenarios covered, in order: an explicit transport instance, a
    transport name, an explicit api_endpoint, GOOGLE_API_USE_MTLS_ENDPOINT
    set to "never"/"always"/an unsupported value, an unsupported
    GOOGLE_API_USE_CLIENT_CERTIFICATE value, and a quota_project_id option.
    """
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(EventServiceClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()
    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(EventServiceClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()
    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        # __init__ is patched, so it must return None rather than a Mock.
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)
    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)
    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (EventServiceClient, transports.EventServiceGrpcTransport, "grpc", "true"),
        (
            EventServiceAsyncClient,
            transports.EventServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (EventServiceClient, transports.EventServiceGrpcTransport, "grpc", "false"),
        (
            EventServiceAsyncClient,
            transports.EventServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    EventServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventServiceClient)
)
@mock.patch.object(
    EventServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(EventServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_event_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the mTLS endpoint is chosen
    exactly when a client certificate exists AND
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".

    Three scenarios: cert supplied via client_options, cert supplied via
    ADC, and no cert available at all.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    # NOTE(review): `client` here is still the binding left
                    # over from the previous scenario; only its (patched)
                    # DEFAULT_*ENDPOINT attributes are read.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize("client_class", [EventServiceClient, EventServiceAsyncClient])
@mock.patch.object(
    EventServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventServiceClient)
)
@mock.patch.object(
    EventServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(EventServiceAsyncClient),
)
def test_event_service_client_get_mtls_endpoint_and_cert_source(client_class):
    """get_mtls_endpoint_and_cert_source honors the two env switches.

    GOOGLE_API_USE_CLIENT_CERTIFICATE gates whether a cert source is
    returned; GOOGLE_API_USE_MTLS_ENDPOINT ("never"/"always"/"auto")
    selects between the default and the mTLS endpoint.
    """
    mock_client_cert_source = mock.Mock()
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (EventServiceClient, transports.EventServiceGrpcTransport, "grpc"),
        (
            EventServiceAsyncClient,
            transports.EventServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_event_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes passed via ClientOptions are forwarded to the transport."""
    scoped_options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as transport_init:
        # __init__ is mocked out, so it must return None, not a Mock.
        transport_init.return_value = None
        client = client_class(client_options=scoped_options, transport=transport_name)
        transport_init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            EventServiceClient,
            transports.EventServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            EventServiceAsyncClient,
            transports.EventServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_event_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """A credentials_file in ClientOptions is handed to the transport."""
    file_options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as transport_init:
        transport_init.return_value = None
        client = client_class(client_options=file_options, transport=transport_name)
        transport_init.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_event_service_client_client_options_from_dict():
    """Client options may be supplied as a plain dict instead of ClientOptions."""
    with mock.patch(
        "google.cloud.talent_v4beta1.services.event_service.transports.EventServiceGrpcTransport.__init__"
    ) as transport_init:
        transport_init.return_value = None
        EventServiceClient(client_options={"api_endpoint": "squid.clam.whelk"})
        # The dict's api_endpoint must reach the transport as the host.
        transport_init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            EventServiceClient,
            transports.EventServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            EventServiceAsyncClient,
            transports.EventServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_event_service_client_create_channel_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """Credentials loaded from a file (not ADC) are used to build the channel."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        # Distinct credential objects so the assertion below proves the
        # file-loaded ones (not ADC's) reached create_channel.
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        create_channel.assert_called_with(
            "jobs.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/jobs",
            ),
            scopes=None,
            default_host="jobs.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "request_type", [event_service.CreateClientEventRequest, dict,]
)
def test_create_client_event(request_type, transport: str = "grpc"):
    """create_client_event routes the request through the stub and returns
    the ClientEvent the stub produced."""
    client = EventServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 makes every field optional, so an empty request suffices.
    request = request_type()
    with mock.patch.object(
        type(client.transport.create_client_event), "__call__"
    ) as rpc_mock:
        # Canned response for the faked RPC.
        rpc_mock.return_value = event.ClientEvent(
            request_id="request_id_value",
            event_id="event_id_value",
            event_notes="event_notes_value",
            job_event=event.JobEvent(type_=event.JobEvent.JobEventType.IMPRESSION),
        )
        result = client.create_client_event(request)
        # Exactly one stub invocation, carrying an equivalent request proto.
        assert len(rpc_mock.mock_calls) == 1
        _, call_args, _ = rpc_mock.mock_calls[0]
        assert call_args[0] == event_service.CreateClientEventRequest()
    # The canned response comes back unchanged.
    assert isinstance(result, event.ClientEvent)
    assert result.request_id == "request_id_value"
    assert result.event_id == "event_id_value"
    assert result.event_notes == "event_notes_value"
def test_create_client_event_empty_call():
    """Coverage failsafe: a call with no request object and no flattened
    fields still reaches the stub with a default request."""
    client = EventServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.create_client_event), "__call__"
    ) as rpc_mock:
        client.create_client_event()
        rpc_mock.assert_called()
        _, call_args, _ = rpc_mock.mock_calls[0]
        assert call_args[0] == event_service.CreateClientEventRequest()
@pytest.mark.asyncio
async def test_create_client_event_async(
    transport: str = "grpc_asyncio", request_type=event_service.CreateClientEventRequest
):
    """Async variant: the awaited RPC yields the faked ClientEvent."""
    client = EventServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # proto3 makes every field optional, so an empty request suffices.
    request = request_type()
    with mock.patch.object(
        type(client.transport.create_client_event), "__call__"
    ) as rpc_mock:
        # The async surface awaits the stub, so wrap the canned response.
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            event.ClientEvent(
                request_id="request_id_value",
                event_id="event_id_value",
                event_notes="event_notes_value",
            )
        )
        result = await client.create_client_event(request)
        # The stub was invoked with an equivalent request proto.
        assert len(rpc_mock.mock_calls)
        _, call_args, _ = rpc_mock.mock_calls[0]
        assert call_args[0] == event_service.CreateClientEventRequest()
    # The canned response comes back unchanged.
    assert isinstance(result, event.ClientEvent)
    assert result.request_id == "request_id_value"
    assert result.event_id == "event_id_value"
    assert result.event_notes == "event_notes_value"
@pytest.mark.asyncio
async def test_create_client_event_async_from_dict():
    """Dict requests take the same async path as proto requests."""
    await test_create_client_event_async(request_type=dict)
def test_create_client_event_field_headers():
    """URI-visible request fields are echoed as x-goog-request-params."""
    client = EventServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate a routing field with a non-empty value.
    request = event_service.CreateClientEventRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.create_client_event), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = event.ClientEvent()
        client.create_client_event(request)
        # One stub invocation, with the original request object.
        assert len(rpc_mock.mock_calls) == 1
        _, call_args, _ = rpc_mock.mock_calls[0]
        assert call_args[0] == request
        # The routing header was attached to the call metadata.
        _, _, call_kwargs = rpc_mock.mock_calls[0]
        assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_create_client_event_field_headers_async():
    """Async variant: routing fields surface as x-goog-request-params."""
    client = EventServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate a routing field with a non-empty value.
    request = event_service.CreateClientEventRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.create_client_event), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = grpc_helpers_async.FakeUnaryUnaryCall(event.ClientEvent())
        await client.create_client_event(request)
        # One stub invocation, with the original request object.
        assert len(rpc_mock.mock_calls)
        _, call_args, _ = rpc_mock.mock_calls[0]
        assert call_args[0] == request
        # The routing header was attached to the call metadata.
        _, _, call_kwargs = rpc_mock.mock_calls[0]
        assert ("x-goog-request-params", "parent=parent/value",) in call_kwargs["metadata"]
def test_create_client_event_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = EventServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(client.transport.create_client_event), "__call__"
    ) as rpc_mock:
        rpc_mock.return_value = event.ClientEvent()
        # Invoke with a truthy value for each flattened field.
        client.create_client_event(
            parent="parent_value",
            client_event=event.ClientEvent(request_id="request_id_value"),
        )
        # The request handed to the stub carries both field values.
        assert len(rpc_mock.mock_calls) == 1
        _, call_args, _ = rpc_mock.mock_calls[0]
        sent_request = call_args[0]
        assert sent_request.parent == "parent_value"
        assert sent_request.client_event == event.ClientEvent(
            request_id="request_id_value"
        )
def test_create_client_event_flattened_error():
    """Mixing a request object with flattened fields raises ValueError."""
    client = EventServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        client.create_client_event(
            event_service.CreateClientEventRequest(),
            parent="parent_value",
            client_event=event.ClientEvent(request_id="request_id_value"),
        )
@pytest.mark.asyncio
async def test_create_client_event_flattened_async():
    """Async variant: flattened keyword arguments are folded into the
    request proto."""
    client = EventServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_client_event), "__call__"
    ) as call:
        # Designate an appropriate return value for the call. The async
        # surface awaits the stub, so the response must be wrapped in a
        # fake call. (A redundant bare `event.ClientEvent()` assignment
        # that was immediately overwritten has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(event.ClientEvent())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_client_event(
            parent="parent_value",
            client_event=event.ClientEvent(request_id="request_id_value"),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].client_event
        mock_val = event.ClientEvent(request_id="request_id_value")
        assert arg == mock_val
@pytest.mark.asyncio
async def test_create_client_event_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises
    ValueError."""
    client = EventServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        await client.create_client_event(
            event_service.CreateClientEventRequest(),
            parent="parent_value",
            client_event=event.ClientEvent(request_id="request_id_value"),
        )
def test_credentials_transport_error():
    """Mutually exclusive client arguments must each raise ValueError."""
    def make_transport():
        # Fresh transport instance for each conflicting combination.
        return transports.EventServiceGrpcTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # credentials together with a transport instance
    with pytest.raises(ValueError):
        EventServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=make_transport(),
        )
    # a credentials file together with a transport instance
    with pytest.raises(ValueError):
        EventServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=make_transport(),
        )
    # an api_key together with a transport instance
    key_options = client_options.ClientOptions()
    key_options.api_key = "api_key"
    with pytest.raises(ValueError):
        EventServiceClient(client_options=key_options, transport=make_transport())
    # an api_key together with credentials
    mocked_options = mock.Mock()
    mocked_options.api_key = "api_key"
    with pytest.raises(ValueError):
        EventServiceClient(
            client_options=mocked_options,
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # scopes together with a transport instance
    with pytest.raises(ValueError):
        EventServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=make_transport(),
        )
def test_transport_instance():
    """A pre-built transport instance is adopted by the client untouched."""
    custom_transport = transports.EventServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = EventServiceClient(transport=custom_transport)
    assert client.transport is custom_transport
def test_transport_get_channel():
    """Both sync and async custom transports expose a usable grpc channel."""
    for transport_cls in (
        transports.EventServiceGrpcTransport,
        transports.EventServiceGrpcAsyncIOTransport,
    ):
        custom_transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert custom_transport.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.EventServiceGrpcTransport,
        transports.EventServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Constructing a transport without credentials falls back to ADC."""
    with mock.patch.object(google.auth, "default") as adc_mock:
        adc_mock.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc_mock.assert_called_once()
def test_transport_grpc_default():
    """Absent an explicit choice, the client picks the gRPC transport."""
    default_client = EventServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(default_client.transport, transports.EventServiceGrpcTransport)
def test_event_service_base_transport_error():
    """Supplying both credentials and a credentials_file is rejected."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.EventServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_event_service_base_transport():
    """Every RPC on the abstract base transport raises NotImplementedError."""
    # Build the base transport with its __init__ stubbed out.
    with mock.patch(
        "google.cloud.talent_v4beta1.services.event_service.transports.EventServiceTransport.__init__"
    ) as base_init:
        base_init.return_value = None
        base_transport = transports.EventServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # The base class declares, but must not implement, each RPC.
    for method_name in ("create_client_event",):
        with pytest.raises(NotImplementedError):
            getattr(base_transport, method_name)(request=object())
    with pytest.raises(NotImplementedError):
        base_transport.close()
def test_event_service_base_transport_with_credentials_file():
    """A credentials file given to the base transport is loaded with the
    service's default scopes and quota project."""
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds_mock, mock.patch(
        "google.cloud.talent_v4beta1.services.event_service.transports.EventServiceTransport._prep_wrapped_messages"
    ) as prep_mock:
        prep_mock.return_value = None
        load_creds_mock.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.EventServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds_mock.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/jobs",
            ),
            quota_project_id="octopus",
        )
def test_event_service_base_transport_with_adc():
    """With neither credentials nor a file, the base transport queries ADC."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc_mock, mock.patch(
        "google.cloud.talent_v4beta1.services.event_service.transports.EventServiceTransport._prep_wrapped_messages"
    ) as prep_mock:
        prep_mock.return_value = None
        adc_mock.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.EventServiceTransport()
        adc_mock.assert_called_once()
def test_event_service_auth_adc():
    """The client requests ADC credentials with the service default scopes."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc_mock:
        adc_mock.return_value = (ga_credentials.AnonymousCredentials(), None)
        EventServiceClient()
        adc_mock.assert_called_once_with(
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/jobs",
            ),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.EventServiceGrpcTransport,
        transports.EventServiceGrpcAsyncIOTransport,
    ],
)
def test_event_service_transport_auth_adc(transport_class):
    """Transports forward explicit scopes and quota project to ADC."""
    with mock.patch.object(google.auth, "default", autospec=True) as adc_mock:
        adc_mock.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc_mock.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/jobs",
            ),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.EventServiceGrpcTransport, grpc_helpers),
        (transports.EventServiceGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_event_service_transport_create_channel(transport_class, grpc_helpers):
    """Channel creation uses ADC credentials and the expected endpoint,
    scopes, and gRPC options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "jobs.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/jobs",
            ),
            scopes=["1", "2"],
            default_host="jobs.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [transports.EventServiceGrpcTransport, transports.EventServiceGrpcAsyncIOTransport],
)
def test_event_service_grpc_transport_client_cert_source_for_mtls(transport_class):
    """ssl_channel_credentials wins when given; otherwise the cert/key pair
    from client_cert_source_for_mtls is used to build SSL credentials."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            # The callback's cert/key pair must be what grpc received.
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_event_service_host_no_port():
    """An endpoint without a port gets the default :443 appended."""
    endpoint_options = client_options.ClientOptions(api_endpoint="jobs.googleapis.com")
    client = EventServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=endpoint_options,
    )
    assert client.transport._host == "jobs.googleapis.com:443"
def test_event_service_host_with_port():
    """An endpoint carrying an explicit port is used verbatim."""
    endpoint_options = client_options.ClientOptions(
        api_endpoint="jobs.googleapis.com:8000"
    )
    client = EventServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=endpoint_options,
    )
    assert client.transport._host == "jobs.googleapis.com:8000"
def test_event_service_grpc_transport_channel():
    """A caller-supplied channel is adopted as-is by the grpc transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.EventServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # PEP 8 (E711): identity comparison with None uses `is`, not `==`.
    assert transport._ssl_channel_credentials is None
def test_event_service_grpc_asyncio_transport_channel():
    """A user-supplied aio channel is adopted as-is by the async transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.EventServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # PEP 8 (E711): compare against None with identity, not equality.
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [transports.EventServiceGrpcTransport, transports.EventServiceGrpcAsyncIOTransport],
)
def test_event_service_transport_channel_mtls_with_client_cert_source(transport_class):
    """Deprecated mTLS args: client_cert_source output feeds grpc.ssl_channel_credentials."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            # The deprecated kwargs must still work, but raise a warning.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            # The cert/key pair from the callback is handed to
            # grpc.ssl_channel_credentials verbatim (values match what
            # client_cert_source_callback returns in this test module).
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            # The channel targets the mTLS endpoint, not `host`.
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [transports.EventServiceGrpcTransport, transports.EventServiceGrpcAsyncIOTransport],
)
def test_event_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated mTLS args: with no cert source, ADC-derived SslCredentials are used."""
    mock_ssl_cred = mock.Mock()
    # Patch SslCredentials so its property yields our mock without real I/O.
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            # Deprecated kwargs still work but must warn.
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            # Channel targets the mTLS endpoint with the ADC SSL creds.
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_company_path():
    """company_path formats project/tenant/company into a resource name."""
    project, tenant, company = "squid", "clam", "whelk"
    actual = EventServiceClient.company_path(project, tenant, company)
    assert actual == f"projects/{project}/tenants/{tenant}/companies/{company}"
def test_parse_company_path():
    """parse_company_path inverts company_path."""
    parts = {"project": "octopus", "tenant": "oyster", "company": "nudibranch"}
    path = EventServiceClient.company_path(**parts)
    # Round-trip: parsing the formatted path recovers the components.
    assert EventServiceClient.parse_company_path(path) == parts
def test_common_billing_account_path():
    """common_billing_account_path formats the billing-account resource name."""
    billing_account = "cuttlefish"
    actual = EventServiceClient.common_billing_account_path(billing_account)
    assert actual == f"billingAccounts/{billing_account}"
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts common_billing_account_path."""
    parts = {"billing_account": "mussel"}
    path = EventServiceClient.common_billing_account_path(**parts)
    # Round-trip: parsing the formatted path recovers the components.
    assert EventServiceClient.parse_common_billing_account_path(path) == parts
def test_common_folder_path():
    """common_folder_path formats the folder resource name."""
    folder = "winkle"
    assert EventServiceClient.common_folder_path(folder) == f"folders/{folder}"
def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path."""
    parts = {"folder": "nautilus"}
    path = EventServiceClient.common_folder_path(**parts)
    # Round-trip: parsing the formatted path recovers the components.
    assert EventServiceClient.parse_common_folder_path(path) == parts
def test_common_organization_path():
    """common_organization_path formats the organization resource name."""
    organization = "scallop"
    actual = EventServiceClient.common_organization_path(organization)
    assert actual == f"organizations/{organization}"
def test_parse_common_organization_path():
    """parse_common_organization_path inverts common_organization_path."""
    parts = {"organization": "abalone"}
    path = EventServiceClient.common_organization_path(**parts)
    # Round-trip: parsing the formatted path recovers the components.
    assert EventServiceClient.parse_common_organization_path(path) == parts
def test_common_project_path():
    """common_project_path formats the project resource name."""
    project = "squid"
    assert EventServiceClient.common_project_path(project) == f"projects/{project}"
def test_parse_common_project_path():
    """parse_common_project_path inverts common_project_path."""
    parts = {"project": "clam"}
    path = EventServiceClient.common_project_path(**parts)
    # Round-trip: parsing the formatted path recovers the components.
    assert EventServiceClient.parse_common_project_path(path) == parts
def test_common_location_path():
    """common_location_path formats the project/location resource name."""
    project, location = "whelk", "octopus"
    actual = EventServiceClient.common_location_path(project, location)
    assert actual == f"projects/{project}/locations/{location}"
def test_parse_common_location_path():
    """parse_common_location_path inverts common_location_path."""
    parts = {"project": "oyster", "location": "nudibranch"}
    path = EventServiceClient.common_location_path(**parts)
    # Round-trip: parsing the formatted path recovers the components.
    assert EventServiceClient.parse_common_location_path(path) == parts
def test_client_with_default_client_info():
    """client_info reaches _prep_wrapped_messages via client and via transport."""
    client_info = gapic_v1.client_info.ClientInfo()
    # Path 1: constructing the client forwards client_info to the transport.
    with mock.patch.object(
        transports.EventServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = EventServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    # Path 2: constructing the transport directly does the same.
    with mock.patch.object(
        transports.EventServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = EventServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """`async with client` closes the underlying grpc channel only on exit."""
    client = EventServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(getattr(client.transport, "grpc_channel")), "close"
    ) as close:
        async with client:
            # Still inside the context: nothing closed yet.
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """`with client` closes the named channel attribute of each transport."""
    # NOTE: this local dict shadows the module-level `transports` import for
    # the remainder of this function.
    transports = {
        "grpc": "_grpc_channel",
    }
    for transport, close_name in transports.items():
        client = EventServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                # Still inside the context: nothing closed yet.
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """The client context manager delegates close() to its transport."""
    transports = [
        "grpc",
    ]
    for transport in transports:
        client = EventServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
@pytest.mark.parametrize(
    "client_class,transport_class",
    [
        (EventServiceClient, transports.EventServiceGrpcTransport),
        (EventServiceAsyncClient, transports.EventServiceGrpcAsyncIOTransport),
    ],
)
def test_api_key_credentials(client_class, transport_class):
    """An api_key in client options is exchanged for API-key credentials."""
    # `create=True` because get_api_key_credentials may not exist on older
    # versions of google.auth._default.
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The transport must be built with the exchanged credentials,
            # not with application-default credentials.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
|
googleapis/python-talent
|
tests/unit/gapic/talent_v4beta1/test_event_service.py
|
Python
|
apache-2.0
| 56,292
|
[
"Octopus"
] |
d3dbdabc0ea9bc8c78f228c19b20898e30ff487b35f4e193d3d6d38037d86585
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python3
"""Implementation of twin_sac, a mix of TD3 (https://arxiv.org/abs/1802.09477) and SAC (https://arxiv.org/abs/1801.01290, https://arxiv.org/abs/1812.05905).
Overall structure and hyperparameters are taken from TD3. However, the algorithm
itself represents a version of SAC.
"""
import typing
from typing import Optional
from dm_env import specs as dm_env_specs
import tensorflow as tf
import tensorflow_probability as tfp
from tf_agents.utils import object_identity
from representation_batch_rl.batch_rl.encoders import ImageEncoder
tfd = tfp.distributions
LOG_STD_MIN = -20
LOG_STD_MAX = 2
class BasePolicy(tf.keras.Model):
  """Base class for policies.

  Builds a `trunk` MLP mapping states to raw action outputs, plus the
  affine constants (`action_mean`, `action_scale`) used by subclasses to
  map squashed outputs into the action-spec bounds.
  """

  def __init__(self,
               state_dim,
               action_dim,
               action_spec,
               hidden_dims = (256, 256),
               eps = 1e-6):
    """Creates an actor.

    Args:
      state_dim: State size.
      action_dim: Action size (raw trunk output width).
      action_spec: Action spec.
      hidden_dims: List of hidden dimensions.
      eps: Epsilon for numerical stability.
    """
    super().__init__()
    # Orthogonal init scaled for ReLU; final layer near-zero init.
    relu_gain = tf.math.sqrt(2.0)
    relu_orthogonal = tf.keras.initializers.Orthogonal(relu_gain)
    near_zero_orthogonal = tf.keras.initializers.Orthogonal(1e-2)
    layers = []
    for hidden_dim in hidden_dims:
      layers.append(
          tf.keras.layers.Dense(
              hidden_dim,
              activation=tf.nn.relu,
              kernel_initializer=relu_orthogonal))
    inputs = tf.keras.Input(shape=(state_dim,))
    outputs = tf.keras.Sequential(
        layers + [tf.keras.layers.Dense(
            action_dim, kernel_initializer=near_zero_orthogonal)]
    )(inputs)
    self.trunk = tf.keras.Model(inputs=inputs, outputs=outputs)
    self.action_spec = action_spec
    # Midpoint and half-range of the action interval, used to rescale
    # outputs from [-1, 1] into [minimum, maximum].
    self.action_mean = tf.constant(
        (action_spec.maximum + action_spec.minimum) / 2.0, dtype=tf.float32)
    self.action_scale = tf.constant(
        (action_spec.maximum - action_spec.minimum) / 2.0, dtype=tf.float32)
    self.eps = eps
# NOTE(review): class name misspells "Gaussian"; kept for caller compatibility.
class MixtureGuassianPolicy(BasePolicy):
  """Mixture-of-Gaussians policy with TanH squashing."""

  def __init__(self, state_dim,
               action_spec,
               hidden_dims = (256, 256),
               num_components = 5):
    # Trunk outputs 3 values (logit, mu, log_std) per component per action dim.
    super().__init__(
        state_dim,
        num_components * action_spec.shape[0] * 3,
        action_spec,
        hidden_dims=hidden_dims)
    self._num_components = num_components

  def _get_dist_and_mode(
      self,
      states,
      out = None,
      stddev = 1.0):
    """Returns a tf.Distribution for given states modes of this distribution.

    Args:
      states: Batch of states.
      out: Batch of neural net outputs; computed from `states` if None.
      stddev: Standard deviation of sampling distribution (unused here;
        kept for interface parity with DiagGuassianPolicy).
    """
    if out is None:
      out = self.trunk(states)
    logits, mu, log_std = tf.split(out, num_or_size_splits=3, axis=1)
    # Clamp log-std for numerical stability before exponentiating.
    log_std = tf.clip_by_value(log_std, LOG_STD_MIN, LOG_STD_MAX)
    std = tf.exp(log_std)
    # Reshape flat outputs to (batch, action_dim, num_components).
    shape = [tf.shape(std)[0], -1, self._num_components]
    logits = tf.reshape(logits, shape)
    mu = tf.reshape(mu, shape)
    std = tf.reshape(std, shape)
    # Each component: Normal -> Tanh -> rescale into action bounds.
    components_distribution = tfd.TransformedDistribution(
        tfd.Normal(loc=mu, scale=std),
        tfp.bijectors.Chain([
            tfp.bijectors.Shift(
                shift=tf.transpose(
                    tf.stack(self._num_components * [self.action_mean]))),
            tfp.bijectors.Scale(
                scale=tf.transpose(
                    tf.stack(self._num_components * [self.action_scale]))),
            tfp.bijectors.Tanh(),
        ]))
    distribution = tfd.MixtureSameFamily(
        mixture_distribution=tfd.Categorical(logits=logits),
        components_distribution=components_distribution)
    return tfd.Independent(distribution)

  @tf.function
  def call(
      self,
      states,
      out = None,
      sample = False,
      with_log_probs = False
  ):
    """Computes actions for given inputs.

    Args:
      states: Batch of states.
      out: Batch of neural net outputs.
      sample: Whether to sample actions.
      with_log_probs: Whether to return log probability of sampled actions.

    Returns:
      Sampled actions.
    """
    if sample:
      dist = self._get_dist_and_mode(states, out)
    else:
      # stddev=0.0 is ignored by _get_dist_and_mode for the mixture; this
      # branch still samples from the full distribution.
      dist = self._get_dist_and_mode(states, out, stddev=0.0)
    actions = dist.sample()
    if with_log_probs:
      return actions, dist.log_prob(actions)
    else:
      return actions

  @tf.function
  def log_probs(
      self,
      states,
      actions,
      out = None,
      with_entropy = False
  ):
    # Clip actions slightly inside the bounds so the Tanh bijector's
    # inverse log-prob stays finite.
    actions = tf.clip_by_value(actions, self.action_spec.minimum + self.eps,
                               self.action_spec.maximum - self.eps)
    dist = self._get_dist_and_mode(states, out)
    sampled_actions = dist.sample()
    sampled_actions = tf.clip_by_value(sampled_actions,
                                       self.action_spec.minimum + self.eps,
                                       self.action_spec.maximum - self.eps)
    if with_entropy:
      # Entropy estimated as -log prob of a fresh sample.
      return dist.log_prob(actions), -dist.log_prob(sampled_actions)
    else:
      return dist.log_prob(actions)
# NOTE(review): class name misspells "Gaussian"; kept for caller compatibility.
class DiagGuassianPolicy(BasePolicy):
  """Diagonal Gaussian policy with TanH squashing."""

  def __init__(self, state_dim,
               action_spec,
               hidden_dims = (256, 256),
               encoder = None):
    # Trunk outputs (mu, log_std) per action dimension.
    super().__init__(state_dim, action_spec.shape[0] * 2, action_spec,
                     hidden_dims=hidden_dims)
    self.encoder = encoder

  def _get_dist_and_mode(
      self,
      states,
      out = None,
      stddev = 1.0):
    """Returns a tf.Distribution for given states modes of this distribution.

    Args:
      states: Batch of states.
      out: Batch of neural net outputs; computed from `states` if None.
      stddev: Standard deviation of sampling distribution; 0.0 collapses
        the distribution to its mode (deterministic action).
    """
    if out is None:
      out = self.trunk(states)
    mu, log_std = tf.split(out, num_or_size_splits=2, axis=1)
    # Clamp log-std for numerical stability before exponentiating.
    log_std = tf.clip_by_value(log_std, LOG_STD_MIN, LOG_STD_MAX)
    std = tf.exp(log_std)
    # Normal -> Tanh -> rescale into action bounds.
    dist = tfd.TransformedDistribution(
        tfd.MultivariateNormalDiag(loc=mu, scale_diag=std * stddev),
        tfp.bijectors.Chain([
            tfp.bijectors.Shift(shift=self.action_mean),
            tfp.bijectors.Scale(scale=self.action_scale),
            tfp.bijectors.Tanh(),
        ]))
    return dist

  @tf.function
  def call(
      self,
      states,
      out = None,
      sample = False,
      with_log_probs = False
  ):
    """Computes actions for given inputs.

    Args:
      states: Batch of states.
      out: Batch of neural net outputs.
      sample: Whether to sample actions.
      with_log_probs: Whether to return log probability of sampled actions.

    Returns:
      Sampled actions.
    """
    if self.encoder is not None:
      states = self.encoder(states)
    if sample:
      dist = self._get_dist_and_mode(states, out)
    else:
      dist = self._get_dist_and_mode(states, out, stddev=0.0)
    actions = dist.sample()
    if with_log_probs:
      return actions, dist.log_prob(actions)
    else:
      return actions

  @tf.function
  def log_probs(
      self,
      states,
      actions,
      out = None,
      with_entropy = False
  ):
    if self.encoder is not None:
      states = self.encoder(states)
    # Clip actions slightly inside the bounds so the Tanh bijector's
    # inverse log-prob stays finite.
    actions = tf.clip_by_value(actions, self.action_spec.minimum + self.eps,
                               self.action_spec.maximum - self.eps)
    dist = self._get_dist_and_mode(states, out)
    sampled_actions = dist.sample()
    sampled_actions = tf.clip_by_value(sampled_actions,
                                       self.action_spec.minimum + self.eps,
                                       self.action_spec.maximum - self.eps)
    if with_entropy:
      # Entropy estimated as -log prob of a fresh sample.
      return dist.log_prob(actions), -dist.log_prob(sampled_actions)
    else:
      return dist.log_prob(actions)

  @property
  def trainable_variables(self):
    tvars = super(DiagGuassianPolicy, self).trainable_variables
    if self.encoder is None:
      return tvars
    else:
      # Remove the encoder conv2d variables (Policy shouldn't update the conv2d
      # vars). Note that a call to stop_gradient on the fprop isn't enough to
      # ensure that this is the case, this is because conv2d vars are shared
      # with the critic and so they can get updated when bpropping through the
      # critic to minimze the actor loss.
      encoder_variables = object_identity.ObjectIdentitySet(
          self.encoder.conv_stack.trainable_variables)
      return [v for v in tvars if v not in encoder_variables]
class DeterministicPolicy(BasePolicy):
  """Deterministic policy with TanH squashing."""

  def __init__(self, state_dim, action_spec,
               stddev):
    """Creates a deterministic policy.

    Args:
      state_dim: State size.
      action_spec: Action spec.
      stddev: Noise scale (for TD3-style exploration noise when sampling).
    """
    super().__init__(state_dim, action_spec.shape[0], action_spec)
    self._noise = tfd.Normal(loc=0.0, scale=stddev)

  @tf.function
  def call(
      self,
      states,
      sample = False
  ):
    """Computes actions for given inputs.

    Args:
      states: Batch of states.
      sample: Whether to sample actions (adds clipped Gaussian noise).

    Returns:
      Mode actions, sampled actions.
    """
    actions = tf.nn.tanh(self.trunk(states))
    if sample:
      actions = actions + self._noise.sample(actions.shape)
      actions = tf.clip_by_value(actions, -1.0, 1.0)
    # NOTE(review): rescales as (a + mean) * scale rather than
    # a * scale + mean; correct only when action_mean == 0 — confirm against
    # the environments used (CVAEPolicy.decode does the same).
    return (actions + self.action_mean) * self.action_scale
class CVAEPolicy(BasePolicy):
  """Conditional variational autoencoder."""

  def __init__(self, state_dim, action_spec, latent_dim):
    """Creates an actor.

    Args:
      state_dim: State size.
      action_spec: Action spec.
      latent_dim: Size of latent space.
    """
    action_dim = action_spec.shape[0]
    super().__init__(state_dim, action_dim, action_spec)
    # The base-class trunk and eps are unused by the CVAE; drop them.
    del self.trunk
    del self.eps
    self.latent_dim = latent_dim
    relu_gain = tf.math.sqrt(2.0)
    relu_orthogonal = tf.keras.initializers.Orthogonal(relu_gain)
    # Encoder: (state, action) -> (mean, logvar) of the latent posterior.
    self.encoder = tf.keras.Sequential([
        tf.keras.layers.Dense(
            750,
            input_dim=state_dim + action_dim,
            activation='relu',
            kernel_initializer=relu_orthogonal),
        tf.keras.layers.Dense(
            750, activation='relu', kernel_initializer=relu_orthogonal),
        tf.keras.layers.Dense(
            latent_dim + latent_dim, kernel_initializer='orthogonal'),
    ])
    # Decoder: (state, latent) -> action.
    self.decoder = tf.keras.Sequential([
        tf.keras.layers.Dense(
            750,
            input_dim=state_dim + latent_dim,
            activation='relu',
            kernel_initializer=relu_orthogonal),
        tf.keras.layers.Dense(
            750, activation='relu', kernel_initializer=relu_orthogonal),
        tf.keras.layers.Dense(action_dim, kernel_initializer='orthogonal'),
    ])

  @tf.function
  def sample(self, states):
    # Decode from a standard-normal latent draw.
    eps = tf.random.normal(shape=(tf.shape(states)[0], self.latent_dim))
    return self.decode(states, eps)

  def encode(self, states, actions):
    inputs = tf.concat([states, actions], -1)
    mean, logvar = tf.split(self.encoder(inputs),
                            num_or_size_splits=2, axis=1)
    # Clamp logvar for numerical stability.
    logvar = tf.clip_by_value(logvar, -4, 15)
    return mean, logvar

  def reparameterize(self, mean, logvar):
    # Reparameterization trick: z = mean + std * eps.
    eps = tf.random.normal(shape=tf.shape(mean))
    return eps * tf.exp(logvar * 0.5) + mean

  def decode(self, states, z):
    inputs = tf.concat([states, z], -1)
    outputs = self.decoder(inputs)
    outputs = tf.tanh(outputs)
    # NOTE(review): rescale order matches DeterministicPolicy.call;
    # correct only when action_mean == 0 — confirm.
    return (outputs + self.action_mean) * self.action_scale

  @tf.function
  def call(self,
           states,
           sample = True):
    """Computes actions for given inputs.

    Args:
      states: Batch of states.
      sample: Whether to sample actions (must be True for a CVAE).

    Returns:
      Mode actions, sampled actions.
    """
    assert sample, 'CVAE cannot be called without sampling'
    return self.sample(states)
|
google-research/google-research
|
representation_batch_rl/batch_rl/policies.py
|
Python
|
apache-2.0
| 12,631
|
[
"Gaussian"
] |
bfe75b22e2d8833b6762f00467605265a439ac96180ff668674029cfc9074746
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
%prog blast_file --qbed query.bed --sbed subject.bed
accepts .bed format: <http://genome.ucsc.edu/FAQ/FAQformat.html#format1>
and a blast file.
local dup filter:
if the input is query.bed and subject.bed, the script files query.localdups and subject.localdups are created containing the parent|offspring dups, as inferred by subjects hitting the same query or queries hitting the same subject.
repeat filter:
adjust the evalues in a dagchainer/blast file by the number of times they occur.
query/subjects that appear often will have the evalues raise (made less significant).
adjusted_evalue(A, B) = evalue(A, B) ** ((counts_of_blast / counts_of_genes) / (counts(A) + counts(B)))
cscore filter:
see supplementary info for sea anemone genome paper <http://www.sciencemag.org/cgi/content/abstract/317/5834/86>, formula below
cscore(A,B) = score(A,B) / max(best score for A, best score for B)
Finally a .raw file (which is the input for the quota-align pipeline <http://github.com/tanghaibao/quota-alignment/>) is created
"""
import sys
import os.path as op
import collections
import itertools
from math import log10
from bed_utils import Bed, BlastLine
sys.path.insert(0, op.join(op.dirname(__file__), ".."))
from grouper import Grouper
# helper functions in the BLAST filtering to get rid alternative splicings
def gene_name(st):
    """Strip the alternative-splicing suffix from a gene id.

    Annotation groups name splicings inconsistently: papaya ids ("ev...")
    are returned whole, rice ids ("Os...") drop a trailing "-N", and all
    others drop a trailing ".N".
    """
    if st.startswith("ev"):
        return st
    delimiter = "-" if st.startswith("Os") else "."
    return st.rsplit(delimiter, 1)[0]
def main(blast_file, options):
    """Filter a BLAST file against query/subject .bed annotations.

    Applies, in order: order-lookup/dedup, the optional global-density,
    local-duplicate (tandem), repeat, and cscore filters, then prints the
    surviving BLAST lines to stdout. (Python 2 code: print >> syntax, file().)
    """
    qbed_file, sbed_file = options.qbed, options.sbed
    # is this a self-self blast?
    is_self = (qbed_file == sbed_file)
    if is_self:
        print >>sys.stderr, "... looks like a self-self BLAST to me"
    global_density_ratio = options.global_density_ratio
    tandem_Nmax = options.tandem_Nmax
    filter_repeats = options.filter_repeats
    cscore = options.cscore
    localdups = options.localdups
    print >>sys.stderr, "read annotation files %s and %s" % (qbed_file, sbed_file)
    qbed = Bed(qbed_file)
    sbed = Bed(sbed_file)
    qorder = qbed.get_order()
    sorder = sbed.get_order()
    fp = file(blast_file)
    print >>sys.stderr, "read BLAST file %s (total %d lines)" % \
            (blast_file, sum(1 for line in fp))
    fp.seek(0)
    # mdb added 3/18/16 for Last v731
    # Skip comment lines, then sort best-score-first (later filters rely
    # on this ordering).
    blasts = []
    for line in fp:
        if not line.startswith("#"):
            blasts.append(BlastLine(line))
    blasts = sorted(blasts, key=lambda b: b.score, reverse=True)
    # mdb removed 3/18/16 for Last v731
    # blasts = sorted([BlastLine(line) for line in fp], \
    #    key=lambda b: b.score, reverse=True)
    filtered_blasts = []
    seen = set()
    ostrip = options.strip_names
    for b in blasts:
        query, subject = b.query, b.subject
        #if ostrip:
        #    query, subject = gene_name(query), gene_name(subject)
        # Drop hits whose ids are absent from the annotations.
        if query not in qorder:
            print >>sys.stderr, "WARNING: %s not in %s" % (query, qbed.filename)
            continue
        if subject not in sorder:
            print >>sys.stderr, "WARNING: %s not in %s" % (subject, sbed.filename)
            continue
        qi, q = qorder[query]
        si, s = sorder[subject]
        if is_self and qi > si:
            # move all hits to same side when doing self-self BLAST
            query, subject = subject, query
            qi, si = si, qi
            q, s = s, q
        # Keep only the best-scoring hit per (query, subject) pair.
        key = query, subject
        if key in seen: continue
        seen.add(key)
        b.query, b.subject = key
        b.qi, b.si = qi, si
        b.qseqid, b.sseqid = q['seqid'], s['seqid']
        filtered_blasts.append(b)
    if global_density_ratio:
        print >>sys.stderr, "running the global_density filter" + \
                "(global_density_ratio=%d)..." % options.global_density_ratio
        gene_count = len(qorder) + len(sorder)
        before_filter = len(filtered_blasts)
        filtered_blasts = filter_to_global_density(filtered_blasts, gene_count,
                global_density_ratio)
        print >>sys.stderr, "after filter (%d->%d)..." % (before_filter, len(filtered_blasts))
    if tandem_Nmax:
        print >>sys.stderr, "running the local dups filter (tandem_Nmax=%d)..." % tandem_Nmax
        # Group tandem duplicates on each side, then collapse offspring ids
        # onto their "mother" gene.
        qtandems = tandem_grouper(qbed, filtered_blasts,
                flip=True, tandem_Nmax=tandem_Nmax)
        standems = tandem_grouper(sbed, filtered_blasts,
                flip=False, tandem_Nmax=tandem_Nmax)
        qdups_fh = open(op.splitext(qbed_file)[0] + ".localdups", "w") if localdups else None
        if is_self:
            # Self-self: merge subject groups into the query grouper so a
            # single dup mapping covers both sides.
            for s in standems: qtandems.join(*s)
            qdups_to_mother = write_localdups(qdups_fh, qtandems, qbed)
            sdups_to_mother = qdups_to_mother
        else:
            qdups_to_mother = write_localdups(qdups_fh, qtandems, qbed)
            sdups_fh = open(op.splitext(sbed_file)[0] + ".localdups", "w") if localdups else None
            sdups_to_mother = write_localdups(sdups_fh, standems, sbed)
        if localdups:
            # write out new .bed after tandem removal
            write_new_bed(qbed, qdups_to_mother)
            if not is_self:
                write_new_bed(sbed, sdups_to_mother)
        before_filter = len(filtered_blasts)
        filtered_blasts = list(filter_tandem(filtered_blasts, \
                qdups_to_mother, sdups_to_mother))
        print >>sys.stderr, "after filter (%d->%d)..." % \
                (before_filter, len(filtered_blasts))
        # Rebuild annotations/orders without the removed duplicates.
        qbed.beds = [x for x in qbed if x["accn"] not in qdups_to_mother]
        sbed.beds = [x for x in sbed if x["accn"] not in sdups_to_mother]
        qorder = qbed.get_order()
        sorder = sbed.get_order()
    if filter_repeats:
        before_filter = len(filtered_blasts)
        print >>sys.stderr, "running the repeat filter",
        filtered_blasts = list(filter_repeat(filtered_blasts))
        print >>sys.stderr, "after filter (%d->%d)..." % (before_filter, len(filtered_blasts))
    if cscore:
        before_filter = len(filtered_blasts)
        print >>sys.stderr, "running the cscore filter (cscore>=%.2f)..." % cscore
        filtered_blasts = list(filter_cscore(filtered_blasts, cscore=cscore))
        print >>sys.stderr, "after filter (%d->%d)..." % (before_filter, len(filtered_blasts))
    # this is the final output we will write to after BLAST filters
    #raw_name = "%s.raw" % op.splitext(blast_file)[0]
    #raw_fh = open(raw_name, "w")
    #write_raw(qorder, sorder, filtered_blasts, raw_fh)
    write_new_blast(filtered_blasts)
def write_localdups(dups_fh, tandems, bed):
    """Write tandem groups to `dups_fh` (if given) and map dups to mothers.

    Within each group, genes are ordered by decreasing span (ties broken by
    accession) so the largest gene becomes the "mother". Returns a dict
    {offspring_accn: mother_accn}.
    """
    if dups_fh:
        print >>sys.stderr, "write local dups to file", dups_fh.name
        print >>dups_fh, "#Local duplications file"
    tandem_groups = []
    for group in tandems:
        rows = [bed[i] for i in group]
        # within the tandem groups, genes are sorted with decreasing size
        rows.sort(key=lambda a: (-abs(a['end'] - a['start']), a['accn']))
        tandem_groups.append([row['accn'] for row in rows])
    dups_to_mother = {}
    for accns in sorted(tandem_groups):
        if dups_fh:
            print >>dups_fh, "\t".join(accns)
        # All members after the first map to the group representative.
        for dup in accns[1:]:
            dups_to_mother[dup] = accns[0]
    return dups_to_mother
def write_new_bed(bed, children):
    """Write a `.nolocaldups` copy of `bed` with rows in `children` removed."""
    # generate local dup removed annotation files
    out_name = "%s.nolocaldups%s" % op.splitext(bed.filename)
    print >>sys.stderr, "write tandem-filtered bed file %s" % out_name
    fh = open(out_name, "w")
    for i, row in enumerate(bed):
        if row['accn'] in children: continue
        print >>fh, row
    fh.close()
def write_raw(qorder, sorder, filtered_blasts, raw_fh):
    """Write the .raw file consumed by the quota-alignment pipeline.

    Each line is: qseqid, query rank, sseqid, subject rank, score, where
    score is int(-log10(evalue)) capped at 50 (an evalue of 0 maps to 50).
    """
    print >>sys.stderr, "write raw file %s" % raw_fh.name
    for b in filtered_blasts:
        qi, q = qorder[b.query]
        si, s = sorder[b.subject]
        qseqid, sseqid = q['seqid'], s['seqid']
        score = 50 if b.evalue == 0 else min(int(-log10(b.evalue)), 50)
        print >>raw_fh, "\t".join(map(str, (qseqid, qi, sseqid, si, score)))
def write_new_blast(filtered_blasts, fh=sys.stdout):
    """Print the surviving BLAST lines to `fh` (stdout by default)."""
    for b in filtered_blasts:
        print >>fh, b
# ---------------- All BLAST filters ----------------
def filter_to_global_density(blast_list, gene_count, global_density_ratio):
    """Keep at most gene_count * global_density_ratio hits.

    The caller (main) passes a best-score-first list, so truncation keeps
    the strongest hits.
    """
    max_hits = int(gene_count * global_density_ratio)
    print >>sys.stderr, "cutting at:", max_hits
    return blast_list[:max_hits]
def filter_cscore(blast_list, cscore=.5):
    """Yield hits whose c-score strictly exceeds `cscore`.

    cscore(A, B) = score(A, B) / max(best score for A, best score for B).
    """
    # First pass: best score observed for every query and subject id.
    best = {}
    for hit in blast_list:
        for name in (hit.query, hit.subject):
            if best.get(name, float("-inf")) < hit.score:
                best[name] = hit.score
    # Second pass: keep hits close enough to the best score of either end.
    for hit in blast_list:
        if hit.score / max(best[hit.query], best[hit.subject]) > cscore:
            yield hit
def filter_repeat(blast_list, evalue_cutoff=.05):
    """
    adjust the evalues in a dagchainer/blast file by the number of times they occur.
    query/subjects that appear often will have the evalues raise (made less
    significant).

    adjusted_evalue(A, B) = evalue(A, B) ** (expected_count / (counts(A) + counts(B)))
    Hits whose adjusted evalue is below `evalue_cutoff` are yielded.
    """
    # Count how often each query/subject id participates in a hit.
    counts = collections.defaultdict(int)
    for b in blast_list:
        counts[b.query] += 1
        counts[b.subject] += 1
    expected_count = len(blast_list) * 1. / len(counts)
    print >>sys.stderr, "(expected_count=%d)..." % expected_count
    for b in blast_list:
        count = counts[b.query] + counts[b.subject]
        # Evalues in (0, 1): a smaller exponent (frequent ids) moves the
        # value toward 1, i.e. less significant.
        adjusted_evalue = b.evalue ** (expected_count / count)
        if adjusted_evalue < evalue_cutoff: yield b
def filter_tandem(blast_list, qdups_to_mother, sdups_to_mother):
    """Collapse local-duplicate ids onto their mother gene and deduplicate.

    Hits are remapped in place, then emitted best-score-first with self
    hits and repeated (query, subject) pairs dropped.
    """
    remapped = []
    for hit in blast_list:
        # Replace offspring ids with their representative ("mother") id.
        hit.query = qdups_to_mother.get(hit.query, hit.query)
        hit.subject = sdups_to_mother.get(hit.subject, hit.subject)
        remapped.append(hit)
    emitted = set()
    for hit in sorted(remapped, key=lambda h: h.score, reverse=True):
        if hit.query == hit.subject:
            continue
        pair = (hit.query, hit.subject)
        if pair in emitted:
            continue
        emitted.add(pair)
        yield hit
def tandem_grouper(bed, blast_list, tandem_Nmax=10, flip=True):
    """Group genes that hit the same partner into tandem clusters.

    Builds (partner_id, (seqid, rank)) pairs from strong hits
    (evalue < 1e-10), then, per partner, joins consecutive ranks on the
    same seqid whose difference is at most tandem_Nmax. Returns a Grouper
    over the joined ranks.
    """
    if not flip:
        simple_blast = [(b.query, (b.sseqid, b.si)) for b in blast_list if b.evalue < 1e-10]
    else:
        simple_blast = [(b.subject, (b.qseqid, b.qi)) for b in blast_list if b.evalue < 1e-10]
    # itertools.groupby requires sorted input; sorting also orders each
    # partner's hits by (seqid, rank), so only neighbors need comparing.
    simple_blast.sort()
    standems = Grouper()
    for name, hits in itertools.groupby(simple_blast, key=lambda x:x[0]):
        # these are already sorted.
        hits = [x[1] for x in hits]
        for ia, a in enumerate(hits[:-1]):
            b = hits[ia + 1]
            # on the same chromosome and rank difference no larger than tandem_Nmax
            if b[1] - a[1] <= tandem_Nmax and b[0] == a[0]:
                standems.join(a[1], b[1])
    return standems
if __name__ == "__main__":
    # Command-line entry point: parse options, then run the filter pipeline
    # on the single positional BLAST file.
    import optparse
    parser = optparse.OptionParser(__doc__)
    parser.add_option("--qbed", dest="qbed",
            help="path to qbed")
    parser.add_option("--sbed", dest="sbed",
            help="path to sbed")
    parser.add_option("--no_strip_names", dest="strip_names", action="store_false", default=True,
            help="do not strip alternative splicing (e.g. At5g06540.1 -> At5g06540)")
    parser.add_option("--localdups", dest="localdups", action="store_true", default=False,
            help="generate .localdups and .nolocaldups.bed file")
    filter_group = optparse.OptionGroup(parser, "BLAST filters")
    filter_group.add_option("--tandem_Nmax", dest="tandem_Nmax", type="int", default=None,
            help="merge tandem genes within distance [default: %default]")
    filter_group.add_option("--filter_repeats", dest="filter_repeats", action="store_true", default=False,
            help="require higher e-value for repetitive matches BLAST.")
    filter_group.add_option("--cscore", type="float", default=None,
            help="retain hits that have good bitscore [default: %default]")
    filter_group.add_option("--global_density_ratio", type="float", default=None,
            help="maximum ratio of blast hits to genes a good value is 10. "
                 "if there are more blasts, only the those with the lowest "
                 "are kept. [default: %default]")
    parser.add_option_group(filter_group)
    (options, blast_files) = parser.parse_args()
    # Exactly one BLAST file plus both bed files are required.
    if not (len(blast_files) == 1 and options.qbed and options.sbed):
        sys.exit(parser.print_help())
    main(blast_files[0], options)
|
asherkhb/coge
|
bin/quota-alignment/scripts/blast_to_raw.py
|
Python
|
bsd-2-clause
| 12,941
|
[
"BLAST"
] |
d2345bac6d7a51afb74bdf23ff8760f80e7fa3d27671f554562d74ae1821b956
|
#!/usr/bin/env python
'''
CREATED:2014-01-24 12:42:43 by Brian McFee <brm2132@columbia.edu>
Compute beat evaluation metrics
Usage:
./beat_eval.py REFERENCE.TXT ESTIMATED.TXT
'''
from __future__ import print_function
import argparse
import sys
import os
import eval_utilities
import mir_eval
def process_arguments():
    '''Argparse function to get the program parameters'''

    # CLI: an optional JSON output path plus two positional
    # annotation files (reference first, estimate second).
    arg_parser = argparse.ArgumentParser(description='mir_eval beat detection '
                                                     'evaluation')
    arg_parser.add_argument('-o',
                            dest='output_file',
                            default=None,
                            type=str,
                            action='store',
                            help='Store results in json format')
    arg_parser.add_argument('reference_file',
                            action='store',
                            help='path to the reference annotation file')
    arg_parser.add_argument('estimated_file',
                            action='store',
                            help='path to the estimated annotation file')

    namespace = arg_parser.parse_args(sys.argv[1:])
    return vars(namespace)
if __name__ == '__main__':
    # Get the parameters
    parameters = process_arguments()

    # Load in data: each file is a list of event times, one per line.
    reference_beats = mir_eval.io.load_events(parameters['reference_file'])
    estimated_beats = mir_eval.io.load_events(parameters['estimated_file'])

    # Compute all the scores
    scores = mir_eval.beat.evaluate(reference_beats, estimated_beats)
    print("{} vs. {}".format(os.path.basename(parameters['reference_file']),
                             os.path.basename(parameters['estimated_file'])))
    eval_utilities.print_evaluation(scores)

    # Optionally persist the scores as JSON when -o was given.
    if parameters['output_file']:
        print('Saving results to: ', parameters['output_file'])
        eval_utilities.save_results(scores, parameters['output_file'])
|
bmcfee/mir_eval
|
evaluators/beat_eval.py
|
Python
|
mit
| 1,856
|
[
"Brian"
] |
fc5e631a18207a2a01fb8dcf4f055c374bbed1469935412d5ee86e2f2c2ba463
|
# Package version, kept both as a string and as an int tuple so callers
# can compare versions programmatically (e.g. WAXY_VERSION_TUPLE >= (0, 0, 1)).
WAXY_VERSION = "0.0.1"
WAXY_VERSION_TUPLE = tuple(map(int, WAXY_VERSION.split(".")))

# Standard dunder metadata mirroring the values above.
__version__ = WAXY_VERSION
__license__ = "BSD"
__author__ = "Brian Blais (bblais@bryant.edu)"
import sys
import core # builtin functions and such
import wx
from wx import Yield
from aboutbox import AboutBox
from application import Application
from artprovider import ArtProvider
from bitmap import Bitmap, BitmapFromData, BitmapFromFile
from bitmapbutton import BitmapButton
from button import Button
###from canvas import Canvas
from checkbox import CheckBox
from checklistbox import CheckListBox
from colordb import ColorDB
from colourdialog import ColourDialog,ColorDialog
from combobox import ComboBox
from containers import Container # do we need to publish this?
from customdialog import CustomDialog
from dialog import Dialog, showdialog
from directorydialog import DirectoryDialog,ChooseDirectory
###from dragdrop import FileDropTarget, TextDropTarget, URLDropTarget
from dropdownbox import DropDownBox
from filedialog import FileDialog
##from filetreeview import FileTreeView
##from findreplacedialog import FindReplaceDialog
##from flexgridframe import FlexGridFrame
from flexgridpanel import FlexGridPanel
from font import Font
##from fontdialog import FontDialog
from frame import Frame, HorizontalFrame, VerticalFrame
##from grid import Grid
##from gridframe import GridFrame
from gridpanel import GridPanel
##from groupbox import GroupBox
from htmlwindow import HTMLWindow
from image import Image, AddImageHandler, AddAllImageHandlers, ImageAsBitmap,ImagePanel
##from imagelist import ImageList
from keys import keys
from label import Label
from line import Line
from listbox import ListBox
##from listview import ListView, ListItemAttr
#from maskedtextbox import MaskedTextBox
from menu import Menu, MenuBar
from messagedialog import MessageDialog, ShowMessage
##from mdiframes import MDIChildFrame, MDIParentFrame
##from mousepointer import MousePointers
from multichoicedialog import MultiChoiceDialog
from notebook import NoteBook
##from overlaypanel import OverlayPanel
from panel import Panel, HorizontalPanel, VerticalPanel
##from plainframe import PlainFrame
##from plainpanel import PlainPanel
from progressdialog import ProgressDialog
##from radiobutton import RadioButton
##from scrollframe import ScrollFrame
##from shell import PyCrust, PyCrustFilling
##from simpleeditor import SimpleEditor
from singlechoicedialog import SingleChoiceDialog
from slider import Slider
from splitter import Splitter
from statusbar import StatusBar
##from styledtextbox import StyledTextBox
##from systemsettings import SystemSettings
from textbox import TextBox
from textentrydialog import TextEntryDialog
##from timer import Timer
##from treelistview import TreeListView
##from treeview import TreeView
from waxyobject import WaxyObject
|
bblais/plasticity
|
plasticity/dialogs/waxy/__init__.py
|
Python
|
mit
| 2,837
|
[
"Brian"
] |
c68ad578f1d7343e4a345cc8bbff2b78ec0da25697bfef3d0f33603f81261d24
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
from mantid.api import FrameworkManagerImpl, FunctionProperty, PythonAlgorithm, IFunction
from testhelpers import assertRaisesNothing
import unittest
import math
class FunctionPropertyTest(unittest.TestCase):
    """Tests for exposing FunctionProperty to Python.

    Covers construction, the type string, the default (empty) value, and
    round-tripping a function definition through a PythonAlgorithm.
    """

    class TestFunctionPropAlg(PythonAlgorithm):
        """Minimal algorithm declaring a single FunctionProperty named 'fun'."""

        def PyInit(self):
            self.declareProperty(FunctionProperty("fun"))

        def PyExec(self):
            fp = self.getProperty("fun")
            if not isinstance(fp, FunctionProperty):
                raise RuntimeError("Expected a FunctionProperty but found %s " % str(type(fp)))
            func = fp.value
            if not isinstance(func, IFunction):
                raise RuntimeError("Expected an IFunction but found %s " % str(type(func)))
            # Parameter 0 is Height for the Gaussian set by the test below;
            # verify it survived the string -> IFunction round trip.
            height = func.getParamValue(0)
            if math.fabs(height - 1.0) > 1e-12:
                raise RuntimeError("Height does not have the expected value")

    @classmethod
    def setUpClass(cls):
        # Initialise the framework once for the whole test case.
        FrameworkManagerImpl.Instance()

    def test_constructor_succeeds_with_non_empty_string_name(self):
        assertRaisesNothing(self, FunctionProperty, "Function")

    def test_type_string_returns_Function(self):
        func = FunctionProperty("fun")
        self.assertEqual("Function", func.type)

    def test_value_is_empty_string_for_default_property(self):
        func = FunctionProperty("name")
        # assertEqual instead of the deprecated assertEquals alias,
        # which was removed in Python 3.12.
        self.assertEqual(None, func.value)
        self.assertEqual("", func.valueAsStr)

    def test_valid_string_value_gives_function_object_as_value(self):
        alg = self.TestFunctionPropAlg()
        alg.initialize()
        alg.setProperty("fun", "name=Gaussian,PeakCentre=5.0,Height=1.0")
        alg.setRethrows(True)
        assertRaisesNothing(self, alg.execute)

    def test_invalid_string_value_gives_function_object_as_value(self):
        alg = self.TestFunctionPropAlg()
        alg.initialize()
        self.assertRaises(ValueError, alg.setProperty, "fun", "blah")
if __name__ == '__main__':
    # Allow running this test file directly with the unittest runner.
    unittest.main()
|
mganeva/mantid
|
Framework/PythonInterface/test/python/mantid/api/FunctionPropertyTest.py
|
Python
|
gpl-3.0
| 2,329
|
[
"Gaussian"
] |
bfad823cb46e543d7250e7cae53b2f96a3506495cea6a9d941623fd58088ab82
|
from setuptools import setup

# Executable helper scripts shipped alongside the package.
_scripts = [
    'gautools/aml.py',
    'gautools/create_runs.py',
    'gautools/geomRegex.py',
    'gautools/out_to_list.py',
    'gautools/out_to_list_sf.py',
    'gautools/submit_gaussian.py',
    'gautools/xtorun.py',
    'gautools/xyz_to_inp.py',
    'gautools/xyz_to_inpglob.py',
]

# Runtime requirements.
_requires = [
    'MDAnalysis>=0.17.0',
    'thtools',
    'numpy',
    'six',
    'paratemp',
    'parmed',
]

setup(
    name='gautools',
    packages=['gautools'],
    scripts=_scripts,
    url='https://github.com/theavey/QM-calc-scripts',
    license='Apache License 2.0',
    author='Thomas Heavey',
    author_email='thomasjheavey@gmail.com',
    description='A set of scripts that are useful for creating, submitting, '
                'and processing QM calculations',
    install_requires=_requires,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Science/Research',
        'Programming Language :: Python :: 3',
    ],
    zip_safe=True,
)
|
thompcinnamon/QM-calc-scripts
|
setup.py
|
Python
|
apache-2.0
| 1,048
|
[
"MDAnalysis"
] |
f734c71bfccbb22aaec5771167c0b1f14573fb49b6aaf928ab61005ea46452c2
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
Helper to generate OVH API consumer key. In a nutshell, the consumer key
identifies a specific user in an application while application key and
application secret identifies the application itself. In the case of ovh-cli
each instance of the CLI must hav its own, dedicated, set of credentials.
To generate application secret and application key, please visit:
- OVH Europe: https://eu.api.ovh.com/createApp/
- OVH North America: https://ca.api.ovh.com/createApp/
- Soyoustart Europe: https://eu.api.soyoustart.com/createApp/
- Soyoustart North America: https://ca.api.soyoustart.com/createApp/
- Kimsufi Europe: https://eu.api.kimsufi.com/createApp/
- Kimsufi North America: https://ca.api.kimsufi.com/createApp/
- Runabove North America: https://api.runabove.com/createApp/
You may then request a consumer key using this tool:
$ create-consumer-key.py [endpoint]
Where ``endpoint`` may be one of ``ovh-eu``, ``ovh-ca``, and so on.
Once generated, your application key, application secret and consumer key
must be set in eiter:
- ``./ovh.conf`` for an application specific configuration
- ``$HOME/.ovh.conf`` for a user wide configuration
- ``/etc/ovh.conf`` for a system wide / server configuration
This file will look like:
[default]
endpoint=ovh-eu
[ovh-eu]
application_key=my_app_key
application_secret=my_application_secret
;consumer_key=my_consumer_key
Alternatively, at runtime, configuration may be overloaded using environment
variables. For more informations regarding available configuration options,
please see https://github.com/ovh/python-ovh
'''
import sys
import ovh

# NOTE(review): this script is Python 2 only (print statements and
# ``raw_input``); it is a syntax error under Python 3.

# Load api endpoint from command line, if any
if len(sys.argv) == 1:
    endpoint=None
elif len(sys.argv) == 2:
    endpoint=sys.argv[1]
else:
    # More than one argument: show usage and fail.
    print >>sys.stderr, __doc__
    sys.exit(1)

if endpoint in ['-h', '--help']:
    print >>sys.stderr, __doc__
    sys.exit(0)

# Create a client using configuration (ovh.conf or environment variables);
# any failure here is most likely a missing/invalid configuration.
try:
    client = ovh.Client(endpoint)
except Exception as e:
    print e
    print >>sys.stderr, __doc__
    sys.exit(1)

# Request full API access: all four HTTP verbs on every path.
access_rules = [
    {'method': 'GET', 'path': '/*'},
    {'method': 'POST', 'path': '/*'},
    {'method': 'PUT', 'path': '/*'},
    {'method': 'DELETE', 'path': '/*'}
]

# Request token; the user must validate it in a browser before it becomes
# usable, hence the interactive pause below.
validation = client.request_consumerkey(access_rules)

print "Please visit %s to authenticate" % validation['validationUrl']
raw_input("and press Enter to continue...")

# Print nice welcome message confirming the key works.
print "Welcome", client.get('/me')['firstname']
print "Here is your Consumer Key: '%s'" % validation['consumerKey']
|
yadutaf/ovh-cli
|
create-consumer-key.py
|
Python
|
bsd-3-clause
| 2,679
|
[
"VisIt"
] |
29b2981978627b73d64152e89cf8fae320701be76b7d8b5dbff05e6f93cef59f
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
#
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import datetime as dt
class Lammps(CMakePackage):
    """LAMMPS stands for Large-scale Atomic/Molecular Massively
    Parallel Simulator. This package uses patch releases, not
    stable release.

    See https://github.com/spack/spack/pull/5342 for a detailed
    discussion.
    """
    homepage = "http://lammps.sandia.gov/"
    url = "https://github.com/lammps/lammps/archive/patch_1Sep2017.tar.gz"
    git = "https://github.com/lammps/lammps.git"

    tags = ['ecp', 'ecp-apps']

    # Patch releases identified by date (YYYYMMDD), with md5 checksums.
    version('develop', branch='master')
    version('20180629', '6d5941863ee25ad2227ff3b7577d5e7c')
    version('20180316', '25bad35679583e0dd8cb8753665bb84b')
    version('20180222', '4d0513e3183bd57721814d217fdaf957')
    version('20170922', '4306071f919ec7e759bda195c26cfd9a')
    version('20170901', '767e7f07289663f033474dfe974974e7')

    def url_for_version(self, version):
        """Map a YYYYMMDD version onto the dated patch tarball URL.

        Upstream names tarballs like ``patch_1Sep2017``: the day carries
        no leading zero, hence the ``lstrip('0')``.
        """
        vdate = dt.datetime.strptime(str(version), "%Y%m%d")
        return "https://github.com/lammps/lammps/archive/patch_{0}.tar.gz".format(
            vdate.strftime("%d%b%Y").lstrip('0'))

    # Optional LAMMPS packages, each exposed as a variant below.
    supported_packages = ['asphere', 'body', 'class2', 'colloid', 'compress',
                          'coreshell', 'dipole', 'granular', 'kspace', 'latte',
                          'manybody', 'mc', 'meam', 'misc', 'molecule',
                          'mpiio', 'peri', 'poems', 'python', 'qeq', 'reax',
                          'replica', 'rigid', 'shock', 'snap', 'srd',
                          'user-atc', 'user-h5md', 'user-lb', 'user-misc',
                          'user-netcdf', 'user-omp', 'voronoi']

    for pkg in supported_packages:
        variant(pkg, default=False,
                description='Activate the {0} package'.format(pkg))
    variant('lib', default=True,
            description='Build the liblammps in addition to the executable')
    variant('mpi', default=True,
            description='Build with mpi')

    depends_on('mpi', when='+mpi')
    depends_on('mpi', when='+mpiio')
    depends_on('fftw', when='+kspace')
    depends_on('voropp', when='+voronoi')
    depends_on('netcdf+mpi', when='+user-netcdf')
    depends_on('blas', when='+user-atc')
    depends_on('lapack', when='+user-atc')
    depends_on('latte@1.0.1', when='@:20180222+latte')
    depends_on('latte@1.1.1:', when='@20180316:20180628+latte')
    depends_on('latte@1.2.1:', when='@20180629:+latte')
    depends_on('blas', when='+latte')
    depends_on('lapack', when='+latte')
    depends_on('python', when='+python')
    depends_on('mpi', when='+user-lb')
    depends_on('mpi', when='+user-h5md')
    depends_on('hdf5', when='+user-h5md')

    conflicts('+body', when='+poems@:20180628')
    conflicts('+latte', when='@:20170921')
    conflicts('+python', when='~lib')
    conflicts('+qeq', when='~manybody')
    conflicts('+user-atc', when='~manybody')
    # The '+user-misc' conflict was previously declared twice; once is enough.
    conflicts('+user-misc', when='~manybody')
    conflicts('+user-phonon', when='~kspace')

    patch("lib.patch", when="@20170901")
    patch("660.patch", when="@20170922")

    root_cmakelists_dir = 'cmake'

    def cmake_args(self):
        """Assemble CMake arguments from the enabled variants.

        The 20180629 release renamed the CMake option prefixes
        (ENABLE_* -> BUILD_*/PKG_*), hence the version switch below.
        """
        spec = self.spec

        mpi_prefix = 'ENABLE'
        pkg_prefix = 'ENABLE'
        if spec.satisfies('@20180629:'):
            mpi_prefix = 'BUILD'
            pkg_prefix = 'PKG'

        args = [
            '-DBUILD_SHARED_LIBS={0}'.format(
                'ON' if '+lib' in spec else 'OFF'),
            '-D{0}_MPI={1}'.format(
                mpi_prefix,
                'ON' if '+mpi' in spec else 'OFF')
        ]

        if spec.satisfies('@20180629:+lib'):
            args.append('-DBUILD_LIB=ON')

        # One -D<prefix>_<PKG>=ON/OFF switch per supported package.
        for pkg in self.supported_packages:
            opt = '-D{0}_{1}'.format(pkg_prefix, pkg.upper())
            if '+{0}'.format(pkg) in spec:
                args.append('{0}=ON'.format(opt))
            else:
                args.append('{0}=OFF'.format(opt))

        if '+kspace' in spec:
            args.append('-DFFT=FFTW3')

        return args
|
krafczyk/spack
|
var/spack/repos/builtin/packages/lammps/package.py
|
Python
|
lgpl-2.1
| 5,249
|
[
"LAMMPS",
"NetCDF"
] |
cf623e6fbbb575c8eff1a3a35bb25e7d20d266036265c254b6c5f2135473c1aa
|
import pytest
from labkey.api_wrapper import APIWrapper
from labkey.exceptions import ServerContextError
pytestmark = pytest.mark.integration  # Mark all tests in this module as integration tests

# Names of the study, schema and dataset created by the fixtures below.
STUDY_NAME = "TestStudy"
SCHEMA_NAME = "study"
QUERY_NAME = "KrankenLevel"

# Domain design for the visit-based test dataset (two columns).
DATASET_DOMAIN = {
    "kind": "StudyDatasetVisit",
    "domainDesign": {
        "name": QUERY_NAME,
        "fields": [
            {"name": "kronk", "label": "krongggk", "rangeURI": "double"},
            {"name": "type", "label": "type", "rangeURI": "string"},
        ],
    },
}

# Two QC state definitions inserted/removed by the qc_states fixture.
TEST_QC_STATES = [
    {
        "label": "needs verification",
        "description": "that can not be right",
        "publicData": False,
    },
    {"label": "approved", "publicData": True},
]
@pytest.fixture(scope="session")
def study(api: APIWrapper):
    """Create a visit-based test study for the whole session; delete it after.

    Uses the raw createStudy.view/deleteStudy.view endpoints directly
    (there is no dedicated study wrapper in the Python API).
    """
    url = api.server_context.build_url("study", "createStudy.view")
    payload = {
        "shareVisits": "false",
        "shareDatasets": "false",
        "simpleRepository": "true",
        "securityString": "BASIC_READ",
        "defaultTimepointDuration": "1",
        "startDate": "2020-01-01",
        "timepointType": "VISIT",
        "subjectColumnName": "PeopleId",
        "subjectNounPlural": "Peoples",
        "subjectNounSingular": "People",
        "label": "Python Integration Tests Study",
    }
    created_study = api.server_context.make_request(url, payload, non_json_response=True)

    yield created_study

    # Teardown: remove the study so repeated runs start clean.
    url = api.server_context.build_url("study", "deleteStudy.view")
    api.server_context.make_request(url, {"confirm": "true"}, non_json_response=True)
@pytest.fixture(scope="session")
def dataset(api: APIWrapper, study):
    """Create the KrankenLevel dataset domain; drop it after the session."""
    # study is not used in this function, but the fixture is required to run because we need a study in order to create
    # a dataset
    api.domain.create(DATASET_DOMAIN)
    created_domain = api.domain.get(SCHEMA_NAME, QUERY_NAME)

    yield created_domain

    # Clean up
    api.domain.drop(SCHEMA_NAME, QUERY_NAME)
@pytest.fixture(scope="function")
def qc_states(api: APIWrapper, study):
    """Insert the two TEST_QC_STATES rows for a test; delete them afterwards.

    ``study`` is not used directly but guarantees the study exists first.
    """
    # A leftover debugging call (print of who_am_i()) was removed here:
    # it added an extra server round-trip and noise for every test.
    insert_result = api.query.insert_rows("core", "datastates", TEST_QC_STATES)

    yield insert_result

    # clean up
    cleanup_qc_states = [
        {"rowId": insert_result["rows"][0]["rowid"]},
        {"rowId": insert_result["rows"][1]["rowid"]},
    ]
    api.query.delete_rows("core", "datastates", cleanup_qc_states)
def test_api_select_rows(api: APIWrapper):
    """Smoke-test select_rows against the built-in core.Users table."""
    response = api.query.select_rows("core", "Users")
    # The response echoes the queried schema/query and returns at least
    # one user (the account running the integration tests).
    assert response["schemaName"] == "core"
    assert response["queryName"] == "Users"
    assert response["rowCount"] > 0
    assert len(response["rows"]) > 0
def test_create_dataset(dataset):
    """The dataset fixture should yield a domain named after QUERY_NAME."""
    assert dataset.name == QUERY_NAME
def test_create_duplicate_dataset(api: APIWrapper, dataset):
    """Creating a domain whose name already exists must fail server-side."""
    # Dataset fixture is not used directly here, but it is an argument so it gets created and cleaned up when this test
    # runs
    with pytest.raises(ServerContextError) as e:
        api.domain.create(DATASET_DOMAIN)
    expected = f"'500: A Dataset or Query already exists with the name \"{QUERY_NAME}\".'"
    assert e.value.message == expected
def test_create_qc_state_definition(qc_states):
    """The qc_states fixture should have inserted both TEST_QC_STATES rows."""
    assert qc_states["rowsAffected"] == 2
    assert qc_states["rows"][0]["label"] == "needs verification"
    assert qc_states["rows"][1]["label"] == "approved"
def test_update_qc_state_definition(api: APIWrapper, qc_states, study):
    """Editing a QC state's description via update_rows should persist."""
    updated_description = "for sure that is not right"
    target_rowid = qc_states["rows"][0]["rowid"]
    # Sanity check: the fixture row does not already carry the new text.
    assert qc_states["rows"][0]["description"] != updated_description
    payload = [{"rowid": target_rowid, "description": updated_description}]
    result = api.query.update_rows("core", "datastates", payload)
    assert result["rowsAffected"] == 1
    assert result["rows"][0]["description"] == updated_description
def test_insert_duplicate_labeled_qc_state_produces_error(api: APIWrapper, qc_states, study):
    """Inserting a QC state with an existing label must hit the unique constraint."""
    with pytest.raises(ServerContextError) as e:
        dupe_qc_state = [{"label": "needs verification", "publicData": "false"}]
        api.query.insert_rows("core", "datastates", dupe_qc_state)
    # Error text comes straight from the backing database (PostgreSQL here).
    assert "500: ERROR: duplicate key value violates unique constraint" in e.value.message
def test_cannot_delete_qc_state_in_use(api: APIWrapper, qc_states, study, dataset):
    """A QC state referenced by a dataset row cannot be deleted until released."""
    qc_state_rowid = qc_states["rows"][0]["rowid"]
    # NOTE(review): 'vitd' is not a field declared in DATASET_DOMAIN
    # (kronk/type); presumably the server ignores unknown columns -- verify.
    new_row = [
        {
            "ParticipantId": "2",
            "vitd": 4,
            "SequenceNum": "345",
            "QCState": qc_state_rowid,
        }
    ]
    insert_result = api.query.insert_rows(SCHEMA_NAME, QUERY_NAME, new_row)
    inserted_lsid = insert_result["rows"][0]["lsid"]
    assert insert_result["rowsAffected"] == 1
    assert insert_result["rows"][0]["QCState"] == qc_state_rowid

    # Deleting the in-use state must be rejected with a clear message.
    with pytest.raises(ServerContextError) as e:
        qc_state_to_delete = [{"rowid": qc_state_rowid}]
        api.query.delete_rows("core", "datastates", qc_state_to_delete)
    assert (
        e.value.message
        == "\"400: State 'needs verification' cannot be deleted as it is currently in use.\""
    )

    # now clean up/stop using it
    dataset_row_to_remove = [{"lsid": inserted_lsid}]
    api.query.delete_rows(SCHEMA_NAME, QUERY_NAME, dataset_row_to_remove)
|
LabKey/labkey-api-python
|
test/integration/test_query.py
|
Python
|
apache-2.0
| 5,294
|
[
"VisIt"
] |
6674e05c05b641ad94d8e1b25ae1cc35397de134c5bbeabc29485bbdc4345795
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import unittest
from pyscf import gto
from pyscf import scf
# Benzene (C6H6) test molecule; coordinates presumably in Angstrom
# (the pyscf default unit) -- TODO confirm.
mol = gto.Mole()
mol.verbose = 0
mol.output = None#"out_bz"
mol.atom = [
    ["C", (-0.65830719,  0.61123287, -0.00800148)],
    ["C", ( 0.73685281,  0.61123287, -0.00800148)],
    ["C", ( 1.43439081,  1.81898387, -0.00800148)],
    ["C", ( 0.73673681,  3.02749287, -0.00920048)],
    ["C", (-0.65808819,  3.02741487, -0.00967948)],
    ["C", (-1.35568919,  1.81920887, -0.00868348)],
    ["H", (-1.20806619, -0.34108413, -0.00755148)],
    ["H", ( 1.28636081, -0.34128013, -0.00668648)],
    ["H", ( 2.53407081,  1.81906387, -0.00736748)],
    ["H", ( 1.28693681,  3.97963587, -0.00925948)],
    ["H", (-1.20821019,  3.97969587, -0.01063248)],
    ["H", (-2.45529319,  1.81939187, -0.00886348)],]

# cc-pVDZ basis on all atoms.
mol.basis = {"H": 'ccpvdz',
             "C": 'ccpvdz',}
mol.build()
def tearDownModule():
    """Release the module-level molecule after all tests have run."""
    global mol
    del mol
class KnownValues(unittest.TestCase):
    def test_nr_rhf(self):
        """Non-relativistic RHF on benzene against a hard-coded reference energy."""
        rhf = scf.RHF(mol)
        # Tight convergence so the 9-decimal comparison below is meaningful.
        rhf.conv_tol = 1e-11
        self.assertAlmostEqual(rhf.scf(), -230.720825199, 9)
if __name__ == "__main__":
    # Run the full test suite when executed directly.
    print("Full Tests for c6h6")
    unittest.main()
|
gkc1000/pyscf
|
pyscf/scf/test/test_bz.py
|
Python
|
apache-2.0
| 1,828
|
[
"PySCF"
] |
abf5ee26ea927358ad5af15711f6e4371ed0268a5599d48ed7bdb684d710fc35
|
## NIFTY (Numerical Information Field Theory) has been developed at the
## Max-Planck-Institute for Astrophysics.
##
## Copyright (C) 2015 Max-Planck-Society
##
## Author: Marco Selig
## Project homepage: <http://www.mpa-garching.mpg.de/ift/nifty/>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
## See the GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
.. __ ____ __
.. /__/ / _/ / /_
.. __ ___ __ / /_ / _/ __ __
.. / _ | / / / _/ / / / / / /
.. / / / / / / / / / /_ / /_/ /
.. /__/ /__/ /__/ /__/ \___/ \___ / rg
.. /______/
NIFTY submodule for regular Cartesian grids.
"""
from __future__ import division
#from nifty import *
import os
import numpy as np
import pylab as pl
from matplotlib.colors import LogNorm as ln
from matplotlib.ticker import LogFormatter as lf
from nifty.nifty_core import about, \
random, \
space, \
field
import nifty.smoothing as gs
import powerspectrum as gp
try:
import gfft as gf
except(ImportError):
about.infos.cprint('INFO: "plain" gfft version 0.1.0')
import gfft_rg as gf
##-----------------------------------------------------------------------------
class rg_space(space):
"""
.. _____ _______
.. / __/ / _ /
.. / / / /_/ /
.. /__/ \____ / space class
.. /______/
NIFTY subclass for spaces of regular Cartesian grids.
Parameters
----------
num : {int, numpy.ndarray}
Number of gridpoints or numbers of gridpoints along each axis.
naxes : int, *optional*
Number of axes (default: None).
zerocenter : {bool, numpy.ndarray}, *optional*
Whether the Fourier zero-mode is located in the center of the grid
(or the center of each axis speparately) or not (default: True).
hermitian : bool, *optional*
Whether the fields living in the space follow hermitian symmetry or
not (default: True).
purelyreal : bool, *optional*
Whether the field values are purely real (default: True).
dist : {float, numpy.ndarray}, *optional*
Distance between two grid points along each axis (default: None).
fourier : bool, *optional*
Whether the space represents a Fourier or a position grid
(default: False).
Notes
-----
Only even numbers of grid points per axis are supported.
The basis transformations between position `x` and Fourier mode `k`
rely on (inverse) fast Fourier transformations using the
:math:`exp(2 \pi i k^\dagger x)`-formulation.
Attributes
----------
para : numpy.ndarray
One-dimensional array containing information on the axes of the
space in the following form: The first entries give the grid-points
along each axis in reverse order; the next entry is 0 if the
fields defined on the space are purely real-valued, 1 if they are
hermitian and complex, and 2 if they are not hermitian, but
complex-valued; the last entries hold the information on whether
the axes are centered on zero or not, containing a one for each
zero-centered axis and a zero for each other one, in reverse order.
datatype : numpy.dtype
Data type of the field values for a field defined on this space,
either ``numpy.float64`` or ``numpy.complex128``.
discrete : bool
Whether or not the underlying space is discrete, always ``False``
for regular grids.
vol : numpy.ndarray
One-dimensional array containing the distances between two grid
points along each axis, in reverse order. By default, the total
length of each axis is assumed to be one.
fourier : bool
Whether or not the grid represents a Fourier basis.
"""
epsilon = 0.0001 ## relative precision for comparisons
def __init__(self,num,naxes=None,zerocenter=True,hermitian=True,purelyreal=True,dist=None,fourier=False):
"""
Sets the attributes for an rg_space class instance.
Parameters
----------
num : {int, numpy.ndarray}
Number of gridpoints or numbers of gridpoints along each axis.
naxes : int, *optional*
Number of axes (default: None).
zerocenter : {bool, numpy.ndarray}, *optional*
Whether the Fourier zero-mode is located in the center of the
grid (or the center of each axis speparately) or not
(default: True).
hermitian : bool, *optional*
Whether the fields living in the space follow hermitian
symmetry or not (default: True).
purelyreal : bool, *optional*
Whether the field values are purely real (default: True).
dist : {float, numpy.ndarray}, *optional*
Distance between two grid points along each axis
(default: None).
fourier : bool, *optional*
Whether the space represents a Fourier or a position grid
(default: False).
Returns
-------
None
"""
## check parameters
para = np.array([],dtype=np.int)
if(np.isscalar(num)):
num = np.array([num],dtype=np.int)
else:
num = np.array(num,dtype=np.int)
if(np.any(num%2)): ## module restriction
raise ValueError(about._errors.cstring("ERROR: unsupported odd number of grid points."))
if(naxes is None):
naxes = np.size(num)
elif(np.size(num)==1):
num = num*np.ones(naxes,dtype=np.int,order='C')
elif(np.size(num)!=naxes):
raise ValueError(about._errors.cstring("ERROR: size mismatch ( "+str(np.size(num))+" <> "+str(naxes)+" )."))
para = np.append(para,num[::-1],axis=None)
para = np.append(para,2-(bool(hermitian) or bool(purelyreal))-bool(purelyreal),axis=None) ## {0,1,2}
if(np.isscalar(zerocenter)):
zerocenter = bool(zerocenter)*np.ones(naxes,dtype=np.int,order='C')
else:
zerocenter = np.array(zerocenter,dtype=np.bool)
if(np.size(zerocenter)==1):
zerocenter = zerocenter*np.ones(naxes,dtype=np.int,order='C')
elif(np.size(zerocenter)!=naxes):
raise ValueError(about._errors.cstring("ERROR: size mismatch ( "+str(np.size(zerocenter))+" <> "+str(naxes)+" )."))
para = np.append(para,zerocenter[::-1]*-1,axis=None) ## -1 XOR 0 (centered XOR not)
self.para = para
## set data type
if(not self.para[naxes]):
self.datatype = np.float64
else:
self.datatype = np.complex128
self.discrete = False
## set volume
if(dist is None):
dist = 1/num.astype(self.datatype)
elif(np.isscalar(dist)):
dist = self.datatype(dist)*np.ones(naxes,dtype=self.datatype,order='C')
else:
dist = np.array(dist,dtype=self.datatype)
if(np.size(dist)==1):
dist = dist*np.ones(naxes,dtype=self.datatype,order='C')
if(np.size(dist)!=naxes):
raise ValueError(about._errors.cstring("ERROR: size mismatch ( "+str(np.size(dist))+" <> "+str(naxes)+" )."))
if(np.any(dist<=0)):
raise ValueError(about._errors.cstring("ERROR: nonpositive distance(s)."))
self.vol = np.real(dist)[::-1]
self.fourier = bool(fourier)
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def naxes(self):
"""
Returns the number of axes of the grid.
Returns
-------
naxes : int
Number of axes of the regular grid.
"""
return (np.size(self.para)-1)//2
def zerocenter(self):
"""
Returns information on the centering of the axes.
Returns
-------
zerocenter : numpy.ndarray
Whether the grid is centered on zero for each axis or not.
"""
return self.para[-(np.size(self.para)-1)//2:][::-1].astype(np.bool)
    def dist(self):
        """
        Returns the distances between grid points along each axis.

        Returns
        -------
        dist : np.ndarray
            Distances between two grid points on each axis.
        """
        # vol stores the distances in reverse axis order; undo that here.
        return self.vol[::-1]
def dim(self,split=False):
"""
Computes the dimension of the space, i.e.\ the number of pixels.
Parameters
----------
split : bool, *optional*
Whether to return the dimension split up, i.e. the numbers of
pixels along each axis, or their product (default: False).
Returns
-------
dim : {int, numpy.ndarray}
Dimension(s) of the space. If ``split==True``, a
one-dimensional array with an entry for each axis is returned.
"""
## dim = product(n)
if(split):
return self.para[:(np.size(self.para)-1)//2]
else:
return np.prod(self.para[:(np.size(self.para)-1)//2],axis=0,dtype=None,out=None)
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def dof(self):
"""
Computes the number of degrees of freedom of the space, i.e.\ the
number of grid points multiplied with one or two, depending on
complex-valuedness and hermitian symmetry of the fields.
Returns
-------
dof : int
Number of degrees of freedom of the space.
"""
## dof ~ dim
if(self.para[(np.size(self.para)-1)//2]<2):
return np.prod(self.para[:(np.size(self.para)-1)//2],axis=0,dtype=None,out=None)
else:
return 2*np.prod(self.para[:(np.size(self.para)-1)//2],axis=0,dtype=None,out=None)
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def enforce_power(self,spec,size=None,**kwargs):
        """
        Provides a valid power spectrum array from a given object.

        Parameters
        ----------
        spec : {float, list, numpy.ndarray, nifty.field, function}
            Fiducial power spectrum from which a valid power spectrum is to
            be calculated. Scalars are interpreted as constant power
            spectra.

        Returns
        -------
        spec : numpy.ndarray
            Valid power spectrum.

        Other parameters
        ----------------
        size : int, *optional*
            Number of bands the power spectrum shall have (default: None).
        kindex : numpy.ndarray, *optional*
            Scale of each band.
        codomain : nifty.space, *optional*
            A compatible codomain for power indexing (default: None).
        log : bool, *optional*
            Flag specifying if the spectral binning is performed on logarithmic
            scale or not; if set, the number of used bins is set
            automatically (if not given otherwise); by default no binning
            is done (default: None).
        nbin : integer, *optional*
            Number of used spectral bins; if given `log` is set to ``False``;
            integers below the minimum of 3 induce an automatic setting;
            by default no binning is done (default: None).
        binbounds : {list, array}, *optional*
            User specific inner boundaries of the bins, which are preferred
            over the above parameters; by default no binning is done
            (default: None). vmin : {scalar, list, ndarray, field}, *optional*
            Lower limit of the uniform distribution if ``random == "uni"``
            (default: 0).
        """
        ## determine the band number (and a k-scale, if needed) when no
        ## explicit size was given or spec must be evaluated per band
        if(size is None)or(callable(spec)):
            ## explicit kindex
            kindex = kwargs.get("kindex",None)
            if(kindex is None):
                ## quick kindex
                if(self.fourier)and(not hasattr(self,"power_indices"))and(len(kwargs)==0):
                    kindex = gp.nklength(gp.nkdict_fast(self.para[:(np.size(self.para)-1)//2],self.vol,fourier=True))
                ## implicit kindex
                else:
                    try:
                        self.set_power_indices(**kwargs)
                    except:
                        ## fall back to a compatible codomain for indexing
                        codomain = kwargs.get("codomain",self.get_codomain())
                        codomain.set_power_indices(**kwargs)
                        kindex = codomain.power_indices.get("kindex")
                    else:
                        kindex = self.power_indices.get("kindex")
            size = len(kindex)
        ## coerce ``spec`` into an array of self.datatype
        if(isinstance(spec,field)):
            spec = spec.val.astype(self.datatype)
        elif(callable(spec)):
            try:
                spec = np.array(spec(kindex),dtype=self.datatype)
            except:
                raise TypeError(about._errors.cstring("ERROR: invalid power spectra function.")) ## exception in ``spec(kindex)``
        elif(np.isscalar(spec)):
            spec = np.array([spec],dtype=self.datatype)
        else:
            spec = np.array(spec,dtype=self.datatype)

        ## drop imaginary part
        spec = np.real(spec)

        ## check finiteness
        if(not np.all(np.isfinite(spec))):
            about.warnings.cprint("WARNING: infinite value(s).")
        ## check positivity (excluding null)
        if(np.any(spec<0)):
            raise ValueError(about._errors.cstring("ERROR: nonpositive value(s)."))
        elif(np.any(spec==0)):
            about.warnings.cprint("WARNING: nonpositive value(s).")

        ## extend a scalar spectrum to all bands
        if(np.size(spec)==1):
            spec = spec*np.ones(size,dtype=spec.dtype,order='C')
        ## size check
        elif(np.size(spec)<size):
            raise ValueError(about._errors.cstring("ERROR: size mismatch ( "+str(np.size(spec))+" < "+str(size)+" )."))
        elif(np.size(spec)>size):
            about.warnings.cprint("WARNING: power spectrum cut to size ( == "+str(size)+" ).")
            spec = spec[:size]

        return spec
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def set_power_indices(self,**kwargs):
    """
    Sets the (un)indexing objects for spectral indexing internally.

    Parameters
    ----------
    log : bool
        Flag specifying if the binning is performed on logarithmic
        scale or not; if set, the number of used bins is set
        automatically (if not given otherwise); by default no binning
        is done (default: None).
    nbin : integer
        Number of used bins; if given `log` is set to ``False``;
        integers below the minimum of 3 induce an automatic setting;
        by default no binning is done (default: None).
    binbounds : {list, array}
        User specific inner boundaries of the bins, which are preferred
        over the above parameters; by default no binning is done
        (default: None).

    Returns
    -------
    None

    See Also
    --------
    get_power_indices

    Raises
    ------
    AttributeError
        If ``self.fourier == False``.
    ValueError
        If the binning leaves one or more bins empty.
    """
    ## power spectra are only defined on the Fourier-space grid
    if(not self.fourier):
        raise AttributeError(about._errors.cstring("ERROR: power spectra indexing ill-defined."))
    ## check storage: reuse cached indices unless a binning keyword changed
    if(hasattr(self,"power_indices")):
        config = self.power_indices.get("config")
        ## check configuration
        redo = False
        if(config.get("log")!=kwargs.get("log",config.get("log"))):
            config["log"] = kwargs.get("log")
            redo = True
        if(config.get("nbin")!=kwargs.get("nbin",config.get("nbin"))):
            config["nbin"] = kwargs.get("nbin")
            redo = True
        if(np.any(config.get("binbounds")!=kwargs.get("binbounds",config.get("binbounds")))):
            config["binbounds"] = kwargs.get("binbounds")
            redo = True
        if(not redo):
            return None
    else:
        config = {"binbounds":kwargs.get("binbounds",None),"log":kwargs.get("log",None),"nbin":kwargs.get("nbin",None)}
    ## power indices
    about.infos.cflush("INFO: setting power indices ...")
    ## self.para layout: [num per axis | symmetry type | zerocenter flag per axis]
    pindex,kindex,rho = gp.get_power_indices2(self.para[:(np.size(self.para)-1)//2],self.vol,self.para[-((np.size(self.para)-1)//2):].astype(np.bool),fourier=True)
    ## bin if any binning keyword is set
    if(config.get("log") is not None)or(config.get("nbin") is not None)or(config.get("binbounds") is not None):
        pindex,kindex,rho = gp.bin_power_indices(pindex,kindex,rho,**config)
        ## check binning
        if(np.any(rho==0)):
            raise ValueError(about._errors.cstring("ERROR: empty bin(s).")) ## binning too fine
    ## power undex: position of the first occurrence of each power index
    pundex = np.unique(pindex,return_index=True,return_inverse=False)[1]
    ## storage
    self.power_indices = {"config":config,"kindex":kindex,"pindex":pindex,"pundex":pundex,"rho":rho} ## alphabetical
    about.infos.cprint(" done.")
    return None
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def enforce_values(self,x,extend=True):
    """
    Computes valid field values from a given object, taking care of
    data types, shape, and symmetry.

    Parameters
    ----------
    x : {float, numpy.ndarray, nifty.field}
        Object to be transformed into an array of valid field values.

    Returns
    -------
    x : numpy.ndarray
        Array containing the valid field values.

    Other parameters
    ----------------
    extend : bool, *optional*
        Whether a scalar is extended to a constant array or not
        (default: True).
    """
    if(isinstance(x,field)):
        ## a field must live on this domain with an identical data type
        if(self==x.domain):
            if(self.datatype is not x.domain.datatype):
                raise TypeError(about._errors.cstring("ERROR: inequal data types ( '"+str(np.result_type(self.datatype))+"' <> '"+str(np.result_type(x.domain.datatype))+"' )."))
            else:
                x = np.copy(x.val)
        else:
            raise ValueError(about._errors.cstring("ERROR: inequal domains."))
    else:
        if(np.size(x)==1):
            if(extend):
                ## broadcast the scalar to a constant array over the full grid
                x = self.datatype(x)*np.ones(self.dim(split=True),dtype=self.datatype,order='C')
            else:
                if(np.isscalar(x)):
                    x = np.array([x],dtype=self.datatype)
                else:
                    x = np.array(x,dtype=self.datatype)
        else:
            x = self.enforce_shape(np.array(x,dtype=self.datatype))
    ## hermitianize if the grid is of the complex-hermitian type (para[naxes]==1)
    if(about.hermitianize.status)and(np.size(x)!=1)and(self.para[(np.size(self.para)-1)//2]==1):
        x = gp.nhermitianize_fast(x,self.para[-((np.size(self.para)-1)//2):].astype(np.bool),special=False)
    ## check finiteness
    if(not np.all(np.isfinite(x))):
        about.warnings.cprint("WARNING: infinite value(s).")
    return x
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def get_random_values(self,**kwargs):
    """
    Generates random field values according to the specifications given
    by the parameters, taking into account possible complex-valuedness
    and hermitian symmetry.

    Returns
    -------
    x : numpy.ndarray
        Valid field values.

    Other parameters
    ----------------
    random : string, *optional*
        Specifies the probability distribution from which the random
        numbers are to be drawn.
        Supported distributions are:

        - "pm1" (uniform distribution over {+1,-1} or {+1,+i,-1,-i})
        - "gau" (normal distribution with zero-mean and a given standard
          deviation or variance)
        - "syn" (synthesizes from a given power spectrum)
        - "uni" (uniform distribution over [vmin,vmax[)

        (default: None).
    dev : float, *optional*
        Standard deviation (default: 1).
    var : float, *optional*
        Variance, overriding `dev` if both are specified
        (default: 1).
    spec : {scalar, list, numpy.ndarray, nifty.field, function}, *optional*
        Power spectrum (default: 1).
    pindex : numpy.ndarray, *optional*
        Indexing array giving the power spectrum index of each band
        (default: None).
    kindex : numpy.ndarray, *optional*
        Scale of each band (default: None).
    codomain : nifty.rg_space, *optional*
        A compatible codomain with power indices (default: None).
    log : bool, *optional*
        Flag specifying if the spectral binning is performed on logarithmic
        scale or not; if set, the number of used bins is set
        automatically (if not given otherwise); by default no binning
        is done (default: None).
    nbin : integer, *optional*
        Number of used spectral bins; if given `log` is set to ``False``;
        integers below the minimum of 3 induce an automatic setting;
        by default no binning is done (default: None).
    binbounds : {list, array}, *optional*
        User specific inner boundaries of the bins, which are preferred
        over the above parameters; by default no binning is done
        (default: None).
    vmin : float, *optional*
        Lower limit for a uniform distribution (default: 0).
    vmax : float, *optional*
        Upper limit for a uniform distribution (default: 1).
    """
    arg = random.arguments(self,**kwargs)
    if(arg is None):
        ## no random key given: return a zero field
        return np.zeros(self.dim(split=True),dtype=self.datatype,order='C')
    elif(arg[0]=="pm1"):
        if(about.hermitianize.status)and(self.para[(np.size(self.para)-1)//2]==1):
            return gp.random_hermitian_pm1(self.datatype,self.para[-((np.size(self.para)-1)//2):].astype(np.bool),self.dim(split=True)) ## special case
        else:
            x = random.pm1(datatype=self.datatype,shape=self.dim(split=True))
    elif(arg[0]=="gau"):
        x = random.gau(datatype=self.datatype,shape=self.dim(split=True),mean=None,dev=arg[2],var=arg[3])
    elif(arg[0]=="syn"):
        naxes = (np.size(self.para)-1)//2
        x = gp.draw_vector_nd(self.para[:naxes],self.vol,arg[1],symtype=self.para[naxes],fourier=self.fourier,zerocentered=self.para[-naxes:].astype(np.bool),kpack=arg[2])
        ## correct for 'ifft'
        if(not self.fourier):
            x = self.calc_weight(x,power=-1)
        return x
    elif(arg[0]=="uni"):
        x = random.uni(datatype=self.datatype,shape=self.dim(split=True),vmin=arg[1],vmax=arg[2])
    else:
        raise KeyError(about._errors.cstring("ERROR: unsupported random key '"+str(arg[0])+"'."))
    ## hermitianize if the grid is of the complex-hermitian type
    if(about.hermitianize.status)and(self.para[(np.size(self.para)-1)//2]==1):
        x = gp.nhermitianize_fast(x,self.para[-((np.size(self.para)-1)//2):].astype(np.bool),special=(arg[0] in ["gau","pm1"]))
    return x
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def check_codomain(self,codomain):
    """
    Checks whether a given codomain is compatible to the space or not.

    Parameters
    ----------
    codomain : nifty.space
        Space to be checked for compatibility.

    Returns
    -------
    check : bool
        Whether or not the given codomain is compatible to the space.
    """
    if(not isinstance(codomain,space)):
        raise TypeError(about._errors.cstring("ERROR: invalid input."))
    elif(isinstance(codomain,rg_space)):
        ## naxes'==naxes
        if((np.size(codomain.para)-1)//2==(np.size(self.para)-1)//2):
            naxes = (np.size(self.para)-1)//2
            ## num'==num : identical pixelization per axis
            if(np.all(codomain.para[:naxes]==self.para[:naxes])):
                ## typ'==typ ==2 : both generic complex grids
                if(codomain.para[naxes]==self.para[naxes]==2):
                    ## dist'~=1/(num*dist) : Fourier-conjugate distances
                    if(np.all(np.absolute(self.para[:naxes]*self.vol*codomain.vol-1)<self.epsilon)):
                        return True
                    ## fourier'==fourier
                    elif(codomain.fourier==self.fourier):
                        ## dist'~=dist
                        if(np.all(np.absolute(codomain.vol/self.vol-1)<self.epsilon)):
                            return True
                        else:
                            about.warnings.cprint("WARNING: unrecommended codomain.")
                ## 2!= typ'!=typ !=2 and dist'~=1/(num*dist)
                elif(2!=codomain.para[naxes]!=self.para[naxes]!=2)and(np.all(np.absolute(self.para[:naxes]*self.vol*codomain.vol-1)<self.epsilon)):
                    return True
                ## typ'==typ !=2 : same (real or hermitian) type on the same side
                elif(codomain.para[naxes]==self.para[naxes]!=2)and(codomain.fourier==self.fourier):
                    ## dist'~=dist
                    if(np.all(np.absolute(codomain.vol/self.vol-1)<self.epsilon)):
                        return True
                    else:
                        about.warnings.cprint("WARNING: unrecommended codomain.")
    return False
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def get_codomain(self,coname=None,cozerocenter=None,**kwargs):
    """
    Generates a compatible codomain to which transformations are
    reasonable, i.e.\ either a shifted grid or a Fourier conjugate
    grid.

    Parameters
    ----------
    coname : string, *optional*
        String specifying a desired codomain (default: None).
    cozerocenter : {bool, numpy.ndarray}, *optional*
        Whether or not the grid is zerocentered for each axis or not
        (default: None).

    Returns
    -------
    codomain : nifty.rg_space
        A compatible codomain.

    Notes
    -----
    Possible arguments for `coname` are ``'f'`` in which case the
    codomain arises from a Fourier transformation, ``'i'`` in which case
    it arises from an inverse Fourier transformation, and ``'?'`` in
    which case it arises from a simple shift. If no `coname` is given,
    the Fourier conjugate grid is produced.
    """
    naxes = (np.size(self.para)-1)//2
    ## default: inherit the zerocentering flags (reversed to user order)
    if(cozerocenter is None):
        cozerocenter = self.para[-naxes:][::-1]
    elif(np.isscalar(cozerocenter)):
        cozerocenter = bool(cozerocenter)
    else:
        cozerocenter = np.array(cozerocenter,dtype=np.bool)
        if(np.size(cozerocenter)==1):
            cozerocenter = np.asscalar(cozerocenter)
        elif(np.size(cozerocenter)!=naxes):
            raise ValueError(about._errors.cstring("ERROR: size mismatch ( "+str(np.size(cozerocenter))+" <> "+str(naxes)+" )."))
    if(coname is None):
        return rg_space(self.para[:naxes][::-1],naxes=naxes,zerocenter=cozerocenter,hermitian=bool(self.para[naxes]<2),purelyreal=bool(self.para[naxes]==1),dist=1/(self.para[:naxes]*self.vol)[::-1],fourier=bool(not self.fourier)) ## dist',fourier' = 1/(num*dist),NOT fourier
    elif(coname[0]=='f'):
        return rg_space(self.para[:naxes][::-1],naxes=naxes,zerocenter=cozerocenter,hermitian=bool(self.para[naxes]<2),purelyreal=bool(self.para[naxes]==1),dist=1/(self.para[:naxes]*self.vol)[::-1],fourier=True) ## dist',fourier' = 1/(num*dist),True
    elif(coname[0]=='i'):
        return rg_space(self.para[:naxes][::-1],naxes=naxes,zerocenter=cozerocenter,hermitian=bool(self.para[naxes]<2),purelyreal=bool(self.para[naxes]==1),dist=1/(self.para[:naxes]*self.vol)[::-1],fourier=False) ## dist',fourier' = 1/(num*dist),False
    else:
        return rg_space(self.para[:naxes][::-1],naxes=naxes,zerocenter=cozerocenter,hermitian=bool(self.para[naxes]<2),purelyreal=bool(not self.para[naxes]),dist=self.vol[::-1],fourier=self.fourier) ## dist',fourier' = dist,fourier
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def get_meta_volume(self, total=False):
    """
    Calculates the meta volumes.

    The meta volumes are the volumes associated with each component of
    a field, taking into account field components that are not
    explicitly included in the array of field values but are determined
    by symmetry conditions. In the case of an :py:class:`rg_space`, the
    meta volumes are simply the pixel volumes.

    Parameters
    ----------
    total : bool, *optional*
        Whether to return the total meta volume of the space or the
        individual ones of each pixel (default: False).

    Returns
    -------
    mol : {numpy.ndarray, float}
        Meta volume of the pixels or the complete space.
    """
    ## volume of a single pixel (product over all axis spacings)
    pixel_volume = np.prod(self.vol, axis=0, dtype=None, out=None)
    if total:
        ## total volume = number of pixels times one pixel volume
        return self.dim(split=False) * pixel_volume
    ## per-pixel volumes: weight a unit array by the pixel volume
    unit = np.ones(self.dim(split=True), dtype=self.vol.dtype, order='C')
    return self.calc_weight(unit, power=1)
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def calc_weight(self, x, power=1):
    """
    Weights a given array with the pixel volumes to a given power.

    Parameters
    ----------
    x : numpy.ndarray
        Array to be weighted.
    power : float, *optional*
        Power of the pixel volumes to be used (default: 1).

    Returns
    -------
    y : numpy.ndarray
        Weighted array.
    """
    values = self.enforce_shape(np.array(x, dtype=self.datatype))
    ## multiply by (pixel volume)**power
    pixel_volume = np.prod(self.vol, axis=0, dtype=None, out=None)
    return values * pixel_volume**power
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def calc_dot(self,x,y):
    """
    Computes the discrete inner product of two given arrays.

    Parameters
    ----------
    x : numpy.ndarray
        First array
    y : numpy.ndarray
        Second array

    Returns
    -------
    dot : scalar
        Inner product of the two arrays.
    """
    x = self.enforce_shape(np.array(x,dtype=self.datatype))
    y = self.enforce_shape(np.array(y,dtype=self.datatype))
    ## inner product, complex-conjugating the first argument
    dot = np.dot(np.conjugate(x.flatten(order='C')),y.flatten(order='C'),out=None)
    if(np.isreal(dot)):
        return np.asscalar(np.real(dot))
    elif(self.para[(np.size(self.para)-1)//2]!=2):
        ## grid is not generically complex: the result should be real;
        ## check imaginary part against a relative threshold
        if(np.absolute(dot.imag)>self.epsilon**2*np.absolute(dot.real)):
            about.warnings.cprint("WARNING: discarding considerable imaginary part.")
        return np.asscalar(np.real(dot))
    else:
        return dot
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def calc_transform(self,x,codomain=None,**kwargs):
    """
    Computes the transform of a given array of field values.

    Parameters
    ----------
    x : numpy.ndarray
        Array to be transformed.
    codomain : nifty.rg_space, *optional*
        Target space to which the transformation shall map
        (default: None).

    Returns
    -------
    Tx : numpy.ndarray
        Transformed array
    """
    x = self.enforce_shape(np.array(x,dtype=self.datatype))
    if(codomain is None):
        return x ## T == id
    ## mandatory(!) codomain check
    if(isinstance(codomain,rg_space))and(self.check_codomain(codomain)):
        naxes = (np.size(self.para)-1)//2
        ## select machine: Fourier-conjugate distances imply an actual (i)fft
        if(np.all(np.absolute(self.para[:naxes]*self.vol*codomain.vol-1)<self.epsilon)):
            if(codomain.fourier):
                ftmachine = "fft"
                ## correct for 'fft': weight by the pixel volume
                x = self.calc_weight(x,power=1)
            else:
                ftmachine = "ifft"
                ## correct for 'ifft': weight and rescale by the number of pixels
                x = self.calc_weight(x,power=1)
                x *= self.dim(split=False)
        else:
            ftmachine = "none"
        ## transform (gfft requires complex input for float64 grids)
        if(self.datatype==np.float64):
            Tx = gf.gfft(x.astype(np.complex128),in_ax=[],out_ax=[],ftmachine=ftmachine,in_zero_center=self.para[-naxes:].astype(np.bool).tolist(),out_zero_center=codomain.para[-naxes:].astype(np.bool).tolist(),enforce_hermitian_symmetry=bool(codomain.para[naxes]==1),W=-1,alpha=-1,verbose=False)
        else:
            Tx = gf.gfft(x,in_ax=[],out_ax=[],ftmachine=ftmachine,in_zero_center=self.para[-naxes:].astype(np.bool).tolist(),out_zero_center=codomain.para[-naxes:].astype(np.bool).tolist(),enforce_hermitian_symmetry=bool(codomain.para[naxes]==1),W=-1,alpha=-1,verbose=False)
        ## check complexity
        if(not codomain.para[naxes]): ## purely real
            ## check imaginary part
            if(np.any(Tx.imag!=0))and(np.dot(Tx.imag.flatten(order='C'),Tx.imag.flatten(order='C'),out=None)>self.epsilon**2*np.dot(Tx.real.flatten(order='C'),Tx.real.flatten(order='C'),out=None)):
                about.warnings.cprint("WARNING: discarding considerable imaginary part.")
            Tx = np.real(Tx)
    else:
        raise ValueError(about._errors.cstring("ERROR: unsupported transformation."))
    return Tx.astype(codomain.datatype)
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def calc_smooth(self,x,sigma=0,**kwargs):
    """
    Smoothes an array of field values by convolution with a Gaussian
    kernel.

    Parameters
    ----------
    x : numpy.ndarray
        Array of field values to be smoothed.
    sigma : float, *optional*
        Standard deviation of the Gaussian kernel, specified in units
        of length in position space; for testing: a sigma of -1 will be
        reset to a reasonable value (default: 0).

    Returns
    -------
    Gx : numpy.ndarray
        Smoothed array.
    """
    x = self.enforce_shape(np.array(x,dtype=self.datatype))
    naxes = (np.size(self.para)-1)//2
    ## check sigma
    if(sigma==0):
        ## nothing to do
        return x
    elif(sigma==-1):
        about.infos.cprint("INFO: invalid sigma reset.")
        if(self.fourier):
            sigma = 1.5/np.min(self.para[:naxes]*self.vol) ## sqrt(2)*max(dist)
        else:
            sigma = 1.5*np.max(self.vol) ## sqrt(2)*max(dist)
    elif(sigma<0):
        raise ValueError(about._errors.cstring("ERROR: invalid sigma."))
    ## smooth
    Gx = gs.smooth_field(x,self.fourier,self.para[-naxes:].astype(np.bool).tolist(),bool(self.para[naxes]==1),self.vol,smooth_length=sigma)
    ## check complexity
    if(not self.para[naxes]): ## purely real
        ## check imaginary part
        if(np.any(Gx.imag!=0))and(np.dot(Gx.imag.flatten(order='C'),Gx.imag.flatten(order='C'),out=None)>self.epsilon**2*np.dot(Gx.real.flatten(order='C'),Gx.real.flatten(order='C'),out=None)):
            about.warnings.cprint("WARNING: discarding considerable imaginary part.")
        Gx = np.real(Gx)
    return Gx
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def calc_power(self,x,**kwargs):
    """
    Computes the power of an array of field values.

    Parameters
    ----------
    x : numpy.ndarray
        Array containing the field values of which the power is to be
        calculated.

    Returns
    -------
    spec : numpy.ndarray
        Power contained in the input array.

    Other parameters
    ----------------
    pindex : numpy.ndarray, *optional*
        Indexing array assigning the input array components to
        components of the power spectrum (default: None).
    kindex : numpy.ndarray, *optional*
        Scale corresponding to each band in the power spectrum
        (default: None).
    rho : numpy.ndarray, *optional*
        Number of degrees of freedom per band (default: None).
    codomain : nifty.space, *optional*
        A compatible codomain for power indexing (default: None).
    log : bool, *optional*
        Flag specifying if the spectral binning is performed on logarithmic
        scale or not; if set, the number of used bins is set
        automatically (if not given otherwise); by default no binning
        is done (default: None).
    nbin : integer, *optional*
        Number of used spectral bins; if given `log` is set to ``False``;
        integers below the minimum of 3 induce an automatic setting;
        by default no binning is done (default: None).
    binbounds : {list, array}, *optional*
        User specific inner boundaries of the bins, which are preferred
        over the above parameters; by default no binning is done
        (default: None).
    """
    x = self.enforce_shape(np.array(x,dtype=self.datatype))
    ## correct for 'fft': position-space values carry the pixel volume
    if(not self.fourier):
        x = self.calc_weight(x,power=1)
    ## explicit power indices
    pindex,kindex,rho = kwargs.get("pindex",None),kwargs.get("kindex",None),kwargs.get("rho",None)
    ## implicit power indices: fall back to this space or a codomain
    if(pindex is None)or(kindex is None)or(rho is None):
        try:
            self.set_power_indices(**kwargs)
        except:
            codomain = kwargs.get("codomain",self.get_codomain())
            codomain.set_power_indices(**kwargs)
            pindex,kindex,rho = codomain.power_indices.get("pindex"),codomain.power_indices.get("kindex"),codomain.power_indices.get("rho")
        else:
            pindex,kindex,rho = self.power_indices.get("pindex"),self.power_indices.get("kindex"),self.power_indices.get("rho")
    ## power spectrum
    return gp.calc_ps_fast(x,self.para[:(np.size(self.para)-1)//2],self.vol,self.para[-((np.size(self.para)-1)//2):].astype(np.bool),fourier=self.fourier,pindex=pindex,kindex=kindex,rho=rho)
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def get_plot(self,x,title="",vmin=None,vmax=None,power=None,unit="",norm=None,cmap=None,cbar=True,other=None,legend=False,mono=True,**kwargs):
    """
    Creates a plot of field values according to the specifications
    given by the parameters.

    Parameters
    ----------
    x : numpy.ndarray
        Array containing the field values.

    Returns
    -------
    None

    Other parameters
    ----------------
    title : string, *optional*
        Title of the plot (default: "").
    vmin : float, *optional*
        Minimum value to be displayed (default: ``min(x)``).
    vmax : float, *optional*
        Maximum value to be displayed (default: ``max(x)``).
    power : bool, *optional*
        Whether to plot the power contained in the field or the field
        values themselves (default: False).
    unit : string, *optional*
        Unit of the field values (default: "").
    norm : string, *optional*
        Scaling of the field values before plotting (default: None).
    cmap : matplotlib.colors.LinearSegmentedColormap, *optional*
        Color map to be used for two-dimensional plots (default: None).
    cbar : bool, *optional*
        Whether to show the color bar or not (default: True).
    other : {single object, tuple of objects}, *optional*
        Object or tuple of objects to be added, where objects can be
        scalars, arrays, or fields (default: None).
    legend : bool, *optional*
        Whether to show the legend or not (default: False).
    mono : bool, *optional*
        Whether to plot the monopole or not (default: True).
    save : string, *optional*
        Valid file name where the figure is to be stored, by default
        the figure is not saved (default: False).
    error : {float, numpy.ndarray, nifty.field}, *optional*
        Object indicating some confidence interval to be plotted
        (default: None).
    kindex : numpy.ndarray, *optional*
        Scale corresponding to each band in the power spectrum
        (default: None).
    codomain : nifty.space, *optional*
        A compatible codomain for power indexing (default: None).
    log : bool, *optional*
        Flag specifying if the spectral binning is performed on logarithmic
        scale or not; if set, the number of used bins is set
        automatically (if not given otherwise); by default no binning
        is done (default: None).
    nbin : integer, *optional*
        Number of used spectral bins; if given `log` is set to ``False``;
        integers below the minimum of 3 induce an automatic setting;
        by default no binning is done (default: None).
    binbounds : {list, array}, *optional*
        User specific inner boundaries of the bins, which are preferred
        over the above parameters; by default no binning is done
        (default: None).
    """
    if(not pl.isinteractive())and(not bool(kwargs.get("save",False))):
        about.warnings.cprint("WARNING: interactive mode off.")
    naxes = (np.size(self.para)-1)//2
    if(power is None):
        power = bool(self.para[naxes])
    if(power):
        ## plot the power spectrum on log-log axes
        x = self.calc_power(x,**kwargs)
        fig = pl.figure(num=None,figsize=(6.4,4.8),dpi=None,facecolor="none",edgecolor="none",frameon=False,FigureClass=pl.Figure)
        ax0 = fig.add_axes([0.12,0.12,0.82,0.76])
        ## explicit kindex
        xaxes = kwargs.get("kindex",None)
        ## implicit kindex
        if(xaxes is None):
            try:
                self.set_power_indices(**kwargs)
            except:
                codomain = kwargs.get("codomain",self.get_codomain())
                codomain.set_power_indices(**kwargs)
                xaxes = codomain.power_indices.get("kindex")
            else:
                xaxes = self.power_indices.get("kindex")
        if(norm is None)or(not isinstance(norm,int)):
            norm = naxes
        ## note: `mono` (bool) is used as an int slice bound below
        if(vmin is None):
            vmin = np.min(x[:mono].tolist()+(xaxes**norm*x)[1:].tolist(),axis=None,out=None)
        if(vmax is None):
            vmax = np.max(x[:mono].tolist()+(xaxes**norm*x)[1:].tolist(),axis=None,out=None)
        ax0.loglog(xaxes[1:],(xaxes**norm*x)[1:],color=[0.0,0.5,0.0],label="graph 0",linestyle='-',linewidth=2.0,zorder=1)
        if(mono):
            ax0.scatter(0.5*(xaxes[1]+xaxes[2]),x[0],s=20,color=[0.0,0.5,0.0],marker='o',cmap=None,norm=None,vmin=None,vmax=None,alpha=None,linewidths=None,verts=None,zorder=1)
        if(other is not None):
            ## normalize `other` to a list of power spectra
            if(isinstance(other,tuple)):
                other = list(other)
                for ii in xrange(len(other)):
                    if(isinstance(other[ii],field)):
                        other[ii] = other[ii].power(**kwargs)
                    else:
                        other[ii] = self.enforce_power(other[ii],size=np.size(xaxes),kindex=xaxes)
            elif(isinstance(other,field)):
                other = [other.power(**kwargs)]
            else:
                other = [self.enforce_power(other,size=np.size(xaxes),kindex=xaxes)]
            imax = max(1,len(other)-1)
            for ii in xrange(len(other)):
                ax0.loglog(xaxes[1:],(xaxes**norm*other[ii])[1:],color=[max(0.0,1.0-(2*ii/imax)**2),0.5*((2*ii-imax)/imax)**2,max(0.0,1.0-(2*(ii-imax)/imax)**2)],label="graph "+str(ii+1),linestyle='-',linewidth=1.0,zorder=-ii)
                if(mono):
                    ax0.scatter(0.5*(xaxes[1]+xaxes[2]),other[ii][0],s=20,color=[max(0.0,1.0-(2*ii/imax)**2),0.5*((2*ii-imax)/imax)**2,max(0.0,1.0-(2*(ii-imax)/imax)**2)],marker='o',cmap=None,norm=None,vmin=None,vmax=None,alpha=None,linewidths=None,verts=None,zorder=-ii)
            if(legend):
                ax0.legend()
        ax0.set_xlim(xaxes[1],xaxes[-1])
        ax0.set_xlabel(r"$|k|$")
        ax0.set_ylim(vmin,vmax)
        ax0.set_ylabel(r"$|k|^{%i} P_k$"%norm)
        ax0.set_title(title)
    else:
        x = self.enforce_shape(np.array(x))
        if(naxes==1):
            ## one-dimensional line plot
            fig = pl.figure(num=None,figsize=(6.4,4.8),dpi=None,facecolor="none",edgecolor="none",frameon=False,FigureClass=pl.Figure)
            ax0 = fig.add_axes([0.12,0.12,0.82,0.76])
            xaxes = (np.arange(self.para[0],dtype=np.int)+self.para[2]*(self.para[0]//2))*self.vol
            if(vmin is None):
                if(np.iscomplexobj(x)):
                    vmin = min(np.min(np.absolute(x),axis=None,out=None),np.min(np.real(x),axis=None,out=None),np.min(np.imag(x),axis=None,out=None))
                else:
                    vmin = np.min(x,axis=None,out=None)
            if(vmax is None):
                if(np.iscomplexobj(x)):
                    vmax = max(np.max(np.absolute(x),axis=None,out=None),np.max(np.real(x),axis=None,out=None),np.max(np.imag(x),axis=None,out=None))
                else:
                    vmax = np.max(x,axis=None,out=None)
            if(norm=="log"):
                ax0graph = ax0.semilogy
                if(vmin<=0):
                    raise ValueError(about._errors.cstring("ERROR: nonpositive value(s)."))
            else:
                ax0graph = ax0.plot
            if(np.iscomplexobj(x)):
                ## complex values: absolute, real, and imaginary traces
                ax0graph(xaxes,np.absolute(x),color=[0.0,0.5,0.0],label="graph (absolute)",linestyle='-',linewidth=2.0,zorder=1)
                ax0graph(xaxes,np.real(x),color=[0.0,0.5,0.0],label="graph (real part)",linestyle="--",linewidth=1.0,zorder=0)
                ax0graph(xaxes,np.imag(x),color=[0.0,0.5,0.0],label="graph (imaginary part)",linestyle=':',linewidth=1.0,zorder=0)
                if(legend):
                    ax0.legend()
            elif(other is not None):
                ax0graph(xaxes,x,color=[0.0,0.5,0.0],label="graph 0",linestyle='-',linewidth=2.0,zorder=1)
                if(isinstance(other,tuple)):
                    other = [self.enforce_values(xx,extend=True) for xx in other]
                else:
                    other = [self.enforce_values(other,extend=True)]
                imax = max(1,len(other)-1)
                for ii in xrange(len(other)):
                    ax0graph(xaxes,other[ii],color=[max(0.0,1.0-(2*ii/imax)**2),0.5*((2*ii-imax)/imax)**2,max(0.0,1.0-(2*(ii-imax)/imax)**2)],label="graph "+str(ii+1),linestyle='-',linewidth=1.0,zorder=-ii)
                if("error" in kwargs):
                    error = self.enforce_values(np.absolute(kwargs.get("error")),extend=True)
                    ax0.fill_between(xaxes,x-error,x+error,color=[0.8,0.8,0.8],label="error 0",zorder=-len(other))
                if(legend):
                    ax0.legend()
            else:
                ax0graph(xaxes,x,color=[0.0,0.5,0.0],label="graph 0",linestyle='-',linewidth=2.0,zorder=1)
                if("error" in kwargs):
                    error = self.enforce_values(np.absolute(kwargs.get("error")),extend=True)
                    ax0.fill_between(xaxes,x-error,x+error,color=[0.8,0.8,0.8],label="error 0",zorder=0)
            ax0.set_xlim(xaxes[0],xaxes[-1])
            ax0.set_xlabel("coordinate")
            ax0.set_ylim(vmin,vmax)
            if(unit):
                unit = " ["+unit+"]"
            ax0.set_ylabel("values"+unit)
            ax0.set_title(title)
        elif(naxes==2):
            if(np.iscomplexobj(x)):
                ## complex 2D data: recurse once for absolute values and once for phases
                about.infos.cprint("INFO: absolute values and phases are plotted.")
                if(title):
                    title += " "
                if(bool(kwargs.get("save",False))):
                    save_ = os.path.splitext(os.path.basename(str(kwargs.get("save"))))
                    kwargs.update(save=save_[0]+"_absolute"+save_[1])
                self.get_plot(np.absolute(x),title=title+"(absolute)",vmin=vmin,vmax=vmax,power=False,unit=unit,norm=norm,cmap=cmap,cbar=cbar,other=None,legend=False,**kwargs)
                # self.get_plot(np.real(x),title=title+"(real part)",vmin=vmin,vmax=vmax,power=False,unit=unit,norm=norm,cmap=cmap,cbar=cbar,other=None,legend=False,**kwargs)
                # self.get_plot(np.imag(x),title=title+"(imaginary part)",vmin=vmin,vmax=vmax,power=False,unit=unit,norm=norm,cmap=cmap,cbar=cbar,other=None,legend=False,**kwargs)
                if(unit):
                    unit = "rad"
                if(cmap is None):
                    cmap = pl.cm.hsv_r
                if(bool(kwargs.get("save",False))):
                    kwargs.update(save=save_[0]+"_phase"+save_[1])
                self.get_plot(np.angle(x,deg=False),title=title+"(phase)",vmin=-3.1416,vmax=3.1416,power=False,unit=unit,norm=None,cmap=cmap,cbar=cbar,other=None,legend=False,**kwargs) ## values in [-pi,pi]
                return None ## leave method
            else:
                ## real 2D data: pseudo-color mesh
                if(vmin is None):
                    vmin = np.min(x,axis=None,out=None)
                if(vmax is None):
                    vmax = np.max(x,axis=None,out=None)
                if(norm=="log")and(vmin<=0):
                    raise ValueError(about._errors.cstring("ERROR: nonpositive value(s)."))
                s_ = np.array([self.para[1]*self.vol[1]/np.max(self.para[:naxes]*self.vol,axis=None,out=None),self.para[0]*self.vol[0]/np.max(self.para[:naxes]*self.vol,axis=None,out=None)*(1.0+0.159*bool(cbar))])
                fig = pl.figure(num=None,figsize=(6.4*s_[0],6.4*s_[1]),dpi=None,facecolor="none",edgecolor="none",frameon=False,FigureClass=pl.Figure)
                ax0 = fig.add_axes([0.06/s_[0],0.06/s_[1],1.0-0.12/s_[0],1.0-0.12/s_[1]])
                xaxes = (np.arange(self.para[1]+1,dtype=np.int)-0.5+self.para[4]*(self.para[1]//2))*self.vol[1]
                yaxes = (np.arange(self.para[0]+1,dtype=np.int)-0.5+self.para[3]*(self.para[0]//2))*self.vol[0]
                if(norm=="log"):
                    n_ = ln(vmin=vmin,vmax=vmax)
                else:
                    n_ = None
                sub = ax0.pcolormesh(xaxes,yaxes,x,cmap=cmap,norm=n_,vmin=vmin,vmax=vmax)
                ax0.set_xlim(xaxes[0],xaxes[-1])
                ax0.set_xticks([0],minor=False)
                ax0.set_ylim(yaxes[0],yaxes[-1])
                ax0.set_yticks([0],minor=False)
                ax0.set_aspect("equal")
                if(cbar):
                    if(norm=="log"):
                        f_ = lf(10,labelOnlyBase=False)
                        b_ = sub.norm.inverse(np.linspace(0,1,sub.cmap.N+1))
                        v_ = np.linspace(sub.norm.vmin,sub.norm.vmax,sub.cmap.N)
                    else:
                        f_ = None
                        b_ = None
                        v_ = None
                    cb0 = fig.colorbar(sub,ax=ax0,orientation="horizontal",fraction=0.1,pad=0.05,shrink=0.75,aspect=20,ticks=[vmin,vmax],format=f_,drawedges=False,boundaries=b_,values=v_)
                    cb0.ax.text(0.5,-1.0,unit,fontdict=None,withdash=False,transform=cb0.ax.transAxes,horizontalalignment="center",verticalalignment="center")
                ax0.set_title(title)
        else:
            raise ValueError(about._errors.cstring("ERROR: unsupported number of axes ( "+str(naxes)+" > 2 )."))
    ## output: save to file or draw on screen
    if(bool(kwargs.get("save",False))):
        fig.savefig(str(kwargs.get("save")),dpi=None,facecolor="none",edgecolor="none",orientation="portrait",papertype=None,format=None,transparent=False,bbox_inches=None,pad_inches=0.1)
        pl.close(fig)
    else:
        fig.canvas.draw()
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def __repr__(self):
    """Return a terse, fixed identifier for this regular-grid space."""
    return "<nifty_rg.rg_space>"
def __str__(self):
    """Return a human-readable summary of the grid parameters."""
    ## para layout: [num per axis | symmetry type | zerocenter flag per axis];
    ## axis-ordered attributes are reversed into user (constructor) order
    naxes = (np.size(self.para)-1)//2
    num = self.para[:naxes][::-1].tolist()
    zerocenter = self.para[-naxes:][::-1].astype(np.bool).tolist()
    dist = self.vol[::-1].tolist()
    return "nifty_rg.rg_space instance\n- num = "+str(num)+"\n- naxes = "+str(naxes)+"\n- hermitian = "+str(bool(self.para[naxes]<2))+"\n- purelyreal = "+str(bool(not self.para[naxes]))+"\n- zerocenter = "+str(zerocenter)+"\n- dist = "+str(dist)+"\n- fourier = "+str(self.fourier)
##-----------------------------------------------------------------------------
|
ultimanet/nifty
|
rg/nifty_rg.py
|
Python
|
gpl-3.0
| 57,120
|
[
"Gaussian"
] |
e9c44ee1cc2723e2e07c080bca300e40406be2ef855dc84bd51d7cc345fb8f0f
|
# $Id$
#
from __future__ import print_function
from rdkit import Chem
from rdkit.Chem import rdMolDescriptors as rdMD, Descriptors
from rdkit.Chem import AllChem
from rdkit import DataStructs
from rdkit import RDConfig
from rdkit.Geometry import rdGeometry as rdG
import unittest
def feq(v1, v2, tol=1.e-4):
    """Return True when v1 and v2 agree to within the absolute tolerance tol."""
    difference = v1 - v2
    return -tol < difference < tol
class TestCase(unittest.TestCase):
def setUp(self):
    ## no per-test fixtures are required
    pass
def testAtomPairTypes(self):
    """Check the packed atom-pair atom codes for small molecules.

    The expected values are built from the same bit layout the code
    uses: branch count in the low bits, then pi-electron count shifted
    by numBranchBits, then the type index shifted by numPiBits.
    """
    params = rdMD.AtomPairsParameters
    mol = Chem.MolFromSmiles("C=C")
    ## symmetric molecule: both carbons get the same code
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))==\
        rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1)))
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))==\
        1 | (1 | 1<<params.numPiBits)<<params.numBranchBits)
    mol = Chem.MolFromSmiles("C#CO")
    ## asymmetric molecule: terminal vs. internal atoms differ
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))!=\
        rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1)))
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))==\
        1 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1))==\
        2 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(2))==\
        1 | (0 | 3<<params.numPiBits)<<params.numBranchBits)
    ## second argument subtracts from the branch count
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1),1)==\
        1 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
    self.assertTrue(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1),2)==\
        0 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
def testAtomPairs(self):
m = Chem.MolFromSmiles('CCC')
fp1 = rdMD.GetAtomPairFingerprint(m)
fp2 = rdMD.GetAtomPairFingerprint(m, minLength=1, maxLength=2)
nz1 = fp1.GetNonzeroElements()
self.assertEqual(len(nz1), 2)
nz2 = fp2.GetNonzeroElements()
self.assertEqual(len(nz2), 2)
fp2 = rdMD.GetAtomPairFingerprint(m, minLength=1, maxLength=1)
nz2 = fp2.GetNonzeroElements()
self.assertEqual(len(nz2), 1)
def testHashedAtomPairs(self):
m = Chem.MolFromSmiles('c1ccccc1')
fp1 = rdMD.GetHashedAtomPairFingerprint(m, 2048)
fp2 = rdMD.GetHashedAtomPairFingerprint(m, 2048, 1, 3)
self.assertTrue(fp1 == fp2)
fp2 = rdMD.GetHashedAtomPairFingerprint(m, 2048, 1, 2)
sim = DataStructs.DiceSimilarity(fp1, fp2)
self.assertTrue(sim > 0.0 and sim < 1.0)
m = Chem.MolFromSmiles('c1ccccn1')
fp2 = rdMD.GetHashedAtomPairFingerprint(m, 2048)
sim = DataStructs.DiceSimilarity(fp1, fp2)
self.assertTrue(sim > 0.0 and sim < 1.0)
m = Chem.MolFromSmiles('c1ccccc1')
fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m, 2048)
m = Chem.MolFromSmiles('c1ccccn1')
fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m, 2048)
sim = DataStructs.DiceSimilarity(fp1, fp2)
self.assertTrue(sim > 0.0 and sim < 1.0)
def testRootedAtomPairs(self):
m = Chem.MolFromSmiles('Oc1ccccc1')
fp1 = rdMD.GetAtomPairFingerprint(m)
fp2 = rdMD.GetAtomPairFingerprint(m, fromAtoms=(0, ))
nz1 = fp1.GetNonzeroElements()
nz2 = fp2.GetNonzeroElements()
for k, v in nz2.items():
self.assertTrue(v <= nz1[k])
def testTopologicalTorsions(self):
mol = Chem.MolFromSmiles("CC")
fp = rdMD.GetTopologicalTorsionFingerprint(mol)
self.assertTrue(fp.GetTotalVal() == 0)
mol = Chem.MolFromSmiles("CCCC")
fp = rdMD.GetTopologicalTorsionFingerprint(mol)
self.assertTrue(fp.GetTotalVal() == 1)
fp = rdMD.GetTopologicalTorsionFingerprint(mol, 3)
self.assertTrue(fp.GetTotalVal() == 2)
mol = Chem.MolFromSmiles("CCCO")
fp = rdMD.GetTopologicalTorsionFingerprint(mol)
self.assertTrue(fp.GetTotalVal() == 1)
fp = rdMD.GetTopologicalTorsionFingerprint(mol, 3)
self.assertTrue(fp.GetTotalVal() == 2)
mol = Chem.MolFromSmiles("CCCCCCCCCCC")
fp = rdMD.GetTopologicalTorsionFingerprint(mol, 7)
self.assertRaises(ValueError, lambda: rdMD.GetTopologicalTorsionFingerprint(mol, 8))
def testHashedTopologicalTorsions(self):
mol = Chem.MolFromSmiles("c1ncccc1")
fp1 = rdMD.GetHashedTopologicalTorsionFingerprint(mol)
mol = Chem.MolFromSmiles("n1ccccc1")
fp2 = rdMD.GetHashedTopologicalTorsionFingerprint(mol)
self.assertEqual(DataStructs.DiceSimilarity(fp1, fp2), 1.0)
def testRootedTorsions(self):
m = Chem.MolFromSmiles('Oc1ccccc1')
fp1 = rdMD.GetTopologicalTorsionFingerprint(m)
fp2 = rdMD.GetTopologicalTorsionFingerprint(m, fromAtoms=(0, ))
nz1 = fp1.GetNonzeroElements()
nz2 = fp2.GetNonzeroElements()
for k, v in nz2.items():
self.assertTrue(v <= nz1[k])
m = Chem.MolFromSmiles('COCC')
fp1 = rdMD.GetTopologicalTorsionFingerprint(m)
self.assertEqual(len(fp1.GetNonzeroElements()), 1)
fp1 = rdMD.GetTopologicalTorsionFingerprint(m, fromAtoms=(0, ))
self.assertEqual(len(fp1.GetNonzeroElements()), 1)
fp1 = rdMD.GetTopologicalTorsionFingerprint(m, fromAtoms=(1, ))
self.assertEqual(len(fp1.GetNonzeroElements()), 0)
def testMorganFingerprints(self):
mol = Chem.MolFromSmiles('CC(F)(Cl)C(F)(Cl)C')
fp = rdMD.GetMorganFingerprint(mol, 0)
self.assertTrue(len(fp.GetNonzeroElements()) == 4)
mol = Chem.MolFromSmiles('CC')
fp = rdMD.GetMorganFingerprint(mol, 0)
self.assertTrue(len(fp.GetNonzeroElements()) == 1)
self.assertTrue(list(fp.GetNonzeroElements().values())[0] == 2)
fp = rdMD.GetMorganFingerprint(mol, 0, useCounts=False)
self.assertTrue(len(fp.GetNonzeroElements()) == 1)
self.assertTrue(list(fp.GetNonzeroElements().values())[0] == 1)
mol = Chem.MolFromSmiles('CC(F)(Cl)C(F)(Cl)C')
fp = rdMD.GetHashedMorganFingerprint(mol, 0)
self.assertTrue(len(fp.GetNonzeroElements()) == 4)
fp = rdMD.GetMorganFingerprint(mol, 1)
self.assertTrue(len(fp.GetNonzeroElements()) == 8)
fp = rdMD.GetHashedMorganFingerprint(mol, 1)
self.assertTrue(len(fp.GetNonzeroElements()) == 8)
fp = rdMD.GetMorganFingerprint(mol, 2)
self.assertTrue(len(fp.GetNonzeroElements()) == 9)
mol = Chem.MolFromSmiles('CC(F)(Cl)[C@](F)(Cl)C')
fp = rdMD.GetMorganFingerprint(mol, 0)
self.assertTrue(len(fp.GetNonzeroElements()) == 4)
fp = rdMD.GetMorganFingerprint(mol, 1)
self.assertTrue(len(fp.GetNonzeroElements()) == 8)
fp = rdMD.GetMorganFingerprint(mol, 2)
self.assertTrue(len(fp.GetNonzeroElements()) == 9)
fp = rdMD.GetMorganFingerprint(mol, 0, useChirality=True)
self.assertTrue(len(fp.GetNonzeroElements()) == 4)
fp = rdMD.GetMorganFingerprint(mol, 1, useChirality=True)
self.assertTrue(len(fp.GetNonzeroElements()) == 9)
fp = rdMD.GetMorganFingerprint(mol, 2, useChirality=True)
self.assertTrue(len(fp.GetNonzeroElements()) == 10)
mol = Chem.MolFromSmiles('CCCCC')
fp = rdMD.GetMorganFingerprint(mol, 0, fromAtoms=(0, ))
self.assertTrue(len(fp.GetNonzeroElements()) == 1)
mol = Chem.MolFromSmiles('CC1CC1')
vs1 = rdMD.GetConnectivityInvariants(mol)
self.assertEqual(len(vs1), mol.GetNumAtoms())
fp1 = rdMD.GetMorganFingerprint(mol, 2, invariants=vs1)
fp2 = rdMD.GetMorganFingerprint(mol, 2)
self.assertEqual(fp1, fp2)
vs2 = rdMD.GetConnectivityInvariants(mol, False)
self.assertEqual(len(vs2), mol.GetNumAtoms())
self.assertNotEqual(vs1, vs2)
fp1 = rdMD.GetMorganFingerprint(mol, 2, invariants=vs2)
self.assertNotEqual(fp1, fp2)
mol = Chem.MolFromSmiles('Cc1ccccc1')
vs1 = rdMD.GetFeatureInvariants(mol)
self.assertEqual(len(vs1), mol.GetNumAtoms())
self.assertEqual(vs1[0], 0)
self.assertNotEqual(vs1[1], 0)
self.assertEqual(vs1[1], vs1[2])
self.assertEqual(vs1[1], vs1[3])
self.assertEqual(vs1[1], vs1[4])
mol = Chem.MolFromSmiles('FCCCl')
vs1 = rdMD.GetFeatureInvariants(mol)
self.assertEqual(len(vs1), mol.GetNumAtoms())
self.assertEqual(vs1[1], 0)
self.assertEqual(vs1[2], 0)
self.assertNotEqual(vs1[0], 0)
self.assertEqual(vs1[0], vs1[3])
fp1 = rdMD.GetMorganFingerprint(mol, 0, invariants=vs1)
fp2 = rdMD.GetMorganFingerprint(mol, 0, useFeatures=True)
self.assertEqual(fp1, fp2)
def testCrippen(self):
mol = Chem.MolFromSmiles("n1ccccc1CO")
contribs = rdMD._CalcCrippenContribs(mol)
self.assertEqual(len(contribs), mol.GetNumAtoms())
ts = [0] * mol.GetNumAtoms()
contribs = rdMD._CalcCrippenContribs(mol, force=True, atomTypes=ts)
self.assertEqual(ts, [59, 25, 25, 25, 25, 28, 17, 69])
ls = [''] * mol.GetNumAtoms()
contribs = rdMD._CalcCrippenContribs(mol, force=True, atomTypeLabels=ls)
self.assertEqual(ls, ['N11', 'C18', 'C18', 'C18', 'C18', 'C21', 'C10', 'O2'])
def testUSR(self):
mol = Chem.MolFromSmiles("CC")
AllChem.Compute2DCoords(mol)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSR(mol))
mol = Chem.MolFromSmiles("C1CCCCC1")
mol = Chem.AddHs(mol)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSR(mol))
AllChem.Compute2DCoords(mol)
usr = rdMD.GetUSR(mol)
self.failUnlessEqual(len(usr), 12)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRDistributions([]))
conf = mol.GetConformer()
coords = [conf.GetAtomPosition(i) for i in range(mol.GetNumAtoms())]
dist = rdMD.GetUSRDistributions(coords)
self.failUnlessEqual(len(dist), 4)
self.failUnlessEqual(len(dist[0]), mol.GetNumAtoms())
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRFromDistributions([]))
usr2 = rdMD.GetUSRFromDistributions(dist)
self.failUnlessEqual(usr, usr2)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRDistributionsFromPoints(coords, []))
p = []
dist = rdMD.GetUSRDistributions(coords, p)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRDistributionsFromPoints([], p))
dist2 = rdMD.GetUSRDistributionsFromPoints(coords, p)
usr2 = rdMD.GetUSRFromDistributions(dist2)
self.failUnlessEqual(usr, usr2)
mol2 = Chem.MolFromSmiles("C1CCCCC1")
mol2 = Chem.AddHs(mol2)
AllChem.Compute2DCoords(mol2)
usr2 = rdMD.GetUSR(mol2)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRScore(usr, usr2[:2]))
self.failUnlessEqual(rdMD.GetUSRScore(usr, usr2), 1.0)
m1 = [4.44, 2.98, 1.04, 4.55, 4.70, 0.23, 8.30, 16.69, -22.97, 7.37, 15.64, 0.51]
m2 = [4.39, 3.11, 1.36, 4.50, 4.44, 0.09, 8.34, 16.78, -23.20, 7.15, 16.52, 0.13]
self.failUnlessAlmostEqual(rdMD.GetUSRScore(m1, m2), 0.812, 2)
def testUSRCAT(self):
mol = Chem.MolFromSmiles("CC")
AllChem.Compute2DCoords(mol)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRCAT(mol))
mol = Chem.MolFromSmiles("C1CCCCC1")
mol = Chem.AddHs(mol)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRCAT(mol))
AllChem.Compute2DCoords(mol)
usr = rdMD.GetUSRCAT(mol)
self.failUnlessEqual(len(usr), 60)
self.failUnlessRaises(ValueError, lambda : rdMD.GetUSRCAT(mol, atomSelections=[]))
atoms = [[1, 2, 3, 4, 5, 6], []]
usr2 = rdMD.GetUSRCAT(mol, atomSelections=atoms)
self.failUnlessEqual(len(usr2), 36)
atoms = [[1, 2, 3, 4, 5, 6], [], [], []]
usr2 = rdMD.GetUSRCAT(mol, atomSelections=atoms)
self.failUnlessEqual(len(usr2), 60)
self.failUnlessEqual(rdMD.GetUSRScore(usr, usr2, weights=[1.0, 1.0, 1.0, 1.0, 1.0]), 1.0)
def testMolWt(self):
mol = Chem.MolFromSmiles("C")
amw = rdMD._CalcMolWt(mol)
self.assertTrue(feq(amw, 16.043, .001))
amw = rdMD._CalcMolWt(mol, True)
self.assertTrue(feq(amw, 12.011, .001))
mol2 = Chem.AddHs(mol)
amw = rdMD._CalcMolWt(mol2)
self.assertTrue(feq(amw, 16.043, .001))
amw = rdMD._CalcMolWt(mol2, True)
self.assertTrue(feq(amw, 12.011, .001))
mol = Chem.MolFromSmiles("C")
amw = rdMD.CalcExactMolWt(mol)
self.assertTrue(feq(amw, 16.031, .001))
def testPairValues(self):
import base64
testD = (
('CCCO',
b'AQAAAAQAAAAAAIAABgAAACGECAABAAAAIoQIAAEAAABBhAgAAQAAACNEGAABAAAAQUQYAAEAAABC\nRBgAAQAAAA==\n'
),
('CNc1ccco1',
b'AQAAAAQAAAAAAIAAEAAAACOECgABAAAAJIQKAAIAAABBhQoAAgAAAEKFCgABAAAAIsQKAAEAAABB\nxQoAAQAAAELFCgACAAAAIYQQAAEAAABChRAAAQAAAEOFEAACAAAAYYUQAAEAAAAjhBoAAQAAAEGF\nGgABAAAAQoUaAAIAAABhhRoAAQAAAEKIGgABAAAA\n'
), )
for smi, txt in testD:
pkl = base64.decodestring(txt)
fp = rdMD.GetAtomPairFingerprint(Chem.MolFromSmiles(smi))
fp2 = DataStructs.IntSparseIntVect(pkl)
self.assertEqual(DataStructs.DiceSimilarity(fp, fp2), 1.0)
self.assertEqual(fp, fp2)
def testTorsionValues(self):
import base64
testD = (
('CCCO', b'AQAAAAgAAAD/////DwAAAAEAAAAAAAAAIECAAAMAAAABAAAA\n'),
('CNc1ccco1',
b'AQAAAAgAAAD/////DwAAAAkAAAAAAAAAIICkSAEAAAABAAAAKVKgSQEAAAABAAAAKVCgUAEAAAAB\nAAAAKVCgUQEAAAABAAAAKVCkCAIAAAABAAAAKdCkCAIAAAABAAAAKVCgSAMAAAABAAAAKVCkSAMA\nAAABAAAAIICkSAMAAAABAAAA\n'
), )
for smi, txt in testD:
pkl = base64.decodestring(txt)
fp = rdMD.GetTopologicalTorsionFingerprint(Chem.MolFromSmiles(smi))
fp2 = DataStructs.LongSparseIntVect(pkl)
self.assertEqual(DataStructs.DiceSimilarity(fp, fp2), 1.0)
self.assertEqual(fp, fp2)
def testAtomPairOptions(self):
m1 = Chem.MolFromSmiles('c1ccccc1')
m2 = Chem.MolFromSmiles('c1ccccn1')
fp1 = rdMD.GetAtomPairFingerprint(m1)
fp2 = rdMD.GetAtomPairFingerprint(m2)
self.assertNotEqual(fp1, fp2)
fp1 = rdMD.GetAtomPairFingerprint(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetAtomPairFingerprint(m2, atomInvariants=[1] * 6)
self.assertEqual(fp1, fp2)
fp1 = rdMD.GetAtomPairFingerprint(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetAtomPairFingerprint(m2, atomInvariants=[2] * 6)
self.assertNotEqual(fp1, fp2)
fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m1)
fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m2)
self.assertNotEqual(fp1, fp2)
fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m2, atomInvariants=[1] * 6)
self.assertEqual(fp1, fp2)
fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m2, atomInvariants=[2] * 6)
self.assertNotEqual(fp1, fp2)
fp1 = rdMD.GetTopologicalTorsionFingerprint(m1)
fp2 = rdMD.GetTopologicalTorsionFingerprint(m2)
self.assertNotEqual(fp1, fp2)
fp1 = rdMD.GetTopologicalTorsionFingerprint(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetTopologicalTorsionFingerprint(m2, atomInvariants=[1] * 6)
self.assertEqual(fp1, fp2)
fp1 = rdMD.GetTopologicalTorsionFingerprint(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetTopologicalTorsionFingerprint(m2, atomInvariants=[2] * 6)
self.assertNotEqual(fp1, fp2)
fp1 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m1)
fp2 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m2)
self.assertNotEqual(fp1, fp2)
fp1 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m2, atomInvariants=[1] * 6)
self.assertEqual(fp1, fp2)
fp1 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m1, atomInvariants=[1] * 6)
fp2 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m2, atomInvariants=[2] * 6)
self.assertNotEqual(fp1, fp2)
def testMolFormula(self):
m = Chem.MolFromSmiles("[2H]C([3H])O")
formula = rdMD.CalcMolFormula(m)
self.assertEqual(formula, 'CH4O')
formula = rdMD.CalcMolFormula(m, separateIsotopes=True)
self.assertEqual(formula, 'CH2DTO')
formula = rdMD.CalcMolFormula(m, separateIsotopes=True, abbreviateHIsotopes=False)
self.assertEqual(formula, 'CH2[2H][3H]O')
m = Chem.MolFromSmiles("[2H][13CH2]CO")
formula = rdMD.CalcMolFormula(m)
self.assertEqual(formula, 'C2H6O')
formula = rdMD.CalcMolFormula(m, separateIsotopes=True)
self.assertEqual(formula, 'C[13C]H5DO')
def testSpiroAndBridgeheads(self):
m = Chem.MolFromSmiles("C1CC2CCC1CC2")
self.assertEqual(rdMD.CalcNumSpiroAtoms(m), 0)
sa = []
self.assertEqual(rdMD.CalcNumSpiroAtoms(m, atoms=sa), 0)
self.assertEqual(len(sa), 0)
self.assertEqual(rdMD.CalcNumBridgeheadAtoms(m), 2)
sa = []
self.assertEqual(rdMD.CalcNumBridgeheadAtoms(m, atoms=sa), 2)
self.assertEqual(len(sa), 2)
self.assertEqual(sorted(sa), [2, 5])
m = Chem.MolFromSmiles("C1CCC2(C1)CC1CCC2CC1")
self.assertEqual(rdMD.CalcNumSpiroAtoms(m), 1)
sa = []
self.assertEqual(rdMD.CalcNumSpiroAtoms(m, atoms=sa), 1)
self.assertEqual(len(sa), 1)
self.assertEqual(sorted(sa), [3])
self.assertEqual(rdMD.CalcNumBridgeheadAtoms(m), 2)
sa = []
self.assertEqual(rdMD.CalcNumBridgeheadAtoms(m, atoms=sa), 2)
self.assertEqual(len(sa), 2)
self.assertEqual(sorted(sa), [6, 9])
def testNumRotatableBonds(self):
for s in ["C1CC1CC",
"CCNC(=O)NCC",
'Cc1cccc(C)c1c1c(C)cccc1C',
'CCc1cccc(C)c1c1c(C)cccc1CC',
'Cc1cccc(C)c1c1c(C)nccc1C',
'Cc1cccc(C)c1c1c(C)cccc1',
'CCO', ]:
m = Chem.MolFromSmiles(s)
v1 = rdMD.CalcNumRotatableBonds(m)
v2 = rdMD.CalcNumRotatableBonds(m, False)
v3 = rdMD.CalcNumRotatableBonds(m, True)
v4 = rdMD.CalcNumRotatableBonds(m, rdMD.NumRotatableBondsOptions.Default)
v5 = rdMD.CalcNumRotatableBonds(m, rdMD.NumRotatableBondsOptions.NonStrict)
v6 = rdMD.CalcNumRotatableBonds(m, rdMD.NumRotatableBondsOptions.Strict)
v7 = rdMD.CalcNumRotatableBonds(m, rdMD.NumRotatableBondsOptions.StrictLinkages)
self.assertEquals(v1, v4)
self.assertEquals(v2, v5)
self.assertEquals(v3, v6)
def testProperties(self):
props = rdMD.Properties()
names = list(props.GetAvailableProperties())
self.assertEquals(names, list(props.GetPropertyNames()))
m = Chem.MolFromSmiles("C1CC1CC")
results = props.ComputeProperties(m)
for i, name in enumerate(names):
props = rdMD.Properties([name])
res = props.ComputeProperties(m)
self.assertEquals(len(res), 1)
self.assertEquals(res[0], results[i])
self.assertEquals(props.GetPropertyNames()[0], names[i])
self.assertEquals(len(props.GetPropertyNames()), 1)
try:
props = rdMD.Properties([1, 2, 3])
self.assertEquals("should not get here", "but did")
except TypeError:
pass
try:
props = rdMD.Properties(["property that doesn't exist"])
self.assertEquals("should not get here", "but did")
except RuntimeError:
pass
def testPythonDescriptorFunctor(self):
class NumAtoms(Descriptors.PropertyFunctor):
def __init__(self):
Descriptors.PropertyFunctor.__init__(self, "NumAtoms", "1.0.0")
def __call__(self, mol):
return mol.GetNumAtoms()
numAtoms = NumAtoms()
rdMD.Properties.RegisterProperty(numAtoms)
props = rdMD.Properties(["NumAtoms"])
self.assertEquals(1, props.ComputeProperties(Chem.MolFromSmiles("C"))[0])
self.assertTrue("NumAtoms" in rdMD.Properties.GetAvailableProperties())
# check memory
del numAtoms
self.assertEquals(1, props.ComputeProperties(Chem.MolFromSmiles("C"))[0])
self.assertTrue("NumAtoms" in rdMD.Properties.GetAvailableProperties())
m = Chem.MolFromSmiles("c1ccccc1")
properties = rdMD.Properties()
for name, value in zip(properties.GetPropertyNames(), properties.ComputeProperties(m)):
print(name, value)
properties = rdMD.Properties(['exactmw', 'lipinskiHBA'])
for name, value in zip(properties.GetPropertyNames(), properties.ComputeProperties(m)):
print(name, value)
def testPropertyRanges(self):
query = rdMD.MakePropertyRangeQuery("exactmw", 0, 1000)
self.assertTrue(query.Match(Chem.MolFromSmiles("C")))
query = rdMD.MakePropertyRangeQuery("exactmw", 1000, 10000)
self.assertFalse(query.Match(Chem.MolFromSmiles("C")))
def testNumStereoCenters(self):
m = Chem.MolFromSmiles('CC(F)(Cl)[C@H](Cl)Br')
self.assertEqual(rdMD.CalcNumAtomStereoCenters(m),2)
self.assertEqual(rdMD.CalcNumUnspecifiedAtomStereoCenters(m),1)
# Tests from Berend Huisman:
for (smiles, expected) in (("C", 0),
("c1ccccc1", 0),
("CC(Cl)Br", 1),
("CCC(C)C(Cl)Br", 2),
("CCC(C(Cl)Br)C(F)I", 3),
("[H][C@](F)(I)C(CC)C(Cl)Br", 3),
("[H][C@](F)(I)[C@@]([H])(CC)C(Cl)Br", 3), ):
mol = Chem.MolFromSmiles(smiles)
actual = len(Chem.FindMolChiralCenters(mol, includeUnassigned=True))
self.assertEqual(rdMD.CalcNumAtomStereoCenters(mol), expected)
for (smiles, expected) in (("C", 0),
("c1ccccc1", 0),
("CC(Cl)Br", 1),
("CCC(C)C(Cl)Br", 2),
("CCC(C(Cl)Br)C(F)I", 3),
("[H][C@](F)(I)C(CC)C(Cl)Br", 2),
("[H][C@](F)(I)[C@@]([H])(CC)C(Cl)Br", 1), ):
mol = Chem.MolFromSmiles(smiles)
actual = sum(1 for x in Chem.FindMolChiralCenters(mol, includeUnassigned=True) if x[1] == '?')
self.assertEqual(actual, expected)
self.assertEqual(rdMD.CalcNumUnspecifiedAtomStereoCenters(mol), expected)
# Run the full descriptor test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
|
rvianello/rdkit
|
Code/GraphMol/Descriptors/Wrap/testMolDescriptors.py
|
Python
|
bsd-3-clause
| 21,553
|
[
"RDKit"
] |
a90d638fcca5da13ddbc5cf8917d73606f793da5c3de797ff8a2982490a7ae32
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2009 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import sys
from django.core.management import call_command
from django.contrib.auth.models import User
from pootle.i18n.gettext import ugettext as _
from pootle_language.models import Language
from pootle_project.models import Project
def header(exception):
    """Return the opening XHTML (doctype through the first status message)
    for the staggered-install progress page.

    ``exception`` is the database error that triggered the install; its text
    is interpolated into the status message so the admin can see what went
    wrong.  Note the ``%%`` escapes inside the CSS: the template is filled
    with the ``%`` operator, so literal percent signs must be doubled.
    """
    text = """
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
<head>
<title>%(title)s</title>
<meta content="text/html; charset=utf-8" http-equiv="content-type" />
<style type="text/css">
body
{
background-color: #ffffff;
color: #000000;
font-family: Georgia, serif;
margin: 40px auto;
width: 740px;
}
h1
{
font-size: 185%%;
}
ul
{
list-style-type: square;
}
.error
{
background-color: inherit;
color: #d54e21;
font-weight: bold;
}
</style>
</head>
<body>
<h1>%(title)s</h1>
<p class="error">%(msg)s</p>
""" % {'title': _('Pootle: Install'),
       'msg': _('Error: "%s" while attempting to access the Pootle database, will try to initialize database.', exception)}
    return text
def syncdb():
    """Create the database tables and return an HTML progress snippet."""
    message = _('Creating database tables...')
    call_command('syncdb', interactive=False)
    return u"""
<p>%s</p>
""" % message
def initdb():
    """Seed a fresh database (default languages, projects, admin user)."""
    snippet = u"""
<p>%s</p>
""" % _('Creating default languages, projects and admin user')
    call_command('initdb')
    return snippet
def stats_start():
    """Return the opening markup for the statistics progress list."""
    return u"""
<p>%s</p>
<ul>
""" % _('Calculating translation statistics, this will take a few minutes')
def stats_language(language):
    """Return a list item reporting the completion percentage of `language`."""
    message = _('%(language)s is %(percent)d%% complete',
                {'language': language.localname(),
                 'percent': language.translated_percentage()})
    return u"""
<li>%s</li>
""" % message
def stats_project(project):
    """Return a list item reporting the completion percentage of `project`."""
    message = _('Project %(project)s is %(percent)d%% complete',
                {'project': project.fullname,
                 'percent': project.translated_percentage()})
    return u"""
<li>%s</li>
""" % message
def stats_end():
    """Return the closing markup for the statistics progress list."""
    return u"""
</ul>
<p>%s</p>
""" % _('Done calculating statistics for default languages and projects')
def footer():
    """Return the page footer, which reloads to the front page after 10s."""
    endmsg = _('Initialized database, you will be redirected to the front page in 10 seconds')
    return """
<p>%(endmsg)s</p>
<div><script>setTimeout("location.reload()", 10000)</script></div>
</body></html>
""" % {'endmsg': endmsg}
def staggered_install(exception):
    """Initialize the pootle database while displaying progress
    reports for each step.

    Generator: yields HTML fragments that the caller streams to the browser.
    ``exception`` is the database error that triggered the install.
    """
    # django's syncdb command prints progress reports to stdout, but
    # mod_wsgi doesn't like stdout, so we reroute to stderr.
    #
    # Fix: the original restored sys.stdout only on normal completion, so an
    # exception in any step (or an abandoned generator) left stdout rerouted
    # for the rest of the process. try/finally guarantees restoration — in a
    # generator, finally also runs when the generator is closed.
    stdout = sys.stdout
    sys.stdout = sys.stderr
    try:
        yield header(exception)
        # try to build the database tables
        yield syncdb()
        # if this is a fresh install we should add some default languages
        # and projects and a default admin account to make pootle more
        # usable out of the box
        #
        # if there are no user accounts apart from defaults then assume
        # it's fresh install
        if User.objects.hide_defaults().count() == 0:
            yield initdb()
        # first time to visit the front page all stats for projects and
        # languages will be calculated which can take forever, since users
        # don't like webpages that take forever let's precalculate the
        # stats here
        yield stats_start()
        for language in Language.objects.iterator():
            yield stats_language(language)
        for project in Project.objects.iterator():
            yield stats_project(project)
        yield stats_end()
        yield footer()
    finally:
        # bring back stdout even if a step above raised
        sys.stdout = stdout
|
lehmannro/pootle
|
local_apps/pootle_misc/dbinit.py
|
Python
|
gpl-2.0
| 4,588
|
[
"VisIt"
] |
e590e223e379634033220fe68264e0665187fd7d2734e180fffa48db353ae49c
|
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import abel
import bz2
import matplotlib.pylab as plt
# Demonstration of two techniques to determine the anisotropy parameter
#  (a) directly, using `linbasex`
#  (b) from the inverse Abel transformed image
# Load image as a numpy array
imagefile = bz2.BZ2File('data/O2-ANU1024.txt.bz2')
IM = np.loadtxt(imagefile)
# use scipy.misc.imread(filename) to load image formats (.png, .jpg, etc)
# NOTE(review): scipy.misc.imread was removed in SciPy 1.2 — imageio.imread
# is the modern replacement.
# === linbasex transform ===================================
legendre_orders = [0, 2, 4]  # Legendre polynomial orders
proj_angles = range(0, 180, 10)  # projection angles in 10 degree steps
radial_step = 1  # pixel grid
smoothing = 0.9  # smoothing 1/e-width for Gaussian convolution smoothing
threshold = 0.2  # threshold for normalization of higher order Newton spheres
clip = 0  # clip first vectors (smallest Newton spheres) to avoid singularities
# linbasex method - center and center_options ensure image has odd square shape
LIM = abel.Transform(IM, method='linbasex', center='slice',
                     center_options=dict(square=True),
                     transform_options=dict(basis_dir=None,
                                            proj_angles=proj_angles, radial_step=radial_step,
                                            smoothing=smoothing, threshold=threshold, clip=clip,
                                            return_Beta=True, verbose=True))
# === Hansen & Law inverse Abel transform ==================
HIM = abel.Transform(IM, center="slice", method="hansenlaw",
                     symmetry_axis=None, angular_integration=True)
# speed distribution
radial, speed = HIM.angular_integration
# normalize to max intensity peak
speed /= speed[200:].max()  # exclude transform noise near centerline of image
# PAD - photoelectron angular distribution from image ======================
# Note: `linbasex` provides the anisotropy parameter directly LIM.Beta[1]
#       here we extract I vs theta for given radial ranges
#       and use fitting to determine the anisotropy parameter
#
# radial ranges (of spectral features) to follow intensity vs angle
# view the speed distribution to determine radial ranges
r_range = [(145, 162), (200, 218), (230, 250), (255, 280), (280, 310),
           (310, 330), (330, 350), (350, 370), (370, 390), (390, 410),
           (410, 430)]
# anisotropy parameter from image for each tuple r_range
Beta, Amp, Rmid, Ivstheta, theta =\
    abel.tools.vmi.radial_integration(HIM.transform, r_range)
# OR anisotropy parameter for ranges (0, 20), (20, 40) ...
# Beta_whole_grid, Amp_whole_grid, Radial_midpoints =\
#     abel.tools.vmi.anisotropy(AIM.transform, 20)
# Radial intensity and anisotropy distributions
I, beta2 = abel.tools.vmi.Ibeta(HIM.transform, window=9)
# normalize to max intensity peak
I /= I.max()
# remove (noisy) anisotropy values for low-intensity parts
beta2[I < 0.01] = np.nan
# plots of the analysis
fig = plt.figure(figsize=(8, 4))
ax1 = plt.subplot(121)
ax2 = plt.subplot(122)
# join 1/2 raw data : 1/2 inversion image
rows, cols = IM.shape
c2 = cols//2
# scale the transformed half to the raw half so they share one colormap
vmax = IM[:, :c2-100].max()
AIM = HIM.transform
AIM *= vmax/AIM[:, c2+100:].max()
JIM = np.concatenate((IM[:, :c2], AIM[:, c2:]), axis=1)
# Plot the image data VMI | inverse Abel
im1 = ax1.imshow(JIM, origin='lower', aspect='auto', vmin=0, vmax=vmax)
fig.colorbar(im1, ax=ax1, fraction=.1, shrink=0.9, pad=0.03)
ax1.set_xlabel('x (pixels)')
ax1.set_ylabel('y (pixels)')
ax1.set_title('VMI, inverse Abel: {:d}×{:d}'.format(rows, cols))
# Plot the 1D speed distribution
line01, = ax2.plot(LIM.Beta[0], 'r-', label='linbasex-Beta[0]')
line02, = ax2.plot(speed, 'b-', label='speed')
line03, = ax2.plot(I, 'c--', label='$I(r)$')
legend0 = ax2.legend(handles=[line01, line02, line03],
                     frameon=False, labelspacing=0.1, numpoints=1, loc=2,
                     fontsize='small')
plt.gca().add_artist(legend0)
# Plot anisotropy parameter, attribute Beta[1], x speed
line11, = ax2.plot(LIM.Beta[1], 'r-', label='linbasex-Beta[2]')
BetaT = np.transpose(Beta)
line12 = ax2.errorbar(Rmid, BetaT[0], BetaT[1], fmt='.', color='g',
                      label='specific radii')
# ax2.plot(Radial_midpoints, Beta_whole_grid[0], '-g', label='stepped')
line13, = ax2.plot(beta2, 'c', label=r'$\beta_2(r)$')
legend1 = ax2.legend(handles=[line11, line12, line13],
                     frameon=False, labelspacing=0.1, numpoints=1, loc=3,
                     fontsize='small')
ax2.axis(xmin=100, xmax=450, ymin=-1.2, ymax=1.2)
ax2.set_xlabel('radial pixel')
ax2.set_ylabel('speed/anisotropy')
ax2.set_title('speed/anisotropy distribution')
plt.subplots_adjust(left=0.06, bottom=0.17, right=0.95, top=0.89,
                    wspace=0.35, hspace=0.37)
# Save an image of the plot
plt.savefig("plot_example_PAD.png", dpi=100)
# Show the plots
plt.show()
|
stggh/PyAbel
|
examples/example_anisotropy_parameter.py
|
Python
|
mit
| 4,917
|
[
"Gaussian"
] |
f9f509de9b4bf09f6f1e1aa29bc26c568423eda277485281258194e81e6a595c
|
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino
#
# The licence is in the file __manifest__.py
#
##############################################################################
from datetime import datetime
import werkzeug
from dateutil.relativedelta import relativedelta
from odoo import http, _, fields
from odoo.addons.http_routing.models.ir_http import slug
from odoo.addons.website.models.ir_http import sitemap_qs2dom
from odoo.http import request
from odoo.addons.cms_form.controllers.main import FormControllerMixin
from odoo.addons.cms_form_compassion.controllers.payment_controller import (
PaymentFormController,
)
class EventsController(PaymentFormController, FormControllerMixin):
def sitemap_events(env, rule, qs):
today = fields.Date.to_string(datetime.today())
events = env["crm.event.compassion"]
dom = sitemap_qs2dom(qs, '/events', events._rec_name)
dom += request.website.website_domain()
dom += [("website_published", "=", True), ("end_date", ">=", today)]
for reg in events.search(dom):
loc = '/event/%s' % slug(reg)
if not qs or qs.lower() in loc:
yield {'loc': loc}
def sitemap_participants(env, rule, qs):
registrations = env["event.registration"]
dom = sitemap_qs2dom(qs, '/event', registrations._rec_name)
dom += request.website.website_domain()
dom += [("website_published", "=", True)]
for reg in registrations.search(dom):
loc = '/event/%s/%s' % (slug(reg.compassion_event_id), slug(reg))
if not qs or qs.lower() in loc:
yield {'loc': loc}
@http.route("/events/", auth="public", website=True, sitemap=False)
def list(self, **kwargs):
today = fields.Date.to_string(datetime.today())
# Events that are set to finish after today
started_events = request.env["crm.event.compassion"].search([
("website_published", "=", True), ("end_date", ">=", today),
])
if len(started_events) == 1:
return request.redirect("/event/" + str(started_events.id))
return request.render(
"website_event_compassion.list", {"events": started_events}
)
###################################################
# Methods for the event page and event registration
###################################################
@http.route(
'/event/<model("crm.event.compassion"):event>/', auth="public", website=True,
sitemap=sitemap_events
)
def event_page(self, event, **kwargs):
if not event.is_published and request.env.user.share:
return request.redirect("/events")
if not event.can_access_from_current_website():
raise werkzeug.exceptions.NotFound()
values = self.get_event_page_values(event, **kwargs)
registration_form = values["form"]
if registration_form.form_success:
# The user submitted a registration, redirect to confirmation
result = werkzeug.utils.redirect(
registration_form.form_next_url(), code=303
)
else:
# Check if registration was already present
errors = registration_form.form_render_values.get("errors")
if errors and errors.get("_integrity"):
request.env.cr.rollback()
# Replace error message with more friendly text.
request.website.get_status_message()
request.website.add_status_message(
_("You are already registered to this trip."),
type_="danger",
title=_("Error"),
)
# Display the Event page
result = request.render(values.pop("website_template"), values)
if event.event_type_id.sudo().travel_features:
# Travel events are full not called by AJAX popup form
return result
return result
@http.route(
'/event/<model("crm.event.compassion"):event>/faq', auth="public", website=True,
sitemap=False
)
def event_faq(self, event, **kwargs):
if not event.is_published:
return request.redirect("/events")
return request.render("website_event_compassion.event_faq", {"event": event})
@http.route(
'/event/<model("event.event"):event>/registration/'
'<int:registration_id>/success',
auth="public",
website=True, sitemap=False
)
def registration_success(self, event, registration_id, **kwargs):
limit_date = datetime.now() - relativedelta(days=1)
registration = request.env["event.registration"].sudo().browse(registration_id)
if not registration.exists() or registration.create_date < limit_date:
return request.redirect("/events")
values = {"event": event, "attendees": registration}
return request.render(
"website_event_compassion.event_registration_successful", values
)
@http.route(
'/event/<model("crm.event.compassion"):event>/confirmation/',
auth="public",
website=True, sitemap=False
)
def confirmation_page(self, event, **kwargs):
if not event.is_published:
return request.redirect("/events")
values = {
"confirmation_title": kwargs.get("title"),
"confirmation_message": kwargs.get("message"),
"event": event,
}
return request.render(
"website_event_compassion.event_confirmation_page", values
)
def get_event_page_values(self, event, **kwargs):
"""
Processes the registration form and gets the values used by the website to
render the event page.
:param event: crm.event.compassion record to render
:param kwargs: request arguments
:return: dict: values for the event website template
(must contain event, start_date, end_date, form,
main_object and website_template values)
"""
values = kwargs.copy()
# This allows the translation to still work on the page
values.pop("edit_translations", False)
values.update(
{
"event": event,
"start_date": event.get_date("start_date", "date_full"),
"end_date": event.get_date("end_date", "date_full"),
"additional_title": _("- Registration"),
}
)
# Travel display only registration form, others do have a page.
template = "website_event_compassion."
if event.event_type_id.sudo().travel_features:
values["form_model_key"] = "cms.form.group.visit.registration"
template += "event_full_page_form"
else:
template += "event_page"
registration_form = self.get_form("event.registration", **values)
registration_form.form_process()
values.update(
{
"form": registration_form,
"main_object": event,
"website_template": template,
"event_step": 1,
}
)
return values
###################################################
# Methods for the participant page and the donation
###################################################
@http.route(
[
"/event/<model('crm.event.compassion'):event>/<reg_string>-<int:reg_id>",
"/event/<model('crm.event.compassion'):event>/<int:reg_id>",
],
auth="public", website=True, sitemap=sitemap_participants
)
def participant_details(self, event, reg_id, **kwargs):
"""
:param event: the event record
:param reg_id: the registration record
:return:the rendered page
"""
if not event.is_published:
return request.redirect("/events")
reg_obj = request.env["event.registration"].sudo()
registration = reg_obj.browse(reg_id).exists().filtered("website_published")
if not registration:
return werkzeug.utils.redirect("/event/" + str(event.id), 301)
kwargs["form_model_key"] = "cms.form.event.donation"
values = self.get_participant_page_values(event, registration, **kwargs)
donation_form = values["form"]
if donation_form.form_success:
# The user submitted a donation, redirect to confirmation
result = werkzeug.utils.redirect(donation_form.form_next_url(), code=303)
else:
result = request.render(values["website_template"], values)
return result
def get_participant_page_values(self, event, registration, **kwargs):
"""
Gets the values used by the website to render the participant page.
:param event: crm.event.compassion record to render
:param registration: event.registration record to render
:param kwargs: request arguments
:return: dict: values for the event website template
(must contain event, start_date, end_date, form,
main_object and website_template values)
"""
values = kwargs.copy()
# This allows the translation to still work on the page
values.pop("edit_translations", False)
values.update({
"event": event, "registration": registration,
})
donation_form = self.get_form(False, **values)
donation_form.form_process()
values.update(
{
"form": donation_form,
"main_object": registration,
"website_template": "website_event_compassion.participant_page",
}
)
return values
########################################
# Methods for after donation redirection
########################################
@http.route("/event/payment/validate/<int:invoice_id>",
type="http", auth="public", website=True,
sitemap=False)
def donation_payment_validate(self, invoice_id=None, **kwargs):
""" Method that should be called by the server when receiving an update
for a transaction.
"""
try:
invoice = request.env["account.invoice"].browse(int(invoice_id)).sudo()
invoice.exists().ensure_one()
transaction = invoice.get_portal_last_transaction()
except ValueError:
transaction = request.env["payment.transaction"]
invoice_lines = invoice.invoice_line_ids
event = invoice_lines.mapped("event_id")
if transaction.state != "done":
return request.render(
self.get_donation_failure_template(event), {"error_intro": ""}
)
ambassador = invoice_lines.mapped("user_id")
registration = event.registration_ids.filtered(
lambda r: r.partner_id == ambassador
)
values = {"registration": registration, "event": event, "error_intro": ""}
success_template = self.get_donation_success_template(event)
return request.render(success_template, values)
@http.route(
"/event/payment/gpv_payment_validate/<int:invoice_id>", type="http",
auth="public", website=True, sitemap=False
)
def down_payment_validate(self, invoice_id=None, **post):
""" Method that should be called by the server when receiving an update
for a transaction.
"""
failure_template = "website_event_compassion.donation_failure"
error_intro = _(
"Thank you for your efforts in the Compassion trip registration " "process."
)
try:
invoice = request.env["account.invoice"].browse(int(invoice_id))
invoice.exists().ensure_one()
tx = invoice.get_portal_last_transaction()
except ValueError:
tx = request.env["payment.transaction"]
if tx.state != "done":
return request.render(failure_template, {"error_intro": error_intro})
invoice_lines = invoice.invoice_line_ids
event = invoice_lines.mapped("event_id")
registration = tx.registration_id
post.update(
{
"attendees": registration,
"event": event,
"confirmation_title": _("We are glad to confirm your registration!"),
"confirmation_message": _(
"Thank you for your efforts in the Compassion trip "
"registration process."
)
+ "<br/><br/>"
+ _(
"Your payment was successful and your are now a confirmed "
"participant of the trip. You will receive all the "
"documentation for the preparation of your trip by e-mail in "
"the coming weeks."
),
"error_intro": error_intro,
}
)
template = "website_event_compassion.event_confirmation_page"
if invoice == registration.group_visit_invoice_id:
post["confirmation_message"] = _(
"Congratulations! Everything is ready for this beautiful "
"trip to happen. You will receive all the practical "
"information about the trip preparation a few weeks before "
"the departure. Until then, don't hesitate to contact us if "
"you have any question."
)
return super().compassion_payment_validate(
tx, template, failure_template, **post
)
def get_donation_success_template(self, event):
"""
Gets the website templates for donation confirmation
:param event: crm.event.compassion record
:return: xml_id of website template
"""
return "website_event_compassion.donation_successful"
|
CompassionCH/compassion-switzerland
|
website_event_compassion/controllers/events_controller.py
|
Python
|
agpl-3.0
| 14,186
|
[
"VisIt"
] |
d682fb07d2682fcbd2321cf75a52d4a51c679c2c0fe266b0655150579e956bc3
|
#!/usr/bin/python
from datetime import datetime, timedelta
import httplib2
import os
import sys
import pandas as pd
from pprint import pprint as pp
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import argparser, run_flow
# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google Developers Console at
# https://console.developers.google.com/.
# Please ensure that you have enabled the YouTube Data and YouTube Analytics
# APIs for your project.
# For more information about using OAuth2 to access the YouTube Data API, see:
# https://developers.google.com/youtube/v3/guides/authentication
# For more information about the client_secrets.json file format, see:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
# Name of the OAuth 2.0 client secrets file, expected next to this script.
CLIENT_SECRETS_FILE = "client_secrets.json"
# These OAuth 2.0 access scopes allow for read-only access to the authenticated
# user's account for both YouTube Data API resources and YouTube Analytics Data.
YOUTUBE_SCOPES = ["https://www.googleapis.com/auth/youtube.readonly",
                  "https://www.googleapis.com/auth/yt-analytics.readonly"]
# Service names and versions passed to apiclient.discovery.build().
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
YOUTUBE_ANALYTICS_API_SERVICE_NAME = "youtubeAnalytics"
YOUTUBE_ANALYTICS_API_VERSION = "v1"
# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the Developers Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_services(args):
    """Run the OAuth 2.0 flow and build the two YouTube API clients.

    Credentials are cached in a "<script name>-oauth2.json" file, so the
    browser-based flow only runs on first use or when the stored credentials
    are invalid.

    :param args: parsed oauth2client argparser flags (forwarded to run_flow)
    :return: (youtube, youtube_analytics) tuple of API service objects
    """
    flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
                                   scope=" ".join(YOUTUBE_SCOPES),
                                   message=MISSING_CLIENT_SECRETS_MESSAGE)
    storage = Storage("%s-oauth2.json" % sys.argv[0])
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run_flow(flow, storage, args)
    http = credentials.authorize(httplib2.Http())
    youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                    http=http)
    youtube_analytics = build(YOUTUBE_ANALYTICS_API_SERVICE_NAME,
                              YOUTUBE_ANALYTICS_API_VERSION, http=http)
    return (youtube, youtube_analytics)
def get_channel_id(youtube):
    """Return the channel ID of the authenticated user's own channel."""
    response = youtube.channels().list(mine=True, part="id").execute()
    return response["items"][0]["id"]
def run_analytics_report(youtube_analytics, channel_id, options):
    """Query the YouTube Analytics API and return (rows, column_headers).

    :param youtube_analytics: YouTube Analytics API service object
    :param channel_id: channel to report on ("channel==<id>" filter)
    :param options: argparse namespace with metrics, dimensions, start_date,
        end_date, max_results and sort attributes
    :return: tuple (rows, columnHeaders) from the API response; rows may be
        None when the report is empty.
    """
    # Call the Analytics API to retrieve a report. For a list of available
    # reports, see:
    # https://developers.google.com/youtube/analytics/v1/channel_reports
    analytics_query_response = youtube_analytics.reports().query(
        ids="channel==%s" % channel_id,
        metrics=options.metrics,
        dimensions=options.dimensions,
        start_date=options.start_date,
        end_date=options.end_date,
        max_results=options.max_results,
        sort=options.sort
    ).execute()
    # Fix: use the single-argument print() call form, which behaves
    # identically under Python 2 and Python 3 (the original print statements
    # were a SyntaxError under Python 3). Dead commented-out code removed.
    print("Analytics Data for Channel %s" % channel_id)
    pp(analytics_query_response)
    headers = analytics_query_response.get("columnHeaders", [])
    analytics_results = analytics_query_response.get("rows")
    pp(analytics_results)
    pp(headers)
    return analytics_results, headers
if __name__ == "__main__":
now = datetime.now()
start = (now - timedelta(days=150)).strftime("%Y-%m-%d")
end = (now - timedelta(days=1)).strftime("%Y-%m-%d")
argparser.add_argument("--metrics", help="Report metrics",
default="views,averageViewDuration,averageViewPercentage")
argparser.add_argument("--dimensions", help="Report dimensions",
default="day,subscribedStatus")
argparser.add_argument("--start-date", default=start,
help="Start date, in YYYY-MM-DD format")
argparser.add_argument("--end-date", default=end,
help="End date, in YYYY-MM-DD format")
argparser.add_argument("--max-results", help="Max results", default=90)
argparser.add_argument("--sort", help="Sort order", default="-day")
args = argparser.parse_args()
(youtube, youtube_analytics) = get_authenticated_services(args)
try:
channel_id = get_channel_id(youtube)
analytics_results, headers = run_analytics_report(youtube_analytics, channel_id, args)
df_total_views = pd.DataFrame(analytics_results,columns=["date","subscribedStatus","views","averageViewDuration","averageViewPercentage"])
# df_total_views.loc[:,"netSubscribers"] = df_total_views["subscribersGained"] - df_total_views["subscribersLost"]
# df_total_views.loc[:,"totalWatchTime"] = df_total_views["views"]*df_total_views["averageViewDuration"]
df_total_views.set_index("date",inplace=True)
df_total_views.to_csv("Youtube Subscribed vs Not Subscribed.csv")
except HttpError, e:
print "An HTTP error %d occurred:\n%s" % (e.resp.status, e.content)
|
facemelters/data-science
|
Atlas/test-youtube2.py
|
Python
|
gpl-2.0
| 5,639
|
[
"VisIt"
] |
60e76aab617219fab6838eda2f145adaa8e195496ae9c04f227318e063700b2f
|
from JMol import JMol
from JSMol import JSMol
|
mtthwflst/terse
|
Engine3D/__init__.py
|
Python
|
mit
| 46
|
[
"Jmol"
] |
e976ec79bf02d2c9783ae7e7cd5ea9a346cc086c4c8f555db351ccb800577e66
|
import os
import re
import sys
import wx
import IO
import shutil
import oscaar
import urllib2
import zipfile
import datetime
import subprocess
import webbrowser
import numpy as np
import systematics
import timeConversions
from glob import glob
from matplotlib import pyplot
from mathMethods import medianBin
from oscaar.extras.knownSystemParameters import returnSystemParams
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigCanvas
APP_EXIT = 1
class OscaarFrame(wx.Frame):
'''
This class is the main frame of the OSCAAR GUI.
'''
    def __init__(self, parent, objectID):
        '''
        This method defines the initialization of this class.

        It builds the whole main OSCAAR window: state flags for every child
        frame, the logo bitmap, the path-entry boxes, the parameter and radio
        boxes, the action buttons with their event bindings, the sizers that
        lay everything out, the menu bar, and the default values read from
        init.par via self.setDefaults().
        '''
        # Flags tracking which child frames/dialogs are currently open, so
        # only one instance of each can exist at a time.
        self.aboutOpen = False
        self.loadOldPklOpen = False
        self.loadFittingOpen = False
        self.etdOpen = False
        self.loadMasterFlat = False
        self.overWrite = False
        self.ds9Open = False
        self.messageFrame = False
        self.IP = wx.Frame
        self.loadFitError = False
        self.loadEphFrame = False
        self.singularOccurance = 0
        self.extraRegionsOpen = False
        self.programmersEdit = False
        self.loadObservatoryFrame = False
        self.preprocessedImagesFrame = False
        # Observatory parameters, overridden by init.par in setDefaults().
        self.ccdGain = "1.0"
        self.exposureTime = "JD"
        self.switchTimes = 0
        self.title = "OSCAAR"
        wx.Frame.__init__(self, None, -1, self.title)
        self.panel = wx.Panel(self)
        # Windows needs a smaller, bold font to render the labels correctly.
        if sys.platform == "win32":
            self.fontType = wx.Font(9, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        else:
            self.fontType = wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL)
        self.static_bitmap = wx.StaticBitmap(self.panel)
        self.logo = wx.Image(os.path.join(os.path.dirname(__file__), 'images',
                             'logo4.png'), wx.BITMAP_TYPE_ANY)
        self.bitmap = wx.BitmapFromImage(self.logo)
        self.static_bitmap.SetBitmap(self.bitmap)
        self.paths = AddLCB(self.panel, -1, name="mainGUI", rowNum=5, vNum=15,
                            hNum=5, font=self.fontType)
        self.topBox = wx.BoxSizer(wx.HORIZONTAL)
        self.topBox.Add(self.paths, border=5, flag=wx.ALL)
        # (key, label, tooltip, default) tuples for the left parameter box.
        tupleList = [('zoom', "Track Zoom: ",
                      'Enter a number for the zoom here.', '15'),
                     ('radius', "Aperture Radius: ",
                      'Enter a decimal for the radius here.', '4.5'),
                     ('smoothing', "Smoothing Constant: ",
                      'Enter an integer for smoothing here.', '3')]
        self.leftBox = ParameterBox(self.panel, -1, tupleList, rows=5, cols=2,
                                    vNum=10, hNum=10, font=self.fontType)
        tupleList = [('ingress', "Ingress, UT (YYYY/MM/DD)",
                      "Enter a date in the correct format here.",
                      "YYYY/MM/DD"),
                     ('egress', "Egress, UT (YYYY/MM/DD)",
                      "Enter a date in the correct format here.",
                      "YYYY/MM/DD"),
                     ('rbTrackPlot', "Tracking Plots: ", "On", "Off"),
                     ('rbPhotPlot', "Photometry Plots: ", "On", "Off"),
                     ('rbFitAfterPhot', "Fit After Photometry ", "On", "Off")]
        self.radioBox = ParameterBox(self.panel, -1, tupleList, rows=5, cols=3,
                                     vNum=10, hNum=10, font=self.fontType)
        self.sizer0 = wx.FlexGridSizer(rows=1, cols=4)
        self.buttonBox = wx.BoxSizer(wx.HORIZONTAL)
        self.buttonBox.Add(self.sizer0, 0, wx.ALIGN_CENTER | wx.ALL, 5)
        # Action buttons and their event bindings.
        self.ephButton = wx.Button(self.panel, label="Ephemeris")
        self.masterFlatButton = wx.Button(self.panel,
                                          label="Master Flat Maker")
        self.ds9Button = wx.Button(self.panel, label="Open DS9")
        self.runButton = wx.Button(self.panel, label="Run")
        self.observatoryButton = wx.Button(self.panel, label="Extra " + \
                                           "Observatory Parameters")
        self.Bind(wx.EVT_BUTTON,
                  lambda evt: self.singularExistance(evt,
                                                     self.loadObservatoryFrame,
                                                     "observatory"),
                  self.observatoryButton)
        self.Bind(wx.EVT_BUTTON,
                  lambda evt: self.singularExistance(evt, self.loadEphFrame,
                                                     "ephemeris"),
                  self.ephButton)
        self.Bind(wx.EVT_BUTTON,
                  lambda evt: self.singularExistance(evt, self.loadMasterFlat,
                                                     "masterFlat"),
                  self.masterFlatButton)
        self.Bind(wx.EVT_BUTTON,
                  lambda evt: self.singularExistance(evt, self.ds9Open,
                                                     "ds9"),
                  self.ds9Button)
        self.Bind(wx.EVT_BUTTON, self.runOscaar, self.runButton)
        self.sizer0.Add(self.ephButton, 0, wx.ALIGN_CENTER | wx.ALL, 5)
        self.sizer0.Add(self.masterFlatButton, 0, wx.ALIGN_CENTER | wx.ALL, 5)
        self.sizer0.Add(self.ds9Button, 0, wx.ALIGN_CENTER | wx.ALL, 5)
        self.sizer0.Add(self.runButton, 0, wx.ALIGN_CENTER | wx.ALL, 5)
        # Nested sizers composing the final window layout.
        self.rightBox = wx.BoxSizer(wx.VERTICAL)
        self.rightBox.Add(self.radioBox, 0, flag=wx.ALIGN_CENTER | wx.ALL,
                          border=5)
        self.rightBox.Add(self.buttonBox, 0, flag=wx.ALIGN_CENTER | wx.ALL,
                          border=5)
        self.leftBox2 = wx.BoxSizer(wx.VERTICAL)
        self.leftBox2.Add(self.leftBox, 0, flag=wx.ALIGN_CENTER | wx.ALL,
                          border=5)
        self.leftBox2.Add(self.observatoryButton, 0, flag=wx.ALIGN_CENTER |
                          wx.ALL, border=5)
        self.bottomBox = wx.BoxSizer(wx.HORIZONTAL)
        self.bottomBox.Add(self.leftBox2, 0, flag=wx.ALIGN_CENTER)
        self.bottomBox.Add(self.rightBox, 0, flag=wx.ALIGN_CENTER | wx.ALL,
                           border=5)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.static_bitmap, 0, flag=wx.ALIGN_LEFT)
        self.vbox.Add(self.topBox, 0, flag=wx.ALIGN_CENTER)
        self.vbox.Add(self.bottomBox, 0, flag=wx.CENTER | wx.ALL, border=5)
        self.create_menu()
        self.CreateStatusBar()
        self.vbox.AddSpacer(10)
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)
        self.setDefaults()
        iconloc = os.path.join(os.path.dirname(__file__), 'images',
                               'logo4noText.ico')
        icon1 = wx.Icon(iconloc, wx.BITMAP_TYPE_ICO)
        self.SetIcon(icon1)
        self.Center()
        self.Show()
def create_menu(self):
'''
This method creates the menu bars that are at the top of the main GUI.
Notes
-----
This method has no input or return parameters. It will simply be used
as self.create_menu() when in the initialization method for an
OscaarFrame instance.
'''
menubar = wx.MenuBar()
menu_file = wx.Menu()
m_quit = menu_file.Append(wx.ID_EXIT, "Quit\tCtrl+Q",
"Quit this application.")
self.Bind(wx.EVT_MENU, self.on_exit, m_quit)
menu_help = wx.Menu()
m_help = menu_help.Append(wx.ID_HELP, "Help\tCtrl+H",
"More Information about how to use this" + \
" application.")
self.Bind(wx.EVT_MENU,
lambda evt: self.openLink(evt,
"https://github.com/OSCAAR/" + \
"OSCAAR/tree/master/docs/" + \
"documentationInProgress"),
m_help)
menu_oscaar = wx.Menu()
m_loadOld = menu_oscaar.Append(-1, "Load old output\tCtrl+L",
"Load an old output file for " + \
"further analysis.")
m_loadFitting = menu_oscaar.Append(-1, "Fitting Routines\tCtrl-F",
"Different fitting methods for " + \
"analysis of an old .pkl file.")
m_extraRegions = menu_oscaar.Append(-1, "Extra Regions File Sets",
"Add extra regions files to " + \
"specific referenced images.")
self.Bind(wx.EVT_MENU,
lambda evt: self.singularExistance(evt, self.loadOldPklOpen,
"loadOld"),
m_loadOld)
self.Bind(wx.EVT_MENU,
lambda evt: self.singularExistance(evt, self.loadFittingOpen,
"loadFitting"),
m_loadFitting)
self.Bind(wx.EVT_MENU,
lambda evt: self.singularExistance(evt,
self.extraRegionsOpen,
"extra"),
m_extraRegions)
menu_czech = wx.Menu()
m_etd = menu_czech.Append(-1, "Czech ETD Format", "Take a .pkl file " \
"and convert the data to a format that is " \
"accepted by the Czech Astronomical " \
"Society's exoplanet transit database.")
m_ttp = menu_czech.Append(-1, "Transit Time Predictions",
"Transit time predictions from the " + \
"Czech Astronomical Society.")
self.Bind(wx.EVT_MENU,
lambda evt: self.openLink(evt,
"http://var2.astro.cz/ETD/" + \
"predictions.php"),
m_ttp)
self.Bind(wx.EVT_MENU,
lambda evt: self.singularExistance(evt, self.etdOpen, "etd"),
m_etd)
menu_update = wx.Menu()
m_update = menu_update.Append(-1, "Check For Updates", "Check to see" \
"if you have the latest commit for " \
"this version of oscaar.")
self.Bind(wx.EVT_MENU, self.checkSHA, m_update)
menu_about = wx.Menu()
m_about = menu_about.Append(-1, "About", "Contributors of OSCAAR.")
self.Bind(wx.EVT_MENU,
lambda evt: self.singularExistance(evt, self.aboutOpen,
"about"),
m_about)
menubar.Append(menu_file, "File")
menubar.Append(menu_oscaar, "Oscaar")
menubar.Append(menu_czech, "Czech ETD")
menubar.Append(menu_update, "Update")
menubar.Append(menu_help, "Help")
menubar.Append(menu_about, "About")
self.SetMenuBar(menubar)
    def runOscaar(self, event):
        '''
        This method will activate when the run button on the main GUI is
        pressed. It executes the differentialPhotometry.py script.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The *
            represents a wild card value.

        Notes
        -----
        There is nothing to return for this method. Upon completion a window
        will open with the light curve that was produced from the data and
        input parameters.
        '''
        self.values = {}
        # Validate all input fields first; the check helpers return "" when
        # the input is valid, or the offending text otherwise.
        invalidDarkFrames = self.checkFileInputs(self.paths.boxList[1].
                                                 GetValue(), saveNum=1)
        masterFlat = self.paths.boxList[2].GetValue().strip()
        invalidDataImages = self.checkFileInputs(self.paths.boxList[3].
                                                 GetValue(), saveNum=3)
        regionsFile = self.paths.boxList[4].GetValue().strip()
        self.outputFile = self.paths.boxList[5].GetValue().strip()
        self.values["radius"] = self.leftBox.userParams["radius"].GetValue()
        self.radiusError = "radius"
        if invalidDarkFrames != "":
            self.IP = InvalidParameter(invalidDarkFrames, self, -1,
                                       stringVal="fits",
                                       secondValue="the path to Dark Frames")
        elif masterFlat != "" and (os.path.isfile(masterFlat) != True or \
             (masterFlat.lower().endswith(".fit") != True and \
              masterFlat.lower().endswith(".fits") != True)):
            # Reformat a comma-separated master-flat entry for the error popup.
            tempString = masterFlat
            if len(masterFlat.split(",")) > 1:
                tempString = ""
                for string in masterFlat.split(","):
                    if string == "" and len(masterFlat.split(",")) == 2:
                        tempString += ","
                    else:
                        tempString += "\n" + string.strip()
            self.IP = InvalidParameter(tempString, self, -1,
                                       stringVal="master",
                                       secondValue="path to the Master Flat")
        elif invalidDataImages != "":
            self.IP = InvalidParameter(invalidDataImages, self, -1,
                                       stringVal="fits",
                                       secondValue="the path to Data Images")
        elif self.checkRegionsBox(regionsFile) == False:
            pass
        elif not os.path.isdir(self.outputFile.rpartition(str(os.sep))[0]) or \
             not len(self.outputFile) > \
             (len(self.outputFile[:self.outputFile.rfind(os.sep)]) + 1):
            self.IP = InvalidParameter(self.outputFile, self, -1,
                                       stringVal="output",
                                       secondValue="output file")
        elif self.checkAperture(self.values["radius"]) != True:
            self.IP = InvalidParameter(self.leftBox.userParams["radius"].
                                       GetValue(), self, -1,
                                       stringVal=self.radiusError)
        elif self.timeAndDateCheck(self.radioBox.userParams['ingress1'].
                                   GetValue(),
                                   self.radioBox.userParams['egress1'].
                                   GetValue(),
                                   self.radioBox.userParams['ingress'].
                                   GetValue(),
                                   self.radioBox.userParams['egress'].
                                   GetValue()) == True:
            # All inputs valid: persist the parameters and launch photometry.
            try:
                tempList = ["smoothing", "zoom"]
                for string in tempList:
                    self.values[string] = int(self.leftBox.userParams[string].GetValue())
                    self.leftBox.userParams[string].SetValue(str(self.values[string]))
                self.paths.boxList[2].SetValue(masterFlat)
                self.paths.boxList[5].SetValue(self.outputFile)
                # This code here writes all the parameters to the init.par file.
                init = open(os.path.join(os.path.dirname(__file__),'init.par'), 'w')
                init.write("Path to Dark Frames: " + self.paths.boxList[1].GetValue() + "\n")
                init.write("Path to Data Images: " + self.paths.boxList[3].GetValue() + "\n")
                init.write("Path to Master-Flat Frame: " + masterFlat + "\n")
                init.write("Path to Regions File: " + self.paths.boxList[4].GetValue() + "\n")
                if not self.paths.boxList[5].GetValue().lower().endswith(".pkl"):
                    init.write("Output Path: " + self.paths.boxList[5].GetValue() + ".pkl\n")
                else:
                    init.write("Output Path: " + self.paths.boxList[5].GetValue() + "\n")
                self.parseTime(self.radioBox.userParams["ingress"].GetValue(),
                               self.radioBox.userParams["ingress1"].GetValue(), 'Ingress: ', init, name="ingress")
                self.parseTime(self.radioBox.userParams["egress"].GetValue(),
                               self.radioBox.userParams["egress1"].GetValue(), 'Egress: ', init, name="egress")
                if self.radioBox.userParams['rbTrackPlot'].GetValue():
                    init.write("Plot Tracking: " + "on"+ "\n")
                else:
                    init.write("Plot Tracking: " + "off"+ "\n")
                if self.radioBox.userParams['rbPhotPlot'].GetValue():
                    init.write("Plot Photometry: " + "on"+ "\n")
                else:
                    init.write("Plot Photometry: " + "off"+ "\n")
                init.write("Smoothing Constant: " + str(self.values["smoothing"]) + '\n')
                init.write("Radius: " + str(self.values["radius"]) + '\n')
                init.write("Tracking Zoom: " + str(self.values["zoom"]) + '\n')
                init.write("CCD Gain: " + self.ccdGain + "\n")
                init.write("Exposure Time Keyword: " + self.exposureTime + "\n")
                init.close()
                if self.loadFittingOpen == False:
                    # Confirm with the user before overwriting existing output
                    # or running without dark/flat calibration frames.
                    if self.preprocessedImagesFrame == False and \
                       self.overWrite == False and \
                       (self.paths.boxList[1].GetValue() == "" or \
                        self.paths.boxList[2].GetValue() == ""):
                        OverWrite(self, -1, "Preprocessed Images Check", "", "PreprocessedImages")
                        self.preprocessedImagesFrame = True
                    elif self.preprocessedImagesFrame == False and \
                         (os.path.isfile(self.outputFile) or \
                          os.path.isfile(self.outputFile + '.pkl')):
                        if self.overWrite == False:
                            OverWrite(self, -1, "Overwrite Output File", self.outputFile, "Output File")
                            self.overWrite = True
                    elif self.preprocessedImagesFrame == False and \
                         self.overWrite == False:
                        # Run the photometry in a subprocess so the GUI stays alive.
                        diffPhotCall = "from oscaar import differentialPhotometry"
                        subprocess.check_call(['python','-c',diffPhotCall])
                        if self.radioBox.userParams["rbFitAfterPhot"].GetValue() == True:
                            wx.CallAfter(self.createFrame)
                else:
                    if self.loadFitError == False:
                        self.IP = InvalidParameter("", self, -1, stringVal="fitOpen")
                        self.loadFitError = True
            except ValueError:
                # int() failed for "smoothing" or "zoom"; ``string`` holds the
                # offending key from the loop above.
                string2 = string
                if string2 == "smoothing":
                    string2 = "smoothing constant"
                self.IP = InvalidParameter(self.leftBox.userParams[string].GetValue(),self,-1, stringVal="leftbox", secondValue=string2)
    def timeAndDateCheck(self, time1, time2, date1, date2):
        '''
        This method checks that the times and dates entered in the main GUI are in the correct format.

        Parameters
        ----------
        time1 : string
            The ingress time of the transit that was observed.
        time2 : string
            The egress time of the transit that was observed.
        date1 : string
            The date for the ingress of the transit.
        date2 : string
            The date for the egress of the transit.

        Returns
        -------
        literal : bool
            Returns true if the parameters are all in the correct format, otherwise it returns false.

        Notes
        -----
        The correct format for the times is HH:MM:SS, while for the dates it is YYYY/MM/DD. This method
        will also check that real dates have been entered, as well as that the ingress time always
        is before the egress time.
        '''
        years = []
        months = []
        days = []
        hours = []
        minutes = []
        seconds = []
        # Validate both times: three integer fields, HH<24, MM<60, SS<60.
        for timeArray, value in [(time1.split(":"), time1),
                                 (time2.split(":"), time2)]:
            if len(timeArray) != 3:
                self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="time")
                return False
            else:
                try:
                    hour = int(timeArray[0].strip())
                    hours.append(hour)
                    minute = int(timeArray[1].strip())
                    minutes.append(minute)
                    second = int(timeArray[2].strip())
                    seconds.append(second)
                    if len(timeArray[0].strip()) > 2 or len(timeArray[1].strip()) > 2 or len(timeArray[2].strip()) > 2:
                        self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="time")
                        return False
                    if hour > 23 or hour < 0 or minute > 59 or minute < 0 or second > 59 or second < 0:
                        self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="time")
                        return False
                except ValueError:
                    self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="time")
                    return False
        # Validate both dates: 4-digit year within +/-100 years of today,
        # month 1-12, day 1-31.
        # NOTE(review): only range checks are done — impossible dates such as
        # 2020/02/31 are accepted; confirm whether stricter validation is wanted.
        for dateArray,value in [(date1.split("/"),date1),
                                (date2.split("/"),date2)]:
            if len(dateArray) != 3:
                self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="date")
                return False
            else:
                try:
                    year = int(dateArray[0].strip())
                    years.append(year)
                    month = int(dateArray[1].strip())
                    months.append(month)
                    day = int(dateArray[2].strip())
                    days.append(day)
                    if len(dateArray[0].strip()) != 4 or len(dateArray[1].strip()) > 2 or len(dateArray[2].strip()) > 2:
                        self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="date")
                        return False
                    minYear = datetime.date.today().year - 100
                    maxYear = datetime.date.today().year + 100
                    if year < minYear or year > maxYear or month > 12 or month < 0 or day > 31 or day < 0 or \
                       month == 0 or year == 0 or day == 0:
                        self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="date")
                        return False
                except ValueError:
                    self.IP = InvalidParameter(value, self, -1, stringVal="dateTime", secondValue="date")
                    return False
        # Field-by-field comparison to enforce ingress strictly before egress.
        # NOTE(review): the final ``>=`` also rejects exactly equal timestamps.
        if years[0] > years[1]:
            self.IP = InvalidParameter(date1, self, -1, stringVal="logicalDate")
            return False
        elif years[0] == years[1]:
            if months[0] > months[1]:
                self.IP = InvalidParameter(date1, self, -1, stringVal="logicalDate")
                return False
            elif months[0] == months[1]:
                if days[0] > days[1]:
                    self.IP = InvalidParameter(date1, self, -1, stringVal="logicalDate")
                    return False
                elif days[0] == days[1]:
                    if hours[0] > hours[1]:
                        self.IP = InvalidParameter(time1, self, -1, stringVal="logicalTime")
                        return False
                    elif hours[0] == hours[1]:
                        if minutes[0] > minutes[1]:
                            self.IP = InvalidParameter(time1, self, -1, stringVal="logicalTime")
                            return False
                        elif minutes[0] == minutes[1]:
                            if seconds[0] >= seconds [1]:
                                self.IP = InvalidParameter(time1, self, -1, stringVal="logicalTime")
                                return False
        return True
def checkAperture(self, stringVal):
'''
This method parses the string from the aperture radius text box to make sure that the values
are in the correct format and valid.
Parameters
----------
stringVal : string
The input of the aperture radius text box in the main GUI.
Returns
-------
literal : bool
True if the values are valid and false otherwise.
Notes
-----
This method will check the radius step interval is not larger than the max and min radii, as well
as that the max radius is always larger than the min radius. Only when using 3 values in this text control
box, the GUI will interpret it as (min radius, max radius, step interval), otherwise it only computes
the specific values entered.
'''
splitString = stringVal.split(",")
if len(splitString) == 1:
try:
float(splitString[0])
self.leftBox.userParams["radius"].SetValue(str(float(splitString[0])))
return True
except ValueError:
self.radiusError = "radiusNum"
return False
elif len( splitString) == 3:
minRadius = splitString[0].strip()
maxRadius = splitString[1].strip()
stepSize = splitString[2].strip()
try:
minRadius = float(minRadius)
maxRadius = float(maxRadius)
stepSize = float(stepSize)
if minRadius == maxRadius:
self.radiusError = "radiusEqual"
return False
elif minRadius > maxRadius:
self.radiusError = "radiusLogic"
return False
elif (maxRadius-minRadius) < stepSize:
self.radiusError = "radiusStep"
return False
if stepSize == 0:
self.radiusError = "radiusLogic"
return False
elif minRadius == 0 or maxRadius == 0:
self.radiusError = "radiusLogic"
return False
self.values["radius"] = str(minRadius) + "," + str(maxRadius) + "," + str(stepSize)
self.leftBox.userParams["radius"].SetValue(str(minRadius) + "," + str(maxRadius) + "," + str(stepSize))
return True
except ValueError:
self.radiusError = "radiusNum"
return False
else:
stringTemp = ""
for num in splitString:
numStrip = num.strip()
try:
float(numStrip)
if numStrip == 0:
self.radiusError = "radiusLogic2"
return False
except ValueError:
self.radiusError = "radiusNum"
return False
stringTemp += str(float(numStrip)) + ","
self.values["radius"] = stringTemp.rpartition(",")[0]
self.leftBox.userParams["radius"].SetValue(stringTemp.rpartition(",")[0])
return True
def setDefaults(self):
    '''
    Fill the text boxes of the main GUI with the default values listed in
    the init.par parameter file.

    Notes
    -----
    Each non-blank line of init.par is split once on ":" into a parameter
    name and a value; recognized names are matched against `tempList` and
    routed to the appropriate widget or instance attribute.
    '''
    # programmersEdit reads init.par from the current working directory
    # instead of the installed oscaar package location.
    if self.programmersEdit == True:
        init = open("init.par","r").read().splitlines()
    else:
        oscaarpath = os.path.dirname(os.path.abspath(oscaar.__file__))
        init = open(os.path.join(oscaarpath,'init.par'), 'r').read().splitlines()
    for line in init:
        if len(line.split()) > 1:
            # Split only on the first ":" so colons inside the value
            # (Windows drive letters, HH:MM:SS times) survive intact.
            inline = line.split(':', 1)
            name = inline[0].strip()
            value = str(inline[1].strip())
            # (parameter label, widget key or path-box index) pairs.
            tempList = [("Path to Master-Flat Frame", 2),
                        ("Path to Regions File", 4),
                        ("Ingress", "ingress"),("Egress", "egress"),
                        ("Radius", "radius"),("Tracking Zoom", "zoom"),
                        ("Plot Tracking", "rbTrackPlot"),
                        ("Plot Photometry", "rbPhotPlot"),("Smoothing Constant", "smoothing"),
                        ("Output Path",5),("Path to Dark Frames", 1),("Path to Data Images", 3),
                        ("CCD Gain",""),("Exposure Time Keyword","")]
            for string,save in tempList:
                if string == name:
                    if name == "Smoothing Constant" or name == "Tracking Zoom":
                        self.leftBox.userParams[save].SetValue(value)
                    elif name == "Radius":
                        # Re-join the comma-separated list with the
                        # whitespace around each entry stripped.
                        stripTemp = [x.strip() for x in value.split(",")]
                        stringTemp = ""
                        for eachTemp in stripTemp:
                            stringTemp += eachTemp + ","
                        self.leftBox.userParams[save].SetValue(stringTemp.rpartition(",")[0])
                    elif name == "Plot Photometry" or name == "Plot Tracking":
                        # The "off" radio button lives under key + "1".
                        if value == "off":
                            save += "1"
                        self.radioBox.userParams[save].SetValue(True)
                    elif name == "Path to Dark Frames" or name == "Path to Data Images":
                        # Strip whitespace around each path and rebuild the
                        # comma-separated list without a trailing comma.
                        tempArray = value.split(",")
                        tempArray[:] = [x.strip() for x in tempArray]
                        finalString = ""
                        for eachString in tempArray:
                            finalString += eachString + ","
                        self.paths.boxList[save].SetValue(finalString.rpartition(",")[0])
                    elif name == "Path to Master-Flat Frame" or name == "Path to Regions File" or\
                    name == "Output Path":
                        self.paths.boxList[save].SetValue(value)
                    elif name == "CCD Gain":
                        self.ccdGain = value
                    elif name == "Exposure Time Keyword":
                        self.exposureTime = value
                    else:
                        # Remaining names are Ingress/Egress; the stored
                        # value looks like "YYYY-MM-DD ; HH:MM:SS".
                        date = value.split(";")[0].strip().replace("-","/")
                        time = value.split(";")[1].strip()
                        for eachOne, other in [(date,""),(time,"1")]:
                            # other == "1" marks the time half; its box key
                            # is save + "1" and its separator is ":".
                            if other == "1":
                                separator = ":"
                            else:
                                separator = "/"
                            stripTemp = [x.strip() for x in eachOne.split(separator)]
                            stringTemp = ""
                            for eachTemp in stripTemp:
                                stringTemp += eachTemp + separator
                            if other == "1":
                                self.radioBox.userParams[save+"1"].SetValue(stringTemp.rpartition(separator)[0])
                            else:
                                self.radioBox.userParams[save].SetValue(stringTemp.rpartition(separator)[0])
def checkFileInputs(self,array,saveNum):
    '''
    This checks that the files from a text control box are valid .fit/.fits files. Then it refreshes
    the text control box with a string of the valid files.

    Parameters
    ----------
    array : string
        The comma-separated list of files from a text control box in the main GUI.
    saveNum : int
        When it refreshes the text control box, the method needs to know which box to do it for. The box numbers from
        the main GUI are in order 1-5 (this is only for the input file text boxes).

    Returns
    -------
    errorString : string
        A string of all of the invalid files that were entered in the input file text box.

    Notes
    -----
    If errorString returns '' (empty), this means that all of the entered files were valid.
    Entries ending in a path separator are expanded with "*.fit" and "*.fits" glob patterns.
    '''
    errorString = ""
    setValueString = ""
    array2 = []
    # smallArray == "" means "treat entries as explicit files/patterns";
    # once any entry ends with a path separator it becomes "-1" and the
    # directory-expansion branch is used instead.
    smallArray = ""
    # Empty input is only an error for box 3 (data images), which is required.
    if array.strip() == "" and saveNum != 3:
        return errorString
    for element in array.split(","):
        element = element.strip()
        if element.lower().endswith(os.sep):
            # Directory entry: try both extensions via glob patterns.
            tempElement = element + "*.fit"
            element += "*.fits"
            smallArray = "-1"
            # NOTE(review): smallArray is never reset inside the loop, so once
            # one directory entry is seen, all later entries take the
            # directory branch as well -- confirm this is intended.
        if smallArray == "":
            if len(glob(element)) < 1:
                # No match on disk.
                # NOTE(review): unlike the other error branches, no leading
                # "\n" is prepended here -- possibly an inconsistency.
                errorString += element
            elif len(glob(element)) > 1:
                # Wildcard matched several files; keep only .fit/.fits ones.
                for element2 in glob(element):
                    if element2.lower().endswith(".fit") or element2.lower().endswith(".fits"):
                        array2.append(element2)
                    else:
                        errorString += "\n" + element2
            elif not element.lower().endswith(".fit") and not element.lower().endswith(".fits"):
                errorString += "\n" + element
            else:
                array2.append(glob(element)[0])
        else:
            # Directory branch: accept matches from either extension pattern.
            if len(glob(tempElement)) < 1 and len(glob(element)) < 1:
                errorString += "\n" + tempElement + ",\n" + element
            else:
                if len(glob(tempElement)) >= 1:
                    for element2 in glob(tempElement):
                        array2.append(element2)
                if len(glob(element)) >= 1:
                    for element2 in glob(element):
                        array2.append(element2)
    if not array:
        return "No Values Entered"
    else:
        if errorString == "":
            # All entries valid: write the de-duplicated list back to the box.
            setValueString = ""
            uniqueArray = np.unique(array2).tolist()
            for eachString in uniqueArray:
                setValueString += eachString + ","
            # Box 3 (data images) needs at least two frames for photometry.
            if saveNum == 3 and (len(uniqueArray) < 2):
                errorString = self.paths.boxList[3].GetValue()
                return errorString
            self.paths.boxList[saveNum].SetValue(setValueString.rpartition(",")[0])
    return errorString
def checkRegionsBox(self, boxValue):
    '''
    This method specifically checks that the regions file input box in the main GUI has files that are in
    the correct format.

    Parameters
    ----------
    boxValue : string
        The value of the regions file box.

    Returns
    -------
    literal : bool
        True if all of the files are valid, false otherwise.

    Notes
    -----
    The correct format for files in the regions file box is (somefile.reg,referencefile.fits;). The semicolon will
    separate different sets of regions and reference files. Only if there is one regions file is it acceptable to
    not include a reference file, otherwise you must. Each reference image must also appear in the
    data-images box (box 3), and a regions/reference pairing may not be duplicated.
    '''
    setValueString = ""
    # tempString tracks which of tempReg/tempRef was being parsed when an
    # IndexError fires, so the error handler can blank the right one.
    tempString = ""
    if boxValue == "":
        self.IP = InvalidParameter(boxValue, self, -1, stringVal="emptyReg")
        return False
    splitSets = boxValue.split(";")
    # checkSet: the data images that a reference file must belong to.
    checkSet = self.paths.boxList[3].GetValue().strip().split(",")
    try:
        # Case 1: a single "file.reg;" entry -- pair it with the first data image.
        if len(splitSets[0].split(",")) == 1 and len(splitSets[1]) == 0 and len(splitSets) == 2 and \
        splitSets[0].split(",")[0].strip().lower().endswith(".reg"):
            setValueString = splitSets[0].strip() + "," + self.paths.boxList[3].GetValue().split(",")[0].strip() + ";"
        # Case 2: "file.reg,;" (trailing comma, empty reference) -- same pairing.
        elif splitSets[0].split(",")[1].strip() == "" and len(splitSets[1]) == 0 and len(splitSets) == 2:
            if splitSets[0].split(",")[0].strip().lower().endswith(".reg") != True or \
            len(glob(splitSets[0].split(",")[0])) != 1:
                self.IP = InvalidParameter("\nRegions: "+ splitSets[0].split(",")[0]
                                           + "\nReference: " + splitSets[0].split(",")[1], self, -1, stringVal="invalidReg")
                return False
            setValueString = splitSets[0].split(",")[0].strip() + "," + \
            self.paths.boxList[3].GetValue().split(",")[0].strip() + ";"
        else:
            # Case 3: one or more fully-specified "reg,ref;" sets.
            try:
                for eachSet in splitSets:
                    if eachSet != "":
                        tempString = "tempReg"
                        tempReg = eachSet.split(",")[0].strip()
                        tempString = "tempRef"
                        tempRef = eachSet.split(",")[1].strip()
                        if len(glob(tempReg)) != 1 or tempReg.lower().endswith(".reg") == False:
                            self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="invalidReg")
                            return False
                        elif len(glob(tempRef)) != 1 or (tempRef.lower().endswith(".fits") == False and
                                                         tempRef.lower().endswith(".fit") == False):
                            self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="invalidRef")
                            return False
                        elif all(tempRef != temp for temp in checkSet):
                            # Reference image is not among the data images.
                            self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="invalidRefExist")
                            return False
                        setValueString += tempReg + "," + tempRef + ";"
            except IndexError:
                # A set was missing its reference half (split had < 2 parts).
                if tempString == "tempReg":
                    tempReg = ""
                elif tempString == "tempRef":
                    tempRef = ""
                if len(eachSet.split(",")) == 1:
                    self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="outofbounds")
                return False
    except IndexError:
        # Single "file.reg" with no trailing ";" -- splitSets[1] did not exist.
        if splitSets[0].split(",")[0].strip().lower().endswith(".reg") != True or \
        len(glob(splitSets[0].split(",")[0])) != 1:
            if len(splitSets[0].split(",")) == 1:
                temp = ""
            else:
                temp = splitSets[0].split(",")[1]
            self.IP = InvalidParameter("\nRegions: "+ splitSets[0].split(",")[0]
                                       + "\nReference: " + temp, self, -1, stringVal="invalidReg")
            return False
        # Pair the lone regions file with the first data image, then re-run
        # the same per-set validation loop as Case 3 above.
        setValueString = splitSets[0].split(",")[0].strip() + "," + \
        self.paths.boxList[3].GetValue().split(",")[0].strip()
        splitSets[0] = setValueString
        setValueString = ""
        try:
            for eachSet in splitSets:
                if eachSet != "":
                    tempString = "tempReg"
                    tempReg = eachSet.split(",")[0].strip()
                    tempString = "tempRef"
                    tempRef = eachSet.split(",")[1].strip()
                    if len(glob(tempReg)) != 1 or tempReg.lower().endswith(".reg") == False:
                        self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="invalidReg")
                        return False
                    elif len(glob(tempRef)) != 1 or (tempRef.lower().endswith(".fits") == False and
                                                     tempRef.lower().endswith(".fit") == False):
                        self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="invalidRef")
                        return False
                    elif all(tempRef != temp for temp in checkSet):
                        self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="invalidRefExist")
                        return False
                    setValueString += tempReg + "," + tempRef + ";"
        except IndexError:
            if tempString == "tempReg":
                tempReg = ""
            elif tempString == "tempRef":
                tempRef = ""
            if len(eachSet.split(",")) == 1:
                self.IP = InvalidParameter("\nRegions: "+tempReg + "\nReference: " + tempRef, self, -1, stringVal="outofbounds")
            return False
    # Reject duplicate regions files or duplicate reference images across sets.
    refArray = []
    regArray = []
    tempDict = {}
    for eachSet in setValueString.split(";"):
        if len(eachSet.split(",")) != 1:
            reg = eachSet.split(",")[0]
            ref = eachSet.split(",")[1]
            regTemp = reg in regArray
            refTemp = ref in refArray
            if regTemp == False and refTemp == False:
                regArray.append(reg)
                refArray.append(ref)
                tempDict[reg] = ref
            elif regTemp == False and refTemp == True:
                # This reference image is already claimed by another regions file.
                for key, val in tempDict.items():
                    if val == ref:
                        tempReg = key
                tempString = "\nRegions: " + reg + "\nReference: " + ref + "\nBecause ---" + "\nRegions: " + \
                tempReg + "\nIs already associated with the reference file."
                self.IP = InvalidParameter(tempString, self, -1, stringVal="referenceImageDup")
                return False
            elif regTemp == True and refTemp == False:
                # This regions file is already paired with a different reference.
                tempRef = tempDict.get(reg)
                tempString = "\nRegions: " + reg + "\nReference: " + ref + "\nBecause ---" + "\nRegions: " + \
                reg + "\nIs already associated with:\nReference: " + tempRef
                self.IP = InvalidParameter(tempString, self, -1, stringVal="regionsDup")
                return False
    # Rebuild the box text from the validated, de-duplicated pairs.
    setValueString = ""
    for key, val in tempDict.items():
        setValueString += key + "," + val + ";"
    self.paths.boxList[4].SetValue(setValueString)
    return True
def singularExistance(self, event, value, name):
    '''
    Open the frame identified by `name`, but only when no instance of that
    frame class is already open, so at most one of each exists at a time.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    value : bool
        Indicates whether or not there is already an instance of the class open.
    name : string
        The keyword defining the name of the class for which a frame is about to be opened.

    Notes
    -----
    Nothing is returned. On success a new frame (or the DS9 process)
    appears and the corresponding "is open" flag on this frame is set.
    If `value` is already True the method does nothing.
    '''
    if value != False:
        # An instance is already open; never duplicate it.
        return
    # Frames that need no extra validation before opening:
    # keyword -> (frame class, attribute that records it is open).
    plainFrames = {"about": (AboutFrame, "aboutOpen"),
                   "loadOld": (LoadOldPklFrame, "loadOldPklOpen"),
                   "loadFitting": (FittingFrame, "loadFittingOpen"),
                   "masterFlat": (MasterFlatFrame, "loadMasterFlat"),
                   "ephemeris": (EphemerisFrame, "loadEphFrame"),
                   "etd": (ETDFrame, "etdOpen")}
    if name in plainFrames:
        frameClass, openFlag = plainFrames[name]
        frameClass(self, -1)
        setattr(self, openFlag, True)
    elif name == "ds9":
        # Launching the bundled DS9 binary raises a platform-specific
        # error when the executable is missing.
        if sys.platform == "win32":
            launchError = WindowsError
        else:
            launchError = OSError
        try:
            subprocess.Popen([os.path.join(os.path.dirname(os.path.abspath(oscaar.__file__)),
                                           'extras','ds9',sys.platform,'ds9')])
        except launchError:
            self.IP = InvalidParameter("", self, -1, stringVal="ds9")
    elif name == "extra":
        # Extra-regions frame needs valid data images and a valid regions box.
        badImages = self.checkFileInputs(self.paths.boxList[3].GetValue(), saveNum=3)
        if badImages != "":
            self.IP = InvalidParameter(badImages, self, -1, stringVal="fits", secondValue="the path to Data Images")
        elif self.checkRegionsBox(self.paths.boxList[4].GetValue()) == True:
            ExtraRegions(self,-1)
            self.extraRegionsOpen = True
    elif name == "observatory":
        # Observatory frame only needs valid data images.
        badImages = self.checkFileInputs(self.paths.boxList[3].GetValue(), saveNum=3)
        if badImages != "":
            self.IP = InvalidParameter(badImages, self, -1, stringVal="fits", secondValue="the path to Data Images")
        else:
            ObservatoryFrame(self, -1)
            self.loadObservatoryFrame = True
def parseTime(self, date, time, text, filename, name=""):
    '''
    Write an ingress/egress timestamp into the open init.par file in the
    expected format, then normalize the matching GUI text boxes.

    Parameters
    ----------
    date : string
        A string of the date in the format YYYY/MM/DD.
    time : string
        A string of the time in the format HH:MM:SS.
    text : string
        The label written before the value in init.par (ingress or egress).
    filename : file
        The open file that the value will be appended to.
    name : string, optional
        Key of the date text box to refresh; the paired time box is looked
        up under name + "1".

    Notes
    -----
    The file receives "YYYY-MM-DD ; HH:MM:SS"; the text boxes are reset to
    the same values with any surrounding whitespace stripped.
    '''
    dateParts = [piece.strip() for piece in str(date).split('/')]
    timeParts = [piece.strip() for piece in str(time).split(":")]
    # init.par stores the date with dashes and separates date from time
    # with " ; ".
    stamp = (dateParts[0] + '-' + dateParts[1] + '-' + dateParts[2] + ' ; ' +
             timeParts[0] + ':' + timeParts[1] + ':' + timeParts[2])
    filename.write(text + stamp + '\n')
    # Refresh the GUI boxes so they show the cleaned-up values.
    self.radioBox.userParams[name].SetValue(dateParts[0] + '/' + dateParts[1] + '/' + dateParts[2])
    self.radioBox.userParams[name + "1"].SetValue(timeParts[0] + ':' + timeParts[1] + ':' +
                                                  timeParts[2])
def createFrame(self):
    '''
    Open the fitting frame after the differentialPhotometry.py script has
    finished, so users may work on their light curves.

    Notes
    -----
    Ensures the output path passed to the fitting frame ends in ".pkl" and
    that only one fitting frame is ever open at a time.
    '''
    if self.loadFittingOpen != False:
        # A fitting frame is already open; do not open another.
        return
    pklPath = self.outputFile
    if not pklPath.lower().endswith(".pkl"):
        pklPath = pklPath + ".pkl"
    FittingFrame(self, -1, pklPath)
    self.loadFittingOpen = True
def checkSHA(self, event):
    '''
    Compare the commit SHA recorded at install time (oscaar.__sha__)
    against the newest commit on the OSCAAR GitHub master branch.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The *
        represents a wild card value.

    Notes
    -----
    There is no return value. A pop-up reports one of four outcomes:
    up to date, a newer commit exists, the local SHA is missing (reinstall
    needed), or the network request failed.
    '''
    try:
        page = urllib2.urlopen("https://github.com/OSCAAR/OSCAAR/commits/master").read()
        # The first commit link on the page is the newest commit.
        newestSha = re.search('href="/OSCAAR/OSCAAR/commit/[a-z0-9]*',
                              str(page)).group(0).rpartition("/")[2]
        try:
            installedSha = oscaar.__sha__
        except AttributeError:
            # Older installs never recorded a SHA; ask the user to reinstall.
            self.IP = InvalidParameter("", self, -1, stringVal="installAgain")
        else:
            if newestSha == installedSha:
                self.IP = InvalidParameter("", self, -1, stringVal="upToDate")
            else:
                self.IP = InvalidParameter("", self, -1, stringVal="newCommit")
    except urllib2.URLError:
        self.IP = InvalidParameter("", self, -1, stringVal="noInternetConnection")
def openLink(self, event, string):
    '''
    Open the given URL in a new tab of the default web browser.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    string : string
        The web url that will be opened.
    '''
    webbrowser.open_new_tab(string)
def on_exit(self, event):
    '''
    Menu handler for File > Quit: destroy this frame.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    '''
    self.Destroy()
class ObservatoryFrame(wx.Frame):
    '''
    A frame for updating extra parameters that define an observatory's
    configuration: the CCD gain and the FITS exposure-time header keyword.
    Values are written back to init.par and mirrored onto the parent frame.
    '''
    def __init__(self, parent, objectID):
        '''
        Build the frame: a gain text box, a combo box of exposure-time
        keywords found in the first data image's FITS header, and an
        Update button.
        '''
        # Windows renders better with a smaller bold font.
        if sys.platform == "win32":
            self.fontType = wx.Font(9, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        else:
            self.fontType = wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL)
        wx.Frame.__init__(self, parent, objectID, "Change Observatory Parameters")
        self.panel = wx.Panel(self)
        self.parent = parent
        self.messageFrame = False
        self.IP = wx.Frame
        self.titlebox = wx.StaticText(self.panel, -1, "Observatory Parameters")
        self.titleFont = wx.Font(15, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        self.titlebox.SetFont(self.titleFont)
        paramsList = [('ccd',"CCD Gain: ",
                       'Enter a decimal for the gain here.', self.parent.ccdGain)]
        # Quick check to see the available keywords from the header for a fits file.
        # header = pyfits.getheader(self.parent.paths.boxList[3].GetValue().split(",")[0]).keys()
        # print header
        # Inspect the first data image for candidate exposure-time keywords.
        bestKeyword, self.allKeys, acceptedKeys, conversion = \
        timeConversions.findKeyword(self.parent.paths.boxList[3].GetValue().split(",")[0])
        if conversion: pass
        # unionKeys: header keywords that are also in the accepted list.
        self.unionKeys = []
        for eachKey in self.allKeys:
            if eachKey in acceptedKeys:
                self.unionKeys.append(eachKey)
        self.timeLabel = wx.StaticText(self.panel, -1, 'Select Exposure Time Keyword: ')
        self.timeLabel.SetFont(self.fontType)
        # First time this frame opens, preselect the auto-detected keyword;
        # afterwards keep whatever the user last chose.
        if self.parent.switchTimes == 0:
            self.timeList = wx.ComboBox(self.panel, value = bestKeyword, choices = sorted(self.unionKeys),
                                        size=(75,wx.DefaultSize.GetHeight()))
            self.parent.switchTimes = 1
        else:
            self.timeList = wx.ComboBox(self.panel, value = self.parent.exposureTime, choices = sorted(self.unionKeys),
                                        size=(75,wx.DefaultSize.GetHeight()))
        self.timeList.Bind(wx.EVT_COMBOBOX, self.updateTime)
        self.dropBox = wx.BoxSizer(wx.HORIZONTAL)
        self.dropBox.Add(self.timeLabel, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 10)
        self.dropBox.Add(self.timeList, 0, flag = wx.ALIGN_CENTER)
        self.params = ParameterBox(self.panel, -1, paramsList, rows=5, cols=2, vNum=10, hNum=10, font=self.fontType)
        self.updateButton = wx.Button(self.panel, label = "Update")
        self.Bind(wx.EVT_BUTTON, self.update, self.updateButton)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.titlebox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.vbox.Add(self.params, 0, flag = wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 5)
        self.vbox.Add(self.dropBox, 0, flag=wx.ALIGN_CENTER | wx.ALL, border=5)
        self.vbox.Add(self.updateButton, 0, flag=wx.ALIGN_CENTER | wx.ALL, border=5)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.create_menu()
        self.CreateStatusBar()
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)
        self.Center()
        self.Show()

    def updateTime(self,event):
        '''
        This updates the exposure time keyword variable for parsing the .fit(s) files in the parent OscaarFrame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.exposureTime = self.timeList.GetValue()

    def update(self, event):
        '''
        This updates the exposure time keyword for parsing .fit(s) files as well as the ccd gain in the init.par file.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        if self.checkParams() == True:
            self.parent.ccdGain = self.params.userParams["ccd"].GetValue()
            self.parent.exposureTime = self.timeList.GetValue()
            # Rewrite init.par with the old gain/keyword lines removed and
            # the new values appended.
            # NOTE(review): neither file handle here is explicitly closed;
            # CPython's refcounting closes them, but a with-statement would
            # be safer. Also note init.par is resolved relative to __file__
            # here, while setDefaults resolves it via the oscaar package --
            # confirm the two always agree.
            string = open(os.path.join(os.path.dirname(__file__),'init.par'), 'r').read().splitlines()
            stringCopy = np.copy(string)
            for line in stringCopy:
                if ("CCD Gain:" in line) or ("Exposure Time Keyword:" in line):
                    string.remove(line)
            observ = open(os.path.join(os.path.dirname(__file__),'init.par'), 'w')
            observ.write('\n'.join(string))
            observ.write("\nCCD Gain: " + self.params.userParams["ccd"].GetValue() + "\n")
            observ.write("Exposure Time Keyword: " + self.timeList.GetValue() + "\n")

    def checkParams(self):
        '''
        This is check to make sure that the ccd gain and exposure time keyword are valid,
        before updating the init.par file.

        Returns
        -------
        literal : bool
            True if both ccd gain and exposure time keyword are valid, false otherwise.
        '''
        try:
            # Gain must parse as a float; normalize its display.
            tempCCD = float(self.params.userParams["ccd"].GetValue())
            self.params.userParams["ccd"].SetValue(str(tempCCD))
            timeKey = self.timeList.GetValue().strip()
            if timeKey == "":
                self.IP = InvalidParameter(timeKey, self, -1, stringVal="emptyKeyword")
                return False
            elif not timeKey in self.allKeys:
                # Keyword not present in the FITS header at all.
                self.IP = InvalidParameter(timeKey, self, -1, stringVal="invalidKeyword")
                return False
            elif (not timeKey in self.unionKeys) and (timeKey in self.allKeys):
                # Present in the header but not a supported time keyword.
                self.IP = InvalidParameter(timeKey, self, -1, stringVal="emailKeyword")
                return False
            self.timeList.SetValue(timeKey)
        except ValueError:
            self.IP = InvalidParameter(self.params.userParams["ccd"].GetValue(),self,-1, stringVal="leftbox", secondValue="ccd")
            return False
        return True

    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the observatory frame.

        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_quit = menu_file.Append(wx.ID_EXIT, "Quit\tCtrl+Q", "Quit this application.")
        self.Bind(wx.EVT_MENU, self.on_exit, m_quit)
        menubar.Append(menu_file, "File")
        self.SetMenuBar(menubar)

    def on_exit(self,event):
        '''
        This method defines the action quit from the menu. It closes the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()

    def onDestroy(self,event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.loadObservatoryFrame = False
class ExtraRegions(wx.Frame):
    '''
    This frame allows a user to append multiple regions files and their respective reference files as sets to the
    regions file text box in the parent OscaarFrame.
    '''
    def __init__(self, parent, objectID):
        '''
        Build the frame: five (regions file, reference image) input rows,
        each with its own "Add Set N" button.
        '''
        # Windows renders better with a smaller bold font.
        if sys.platform == "win32":
            self.fontType = wx.Font(9, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        else:
            self.fontType = wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL)
        wx.Frame.__init__(self, parent, objectID, "Extra Regions Files")
        self.panel = wx.Panel(self)
        self.parent = parent
        self.messageFrame = False
        self.IP = wx.Frame
        self.titlebox = wx.StaticText(self.panel, -1, "Extra Regions Files")
        self.titleFont = wx.Font(15, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        self.titlebox.SetFont(self.titleFont)
        # Five identical two-row input widgets (regions path + reference path).
        self.set1 = AddLCB(self.panel, -1, name="Path to Regions File: ,Path to Reference Image: ", rowNum=2, vNum=5,
                           hNum=5, boxName ="Set 1", font=self.fontType)
        self.set2 = AddLCB(self.panel, -1, name="Path to Regions File: ,Path to Reference Image: ", rowNum=2, vNum=5,
                           hNum=5, boxName="Set 2", font=self.fontType)
        self.set3 = AddLCB(self.panel, -1, name="Path to Regions File: ,Path to Reference Image: ", rowNum=2, vNum=5,
                           hNum=5, boxName="Set 3", font=self.fontType)
        self.set4 = AddLCB(self.panel, -1, name="Path to Regions File: ,Path to Reference Image: ", rowNum=2, vNum=5,
                           hNum=5, boxName="Set 4", font=self.fontType)
        self.set5 = AddLCB(self.panel, -1, name="Path to Regions File: ,Path to Reference Image: ", rowNum=2, vNum=5,
                           hNum=5, boxName="Set 5", font=self.fontType)
        # Each button captures its own label via a lambda default argument
        # so addSet() can tell which set row was clicked.
        self.addSet1= wx.Button(self.panel, -1, label = "Add Set 1")
        self.Bind(wx.EVT_BUTTON, lambda evt, lambdaStr=self.addSet1.Label: self.addSet(evt,lambdaStr), self.addSet1)
        self.addSet2= wx.Button(self.panel, -1, label = "Add Set 2")
        self.Bind(wx.EVT_BUTTON, lambda evt, lambdaStr=self.addSet2.Label: self.addSet(evt,lambdaStr), self.addSet2)
        self.addSet3= wx.Button(self.panel, -1, label = "Add Set 3")
        self.Bind(wx.EVT_BUTTON, lambda evt, lambdaStr=self.addSet3.Label: self.addSet(evt,lambdaStr), self.addSet3)
        self.addSet4= wx.Button(self.panel, -1, label = "Add Set 4")
        self.Bind(wx.EVT_BUTTON, lambda evt, lambdaStr=self.addSet4.Label: self.addSet(evt,lambdaStr), self.addSet4)
        self.addSet5= wx.Button(self.panel, -1, label = "Add Set 5")
        self.Bind(wx.EVT_BUTTON, lambda evt, lambdaStr=self.addSet5.Label: self.addSet(evt,lambdaStr), self.addSet5)
        # NOTE(review): vbox2 is populated below but never added to self.vbox,
        # and the same buttons are re-added to the hbox sizers -- vbox2 looks
        # like dead layout code left over from an earlier design.
        self.vbox2 = wx.BoxSizer(wx.VERTICAL)
        self.vbox2.Add(self.addSet1, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 35)
        self.vbox2.Add(self.addSet2, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 35)
        self.vbox2.Add(self.addSet3, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 35)
        self.vbox2.Add(self.addSet4, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 35)
        self.vbox2.Add(self.addSet5, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 35)
        # One horizontal row per set: the input widget plus its button.
        self.hbox1 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox1.Add(self.set1, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox1.Add(self.addSet1, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox2 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox2.Add(self.set2, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox2.Add(self.addSet2, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox3 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox3.Add(self.set3, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox3.Add(self.addSet3, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox4 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox4.Add(self.set4, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox4.Add(self.addSet4, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox5 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox5.Add(self.set5, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox5.Add(self.addSet5, 0, flag=wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.titlebox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.vbox.Add(self.hbox1, 0, flag = wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 10)
        self.vbox.Add(self.hbox2, 0, flag = wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 10)
        self.vbox.Add(self.hbox3, 0, flag = wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 10)
        self.vbox.Add(self.hbox4, 0, flag = wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 10)
        self.vbox.Add(self.hbox5, 0, flag = wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 10)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.create_menu()
        self.CreateStatusBar()
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)
        self.Center()
        self.Show()

    def addSet(self, event, stringName):
        '''
        This is the method that adds a regions files and reference file set to the regions file
        box in the parent frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        stringName : string
            A string to differentiate the different sets which a user could be trying to add.

        Notes
        -----
        There is no return, but upon successful completion a set in the form (somefile.reg,referencefile.fits;)
        will be added to the regions file box in the parent frame.
        '''
        # Pick the input row matching the button that was clicked.
        if stringName == "Add Set 1":
            useSet = self.set1
        elif stringName == "Add Set 2":
            useSet = self.set2
        elif stringName == "Add Set 3":
            useSet = self.set3
        elif stringName == "Add Set 4":
            useSet = self.set4
        elif stringName == "Add Set 5":
            useSet = self.set5
        regions = useSet.boxList[1].GetValue().strip()
        reference = useSet.boxList[2].GetValue().strip()
        if self.SetCheck(regions, reference) == True:
            # Normalize the displayed values, then check for duplicates
            # against the sets already in the parent's regions box.
            useSet.boxList[1].SetValue(regions)
            useSet.boxList[2].SetValue(reference)
            setString = regions + "," + reference
            dataImages = self.parent.paths.boxList[3].GetValue().strip().split(",")
            regionsBox = self.parent.paths.boxList[4].GetValue()
            uniqueSet = True
            uniqueReg = True
            uniqueRef = True
            for eachSet in regionsBox.split(";"):
                if len(eachSet.split(",")) == 2:
                    tempReg = eachSet.split(",")[0].strip()
                    tempRef = eachSet.split(",")[1].strip()
                    if tempReg == regions and tempRef == reference:
                        uniqueSet = False
                        break
                    elif tempReg == regions:
                        uniqueReg = False
                        break
                    elif tempRef == reference:
                        uniqueRef = False
                        break
            if uniqueSet == False:
                self.IP = InvalidParameter("", self, -1, stringVal="setExists")
            elif uniqueReg == False:
                # Same regions file already paired with another reference.
                tempString = "\nRegions: " + regions + "\nReference: " + reference + "\nBecause ---" + "\nRegions: " + \
                tempReg + "\nIs already associated with:\nReference: " + tempRef
                self.IP = InvalidParameter(tempString, self, -1, stringVal="regionsDup")
            elif uniqueRef == False:
                # Same reference image already paired with another regions file.
                tempString = "\nRegions: " + regions + "\nReference: " + reference + "\nBecause ---" + "\nRegions: " + \
                tempReg + "\nIs already associated with this reference file."
                self.IP = InvalidParameter(tempString, self, -1, stringVal="referenceImageDup")
            elif all(reference != temp for temp in dataImages):
                # The reference image must be one of the data images.
                self.IP = InvalidParameter("\nRegions: "+ regions + "\nReference: " + reference, self, -1, stringVal="invalidRefExist")
            else:
                regionsBox += setString + ";"
                self.parent.paths.boxList[4].SetValue(regionsBox)
                self.IP = InvalidParameter("", self, -1, stringVal="regionsUpdate")

    def SetCheck(self, reg, ref):
        '''
        This method checks whether or not the regions file and reference file given are valid files
        for their respective roles.

        Parameters
        ----------
        reg : string
            A value from a regions file text box that needs to be checked.
        ref : string
            A value from a reference file text box that needs to be checked.

        Returns
        -------
        literal : bool
            True if both files are valid, false otherwise.
        '''
        if reg == "":
            self.IP = InvalidParameter(reg, self, -1, stringVal="regionsError1")
            return False
        elif ref == "":
            self.IP = InvalidParameter(ref, self, -1, stringVal="regionsError1")
            return False
        if len(glob(reg)) != 1:
            # Either no match or an ambiguous multi-file entry; format the
            # comma-separated parts one per line for the error dialog.
            tempString = reg
            if len(reg.split(",")) > 1:
                tempString = ""
                for string in reg.split(","):
                    if string == "":
                        tempString += ","
                    else:
                        tempString += "\n" + string.strip()
            self.IP = InvalidParameter(tempString, self, -1, stringVal="regionsError2")
            return False
        elif len(glob(ref)) != 1:
            tempString = ref
            if len(ref.split(",")) > 1:
                tempString = ""
                for string in ref.split(","):
                    if string == "":
                        tempString += ","
                    else:
                        tempString += "\n" + string.strip()
            self.IP = InvalidParameter(tempString, self, -1, stringVal="regionsError2")
            return False
        elif reg.lower().endswith(".reg") == False:
            self.IP = InvalidParameter(reg, self, -1, stringVal="regionsError3")
            return False
        elif ref.lower().endswith(".fits") == False and ref.lower().endswith(".fit") == False:
            self.IP = InvalidParameter(ref, self, -1, stringVal="regionsError4")
            return False
        return True

    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the extra regions frame.

        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_quit = menu_file.Append(wx.ID_EXIT, "Quit\tCtrl+Q", "Quit this application.")
        self.Bind(wx.EVT_MENU, self.on_exit, m_quit)
        menubar.Append(menu_file, "File")
        self.SetMenuBar(menubar)

    def on_exit(self,event):
        '''
        This method defines the action quit from the menu. It closes the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()

    def onDestroy(self,event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.extraRegionsOpen = False
class MasterFlatFrame(wx.Frame):
    '''
    This frame allows the user to create a master flat using their own images.
    '''
    def __init__(self, parent, objectID):
        '''
        Lay out and show the Master Flat Maker window.

        Parameters
        ----------
        parent : wx.Frame
            The main OSCAAR frame; its `loadMasterFlat` flag is cleared when
            this frame is destroyed.
        objectID : int
            The wx ID for this frame (usually -1).
        '''
        wx.Frame.__init__(self, parent, objectID, "Master Flat Maker")
        self.panel = wx.Panel(self)
        self.parent = parent
        self.overWrite = False      # guards against opening multiple OverWrite prompts
        self.messageFrame = False
        self.IP = wx.Frame          # placeholder until an InvalidParameter frame is created
        self.titlebox = wx.StaticText(self.panel, -1, 'OSCAAR: Master Flat Maker')
        self.titleFont = wx.Font(15, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        self.titlebox.SetFont(self.titleFont)
        # Three labeled path controls: flat frames, dark flats, output file.
        self.path1 = AddLCB(self.panel, -1, name="Path to Flat Images: ", multFiles=True, saveType=None)
        self.path2 = AddLCB(self.panel, -1, name="Path to Dark Flat Images: ", multFiles=True, saveType=None)
        self.path3 = AddLCB(self.panel, -1, name="Path to Save Master Flat: ", saveType=wx.FD_SAVE)
        self.plotBox = ParameterBox(self.panel, -1, [('rbTrackPlot', "", "On", "Off")], name="Plots")
        self.flatBox = ParameterBox(self.panel, -1, [('rbFlatType', "", "Standard", "Twilight")], name="Flat Type")
        self.runButton = wx.Button(self.panel, -1, label="Run")
        self.Bind(wx.EVT_BUTTON, self.run, self.runButton)
        self.hbox = wx.BoxSizer(wx.HORIZONTAL)
        for widget in (self.plotBox, self.flatBox, self.runButton):
            self.hbox.Add(widget, 0, flag=wx.ALIGN_CENTER | wx.ALL, border=10)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        for item in (self.titlebox, self.path1, self.path2, self.path3, self.hbox):
            self.vbox.Add(item, 0, flag=wx.ALIGN_CENTER | wx.ALL, border=10)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.create_menu()
        self.CreateStatusBar()
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)
        self.Center()
        self.Show()
    def run(self,event):
        '''
        This runs either the standardFlatMaker or twilightFlatMaker method from the systematics.py to create a master flat.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.

        Notes
        -----
        There is no return, on successful completion a window will open up with what the master flat looks like.
        '''
        path = self.path3.boxList[1].GetValue().strip()
        # checkFileInputs returns "" on success, otherwise an error message.
        flatError = self.checkFileInputs(self.path1.boxList[1].GetValue(), self.path1.boxList[1])
        darkError = self.checkFileInputs(self.path2.boxList[1].GetValue(), self.path2.boxList[1])
        if flatError != "":
            self.IP = InvalidParameter(flatError, self, -1, stringVal="flat1")
        elif darkError != "":
            self.IP = InvalidParameter(darkError, self, -1, stringVal="flat2")
        elif not path:
            self.IP = InvalidParameter(str(path), self, -1, stringVal="flat3")
        # BUG FIX: the original tested os.path.isdir(path[path.rfind(os.sep)]),
        # i.e. the lone separator character, which is always a directory, so
        # the destination directory was never actually validated. The slice
        # (as used on the following length check) was clearly intended.
        elif not os.path.isdir(path[:path.rfind(os.sep)]) or \
                not len(path) > (len(path[:path.rfind(os.sep)]) + 1):
            self.IP = InvalidParameter(path, self, -1, stringVal="flat3")
        else:
            # Expand the (comma-separated, possibly wildcarded) inputs into
            # explicit file lists; OverWrite.onMasterFlat reads these back.
            self.flatImages = []
            self.darkFlatImages = []
            for pathname in self.path1.boxList[1].GetValue().split(','):
                self.flatImages += glob(pathname)
            for pathname in self.path2.boxList[1].GetValue().split(','):
                self.darkFlatImages += glob(pathname)
            if not path.lower().endswith('.fits') and not path.lower().endswith('.fit'):
                path += '.fits'
            pathCorrected = path.replace('/', os.sep)
            outfolder = pathCorrected[:pathCorrected.rfind(os.sep)] + os.sep + '*'
            self.plotCheck = self.plotBox.userParams['rbTrackPlot'].GetValue()
            if pathCorrected in glob(outfolder):
                # The output file already exists: ask before clobbering it.
                if self.overWrite == False:
                    OverWrite(self, -1, "Overwrite Master Flat", pathCorrected, "MasterFlat")
                    self.overWrite = True
            else:
                # First radio button == "Standard" flat; the second is "Twilight".
                if self.flatBox.userParams['rbFlatType'].GetValue() == True:
                    systematics.standardFlatMaker(self.flatImages, self.darkFlatImages, self.path3.boxList[1].GetValue(),
                                                  self.plotCheck)
                else:
                    systematics.twilightFlatMaker(self.flatImages, self.darkFlatImages, self.path3.boxList[1].GetValue(),
                                                  self.plotCheck)
    def checkFileInputs(self,array,box):
        '''
        This method checks to make sure that the files entered in a text box in the master flat frame are valid.

        Parameters
        ----------
        array : string
            A comma-separated list of all of the files that need to be checked.
            An entry ending in the path separator is treated as a directory and
            expanded to `<dir>*.fit` and `<dir>*.fits`.
        box : wx.TextCtrl
            The box that gets refreshed with a string of the valid files.

        Returns
        -------
        errorString : string
            A list of all the files that were invalid.

        Notes
        -----
        If `errorString` returns '' (empty), that means that all the files were valid.
        '''
        if not array:
            return "No Values Entered"
        errorString = ""
        validFiles = []
        for element in array.split(","):
            element = element.strip()
            # BUG FIX: the directory flag and its companion pattern are reset
            # for every entry. Previously the flag (`smallArray`) persisted
            # across iterations, so any plain file listed after a directory
            # entry was matched against that directory's stale patterns.
            isDirectory = False
            tempElement = ""
            if element.lower().endswith(os.sep):
                tempElement = element + "*.fit"
                element += "*.fits"
                isDirectory = True
            if not isDirectory:
                matches = glob(element)
                if len(matches) < 1:
                    errorString += element
                elif len(matches) > 1:
                    for element2 in matches:
                        if element2.lower().endswith(".fit") or element2.lower().endswith(".fits"):
                            validFiles.append(element2)
                        else:
                            errorString += "\n" + element2
                elif not element.lower().endswith(".fit") and not element.lower().endswith(".fits"):
                    errorString += "\n" + element
                else:
                    validFiles.append(matches[0])
            else:
                # Directory entry: accept matches from either extension pattern.
                if len(glob(tempElement)) < 1 and len(glob(element)) < 1:
                    errorString += "\n" + tempElement + ",\n" + element
                else:
                    validFiles += glob(tempElement)
                    validFiles += glob(element)
        if errorString == "":
            # Echo the de-duplicated, expanded file list back into the text box.
            uniqueFiles = np.unique(validFiles).tolist()
            box.SetValue(",".join(uniqueFiles))
        return errorString
    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the master flat frame.

        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_quit = menu_file.Append(wx.ID_EXIT, "Quit\tCtrl+Q", "Quit this application.")
        self.Bind(wx.EVT_MENU, self.on_exit, m_quit)
        menubar.Append(menu_file, "File")
        self.SetMenuBar(menubar)
    def on_exit(self,event):
        '''
        This method defines the action quit from the menu. It closes the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()
    def onDestroy(self,event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.loadMasterFlat = False
class AboutFrame(wx.Frame):
    '''
    This is a frame about OSCAAR and its contributors.
    '''
    def __init__(self, parent, objectID):
        '''
        Build and show the "About OSCAAR" window.

        Parameters
        ----------
        parent : wx.Frame
            The main OSCAAR frame; its `aboutOpen` flag is cleared when this
            frame is destroyed, and its `openLink` method is reused for the
            repository button.
        objectID : int
            The wx ID for this frame (usually -1).
        '''
        wx.Frame.__init__(self, parent, objectID, "About OSCAAR")
        self.panel = wx.Panel(self)
        self.parent = parent
        self.static_bitmap = wx.StaticBitmap(self.panel, style=wx.ALIGN_CENTER)
        # The logo is resolved relative to this module's directory so the
        # frame works regardless of the current working directory.
        self.logo = wx.Image(os.path.join(os.path.dirname(os.path.abspath(__file__)),'images/logo4noText.png'),
                             wx.BITMAP_TYPE_ANY)
        self.bitmap = wx.BitmapFromImage(self.logo)
        self.static_bitmap.SetBitmap(self.bitmap)
        titleText = '\n'.join(['OSCAAR 2.0 beta',\
                     'Open Source differential photometry Code for Amateur Astronomical Research',\
                     'Created by Brett M. Morris (NASA GSFC/UMD)\n'])
        contribText = '\n'.join(['Other Contributors:',\
                     'Daniel Galdi (UMD)',\
                     'Luuk Visser (LU/TUD)',\
                     'Nolan Matthews (UMD)',\
                     'Dharmatej Mikkilineni (UMD)',\
                     'Harley Katz (UMD)',\
                     'Sam Gross (UMD)',\
                     'Naveed Chowdhury (UMD)',\
                     'Jared King (UMD)',\
                     'Steven Knoll (UMD)'])
        self.titleText = wx.StaticText(self.panel, -1, label = titleText, style = wx.ALIGN_CENTER)
        self.contribText = wx.StaticText(self.panel, -1, label = contribText, style = wx.ALIGN_CENTER)
        self.viewRepoButton = wx.Button(self.panel, -1, label = "Open Code Repository (GitHub)")
        self.exitButton = wx.Button(self.panel, -1, label = "Close")
        # Delegate link opening to the parent frame's browser helper.
        self.Bind(wx.EVT_BUTTON, lambda evt: self.parent.openLink(evt, "https://github.com/OSCAAR/OSCAAR"),
                  self.viewRepoButton)
        self.exitButton.Bind(wx.EVT_BUTTON, self.exit)
        self.buttonBox = wx.BoxSizer(wx.HORIZONTAL)
        self.buttonBox.Add(self.viewRepoButton, 0, flag = wx.ALIGN_CENTER | wx.RIGHT, border = 20)
        self.buttonBox.Add(self.exitButton, 0, flag = wx.ALIGN_CENTER)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.static_bitmap, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 10)
        self.vbox.Add(self.titleText, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 10)
        self.vbox.Add(self.contribText, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 10)
        self.vbox.Add(self.buttonBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 10)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.vbox.AddSpacer(10)
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)
        self.Center()
        self.Show()
    def exit(self, event):
        '''
        This method defines the action quit for the button `close`. It closes the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()
    def onDestroy(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.aboutOpen = False
class OverWrite(wx.Frame):
    '''
    This class creates a frame that prompts a user action for whether or not a file can be overwritten. Based
    on the user's response, different methods are activated.
    '''
    def __init__(self, parent, objectID, title, path, option):
        '''
        Build and show the confirmation prompt.

        Parameters
        ----------
        parent : wx.Frame
            The frame that opened this prompt; its `overWrite` (or
            `preprocessedImagesFrame`) flag is updated when the prompt closes.
        objectID : int
            The wx ID for this frame (usually -1).
        title : string
            The window title.
        path : string
            The file that would be overwritten. An empty string switches the
            prompt into the "are you using precorrected images?" question.
        option : string
            One of "MasterFlat", "Output File" or "PreprocessedImages";
            selects which handler is bound to the Yes button.
        '''
        wx.Frame.__init__(self, parent, objectID, title)
        self.panel = wx.Panel(self)
        self.parent = parent
        self.path = path
        # An empty path means this prompt is about running without dark/flat
        # corrections rather than about overwriting an existing file.
        if path == "":
            self.text = wx.StaticText(self.panel, -1,
                                      "Are you using precorrected images?\n\nYou have left " +
                                      "either the path to Dark Frames or the " +
                                      "Path to the Master Flat empty.\nIf you are " +
                                      "using pre-processed Data Images, press "+
                                      "Yes and OSCAAR will run without \ndark " +
                                      "and flat corrections. If you need to enter "+
                                      "these exposures, press No to return.")
        else:
            self.text = wx.StaticText(self.panel, -1, "Are you sure you want to overwrite\n" + self.path + "?")
        self.yesButton = wx.Button(self.panel, label = "Yes")
        self.noButton = wx.Button(self.panel,label = "No")
        self.SetFocus()
        # Bind the Yes button to the handler matching the caller's context.
        if option == "MasterFlat":
            self.Bind(wx.EVT_BUTTON, self.onMasterFlat, self.yesButton)
        elif option == "Output File":
            self.Bind(wx.EVT_BUTTON, self.onOutputFile, self.yesButton)
        elif option == "PreprocessedImages":
            self.Bind(wx.EVT_BUTTON, self.onPreprocessedImages, self.yesButton)
        self.Bind(wx.EVT_BUTTON, self.onNO, self.noButton)
        self.sizer0 = wx.FlexGridSizer(rows=2, cols=1)
        self.buttonBox = wx.BoxSizer(wx.HORIZONTAL)
        self.buttonBox.Add(self.yesButton,0,wx.ALIGN_CENTER|wx.ALL,5)
        self.buttonBox.Add(self.noButton,0,wx.ALIGN_CENTER|wx.ALL,5)
        self.hbox = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox.Add(self.sizer0,0, wx.ALIGN_CENTER|wx.ALL,5)
        self.sizer0.Add(self.text,0,wx.ALIGN_CENTER|wx.ALL,5)
        self.sizer0.Add(self.buttonBox, 0,wx.ALIGN_CENTER|wx.ALL,5)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.doNothing)
        self.panel.SetSizer(self.hbox)
        self.hbox.Fit(self)
        self.Center()
        self.Show()
    def onMasterFlat(self,event):
        '''
        When the user selects `yes` in this frame with the parent frame being the master flat frame, then
        a new master flat will be created, overwriting the currently selected one.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()
        self.parent.overWrite = False
        # Remove the old master flat, then regenerate it from the image lists
        # the parent frame collected in its run() method.
        os.remove(self.path)
        if self.parent.flatBox.userParams['rbFlatType'].GetValue() == True:
            systematics.standardFlatMaker(self.parent.flatImages, self.parent.darkFlatImages,
                                          self.parent.path3.boxList[1].GetValue(), self.parent.plotCheck)
        else:
            systematics.twilightFlatMaker(self.parent.flatImages, self.parent.darkFlatImages,
                                          self.parent.path3.boxList[1].GetValue(), self.parent.plotCheck)
    def onOutputFile(self,event):
        '''
        This method is for whether or not to override the existing .pkl file that was specified in the output path
        text box in the parent OSCAAR frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()
        self.parent.overWrite = False
        # Run differential photometry in a subprocess so the GUI stays usable.
        diffPhotCall = "from oscaar import differentialPhotometry"
        subprocess.check_call(['python','-c',diffPhotCall])
        if self.parent.radioBox.userParams["rbFitAfterPhot"].GetValue() == True:
            wx.CallAfter(self.parent.createFrame)
    def onPreprocessedImages(self, event):
        '''
        This method is to remind the user that they are trying to run the
        differential photometry script without any dark frames or a master flat
        and make sure they want to continue.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()
        self.parent.preprocessedImagesFrame = False
        # If the chosen output file already exists, chain into a second
        # OverWrite prompt; otherwise run photometry immediately.
        if os.path.isfile(self.parent.outputFile) or os.path.isfile(self.parent.outputFile + '.pkl'):
            if self.parent.overWrite == False:
                OverWrite(self.parent, -1, "Overwrite Output File", self.parent.outputFile, "Output File")
                self.parent.overWrite = True
        else:
            diffPhotCall = "from oscaar import differentialPhotometry"
            subprocess.check_call(['python','-c',diffPhotCall])
            if self.parent.radioBox.userParams["rbFitAfterPhot"].GetValue() == True:
                wx.CallAfter(self.parent.createFrame)
    def onNO(self, event):
        '''
        When a user presses the `no` button, this method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame. It then
        will close the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        # Which parent flag to clear depends on which prompt variant this is.
        if self.path == "":
            self.parent.preprocessedImagesFrame = False
        else:
            self.parent.overWrite = False
        self.Destroy()
    def doNothing(self,event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        if self.path == "":
            self.parent.preprocessedImagesFrame = False
        else:
            self.parent.overWrite = False
        pass
class EphemerisFrame(wx.Frame):
'''
This frame will allow users to calculate the positions of different planets in the sky
for a given time frame at a specified observatory.
'''
    def __init__(self, parent, objectID):
        '''
        Build and show the Ephemerides window.

        Parameters
        ----------
        parent : wx.Frame
            The main OSCAAR frame; notified on close via `ephGUIOpen`-style
            bookkeeping in onDestroy (see that method).
        objectID : int
            The wx ID for this frame (usually -1).
        '''
        wx.Frame.__init__(self, parent, objectID, "Ephemerides")
        self.panel = wx.Panel(self)
        self.parent = parent
        self.messageFrame = False
        self.IP = wx.Frame  # placeholder until an InvalidParameter frame is created
        self.titlebox = wx.StaticText(self.panel, -1, 'Ephemeris Calculator')
        self.titleFont = wx.Font(15, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        self.titlebox.SetFont(self.titleFont)
        self.titlebox2 = wx.StaticText(self.panel, -1, 'Advanced Options')
        self.titlebox2.SetFont(self.titleFont)
        # Windows needs a smaller bold font for the labels to fit.
        if sys.platform == "win32":
            self.fontType = wx.Font(9, wx.DEFAULT, wx.NORMAL, wx.BOLD)
        else:
            self.fontType = wx.Font(12, wx.DEFAULT, wx.NORMAL, wx.NORMAL)
        self.calculateButton = wx.Button(self.panel, -1, label = "Calculate")
        self.Bind(wx.EVT_BUTTON, self.calculate, self.calculateButton)
        # Build the observatory drop-down from the bundled .par files; each
        # file's "name:" line maps a display name to its parameter file.
        obsList = glob(os.path.join(os.path.dirname(os.path.abspath(oscaar.__file__)),'extras','eph','observatories','*.par'))
        self.nameList = {}
        for currentFile in obsList:
            for line in open(currentFile,'r').read().splitlines():
                if line.split(":")[0] == "name":
                    self.nameList[line.split(":")[1].strip()] = currentFile
        self.obsLabel = wx.StaticText(self.panel, -1, 'Select Observatory: ')
        self.obsLabel.SetFont(self.fontType)
        self.obsList = wx.ComboBox(self.panel, value = 'Observatories',
                                   choices = sorted(self.nameList.keys()) + ["Enter New Observatory"])
        self.obsList.Bind(wx.EVT_COMBOBOX, self.update)
        self.dropBox = wx.BoxSizer(wx.HORIZONTAL)
        self.dropBox.Add(self.obsLabel, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 10)
        self.dropBox.Add(self.obsList, 0, flag = wx.ALIGN_CENTER)
        # Left-hand column: name/file/date-range/limit text fields.
        tupleList = [('observatoryName',"Name of Observatory: ","",""),
                     ('fileName',"Enter File Name: ","",""),
                     ('obsStart',"Start of Observation, UT (YYYY/MM/DD): ",
                      "Enter a date in the correct format here.",datetime.date.today().strftime("%Y/%m/%d")),
                     ('obsEnd',"End of Observation, UT (YYYY/MM/DD): ",
                      "Enter a date in the correct format here.",(datetime.datetime.now()+datetime.timedelta(days=7)
                                                                  ).strftime("%Y/%m/%d")),
                     ('upperLimit',"Apparent Mag. Upper Limit: ","","0.0"),
                     ('lowerLimit',"Depth Lower Limit: ","","0.0")]
        self.leftBox = ParameterBox(self.panel,-1,tupleList, rows=6, cols=2, vNum = 5, hNum = 15, font = self.fontType)
        # Advanced options: site coordinates and observing constraints.
        tupleList = [("latitude","Latitude (deg:min:sec): ","","00:00:00"),
                     ("longitude","Longitude (deg:min:sec): ","","00:00:00"),
                     ("elevation","Observatory Elevation (m): ","","0.0"),
                     ("temperature","Temperature (degrees C): ","","0.0"),
                     ("lowerElevation","Lower Elevation Limit (deg:min:sec): ","","00:00:00")]
        self.leftBox2 = ParameterBox(self.panel, -1, tupleList, rows=5, cols=2, vNum = 5, hNum = 15, font =self.fontType)
        # Maps the human-readable twilight names to the sun-altitude strings
        # written into the observatory .par files.
        self.twilightChoices = {}
        self.twilightChoices["Civil Twilight (-6 degrees)"] = "-6"
        self.twilightChoices["Nautical Twilight (-12 degrees)"] = "-12"
        self.twilightChoices["Astronomical Twilight (-18 degrees)"] = "-18"
        self.twilightLabel = wx.StaticText(self.panel, -1, "Select Twilight Type: ")
        self.twilightLabel.SetFont(self.fontType)
        self.twilightList = wx.ComboBox(self.panel, value = "Civil Twilight (-6 degrees)",
                                        choices = sorted(self.twilightChoices.keys()))
        self.dropBox2 = wx.BoxSizer(wx.HORIZONTAL)
        self.dropBox2.Add(self.twilightLabel, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 10)
        self.dropBox2.Add(self.twilightList, 0, flag = wx.ALIGN_CENTER)
        tupleList = [('rbBand',"","V","K")]
        self.band = ParameterBox(self.panel,-1,tupleList, name = "Band Type")
        tupleList = [('rbShowLT',"","On","Off")]
        self.showLT = ParameterBox(self.panel,-1,tupleList, name = "Show Local Times", secondButton = True)
        self.botRadioBox = wx.BoxSizer(wx.HORIZONTAL)
        self.botRadioBox.Add(self.showLT, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 10)
        self.botRadioBox.Add(self.band, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 15)
        tupleList = [('rbCalcEclipse',"","True","False")]
        self.calcEclipseBox = ParameterBox(self.panel,-1,tupleList, name = "Calculate Eclipses", secondButton = True)
        tupleList = [('rbHtmlOut',"","True", "False")]
        self.htmlBox = ParameterBox(self.panel,-1,tupleList, name = "HTML Out")
        tupleList = [('rbTextOut',"","True","False")]
        self.textBox = ParameterBox(self.panel,-1,tupleList, name = "Text Out")
        tupleList = [('rbCalcTransits',"","True","False")]
        self.calcTransitsBox = ParameterBox(self.panel,-1,tupleList, name = "Calculate Transits")
        # NOTE: the order of these four boxes matters — update() indexes the
        # resulting radio buttons positionally (0,2,4,6 for the True buttons).
        self.radioBox = wx.BoxSizer(wx.VERTICAL)
        self.radioBox.Add(self.calcTransitsBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.radioBox.Add(self.calcEclipseBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.radioBox.Add(self.htmlBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.radioBox.Add(self.textBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.topBox = wx.BoxSizer(wx.HORIZONTAL)
        self.topBox.Add(self.leftBox, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 5)
        self.topBox.Add(self.calculateButton, 0, flag = wx.ALIGN_CENTER | wx.RIGHT | wx.LEFT, border = 5)
        self.leftVertBox = wx.BoxSizer(wx.VERTICAL)
        self.leftVertBox.Add(self.leftBox2, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.leftVertBox.Add(self.dropBox2, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.leftVertBox.Add(self.botRadioBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.botBox = wx.BoxSizer(wx.HORIZONTAL)
        self.botBox.Add(self.leftVertBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.botBox.Add(self.radioBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.titlebox, 0, flag = wx.ALIGN_CENTER | wx.TOP, border = 5)
        self.vbox.Add(self.dropBox, 0, flag = wx.ALIGN_LEFT | wx.TOP, border = 10)
        self.vbox.Add(self.topBox, 0, flag = wx.ALIGN_CENTER)
        self.vbox.Add(self.titlebox2, 0, flag = wx.ALIGN_CENTER)
        self.vbox.Add(self.botBox, 0, flag = wx.ALIGN_CENTER)
        self.create_menu()
        self.CreateStatusBar()
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)
        self.Center()
        self.Show()
def calculate(self, event):
'''
After checking to see if all of the parameters entered are valid, this method actually runs
the calculateEphemerides method from the eph.py file to get the transit times and such for
different planets.
Parameters
----------
event : wx.EVT_*
A wxPython event that allows the activation of this method. The * represents a wild card value.
Notes
-----
On successful completion a new window will open in the default browser for your machine with the
ephemeris chart open.
'''
try:
import oscaar.extras.eph.calculateEphemerides as eph
import ephem
ephem.sun_radius
if self.parameterCheck() == True:
if self.parent.singularOccurance == 0 and self.showLT.userParams["rbShowLT"].GetValue():
self.parent.singularOccurance = 1
self.IP = InvalidParameter("", self, -1, stringVal="warnError")
else:
outputPath = str(os.path.join(os.path.dirname(os.path.abspath(oscaar.__file__)),
'extras','eph','ephOutputs','eventReport.html'))
path = os.path.join(os.path.dirname(os.path.abspath(oscaar.__file__)),
'extras','eph','observatories', self.leftBox.userParams["fileName"].GetValue() + '.par')
if not self.nameList.has_key(self.name):
self.nameList[self.name] = path
self.obsList.Append(self.name)
self.saveFile(path)
eph.calculateEphemerides(path)
if self.htmlBox.userParams["rbHtmlOut"].GetValue() == True:
webbrowser.open_new_tab("file:"+2*os.sep+outputPath)
except ImportError:
self.IP = InvalidParameter("", self, -1, stringVal="importError")
    def parameterCheck(self):
        '''
        This is a local method for this class that checks to make sure all of the
        parameters that can be manipulated by the user are valid.

        On success the validated values are left in self.name, self.fileName,
        self.latitude, self.longitude, self.elevation, self.temperature,
        self.lowerElevation, self.startingDate, self.endingDate,
        self.upperLimit, self.lowerLimit and self.twilight for saveFile().
        On any failure an InvalidParameter frame is opened describing the
        offending field.

        Returns
        -------
        literal : bool
            False if any of the parameters are invalid, true otherwise.
        '''
        self.name = self.leftBox.userParams["observatoryName"].GetValue().strip()
        self.fileName = self.leftBox.userParams["fileName"].GetValue().strip()
        self.latitude = self.leftBox2.userParams["latitude"].GetValue().strip()
        self.longitude = self.leftBox2.userParams["longitude"].GetValue().strip()
        self.elevation = self.leftBox2.userParams["elevation"].GetValue().strip()
        self.temperature = self.leftBox2.userParams["temperature"].GetValue().strip()
        self.lowerElevation = self.leftBox2.userParams["lowerElevation"].GetValue().strip()
        self.startingDate = self.leftBox.userParams["obsStart"].GetValue().strip()
        self.endingDate = self.leftBox.userParams["obsEnd"].GetValue().strip()
        self.upperLimit = self.leftBox.userParams["upperLimit"].GetValue().strip()
        self.lowerLimit = self.leftBox.userParams["lowerLimit"].GetValue().strip()
        self.twilight = self.twilightList.GetValue().strip()
        # Reject empty fields or the untouched placeholder text.
        if self.name == "" or self.name == "Enter the name of the Observatory":
            self.IP = InvalidParameter(self.name, self, -1, stringVal="obsName")
            return False
        elif self.fileName == "" or self.fileName == "Enter the name of the file":
            self.IP = InvalidParameter(self.fileName, self, -1, stringVal="obsFile")
            return False
        # Validate both dates: YYYY/MM/DD format, within +/-100 years of today.
        years = []
        months = []
        days = []
        for dateArray,value in [(self.startingDate.split("/"),self.startingDate),
                                (self.endingDate.split("/"),self.endingDate)]:
            if len(dateArray) != 3:
                self.IP = InvalidParameter(value, self, -1, stringVal="obsDate")
                return False
            else:
                try:
                    year = int(dateArray[0].strip())
                    years.append(year)
                    month = int(dateArray[1].strip())
                    months.append(month)
                    day = int(dateArray[2].strip())
                    days.append(day)
                    # Enforce 4-digit year and at most 2-digit month/day.
                    if len(dateArray[0].strip()) != 4 or len(dateArray[1].strip()) > 2 or len(dateArray[2].strip()) > 2:
                        self.IP = InvalidParameter(value, self, -1, stringVal="obsDate")
                        return False
                    minYear = datetime.date.today().year - 100
                    maxYear = datetime.date.today().year + 100
                    if year < minYear or year > maxYear or month > 12 or month < 0 or day > 31 or day < 0 or \
                       month == 0 or year == 0 or day == 0:
                        self.IP = InvalidParameter(value, self, -1, stringVal="dateRange")
                        return False
                except ValueError:
                    self.IP = InvalidParameter(value, self, -1, stringVal="obsDate")
                    return False
        # The start date must be strictly before the end date.
        if years[0] > years[1]:
            self.IP = InvalidParameter(self.startingDate, self, -1, stringVal="logicalDate")
            return False
        elif years[0] == years[1]:
            if months[0] > months[1]:
                self.IP = InvalidParameter(self.startingDate, self, -1, stringVal="logicalDate")
                return False
            elif months[0] == months[1]:
                if days[0] >= days[1]:
                    self.IP = InvalidParameter(self.startingDate, self, -1, stringVal="logicalDate")
                    return False
        # Validate latitude/longitude as deg:min:sec sexagesimal strings and
        # normalize them to "D:M:S" with float components.
        for coordArray, value, coordType in [(self.latitude.split(":"),self.latitude, "lat"),
                                             (self.longitude.split(":"),self.longitude, "long")]:
            if(len(coordArray) != 3):
                self.IP = InvalidParameter(value, self, -1, stringVal="coordTime")
                return False
            else:
                try:
                    deg = float(coordArray[0].strip())
                    minutes = float(coordArray[1].strip())
                    sec = float(coordArray[2].strip())
                    if coordType == "lat":
                        self.latitude = str(deg) + ":" + str(minutes) + ":" + str(sec)
                        if abs(deg) > 90.0 or minutes >= 60 or minutes < 0.0 or sec >= 60 or sec < 0.0:
                            self.IP = InvalidParameter(value, self, -1, stringVal="coordRange")
                            return False
                    elif coordType == "long":
                        self.longitude = str(deg) + ":" + str(minutes) + ":" + str(sec)
                        if abs(deg) > 180.0 or minutes >= 60 or minutes < 0.0 or sec >= 60 or sec < 0.0:
                            self.IP = InvalidParameter(value, self, -1, stringVal="coordRange")
                            return False
                    # At the poles / antimeridian the minutes and seconds must be zero.
                    if abs(deg) == 90 and coordType == "lat":
                        if minutes != 0 or sec != 0:
                            self.IP = InvalidParameter(value, self, -1, stringVal="coordRange")
                            return False
                    elif abs(deg) == 180 and coordType == "long":
                        if minutes != 0 or sec != 0:
                            self.IP = InvalidParameter(value, self, -1, stringVal="coordRange")
                            return False
                except ValueError:
                    self.IP = InvalidParameter(value, self, -1, stringVal="coordTime")
                    return False
        # Validate the numeric fields; tempString tracks which field is being
        # parsed so the except clause can report the right one.
        try:
            tempString = "elevation"
            temp1 = float(self.elevation)
            tempString = "temperature"
            temp2 = float(self.temperature)
            tempString = "apparent magnitude upper limit"
            temp3 = float(self.upperLimit)
            tempString = "depth lower limit"
            temp4 = float(self.lowerLimit)
            tempString = "lower elevation limit"
            # No-op reference: temp3 only needed to parse as a float above.
            if temp3: pass
            stripElevation = self.lowerElevation.split(":")
            if len(stripElevation) != 3:
                self.IP = InvalidParameter(self.lowerElevation, self, -1, stringVal="lowerElevation")
                return False
            temp6 = int(stripElevation[0])
            temp7 = int(stripElevation[1])
            temp8 = int(stripElevation[2])
            if temp6 < 0.0 or temp6 > 90 or temp7 >= 60 or temp7 < 0.0 or temp8 >= 60 or temp8 < 0.0:
                self.IP = InvalidParameter(self.lowerElevation, self, -1, stringVal="lowerElevation")
                return False
            elif temp6 == 90:
                if temp7 != 0 or temp8 != 0:
                    self.IP = InvalidParameter(self.lowerElevation, self, -1, stringVal="lowerElevation")
                    return False
            self.lowerElevation = stripElevation[0].strip() + ":" + stripElevation[1].strip() + ":" +\
                                  stripElevation[2].strip()
            if temp1 < 0:
                self.IP = InvalidParameter(self.elevation, self, -1, stringVal="tempElevNum", secondValue="elevation")
                return False
            elif temp2 < 0:
                self.IP = InvalidParameter(self.temperature, self, -1, stringVal="tempElevNum", secondValue="temperature")
                return False
            elif temp4 < 0:
                self.IP = InvalidParameter(self.lowerLimit, self, -1, stringVal="tempElevNum", secondValue="depth lower limit")
                return False
        except ValueError:
            if tempString == "temperature":
                self.IP = InvalidParameter(self.temperature, self, -1, stringVal="tempElevNum", secondValue=tempString)
            elif tempString == "apparent magnitude upper limit":
                self.IP = InvalidParameter(self.upperLimit, self, -1, stringVal="tempElevNum", secondValue=tempString)
            elif tempString == "depth lower limit":
                self.IP = InvalidParameter(self.lowerLimit, self, -1, stringVal="tempElevNum", secondValue=tempString)
            elif tempString == "lower elevation limit":
                self.IP = InvalidParameter(self.lowerElevation, self, -1, stringVal="lowerElevation")
            else:
                self.IP = InvalidParameter(self.elevation, self, -1, stringVal="tempElevNum", secondValue=tempString)
            return False
        # The twilight selection must be one of the three known choices.
        if all(self.twilight != temp for temp in ["Civil Twilight (-6 degrees)",
                                                  "Nautical Twilight (-12 degrees)",
                                                  "Astronomical Twilight (-18 degrees)"]):
            self.IP = InvalidParameter(self.twilight, self, -1, stringVal="twilight")
            return False
        return True
    def update(self, event):
        '''
        This method is bound to the drop down list of observatories that can be selected in the
        frame. Once an observatory is chosen, this method updates all relevant text fields with the
        appropriate parameters read from that observatory's .par file.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        if self.obsList.GetValue() == "Enter New Observatory":
            # Reset the name/file fields to their placeholder prompts.
            self.leftBox.userParams["observatoryName"].SetValue("Enter the name of the Observatory")
            self.leftBox.userParams["fileName"].SetValue("Enter the name of the file")
        else:
            # Collect the radio buttons from the four True/False boxes in the
            # order they were added to self.radioBox (calc_transits,
            # calc_eclipses, html_out, text_out) — two buttons each, so the
            # "True" buttons sit at indices 0, 2, 4 and 6.
            radioBoxes = self.radioBox.GetChildren()
            radioList = []
            for eachBox in radioBoxes:
                window = eachBox.GetWindow()
                children = window.GetChildren()
                for child in children:
                    if isinstance(child, wx.RadioButton):
                        radioList.append(child)
            lines = open(self.nameList[self.obsList.GetValue()],"r").read().splitlines()
            self.leftBox.userParams["fileName"].SetValue(os.path.split(self.nameList[self.obsList.GetValue()
                                                                                     ])[1].split(".")[0])
            for eachLine in lines:
                if len(eachLine.split()) > 1:
                    # Each .par line has the form "key: value".
                    inline = eachLine.split(':', 1)
                    name = inline[0].strip()
                    value = str(inline[1].strip())
                    # Maps .par keys to either a text-control name (string) or
                    # a radio-button index (int) in radioList.
                    tempList = [("name","observatoryName"),("min_horizon","lowerElevation"),("mag_limit","upperLimit"),
                                ("depth_limit","lowerLimit"),("latitude",""),("longitude",""),("elevation",""),
                                ("temperature",""),("twilight",""),("calc_transits",0),("calc_eclipses",2),
                                ("html_out",4),("text_out",6), ("show_lt","rbShowLT"), ("band","rbBand")]
                    for string,saveName in tempList:
                        if string == name:
                            if any(temp == name for temp in ["name","mag_limit","depth_limit"]):
                                self.leftBox.userParams[saveName].SetValue(str(value))
                            elif any(temp == name for temp in ["latitude","longitude","elevation","temperature",
                                                               "twilight","min_horizon","time_zone", "band"]):
                                # An empty saveName means the control shares the .par key's name.
                                if saveName == "":
                                    saveName = name
                                if name == "twilight":
                                    # Reverse-lookup the display string for the stored altitude.
                                    tempStr = [temp for temp in self.twilightChoices.keys() \
                                               if self.twilightChoices[temp] == value]
                                    if len(tempStr) != 0:
                                        self.twilightList.SetValue(tempStr[0])
                                elif name == "show_lt":
                                    # "0" selects the second ("Off") radio button.
                                    if value == "0":
                                        saveName = saveName + "1"
                                    self.showLT.userParams[saveName].SetValue(True)
                                elif name == "band":
                                    # "K" selects the second band radio button.
                                    if value == "K":
                                        saveName = saveName + "1"
                                    self.band.userParams[saveName].SetValue(True)
                                else:
                                    self.leftBox2.userParams[saveName].SetValue(str(value))
                            elif any(temp == name for temp in ["calc_transits","calc_eclipses","html_out","text_out"]):
                                # saveName is an int here; "False" selects the
                                # second button of the pair (index + 1).
                                if(value == "False"):
                                    saveName = saveName + 1
                                radioList[saveName].SetValue(True)
def saveFile(self, fileName):
    '''
    This method saves all the current parameters in the window for a selected
    observatory to a text file. This allows the user to quickly select the observatory
    with pre-loaded parameters after an initial setup.
    Parameters
    ----------
    fileName : string
        The name of the file that will be saved with all of the user inputs.
    '''
    # Dates come from the GUI as "Y/M/D" strings; split and strip each part.
    startDate = [x.strip() for x in self.leftBox.userParams["obsStart"].GetValue().split("/")]
    endDate = [x.strip() for x in self.leftBox.userParams["obsEnd"].GetValue().split("/")]
    dates = {}
    for date, stringDate in [(startDate,"date1"), (endDate,"date2")]:
        # Drop the leading zero from "08"/"09" so the values are not later
        # misread as octal literals by a Python 2 parser downstream.
        for stringNum in date:
            if stringNum == "08":
                date[date.index(stringNum)] = "8"
            elif stringNum == "09":
                date[date.index(stringNum)] = "9"
        # Pad with zero hours/minutes/seconds to make a 6-element tuple string.
        date += ["0","0","0"]
        # Serialize as a literal tuple, e.g. "(2013,5,8,0,0,0)".
        tempString = "("
        for num in range(0,len(date)):
            if num != len(date)-1:
                tempString += date[num] + ","
            else:
                tempString += date[num]
        tempString += ")"
        dates[stringDate] = tempString
    # Write one "key: value" line per parameter; this format is what the
    # observatory-loading code parses back with str.split(':', 1).
    newObs = open(fileName, "w")
    newObs.write("name: " + self.name + "\n")
    newObs.write("latitude: " + self.latitude + "\n")
    newObs.write("longitude: " + self.longitude + "\n")
    newObs.write("elevation: " + self.elevation + "\n")
    newObs.write("temperature: " + self.temperature + "\n")
    newObs.write("min_horizon: " + self.lowerElevation + "\n")
    newObs.write("start_date: " + dates["date1"] + "\n")
    newObs.write("end_date: " + dates["date2"] + "\n")
    newObs.write("mag_limit: " + self.upperLimit + "\n")
    newObs.write("depth_limit: " + self.lowerLimit + "\n")
    newObs.write("calc_transits: " + str(self.calcTransitsBox.userParams["rbCalcTransits"].GetValue()) + "\n")
    newObs.write("calc_eclipses: " + str(self.calcEclipseBox.userParams["rbCalcEclipse"].GetValue()) + "\n")
    newObs.write("html_out: " + str(self.htmlBox.userParams["rbHtmlOut"].GetValue()) + "\n")
    newObs.write("text_out: " + str(self.textBox.userParams["rbTextOut"].GetValue()) + "\n")
    newObs.write("twilight: " + self.twilightChoices[self.twilight] + "\n")
    # show_lt is persisted as "1"/"0" rather than "True"/"False".
    tempLT = str(self.showLT.userParams["rbShowLT"].GetValue())
    if tempLT == "True":
        tempLT = "1"
    else:
        tempLT = "0"
    newObs.write("show_lt: " + tempLT + "\n")
    # The band radio button True means V band, False means K band.
    tempString = str(self.band.userParams["rbBand"].GetValue())
    if tempString == "True":
        bandString = "V"
    else:
        bandString = "K"
    newObs.write("band: "+ bandString)
    newObs.close()
def create_menu(self):
    '''
    Build the menu bar shown at the top of the ephemeris frame.
    Notes
    -----
    Takes no arguments and returns nothing; call as self.create_menu()
    from the frame's initializer.
    '''
    fileMenu = wx.Menu()
    saveItem = fileMenu.Append(wx.ID_SAVE, "Save\tCtrl+S", "Save data to a zip folder.")
    quitItem = fileMenu.Append(wx.ID_EXIT, "Quit\tCtrl+Q", "Quit this application.")
    # Wire each menu item to its handler.
    self.Bind(wx.EVT_MENU, self.saveOutput, saveItem)
    self.Bind(wx.EVT_MENU, self.on_exit, quitItem)
    bar = wx.MenuBar()
    bar.Append(fileMenu, "File")
    self.SetMenuBar(bar)
def saveOutput(self, event):
    '''
    This method will save the output of the ephemeris calculations as a zip file.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    '''
    dlg = wx.FileDialog(self, message = "Save your output...", style = wx.SAVE)
    if dlg.ShowModal() == wx.ID_OK:
        outputPath = dlg.GetPath()
        if self.parameterCheck():
            # Re-run the calculation so the copied files reflect current inputs.
            self.calculate(None)
            # Copy the canned ephemeris output tree from the installed oscaar
            # package into the user-chosen location.
            shutil.copytree(os.path.join(os.path.dirname(os.path.abspath(oscaar.__file__)),'extras','eph','ephOutputs'),
                            outputPath)
            # Zip every file in the copied directory, then delete the
            # directory so only <outputPath>.zip remains.
            outputArchive = zipfile.ZipFile(outputPath+'.zip', 'w')
            for name in glob(outputPath+os.sep+'*'):
                outputArchive.write(name, os.path.basename(name), zipfile.ZIP_DEFLATED)
            shutil.rmtree(outputPath)
            outputArchive.close()
    dlg.Destroy()
def on_exit(self, event):
    '''
    Handle the Quit menu action by closing this frame.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    '''
    self.Destroy()
def onDestroy(self, event):
    '''
    On frame destruction, tell the parent there is no longer an active
    instance of this ephemeris frame.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    '''
    # The parent checks this flag before opening another instance.
    self.parent.loadEphFrame = False
class FittingFrame(wx.Frame):
    '''
    After you have created your own light curve, there are different fitting methods that
    you can do. Currently the only fitting method in place is MCMC.
    '''

    def __init__(self, parent, objectID, path = ''):
        '''
        Lay out the fitting frame: a .pkl input path box, a results output
        path box, and an "MCMC Fit" button.
        Parameters
        ----------
        parent : wx.Frame
            The frame that opened this one (it tracks whether we are open).
        objectID : int
            The wx window identifier for this frame.
        path : string, optional
            Initial value for the .pkl path text box.
        '''
        self.path = path
        self.title = "Fitting Methods"
        self.loadMCMC = False
        wx.Frame.__init__(self, parent, objectID, self.title)
        self.panel = wx.Panel(self)
        self.parent = parent
        self.messageFrame = False
        self.IP = wx.Frame
        self.box = AddLCB(self.panel,-1,name="Path to Output File: ")
        self.box2 = AddLCB(self.panel, -1, name="Results Output Path (.txt): ", saveType=wx.FD_SAVE)
        self.vbox2 = wx.BoxSizer(wx.VERTICAL)
        self.vbox2.Add(self.box, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox2.Add(self.box2, border=5, flag=wx.ALL)
        self.box.boxList[1].SetValue(self.path)
        self.plotMCMCButton = wx.Button(self.panel,label="MCMC Fit", size = (130,25))
        self.Bind(wx.EVT_BUTTON, self.plotMCMC, self.plotMCMCButton)
        self.sizer0 = wx.FlexGridSizer(rows=2, cols=4)
        self.sizer0.Add(self.plotMCMCButton,0,wx.ALIGN_CENTER|wx.ALL,5)
        # Convenience aliases for the two text controls used everywhere below.
        self.pklPathTxt = self.box.boxList[1]
        self.saveLocation = self.box2.boxList[1]
        self.create_menu()
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.vbox2, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.Add(self.sizer0, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.vbox.AddSpacer(10)
        self.panel.SetSizer(self.vbox)
        self.CreateStatusBar()
        self.vbox.Fit(self)
        self.Center()
        self.Show()

    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the fitting frame.
        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        self.menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_browse = menu_file.Append(-1,"Browse","Browse for a .pkl file to use.")
        self.Bind(wx.EVT_MENU, lambda event: self.browseButtonEvent(event,'Choose Path to Output File',self.pklPathTxt,
                                                                    False,wx.FD_OPEN),m_browse)
        m_browse2 = menu_file.Append(-1, "Browse2", "Browse a save location for the results.")
        self.Bind(wx.EVT_MENU, lambda event: self.browseButtonEvent(event,'Choose Path to Output File',self.saveLocation,
                                                                    False,wx.FD_SAVE),m_browse2)
        menu_file.AppendSeparator()
        m_exit = menu_file.Append(-1, "Exit\tCtrl-Q", "Exit")
        self.Bind(wx.EVT_MENU, self.on_exit, m_exit)
        self.menubar.Append(menu_file, "&File")
        self.SetMenuBar(self.menubar)

    def browseButtonEvent(self, event, message, textControl, fileDialog, saveDialog):
        '''
        This method defines the `browse` function for selecting a file on any OS.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        message : string
            The message that tells the user what to choose.
        textControl : wx.TextCtrl
            The box in the frame that will be refreshed with the files that are chosen by the user.
        fileDialog : bool
            If true, the style is wx.FD_MULTIPLE, otherwise it is the same as the `saveDialog`.
        saveDialog : wx.FD_*
            The style of the box that will appear. The * represents a wild card value for different types.
        '''
        if not fileDialog:
            dlg = wx.FileDialog(self, message = message, style = saveDialog)
        else:
            dlg = wx.FileDialog(self, message = message, style = wx.FD_MULTIPLE)
        if dlg.ShowModal() == wx.ID_OK:
            # BUGFIX: callers pass wx.FD_SAVE, but this used to compare against
            # the legacy wx.SAVE constant; compare against wx.FD_SAVE so the
            # save branch actually matches the style the menu handlers pass.
            if saveDialog == wx.FD_SAVE:
                filenames = [dlg.GetPath()]
            else:
                filenames = dlg.GetPaths()
            textControl.Clear()
            # Write the chosen paths as a comma-separated list.
            for i in range(0,len(filenames)):
                if i != len(filenames)-1:
                    textControl.WriteText(filenames[i] + ',')
                else:
                    textControl.WriteText(filenames[i])
        dlg.Destroy()

    def plotLSFit(self,event):
        '''
        This method is for a least squares fitting method that is not in use right now.
        '''
        # NOTE(review): relies on module-level globals pathText/loadLSFit that
        # are not defined in this class; left untouched since the feature is
        # disabled.
        if self.validityCheck():
            global pathText
            global loadLSFit
            pathText = self.pklPathTxt.GetValue()
            if loadLSFit == False:
                LeastSquaresFitFrame()
                loadLSFit = True

    def plotMCMC(self,event):
        '''
        This method checks that the file chosen to be loaded is valid, and that there is a valid save
        file selected for the output of the MCMC calculations.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        if self.validityCheck():
            tempSaveLoc = self.saveLocation.GetValue()
            # Require that the directory part of the save path exists and that
            # a non-empty file name follows the final path separator.
            if not os.path.isdir(tempSaveLoc.rpartition(str(os.sep))[0]) or \
                    not len(tempSaveLoc) > (len(tempSaveLoc[:tempSaveLoc.rfind(os.sep)]) + 1):
                self.IP = InvalidParameter(tempSaveLoc, self, -1, stringVal="output", secondValue="results output file")
            else:
                try:
                    self.pathText = self.pklPathTxt.GetValue()
                    self.data = IO.load(self.pathText)
                    # Only ever open one MCMC frame at a time.
                    if not self.loadMCMC:
                        MCMCFrame(self, -1)
                        self.loadMCMC = True
                except AttributeError:
                    # Old-format .pkl files lack the attributes MCMCFrame needs.
                    self.IP = InvalidParameter("", self, -1, stringVal="oldPKL")

    def validityCheck(self):
        '''
        This is a fitting frame specific method that checks whether or not the given .pkl file
        is valid.
        Returns
        -------
        literal : bool
            False if the selected file is invalid, true otherwise.
        '''
        pathName = self.pklPathTxt.GetValue()
        # BUGFIX: an empty path used to fall through the original
        # `if pathName != ""` guard and return True, letting IO.load("") run.
        # Reject empty paths, non-.pkl extensions, and nonexistent files alike.
        if pathName == "" or not pathName.lower().endswith(".pkl") \
                or not os.path.isfile(pathName):
            self.IP = InvalidParameter(pathName, self, -1, stringVal="path")
            return False
        return True

    def on_exit(self, event):
        '''
        This method defines the action quit from the menu. It closes the frame.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()

    def onDestroy(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.loadFittingOpen = False
class ETDFrame(wx.Frame):
    '''
    This frame converts the data from a .pkl into the correct format in a text
    file that can be accepted by the Czech exoplanet transit database.
    '''

    def __init__(self, parent, objectID):
        '''
        Lay out the ETD conversion frame: a .pkl input path box, a results
        output path box, an aperture-radius drop-down, and a convert button.
        Parameters
        ----------
        parent : wx.Frame
            The frame that opened this one (it tracks whether we are open).
        objectID : int
            The wx window identifier for this frame.
        '''
        self.title = "ETD Conversion"
        wx.Frame.__init__(self, parent, objectID, self.title)
        self.panel = wx.Panel(self)
        self.parent = parent
        self.messageFrame = False
        self.data = ""
        self.box = AddLCB(self.panel,-1, parent2 = self, name="Path to Output File: ", updateRadii = True)
        self.box2 = AddLCB(self.panel, -1, name="Results Output Path (.txt): ", saveType=wx.FD_SAVE)
        self.apertureRadii = []
        self.apertureRadiusIndex = 0
        self.radiusLabel = wx.StaticText(self.panel, -1, 'Select Aperture Radius: ')
        self.radiusList = wx.ComboBox(self.panel, value = "", choices = "", size = (100, wx.DefaultSize.GetHeight()))
        self.radiusList.Bind(wx.EVT_COMBOBOX, self.radiusIndexUpdate)
        self.updateRadiiButton = wx.Button(self.panel, label = "Update Radii List")
        self.Bind(wx.EVT_BUTTON, self.updateRadiiList, self.updateRadiiButton)
        self.dropBox = wx.BoxSizer(wx.HORIZONTAL)
        self.dropBox.Add(self.radiusLabel, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 10)
        self.dropBox.Add(self.radiusList, 0, flag = wx.ALIGN_CENTER)
        self.convertToETDButton = wx.Button(self.panel,label = 'Convert to ETD Format')
        self.Bind(wx.EVT_BUTTON, self.convertToETD, self.convertToETDButton)
        self.sizer0 = wx.FlexGridSizer(rows=2, cols=3)
        self.hbox = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox.Add(self.sizer0, 0, wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.hbox.Add(self.updateRadiiButton, 0, wx.ALIGN_CENTER |wx. ALL, border = 5)
        self.hbox.Add(self.dropBox, 0, flag=wx.ALIGN_CENTER | wx.ALL, border=10)
        self.sizer0.Add(self.convertToETDButton,0,wx.ALIGN_CENTER|wx.ALL,5)
        # Convenience aliases for the two text controls used everywhere below.
        self.pklPathTxt = self.box.boxList[1]
        self.saveLocation = self.box2.boxList[1]
        self.create_menu()
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.box, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.Add(self.box2, 0, flag=wx.ALIGN_CENTER | wx.ALL, border=5)
        self.vbox.Add(self.hbox, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.vbox.AddSpacer(10)
        self.panel.SetSizer(self.vbox)
        self.CreateStatusBar()
        self.vbox.Fit(self)
        self.Center()
        self.Show()

    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the ETDFrame.
        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        self.menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_browse = menu_file.Append(-1,"Browse","Browse for a .pkl file to use.")
        self.Bind(wx.EVT_MENU, lambda event: self.browseButtonEvent(event,'Choose Path to Output File',self.pklPathTxt,False,
                                                                    wx.FD_OPEN),m_browse)
        m_browse2 = menu_file.Append(-1, "Browse2", "Browse a save location for the results.")
        self.Bind(wx.EVT_MENU, lambda event: self.browseButtonEvent(event,'Choose Path to Output File',self.saveLocation,
                                                                    False,wx.FD_SAVE),m_browse2)
        menu_file.AppendSeparator()
        m_exit = menu_file.Append(-1, "Exit\tCtrl-Q", "Exit")
        self.Bind(wx.EVT_MENU, self.on_exit, m_exit)
        self.menubar.Append(menu_file, "&File")
        self.SetMenuBar(self.menubar)

    def browseButtonEvent(self, event, message, textControl, fileDialog, saveDialog):
        '''
        This method defines the `browse` function for selecting a file on any OS.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        message : string
            The message that tells the user what to choose.
        textControl : wx.TextCtrl
            The box in the frame that will be refreshed with the files that are chosen by the user.
        fileDialog : bool
            If true, the style is wx.FD_MULTIPLE, otherwise it is the same as the `saveDialog`.
        saveDialog : wx.FD_*
            The style of the box that will appear. The * represents a wild card value for different types.
        '''
        if not fileDialog:
            dlg = wx.FileDialog(self, message = message, style = saveDialog)
        else:
            dlg = wx.FileDialog(self, message = message, style = wx.FD_MULTIPLE)
        if dlg.ShowModal() == wx.ID_OK:
            # BUGFIX: callers pass wx.FD_SAVE, but this used to compare against
            # the legacy wx.SAVE constant; use wx.FD_SAVE consistently.
            if saveDialog == wx.FD_SAVE:
                filenames = [dlg.GetPath()]
            else:
                filenames = dlg.GetPaths()
            textControl.Clear()
            for i in range(0,len(filenames)):
                if i != len(filenames)-1:
                    textControl.WriteText(filenames[i] + ',')
                else:
                    textControl.WriteText(filenames[i])
            # Refresh the radius drop-down quietly; an invalid selection here
            # should not pop an error dialog mid-browse.
            if self.validityCheck(throwException = False):
                self._refreshRadiiList()
        dlg.Destroy()

    def _refreshRadiiList(self):
        '''
        Reload the aperture-radius drop-down from the currently selected .pkl
        file. Shared by browseButtonEvent and updateRadiiList (was duplicated).
        '''
        try:
            self.radiusList.Clear()
            self.data = IO.load(self.box.boxList[1].GetValue())
            # Copy into a fresh array so later edits cannot alias the loaded data.
            self.apertureRadii = np.empty_like(self.data.apertureRadii)
            self.apertureRadii[:] = self.data.apertureRadii
            radiiString = [str(x) for x in self.data.apertureRadii]
            for string in radiiString:
                self.radiusList.Append(string)
            self.radiusList.SetValue(radiiString[0])
        except AttributeError:
            # Old-format .pkl files lack the apertureRadii attribute.
            self.IP = InvalidParameter("", self, -1, stringVal="oldPKL")

    def convertToETD(self, event):
        '''
        This method uses the czechETDstring method from the databank.py class
        to convert the data into the appropriate format.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        if self.validityCheck() and self.radiusCheck():
            tempSaveLoc = self.saveLocation.GetValue()
            # Require an existing directory and a non-empty file name after
            # the final path separator.
            if not os.path.isdir(tempSaveLoc.rpartition(str(os.sep))[0]) or \
                    not len(tempSaveLoc) > (len(tempSaveLoc[:tempSaveLoc.rfind(os.sep)]) + 1):
                self.IP = InvalidParameter(tempSaveLoc, self, -1, stringVal="output", secondValue="results output file")
            else:
                if not tempSaveLoc.lower().endswith(".txt"):
                    tempSaveLoc += ".txt"
                openFile = open(tempSaveLoc, 'w')
                openFile.write(self.data.czechETDstring(self.apertureRadiusIndex))
                openFile.close()
                self.IP = InvalidParameter("", self, -1, stringVal="successfulConversion")

    def validityCheck(self, throwException=True):
        '''
        This method checks to make sure that the entered .pkl file is valid and can
        be used.
        Parameters
        ----------
        throwException : bool, optional
            If true there will be a pop up frame that will explain the reason for why
            the selected file cannot be used if it is invalid. If false, no error message
            will pop up when an invalid file is selected.
        Returns
        -------
        literal : bool
            False if the selected file is invalid, true otherwise.
        '''
        pathName = self.pklPathTxt.GetValue()
        # BUGFIX: an empty path used to fall through the original
        # `if pathName != ""` guard and return True. Reject empty paths,
        # non-.pkl extensions, and nonexistent files alike.
        if pathName == "" or not pathName.lower().endswith(".pkl") \
                or not os.path.isfile(pathName):
            if throwException:
                self.IP = InvalidParameter(pathName, self, -1, stringVal="path")
            return False
        return True

    def radiusCheck(self):
        '''
        This method checks to make sure that if the user enters an aperture radius that they
        would like to plot, that it is a valid number in the list of saved aperture radii for
        the selected file.
        Returns
        -------
        literal : bool
            False if the aperture radius selected is not a number or not in the approved list,
            true otherwise.
        '''
        if len(self.apertureRadii) == 0:
            self.IP = InvalidParameter(str(self.apertureRadii), self, -1, stringVal="radiusListError", secondValue="etdError")
            return False
        elif self.radiusList.GetValue() == "":
            self.IP = InvalidParameter(self.radiusList.GetValue(), self, -1, stringVal="radiusError")
            return False
        try:
            # Compare within machine epsilon; the typed value is a Python
            # float while the stored radii may be single-precision.
            self.tempNum = np.where(self.epsilonCheck(self.apertureRadii,float(self.radiusList.GetValue())))
            if len(self.tempNum[0]) == 0:
                tempString = self.radiusList.GetValue() + " was not found in " + str(self.apertureRadii)
                self.IP = InvalidParameter(tempString, self, -1, stringVal="radiusListError2")
                return False
        except ValueError:
            self.IP = InvalidParameter(self.radiusList.GetValue(), self, -1, stringVal="radiusError")
            return False
        return True

    def updateRadiiList(self, event):
        '''
        This method will manually update the drop down menu for the available aperture radii that can
        be chosen from the .pkl file.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        Notes
        -----
        On successful completion, a list of available radii should be shown in the drop down menu of the frame.
        '''
        if self.validityCheck():
            self._refreshRadiiList()

    def epsilonCheck(self,a,b):
        '''
        This method checks that two numbers are within machine precision of each other
        because otherwise we get machine precision difference errors when mixing
        single and double precision NumPy floats and pure Python built-in float types.
        Parameters
        ----------
        a : array
            An array of float type numbers to check through.
        b : float
            The number that is being checked for in the array.
        Returns
        -------
        literal : array
            This is an array of booleans.
        Notes
        -----
        There a boolean literals of true in the return array if any number in `a` is within machine precision
        of `b`.
        Examples
        --------
        Inputs: `a` = [0, 1.0, 2.0, 3.0, 4.0], `b` = 3.0
        Return: [False, False, False, True, False]
        '''
        return np.abs(a-b) < np.finfo(np.float32).eps

    def radiusIndexUpdate(self, event):
        '''
        This method updates the current index in the list of available radii that this frame will use to plot different
        things. It does this by calling self.epsilonCheck to get an array of booleans. Afterwards, it selects the location
        of the boolean 'True' and marks that as the new index.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.apertureRadiusIndex = np.where(self.epsilonCheck(self.apertureRadii, float(self.radiusList.GetValue())))[0][0]

    def on_exit(self, event):
        '''
        This method defines the action quit from the menu. It closes the frame.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()

    def onDestroy(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.
        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.etdOpen = False
class LoadOldPklFrame(wx.Frame):
'''
This frame loads an old .pkl file so that you can make different plots with the
saved data.
'''
def __init__(self, parent, objectID):
    '''
    This method defines the initialization of this class.
    Parameters
    ----------
    parent : wx.Frame
        The frame that opened this one (it tracks whether we are open).
    objectID : int
        The wx window identifier for this frame.
    '''
    self.title = "Load An Old .pkl File"
    wx.Frame.__init__(self, parent, objectID, self.title)
    self.panel = wx.Panel(self)
    self.parent = parent
    # Tracks whether an interactive GraphFrame is currently open.
    self.loadGraphFrame = False
    self.messageFrame = False
    self.IP = wx.Frame
    self.data = ""
    self.box = AddLCB(self.panel,-1, parent2 = self, buttonLabel="Browse\t (Ctrl-O)",
                      name="Path to Output File: ", updateRadii = True)
    self.apertureRadii = []
    self.apertureRadiusIndex = 0
    self.radiusLabel = wx.StaticText(self.panel, -1, 'Select Aperture Radius: ')
    self.radiusList = wx.ComboBox(self.panel, value = "", choices = "", size = (100, wx.DefaultSize.GetHeight()))
    self.radiusList.Bind(wx.EVT_COMBOBOX, self.radiusIndexUpdate)
    self.updateRadiiButton = wx.Button(self.panel, label = "Update Radii List")
    self.Bind(wx.EVT_BUTTON, self.updateRadiiList, self.updateRadiiButton)
    self.dropBox = wx.BoxSizer(wx.HORIZONTAL)
    self.dropBox.Add(self.radiusLabel, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 10)
    self.dropBox.Add(self.radiusList, 0, flag = wx.ALIGN_CENTER)
    self.rightBox = wx.BoxSizer(wx.VERTICAL)
    self.rightBox.Add(self.updateRadiiButton, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 10)
    self.rightBox.Add(self.dropBox, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 10)
    self.hbox = wx.BoxSizer(wx.HORIZONTAL)
    self.hbox.Add(self.box, 0, flag = wx.ALIGN_CENTER | wx.ALL, border = 10)
    # Button sizes are tuned per platform so multi-line labels fit.
    if sys.platform == 'win32':
        self.plotLightCurveButton = wx.Button(self.panel,label = 'Plot Light Curve', size = (130,25))
        self.plotRawFluxButton = wx.Button(self.panel,label = 'Plot Raw Fluxes', size = (130,25))
        self.plotScaledFluxesButton = wx.Button(self.panel,label = 'Plot Scaled Fluxes', size = (130,25))
        self.plotCentroidPositionsButton = wx.Button(self.panel, label = 'Trace Stellar Centroid Positions', size = (170,25))
        self.plotComparisonStarWeightingsButton = wx.Button(self.panel,label = 'Plot Comparison\nStar Weightings', size = (110,37))
        self.plotInteractiveLightCurveButton = wx.Button(self.panel,label = 'Plot Interactive Light Curve', size = (170,25))
    elif sys.platform == 'darwin':
        self.plotLightCurveButton = wx.Button(self.panel,label = 'Plot Light Curve', size = (130,25))
        self.plotRawFluxButton = wx.Button(self.panel,label = 'Plot Raw Fluxes', size = (130,25))
        self.plotScaledFluxesButton = wx.Button(self.panel,label = 'Plot Scaled Fluxes', size = (130,25))
        self.plotCentroidPositionsButton = wx.Button(self.panel,-1,label = 'Trace Stellar\nCentroid Positions', size = (150,40))
        self.plotComparisonStarWeightingsButton = wx.Button(self.panel,-1,label = 'Plot Comparison\nStar Weightings', size = (150,40))
        self.plotInteractiveLightCurveButton = wx.Button(self.panel,-1,label = 'Plot Interactive Light Curve', size = (190,25))
    else:
        self.plotLightCurveButton = wx.Button(self.panel,label = 'Plot Light Curve', size = (130,30))
        self.plotRawFluxButton = wx.Button(self.panel,label = 'Plot Raw Fluxes', size = (130,30))
        self.plotScaledFluxesButton = wx.Button(self.panel,label = 'Plot Scaled Fluxes', size = (135,30))
        self.plotCentroidPositionsButton = wx.Button(self.panel,-1,label = 'Trace Stellar\nCentroid Positions', size = (150,45))
        self.plotComparisonStarWeightingsButton = wx.Button(self.panel,-1,label = 'Plot Comparison\nStar Weightings', size = (150,45))
        self.plotInteractiveLightCurveButton = wx.Button(self.panel,-1,label = 'Plot Interactive Light Curve', size = (195,30))
    self.Bind(wx.EVT_BUTTON, self.plotLightCurve, self.plotLightCurveButton)
    self.Bind(wx.EVT_BUTTON, self.plotRawFlux, self.plotRawFluxButton)
    self.Bind(wx.EVT_BUTTON, self.plotScaledFluxes,self.plotScaledFluxesButton)
    self.Bind(wx.EVT_BUTTON, self.plotCentroidPosition, self.plotCentroidPositionsButton)
    self.Bind(wx.EVT_BUTTON, self.plotComparisonStarWeightings, self.plotComparisonStarWeightingsButton)
    self.Bind(wx.EVT_BUTTON, self.plotInteractiveLightCurve, self.plotInteractiveLightCurveButton)
    self.sizer0 = wx.FlexGridSizer(rows=2, cols=3)
    self.hbox2 = wx.BoxSizer(wx.HORIZONTAL)
    self.hbox2.Add(self.sizer0, 0, wx.ALIGN_CENTER | wx.ALL, border = 5)
    self.hbox2.Add(self.rightBox, 0, wx.ALIGN_CENTER |wx. ALL, border = 5)
    self.sizer0.Add(self.plotLightCurveButton,0,wx.ALIGN_CENTER|wx.ALL,5)
    self.sizer0.Add(self.plotRawFluxButton,0,wx.ALIGN_CENTER|wx.ALL,5)
    self.sizer0.Add(self.plotScaledFluxesButton,0,wx.ALIGN_CENTER|wx.ALL,5)
    self.sizer0.Add(self.plotCentroidPositionsButton,0,wx.ALIGN_CENTER|wx.ALL,5)
    self.sizer0.Add(self.plotComparisonStarWeightingsButton,0,wx.ALIGN_CENTER|wx.ALL,5)
    self.sizer0.Add(self.plotInteractiveLightCurveButton,0,wx.ALIGN_CENTER|wx.ALL,5)
    # Convenience alias for the .pkl path text control used by the plot methods.
    self.pklPathTxt = self.box.boxList[1]
    self.create_menu()
    self.vbox = wx.BoxSizer(wx.VERTICAL)
    self.vbox.Add(self.hbox, 0, flag=wx.ALIGN_CENTER | wx.TOP)
    self.vbox.Add(self.hbox2, 0, flag=wx.ALIGN_CENTER | wx.TOP)
    self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
    self.vbox.AddSpacer(10)
    self.panel.SetSizer(self.vbox)
    self.CreateStatusBar()
    self.vbox.Fit(self)
    self.Center()
    self.Show()
def create_menu(self):
    '''
    Build the menu bar shown at the top of the load-old-pkl frame.
    Notes
    -----
    Takes no arguments and returns nothing; call as self.create_menu()
    from the frame's initializer.
    '''
    fileMenu = wx.Menu()
    browseItem = fileMenu.Append(-1,"Browse\tCtrl-O","Browse")
    # Route the menu item through the shared browse handler with an
    # open-file dialog targeting the .pkl path text control.
    self.Bind(wx.EVT_MENU,lambda event: self.browseButtonEvent(event,'Choose Path to Output File',self.pklPathTxt,False,
                                                               wx.FD_OPEN),browseItem)
    fileMenu.AppendSeparator()
    exitItem = fileMenu.Append(-1, "Exit\tCtrl-Q", "Exit")
    self.Bind(wx.EVT_MENU, self.on_exit, exitItem)
    self.menubar = wx.MenuBar()
    self.menubar.Append(fileMenu, "&File")
    self.SetMenuBar(self.menubar)
def browseButtonEvent(self, event, message, textControl, fileDialog, saveDialog):
    '''
    This method defines the `browse` function for selecting a file on any OS.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    message : string
        The message that tells the user what to choose.
    textControl : wx.TextCtrl
        The box in the frame that will be refreshed with the files that are chosen by the user.
    fileDialog : bool
        If true, the style is wx.FD_MULTIPLE, otherwise it is the same as the `saveDialog`.
    saveDialog : wx.FD_*
        The style of the box that will appear. The * represents a wild card value for different types.
    '''
    if not fileDialog:
        dlg = wx.FileDialog(self, message = message, style = saveDialog)
    else:
        dlg = wx.FileDialog(self, message = message, style = wx.FD_MULTIPLE)
    if dlg.ShowModal() == wx.ID_OK:
        # BUGFIX: callers pass wx.FD_SAVE, but this used to compare against
        # the legacy wx.SAVE constant; use wx.FD_SAVE consistently.
        if saveDialog == wx.FD_SAVE:
            filenames = [dlg.GetPath()]
        else:
            filenames = dlg.GetPaths()
        textControl.Clear()
        # Write the chosen paths as a comma-separated list.
        for i in range(0,len(filenames)):
            if i != len(filenames)-1:
                textControl.WriteText(filenames[i] + ',')
            else:
                textControl.WriteText(filenames[i])
        # Quietly refresh the radius drop-down from the newly chosen file;
        # an invalid selection here should not pop an error dialog.
        if self.validityCheck(throwException = False):
            try:
                self.radiusList.Clear()
                self.data = IO.load(self.box.boxList[1].GetValue())
                # Copy into a fresh array so later edits cannot alias the data.
                self.apertureRadii = np.empty_like(self.data.apertureRadii)
                self.apertureRadii[:] = self.data.apertureRadii
                radiiString = [str(x) for x in self.data.apertureRadii]
                for string in radiiString:
                    self.radiusList.Append(string)
                self.radiusList.SetValue(radiiString[0])
            except AttributeError:
                # Old-format .pkl files lack the apertureRadii attribute.
                self.IP = InvalidParameter("", self, -1, stringVal="oldPKL")
    dlg.Destroy()
def plotLightCurve(self, event):
    '''
    Plot the light curve stored in the selected .pkl file for the currently
    chosen aperture radius, in a separate python process.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    Notes
    -----
    On successful completion a plot will open up in a new window.
    '''
    if not (self.validityCheck() and self.radiusCheck()):
        return
    # radiusCheck() stored the matching index in self.tempNum; sync it.
    selectedIndex = self.tempNum[0][0]
    if selectedIndex != self.apertureRadiusIndex:
        self.apertureRadiusIndex = selectedIndex
    pklPath = self.pklPathTxt.GetValue()
    print('Loading file: ' + pklPath)
    # Plot in a child interpreter so the GUI event loop stays responsive.
    script = ("import oscaar.IO; data=oscaar.IO.load('%s'); data.plotLightCurve(apertureRadiusIndex=%s)"
              % (pklPath, self.apertureRadiusIndex))
    subprocess.Popen(['python', '-c', script])
def plotRawFlux(self, event):
    '''
    Plot the raw fluxes stored in the selected .pkl file for the currently
    chosen aperture radius, in a separate python process.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    Notes
    -----
    On successful completion a plot will open up in a new window.
    '''
    if not (self.validityCheck() and self.radiusCheck()):
        return
    # radiusCheck() stored the matching index in self.tempNum; sync it.
    selectedIndex = self.tempNum[0][0]
    if selectedIndex != self.apertureRadiusIndex:
        self.apertureRadiusIndex = selectedIndex
    pklPath = self.pklPathTxt.GetValue()
    print('Loading file: ' + pklPath)
    # Plot in a child interpreter so the GUI event loop stays responsive.
    script = ("import oscaar.IO; data=oscaar.IO.load('%s'); data.plotRawFluxes(apertureRadiusIndex=%s)"
              % (pklPath, self.apertureRadiusIndex))
    subprocess.Popen(['python', '-c', script])
def plotScaledFluxes(self, event):
    '''
    Plot the scaled fluxes stored in the selected .pkl file for the currently
    chosen aperture radius, in a separate python process.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    Notes
    -----
    On successful completion a plot will open up in a new window.
    '''
    if not (self.validityCheck() and self.radiusCheck()):
        return
    # radiusCheck() stored the matching index in self.tempNum; sync it.
    selectedIndex = self.tempNum[0][0]
    if selectedIndex != self.apertureRadiusIndex:
        self.apertureRadiusIndex = selectedIndex
    pklPath = self.pklPathTxt.GetValue()
    print('Loading file: ' + pklPath)
    # Plot in a child interpreter so the GUI event loop stays responsive.
    script = ("import oscaar.IO; data=oscaar.IO.load('%s'); data.plotScaledFluxes(apertureRadiusIndex=%s)"
              % (pklPath, self.apertureRadiusIndex))
    subprocess.Popen(['python', '-c', script])
def plotCentroidPosition(self, event):
    '''
    Plot the stellar centroid trace stored in the selected .pkl file, in a
    separate python process. No aperture radius is needed for this plot.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    Notes
    -----
    On successful completion a plot will open up in a new window.
    '''
    if not self.validityCheck():
        return
    pklPath = self.pklPathTxt.GetValue()
    print('Loading file: ' + pklPath)
    # Plot in a child interpreter so the GUI event loop stays responsive.
    script = ("import oscaar.IO; data=oscaar.IO.load('%s'); data.plotCentroidsTrace()"
              % (pklPath))
    subprocess.Popen(['python', '-c', script])
def plotComparisonStarWeightings(self, event):
    '''
    Plot the comparison-star weightings stored in the selected .pkl file for
    the currently chosen aperture radius, in a separate python process.
    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    Notes
    -----
    On successful completion a plot will open up in a new window.
    '''
    if not (self.validityCheck() and self.radiusCheck()):
        return
    # radiusCheck() stored the matching index in self.tempNum; sync it.
    selectedIndex = self.tempNum[0][0]
    if selectedIndex != self.apertureRadiusIndex:
        self.apertureRadiusIndex = selectedIndex
    pklPath = self.pklPathTxt.GetValue()
    print('Loading file: ' + pklPath)
    # Plot in a child interpreter so the GUI event loop stays responsive.
    script = ("import oscaar.IO; data=oscaar.IO.load('%s');"
              "data.plotComparisonWeightings(apertureRadiusIndex=%s)"
              % (pklPath, self.apertureRadiusIndex))
    subprocess.Popen(['python', '-c', script])
def plotInteractiveLightCurve(self, event):
    '''
    Open the interactive light curve window for the selected .pkl file and
    aperture radius.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.

    Notes
    -----
    On successful completion a plot will open up in a new window. Only one
    GraphFrame instance is allowed at a time (tracked by self.loadGraphFrame).
    '''
    if not (self.validityCheck() and self.radiusCheck()):
        return
    # radiusCheck() stores the matching radius index in self.tempNum.
    if self.tempNum[0][0] != self.apertureRadiusIndex:
        self.apertureRadiusIndex = self.tempNum[0][0]
    if not self.loadGraphFrame:
        GraphFrame(self, -1)
        self.loadGraphFrame = True
def validityCheck(self, throwException=True):
    '''
    Check that the entered .pkl file path is usable: non-empty, ends with
    ".pkl" (case-insensitive), and refers to an existing file.

    Parameters
    ----------
    throwException : bool, optional
        If true, an InvalidParameter frame pops up explaining why the selected
        file cannot be used when it is invalid. If false, the check is silent.

    Returns
    -------
    literal : bool
        False if the selected file is invalid, true otherwise.
    '''
    pathName = self.pklPathTxt.GetValue()
    # All three failure modes (empty path, wrong extension, missing file)
    # report the same "path" error, so collapse them into one guard clause.
    if pathName != "" and pathName.lower().endswith(".pkl") and os.path.isfile(pathName):
        return True
    if throwException:
        self.IP = InvalidParameter(pathName, self, -1, stringVal="path")
    return False
def radiusCheck(self):
    '''
    This method checks to make sure that if the user enters an aperture radius that they
    would like to plot, that it is a valid number in the list of saved aperture radii for
    the selected file.

    Returns
    -------
    literal : bool
        False if the aperture radius selected is not a number or not in the approved list,
        true otherwise.

    Notes
    -----
    Side effect: on success the matching index positions are stored in
    ``self.tempNum`` (a ``np.where`` tuple), which the plot handlers read
    afterwards to update ``self.apertureRadiusIndex``.
    '''
    if len(self.apertureRadii) == 0:
        # No radii have been loaded from the .pkl file yet.
        self.IP = InvalidParameter(str(self.apertureRadii), self, -1, stringVal="radiusListError")
        return False
    elif self.radiusList.GetValue() == "":
        self.IP = InvalidParameter(self.radiusList.GetValue(), self, -1, stringVal="radiusError")
        return False
    try:
        # float() may raise ValueError for non-numeric text; epsilonCheck
        # compares within machine precision rather than with ==.
        self.tempNum = np.where(self.epsilonCheck(self.apertureRadii,float(self.radiusList.GetValue())))
        if len(self.tempNum[0]) == 0:
            tempString = self.radiusList.GetValue() + " was not found in " + str(self.apertureRadii)
            self.IP = InvalidParameter(tempString, self, -1, stringVal="radiusListError2")
            return False
    except ValueError:
        self.IP = InvalidParameter(self.radiusList.GetValue(), self, -1, stringVal="radiusError")
        return False
    return True
def updateRadiiList(self, event):
    '''
    This method will manually update the drop down menu for the available aperture radii that can
    be chosen from the .pkl file.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.

    Notes
    -----
    On successful completion, a list of available radii should be shown in the drop down menu of the frame.
    '''
    if self.validityCheck():
        try:
            self.radiusList.Clear()
            # NOTE(review): the file loaded here comes from self.box.boxList[1]
            # while validityCheck() inspects self.pklPathTxt -- presumably these
            # are the same text control; confirm against the frame's __init__.
            self.data = IO.load(self.box.boxList[1].GetValue())
            # Copy the radii so later edits cannot mutate the loaded data object.
            self.apertureRadii = np.empty_like(self.data.apertureRadii)
            self.apertureRadii[:] = self.data.apertureRadii
            radiiString = [str(x) for x in self.data.apertureRadii]
            for string in radiiString:
                self.radiusList.Append(string)
            self.radiusList.SetValue(radiiString[0])
        except AttributeError:
            # Older .pkl files have no apertureRadii attribute.
            self.IP = InvalidParameter("", self, -1, stringVal="oldPKL")
def epsilonCheck(self, a, b):
    '''
    This method checks that two numbers are within machine precision of each other
    because otherwise we get machine precision difference errors when mixing
    single and double precision NumPy floats and pure Python built-in float types.

    Parameters
    ----------
    a : array
        An array (or plain Python sequence) of float type numbers to check through.
    b : float
        The number that is being checked for in the array.

    Returns
    -------
    literal : array
        This is an array of booleans.

    Notes
    -----
    There a boolean literals of true in the return array if any number in `a` is within machine precision
    of `b`.

    Examples
    --------
    Inputs: `a` = [0, 1.0, 2.0, 3.0, 4.0], `b` = 3.0
    Return: [False, False, False, True, False]
    '''
    # Coerce to an ndarray so the subtraction works for plain Python lists
    # too (the documented example above); this also makes the helper
    # consistent with the identical epsilonCheck in MCMCFrame.
    a = np.asarray(a)
    # float32 epsilon is the coarsest precision that may appear in the data.
    return np.abs(a - b) < np.finfo(np.float32).eps
def radiusIndexUpdate(self, event):
    '''
    This method updates the current index in the list of available radii that this frame will use to plot different
    things. It does this by calling self.epsilonCheck to get an array of booleans. Afterwords, it selects the location
    of the boolean 'True' and marks that as the new index.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.

    Notes
    -----
    Assumes the combo box value is one of the known radii; [0][0] takes the
    first matching position from the np.where result.
    '''
    self.apertureRadiusIndex = np.where(self.epsilonCheck(self.apertureRadii, float(self.radiusList.GetValue())))[0][0]
def on_exit(self, event):
    '''
    This method defines the action quit from the menu. It closes the frame.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    '''
    # Destroy() also triggers the EVT_WINDOW_DESTROY handler bound elsewhere.
    self.Destroy()
def onDestroy(self, event):
    '''
    Whenever this frame is closed, this secondary method updates a variable in the parent
    class to make sure that it knows there is no active instance of this frame.

    Parameters
    ----------
    event : wx.EVT_*
        A wxPython event that allows the activation of this method. The * represents a wild card value.
    '''
    # Allows the parent to open a fresh instance of this frame later.
    self.parent.loadOldPklOpen = False
class GraphFrame(wx.Frame):
    '''
    This is the class for the interactive light curve plot frame. It allows a user to continuously
    plot a light curve with a new bin size as well as change the names of the axes and title.
    '''

    # Window title shown in the frame's title bar.
    title = 'Interactive Light Curve Plot'

    def __init__(self, parent, objectID):
        '''
        This method defines the initialization of this class.

        Parameters
        ----------
        parent : window
            Parent frame; must expose ``pklPathTxt`` (path text control),
            ``apertureRadiusIndex`` and ``loadGraphFrame``.
        objectID : int
            The wx identity number for this frame.
        '''
        # Fixed-size frame: resize border, resize box and maximize are disabled.
        wx.Frame.__init__(self, parent, objectID, self.title, style = wx.DEFAULT_FRAME_STYLE & ~ (wx.RESIZE_BORDER |
                          wx.RESIZE_BOX | wx.MAXIMIZE_BOX))
        self.pT = parent.pklPathTxt.GetValue()  # path of the .pkl data file
        self.parent = parent
        self.apertureRadiusIndex = self.parent.apertureRadiusIndex
        self.create_menu()
        self.statusbar = self.CreateStatusBar()
        self.create_main_panel()
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.Centre()
        self.Show()

    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the graph frame.

        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        self.menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_expt = menu_file.Append(-1, "&Save plot\tCtrl-S", "Save plot to file")
        self.Bind(wx.EVT_MENU, self.on_save_plot, m_expt)
        menu_file.AppendSeparator()
        m_exit = menu_file.Append(-1, "E&xit\tCtrl-Q", "Exit")
        self.Bind(wx.EVT_MENU, self.on_exit, m_exit)
        self.menubar.Append(menu_file, "&File")
        self.SetMenuBar(self.menubar)

    def create_main_panel(self):
        '''
        This method creates a wxPython panel that will update everytime a new instance of the
        light curve plot is generated.
        '''
        self.panel = wx.Panel(self)
        # Build the initial figure before wrapping it in a canvas.
        self.init_plot()
        self.canvas = FigCanvas(self.panel, -1, self.fig)
        # ScanParamsBox holds the user-editable bin size / axis label fields.
        self.box = ScanParamsBox(self.panel,-1)
        self.hbox = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox.Add(self.box, border=5, flag=wx.ALL)
        self.plotButton = wx.Button(self.panel,label = 'Plot')
        self.Bind(wx.EVT_BUTTON,self.draw_plot, self.plotButton)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.canvas, 1, flag=wx.LEFT | wx.TOP | wx.GROW)
        self.vbox.Add(self.hbox, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.Add(self.plotButton,0,flag=wx.ALIGN_CENTER|wx.TOP)
        self.vbox.AddSpacer(10)
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)

    def init_plot(self):
        '''
        This is the initial plot that is displayed. It uses a bin size of 10 for the light curve.
        '''
        self.data = IO.load(self.pT)
        self.pointsPerBin = 10
        binnedTime, binnedFlux, binnedStd = medianBin(self.data.times,self.data.lightCurves[self.apertureRadiusIndex],
                                                      self.pointsPerBin)
        self.fig = pyplot.figure(num=None, figsize=(10, 8), facecolor='w',edgecolor='k')
        self.dpi = 100
        self.axes = self.fig.add_subplot(111)
        self.axes.set_axis_bgcolor('white')
        self.axes.set_title('Light Curve', size=12)
        def format_coord(x, y):
            '''
            Function to give data value on mouse over plot.
            '''
            return 'JD=%1.5f, Flux=%1.4f' % (x, y)
        self.axes.format_coord = format_coord
        # Raw light curve in black dots with gray error bars, binned curve in red squares.
        self.axes.errorbar(self.data.times,self.data.lightCurves[self.apertureRadiusIndex],
                           yerr=self.data.lightCurveErrors[self.apertureRadiusIndex],fmt='k.',ecolor='gray')
        self.axes.errorbar(binnedTime, binnedFlux, yerr=binnedStd, fmt='rs-', linewidth=2)
        # Dotted vertical markers at ingress and egress times.
        self.axes.axvline(ymin=0,ymax=1,x=self.data.ingress,color='k',ls=':')
        self.axes.axvline(ymin=0,ymax=1,x=self.data.egress,color='k',ls=':')
        self.axes.set_title(('Light curve for aperture radius %s' % self.data.apertureRadii[self.apertureRadiusIndex]))
        self.axes.set_xlabel('Time (JD)')
        self.axes.set_ylabel('Relative Flux')

    def draw_plot(self,event):
        '''
        This method will redraw the plot every time the user presses the plot button in the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.

        Notes
        -----
        On successful completion with at least one parameter changed, the new plot will show up in the panel of
        the frame.
        '''
        self.box.update()
        self.box.setMax(len(self.data.times))
        # Only redraw when the box contents are valid AND something changed.
        if self.box.boxCorrect() == True and self.box.boxDiff() == True:
            print "Re-drawing Plot"
            self.xlabel = self.box.userinfo['xlabel'].GetValue()
            self.ylabel = self.box.userinfo['ylabel'].GetValue()
            self.plotTitle = self.box.userinfo['title'].GetValue()
            self.pointsPerBin = int(self.box.userinfo['bin'].GetValue())
            binnedTime, binnedFlux, binnedStd = medianBin(self.data.times,self.data.lightCurves[self.apertureRadiusIndex],
                                                          self.pointsPerBin)
            # Windows needs a slightly shorter figure to fit the frame.
            if sys.platform == 'win32':
                self.fig = pyplot.figure(num=None, figsize=(10, 6.75), facecolor='w',edgecolor='k')
            else:
                self.fig = pyplot.figure(num=None, figsize=(10, 8.0), facecolor='w',edgecolor='k')
            self.dpi = 100
            self.axes = self.fig.add_subplot(111)
            self.axes.set_axis_bgcolor('white')
            self.axes.set_title('Light Curve', size=12)
            def format_coord(x, y):
                '''
                Function to give data value on mouse over plot.
                '''
                return 'JD=%1.5f, Flux=%1.4f' % (x, y)
            self.axes.format_coord = format_coord
            self.axes.errorbar(self.data.times,self.data.lightCurves[self.apertureRadiusIndex],
                               yerr=self.data.lightCurveErrors[self.apertureRadiusIndex],fmt='k.',ecolor='gray')
            self.axes.errorbar(binnedTime, binnedFlux, yerr=binnedStd, fmt='rs-', linewidth=2)
            self.axes.axvline(ymin=0,ymax=1,x=self.data.ingress,color='k',ls=':')
            self.axes.axvline(ymin=0,ymax=1,x=self.data.egress,color='k',ls=':')
            self.axes.set_title(self.plotTitle)
            self.axes.set_xlabel(self.xlabel)
            self.axes.set_ylabel(self.ylabel)
            # NOTE(review): a brand-new FigCanvas is created on every redraw
            # without destroying the previous one -- presumably wx replaces it
            # in the sizer, but this looks like it may leak canvases; verify.
            self.canvas = FigCanvas(self.panel, -1, self.fig)

    def on_save_plot(self, event):
        '''
        This method will save the plot you create as a .png file.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        file_choices = "PNG (*.png)|*.png"
        dlg = wx.FileDialog(
            self,
            message="Save plot as...",
            defaultDir=os.getcwd(),
            defaultFile="plot.png",
            wildcard=file_choices,
            style=wx.SAVE)
        if dlg.ShowModal() == wx.ID_OK:
            path = dlg.GetPath()
            self.canvas.print_figure(path, dpi=self.dpi)
            self.flash_status_message("Saved to %s" % path)

    def flash_status_message(self, msg, flash_len_ms=1500):
        '''
        This method will show a message for a brief moment on the status bar at the bottom of the frame.

        Parameters
        ----------
        msg : string
            The message that will appear.
        flash_len_ms : int, optional
            The amount of time the message should appear for in milliseconds.
        '''
        self.statusbar.SetStatusText(msg)
        # One-shot timer clears the status bar after flash_len_ms.
        self.timeroff = wx.Timer(self)
        self.Bind(
            wx.EVT_TIMER,
            self.on_flash_status_off,
            self.timeroff)
        self.timeroff.Start(flash_len_ms, oneShot=True)

    def on_flash_status_off(self, event):
        '''
        This clears the status bar of the frame after a message has been displayed.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.statusbar.SetStatusText('')

    def on_exit(self, event):
        '''
        This method defines the action quit from the menu. It closes the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()

    def onDestroy(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        # Lets the parent open a new interactive plot window later.
        self.parent.loadGraphFrame = False
class LeastSquaresFitFrame(wx.Frame):
    '''
    This class is not in use right now.

    The previous implementation is kept below inside a bare string literal so
    that none of it executes; the class currently defines no attributes or
    methods of its own.
    '''
    # NOTE: everything inside the triple-quoted string below is disabled code,
    # preserved verbatim for reference only.
    """
    title = "Least Squares Fit"
    def __init__(self):
    wx.Frame.__init__(self, None,-1, self.title)
    self.panel = wx.Panel(self)
    self.pT = pathText
    self.data = IO.load(self.pT)
    self.box1 = AddLCB(self.panel,-1,name="planet")
    self.Bind(wx.EVT_BUTTON,self.update,self.box1.updateButton)
    self.topBox = wx.BoxSizer(wx.HORIZONTAL)
    self.topBox.Add(self.box1, border=5, flag=wx.ALL)
    self.list = [
    ('Rp/Rs',"Ratio of Radii (Rp/Rs):",
    'Enter a ratio of the radii here.',''),
    ('a/Rs',"a/Rs:",
    'Enter a value for a/Rs here.',''),
    ('per',"Period:",
    'Enter a value for the period here.',''),
    ('inc',"Inclination:",
    'Enter a value for the inclination here.',''),
    ('ecc',"Eccentricity: ",
    'Enter a value for the eccentricity here.',''),
    ('t0',"t0:",
    'Enter a value for t0 here.',
    str(transiterFit.calcMidTranTime(self.data.times,self.data.lightCurves[radiusNum]))),
    ('gamma1',"Gamma 1:",
    'Enter a value for gamma 1 here.','0.0'),
    ('gamma2'," Gamma 2:",
    'Enter a value for gamma 2 here.','0.0'),
    ('pericenter',"Pericenter:",
    'Enter an arguement for the pericenter here.','0.0'),
    ('limbdark',"Limb-Darkening Parameter:",
    'Enter an arguement for limb-darkening here.','False')
    ]
    self.box = ParameterBox(self.panel,-1,self.list,name="Input Parameters")
    self.hbox = wx.BoxSizer(wx.HORIZONTAL)
    self.hbox.Add(self.box, border=5, flag=wx.ALL)
    self.plotButton = wx.Button(self.panel,label = 'Plot')
    self.Bind(wx.EVT_BUTTON,self.plot, self.plotButton)
    self.sizer0 = wx.FlexGridSizer(rows=1, cols=10)
    self.hbox2 = wx.BoxSizer(wx.HORIZONTAL)
    self.hbox2.Add(self.sizer0,0, wx.ALIGN_CENTER|wx.ALL,5)
    self.sizer0.Add(self.plotButton,0,wx.ALIGN_CENTER|wx.ALL,5)
    self.vbox = wx.BoxSizer(wx.VERTICAL)
    self.vbox.Add(self.topBox, 0, flag=wx.ALIGN_CENTER | wx.TOP)
    self.vbox.Add(self.hbox, 0, flag=wx.ALIGN_CENTER | wx.TOP)
    self.vbox.Add(self.hbox2, 0, flag=wx.ALIGN_CENTER | wx.TOP)
    #
    # self.box.userParams['t0'].SetValue(str(oscaar.transiterFit.calcMidTranTime(self.data.times,self.data.lightCurve)))
    #
    self.vbox.AddSpacer(10)
    self.vbox.AddSpacer(10)
    self.panel.SetSizer(self.vbox)
    self.vbox.Fit(self)
    self.create_menu()
    self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
    self.Center()
    self.Show()
    def plot(self,event):
    self.tempLimbDark = self.box.userParams['limbdark'].GetValue()
    list = [(self.box.userParams['Rp/Rs'].GetValue(),"Rp/Rs"),(self.box.userParams['a/Rs'].GetValue(),"a/Rs"),
    (self.box.userParams['per'].GetValue(),"per"), (self.box.userParams['inc'].GetValue(),"inc"),
    (self.box.userParams['ecc'].GetValue(),"ecc"), (self.box.userParams['t0'].GetValue(),"t0"),
    (self.box.userParams['gamma1'].GetValue(),"gamma1"),(self.box.userParams['gamma2'].GetValue(),"gamma2"),
    (self.box.userParams['pericenter'].GetValue(),"pericenter"),
    (self.tempLimbDark,"limbdark")]
    if checkParams(self,list) == True:
    if self.box.userParams['limbdark'].GetValue() == 'False':
    self.tempLimbDark = False
    fit, success = transiterFit.run_LMfit(self.data.getTimes(), self.data.lightCurves[radiusNum],
    self.data.lightCurveErrors[radiusNum],
    float(self.box.userParams['Rp/Rs'].GetValue()),
    float(self.box.userParams['a/Rs'].GetValue()),
    float(self.box.userParams['inc'].GetValue()),
    float(self.box.userParams['t0'].GetValue()),
    float(self.box.userParams['gamma1'].GetValue()),
    float(self.box.userParams['gamma2'].GetValue()),
    float(self.box.userParams['per'].GetValue()),
    float(self.box.userParams['ecc'].GetValue()),
    float(self.box.userParams['pericenter'].GetValue()),
    fitLimbDark=self.tempLimbDark, plotting=True)
    n_iter = 300
    # Rp,aRs,inc,t0,gam1,gam2=oscaar.transiterFit.run_MCfit(n_iter,self.data.getTimes(),
    # self.data.lightCurve, self.data.lightCurveError,fit,success,
    # float(self.box.GetPeriod()),float(self.box.GetEcc()),
    # float(self.box.GetPericenter()),float(self.box.GetGamma1()),float(self.box.GetGamma2()), plotting=False)
    def update(self,event):
    if self.box1.boxList[1].GetValue() == '':
    self.IP = InvalidParameter(self.box1.boxList[1].GetValue(), None,-1, stringVal="planet")
    else:
    self.planet = self.box1.boxList[1].GetValue()
    [RpOverRs,AOverRs,per,inc,ecc] = returnSystemParams.transiterParams(self.planet)
    if RpOverRs == -1 or AOverRs == -1 or per == -1 or inc == -1 or ecc == -1:
    self.IP = InvalidParameter(self.box1.boxList[1].GetValue(), None,-1, stringVal="planet")
    else:
    self.box.userParams['Rp/Rs'].SetValue(str(RpOverRs))
    self.box.userParams['a/Rs'].SetValue(str(AOverRs))
    self.box.userParams['per'].SetValue(str(per))
    self.box.userParams['inc'].SetValue(str(inc))
    self.box.userParams['ecc'].SetValue(str(ecc))
    self.IP = InvalidParameter("",None,-1, stringVal="params")
    def create_menu(self):
    # These commands create a drop down menu with the exit command.
    self.menubar = wx.MenuBar()
    menu_file = wx.Menu()
    m_exit = menu_file.Append(-1, "E&xit\tCtrl-Q", "Exit")
    self.Bind(wx.EVT_MENU, self.on_exit, m_exit)
    self.menubar.Append(menu_file, "&File")
    self.SetMenuBar(self.menubar)
    def on_exit(self, event):
    self.Destroy()
    def onDestroy(self, event):
    global loadLSFit
    loadLSFit = False
    """
class MCMCFrame(wx.Frame):
    '''
    This frame allows the user to edit a number of different parameters to run the
    Markov Chain Monte Carlo routine for fitting.
    '''

    # Window title shown in the frame's title bar.
    title = "MCMC Fit"

    def __init__(self, parent, objectID):
        '''
        This method defines the initialization of this class.

        Parameters
        ----------
        parent : window
            Parent frame; must expose ``pathText``, ``saveLocation``, ``data``
            and ``loadMCMC``.
        objectID : int
            The wx identity number for this frame.
        '''
        wx.Frame.__init__(self, parent, objectID, self.title)
        self.panel = wx.Panel(self)
        self.parent = parent
        self.messageFrame = False
        self.IP = wx.Frame
        self.pT = self.parent.pathText  # path of the .pkl data file
        self.saveLoc = self.parent.saveLocation.GetValue()  # output .txt path
        self.data = self.parent.data
        # Planet-name lookup box; the update button fills parameters from exoplanet.org.
        self.LCB = AddLCB(self.panel,-1,name="planet")
        self.Bind(wx.EVT_BUTTON,self.update,self.LCB.updateButton)
        radiiString = [str(x) for x in self.data.apertureRadii]
        self.apertureRadiusIndex = 0
        self.radiusLabel = wx.StaticText(self.panel, -1, 'Select Aperture Radius: ')
        self.radiusList = wx.ComboBox(self.panel, value = str(self.data.apertureRadii[0]), choices = radiiString)
        self.radiusList.Bind(wx.EVT_COMBOBOX, self.radiusUpdate)
        self.dropBox = wx.BoxSizer(wx.HORIZONTAL)
        self.dropBox.Add(self.radiusLabel, 0, flag = wx.ALIGN_CENTER | wx.LEFT, border = 10)
        self.dropBox.Add(self.radiusList, 0, flag = wx.ALIGN_CENTER)
        self.topBox = wx.BoxSizer(wx.HORIZONTAL)
        self.topBox.Add(self.LCB, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        self.topBox.Add(self.dropBox, flag = wx.ALIGN_CENTER | wx.ALL, border = 5)
        # Free parameters of the MCMC fit.
        tupleList = [('Rp/Rs',"Ratio of Radii (Rp/Rs):", 'Enter a ratio of the radii here.','0.11'),
                     ('a/Rs',"a/Rs:", 'Enter a value for a/Rs here.','14.1'),
                     ('inc',"Inclination:", 'Enter a value for the inclination here.','90.0'),
                     ('t0',"t0:", 'Enter a value for the mid transit time here.','2456427.9425593214')]
        self.box = ParameterBox(self.panel,-1,tupleList,"Free Parameters",rows=4,cols=2)
        self.hbox = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox.Add(self.box, border=5, flag=wx.ALL)
        # Proposal-step sizes (betas) for each free parameter.
        tupleList = [('b-Rp/Rs',"Beta Rp/Rs:", 'Enter a beta for Rp/Rs here.','0.005'),
                     ('b-a/Rs',"Beta a/Rs:", 'Enter a beta for a/Rs here.','0.005'),
                     ('b-inc',"Beta Inclination:", 'Enter a beta for inclination here.','0.005'),
                     ('b-t0',"Beta t0:", 'Enter a beta for the mid transit time here.','0.005')]
        self.box2 = ParameterBox(self.panel,-1,tupleList,"Beta's",rows=4,cols=2)
        self.hbox2 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox2.Add(self.box2, border=5, flag=wx.ALL)
        # Parameters held fixed during the fit.
        tupleList = [('per',"Period:", 'Enter a value for the period here.','1.580400'),
                     ('gamma1',"gamma1:", 'Enter a value for gamma1 here.','0.23'),
                     ('gamma2',"gamma2:", 'Enter a value for gamma2 here.','0.3'),
                     ('ecc',"Eccentricity:", 'Enter a value for the eccentricity here.','0.0'),
                     ('pericenter',"Pericenter:", 'Enter a value for the pericenter here.','0.0')]
        self.box3 = ParameterBox(self.panel,-1,tupleList,"Fixed Parameters")
        self.hbox3 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox3.Add(self.box3, border=5, flag=wx.ALL)
        # Chain-control parameters.
        tupleList = [('saveiteration',"Iteration to save:", 'Enter a number for the nth iteration to be saved.','10'),
                     ('burnfrac',"Burn Fraction:", 'Enter a decimal for the burn fraction here.','0.20'),
                     ('acceptance',"Acceptance:", 'Enter a value for the acceptance rate here.','0.30'),
                     ('number', "Number of Steps:", 'Enter a value for the total steps here.','10000')]
        self.box4 = ParameterBox(self.panel,-1,tupleList,"Fit Parameters")
        self.hbox4 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox4.Add(self.box4, border=5, flag=wx.ALL)
        self.plotButton = wx.Button(self.panel,label = 'Run and Plot')
        self.Bind(wx.EVT_BUTTON,self.plot, self.plotButton)
        self.sizer0 = wx.FlexGridSizer(rows=1, cols=10)
        self.hbox5 = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox5.Add(self.sizer0,0, wx.ALIGN_CENTER|wx.ALL,5)
        self.sizer0.Add(self.plotButton,0,wx.ALIGN_CENTER|wx.ALL,5)
        self.vbox2 = wx.BoxSizer(wx.HORIZONTAL)
        self.vbox2.Add(self.hbox, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox2.Add(self.hbox2, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.topBox, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.Add(self.vbox2, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.Add(self.hbox3, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.Add(self.hbox4, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.Add(self.hbox5, 0, flag=wx.ALIGN_CENTER | wx.TOP)
        self.vbox.AddSpacer(10)
        self.vbox.AddSpacer(10)
        self.panel.SetSizer(self.vbox)
        self.vbox.Fit(self)
        self.create_menu()
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.Center()
        self.Show()

    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the MCMC frame.

        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        self.menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_exit = menu_file.Append(-1, "E&xit\tCtrl-Q", "Exit")
        self.Bind(wx.EVT_MENU, self.on_exit, m_exit)
        self.menubar.Append(menu_file, "&File")
        self.SetMenuBar(self.menubar)

    def plot(self,event):
        '''
        After checking that all of the user editable parameters in the frame are valid and loaded
        as a list of variables, this method actually executes the MCMC fitting routine by calling it from
        the fitting.py file.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        tupleList = [(self.box.userParams['Rp/Rs'].GetValue(),"Rp/Rs"),
                     (self.box.userParams['a/Rs'].GetValue(),"a/Rs"),
                     (self.box3.userParams['per'].GetValue(),"per"),
                     (self.box.userParams['inc'].GetValue(),"inc"),
                     (self.box3.userParams['ecc'].GetValue(),"ecc"),
                     (self.box.userParams['t0'].GetValue(),"t0"),
                     (self.box3.userParams['gamma1'].GetValue(),"gamma1"),
                     (self.box3.userParams['gamma2'].GetValue(),"gamma2"),
                     (self.box3.userParams['pericenter'].GetValue(),"pericenter"),
                     (self.box4.userParams['saveiteration'].GetValue(),"saveiteration"),
                     (self.box4.userParams['acceptance'].GetValue(),"acceptance"),
                     (self.box4.userParams['burnfrac'].GetValue(),"burnfrac"),
                     (self.box4.userParams['number'].GetValue(),"number")]
        if checkParams(self,tupleList) == True and self.radiusCheck() == True:
            initParams = [float(self.box.userParams['Rp/Rs'].GetValue()),float(self.box.userParams['a/Rs'].GetValue()),
                          float(self.box3.userParams['per'].GetValue()), float(self.box.userParams['inc'].GetValue()),
                          float(self.box3.userParams['gamma1'].GetValue()),float(self.box3.userParams['gamma2'].GetValue()),
                          float(self.box3.userParams['ecc'].GetValue()),float(self.box3.userParams['pericenter'].GetValue()),
                          float(self.box.userParams['t0'].GetValue())]
            nSteps = float(self.box4.userParams['number'].GetValue())
            # NOTE(review): the beta values typed into box2 are never read --
            # initBeta is hard-coded to 0.012 for all four parameters; confirm
            # whether box2 was meant to feed this list.
            initBeta = (np.zeros([4]) + 0.012).tolist()
            idealAcceptanceRate = float(self.box4.userParams['acceptance'].GetValue())
            interval = float(self.box4.userParams['saveiteration'].GetValue())
            burnFraction = float(self.box4.userParams['burnfrac'].GetValue())
            # Spawn a new process to execute the MCMC run separately.
            mcmcCall = 'import oscaar.fitting; mcmcinstance = oscaar.fitting.mcmcfit("%s",%s,%s,%s,%s,%s,%s); mcmcinstance.run(updatepkl=True, apertureRadiusIndex=%s); mcmcinstance.plot(num=%s)' % \
                       (self.pT,initParams,initBeta,nSteps,interval,idealAcceptanceRate,burnFraction,
                        self.apertureRadiusIndex,self.apertureRadiusIndex)
            subprocess.check_call(['python','-c',mcmcCall])
            # Load the data again and save it in a text file.
            self.data = IO.load(self.pT)
            if not self.saveLoc.lower().endswith(".txt"):
                self.saveLoc += ".txt"
            outfile = open(self.saveLoc,'w')
            outfile.write(self.data.uncertaintyString())
            outfile.close()

    def radiusCheck(self):
        '''
        This method checks to make sure that the aperture radius entered is valid and in the list
        available for the selected .pkl file.

        Returns
        -------
        literal : bool
            True if the radius is valid, false otherwise.
        '''
        if self.radiusList.GetValue() == "":
            self.IP = InvalidParameter(self.radiusList.GetValue(), self, -1, stringVal="radiusError")
            return False
        try:
            # float() may raise ValueError for non-numeric text.
            condition = self.epsilonCheck(self.data.apertureRadii,float(self.radiusList.GetValue()))
            self.tempNum = np.array(self.data.apertureRadii)[condition]
            if len(self.tempNum) == 0:
                tempString = self.radiusList.GetValue() + " was not found in " + str(self.data.apertureRadii)
                self.IP = InvalidParameter(tempString, self, -1, stringVal="radiusListError2")
                return False
        except ValueError:
            self.IP = InvalidParameter(self.radiusList.GetValue(), self, -1, stringVal="radiusError")
            return False
        return True

    def update(self,event):
        '''
        This method will update the appropriate parameters for the frame, if a user selects
        an appropriate planet name from the exoplanet.org database.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        if self.LCB.boxList[1].GetValue() == '':
            self.IP = InvalidParameter(self.LCB.boxList[1].GetValue(), self,-1, stringVal="planet")
        else:
            self.planet = self.LCB.boxList[1].GetValue()
            # transiterParams returns -1 for any parameter it could not resolve.
            [RpOverRs,AOverRs,per,inc,ecc] = returnSystemParams.transiterParams(self.planet)
            if RpOverRs == -1 or AOverRs == -1 or per == -1 or inc == -1 or ecc == -1:
                self.IP = InvalidParameter(self.LCB.boxList[1].GetValue(), self,-1, stringVal="planet")
            else:
                self.box.userParams['Rp/Rs'].SetValue(str(RpOverRs))
                self.box.userParams['a/Rs'].SetValue(str(AOverRs))
                self.box3.userParams['per'].SetValue(str(per))
                self.box.userParams['inc'].SetValue(str(inc))
                self.box3.userParams['ecc'].SetValue(str(ecc))
                self.IP = InvalidParameter("",self,-1, stringVal="params")

    def epsilonCheck(self,a,b):
        '''
        This method checks that two numbers are within machine precision of each other
        because otherwise we get machine precision difference errors when mixing
        single and double precision NumPy floats and pure Python built-in float types.

        Parameters
        ----------
        a : array
            An array of float type numbers to check through.
        b : float
            The number that is being checked for in the array.

        Returns
        -------
        literal : array
            This is an array of booleans.

        Notes
        -----
        There a boolean literals of true in the return array if any number in `a` is within machine precision
        of `b`.

        Examples
        --------
        Inputs: `a` = [0, 1.0, 2.0, 3.0, 4.0], `b` = 3.0
        Return: [False, False, False, True, False]
        '''
        # Coerce to ndarray so plain Python lists work too.
        a = np.array(a)
        return np.abs(a-b) < np.finfo(np.float32).eps

    def radiusUpdate(self, event):
        '''
        This method updates the current index in the list of available radii that this frame will use to plot MCMC.
        It does this by calling self.epsilonCheck to get an array of booleans. Afterwords, it selects the location
        of the boolean 'True' and marks that as the new index.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.apertureRadiusIndex = np.where(self.epsilonCheck(self.data.apertureRadii,
                                                              float(self.radiusList.GetValue())))[0][0]

    def on_exit(self, event):
        '''
        This method defines the action quit from the menu. It closes the frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()

    def onDestroy(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        # Lets the parent open a new MCMC window later.
        self.parent.loadMCMC = False
class ParameterBox(wx.Panel):
    '''
    This is a general method that is used throughout the GUI to create an interactive box
    with multiple text controls for user input.

    Parameters
    ----------
    parent : window
        The parent window that this box will be associated with.
    objectID : int
        The identity number of the object.
    tupleList : array
        An array of tuples for the different text controls desired. The tuple must be four strings.
    name : string, optional
        The name of the box for the current set of parameters. It is displayed in the upper left hand corner.
    rows : int, optional
        The number of rows for the box.
    cols : int, optional
        The number of columns for the box.
    vNum : int, optional
        The vertical displacement between each text control.
    hNum : int, optional
        The horizontal displacement between each text control.
    font : wx.font(), optional
        The type of style you would like the text to be displayed as.
    secondButton : bool, optional
        If a radio button is created by this class, the first value of the radio button will be selected
        since the default value is false. IF this variable is true however, the second value of the radio
        button is selected.

    Notes
    -----
    The list that is given as a parameter must be an array of tuples. The format for these tuples is
    (string, string, string, string). The first string will be the keyword (widget) to select that specific
    text box to work with in the code. The second string is the name of the parameter that will appear in the GUI.
    The third string will be the tooltip that is seen if the user hovers over the box. The fourth string is
    the default value for that parameter.
    If however, the widget name begins with 'rb', a radio button will be created. In this scenario, the second
    string will be the name of the parameter, with the 3rd and 4th strings being the values of the two radio
    buttons that will be created.
    '''

    def __init__(self, parent, objectID, tupleList, name="", rows=1, cols=10, vNum=0, hNum=0, font=wx.NORMAL_FONT,
                 secondButton=False):
        wx.Panel.__init__(self,parent,objectID)
        box1 = wx.StaticBox(self, -1, name)
        sizer = wx.StaticBoxSizer(box1, wx.VERTICAL)
        # Maps each widget keyword to its created control for later lookup.
        self.userParams = {}
        sizer0 = wx.FlexGridSizer(rows=rows, cols=cols, vgap=vNum, hgap=hNum)
        sizer.Add(sizer0, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
        for (widget, labeltxt, ToolTip, value) in tupleList:
            label = wx.StaticText(self, -1, labeltxt, style=wx.ALIGN_CENTER)
            sizer0.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 3)
            label.SetFont(font)
            if widget == "observatoryName" or widget == "fileName":
                # These two fields get a wider text control.
                self.userParams[widget] = wx.TextCtrl(self, -1, value = value, size = (220,wx.DefaultSize.GetHeight()))
            elif not widget.find('rb') == 0:
                self.userParams[widget] = wx.TextCtrl(self, -1, value = value)
            if widget.find('rb') == 0:
                # 'rb'-prefixed widgets become a pair of radio buttons whose
                # labels come from the tooltip and value tuple slots.
                label1 = ToolTip
                label2 = value
                self.userParams[widget] = wx.RadioButton(self, label = label1, style = wx.RB_GROUP)
                sizer0.Add(self.userParams[widget], 0, wx.ALIGN_CENTRE|wx.ALL, 0)
                if secondButton == True:
                    self.userParams[widget+"1"] = wx.RadioButton(self, label = label2)
                    self.userParams[widget+"1"].SetValue(True)
                else:
                    self.userParams[widget+"1"] = wx.RadioButton(self, label = label2)
                    self.userParams[widget].SetValue(True)
                sizer0.Add(self.userParams[widget+"1"], 0, wx.ALIGN_CENTRE|wx.ALL, 0)
            else:
                self.userParams[widget].SetToolTipString(ToolTip)
                sizer0.Add(self.userParams[widget], 0, wx.ALIGN_CENTRE|wx.ALL, 0)
                if widget == "ingress" or widget == "egress":
                    # Ingress/egress get a second companion control for the time value.
                    value = "00:00:00"
                    self.userParams[widget+"1"] = wx.TextCtrl(self, -1, value = value)
                    self.userParams[widget+"1"].SetToolTipString(ToolTip)
                    sizer0.Add(self.userParams[widget+"1"], 0, wx.ALIGN_CENTRE|wx.ALL, 0)
        self.SetSizer(sizer)
        sizer.Fit(self)
class AddLCB(wx.Panel):
    '''
    This creates the set of a label, control box, and button. Usually used to let a user
    browse and select a file.

    Parameters
    ----------
    parent : window
        The parent panel that this box will be associated with.
    objectID : int
        The identity number of the object.
    parent2 : window, optional
        Usually the parent is the panel that the LCB gets created in. If however, there is a need
        to use the actual parent frame, a second window is allowed to be linked.
    name : string, optional
        The name of the label for the static box. If the name is 'mainGUI' or 'planet' a different set gets
        created.
    buttonLabel : string, optional
        The name of the button that is created.
    multFiles : bool, optional
        If true, when browsing for files the user can select multiple ones. If false, only one file is
        allowed to be selected.
    rowNum : int, optional
        The number of rows for the box.
    colNum : int, optional
        The number of columns for the box.
    vNum : int, optional
        The vertical displacement between each text control.
    hNum : int, optional
        The horizontal displacement between each text control.
    font : wx.font(), optional
        The type of style you would like the text to be displayed as.
    updateRadii : bool, optional
        If true, this method will update the available aperture radii list for the drop down menu in the
        parent frame.
    boxName : string, optional
        The name of the box for the current LCB set. It is displayed in the upper left hand corner.
    height : int, optional
        The height of the control box.
    saveType : wx.FD_*, optional
        The style of the box that will appear. The * represents a wild card value for different types.
    '''

    def __init__(self, parent, objectID, parent2=None, name='', buttonLabel="Browse", multFiles=False, rowNum=1, colNum=3,
                 vNum=0, hNum=0, font=wx.NORMAL_FONT, updateRadii=False, boxName="", height=20, saveType=wx.FD_OPEN):
        wx.Panel.__init__(self,parent,objectID)
        box1 = wx.StaticBox(self, -1, boxName)
        box1.SetFont(font)
        sizer = wx.StaticBoxSizer(box1, wx.VERTICAL)
        self.parent = parent2
        # messageFrame tracks whether an InvalidParameter popup is already open.
        self.messageFrame = False
        # Placeholder for the popup frame; replaced with an InvalidParameter
        # instance when one is shown.
        self.IP = wx.Frame
        # boxList/buttonList are keyed by 1-based creation order (iterationNumber).
        self.boxList = {}
        self.buttonList = {}
        sizer0 = wx.FlexGridSizer(rows=rowNum, cols=colNum, vgap=vNum, hgap=hNum)
        sizer.Add(sizer0, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
        iterationNumber = 0
        extraName = ""
        if name == "mainGUI":
            # The main GUI variant expands into five fixed path rows.
            extraName = "mainGUI"
            name = "Path to Dark Frames: ,Path to Master Flat: ,Path to Data Images: ,Path to Regions File: ," + \
                   "Output Path: "
        for eachName in name.split(","):
            if sys.platform != "win32":
                # Non-Windows platforms need taller boxes for the multi-file rows.
                if eachName == "Path to Dark Frames: " or eachName == "Path to Data Images: ":
                    height = 35
                else:
                    height = 25
            if eachName == "Path to Dark Frames: " or eachName == "Path to Data Images: " or eachName == "Path to "+\
                    "Regions File: ":
                if extraName == "mainGUI":
                    multFiles = True
                    saveType = None
            elif eachName == "Path to Master Flat: ":
                multFiles = False
                saveType = wx.FD_OPEN
            elif eachName == "Output Path: ":
                multFiles = False
                saveType = wx.FD_SAVE
            iterationNumber += 1
            if eachName == 'planet':
                # Planet variant: fixed label, default value, and an update button
                # instead of a browse button.
                self.label = wx.StaticText(self, -1, "Planet Name", style=wx.ALIGN_CENTER)
                self.label.SetFont(font)
                self.boxList[iterationNumber] = wx.TextCtrl(self, -1, value='GJ 1214 b', style=wx.TE_RICH)
                self.boxList[iterationNumber].SetToolTipString("Enter the name of a planet from the" +\
                                                               "exoplanet.org database here.")
            else:
                self.label = wx.StaticText(self, -1, eachName, style=wx.ALIGN_CENTER)
                self.label.SetFont(font)
                self.boxList[iterationNumber] = wx.TextCtrl(self, -1, size=(500,height), style=wx.TE_RICH)
            sizer0.Add(self.label, 0, wx.ALIGN_CENTRE|wx.ALL, 3)
            sizer0.Add(self.boxList[iterationNumber], 0, wx.ALIGN_CENTRE|wx.ALL, 0)
            if eachName == 'planet':
                self.updateButton = wx.Button(self, -1, "Update Parameters")
                sizer0.Add(self.updateButton,0,wx.ALIGN_CENTER|wx.ALL,0)
            else:
                if sys.platform != 'win32':
                    # On Mac, show the command glyph instead of the Ctrl shortcut.
                    if buttonLabel == "Browse\t (Cntrl-O)":
                        buttonLabel = "Browse\t("+u'\u2318'"-O)"
                    self.buttonList[iterationNumber] = wx.Button(self, -1, buttonLabel)
                else:
                    self.buttonList[iterationNumber] = wx.Button(self, -1, buttonLabel)
                # Default-argument trick binds the per-row values at lambda
                # creation time (avoids the late-binding closure pitfall).
                self.buttonList[iterationNumber].Bind(wx.EVT_BUTTON, lambda event, lambdaIter = iterationNumber,
                                                      lambdaMult = multFiles, lambdaSave = saveType:
                                                      self.browseButtonEvent(event, "Choose Path(s) to File(s)",self.boxList[lambdaIter], lambdaMult,
                                                                             lambdaSave, update=updateRadii))
                sizer0.Add(self.buttonList[iterationNumber],0,wx.ALIGN_CENTRE|wx.ALL,0)
        self.SetSizer(sizer)
        sizer.Fit(self)

    def browseButtonEvent(self, event, message, textControl, fileDialog, saveDialog, update=False):
        '''
        This method defines the `browse` function for selecting a file on any OS.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        message : string
            The message that tells the user what to choose.
        textControl : wx.TextCtrl
            The box in the frame that will be refreshed with the files that are chosen by the user.
        fileDialog : bool
            If true, the style is wx.FD_MULTIPLE, otherwise it is the same as the `saveDialog`.
        saveDialog : wx.FD_*
            The style of the box that will appear. The * represents a wild card value for different types.
        update : bool, optional
            This will update the aperture radii list for a selected file in the parent frame if true.
        '''
        if not fileDialog:
            dlg = wx.FileDialog(self, message = message, style = saveDialog)
        else:
            dlg = wx.FileDialog(self, message = message, style = wx.FD_MULTIPLE)
        if dlg.ShowModal() == wx.ID_OK:
            # NOTE(review): this compares against wx.SAVE while __init__ passes
            # wx.FD_SAVE — these are aliases in classic wxPython; confirm they
            # stay equal under the wxPython version in use.
            if saveDialog == wx.SAVE:
                filenames = [dlg.GetPath()]
            else:
                filenames = dlg.GetPaths()
            textControl.Clear()
            # Write the selection back as a comma-separated list (no trailing comma).
            for i in range(0,len(filenames)):
                if i != len(filenames)-1:
                    textControl.WriteText(filenames[i] + ',')
                else:
                    textControl.WriteText(filenames[i])
            if update == True:
                try:
                    # Refresh the parent's aperture-radii drop-down from the
                    # newly selected .pkl file.
                    if self.parent.validityCheck(throwException = False):
                        self.parent.radiusList.Clear()
                        self.parent.data = IO.load(self.parent.box.boxList[1].GetValue())
                        self.parent.apertureRadii = np.empty_like(self.parent.data.apertureRadii)
                        self.parent.apertureRadii[:] = self.parent.data.apertureRadii
                        radiiString = [str(x) for x in self.parent.data.apertureRadii]
                        for string in radiiString:
                            self.parent.radiusList.Append(string)
                        self.parent.radiusList.SetValue(radiiString[0])
                except AttributeError:
                    # Loaded object lacks expected attributes: outdated .pkl file.
                    self.IP = InvalidParameter("", self, -1, stringVal="oldPKL")
        dlg.Destroy()
class ScanParamsBox(wx.Panel):
    '''
    This is the box that is used in the GraphFrame class for an interactive light curve plot.
    '''

    def __init__(self,parent,objectID):
        '''
        This is the initialization of the box. It has four controls: bin size, title, x-axis label,
        and y-axis label.
        '''
        wx.Panel.__init__(self,parent,objectID)
        # messageFrame tracks whether an InvalidParameter popup is already open.
        self.messageFrame = False
        # Placeholder for the popup frame; replaced when a popup is shown.
        self.IP = wx.Frame
        box1 = wx.StaticBox(self, -1, "Descriptive information")
        sizer = wx.StaticBoxSizer(box1, wx.VERTICAL)
        # Maps control keys ('bin', 'title', 'xlabel', 'ylabel') to text controls.
        self.userinfo = {}
        sizer0 = wx.FlexGridSizer(rows=2, cols=4)
        sizer.Add(sizer0, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
        for (widget,label,ToolTip) in [
            ('bin',"Bin Size:",
             'Enter a bin number here.'),
            ('title',"Title:",
             'Enter a name for the title here.'),
            ('xlabel',"X-Axis Name:",
             'Enter a name for the X-Axis here.'),
            ('ylabel',"Y-Axis Name:",
             'Enter a name for the Y-Axis here.')
            ]:
            label = wx.StaticText(self, -1, label, style=wx.ALIGN_CENTER)
            sizer0.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 3)
            if widget == 'bin':
                self.userinfo[widget] = wx.TextCtrl(self, -1,value='10')
            elif widget == 'xlabel':
                self.userinfo[widget] = wx.TextCtrl(self, -1,value='Time (JD)')
            elif widget == 'ylabel':
                self.userinfo[widget] = wx.TextCtrl(self, -1,value='Relative Flux')
            elif widget == 'title':
                self.userinfo[widget] = wx.TextCtrl(self, -1,value='Light Curve')
            self.userinfo[widget].SetToolTipString(ToolTip)
            sizer0.Add(self.userinfo[widget], 0, wx.ALIGN_CENTRE|wx.ALL, 0)
        self.SetSizer(sizer)
        sizer.Fit(self)
        # old*/new* pairs track the last-applied vs. currently-entered values so
        # boxDiff() can tell whether the plot must be redrawn.
        self.oldNum = self.userinfo['bin'].GetValue()
        self.newNum = self.userinfo['bin'].GetValue()
        self.oldX = str(self.userinfo['xlabel'].GetValue())
        self.newX = str(self.userinfo['xlabel'].GetValue())
        self.oldY = str(self.userinfo['ylabel'].GetValue())
        self.newY = str(self.userinfo['ylabel'].GetValue())
        self.oldtitle = str(self.userinfo['title'].GetValue())
        self.newtitle = str(self.userinfo['title'].GetValue())
        # Maximum allowed bin size; overwritten via setMax() once data is loaded.
        self.max = 100

    def boxCorrect(self):
        '''
        This method checks to make sure that the user input for bin size is a number
        as well as greater than the miniumum bin size of 5. The maximum bin size depends
        on the light curve that was loaded.

        Returns
        -------
        bool
            True when the bin size parses as an int in (4, self.max]; otherwise an
            InvalidParameter popup is shown and False is returned.
        '''
        if self.userinfo['bin'].GetValue() == '':
            self.IP = InvalidParameter(self.userinfo['bin'].GetValue(), self, -1, secondValue=str(self.max))
            return False
        else:
            try:
                self.var = int(self.userinfo['bin'].GetValue())
            except ValueError:
                self.IP = InvalidParameter(self.userinfo['bin'].GetValue(), self, -1, secondValue=str(self.max))
                return False
            if int(self.userinfo['bin'].GetValue()) <= 4 or int(self.userinfo['bin'].GetValue()) > self.max:
                self.IP = InvalidParameter(self.userinfo['bin'].GetValue(), self,-1, secondValue=str(self.max))
                return False
            else:
                return True

    def boxDiff(self):
        '''
        This method will determine if a new plot needs to be made or not.

        Returns
        -------
        literal : bool
            If true, one of the four parameters for this box was changed, and a new plot needs to be made. If
            no change has been made then it returns false.
        '''
        # Each branch also commits the new value so the next call compares
        # against the state that was just plotted.
        if not self.oldNum == self.newNum:
            self.oldNum = self.newNum
            return True
        elif not self.oldX == self.newX:
            self.oldX = self.newX
            return True
        elif not self.oldY == self.newY:
            self.oldY = self.newY
            return True
        elif not self.oldtitle == self.newtitle:
            self.oldtitle = self.newtitle
            return True
        else:
            return False

    def update(self):
        '''
        Before checking if a parameter has been changed using the above boxDiff() method, this method
        updates the current values of each control to be checked against the old values.
        '''
        self.newNum = self.userinfo['bin'].GetValue()
        self.newX = self.userinfo['xlabel'].GetValue()
        self.newY = self.userinfo['ylabel'].GetValue()
        self.newtitle = self.userinfo['title'].GetValue()

    def setMax(self, length):
        '''
        Sets the maximum bin size for the plot.

        Parameters
        ----------
        length : int
            Number for the max bin size.
        '''
        self.max = length
class InvalidParameter(wx.Frame):
    '''
    This class is universally used throughout the code to relay any pop-up messages
    to the user.
    '''

    def __init__(self, message, parent, objectID, stringVal='', secondValue='0', columns=2):
        '''
        This is the initialization of the popup message. It varies greatly depending on what
        the user needs to see.

        Parameters
        ----------
        message : string
            Usually the invalid value that was entered by the user somewhere. Left blank if
            instead of an error, a message just needs to be seen by the user.
        parent : window
            The parent class that this frame will open up from and is associated with.
        objectID : int
            The identity number of the object.
        stringVal : string, optional
            This is the string that is used to determine what type of message will appear in the frame
            that pops up.
        secondValue : string, optional
            If a second value needs to be displayed besides `message`, this is where it is entered.
        columns : int, optional
            The number of columns that this frame will have.

        Notes
        -----
        There is no return, but on successful completion of initialization a window will pop up
        with a message for the user.
        '''
        # Windows needs a slightly taller frame for the same content.
        if sys.platform == "win32":
            wx.Frame.__init__(self, parent, objectID, 'Invalid Parameter', size = (500,110))
        else:
            wx.Frame.__init__(self, parent, objectID, 'Invalid Parameter', size = (500,100))
        self.create_menu()
        self.Bind(wx.EVT_CHAR_HOOK, self.onCharOkay)
        self.parent = parent
        # Mark the parent as having an open message frame (cleared in onDestroy).
        if not self.parent.messageFrame:
            self.parent.messageFrame = True
        # Non-error notification variants get their own window titles.
        if stringVal == "params":
            self.SetTitle("Updated Parameters")
            self.Bind(wx.EVT_CHAR_HOOK, self.onOkay)
        elif stringVal == "ds9":
            self.SetTitle("DS9 Error")
        elif stringVal == "fitOpen":
            self.SetTitle("Fitting Frame Open Error")
        elif stringVal == "warnError":
            self.SetTitle("Warning about local times!")
        elif stringVal == "regionsUpdate":
            self.SetTitle("Regions File Set Added!")
        elif stringVal == "setExists":
            self.SetTitle("Set Exists!")
        self.panel = wx.Panel(self)
        # Default body text; overwritten below by the branch matching stringVal.
        self.string = "invalid"
        if secondValue != '0':
            self.string = "The bin size must be between 5 and "+ secondValue +"."
        if stringVal == "Rp/Rs":
            self.string = "The value for Rp over Rs must be between 0 and 1."
        elif stringVal == "a/Rs":
            self.string = "The value for A over Rs must be greater than 1."
        elif stringVal == "inc":
            # Fixed typo: "inclincation" -> "inclination".
            self.string = "The value for the inclination must be between 0 and 90."
        elif stringVal == "t0":
            self.string = "The value for the mid-transit time, t0, must be greater than 0."
        elif stringVal == "gamma1":
            self.string = "The value entered for gamma1 must be a number."
        elif stringVal == "gamma2":
            self.string = "The value entered for gamma2 must be a number."
        elif stringVal == "gamma":
            self.string = "The value for Gamma1 + Gamma2 must be less than or equal to 1."
        elif stringVal == "per":
            self.string = "The value for the period must be greater than 0."
        elif stringVal == "ecc":
            self.string = "The value for the eccentricity must be between 0 and 1."
        elif stringVal == "pericenter":
            self.string = "The value for the pericenter must be greater than or equal to 0."
        elif stringVal == "planet":
            self.string = "The name of the planet does not exist in the database."
        elif stringVal == "limbdark":
            self.string = "The parameter for Limb-Darkening must be either 'False', 'linear', or 'quadratic'."
        elif stringVal == "saveiteration":
            self.string = "The iterative step to be saved must be greater than or equal to 5."
        elif stringVal == "acceptance":
            self.string = "The acceptance rate must be greater than 0."
        elif stringVal == "burnfrac":
            self.string = "The burn number must be greater than 0 and less than or equal to 1."
        elif stringVal == "number":
            self.string = "The number of total steps must be greater than or equal to 10."
        elif stringVal == "mod":
            self.string = "The iterative step to be saved cannot be greater than the total number of steps."
        elif stringVal == "flat1":
            self.string = "The path(s) to flat images must be fixed."
        elif stringVal == "flat2":
            self.string = "The path(s) to dark flat images must be fixed."
        elif stringVal == "flat3":
            self.string = "The path to save the master flat must be fixed."
        elif stringVal == "fits":
            self.string = "One or more of the files in " + secondValue + " need to be fixed."
        elif stringVal == "master":
            self.string = "Either more than one file has been entered, or the file entered needs to be fixed in the " + \
                          secondValue + "."
        elif stringVal == "output":
            self.string = "Either you entered a directory, or the specified path cannot be made for the " + secondValue + \
                          "."
        elif stringVal == "leftbox":
            self.string = "Please enter a number for the " + secondValue + "."
        elif stringVal == "dateTime":
            self.string = "Please check the format and values entered for the ingress or egress " + secondValue + ".\n"
            if secondValue == "date":
                self.string += "The year must be within 100 years of today, the month must be between 1 and 12\nand" +\
                               " the day must be between 1 and 31."
            elif secondValue == "time":
                self.string += "The hour must be between 0 and 23, while both the minutes and seconds must be between"+\
                               " 0 and 59.\nThe format is hh:mm:ss."
        elif stringVal == "obsName" or stringVal == "obsFile":
            self.string = "The observatory name or file name must be fixed."
        elif stringVal == "logicalDate":
            self.string = "The starting date must come before the ending date."
        elif stringVal == "logicalTime":
            self.string = "The starting time must come before the ending time when the dates are equal."
        elif stringVal == "obsDate":
            self.string = "The starting date and ending date both need to be in the format YYYY/MM/DD with integers."
        elif stringVal == "dateRange":
            self.string = "The year must be within 100 years of today, the month must be between 1 and 12,\nand the"+\
                          " day must be between 1 and 31."
        elif stringVal == "coordRange":
            self.string = "The latitude must be between 90 and -90 degrees, while the longitude must be \nbetween "+\
                          "0 and 180 degrees. Both must have min and sec in between 0 and 59."
        elif stringVal == "coordTime":
            self.string = "The longitude and latitude must be in the format Deg:Min:Sec with numbers."
        elif stringVal == "tempElevNum":
            if secondValue == "apparent magnitude upper limit":
                self.string = "The " + secondValue + " must be a number."
            else:
                self.string = "The " + secondValue + " must be a number greater than or equal to 0."
        elif stringVal == "twilight":
            self.string = "The twilight must be -6, -12, or -18. Please select one from the drop down menu."
        elif stringVal == "lowerElevation":
            # Fixed typo: "limist" -> "limit".
            self.string = "The lower elevation limit needs to be in the format Deg:Min:Sec, "+\
                          "with min and sec\nbetween 0 and 59. The degrees must be between 0 and 90."
        elif stringVal == "radiusNum":
            self.string = "The aperture radii values must be numbers."
        elif stringVal == "radiusEqual":
            self.string = "The min and max aperture radii cannot be equal."
        elif stringVal == "radiusStep":
            self.string = "The aperture radii step size cannot be smaller than the difference between the maximum\n" + \
                          "radius and the minimum radius. The format for this is \"min, max, stepsize\"."
        elif stringVal == "radiusLogic":
            self.string = "The minimum aperture radius must be smaller than the maximum. None of the 3 parameters\n" + \
                          "can be equal to 0."
        elif stringVal == "radiusLogic2":
            self.string = "None of the aperture radii can be equal to 0."
        elif stringVal == "radiusError":
            self.string = "The radius you entered was empty or not a number. Please enter a valid number."
        elif stringVal == "radiusListError":
            if secondValue == "etdError":
                self.string = "The conversion method here depends on the aperture radii list from the .pkl file. You\n" + \
                              "must update the radii list to continue."
            else:
                self.string = "The plotting methods rely on the aperture radii list from the .pkl file. You\n" + \
                              "must update the radii list to continue."
        elif stringVal == "radiusListError2":
            self.string = "The radius you entered was not in the aperture radii list for this .pkl file.\n" + \
                          "Please pick a radius from the approved radii in the drop down menu."
        elif stringVal == "utZone":
            self.string = "The time zone must be between -12 and 12. Please choose one from the drop down menu."
        elif stringVal == "regionsError1":
            self.string = "Either the regions file or reference file for this set is empty. You cannot add an " + \
                          "extra\nregions file without a referenced data image."
        elif stringVal == "regionsError2":
            self.string = "You have entered a filename that does not exist or more than one file. There can " + \
                          "only be one regions file\nand one reference file entered at a time for a set."
        elif stringVal == "regionsError3":
            self.string = "The regions file must be a valid .reg file."
        elif stringVal == "regionsError4":
            self.string = "The reference file must be a valid .fits or .fit file."
        elif stringVal == "emptyReg":
            self.string = "You must enter a regions file. If you wish you can enter additional sets of regions " + \
                          "files\nafter at least one has been entered."
        elif stringVal == "invalidReg":
            # Fixed typo: "vaild" -> "valid".
            self.string = "This regions file was not found, or is not a valid .reg file."
        elif stringVal == "invalidRef":
            self.string = "This reference file was not found, or is not a valid .fits or .fit file."
        elif stringVal == "invalidRefExist":
            # Fixed missing space between "list of" and "data images".
            self.string = "This reference file was not found in the list of data images. Please add it to the list of " + \
                          "data images and try again."
        elif stringVal == "outofbounds":
            self.string = "You must enter extra regions files as sets with a reference file. The format is " + \
                          "\"regionsFiles,referenceFile;\"."
        elif stringVal == "referenceImageDup":
            self.string = "The reference image you have listed in this set is already assigned to another regions file."
        elif stringVal == "emptyKeyword":
            self.string = "The exposure time keyword cannot be empty. Please use a valid phrase, or choose from " + \
                          "the drop down menu."
        elif stringVal == "invalidKeyword":
            self.string = "The keyword you entered was not found in the header of the first data image."
        elif stringVal == "emailKeyword":
            self.string = "This keyword is in the header file of the first data image, but is not something we " + \
                          "have a conversion method for.\nPlease email us the keyword you are trying to use and we " + \
                          "will include it into our list of possible keywords."
        elif stringVal == "saveLocation":
            self.string = "Either you entered a directory, or the specified path cannot be made to save the results " + \
                          "of MCMC in a text file."
        elif stringVal == "regionsDup":
            self.string = "The regions file that you have entered is already assigned to another reference image."
        self.okButton = wx.Button(self.panel,label = "Okay", pos = (125,30))
        self.Bind(wx.EVT_BUTTON, self.onOkay, self.okButton)
        # Some variants display a fixed static text instead of self.string; the
        # final else appends the offending value to the message chosen above.
        if stringVal == "path":
            self.text = wx.StaticText(self.panel, -1, "The following is an invalid output path: " + message)
        elif stringVal == "params":
            self.text = wx.StaticText(self.panel, -1, "The appropriate parameters have been updated.")
        elif stringVal == "ds9":
            self.Bind(wx.EVT_WINDOW_DESTROY, self.ds9Error)
            self.text = wx.StaticText(self.panel, -1,
                                      "It seems that ds9 may not have installed correctly, please try again.")
        elif stringVal == "importError":
            self.text = wx.StaticText(self.panel, -1, "Failed to import ephem, please try again.")
        elif stringVal == "fitOpen":
            self.Bind(wx.EVT_WINDOW_DESTROY, self.fitError)
            self.text = wx.StaticText(self.panel, -1, "Please close the fitting frame window and try again.")
        elif stringVal == "warnError":
            # Closing this warning triggers the parent's calculation.
            self.Bind(wx.EVT_WINDOW_DESTROY, self.parent.calculate)
            self.text = wx.StaticText(self.panel, -1, "Please be careful. The local times are calculated using " + \
                                      "PyEphem's ephem.localtime(\"input\") method. Make sure\nthat this method " + \
                                      "produces the correct local time for yourself. If you don't know how to check " + \
                                      "this, please refer\nto the documentation from the help menu in the main frame. " + \
                                      "This message is shown once per GUI session,\nand will run the calculations " + \
                                      "for the current parameters as soon as you close this window.")
        elif stringVal == "oldPKL":
            self.text = wx.StaticText(self.panel, -1, "This seems to be an outdated .pkl file, sorry. Try creating" + \
                                      " a new .pkl file from the main frame and try again.\nIf this .pkl file is" + \
                                      " important and cannot be recreated, talk to our developers for information on" + \
                                      " how to extract \nthe data from the file.")
        elif stringVal == "regionsUpdate":
            self.text = wx.StaticText(self.panel, -1, "This set has been added to the list of regions sets "+ \
                                      "in the main GUI.")
        elif stringVal == "setExists":
            self.text = wx.StaticText(self.panel, -1, "The set you are trying to add is already there! " + \
                                      "Please add a different set.")
        elif stringVal == "upToDate":
            self.Title = "Up To Date"
            self.text = wx.StaticText(self.panel, -1, "The version of " \
                                      "OSCAAR that you have is currently " \
                                      "up to date!\n\nYour version is from "\
                                      "commit: \n" + oscaar.__sha__ )
        elif stringVal == "newCommit":
            self.Title = "New Commit Available!"
            self.text = wx.StaticText(self.panel, -1, "The current vers" \
                                      "ion that you have is out of date. " \
                                      "Please visit our GitHub page at "\
                                      "\n http://www.github.com/OSCAAR/"\
                                      "OSCAAR\nand retrieve the latest "\
                                      "commit.\n\nYour version is from "\
                                      "commit: \n" + oscaar.__sha__)
        elif stringVal == "installAgain":
            self.Title = "Error"
            self.text = wx.StaticText(self.panel, -1, "There seems to be an outdated __init__ file. Please"\
                                      " reinstall OSCAAR to use this update function.")
        elif stringVal == "noInternetConnection":
            self.Title = "Error"
            self.text = wx.StaticText(self.panel, -1, "An internet"\
                                      " connection is needed to access this function, "\
                                      "no connection is detected.\n\nPlease check your "\
                                      "connection and try again.")
        elif stringVal == "successfulConversion":
            self.Title = "Conversion Completed"
            self.text = wx.StaticText(self.panel, -1, "A file that the Czech ETD will accept has been created!")
        else:
            self.text = wx.StaticText(self.panel, -1, self.string +"\nThe following is invalid: " + message)
        self.sizer0 = wx.FlexGridSizer(rows=2, cols=columns)
        self.hbox = wx.BoxSizer(wx.HORIZONTAL)
        self.hbox.Add(self.sizer0,0, wx.ALIGN_CENTER|wx.ALL,5)
        self.sizer0.Add(self.text,0,wx.ALIGN_CENTER|wx.ALL,5)
        self.sizer0.Add(self.okButton,0,wx.ALIGN_CENTER|wx.ALL,5)
        self.Bind(wx.EVT_WINDOW_DESTROY, self.onDestroy)
        self.panel.SetSizer(self.hbox)
        self.hbox.Fit(self)
        self.Center()
        self.Show()

    def ds9Error(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.ds9Open = False

    def fitError(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.loadFitError = False

    def create_menu(self):
        '''
        This method creates the menu bars that are at the top of the InvalidParameter frame.

        Notes
        -----
        This method has no input or return parameters. It will simply be used as self.create_menu()
        when in the initialization method for an instance of this frame.
        '''
        self.menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_exit = menu_file.Append(-1, "Exit", "Exit")
        self.Bind(wx.EVT_MENU, self.onOkay, m_exit)
        self.menubar.Append(menu_file, "&File")
        self.SetMenuBar(self.menubar)

    def onCharOkay(self,event):
        '''
        This method allows for users on a Mac to close the InvalidParameter frame by just pressing the
        enter key when it pops up.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.keycode = event.GetKeyCode()
        if self.keycode == wx.WXK_RETURN:
            self.Destroy()

    def onOkay(self, event):
        '''
        This method defines the action quit from the menu. It closes the frame. In this class it also
        defines what happens when the user clicks the ok button.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.Destroy()

    def onDestroy(self, event):
        '''
        Whenever this frame is closed, this secondary method updates a variable in the parent
        class to make sure that it knows there is no active instance of this frame.

        Parameters
        ----------
        event : wx.EVT_*
            A wxPython event that allows the activation of this method. The * represents a wild card value.
        '''
        self.parent.messageFrame = False

    def checkParams(self, tupleList):
        '''
        This method checks to make sure that all of the parameters and values that are in
        `tupleList` are valid for the MCMC and LeastSquaresFit classes.

        Parameters
        ----------
        tupleList : array
            The input is an array of tuples in the form: (int,string).

        Returns
        -------
        literal : bool
            True if all of the parameters required to run MCMC or LeastSquaresFit are valid,
            false otherwise.
        '''
        # Sentinels: -1 means "this parameter was not present in tupleList".
        self.tempGamma1 = -1
        self.tempGamma2 = -1
        self.tempSaveIteration = -1
        self.tempNumber = -1
        for (number,string) in tupleList:
            if number == '':
                self.IP = InvalidParameter(number, self,-1, stringVal=string)
                return False
            else:
                # Everything except the limb-darkening mode must parse as a float.
                try:
                    if string != "limbdark":
                        self.tmp = float(number)
                except ValueError:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            # Per-parameter range checks; each failure pops a tailored message.
            if string == "Rp/Rs":
                if float(number) > 1 or float(number) < 0:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            elif string == "a/Rs":
                if float(number) <= 1:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            elif string == "per":
                if float(number) < 0:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            elif string == "inc":
                if float(number) < 0 or float(number) > 90:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            elif string == "t0":
                if float(number) < 0:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            elif string == "ecc":
                if float(number) < 0 or float(number) > 1:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            elif string == "pericenter":
                if float(number) < 0:
                    self.IP = InvalidParameter(number, self,-1, stringVal=string)
                    return False
            elif string == "limbdark":
                # Only these three literal strings are accepted.
                if number not in ("False", "linear", "quadratic"):
                    self.IP = InvalidParameter(number,self,-1,stringVal=string)
                    return False
            elif string == 'gamma1':
                self.tempGamma1 = number
            elif string == 'gamma2':
                self.tempGamma2 = number
            elif string == "saveiteration":
                self.tempSaveIteration = float(number)
                if float(number) < 5:
                    self.IP = InvalidParameter(number,self,-1,stringVal=string)
                    return False
            elif string == "number":
                self.tempNumber = float(number)
                if float(number) < 10:
                    self.IP = InvalidParameter(number,self,-1,stringVal=string)
                    return False
            elif string == "acceptance":
                if float(number) <= 0:
                    self.IP = InvalidParameter(number,self,-1,stringVal=string)
                    return False
            elif string == "burnfrac":
                if float(number) > 1 or float(number) <= 0:
                    self.IP = InvalidParameter(number,self,-1,stringVal=string)
                    return False
        # Cross-field checks once every individual entry has been validated.
        if(self.tempNumber != -1) and (self.tempSaveIteration != -1):
            if (self.tempNumber % self.tempSaveIteration) != 0:
                tempString = str(self.tempSaveIteration)+" < "+str(self.tempNumber)
                self.IP = InvalidParameter(tempString,self,-1,stringVal="mod")
                return False
        self.totalGamma = float(self.tempGamma1) + float(self.tempGamma2)
        self.totalString = str(self.totalGamma)
        if self.totalGamma > 1:
            self.IP = InvalidParameter(self.totalString, self,-1, stringVal="gamma")
            return False
        return True
###################
#This Runs The GUI#
###################
def main():
    """
    Deliberate no-op entry point.

    Keeping this empty means importing oscaarGUI never opens the application
    frame as a side effect; the actual launch lives in the __main__ guard.
    """
    return None
if __name__ == "oscaar.oscaarGUI" or __name__ == "__main__":
    '''
    If oscaarGUI is imported through oscaar, or if it is run
    as a standalone program, the frame will open.
    '''
    # wx.App(False) disables stdout/stderr redirection to a wx window;
    # the frame is constructed and then the event loop blocks until exit.
    app = wx.App(False)
    OscaarFrame(parent=None, objectID=-1)
    app.MainLoop()
    main()
|
bluegod/OSCAAR
|
oscaar/oscaarGUI.py
|
Python
|
mit
| 238,923
|
[
"VisIt"
] |
f5dc0137c3972f63a32374f8c3e4fa60b51ad3fa8b5b3824a4b0b42d3b758ccf
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016-2017, Zhijiang Yao, Jie Dong and Dongsheng Cao
# All rights reserved.
# This file is part of the PyBioMed.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the PyBioMed source tree.
"""
This file provides functions to convert descriptors list of multiple molecules (dicts) into CSV
If you have any questions, please feel free to contact us.
E-mail: biomed@csu.edu.cn
@File name: PyPreTools
@author: Jie Dong and Zhijiang Yao
"""
# Core Library modules
import csv
def DictToCSV(MultiDictList, csvOutPath):
    """
    Convert descriptors of multiple molecules (a list of dicts) into a CSV file.

    The header row is taken from the keys of the first dict; each subsequent
    row holds the values of one dict in that same key order (missing keys
    yield empty cells via dict.get).

    :param MultiDictList: a list contains multiple dicts
    :param csvOutPath: path to save CSV file
    :return: csvOutPath on success, otherwise the error message as a string
    """
    try:
        desHeader = list(MultiDictList[0].keys())
        desContent = [[molDict.get(key) for key in desHeader] for molDict in MultiDictList]
        # Fix: the original used the Python-2-only `file()` builtin and never
        # closed the handle on error; `with open(...)` is Python-3 compatible
        # and guarantees the file is closed. newline="" is required by the csv
        # module to avoid blank rows on Windows.
        with open(csvOutPath, "w", newline="") as f:
            writer = csv.writer(f)
            writer.writerow(tuple(desHeader))
            for row in desContent:
                writer.writerow(tuple(row))
        return csvOutPath
    except Exception as e:
        # Preserve original best-effort contract: report the error as a string.
        return str(e)
def ListToCSV(MultiList, csvOutPath, Name="Des"):
    """
    Convert descriptors of multiple molecules (a list of lists) into a CSV file.

    Header columns are auto-generated as Name1, Name2, ... based on the length
    of the first row.

    :param MultiList: a list contains multiple lists
    :param csvOutPath: path to save CSV file
    :param Name: prefix used for the generated header columns
    :return: csvOutPath on success, otherwise the error message as a string
    """
    try:
        desHeader = [str(Name) + str(index + 1) for index in range(len(MultiList[0]))]
        # Fix: the original used the Python-2-only `file()` builtin and never
        # closed the handle on error; `with open(...)` is Python-3 compatible
        # and newline="" avoids blank rows on Windows (csv module requirement).
        with open(csvOutPath, "w", newline="") as f:
            writer = csv.writer(f)
            writer.writerow(tuple(desHeader))
            for row in MultiList:
                writer.writerow(tuple(row))
        return csvOutPath
    except Exception as e:
        # Preserve original best-effort contract: report the error as a string.
        return str(e)
def TupleToCSV(MultiTupleList, csvOutPath, Name="Des"):
    """
    Convert descriptors list of multiple molecules (tuples) into CSV.

    :param MultiTupleList: a list containing multiple tuples, one per molecule
    :param csvOutPath: path to save CSV file
    :param Name: prefix for the generated column names (Name1, Name2, ...)
    :return: csvOutPath on success, or the error message string on failure
        (contract preserved from the original implementation)
    """
    try:
        # Synthesize 1-based column names from the length of the first tuple.
        desHeader = [str(Name) + str(index + 1) for index in range(len(MultiTupleList[0]))]
        # open() replaces the Python-2-only file() builtin (NameError on
        # Python 3); the with-block guarantees the handle is closed.
        with open(csvOutPath, "w") as f:
            writer = csv.writer(f)
            writer.writerow(desHeader)
            writer.writerows(MultiTupleList)
        return csvOutPath
    except Exception as e:
        # Preserve the original contract: return the error text.
        return str(e)
if __name__ == "__main__":

    # Demo driver: requires RDKit and PyBioMed to be installed; exercises
    # ListToCSV with fingerprint lists and DictToCSV with charge dicts.
    print("Only PyBioMed is successfully installed the code below can be run!")
    # uncomment below code as an example to use if you have successfully installed PyBioMed.
    print("-" * 10 + "START" + "-" * 10)

    from rdkit import Chem
    from PyBioMed.PyMolecule.charge import GetCharge

    # Example molecules as SMILES strings (smi5 is defined but unused here).
    smis = ["CCCC", "CCCCC", "CCCCCC", "CC(N)C(=O)O", "CC(N)C(=O)[O-].[Na+]"]
    smi5 = ["CCCCCC", "CCC(C)CC", "CC(C)CCC", "CC(C)C(C)C", "CCCCCN", "c1ccccc1N"]
    des_list2 = []

    from PyBioMed.PyMolecule.fingerprint import CalculatePubChemFingerprint

    # Fingerprints come back as list-like objects -> exercise ListToCSV.
    for index, smi in enumerate(smis):
        m = Chem.MolFromSmiles(smi)
        des_list2.append(CalculatePubChemFingerprint(m))
    print(des_list2)
    print(ListToCSV(des_list2, "reeeee.csv", "pubchem"))
    print("-" * 25)

    # Charge descriptors come back as dicts -> exercise DictToCSV.
    des_list = []
    for index, smi in enumerate(smis):
        m = Chem.MolFromSmiles(smi)
        des_list.append(GetCharge(m))
    print(des_list)
    print(DictToCSV(des_list, "reeee.csv"))
    print("-" * 25)
    print("-" * 10 + "END" + "-" * 10)
|
gadsbyfly/PyBioMed
|
PyBioMed/PyPretreat/PyPreTools.py
|
Python
|
bsd-3-clause
| 3,932
|
[
"RDKit"
] |
221c33ad305197e5d369b7bee1298971a9ea9e8354ef49498d7536fc4ff3b697
|
# coding: utf-8
from __future__ import unicode_literals
"""
Defines an abstract base class contract for Transformation object.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Sep 23, 2011"
import abc
from pymatgen.serializers.json_coders import PMGSONable
import six
class AbstractTransformation(six.with_metaclass(abc.ABCMeta, PMGSONable)):
    """
    Abstract transformation class.

    Subclasses must implement apply_transformation, inverse and
    is_one_to_many; use_multiprocessing may be overridden to let the
    transmuter apply the transformation in a subprocess pool.
    """

    @abc.abstractmethod
    def apply_transformation(self, structure):
        """
        Applies the transformation to a structure. Depending on whether a
        transformation is one-to-many, there may be an option to return a
        ranked list of structures.

        Args:
            structure:
                input structure
            return_ranked_list:
                Boolean stating whether or not multiple structures are
                returned. If return_ranked_list is a number, that number of
                structures is returned.

        Returns:
            depending on returned_ranked list, either a transformed structure
            or
            a list of dictionaries, where each dictionary is of the form
            {'structure' = .... , 'other_arguments'}
            the key 'transformation' is reserved for the transformation that
            was actually applied to the structure.
            This transformation is parsed by the alchemy classes for generating
            a more specific transformation history. Any other information will
            be stored in the transformation_parameters dictionary in the
            transmuted structure class.
        """
        # NOTE(review): the docstring documents a return_ranked_list argument
        # that this abstract signature does not declare; concrete subclasses
        # are presumably expected to add it -- confirm against implementations.
        return

    @abc.abstractproperty
    def inverse(self):
        """
        Returns the inverse transformation if available.
        Otherwise, should return None.
        """
        # NOTE(review): abc.abstractproperty is deprecated since Python 3.3;
        # stacking @property over @abc.abstractmethod is the modern spelling.
        return

    @abc.abstractproperty
    def is_one_to_many(self):
        """
        Determines if a Transformation is a one-to-many transformation. If a
        Transformation is a one-to-many transformation, the
        apply_transformation method should have a keyword arg
        "return_ranked_list" which allows for the transformed structures to be
        returned as a ranked list.
        """
        return False

    @property
    def use_multiprocessing(self):
        """
        Indicates whether the transformation can be applied by a
        subprocessing pool. This should be overridden to return True for
        transformations that the transmuter can parallelize.
        """
        return False
|
Dioptas/pymatgen
|
pymatgen/transformations/transformation_abc.py
|
Python
|
mit
| 2,653
|
[
"pymatgen"
] |
d1cc4d105ef45d9b179ba566d9ffb25d5d38208def61466faa0a39e1f9c5d7a8
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Module with non-generic exceptions classes."""
class QcdbException(Exception):
    """Base exception for all errors raised by the QCDB layer."""
class FeatureNotImplemented(QcdbException):
    """Error called for functions defined but not yet implemented.
    Also for functions defined that will never be implemented.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nQcdbException: Feature %s is not yet implemented.\n\n' % (msg))
class ValidationError(QcdbException):
    """Error called for problems with syntax input file. Prints
    error message *msg* to standard output stream.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nQcdbException: %s\n\n' % (msg))
class IncompleteAtomError(QcdbException):
    """Error raised when not all variables in an atom specification
    have been defined at compute time. May be a temporary situation
    so message not printed but appears as traceback when error persists.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
class ParsingValidationError(QcdbException):
    """Error called for problems with syntax from a QC output file. Prints
    error message *msg* to standard output stream.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nQcdbException: %s\n\n' % (msg))
class FragmentCountError(QcdbException):
    """Error called molecule has wrong number of fragments for method.
    Deliberately silent (printing was commented out in the original).
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        #print('\nQcdbException: %s\n\n' % (msg))
class BasisSetFileNotFound(QcdbException):
    """Error raised when a basis-set file cannot be located. Prints
    error message *msg* to standard output stream.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nQcdbException BasisSetFileNotFound: %s\n\n' % (msg))
class BasisSetNotFound(QcdbException):
    """Error raised when a requested basis set cannot be found.

    When *silent* is True the diagnostic message is not printed.
    """

    def __init__(self, msg, silent=False):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        if not silent:
            print('\nQcdbException BasisSetNotFound: %s\n\n' % (msg))
class BasisSetNotDefined(QcdbException):
    """Error raised when a required basis set has not been defined. Prints
    error message *msg* to standard output stream.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nQcdbException BasisSetNotDefined: %s\n\n' % (msg))
class Dftd3Error(QcdbException):
    """Error raised from the DFTD3 interface. Prints error message *msg*
    to standard output stream.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nDftd3Error: %s\n\n' % (msg))
class TestComparisonError(QcdbException):
    """Error called when a test case fails due to a failed
    compare_values() call. Prints error message *msg* to standard
    output stream.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nQcdbException: %s\n\n' % msg)
class MoleculeFormatError(QcdbException):
    """Error called when a Molecule.from_string contains unparsable lines."""

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
class FeatureDeprecated(QcdbException):
    """Error called for functions removed but still defined.
    Should suggest a replacement.
    """

    def __init__(self, msg):
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        self.msg = msg
        print('\nFeature deprecated: {}\n\n'.format(msg))
class UpgradeHelper(QcdbException):
    """Error called on previously valid syntax that now isn't and a
    simple syntax transition is possible.

    It is much preferred to leave the old syntax valid for a release
    cycle and have the old syntax raise a deprecation FutureWarning. For
    cases where the syntax just has to jump, this can be used to trap
    the old syntax at first error and suggest the new.
    """

    def __init__(self, old, new, version, elaboration):
        # Runtime message preserved verbatim from the original.
        msg = "Using `{}` instead of `{}` is obsolete as of {}.{}".format(old, new, version, elaboration)
        # super() replaces the legacy explicit base-class __init__ call.
        super().__init__(msg)
        print('\nQcdbException: %s\n\n' % (msg))
|
CDSherrill/psi4
|
psi4/driver/qcdb/exceptions.py
|
Python
|
lgpl-3.0
| 5,158
|
[
"Psi4"
] |
9428d486ac1ba07ff9f0a89015a693d57e481593eff5b725b303155090b79677
|
__RCSID__ = "$Id$"
from DIRAC import gLogger, S_OK, S_ERROR, siteName
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.StorageManagementSystem.Client.StorageManagerClient import StorageManagerClient
from DIRAC.Resources.Storage.StorageElement import StorageElement
from DIRAC.AccountingSystem.Client.Types.DataOperation import DataOperation
from DIRAC.AccountingSystem.Client.DataStoreClient import gDataStoreClient
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
import re
AGENT_NAME = 'StorageManagement/StageMonitorAgent'
class StageMonitorAgent( AgentModule ):
  """DIRAC agent that manages the StageSubmitted -> Staged transition of
  cache replicas recorded in the StorageManagementDB, and registers
  accounting records for completed staging operations.
  """

  def initialize( self ):
    """Create the stager client and configure the shifter proxy."""
    self.stagerClient = StorageManagerClient()
    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )
    return S_OK()

  def execute( self ):
    """Agent cycle entry point: cache proxy info, then monitor requests."""
    res = getProxyInfo( disableVOMS = True )
    if not res['OK']:
      return res
    # Cached for __newAccountingDict, which reports the proxy username.
    self.proxyInfoDict = res['Value']
    return self.monitorStageRequests()

  def monitorStageRequests( self ):
    """ This is the third logical task manages the StageSubmitted->Staged transition of the Replicas
    """
    res = self.__getStageSubmittedReplicas()
    if not res['OK']:
      gLogger.fatal( "StageMonitor.monitorStageRequests: Failed to get replicas from StorageManagementDB.", res['Message'] )
      return res
    if not res['Value']:
      gLogger.info( "StageMonitor.monitorStageRequests: There were no StageSubmitted replicas found" )
      return res
    seReplicas = res['Value']['SEReplicas']
    replicaIDs = res['Value']['ReplicaIDs']
    gLogger.info( "StageMonitor.monitorStageRequests: Obtained %s StageSubmitted replicas for monitoring." % len( replicaIDs ) )
    # Process each storage element independently; accounting is committed once.
    for storageElement, seReplicaIDs in seReplicas.iteritems():
      self.__monitorStorageElementStageRequests( storageElement, seReplicaIDs, replicaIDs )
    gDataStoreClient.commit()
    return S_OK()

  def __monitorStorageElementStageRequests( self, storageElement, seReplicaIDs, replicaIDs ):
    """Query prestage status of the given replicas on one storage element,
    update their DB state and register a DataOperation accounting record."""
    terminalReplicaIDs = {}  # replicaID -> reason for terminal failure
    oldRequests = []         # replicas not yet staged; to be retried
    stagedReplicas = []      # replicas confirmed resident in the disk cache
    # Since we are in a given SE, the LFN is a unique key
    lfnRepIDs = {}
    for replicaID in seReplicaIDs:
      lfn = replicaIDs[replicaID]['LFN']
      lfnRepIDs[lfn] = replicaID
    if lfnRepIDs:
      gLogger.info( "StageMonitor.__monitorStorageElementStageRequests: Monitoring %s stage requests for %s." % ( len( lfnRepIDs ),
                                                                                                                  storageElement ) )
    else:
      gLogger.warn( "StageMonitor.__monitorStorageElementStageRequests: No requests to monitor for %s." % storageElement )
      return
    oAccounting = DataOperation()
    oAccounting.setStartTime()
    res = StorageElement( storageElement ).getFileMetadata( lfnRepIDs )
    if not res['OK']:
      gLogger.error( "StageMonitor.__monitorStorageElementStageRequests: Completely failed to monitor stage requests for replicas.", res['Message'] )
      return
    prestageStatus = res['Value']
    accountingDict = self.__newAccountingDict( storageElement )
    # Failed metadata lookups: only a missing LFN is terminal.
    for lfn, reason in prestageStatus['Failed'].iteritems():
      accountingDict['TransferTotal'] += 1
      if re.search( 'File does not exist', reason ):
        gLogger.error( "StageMonitor.__monitorStorageElementStageRequests: LFN did not exist in the StorageElement", lfn )
        terminalReplicaIDs[lfnRepIDs[lfn]] = 'LFN did not exist in the StorageElement'
    # Successful lookups: 'Cached' (falling back to 'Accessible') tells
    # whether the file has reached the disk cache.
    for lfn, metadata in prestageStatus['Successful'].iteritems():
      if not metadata:
        continue
      staged = metadata.get( 'Cached', metadata['Accessible'] )
      if staged:
        accountingDict['TransferTotal'] += 1
        accountingDict['TransferOK'] += 1
        accountingDict['TransferSize'] += metadata['Size']
        stagedReplicas.append( lfnRepIDs[lfn] )
      elif staged is not None:
        oldRequests.append( lfnRepIDs[lfn] ) # only ReplicaIDs
    oAccounting.setValuesFromDict( accountingDict )
    oAccounting.setEndTime()
    gDataStoreClient.addRegister( oAccounting )
    # Update the states of the replicas in the database
    if terminalReplicaIDs:
      gLogger.info( "StageMonitor.__monitorStorageElementStageRequests: %s replicas are terminally failed." % len( terminalReplicaIDs ) )
      res = self.stagerClient.updateReplicaFailure( terminalReplicaIDs )
      if not res['OK']:
        gLogger.error( "StageMonitor.__monitorStorageElementStageRequests: Failed to update replica failures.", res['Message'] )
    if stagedReplicas:
      gLogger.info( "StageMonitor.__monitorStorageElementStageRequests: %s staged replicas to be updated." % len( stagedReplicas ) )
      res = self.stagerClient.setStageComplete( stagedReplicas )
      if not res['OK']:
        gLogger.error( "StageMonitor.__monitorStorageElementStageRequests: Failed to updated staged replicas.", res['Message'] )
      res = self.stagerClient.updateReplicaStatus( stagedReplicas, 'Staged' )
      if not res['OK']:
        gLogger.error( "StageMonitor.__monitorStorageElementStageRequests: Failed to insert replica status.", res['Message'] )
    if oldRequests:
      gLogger.info( "StageMonitor.__monitorStorageElementStageRequests: %s old requests will be retried." % len( oldRequests ) )
      res = self.__wakeupOldRequests( oldRequests )
      if not res['OK']:
        gLogger.error( "StageMonitor.__monitorStorageElementStageRequests: Failed to wakeup old requests.", res['Message'] )
    return

  def __newAccountingDict( self, storageElement ):
    """ Generate a new accounting Dict """
    accountingDict = {}
    accountingDict['OperationType'] = 'Stage'
    accountingDict['User'] = self.proxyInfoDict['username']
    accountingDict['Protocol'] = 'Stager'
    accountingDict['RegistrationTime'] = 0.0
    accountingDict['RegistrationOK'] = 0
    accountingDict['RegistrationTotal'] = 0
    accountingDict['FinalStatus'] = 'Successful'
    # Staging happens in place, so source and destination coincide.
    accountingDict['Source'] = storageElement
    accountingDict['Destination'] = storageElement
    accountingDict['ExecutionSite'] = siteName()
    accountingDict['TransferTotal'] = 0
    accountingDict['TransferOK'] = 0
    accountingDict['TransferSize'] = 0
    accountingDict['TransferTime'] = self.am_getPollingTime()
    return accountingDict

  def __getStageSubmittedReplicas( self ):
    """ This obtains the StageSubmitted replicas from the Replicas table and the RequestID from the StageRequests table """
    res = self.stagerClient.getCacheReplicas( {'Status':'StageSubmitted'} )
    if not res['OK']:
      gLogger.error( "StageMonitor.__getStageSubmittedReplicas: Failed to get replicas with StageSubmitted status.", res['Message'] )
      return res
    if not res['Value']:
      gLogger.debug( "StageMonitor.__getStageSubmittedReplicas: No StageSubmitted replicas found to process." )
      return S_OK()
    else:
      gLogger.debug( "StageMonitor.__getStageSubmittedReplicas: Obtained %s StageSubmitted replicas(s) to process." % len( res['Value'] ) )
    # Group replica IDs by their storage element.
    seReplicas = {}
    replicaIDs = res['Value']
    for replicaID, info in replicaIDs.iteritems():
      storageElement = info['SE']
      seReplicas.setdefault( storageElement, [] ).append( replicaID )
    # RequestID was missing from replicaIDs dictionary BUGGY?
    res = self.stagerClient.getStageRequests( {'ReplicaID':replicaIDs.keys()} )
    if not res['OK']:
      return res
    if not res['Value']:
      return S_ERROR( 'Could not obtain request IDs for replicas %s from StageRequests table' % ( replicaIDs.keys() ) )
    for replicaID, info in res['Value'].iteritems():
      replicaIDs[replicaID]['RequestID'] = info['RequestID']
    return S_OK( {'SEReplicas':seReplicas, 'ReplicaIDs':replicaIDs} )

  def __wakeupOldRequests( self, oldRequests ):
    """Resubmit stage requests that are still pending after RetryIntervalHour."""
    gLogger.info( "StageMonitor.__wakeupOldRequests: Attempting..." )
    retryInterval = self.am_getOption( 'RetryIntervalHour', 2 )
    res = self.stagerClient.wakeupOldRequests( oldRequests, retryInterval )
    if not res['OK']:
      gLogger.error( "StageMonitor.__wakeupOldRequests: Failed to resubmit old requests.", res['Message'] )
      return res
    return S_OK()
|
Andrew-McNab-UK/DIRAC
|
StorageManagementSystem/Agent/StageMonitorAgent.py
|
Python
|
gpl-3.0
| 8,412
|
[
"DIRAC"
] |
3ece9de34428850018bd5bd0225d4859e9ca8c87b61f33ed5757ffa656a6255f
|
#!/usr/bin/env python
import re
import math as m
import numpy as np
def read_frequences(filename):
    """Return (frequencies, q-point weights, cell volume) from <seedname>.phonon

    The CASTEP and related PHONON codes both generate a file containing
    phonon frequencies and related information.  This function returns the
    list of mode frequencies (cm^-1), the list of q-point weights, and the
    unit cell volume (A^3, from the determinant of the lattice vectors)
    parsed from a file assumed to be in .phonon format.
    """
    # Phonon frequencies sit on lines of their own: an integer mode index
    # followed by a real number -- the only lines in the file shaped like
    # that.  At the gamma point an IR activity may trail the frequency;
    # the optional non-capturing group skips it.
    get_freq_RE = re.compile(r"^\s+\d+\s+([\+\-]?\d+\.\d+)(?:\s+[\+\-]?\d+\.\d+)?\s*$",
                             re.MULTILINE)
    # q-point header lines carry the weight as their final real number.
    get_freq_weights_RE = re.compile(r"^\s+q-pt=\s+\d+\s+[\+\-]?\d+\.\d+\s+[\+\-]?\d+\.\d+\s+[\+\-]?\d+\.\d+\s+(\d+\.\d+)\s*$", re.MULTILINE)
    get_lattice_vecs_RE = re.compile(r"^\s+Unit cell vectors \(A\)\n\s*(-?\d+\.\d+)\s+(-?\d+\.\d+)\s+(-?\d+\.\d+)\s*\n\s*(-?\d+\.\d+)\s+(-?\d+\.\d+)\s+(-?\d+\.\d+)\s*\n\s*(-?\d+\.\d+)\s+(-?\d+\.\d+)\s+(-?\d+\.\d+)", re.MULTILINE)
    # Bug fix: the original called `fh.close` without parentheses, so the
    # file handle was never closed; the context manager guarantees closure.
    with open(filename, 'r') as fh:
        filelines = fh.read()
    wgts = [float(wgt) for wgt in get_freq_weights_RE.findall(filelines)]
    freqs = [float(freq) for freq in get_freq_RE.findall(filelines)]
    lvec_grps = get_lattice_vecs_RE.findall(filelines)[0]
    lvec = np.array([float(v) for v in lvec_grps]).reshape(3, 3)
    vol = np.linalg.det(lvec)
    return freqs, wgts, vol
def beta(T, N, freq, freqstar, wgt, wgtstar):
    """Return the reduced isotopic partition function ratio at temperature T.

    Forms the weighted product over q-points and phonon modes of the
    harmonic factor (vs/v) * (evs/(1-evsb)) * ((1-evb)/ev) built from the
    isotopically substituted (freqstar) and unsubstituted (freq) mode
    frequencies, then takes the 1/N-th root.

    Args:
        T: temperature in K.
        N: number of exchanged atoms (root of the product).
        freq, freqstar: mode frequencies in cm^-1, concatenated q-point by
            q-point; must have equal length.
        wgt, wgtstar: q-point weights; must be equal lists.
    """
    h = 4.135667516E-15   # Planck constant, eV.s
    k = 8.6173324E-5      # Boltzmann constant, eV/K
    cm2Hz = 0.03E12       # multiply a cm^-1 value by this to give Hz (1/s)
    # (the unused cm2ev constant from the original was removed)
    assert len(freq) == len(freqstar)
    assert wgt == wgtstar
    N_qpt = len(wgt)            # number of q-points
    N_fr = len(freq)//N_qpt     # modes per q-point
    beta = 1.0
    for i, Nqwt in enumerate(wgt):
        this_bt = 1.0
        for vs, v in zip(freqstar[i*N_fr:i*N_fr+N_fr], freq[i*N_fr:i*N_fr+N_fr]):
            # Skip non-positive (acoustic/imaginary) modes in either set,
            # exactly as the original pair of guards did.
            if v <= 0.0 or vs <= 0.0:
                continue
            vs = vs*cm2Hz
            v = v*cm2Hz
            evs = m.exp((-1.0*h*vs)/(2.0*k*T))
            evsb = m.exp((-1.0*h*vs)/(k*T))
            ev = m.exp((-1.0*h*v)/(2.0*k*T))
            evb = m.exp((-1.0*h*v)/(k*T))
            this_bt = this_bt * (vs/v) * (evs / (1.0-evsb)) * ((1.0-evb)/ev)
        # Each q-point contributes raised to its weight.
        beta = beta*this_bt**Nqwt
    beta = beta**(1.0/N)
    return beta
def beta_T(Ts, N, freq, freqstar, wgt, wgtstar):
    """Evaluate beta() for every temperature in Ts.

    Returns an array shaped (and typed) like Ts with the corresponding
    beta factors; all other arguments are passed straight through.
    """
    betas = np.zeros_like(Ts)
    for idx, temperature in enumerate(Ts):
        betas[idx] = beta(temperature, N, freq, freqstar, wgt, wgtstar)
    return betas
if __name__ == "__main__":
    import sys
    # Command line: calc_beta.py <normal.phonon> <substituted.phonon>
    v, w, vol = read_frequences(sys.argv[1])
    vs, ws, vol = read_frequences(sys.argv[2])
    # Scalar evaluation over a ladder of temperatures (K);
    # prints T, beta and 10^3 ln(beta).
    for T in [15, 30, 60, 120, 240, 300, 500, 670, 1000, 1500, 2000, 2500, 2600, 3000, 3500, 3700, 4000]:
        b = beta(T, 1, v, vs, w, ws)
        print(T, b, m.log(b)*1E3)
    # Or, the 'vectorised' version...
    Ts = np.array([15.0, 30.0, 60.0, 120.0, 240.0, 300.0, 500.0, 670.0, 1000.0, 1500.0, 2000.0, 2500.0, 2600.0, 3000.0, 3500.0, 3700.0, 4000.0])
    betas = beta_T(Ts, 1, v, vs, w, ws)
    print(Ts)
    print(betas)
    print(np.log(betas)*1E3)
|
andreww/isofrac
|
calc_beta.py
|
Python
|
bsd-3-clause
| 3,616
|
[
"CASTEP"
] |
e6e4a227071c4e0f3d58add1da34462ef15a0f9ea268a75d305caa55d3714c09
|
import numpy as np
import pylab
import pyfits
import sys
import glob
import os
from pyraf import iraf
import glob
import time
from astropy.io import fits as fits
print " -----------------------------------------------------------------"
print " . * . . + . . . "
print " . . . . \|/ "
print " * + ` - * - ` "
print " ` . ` + . * . /|\ "
print " + . . + . "
print " . . * * + . . * "
print " . . . . . . . * "
print " ---------------------------------------------- "
print " * Welcome to the Gemini data reduction pipeline! ` "
print " ---------------------------------------------- "
print " . _ * \|/ . . -*- + "
print " .` \\`. + -*- * . ` . * "
print " . |__''_| . /|\ + . + . | "
print " | | . . -o- "
print " | | ` . ` ,'`. * . | "
print " _.'-----'-._ * + ,'`. ,'`. ,' `. "
print " / \__.__.--._________' `. `. `._________ "
print "------------------------------------------------------------------"
############################
#find where user is working#
############################
mainpath = os.getcwd()
print ">>> What folder are you working in? "
print ">>> Be sure to put a / at the end!"
foldername=raw_input('>>> Your answer: ')
if mainpath+'/' != foldername:
print('>>> You are not in the correct directory! Quitting...')
sys.exit()
##############################################
#sort all fits files in the working directory#
##############################################
files = glob.glob('S*.fits')
sz = np.size(files)
wave = np.chararray(sz,itemsize=10)
obj = np.chararray(sz,itemsize=10)
grat = np.chararray(sz,itemsize=10)
mask = np.chararray(sz,itemsize=10)
obstype = np.chararray(sz,itemsize=10)
obsclass = np.chararray(sz,itemsize=10)
date = np.chararray(sz,itemsize=10)
for i in np.arange(0,sz):
tmp = pyfits.open(files[i])
hdr = tmp[0].header
wave[i] = (hdr['centwave'])
obj[i] = (hdr['object'])
grat[i] = (hdr['grating'])
mask[i] = (hdr['maskname'])
obstype[i] = (hdr['obstype'])
obsclass[i] = (hdr['obsclass'])
date[i] = (hdr['date'])
if mask[0] == 'IFU-R': setup = 'blue'
if mask[0] == 'IFU-2': setup = 'red'
#~~~~select either the red or blue setups
if setup == 'blue':
waves = np.array(['480.0', '490.0'])
slit = 'red'
vers = '1'
if setup == 'red':
waves = np.array(['635.0', '640.0'])
slit = 'both'
vers = '*'
#~~~~pull out acquisition images
selacq = (obsclass == 'acq')
acqimages = np.extract(selacq, files)
#~~~~pull out science images and define done parameters
selsci1 = (obsclass == 'science') & (wave == waves[0])
sciimage1 = np.extract(selsci1,files)
n = np.size(sciimage1)
if n <= 1:
sci11 = sciimage1[0]
sci11base = sci11.split('.')[0]
sci11done = 'eprg' + sci11base
else:
sci11 = sciimage1[0]
sci12 = sciimage1[1]
sci11base = sci11.split('.')[0]
sci12base = sci12.split('.')[0]
sci11done = 'eprg' + sci11base
sci12done = 'eprg' + sci12base
selsci2 = (obsclass == 'science') & (wave == waves[1])
sciimage2 = np.extract(selsci2,files)
n = np.size(sciimage2)
if n <= 1:
sci21 = sciimage2[0]
sci21base = sci21.split('.')[0]
sci21done = 'eprg' + sci21base
else:
sci21 = sciimage2[0]
sci22 = sciimage2[1]
sci21base = sci21.split('.')[0]
sci22base = sci22.split('.')[0]
sci21done = 'eprg' + sci21base
sci22done = 'eprg' + sci22base
#~~~~pull out arcs and define done parameters
selarc1 = (wave == waves[0]) & (obstype == 'ARC')
arc1 = np.extract(selarc1, files)
n = np.size(arc1)
if n <= 1:
arc11 = arc1[0]
arc11base = arc11.split('.')[0]
arc11done = 'eprg' + arc11base
arc11wt = 'teprg' + arc11base
else:
arc11 = arc1[0]
arc12 = arc1[1]
arc11base = arc11.split('.')[0]
arc12base = arc12.split('.')[0]
arc11done = 'eprg' + arc11base
arc12done = 'eprg' + arc12base
arc11wt = 'teprg' + arc11base #see if wavetran applied to arc
arc12wt = 'teprg' + arc12base
selarc2 = (wave == waves[1]) & (obstype == 'ARC')
arc2 = np.extract(selarc2, files)
n = np.size(arc2)
if n <= 1:
arc21 = arc2[0]
arc21base = arc21.split('.')[0]
arc21done = 'eprg' + arc21base
arc21wt = 'teprg' + arc21base
else:
arc21 = arc2[0]
arc22 = arc2[1]
arc21base = arc21.split('.')[0]
arc22base = arc22.split('.')[0]
arc21done = 'eprg' + arc21base
arc22done = 'eprg' + arc22base
arc21wt = 'teprg' + arc21
arc22wt = 'teprg' + arc22
#~~~~pull out flats and define done parameters
selflat1 = (wave == waves[0]) & (obstype == 'FLAT')
flat1 = np.extract(selflat1, files)[0]
flat1base = flat1.split('.')[0]
flat1done = 'eprg' + flat1base
selflat2 = (wave == waves[1]) & (obstype == 'FLAT')
flat2 = np.extract(selflat2, files)[0]
flat2base = flat2.split('.')[0]
flat2done = 'eprg' + flat2base
#~~~~pull out date
obsdate = date[0] #date should be same for all files
if obsdate == '2015-11-08': biasim='20151108bias.fits'
#~~~~pull out bias
bias1 = glob.glob('*bias*.fits')[0]
biasbase = bias1.split('.')[0]
#~~~~pull out standard star info
resps = glob.glob('resp*.fits')
respwave = np.chararray(2,itemsize=10)
for i in np.arange(0,2):
tmp = pyfits.open(resps[i])
hdr = tmp[0].header
respwave[i] = (hdr['centwave'])
selresp1 = (respwave == waves[0])
response1 = np.extract(selresp1,resps)[0]
selresp2 = (respwave == waves[1])
response2 = np.extract(selresp2,resps)[0]
sfunc = glob.glob('sfunction*.fits')
sfuncwave = np.chararray(2,itemsize=10)
for i in np.arange(0,2):
tmp = pyfits.open(sfunc[i])
hdr = tmp[0].header
sfuncwave[i] = (hdr['centwave'])
selsfunc1 = (sfuncwave == waves[0])
sfunction1 = np.extract(selsfunc1,sfunc)[0]
selsfunc2 = (sfuncwave == waves[1])
sfunction2 = np.extract(selsfunc2,sfunc)[0]
#~~~~find coord list
coordlistloc = glob.glob('smalllinelist.dat')[0]
#~~~~pull out galaxy name
selname = (obsclass == 'science') & (wave == waves[0])
galname = np.extract(selname, obj)
#~~~~print out file list
print "---------------------------------------------------"
print('file name file type cent. wavelength')
print(flat1 + ' flat ' + waves[0])
print(flat2 + ' flat ' + waves[1])
print(arc11 + ' arc ' + waves[0])
try:
arc12
except NameError:
pass
else:
print(arc12 + ' arc ' + waves[0])
print(arc21 + ' arc ' + waves[1])
try:
arc22
except NameError:
pass
else:
print(arc22 + ' arc ' + waves[1])
print(sci11 + ' science ' + waves[0])
try:
sci12
except NameError:
pass
else:
print(sci12 + ' science ' + waves[0])
print(sci21 + ' science ' + waves[1])
try:
sci22
except NameError:
pass
else:
print(sci22 + ' science ' + waves[1])
print(bias1 + ' bias ---')
print "---------------------------------------------------"
print('')
print('>>> Do these look correct to you?')
print('>>> 1 = yes')
print('>>> 0 = no')
ans = raw_input('>>> Your choice: ')
if ans == '0':
print('>>> Whoa, a bug already?! Shoot Elaine an email.')
sys.exit()
else:
print('>>> Alright, on to the reduction...')
time.sleep(2)
#################
#start reduction#
#################
#~~~~load iraf packages
iraf.load('gemini')
time.sleep(2)
iraf.load('gmos')
#~~~~flat 1 reduction
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting flat reduction and fiber identification')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def run_gfreduce(inimage,biasimage,refimage,slit,cr,wt,ss,inter,os,tr,bi,ex,fl,ap,weight,trac,rec,orde):
    """Thin wrapper over gemini.gmos.gfreduce with this pipeline's switches.

    The short parameters map one-to-one onto gfreduce flags:
    cr->fl_gscrrej, wt->fl_wavtran, ss->fl_skysub, inter->fl_inter,
    os->fl_over, tr->fl_trim, bi->fl_bias, ex->fl_extract, fl->fl_fluxcal,
    ap->fl_gsappwave; weight/trac/rec/orde -> weights/trace/recenter/order.

    NOTE(review): `refimage` is accepted but never forwarded -- confirm
    whether a ref= argument was intended.  The parameter named `os`
    shadows the `os` module inside this function (harmless here since the
    module is not used in the body, but confusing).  Relies on the
    module-level globals `foldername` and `galname`.
    """
    iraf.gemini.gmos.gfreduce(inimage, \
                              fl_inter=inter, \
                              fl_over=os, \
                              fl_trim=tr, \
                              fl_bias=bi, \
                              fl_fluxcal=fl, \
                              fl_extract=ex, \
                              fl_gscrrej=cr, \
                              fl_wavtran=wt,
                              fl_gsappwave=ap, \
                              fl_skysub=ss, \
                              slits=slit, \
                              rawpath=foldername, \
                              weights=weight, \
                              trace=trac, \
                              recenter=rec, \
                              order=orde, \
                              logfile=galname[0]+'.log', \
                              bias=biasimage)
if os.path.isfile('erg' + flat1) == False: #if the output file isn't done yet, do this step
run_gfreduce(flat1,bias1,'',slit,'no','no','no','yes','yes','yes','yes','yes','no','no','none','yes','yes','default')
print('>>> flat 1 complete, on to flat 2')
time.sleep(2)
else:
print('>>> flat 1 already done, moving on')
time.sleep(2)
if os.path.isfile('erg' + flat2) == False:
run_gfreduce(flat2,bias1,'',slit,'no','no','no','yes','yes','yes','yes','yes','no','no','none','yes','yes','default')
print('>>> flat 2 complete')
time.sleep(2)
else:
print('>>> flat 2 already done, moving on')
time.sleep(2)
#~~~~basic arc reduction
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting basic arc reduction')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def reduce_arcs(inimage, refimage, slit):
    """Basic arc reduction: overscan subtraction only (no bias, extraction,
    wavelength transform, sky subtraction or flux calibration), using
    `refimage` as the fiber-trace reference."""
    iraf.gemini.gmos.gfreduce(inimage, fl_fluxcal='no', fl_extract='no', fl_wavtran='no', fl_inter='no', ref=refimage, recenter='no', \
                              trace='no', fl_skysub='no', fl_gscrrej='no', fl_bias='no', fl_over='yes', \
                              order='1', weights='none', slits=slit)
if os.path.isfile('rg' + arc11) == False:
reduce_arcs(arc11base,flat1done,slit)
print('>>> arc 1 reduction complete')
time.sleep(2)
else:
print('>>> arc 1 already done, moving on')
time.sleep(2)
try:
arc12
except NameError:
pass
else:
if os.path.isfile('rg' + arc12) == False:
reduce_arcs(arc12base,flat1done,slit)
print('>>> arc 12 reduction complete')
time.sleep(2)
else:
print('>>> arc 12 already done, moving on')
time.sleep(2)
if os.path.isfile('rg' + arc21) == False:
reduce_arcs(arc21base,flat2done,slit)
print('>>> arc 2 reduction complete')
time.sleep(2)
else:
print('>>> arc 2 already done, moving on')
time.sleep(2)
try:
arc22
except NameError:
pass
else:
if os.path.isfile('rg' + arc22) == False:
reduce_arcs(arc22base,flat2done,slit)
print('>>> arc 22 reduction complete')
time.sleep(2)
else:
print('>>> arc 22 already done, moving on')
time.sleep(2)
#~~~~create bad pixel map for arcs and flats
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting bad pixel mask creation for flats and arcs')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def arith(inimage, outimage):
    """Multiply `inimage` by zero to create an empty (all-zero) mask image."""
    iraf.gemini.gemtools.gemarith(operand1=inimage, op='*',operand2='0',result=outimage,outtype='ushort')
def hedit(inimage):
    """Relabel an extension's EXTNAME header to 'DQ' (data quality)."""
    iraf.hedit(inimage, fields='EXTNAME', value='DQ', update='yes', ver='no')
def imrep(inimage):
    """Set the given pixel section to 1, flagging it bad in the DQ mask."""
    iraf.imreplace(inimage, value='1')
def addbpm(inimage,bpm):
    """Attach bad-pixel mask `bpm` to `inimage`.
    NOTE(review): `iraf.addbpm` is presumably provided by an external IRAF
    task/package loaded elsewhere -- confirm it is available at call time."""
    iraf.addbpm(inimage,bpm=bpm)
def fixgem(inimage,outimage):
    """Interpolate over pixels flagged in the DQ plane (bitmask 1) with
    gemfix, writing the cleaned image to `outimage`."""
    iraf.gemfix(inimage, outimages=outimage,method='fit1d', bitmask='1',order='5', fl_inter='no')
#~~~~start with the flats
if os.path.isfile('prg'+flat1) == False:
os.system('ds9 -mecube rg' + flat1 + ' &')
arith('rg' + flat1base, 'bpm_2x1_flat')
for i in np.arange(1,13):
hedit('bpm_2x1_flat[sci,'+str(i)+']')
print '>>> Look through the 12 extensions and record the bad columns/pixels'
print '>>> Are there bad values?'
print '>>> 1 = yes; 0 = no'
ans = raw_input('>>> Your answer: ')
if ans == '1':
done = False
while done == False:
ext = raw_input('>>> What extension number? ')
x = raw_input('>>> What x value(s)? ')
y = raw_input('>>> What y values(s)? ')
print ">>> You entered: extension = " + ext + ' x = ' + x + " y = " + y
print ">>> Are these correct? 1 = yes; 0 = no"
ans3 = raw_input('>>> Your choice: ')
if ans3 == '0': continue
imrep('bpm_2x1_flat[dq,'+str(ext)+']['+str(x)+','+str(y)+']')
print '>>> Are there more bad values?'
print '>>> 1 = yes; 0 = no '
ans2 = raw_input('>>> Your choice: ')
if ans2 == '1': done == False
if ans2 == '0': done == True
addbpm('rg'+flat1,'bpm_2x1_flat')
fixgem('rg'+flat1,'prg'+flat1)
print('>>> flat bad pixel map complete')
time.sleep(2)
else:
print('>>> flat bad pixel map already done, moving on')
time.sleep(2)
if os.path.isfile('prg'+flat2) == False:
addbpm('rg'+flat2,'bpm_2x1_flat')
fixgem('rg'+flat2,'prg'+flat2)
if os.path.isfile('prg'+arc11) == False:
os.system('ds9 -mecube rg' + arc11 + ' &')
arith('rg' + arc11base, 'bpm_2x1_arc')
for i in np.arange(1,13):
hedit('bpm_2x1_arc[sci,'+str(i)+']')
print '>>> Look through the 12 extensions and record the bad columns/pixels'
print '>>> Are there bad values?'
print '>>> 1 = yes; 0 = no'
ans = raw_input('>>> Your answer: ')
if ans == '1':
done = False
while(done == False):
ext = raw_input('>>> What extension number? ')
x = raw_input('>>> What x value(s)? ')
y = raw_input('>>> What y values(s)? ')
print ">>> You entered: extension = " + ext + ' x = ' + x + " y = " + y
print ">>> Are these correct? 1 = yes; 0 = no"
ans3 = raw_input('>>> Your choice: ')
if ans3 == '0': continue
imrep('bpm_2x1_arc[dq,'+str(ext)+']['+str(x)+','+str(y)+']')
print '>>> Are there more bad values?'
print '>>> 1 = yes; 0 = no '
ans2 = raw_input('>>> Your choice: ')
if ans2 == '1': done = False
if ans2 == '0': done = True
addbpm('rg'+arc11,'bpm_2x1_arc')
fixgem('rg'+arc11,'prg'+arc11)
print('>>> arc bad pixel map complete')
time.sleep(2)
else:
print('>>> arc bad pixel map already done, moving on')
time.sleep(2)
try:
arc12
except NameError:
pass
else:
if os.path.isfile('prg'+arc12) == False:
addbpm('rg'+arc12,'bpm_2x1_arc')
fixgem('rg'+arc12,'prg'+arc12)
if os.path.isfile('prg'+arc21) == False:
addbpm('rg'+arc21,'bpm_2x1_arc')
fixgem('rg'+arc21,'prg'+arc21)
try:
arc22
except NameError:
pass
else:
if os.path.isfile('prg'+arc22) == False:
addbpm('rg'+arc22,'bpm_2x1_arc')
fixgem('rg'+arc22,'prg'+arc22)
#~~~~extract the arcs
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting extraction of arcs')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def extract(inimage, refimage):
    """Extract fiber spectra from *inimage*, tracing against *refimage* (gfextract)."""
    iraf.gemini.gmos.gfextract(inimage,ref=refimage)
if os.path.isfile('eprg' + arc11) == False:
extract('prg' + arc11base, 'erg'+flat1base)
print('>>> arc 1 extraction done')
time.sleep(2)
else:
print('>>> arc 1 already done, moving on')
time.sleep(2)
if os.path.isfile('eprg' + arc21) == False:
extract('prg' + arc21base, 'erg'+flat2base)
print('>>> arc 2 extraction done')
time.sleep(2)
else:
print('>>> arc 2 already done, moving on')
time.sleep(2)
try:
arc12
except NameError:
pass
else:
if os.path.isfile('eprg'+arc12) == False:
extract('prg' + arc12base, 'erg'+flat2base)
print('>>> arc 12 extraction done')
time.sleep(2)
else:
print('>>> arc 12 already done, moving on')
time.sleep(2)
try:
arc22
except NameError:
pass
else:
if os.path.isfile('eprg'+arc22) == False:
extract('prg' + arc22base, 'erg'+flat2base)
print('>>> arc 22 extraction done')
time.sleep(2)
else:
print('>>> arc 22 already done, moving on')
time.sleep(2)
#~~~~find the wavelength solution for the arcs
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting wavelength solution')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def wave_cal(inimage):
    """Derive the interactive wavelength solution for *inimage* (gswavelength).

    nlost=10 tolerates losing up to 10 lines between rows; the line list
    comes from the globally-configured coordlistloc.
    """
    iraf.gemini.gmos.gswavelength(inimage, fl_inter=yes, nlost=10,coordli=coordlistloc)
if os.path.isfile(foldername+'database/ideprg'+arc11base+'_001') == False:
wave_cal('eprg' + arc11base)
print('>>> arc 1 wavelength solution done')
time.sleep(2)
else:
print('>>> arc 1 already done, moving on')
time.sleep(2)
try:
arc12
except NameError:
pass
else:
if os.path.isfile(foldername+'database/ideprg'+arc12base+'_001') == False:
wave_cal('eprg' + arc12base)
print('>>> arc 12 wavelength solution done')
time.sleep(2)
else:
print('>>> arc 12 already done, moving on')
time.sleep(2)
if os.path.isfile(foldername+'database/ideprg'+arc21base+'_001') == False:
wave_cal('eprg' + arc21base)
print('>>> arc 2 wavelength solution done')
time.sleep(2)
else:
print('>>> arc 2 already done, moving on')
time.sleep(2)
try:
arc22
except NameError:
pass
else:
if os.path.isfile(foldername+'database/ideprg'+arc22base+'_001') == False:
wave_cal('eprg' + arc22base)
print('>>> arc 22 wavelength solution done')
time.sleep(2)
else:
print('>>> arc 22 already done, moving on')
time.sleep(2)
print " if you stopped in between slits or need to finish first slit"
print " mv the database eprgS######_001 or _002 files to a temp name"
print " then run command outside of pipeline wave_cal(<<your file name>>"
print " see user guide for more details"
#~~~~wavelength calibration of the arcs
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting wavelength tranformation of arcs')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def wave_trans(inimage,wavtranimage):
    """Rectify *inimage* onto a linear wavelength grid using the solution
    derived from *wavtranimage* (gftransform)."""
    iraf.gemini.gmos.gftransform(inimage, wavtran=wavtranimage)
if os.path.isfile(arc11wt+'.fits') == False:
wave_trans('eprg' + arc11base, 'eprg' + arc11base)
print('>>> arc 1 wavelength transformation done')
time.sleep(2)
else:
print('>>> arc 1 already done, moving on')
time.sleep(2)
try:
arc12
except NameError:
pass
else:
if os.path.isfile(arc12wt+'.fits') == False:
wave_trans('eprg' + arc12base,'eprg' + arc12base)
print('>>> arc 12 wavelength transformation done')
time.sleep(2)
else:
print('>>> arc 12 already done, moving on')
time.sleep(2)
if os.path.isfile(arc21wt+'.fits') == False:
wave_trans('eprg' + arc21base,'eprg' + arc21base)
print('>>> arc 2 wavelength transformation done')
time.sleep(2)
else:
print('>>> arc 2 already done, moving on')
time.sleep(2)
try:
arc22
except NameError:
pass
else:
if os.path.isfile(arc22wt+'.fits') == False:
wave_trans('eprg' + arc22base,'eprg' + arc22base)
print('>>> arc 22 wavelength transformation done')
time.sleep(2)
else:
print('>>> arc 22 already done, moving on')
time.sleep(2)
#~~~~qe correct the flats
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting quantum efficiency correction of flats')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def qe_corr(inimage,refimage):
    """Apply the CCD quantum-efficiency correction to *inimage* (gqecorr).

    fl_keep=yes retains the correction images so the science frames can
    reuse them later in the pipeline.
    """
    iraf.gemini.gmos.gqecorr(inimage, refimages=refimage, fl_keep=yes)
if os.path.isfile('qprg' + flat1base + '.fits') == False:
qe_corr('prg' + flat1base, arc11done)
print('>>> flat 1 QE correction done')
time.sleep(2)
else:
print('>>> flat 1 already done, moving on')
time.sleep(2)
if os.path.isfile('qprg' + flat2base + '.fits') == False:
qe_corr('prg' + flat2base, arc21done)
print('>>> flat 2 QE correction done')
time.sleep(2)
else:
print('>>> flat 2 already done, moving on')
time.sleep(2)
#~~~~re-extract the qe-corrected flats
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting re-extraction of flats')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
if os.path.isfile('eqprg' + flat1base + '.fits') == False:
extract('qprg' + flat1base, flat1done)
print('>>> flat 1 re-extraction done')
time.sleep(2)
else:
print('>>> flat 1 already done, moving on')
time.sleep(2)
if os.path.isfile('eqprg' + flat2base + '.fits') == False:
extract('qprg' + flat2base, flat2done)
print('>>> flat 2 re-extraction done')
time.sleep(2)
else:
print('>>> flat 2 already done, moving on')
time.sleep(2)
#~~~~bias and overscan subtract the science data
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting bias and overscan subtraction of science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def sci_red_bias(inimage,slit):
    """Bias- and overscan-subtract (plus trim) a science frame (gfreduce).

    All later reduction stages (extraction, wavelength transform, sky
    subtraction, flux cal) are explicitly disabled here; they are run as
    separate pipeline steps.  Uses the module-level bias1 frame.
    """
    iraf.gemini.gmos.gfreduce(inimage, slits=slit, fl_inter='no', fl_over='yes', fl_trim='yes', fl_bias='yes', \
                              fl_flux='no', fl_gscrrej='no', fl_extract='no', fl_gsappwave='no', fl_wavtran='no', \
                              fl_skysub='no', weights='none', bias=bias1)
if os.path.isfile('rg' + sci11base + '.fits') == False:
sci_red_bias(sci11base,slit)
print('>>> sci 1 basic reduction done')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('rg' + sci12base + '.fits') == False:
sci_red_bias(sci12base,slit)
print('>>> sci 11 basic reduction done')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('rg' + sci21base + '.fits') == False:
sci_red_bias(sci21base,slit)
print('>>> sci 2 basic reduction done')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
try:
sci22
except NameError:
pass
else:
if os.path.isfile('rg' + sci22base + '.fits') == False:
sci_red_bias(sci22base,slit)
print('>>> sci 22 basic reduction done')
time.sleep(2)
else:
print('>>> sci 22 already done, moving on')
time.sleep(2)
#~~~~make bad pixel map for science data
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting bad pixel mask creation for science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
if os.path.isfile('prg'+sci11base+'.fits') == False:
os.system('ds9 -mecube rg' + sci11 + ' &')
arith('rg' + sci11base, 'bpm_2x1_sci')
for i in np.arange(1,13):
hedit('bpm_2x1_sci[sci,'+str(i)+']')
print '>>> Look through the 12 extensions and record the bad columns/pixels'
print '>>> Are there bad values?'
print '>>> 1 = yes; 0 = no'
ans = raw_input('>>> Your answer: ')
if ans == '1':
done = False
while done == False:
ext = raw_input('>>> What extension number? ')
x = raw_input('>>> What x value(s)? ')
y = raw_input('>>> What y values(s)? ')
imrep('bpm_2x1_sci[dq,'+str(ext)+']['+str(x)+','+str(y)+']')
print '>>> Are there more bad values?'
print '>>> 1 = yes; 0 = no '
ans2 = raw_input('>>> Your choice: ')
if ans2 == '1': done == False
if ans2 == '0': done == True
addbpm('rg'+sci11,'bpm_2x1_sci')
fixgem('rg'+sci11,'prg'+sci11)
print('>>> flat bad pixel map complete')
time.sleep(2)
else:
print('>>> flat bad pixel map already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
addbpm('rg'+sci12,'bpm_2x1_sci')
fixgem('rg'+sci12,'prg'+sci12)
if os.path.isfile('prg'+sci21base+'.fits') == False:
addbpm('rg'+sci21,'bpm_2x1_sci')
fixgem('rg'+sci21,'prg'+sci21)
try:
sci22
except NameError:
pass
else:
addbpm('rg'+sci22,'bpm_2x1_sci')
fixgem('rg'+sci22,'prg'+sci22)
#~~~~remove the cosmic rays
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting cosmic ray rejection on science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def gem_crspec(inimage, outimage):
    """Reject cosmic rays from *inimage*, writing *outimage* (gemcrspec, 3-sigma clip)."""
    iraf.gemini.gemcrspec(inimage, outimage,sigclip='3',fl_vardq='yes')
if os.path.isfile('xrg' + sci11base + '.fits') == False:
gem_crspec('rg' + sci11base,'xrg' + sci11base)
print('>>> sci 1 cosmic ray rejection done')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('xrg' + sci12base + '.fits') == False:
gem_crspec('rg' + sci12base,'xrg' + sci12base)
print('>>> sci 11 cosmic ray rejection done')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('xrg' + sci21base + '.fits') == False:
gem_crspec('rg' + sci21base,'xrg' + sci21base)
print('>>> sci 2 cosmic ray rejection done')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
try:
sci22
except NameError:
pass
else:
if os.path.isfile('xrg' + sci22base + '.fits') == False:
gem_crspec('rg' + sci22base,'xrg' + sci22base)
print('>>> sci 22 cosmic ray rejection done')
time.sleep(2)
else:
print('>>> sci 22 already done, moving on')
time.sleep(2)
#~~~~qe correct the science data
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting QE corretion on science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def sciqe_corr(inimage,refimage,corr):
    """QE-correct a science frame (gqecorr) reusing the precomputed
    correction image *corr* made during the flat QE step; fl_keep='no'
    discards any newly created correction images."""
    iraf.gemini.gmos.gqecorr(inimage, refimages=refimage, corrimages=corr,fl_keep='no')
if os.path.isfile('qxrg' + sci11base + '.fits') == False:
sciqe_corr('xrg' + sci11base,arc11done,'qecorr'+arc11done+'.fits')
print('>>> sci 1 QE correction done')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('qxrg' + sci12base + '.fits') == False:
sciqe_corr('xrg' + sci12base,arc11done,'qecorr'+arc12done+'.fits')
print('>>> sci 11 QE correction done')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('qxrg' + sci21base + '.fits') == False:
sciqe_corr('xrg' + sci21base,arc21done,'qecorr'+arc21done+'.fits')
print('>>> sci 2 QE correction done')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
try:
sci22
except NameError:
pass
else:
if os.path.isfile('qxrg' + sci22base + '.fits') == False:
sciqe_corr('xrg' + sci22base,arc21done,'qecorr'+arc21done+'.fits')
print('>>> sci 22 QE correction done')
time.sleep(2)
else:
print('>>> sci 22 already done, moving on')
time.sleep(2)
#~~~~Flat field and extract spectra!
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting flat fielding and extraction of science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def sci_extract(inimage,refimage,slit,response):
    """Flat-field (via *response*) and extract a science frame (gfreduce).

    Only the extraction stage is enabled; tracing is taken from
    *refimage* rather than re-derived (trace='no').  Logs to the
    per-galaxy log file.
    """
    iraf.gemini.gmos.gfreduce(inimage, fl_inter='no', fl_addmdf='no', fl_over='no', fl_trim='no', fl_bias='no', \
                              fl_gscrrej='no', fl_extract='yes', fl_wavtran='no', fl_sky='no', fl_fluxcal='no', \
                              slits=slit, trace='no', verb='yes', refer=refimage, response=response, weights='none', \
                              fl_qecorr='no', fl_crspec='no',logfile=galname[0]+'.log')
if os.path.isfile('eqxrg' + sci11base + '.fits') == False:
sci_extract('qxrg'+sci11base,'eqrg'+flat1base,slit,response1)
print('>>> sci 1 extraction done')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('eqxrg' + sci12base + '.fits') == False:
sci_extract('qxrg' + sci12base,'eqrg'+flat1base,slit,response1)
print('>>> sci 11 extraction done')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('eqxrg' + sci21base + '.fits') == False:
sci_extract('qxrg' + sci21base,'eqrg'+flat2base,slit,response2)
print('>>> sci 2 extraction done')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
try:
sci22
except NameError:
pass
else:
if os.path.isfile('eqxrg' + sci22base + '.fits') == False:
sci_extract('qxrg' + sci22base,'eqrg'+flat2base,slit,response2)
print('>>> sci 22 extraction done')
time.sleep(2)
else:
print('>>> sci 22 already done, moving on')
time.sleep(2)
#~~~~wavelength calibrate science frames!
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting wavelength calibration of science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def sci_wave(inimage,refimage):
    """Apply the arc wavelength solution *refimage* to a science frame (gftransform)."""
    iraf.gemini.gmos.gftransform(inimage, wavtran=refimage)
if os.path.isfile('teqxrg' + sci11base + '.fits') == False:
sci_wave('eqxrg' + sci11base,arc11done)
print('>>> sci 1 wavelength transformation done')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('teqxrg' + sci12base + '.fits') == False:
sci_wave('eqxrg' + sci12base,arc11done)
print('>>> sci 11 wavelength transformation done')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('teqxrg' + sci21base + '.fits') == False:
sci_wave('eqxrg' + sci21base,arc21done)
print('>>> sci 2 wavelength transformation done')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
try:
sci22
except NameError:
pass
else:
if os.path.isfile('teqxrg' + sci22base + '.fits') == False:
sci_wave('eqxrg' + sci22base,arc21done)
print('>>> sci 22 wavelength transformation done')
time.sleep(2)
else:
print('>>> sci 22 already done, moving on')
time.sleep(2)
#~~~~subtract the sky!
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting sky subtraction of science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def sci_sky(inimage):
    """Interactively sky-subtract a science frame (gfskysub), slit by slit."""
    iraf.gemini.gmos.gfskysub(inimage, fl_inter='yes', verb='yes', weight='none',sepslits='yes',logfile=galname[0]+'.log')
if os.path.isfile('steqxrg' + sci11base + '.fits') == False:
sci_sky('teqxrg' + sci11base)
print('>>> sci 1 sky subtraction done')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('steqxrg' + sci12base + '.fits') == False:
sci_sky('teqxrg' + sci12base)
print('>>> sci 11 sky subtraction done')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('steqxrg' + sci21base + '.fits') == False:
sci_sky('teqxrg' + sci21base)
print('>>> sci 2 sky subtraction done')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
try:
sci22
except NameError:
pass
else:
if os.path.isfile('steqxrg' + sci22base + '.fits') == False:
sci_sky('teqxrg' + sci22base)
print('>>> sci 22 sky subtraction done')
time.sleep(2)
else:
print('>>> sci 22 already done, moving on')
time.sleep(2)
#~~~~flux calibration!
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting flux calibration of science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def sci_flux(inimage,sfunc):
    """Flux-calibrate a science frame with sensitivity function *sfunc* (gscalibrate)."""
    iraf.gemini.gmos.gscalibrate(inimage, sfunctio=sfunc, observa="Gemini-South", fluxscal=1)
if os.path.isfile('csteqxrg' + sci11base + '.fits') == False:
sci_flux('steqxrg' + sci11base,sfunction1)
print('>>> sci 1 flux calibration done')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('csteqxrg' + sci12base + '.fits') == False:
sci_flux('steqxrg' + sci12base,sfunction1)
print('>>> sci 11 flux calibration done')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('csteqxrg' + sci21base + '.fits') == False:
sci_flux('steqxrg' + sci21base,sfunction2)
print('>>> sci 2 flux calibration done')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
# Flux-calibrate the optional sci22 exposure, if that variable was defined
# earlier in the run (same try/except-NameError guard as the other
# optional exposures).
try:
    sci22
except NameError:
    pass
else:
    if os.path.isfile('csteqxrg' + sci22base + '.fits') == False:
        sci_flux('steqxrg' + sci22base,sfunction2)  # fixed typo: was sci_fkux, which raised NameError at runtime
        print('>>> sci 22 flux calibration done')
        time.sleep(2)
    else:
        print('>>> sci 22 already done, moving on')
        time.sleep(2)
#~~~~create data cubes!
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('starting data cube creation from science')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
def sci_cube(inimage,outimage,sam):
    """Resample a calibrated frame into a data cube with spatial sampling *sam* arcsec (gfcube)."""
    iraf.gemini.gmos.gfcube(inimage, outimage=outimage, ssample=sam)
if os.path.isfile('dcsteqxrg' + sci11base + '.fits') == False:
sci_cube('csteqxrg' + sci11base,'dcsteqxrg' + sci11base, 0.2)
print('>>> sci 1 data cube made')
time.sleep(2)
else:
print('>>> sci 1 already done, moving on')
time.sleep(2)
try:
sci12
except NameError:
pass
else:
if os.path.isfile('dcsteqxrg' + sci12base + '.fits') == False:
sci_cube('csteqxrg' + sci12base,'dcsteqxrg' + sci12base,0.2)
print('>>> sci 11 data cube made')
time.sleep(2)
else:
print('>>> sci 11 already done, moving on')
time.sleep(2)
if os.path.isfile('dcsteqxrg' + sci21base + '.fits') == False:
sci_cube('csteqxrg' + sci21base,'dcsteqxrg' + sci21base,0.2)
print('>>> sci 2 data cube made')
time.sleep(2)
else:
print('>>> sci 2 already done, moving on')
time.sleep(2)
try:
sci22
except NameError:
pass
else:
if os.path.isfile('dcsteqxrg' + sci22base + '.fits') == False:
sci_cube('csteqxrg' + sci22base,'dcsteqxrg' + sci22base,0.2)
print('>>> sci 22 data cube made')
time.sleep(2)
else:
print('>>> sci 22 already done, moving on')
time.sleep(2)
#~~~~create data cubes!
print('%%%%%%%%%%%%%%%%%%%%%')
print('you are done for now!')
print('%%%%%%%%%%%%%%%%%%%%%')
time.sleep(2)
|
emsnyder/geminiDRpipeline
|
gemreductionpipeline.py
|
Python
|
mit
| 35,479
|
[
"Galaxy"
] |
760551e098d93d802384651a309d5b6c8281e1d9edbdffd6562dccce9a9c7098
|
"""
Acceptance tests for Studio related to the textbooks.
"""
from nose.plugins.attrib import attr
from common.test.acceptance.pages.lms.textbook_view import TextbookViewPage
from common.test.acceptance.pages.studio.textbook_upload import TextbookUploadPage
from common.test.acceptance.tests.helpers import disable_animations
from common.test.acceptance.tests.studio.base_studio_test import StudioCourseTest
class TextbooksTest(StudioCourseTest):
    """
    Test that textbook functionality is working properly on studio side
    """

    def setUp(self, is_staff=True):
        """
        Install a course with no content using a fixture.
        """
        super(TextbooksTest, self).setUp(is_staff)
        # Studio-side page object for the course created by the fixture.
        self.textbook_upload_page = TextbookUploadPage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.textbook_upload_page.visit()
        # CSS animations make Selenium waits flaky; disable for the run.
        disable_animations(self)
        # LMS-side page object used by the accessibility tests below.
        self.textbook_view_page = TextbookViewPage(self.browser, self.course_id)

    @attr(shard=9)
    def test_create_first_book_message(self):
        """
        Scenario: A message is displayed on the textbooks page when there are no uploaded textbooks
        Given that I am viewing the Textbooks page in Studio
        And I have not yet uploaded a textbook
        Then I see a message stating that I have not uploaded any textbooks
        """
        message = self.textbook_upload_page.get_element_text('.wrapper-content .no-textbook-content')
        self.assertIn("You haven't added any textbooks", message)

    @attr(shard=9)
    def test_new_textbook_upload(self):
        """
        Scenario: View Live link for textbook is correctly populated
        Given that I am viewing the Textbooks page in Studio
        And I have uploaded a PDF textbook and save the new textbook information
        Then the "View Live" link contains a link to the textbook in the LMS
        """
        self.textbook_upload_page.upload_new_textbook()
        self.assertTrue(self.textbook_upload_page.is_view_live_link_worked())

    @attr('a11y')
    def test_textbook_page_a11y(self):
        """
        Uploads a new textbook
        Runs an accessibility test on the textbook page in lms
        """
        self.textbook_upload_page.upload_new_textbook()
        self.textbook_view_page.visit()
        self.textbook_view_page.a11y_audit.config.set_rules({
            'ignore': [
                'section'  # AC-503
            ],
        })
        self.textbook_view_page.a11y_audit.check_for_accessibility_errors()

    @attr('a11y')
    def test_pdf_viewer_a11y(self):
        """
        Uploads a new textbook
        Runs an accessibility test on the pdf viewer frame in lms
        """
        self.textbook_upload_page.upload_new_textbook()
        self.textbook_view_page.visit()
        # The PDF is rendered inside an iframe; audit must run in that frame.
        self.textbook_view_page.switch_to_pdf_frame(self)
        self.textbook_view_page.a11y_audit.config.set_scope({
            'exclude': [
                '#viewer',  # PDF viewer (vendor file)
            ]
        })
        self.textbook_view_page.a11y_audit.config.set_rules({
            'ignore': [
                'color-contrast',  # will always fail because pdf.js converts pdf to divs with transparent text
            ],
        })
        self.textbook_view_page.a11y_audit.check_for_accessibility_errors()
|
Stanford-Online/edx-platform
|
common/test/acceptance/tests/studio/test_studio_textbooks.py
|
Python
|
agpl-3.0
| 3,412
|
[
"VisIt"
] |
54904931347d5fe26bb0fdb3ee863fd764524e115bd0044b6b69bd84a8d9b10a
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for modules/usage_reporting/*"""
__author__ = 'Mike Gainer (mgainer@google.com)'
import appengine_config
from common import utils as common_utils
from common import users
from models import data_removal as models_data_removal
from models import models
from models import student_work
from modules.analytics import student_aggregate
from modules.data_removal import data_removal
from modules.data_removal import removal_models
from modules.gitkit import gitkit
from modules.invitation import invitation
from modules.questionnaire import questionnaire
from modules.notifications import notifications
from modules.oeditor import oeditor
from modules.review import domain
from modules.review import peer
from modules.skill_map import competency
from modules.unsubscribe import unsubscribe
from tests.functional import actions
from google.appengine.ext import db
class DataRemovalTestBase(actions.TestBase):
    """Shared setup/helpers for data-removal tests: strips the optional
    wipeout module's registrations and drives the unenroll + removal flow."""

    def setUp(self):
        super(DataRemovalTestBase, self).setUp()
        # If the optional wipeout module is present, it will enforce some
        # requirements that we're not prepared to construct in core
        # Course Builder. Unilaterally remove its registrations.
        event_callbacks = models.StudentLifecycleObserver.EVENT_CALLBACKS
        for event_type in event_callbacks:
            if 'wipeout' in event_callbacks[event_type]:
                del event_callbacks[event_type]['wipeout']
        # NOTE(review): this second loop reads the same EVENT_CALLBACKS
        # registry as the loop above, so it is a no-op duplicate.  It may
        # have been intended to target a separate enqueue-side registry —
        # confirm before removing.
        enqueue_callbacks = models.StudentLifecycleObserver.EVENT_CALLBACKS
        for event_type in enqueue_callbacks:
            if 'wipeout' in enqueue_callbacks[event_type]:
                del enqueue_callbacks[event_type]['wipeout']

    def _unregister_and_request_data_removal(self, course):
        # Drive the two-step unenroll UI, checking the data-removal box.
        response = self.get('/%s/student/home' % course)
        response = self.click(response, 'Unenroll')
        self.assertIn('to unenroll from', response.body)
        form = response.form
        form['data_removal'].checked = True
        form.action = self.canonicalize(form.action, response)
        response = form.submit()
        # Second confirmation form.
        form = response.form
        form.action = self.canonicalize(form.action, response)
        response = form.submit('data_removal')
        self.assertIn('You have been unenrolled', response.body)

    def _complete_removal(self):
        # Remove indexed items, add to-do items for map/reduce.
        task_count = self.execute_all_deferred_tasks(
            models.StudentLifecycleObserver.QUEUE_NAME)
        # Add map/reduce jobs on default queue
        response = self.get(
            data_removal.DataRemovalCronHandler.URL,
            headers={'X-AppEngine-Cron': 'True'})
        # Run map/reduce jobs
        self.execute_all_deferred_tasks()
        # Final call to cron to do cleanup once map/reduce work items done.
        response = self.get(
            data_removal.DataRemovalCronHandler.URL,
            headers={'X-AppEngine-Cron': 'True'})
class DataRemovalTests(DataRemovalTestBase):
COURSE = 'data_removal_test'
NAMESPACE = 'ns_' + COURSE
ADMIN_EMAIL = 'admin@foo.com'
STUDENT_EMAIL = 'student@foo.com'
def setUp(self):
super(DataRemovalTests, self).setUp()
app_context = actions.simple_add_course(
self.COURSE, self.ADMIN_EMAIL, 'Data Removal Test')
    def test_cron_handler_requires_reserved_header(self):
        """The cron URL must 403 when the X-AppEngine-Cron header is absent."""
        response = self.get(
            data_removal.DataRemovalCronHandler.URL, expect_errors=True)
        self.assertEquals(403, response.status_int)
        self.assertEquals('Forbidden.', response.body)
    def test_cron_handler_ok_when_no_work_to_do(self):
        """The cron handler succeeds (200/'OK.') even with no pending removals."""
        response = self.get(
            data_removal.DataRemovalCronHandler.URL,
            headers={'X-AppEngine-Cron': 'True'})
        self.assertEquals(200, response.status_int)
        self.assertEquals('OK.', response.body)
    def test_non_removal_policy(self):
        """Under the indefinite-retention policy, unregistering leaves all
        student data intact and creates no removal-tracking records."""
        with actions.OverriddenEnvironment({
            data_removal.DATA_REMOVAL_SETTINGS_SECTION: {
                data_removal.REMOVAL_POLICY:
                data_removal.IndefiniteRetentionPolicy.get_name()}}):
            user = actions.login(self.STUDENT_EMAIL)
            actions.register(self, self.STUDENT_EMAIL, course=self.COURSE)
            self.execute_all_deferred_tasks(
                models.StudentLifecycleObserver.QUEUE_NAME)
            with common_utils.Namespace(self.NAMESPACE):
                # After registration, we should have a student object, and no
                # ImmediateRemovalState instance due to the don't-care policy.
                student = models.Student.get_by_user(user)
                self.assertIsNotNone(student)
                self.assertIsNone(
                    removal_models.ImmediateRemovalState.get_by_user_id(
                        student.user_id))
                r = removal_models.BatchRemovalState.get_by_user_ids(
                    [student.user_id])
                self.assertEqual([None], r)
            actions.unregister(self, course=self.COURSE)
            # Expect to see unregister event on queue -- register event handled
            # as part of actions.register.
            task_count = self.execute_all_deferred_tasks(
                models.StudentLifecycleObserver.QUEUE_NAME)
            self.assertEquals(1, task_count)
            # Running deletion cycle should have no effect. Verify that.
            self._complete_removal()
            with common_utils.Namespace(self.NAMESPACE):
                # After unregister, we should still have a student object.
                student = models.Student.get_by_user(user)
                self.assertIsNotNone(student)
                self.assertIsNone(
                    removal_models.ImmediateRemovalState.get_by_user_id(
                        student.user_id))
                r = removal_models.BatchRemovalState.get_by_user_ids(
                    [student.user_id])
                self.assertEqual([None], r)
    def test_immediate_removal_policy(self):
        """Walk the full immediate-removal lifecycle: register, unregister
        with data removal, queue processing, two cron passes, and verify
        every per-student record is eventually gone."""
        user = actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL, course=self.COURSE)
        self.execute_all_deferred_tasks(
            models.StudentLifecycleObserver.QUEUE_NAME)
        user_id = None
        with common_utils.Namespace(self.NAMESPACE):
            # After registration, we should have a student object, and
            # a ImmediateRemovalState instance, and no to-do deletion work.
            student = models.Student.get_by_user(user)
            self.assertIsNotNone(student)
            user_id = student.user_id
            removal_state = removal_models.ImmediateRemovalState.get_by_user_id(
                user_id)
            self.assertIsNotNone(removal_state)
            self.assertEquals(
                removal_models.ImmediateRemovalState.STATE_REGISTERED,
                removal_state.state)
            r = removal_models.BatchRemovalState.get_by_user_ids([user_id])
            self.assertEqual([None], r)
            # Add an EventEntity record so we can see it being removed.
            event = models.EventEntity(user_id=user_id, source='test')
            event.put()
        self._unregister_and_request_data_removal(self.COURSE)
        with common_utils.Namespace(self.NAMESPACE):
            # Immediately upon unregistration, we should still have the student
            # record, and removal state should be pending deletion.
            student = models.Student.get_by_user(user)
            self.assertIsNotNone(student)
            removal_state = removal_models.ImmediateRemovalState.get_by_user_id(
                user_id)
            self.assertIsNotNone(removal_state)
            self.assertEquals(
                removal_models.ImmediateRemovalState.STATE_DELETION_PENDING,
                removal_state.state)
            r = removal_models.BatchRemovalState.get_by_user_ids([user_id])
            self.assertEqual([None], r)
            events = list(models.EventEntity.all().run())
            self.assertEquals(1, len(events))
        # We should have gotten a to-do item on the task queue for student
        # removal.
        task_count = self.execute_all_deferred_tasks(
            models.StudentLifecycleObserver.QUEUE_NAME)
        self.assertEquals(1, task_count)  # unregistration.
        with common_utils.Namespace(self.NAMESPACE):
            # Having processed the queue item, the student record should now
            # be gone.
            students = list(models.Student.all().run())
            student = models.Student.get_by_user(user)
            self.assertIsNone(student)
            # But the record tracking removal should not yet be gone.
            removal_state = removal_models.ImmediateRemovalState.get_by_user_id(
                user_id)
            self.assertIsNotNone(removal_state)
            self.assertEquals(
                removal_models.ImmediateRemovalState.STATE_DELETION_PENDING,
                removal_state.state)
            # And we should have a to-do item for the cron batch cleanup.
            r = removal_models.BatchRemovalState.get_by_user_ids([user_id])
            self.assertEquals(1, len(r))
            removal_record = r[0]
            self.assertEquals(
                models_data_removal.Registry.get_unindexed_class_names(),
                removal_record.resource_types)
            # Events won't have been cleaned up yet; need cron batch to run.
            events = list(models.EventEntity.all().run())
            self.assertEquals(1, len(events))
        # Call the cron handler to schedule batch removal tasks. This, in
        # turn, will schedule map/reduce jobs to remove records for that
        # student.
        response = self.get(
            data_removal.DataRemovalCronHandler.URL,
            headers={'X-AppEngine-Cron': 'True'})
        self.assertEquals(200, response.status_int)
        self.assertEquals('OK.', response.body)
        # Run the map/reduce jobs to completion.
        self.execute_all_deferred_tasks()
        # We should now be nearly clean; in the normal course of events, only
        # the ImmediateRemovalState should still be present. However, due to
        # race conditions, an analysis map/reduce job may have finished in the
        # meantime, and written a per-student record. Add such a record.
        with common_utils.Namespace(self.NAMESPACE):
            student = models.Student.get_by_user(user)
            self.assertIsNone(student)
            removal_state = removal_models.ImmediateRemovalState.get_by_user_id(
                user_id)
            self.assertIsNotNone(removal_state)
            # Events should now be gone.
            events = list(models.EventEntity.all().run())
            self.assertEquals(0, len(events))
            # Cron batch cleanup record should be present, but now empty.
            r = removal_models.BatchRemovalState.get_by_user_ids([user_id])
            self.assertEquals(1, len(r))
            removal_record = r[0]
            self.assertEquals([], removal_record.resource_types)
            # Simulate map/reduce finishing asychronously & adding a per-student
            # item. Verify that the record is present so we know the test
            # below that checks for it being gone is correct.
            student_aggregate.StudentAggregateEntity(key_name=user_id).put()
            a = student_aggregate.StudentAggregateEntity.get_by_key_name(
                user_id)
            self.assertIsNotNone(a)
        # Call the cron handler one more time. Because the batch work item
        # is empty, this should do one more round of cleanup on items indexed
        # by user id.
        response = self.get(
            data_removal.DataRemovalCronHandler.URL,
            headers={'X-AppEngine-Cron': 'True'})
        self.assertEquals(200, response.status_int)
        self.assertEquals('OK.', response.body)
        # We should now have zero data about the user.
        with common_utils.Namespace(self.NAMESPACE):
            student = models.Student.get_by_user(user)
            self.assertIsNone(student)
            removal_state = removal_models.ImmediateRemovalState.get_by_user_id(
                user_id)
            self.assertIsNone(removal_state)
            # Events should now be gone.
            events = list(models.EventEntity.all().run())
            self.assertEquals(0, len(events))
            # Cron batch cleanup record should be gone.
            r = removal_models.BatchRemovalState.get_by_user_ids([user_id])
            self.assertEqual([None], r)
            # Map/reduce results should be gone.
            a = student_aggregate.StudentAggregateEntity.get_by_key_name(
                user_id)
            self.assertIsNone(a)
def test_multiple_students(self):
    """Removing one student's data must not disturb another student's data."""
    # Register two students.
    user = actions.login(self.STUDENT_EMAIL)
    actions.register(self, user.email(), course=self.COURSE)
    other_user = actions.login('student002@foo.com')
    actions.register(self, other_user.email(), course=self.COURSE)
    # Get IDs of those students; make an event for each.
    with common_utils.Namespace(self.NAMESPACE):
        student1_id = (
            models.Student.get_by_user(user).user_id)
        student2_id = (
            models.Student.get_by_user(other_user).user_id)
        models.EventEntity(user_id=student1_id, source='test').put()
        models.EventEntity(user_id=student2_id, source='test').put()
    # Unregister only the first student and run removal to completion.
    actions.login(self.STUDENT_EMAIL)
    self._unregister_and_request_data_removal(self.COURSE)
    self._complete_removal()
    # Unregistered student and his data are gone; still-registered
    # student's data is still present.
    with common_utils.Namespace(self.NAMESPACE):
        self.assertIsNone(models.Student.get_by_user(user))
        self.assertIsNotNone(models.Student.get_by_user(other_user))
        entities = list(models.EventEntity.all().run())
        self.assertEquals(1, len(entities))
        self.assertEquals(student2_id, entities[0].user_id)
def test_multiple_courses(self):
    """Per-course removal must not touch other courses or global records."""
    COURSE_TWO = 'course_two'
    COURSE_TWO_NS = 'ns_' + COURSE_TWO
    # Slight cheat: Register gitkit data remover manually, rather than
    # enabling the entire module, which disrupts normal functional test
    # user login handling
    gitkit.EmailMapping.register_for_data_removal()
    actions.simple_add_course(
        COURSE_TWO, self.ADMIN_EMAIL, 'Data Removal Test Two')
    user = actions.login(self.STUDENT_EMAIL)
    actions.register(self, user.email(), course=self.COURSE)
    actions.register(self, user.email(), course=COURSE_TWO)
    # Slight cheat: Rather than enabling gitkit module, just call
    # the method that will insert the EmailMapping row.
    gitkit.EmailUpdatePolicy.apply(user)
    # Global profile object(s) should now exist.
    profile = models.StudentProfileDAO.get_profile_by_user_id(
        user.user_id())
    self.assertIsNotNone(profile)
    email_policy = gitkit.EmailMapping.get_by_user_id(user.user_id())
    self.assertIsNotNone(email_policy)
    # Unregister from 'data_removal_test' course.
    self._unregister_and_request_data_removal(self.COURSE)
    self._complete_removal()
    # Student object should be gone from data_removal_test course, but
    # not from course_two.
    with common_utils.Namespace(self.NAMESPACE):
        self.assertIsNone(models.Student.get_by_user(user))
    with common_utils.Namespace(COURSE_TWO_NS):
        self.assertIsNotNone(models.Student.get_by_user(user))
    # Global profile object(s) should still exist, since the student is
    # still enrolled in course_two.
    profile = models.StudentProfileDAO.get_profile_by_user_id(
        user.user_id())
    self.assertIsNotNone(profile)
    email_policy = gitkit.EmailMapping.get_by_user_id(user.user_id())
    self.assertIsNotNone(email_policy)
    # Unregister from other course.
    self._unregister_and_request_data_removal(COURSE_TWO)
    self._complete_removal()
    # Both Student objects should now be gone.
    with common_utils.Namespace(self.NAMESPACE):
        self.assertIsNone(models.Student.get_by_user(user))
    with common_utils.Namespace(COURSE_TWO_NS):
        self.assertIsNone(models.Student.get_by_user(user))
    # Global profile object(s) should also be gone.
    profile = models.StudentProfileDAO.get_profile_by_user_id(
        user.user_id())
    self.assertIsNone(profile)
    email_policy = gitkit.EmailMapping.get_by_user_id(user.user_id())
    self.assertIsNone(email_policy)
def test_records_indexed_by_user_id_removed(self):
    """Test a sampling of types whose index is or contains the user ID."""
    user_id = None
    user = actions.login(self.STUDENT_EMAIL)
    actions.register(self, self.STUDENT_EMAIL, course=self.COURSE)
    # Create one instance of each record type, all keyed (directly or
    # indirectly) by the student's user ID.
    with common_utils.Namespace(self.NAMESPACE):
        student = models.Student.get_by_user(user)
        user_id = student.user_id
        # Indexed by user ID suffixed with a string.
        p = models.StudentPropertyEntity.create(student, 'foo')
        p.value = 'foo'
        p.put()
        invitation.InvitationStudentProperty.load_or_default(student).put()
        questionnaire.StudentFormEntity.load_or_default(
            student, 'a_form').put()
        # User ID plus skill name.
        cm = competency.BaseCompetencyMeasure.load(user_id, 1)
        cm.save()
        # models.student_work.KeyProperty - a foreign key to Student.
        reviewee_key = db.Key.from_path(models.Student.kind(), user_id)
        reviewer_key = db.Key.from_path(models.Student.kind(), 'xyzzy')
        student_work.Review(contents='abcdef', reviewee_key=reviewee_key,
                            reviewer_key=reviewer_key, unit_id='7').put()
        submission_key = student_work.Submission(
            unit_id='7', reviewee_key=reviewee_key).put()
        peer.ReviewSummary(submission_key=submission_key,
                           reviewee_key=reviewee_key, unit_id='7').put()
        peer.ReviewStep(
            submission_key=submission_key, reviewee_key=reviewee_key,
            reviewer_key=reviewer_key, unit_id='7',
            state=domain.REVIEW_STATE_ASSIGNED,
            assigner_kind=domain.ASSIGNER_KIND_AUTO).put()
        key_name = oeditor.EditorPrefsDao.create_key_name(
            user_id, 'dasboard?action=foo', 'frammis')
        editor_prefs = oeditor.EditorPrefsDto(key_name, {'this': 'that'})
        oeditor.EditorPrefsDao.save(editor_prefs)
    # Assure ourselves that we have all of the items we just added.
    with common_utils.Namespace(self.NAMESPACE):
        l = list(models.StudentPropertyEntity.all().run())
        self.assertEquals(2, len(l))  # 'foo', 'linear-course-completion'
        l = list(invitation.InvitationStudentProperty.all().run())
        self.assertEquals(1, len(l))
        l = list(questionnaire.StudentFormEntity.all().run())
        self.assertEquals(1, len(l))
        l = list(competency.CompetencyMeasureEntity.all().run())
        self.assertEquals(1, len(l))
        l = list(student_work.Review.all().run())
        self.assertEquals(1, len(l))
        l = list(student_work.Submission.all().run())
        self.assertEquals(1, len(l))
        l = list(peer.ReviewSummary.all().run())
        self.assertEquals(1, len(l))
        l = list(peer.ReviewStep.all().run())
        self.assertEquals(1, len(l))
        l = list(oeditor.EditorPrefsEntity.all().run())
        self.assertEquals(1, len(l))
    self._unregister_and_request_data_removal(self.COURSE)
    self._complete_removal()
    # Assure ourselves that all added items are now gone.
    with common_utils.Namespace(self.NAMESPACE):
        l = list(models.StudentPropertyEntity.all().run())
        self.assertEquals(0, len(l))
        l = list(invitation.InvitationStudentProperty.all().run())
        self.assertEquals(0, len(l))
        l = list(questionnaire.StudentFormEntity.all().run())
        self.assertEquals(0, len(l))
        l = list(competency.CompetencyMeasureEntity.all().run())
        self.assertEquals(0, len(l))
        l = list(student_work.Review.all().run())
        self.assertEquals(0, len(l))
        l = list(student_work.Submission.all().run())
        self.assertEquals(0, len(l))
        l = list(peer.ReviewSummary.all().run())
        self.assertEquals(0, len(l))
        l = list(peer.ReviewStep.all().run())
        self.assertEquals(0, len(l))
        l = list(oeditor.EditorPrefsEntity.all().run())
        self.assertEquals(0, len(l))
def test_remove_by_email(self):
    """Records keyed by email address (not user ID) are also removed."""
    user = actions.login(self.STUDENT_EMAIL)
    actions.register(self, user.email(), course=self.COURSE)
    # Create records keyed by the student's email address: a subscription
    # state and a notification (with payload) retained indefinitely.
    with common_utils.Namespace(self.NAMESPACE):
        sse = unsubscribe.SubscriptionStateEntity(
            key_name=user.email())
        sse.is_subscribed = True
        sse.save()
        notifications.Manager.send_async(
            user.email(), self.ADMIN_EMAIL, 'testemail',
            'Mary had a little lamb. She fed it beans and buns.',
            'Pets for Mary', '{"audit_trail": "yes"}',
            retention_policy=notifications.RetainAll)
        # Finish deferred tasks so notifications subsystem would have
        # deleted items if it were going to.  It shouldn't based on our
        # use of RetainAll above, but belt-and-suspenders.
        self.execute_all_deferred_tasks()
        l = list(notifications.Notification.all().run())
        self.assertEquals(1, len(l))
        l = list(notifications.Payload.all().run())
        self.assertEquals(1, len(l))
    self._unregister_and_request_data_removal(self.COURSE)
    self._complete_removal()
    # All email-keyed records should now be gone.
    with common_utils.Namespace(self.NAMESPACE):
        l = list(unsubscribe.SubscriptionStateEntity.all().run())
        self.assertEquals(0, len(l))
        l = list(notifications.Notification.all().run())
        self.assertEquals(0, len(l))
        l = list(notifications.Payload.all().run())
        self.assertEquals(0, len(l))
def test_subscription_state_entity_unsubscribed_not_removed(self):
    """An explicit email opt-out must survive data removal.

    If we deleted the unsubscribe marker along with everything else, we
    could start emailing someone who asked us not to.
    """
    student_user = actions.login(self.STUDENT_EMAIL)
    actions.register(self, student_user.email(), course=self.COURSE)
    # Record that this email address has explicitly unsubscribed.
    with common_utils.Namespace(self.NAMESPACE):
        subscription = unsubscribe.SubscriptionStateEntity(
            key_name=student_user.email())
        subscription.is_subscribed = False
        subscription.save()
    self._unregister_and_request_data_removal(self.COURSE)
    self._complete_removal()
    # The opt-out record is intentionally retained.
    with common_utils.Namespace(self.NAMESPACE):
        remaining = list(unsubscribe.SubscriptionStateEntity.all().run())
        self.assertEquals(1, len(remaining))
def test_unenroll_commanded_with_delete_requested(self):
    """An externally-commanded unenroll triggers pending data deletion."""
    user = actions.login(self.STUDENT_EMAIL)
    actions.register(self, self.STUDENT_EMAIL, course=self.COURSE)
    # Verify user is really there.
    with common_utils.Namespace(self.NAMESPACE):
        self.assertIsNotNone(models.Student.get_by_user_id(user.user_id()))
        # Mark user for data deletion upon unenroll
        removal_models.ImmediateRemovalState.set_deletion_pending(
            user.user_id())
        # Simulate the lifecycle queue delivering an unenroll command.
        response = self.post(
            models.StudentLifecycleObserver.URL,
            {'user_id': user.user_id(),
             'event':
                 models.StudentLifecycleObserver.EVENT_UNENROLL_COMMANDED,
             'timestamp': '2015-05-14T10:02:09.758704Z',
             'callbacks': appengine_config.CORE_MODULE_NAME},
            headers={'X-AppEngine-QueueName':
                     models.StudentLifecycleObserver.QUEUE_NAME})
        self.assertEquals(response.status_int, 200)
        self.assertEquals('', self.get_log())
        # User should still be there, but now marked unenrolled.
        student = models.Student.get_by_user_id(user.user_id())
        self.assertFalse(student.is_enrolled)
        # Running lifecycle queue should cause data removal to delete user.
        self.execute_all_deferred_tasks(
            models.StudentLifecycleObserver.QUEUE_NAME)
        # User should now be gone.
        self.assertIsNone(models.Student.get_by_user_id(user.user_id()))
class UserInteractionTests(DataRemovalTestBase):
    """Page-level tests of the unenroll / delete-my-data user flows.

    These tests drive the student-facing pages (the unenroll form, the
    deletion confirmation page, and the 'Delete My Data' footer link)
    through webtest, rather than calling removal APIs directly.
    """

    COURSE = 'data_removal_test'  # course created fresh for each test
    NAMESPACE = 'ns_' + COURSE    # datastore namespace of that course
    ADMIN_EMAIL = 'admin@foo.com'
    STUDENT_EMAIL = 'student@foo.com'

    def setUp(self):
        super(UserInteractionTests, self).setUp()
        app_context = actions.simple_add_course(
            self.COURSE, self.ADMIN_EMAIL, 'Data Removal Test')
        # All self.get()/self.post() URLs are relative to this course.
        self.base = '/' + self.COURSE

    def test_unregister_hides_deletion_option_when_no_deletion_policy(self):
        """With an indefinite-retention policy, no removal option shows."""
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        with actions.OverriddenEnvironment({
            data_removal.DATA_REMOVAL_SETTINGS_SECTION: {
                data_removal.REMOVAL_POLICY:
                    data_removal.IndefiniteRetentionPolicy.get_name()}}):
            response = self.get('student/unenroll')
            self.assertNotIn('Remove all my data from the course', response.body)

    def test_unregister_shows_deletion_option_when_deletion_possible(self):
        """With the default policy, the unenroll page offers deletion."""
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('student/unenroll')
        self.assertIn('Delete all associated data', response.body)

    def test_unregister_without_deletion_permits_reregistration(self):
        """A plain unenroll (no deletion) must not block re-registration."""
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        actions.unregister(self)
        actions.register(self, self.STUDENT_EMAIL)

    def _unregister_flow(self, response,
                         with_deletion_checked=False,
                         cancel_on_unregister=False,
                         cancel_on_deletion=False):
        """Drive the unenroll form in ``response`` and verify the outcome.

        Args:
            response: a page currently showing the unenroll form.
            with_deletion_checked: also tick the 'remove my data' checkbox.
            cancel_on_unregister: abandon the flow at the unenroll form.
            cancel_on_deletion: abandon at the deletion confirmation page.
        """
        # Unregistration happens unless the user cancels at either step;
        # deletion additionally requires the checkbox to have been ticked.
        unregistration_expected = (not cancel_on_unregister and
                                   not cancel_on_deletion)
        data_deletion_expected = (unregistration_expected and
                                  with_deletion_checked)
        # Caller should have arranged for us to be at the unregister form.
        form = response.form
        if with_deletion_checked:
            form['data_removal'].checked = True
        if cancel_on_unregister:
            response = self.click(response, "No")
            return response
        # Submit unregister form.
        response = form.submit()
        if with_deletion_checked:
            # Deletion requested: a confirmation page interposes.
            self.assertIn(
                'Once you delete your data, there is no way to recover it.',
                response.body)
            form = response.form
            form.action = self.canonicalize(form.action, response)
            if cancel_on_deletion:
                response = form.submit('cancel_removal').follow()
                self.assertIn(
                    'To leave the course permanently, click on Unenroll',
                    response.body)
            else:
                response = form.submit('data_removal')
                self.assertIn('You have been unenrolled', response.body)
        # Try to visit student's profile - verify can or can't depending
        # on whether we unregistered the student.
        response = self.get('student/home')
        if unregistration_expected:
            self.assertEquals(response.status_int, 302)
            self.assertEquals(response.location,
                              'http://localhost/%s/course' % self.COURSE)
            response = response.follow()
            self.assertEquals(response.status_int, 200)
        else:
            self.assertEquals(response.status_int, 200)  # not 302 to /course
        # Run pipeline which might do deletion to ensure we are really
        # giving the code the opportunity to do the deletion before we
        # check whether the Student is not gone.
        self._complete_removal()
        with common_utils.Namespace(self.NAMESPACE):
            user = users.get_current_user()
            if data_deletion_expected:
                self.assertIsNone(models.Student.get_by_user(user))
            else:
                self.assertIsNotNone(models.Student.get_by_user(user))

    def _deletion_flow_for_unregistered_student(self, response, cancel):
        """Walk the deletion confirmation page for an already-unenrolled user.

        Args:
            response: a page currently showing the deletion confirmation.
            cancel: if True, back out instead of confirming deletion.
        """
        self.assertIn(
            'Once you delete your data, there is no way to recover it.',
            response.body)
        form = response.form
        form.action = self.canonicalize(form.action, response)
        if cancel:
            response = form.submit('cancel_removal')
            # Verify redirected back to /course page in either case.
            self.assertEquals(response.status_int, 302)
            self.assertEquals(response.location,
                              'http://localhost/%s/student/home' % self.COURSE)
            response = response.follow()
            self.assertEquals(response.status_int, 302)
            self.assertEquals(response.location,
                              'http://localhost/%s/course' % self.COURSE)
            response = response.follow()
            self.assertEquals(response.status_int, 200)
        else:
            response = form.submit('data_removal')
            self.assertEquals(response.status_int, 302)
            self.assertEquals(response.location,
                              'http://localhost/%s/' % self.COURSE)
            response = response.follow()
            self.assertEquals(response.status_int, 200)
        # Run pipeline which might do deletion to ensure we are really
        # giving the code the opportunity to do the deletion before we
        # check whether the Student is not gone.
        self._complete_removal()
        with common_utils.Namespace(self.NAMESPACE):
            user = users.get_current_user()
            if cancel:
                self.assertIsNotNone(models.Student.get_by_user(user))
            else:
                self.assertIsNone(models.Student.get_by_user(user))

    def test_unregister_then_cancel_does_not_unregister_or_delete(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('student/unenroll')
        self._unregister_flow(response, cancel_on_unregister=True)

    def test_unregister_without_deletion_unregisters_but_does_not_delete(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('student/unenroll')
        self._unregister_flow(response)

    def test_unregister_with_deletion_then_cancel_does_not_unregister(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('student/unenroll')
        self._unregister_flow(response, with_deletion_checked=True,
                              cancel_on_deletion=True)

    def test_unregister_with_deletion_does_deletion(self):
        user = actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('student/unenroll')
        self._unregister_flow(response, with_deletion_checked=True)

    def test_delete_link_in_footer_not_present_when_not_logged_in(self):
        response = self.get('course')
        self.assertNotIn('Delete My Data', response.body)

    def test_delete_link_in_footer_not_present_when_not_registered(self):
        actions.login(self.STUDENT_EMAIL)
        response = self.get('course')
        self.assertNotIn('Delete My Data', response.body)

    def test_delete_link_when_registered_then_cancel_unregister(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('course')
        response = self.click(response, 'Delete My Data')
        self._unregister_flow(response, cancel_on_unregister=True)

    def test_delete_link_when_registered_then_cancel_deletion(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('course')
        response = self.click(response, 'Delete My Data')
        self._unregister_flow(response, with_deletion_checked=True,
                              cancel_on_deletion=True)

    def test_delete_link_when_registered_then_unregister_without_deletion(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('course')
        response = self.click(response, 'Delete My Data')
        self._unregister_flow(response)

    def test_delete_link_when_registered_then_proceed_and_delete(self):
        actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        response = self.get('course')
        response = self.click(response, 'Delete My Data')
        self._unregister_flow(response, with_deletion_checked=True)

    def test_delete_link_when_unregistered_then_cancel(self):
        user = actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        actions.unregister(self)
        response = self.get('course')
        response = self.click(response, 'Delete My Data')
        self._deletion_flow_for_unregistered_student(response, cancel=True)
        # Data not deleted, so the link remains available.
        response = self.get('course')
        self.assertIn('Delete My Data', response.body)

    def test_delete_link_when_unregistered_then_proceed(self):
        user = actions.login(self.STUDENT_EMAIL)
        actions.register(self, self.STUDENT_EMAIL)
        actions.unregister(self)
        response = self.get('course')
        response = self.click(response, 'Delete My Data')
        self._deletion_flow_for_unregistered_student(response, cancel=False)
        # Data deleted, so the link disappears.
        response = self.get('course')
        self.assertNotIn('Delete My Data', response.body)

    def test_reregistration_blocked_during_deletion(self):
        """Re-registration is refused until all removal stages finish."""

        def assert_cannot_register():
            response = self.get('register')
            self.assertIn('You cannot re-register for this course',
                          response.body)
            self.assertNotIn('What is your name?', response.body)

        user_id = None
        user = actions.login(self.STUDENT_EMAIL)
        actions.register(self, user.email())
        with common_utils.Namespace(self.NAMESPACE):
            # After registration, we should have a student object, and
            # a ImmediateRemovalState instance.
            student = models.Student.get_by_user(user)
            self.assertIsNotNone(student)
            user_id = student.user_id
        self._unregister_and_request_data_removal(self.COURSE)
        # On submitting the unregister form, the user's ImmediateRemovalState
        # will have been marked as deltion-in-progress, and so user cannot
        # re-register yet.
        assert_cannot_register()
        # Run the queue to do the cleanup of indexed items, and add the
        # work-to-do items for batched cleanup.
        self.execute_all_deferred_tasks(
            models.StudentLifecycleObserver.QUEUE_NAME)
        assert_cannot_register()
        # Run the cron job that launches the map/reduce jobs to clean up
        # bulk items.  Still not able to re-register.
        self.get(
            data_removal.DataRemovalCronHandler.URL,
            headers={'X-AppEngine-Cron': 'True'})
        assert_cannot_register()
        # Run the map/reduce jobs.  Bulk items should now be cleaned.
        self.execute_all_deferred_tasks()
        with common_utils.Namespace(self.NAMESPACE):
            student = models.Student.get_by_user(user)
            self.assertIsNone(student)
            removal_state = removal_models.ImmediateRemovalState.get_by_user_id(
                user_id)
            self.assertIsNotNone(removal_state)
        assert_cannot_register()
        # Run the cron job one more time.  When no bulk to-do items remain,
        # we then clean up the ImmediateRemovalState.  Re-registration should
        # now be possible.
        self.get(
            data_removal.DataRemovalCronHandler.URL,
            headers={'X-AppEngine-Cron': 'True'})
        with common_utils.Namespace(self.NAMESPACE):
            student = models.Student.get_by_user(user)
            self.assertIsNone(student)
            removal_state = removal_models.ImmediateRemovalState.get_by_user_id(
                user_id)
            self.assertIsNone(removal_state)
        actions.register(self, self.STUDENT_EMAIL)
|
GirlsCodePy/girlscode-coursebuilder
|
modules/data_removal/data_removal_tests.py
|
Python
|
gpl-3.0
| 37,916
|
[
"VisIt"
] |
bdfd260e977200055642c848df0f3337519232993034071d9bbaea61915c9ce0
|
"""
ExampleRawIO is a class of a fake example.
This is to be used when coding a new RawIO.
Rules for creating a new class:
1. Step 1: Create the main class
* Create a file in **neo/rawio/** that ends with "rawio.py"
* Create the class that inherits from BaseRawIO
* copy/paste all methods that need to be implemented.
* code hard! The main difficulty is `_parse_header()`.
In short, you have to create a mandatory dict that
contains channel information::
self.header = {}
self.header['nb_block'] = 2
self.header['nb_segment'] = [2, 3]
self.header['signal_streams'] = signal_streams
self.header['signal_channels'] = signal_channels
self.header['spike_channels'] = spike_channels
self.header['event_channels'] = event_channels
2. Step 2: RawIO test:
* create a file in neo/rawio/tests with the same name with "test_" prefix
* copy paste neo/rawio/tests/test_examplerawio.py and do the same
3. Step 3 : Create the neo.io class with the wrapper
* Create a file in neo/io/ that ends with "io.py"
* Create a class that inherits both your RawIO class and BaseFromRaw class
* copy/paste from neo/io/exampleio.py
4. Step 4: IO test
* create a file in neo/test/iotest with the same previous name with "test_" prefix
* copy/paste from neo/test/iotest/test_exampleio.py
"""
from .baserawio import (BaseRawIO, _signal_channel_dtype, _signal_stream_dtype,
_spike_channel_dtype, _event_channel_dtype)
import numpy as np
class ExampleRawIO(BaseRawIO):
"""
Class for "reading" fake data from an imaginary file.
For the user, it gives access to raw data (signals, event, spikes) as they
are in the (fake) file int16 and int64.
For a developer, it is just an example showing guidelines for someone who wants
to develop a new IO module.
Two rules for developers:
* Respect the :ref:`neo_rawio_API`
* Follow the :ref:`io_guiline`
This fake IO:
* has 2 blocks
* blocks have 2 and 3 segments
* has 2 signals streams of 8 channel each (sample_rate = 10000) so 16 channels in total
* has 3 spike_channels
* has 2 event channels: one has *type=event*, the other has
*type=epoch*
Usage:
>>> import neo.rawio
>>> r = neo.rawio.ExampleRawIO(filename='itisafake.nof')
>>> r.parse_header()
>>> print(r)
>>> raw_chunk = r.get_analogsignal_chunk(block_index=0, seg_index=0,
i_start=0, i_stop=1024, channel_names=channel_names)
>>> float_chunk = reader.rescale_signal_raw_to_float(raw_chunk, dtype='float64',
channel_indexes=[0, 3, 6])
>>> spike_timestamp = reader.spike_timestamps(spike_channel_index=0,
t_start=None, t_stop=None)
>>> spike_times = reader.rescale_spike_timestamp(spike_timestamp, 'float64')
>>> ev_timestamps, _, ev_labels = reader.event_timestamps(event_channel_index=0)
"""
extensions = ['fake']
rawmode = 'one-file'
def __init__(self, filename=''):
    """Create the fake reader; no actual file is opened, so any
    *filename* is accepted."""
    # note that this filename is used in self._source_name()
    BaseRawIO.__init__(self)
    self.filename = filename
def _source_name(self):
    """Return the string identifying the data source.

    This is used by __repr__.  For general cases self.filename is good,
    but a URL-based IO could mask part of the URL and keep only the
    main part.
    """
    return self.filename
def _parse_header(self):
    """Collect everything needed for fast random access into ``self.header``.

    This is the central part of a RawIO: we gather from the original
    format all information required for fast access at any place in the
    file.  ``_parse_header()`` may be slow, but
    ``_get_analogsignal_chunk()`` must be as fast as possible, so all
    bookkeeping happens once, here.
    """
    # create fake signals stream information
    signal_streams = []
    for c in range(2):
        name = f'stream {c}'
        stream_id = c
        signal_streams.append((name, stream_id))
    signal_streams = np.array(signal_streams, dtype=_signal_stream_dtype)

    # create fake signals channels information
    # This is mandatory!!!!
    # gain/offset/units are really important because
    # the scaling to real value will be done with that
    # The real signal will be evaluated as
    # `(raw_signal * gain + offset) * pq.Quantity(units)`
    signal_channels = []
    for c in range(16):
        ch_name = 'ch{}'.format(c)
        # our channel id is c+1 just for fun
        # Note that chan_id should be related to
        # original channel id in the file format
        # so that the end user should not be lost when reading datasets
        chan_id = c + 1
        sr = 10000.  # Hz
        dtype = 'int16'
        units = 'uV'
        gain = 1000. / 2 ** 16
        offset = 0.
        # stream_id indicates how to group channels; channels inside a
        # "stream" share the same characteristics
        # (sampling rate/dtype/t_start/units/...)
        stream_id = str(c // 8)
        signal_channels.append((ch_name, chan_id, sr, dtype, units, gain, offset, stream_id))
    signal_channels = np.array(signal_channels, dtype=_signal_channel_dtype)

    # A stream can contain signals with different physical units.
    # Here, the two last channels will have different units (pA).
    # Since AnalogSignals must have consistent units across channels,
    # this stream will be split in 2 parts on the neo.io level and
    # finally 3 AnalogSignals will be generated per Segment.
    signal_channels[-2:]['units'] = 'pA'

    # create fake units channels
    # This is mandatory!!!!
    # Note that if there is no waveform at all in the file
    # then wf_units/wf_gain/wf_offset/wf_left_sweep/wf_sampling_rate
    # can be set to any value because _spike_raw_waveforms
    # will return None
    spike_channels = []
    for c in range(3):
        unit_name = 'unit{}'.format(c)
        unit_id = '#{}'.format(c)
        wf_units = 'uV'
        wf_gain = 1000. / 2 ** 16
        wf_offset = 0.
        wf_left_sweep = 20
        wf_sampling_rate = 10000.
        spike_channels.append((unit_name, unit_id, wf_units, wf_gain,
                               wf_offset, wf_left_sweep, wf_sampling_rate))
    spike_channels = np.array(spike_channels, dtype=_spike_channel_dtype)

    # creating event/epoch channel
    # This is mandatory!!!!
    # In RawIO, events and epochs are dealt with the same way.
    event_channels = []
    event_channels.append(('Some events', 'ev_0', 'event'))
    event_channels.append(('Some epochs', 'ep_1', 'epoch'))
    event_channels = np.array(event_channels, dtype=_event_channel_dtype)

    # fill the header dict
    # This is mandatory!!!!!
    self.header = {}
    self.header['nb_block'] = 2
    self.header['nb_segment'] = [2, 3]
    self.header['signal_streams'] = signal_streams
    self.header['signal_channels'] = signal_channels
    self.header['spike_channels'] = spike_channels
    self.header['event_channels'] = event_channels

    # insert some annotations/array_annotations at some places.
    # At the neo.io level, IOs can add annotations to any object.
    # To keep this functionality with the wrapper BaseFromRaw you can
    # add annotations in a nested dict.
    # `_generate_minimal_annotations()` must be called to generate the
    # nested dict of annotations/array_annotations
    self._generate_minimal_annotations()
    # these pprint lines really help to understand the nested
    # (and sometimes complicated) dict:
    # from pprint import pprint
    # pprint(self.raw_annotations)

    # Until here all mandatory operations for setting up a rawio are
    # implemented.  The following lines provide additional, recommended
    # annotations for the final neo objects.
    for block_index in range(2):
        bl_ann = self.raw_annotations['blocks'][block_index]
        bl_ann['name'] = 'Block #{}'.format(block_index)
        bl_ann['block_extra_info'] = 'This is the block {}'.format(block_index)
        for seg_index in range([2, 3][block_index]):
            seg_ann = bl_ann['segments'][seg_index]
            seg_ann['name'] = 'Seg #{} Block #{}'.format(
                seg_index, block_index)
            seg_ann['seg_extra_info'] = 'This is the seg {} of block {}'.format(
                seg_index, block_index)
            for c in range(2):
                # Fixed: the original bound these chained assignments to an
                # unused local `sig_an`; write the annotations directly.
                seg_ann['signals'][c]['nickname'] = \
                    f'This stream {c} is from a subdevice'
                # add some array annotations (8 channels)
                seg_ann['signals'][c]['__array_annotations__']['impedance'] = \
                    np.random.rand(8) * 10000
            for c in range(3):
                spiketrain_an = seg_ann['spikes'][c]
                spiketrain_an['quality'] = 'Good!!'
                # add some array annotations
                num_spikes = self.spike_count(block_index, seg_index, c)
                spiketrain_an['__array_annotations__']['amplitudes'] = \
                    np.random.randn(num_spikes)
            for c in range(2):
                event_an = seg_ann['events'][c]
                if c == 0:
                    event_an['nickname'] = 'Miss Event 0'
                    # add some array annotations
                    num_ev = self.event_count(block_index, seg_index, c)
                    event_an['__array_annotations__']['button'] = ['A'] * num_ev
                elif c == 1:
                    event_an['nickname'] = 'MrEpoch 1'
def _segment_t_start(self, block_index, seg_index):
# this must return an float scale in second
# this t_start will be shared by all object in the segment
# except AnalogSignal
all_starts = [[0., 15.], [0., 20., 60.]]
return all_starts[block_index][seg_index]
def _segment_t_stop(self, block_index, seg_index):
# this must return an float scale in second
all_stops = [[10., 25.], [10., 30., 70.]]
return all_stops[block_index][seg_index]
def _get_signal_size(self, block_index, seg_index, stream_index):
# We generate fake data in which the two stream signals have the same shape
# across all segments (10.0 seconds)
# This is not the case for real data, instead you should return the signal
# size depending on the block_index and segment_index
# this must return an int = the number of sample
# Note that channel_indexes can be ignored for most cases
# except for several sampling rate.
return 100000
def _get_signal_t_start(self, block_index, seg_index, stream_index):
    """Return the signals' start time as a float, in seconds.

    This is often — but not always — equal to the segment t_start; in
    this fake reader the two coincide.  stream_index only matters for
    IOs with several sampling rates and is ignored here.
    """
    return self._segment_t_start(block_index, seg_index)
def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop,
stream_index, channel_indexes):
# this must return a signal chunk in a signal stream
# limited with i_start/i_stop (can be None)
# channel_indexes can be None (=all channel in the stream) or a list or numpy.array
# This must return a numpy array 2D (even with one channel).
# This must return the orignal dtype. No conversion here.
# This must as fast as possible.
# To speed up this call all preparatory calculations should be implemented
# in _parse_header().
# Here we are lucky: our signals is always zeros!!
# it is not always the case :)
# internally signals are int16
# convertion to real units is done with self.header['signal_channels']
if i_start is None:
i_start = 0
if i_stop is None:
i_stop = 100000
if i_start < 0 or i_stop > 100000:
# some check
raise IndexError("I don't like your jokes")
if channel_indexes is None:
nb_chan = 8
elif isinstance(channel_indexes, slice):
channel_indexes = np.arange(8, dtype='int')[channel_indexes]
nb_chan = len(channel_indexes)
else:
channel_indexes = np.asarray(channel_indexes)
if any(channel_indexes < 0):
raise IndexError('bad boy')
if any(channel_indexes >= 8):
raise IndexError('big bad wolf')
nb_chan = len(channel_indexes)
raw_signals = np.zeros((i_stop - i_start, nb_chan), dtype='int16')
return raw_signals
def _spike_count(self, block_index, seg_index, spike_channel_index):
# Must return the nb of spikes for given (block_index, seg_index, spike_channel_index)
# we are lucky: our units have all the same nb of spikes!!
# it is not always the case
nb_spikes = 20
return nb_spikes
def _get_spike_timestamps(self, block_index, seg_index, spike_channel_index,
                          t_start, t_stop):
    """Return spike timestamps, optionally clipped to [t_start, t_stop].

    In this IO, timestamps are internally coded as sample indexes on the
    10 kHz signal clock.  Every unit fires identically in every segment
    (an incredible neuron!) — real data is not like that.
    The same t_start/t_stop clipping must be used in
    _get_spike_raw_waveforms() so counts stay consistent.
    """
    ts_start = (self._segment_t_start(block_index, seg_index) * 10000)
    spike_timestamps = np.arange(0, 10000, 500) + ts_start
    # Restrict spikes to the given limits (expressed in seconds).
    # Fixed: clip each bound independently — the original required both
    # and raised TypeError (int(None * 10000)) when only one was given.
    if t_start is not None:
        spike_timestamps = spike_timestamps[
            spike_timestamps >= int(t_start * 10000)]
    if t_stop is not None:
        spike_timestamps = spike_timestamps[
            spike_timestamps <= int(t_stop * 10000)]
    return spike_timestamps
def _rescale_spike_timestamp(self, spike_timestamps, dtype):
# must rescale to second a particular spike_timestamps
# with a fixed dtype so the user can choose the precisino he want.
spike_times = spike_timestamps.astype(dtype)
spike_times /= 10000. # because 10kHz
return spike_times
def _get_spike_raw_waveforms(self, block_index, seg_index, spike_channel_index,
t_start, t_stop):
# this must return a 3D numpy array (nb_spike, nb_channel, nb_sample)
# in the original dtype
# this must be as fast as possible.
# the same clip t_start/t_start must be used in _spike_timestamps()
# If there there is no waveform supported in the
# IO them _spike_raw_waveforms must return None
# In our IO waveforms come from all channels
# they are int16
# convertion to real units is done with self.header['spike_channels']
# Here, we have a realistic case: all waveforms are only noise.
# it is not always the case
# we 20 spikes with a sweep of 50 (5ms)
# trick to get how many spike in the slice
ts = self._get_spike_timestamps(block_index, seg_index,
spike_channel_index, t_start, t_stop)
nb_spike = ts.size
np.random.seed(2205) # a magic number (my birthday)
waveforms = np.random.randint(low=-2**4, high=2**4, size=nb_spike * 50, dtype='int16')
waveforms = waveforms.reshape(nb_spike, 1, 50)
return waveforms
def _event_count(self, block_index, seg_index, event_channel_index):
# event and spike are very similar
# we have 2 event channels
if event_channel_index == 0:
# event channel
return 6
elif event_channel_index == 1:
# epoch channel
return 10
def _get_event_timestamps(self, block_index, seg_index, event_channel_index, t_start, t_stop):
# the main difference between spike channel and event channel
# is that for here we have 3 numpy array timestamp, durations, labels
# durations must be None for 'event'
# label must a dtype ='U'
# in our IO event are directly coded in seconds
seg_t_start = self._segment_t_start(block_index, seg_index)
if event_channel_index == 0:
timestamp = np.arange(0, 6, dtype='float64') + seg_t_start
durations = None
labels = np.array(['trigger_a', 'trigger_b'] * 3, dtype='U12')
elif event_channel_index == 1:
timestamp = np.arange(0, 10, dtype='float64') + .5 + seg_t_start
durations = np.ones((10), dtype='float64') * .25
labels = np.array(['zoneX'] * 5 + ['zoneZ'] * 5, dtype='U12')
if t_start is not None:
keep = timestamp >= t_start
timestamp, labels = timestamp[keep], labels[keep]
if durations is not None:
durations = durations[keep]
if t_stop is not None:
keep = timestamp <= t_stop
timestamp, labels = timestamp[keep], labels[keep]
if durations is not None:
durations = durations[keep]
return timestamp, durations, labels
def _rescale_event_timestamp(self, event_timestamps, dtype, event_channel_index):
# must rescale to second a particular event_timestamps
# with a fixed dtype so the user can choose the precisino he want.
# really easy here because in our case it is already seconds
event_times = event_timestamps.astype(dtype)
return event_times
def _rescale_epoch_duration(self, raw_duration, dtype, event_channel_index):
# really easy here because in our case it is already seconds
durations = raw_duration.astype(dtype)
return durations
|
INM-6/python-neo
|
neo/rawio/examplerawio.py
|
Python
|
bsd-3-clause
| 18,423
|
[
"NEURON"
] |
8b7c2f0857c4453b2be82dcaf536f8ab6eee948eaa3a79549a3a32072ac60ce6
|
#!/usr/bin/env python
# $Id: Compiler.py,v 1.148 2006/06/22 00:18:22 tavis_rudd Exp $
"""Compiler classes for Cheetah:
ModuleCompiler aka 'Compiler'
ClassCompiler
MethodCompiler
If you are trying to grok this code start with ModuleCompiler.__init__,
ModuleCompiler.compile, and ModuleCompiler.__getattr__.
Meta-Data
================================================================================
Author: Tavis Rudd <tavis@damnsimple.com>
Version: $Revision: 1.148 $
Start Date: 2001/09/19
Last Revision Date: $Date: 2006/06/22 00:18:22 $
"""
__author__ = "Tavis Rudd <tavis@damnsimple.com>"
__revision__ = "$Revision: 1.148 $"[11:-2]
import sys
import os
import os.path
from os.path import getmtime, exists
import re
import types
import time
import random
import warnings
import __builtin__
import copy
from Cheetah.Version import Version, VersionTuple
from Cheetah.SettingsManager import SettingsManager
from Cheetah.Parser import Parser, ParseError, specialVarRE, \
STATIC_CACHE, REFRESH_CACHE, SET_LOCAL, SET_GLOBAL,SET_MODULE
from Cheetah.Utils.Indenter import indentize # an undocumented preprocessor
from Cheetah import ErrorCatchers
from Cheetah import NameMapper
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
# Short aliases for the NameMapper lookup helpers; the generated template
# code refers to them by these abbreviated names (see genNameMapperVar).
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
# Bound once at import time; generated cache code calls currentTime().
currentTime=time.time
# Base class for compiler-specific errors raised by this module.
class Error(Exception): pass
# NOTE: these keys are consumed by both the Parser and the Compiler classes;
# see the section comments below for which subsystem each group controls.
DEFAULT_COMPILER_SETTINGS = {
    ## controlling the handling of Cheetah $placeholders
    'useNameMapper': True, # Unified dotted notation and the searchList
    'useSearchList': True, # if false, assume the first
    # portion of the $variable (before the first dot) is a global,
    # builtin, or local var that doesn't need
    # looking up in the searchlist BUT use
    # namemapper on the rest of the lookup
    'allowSearchListAsMethArg': True,
    'useAutocalling': True, # detect and call callable()'s, requires NameMapper
    'useStackFrames': True, # use NameMapper.valueFromFrameOrSearchList
    # rather than NameMapper.valueFromSearchList
    'useErrorCatcher':False,
    'alwaysFilterNone':True, # filter out None, before the filter is called
    'useFilters':True, # use str instead if =False
    'includeRawExprInFilterArgs':True,
    #'lookForTransactionAttr':False,
    'autoAssignDummyTransactionToSelf':False,
    'useKWsDictArgForPassingTrans':True,
    ## controlling the aesthetic appearance / behaviour of generated code
    'commentOffset': 1,
    # should shorter str constant chunks be printed using repr rather than ''' quotes
    'reprShortStrConstants': True,
    'reprNewlineThreshold':3,
    'outputRowColComments':True,
    # should #block's be wrapped in a comment in the template's output
    'includeBlockMarkers': False,
    'blockMarkerStart':('\n<!-- START BLOCK: ',' -->\n'),
    'blockMarkerEnd':('\n<!-- END BLOCK: ',' -->\n'),
    'defDocStrMsg':'Autogenerated by CHEETAH: The Python-Powered Template Engine',
    'setup__str__method': False,
    'mainMethodName':'respond',
    'mainMethodNameForSubclasses':'writeBody',
    'indentationStep': ' '*4,
    'initialMethIndentLevel': 2,
    'monitorSrcFile':False,
    'outputMethodsBeforeAttributes': True,
    ## customizing the #extends directive
    'autoImportForExtendsDirective':True,
    'handlerForExtendsDirective':None, # baseClassName = handler(compiler, baseClassName)
    # a callback hook for customizing the
    # #extends directive. It can manipulate
    # the compiler's state if needed.
    # also see allowExpressionsInExtendsDirective
    # input filtering/restriction
    # use lower case keys here!!
    'disabledDirectives':[], # list of directive keys, without the start token
    'enabledDirectives':[], # list of directive keys, without the start token
    'disabledDirectiveHooks':[], # callable(parser, directiveKey)
    'preparseDirectiveHooks':[], # callable(parser, directiveKey)
    'postparseDirectiveHooks':[], # callable(parser, directiveKey)
    'preparsePlaceholderHooks':[], # callable(parser)
    'postparsePlaceholderHooks':[], # callable(parser)
    # the above hooks don't need to return anything
    'expressionFilterHooks':[], # callable(parser, expr, exprType, rawExpr=None, startPos=None)
    # exprType is the name of the directive, 'psp', or 'placeholder'. all
    # lowercase. The filters *must* return the expr or raise an exception.
    # They can modify the expr if needed.
    'templateMetaclass':None, # strictly optional. Only works with new-style baseclasses
    'i18NFunctionName':'self.i18n',
    ## These are used in the parser, but I've put them here for the time being to
    ## facilitate separating the parser and compiler:
    'cheetahVarStartToken':'$',
    'commentStartToken':'##',
    'multiLineCommentStartToken':'#*',
    'multiLineCommentEndToken':'*#',
    'gobbleWhitespaceAroundMultiLineComments':True,
    'directiveStartToken':'#',
    'directiveEndToken':'#',
    'allowWhitespaceAfterDirectiveStartToken':False,
    'PSPStartToken':'<%',
    'PSPEndToken':'%>',
    'EOLSlurpToken':'#',
    'gettextTokens': ["_", "N_", "ngettext"],
    'allowExpressionsInExtendsDirective': False, # the default restricts it to
    # accepting dotted names
    'allowEmptySingleLineMethods': False,
    'allowNestedDefScopes': True,
    'allowPlaceholderFilterArgs': True,
    ## See Parser.initDirectives() for the use of the next 3
    #'directiveNamesAndParsers':{}
    #'endDirectiveNamesAndHandlers':{}
    #'macroDirectives':{}
    }
class GenUtils:
    """An abstract baseclass for the Compiler classes that provides methods that
    perform generic utility functions or generate pieces of output code from
    information passed in by the Parser baseclass. These methods don't do any
    parsing themselves.
    """
    def genTimeInterval(self, timeString):
        """Convert a cache-interval string such as '5s'/'2m'/'1h'/'3d'/'1w'
        into a float number of seconds. A bare number is treated as minutes.
        """
        ##@@ TR: need to add some error handling here
        if timeString[-1] == 's':
            interval = float(timeString[:-1])
        elif timeString[-1] == 'm':
            interval = float(timeString[:-1])*60
        elif timeString[-1] == 'h':
            interval = float(timeString[:-1])*60*60
        elif timeString[-1] == 'd':
            interval = float(timeString[:-1])*60*60*24
        elif timeString[-1] == 'w':
            interval = float(timeString[:-1])*60*60*24*7
        else: # default to minutes
            interval = float(timeString)*60
        return interval
    def genCacheInfo(self, cacheTokenParts):
        """Decipher a placeholder cachetoken
        """
        cacheInfo = {}
        if cacheTokenParts['REFRESH_CACHE']:
            cacheInfo['type'] = REFRESH_CACHE
            cacheInfo['interval'] = self.genTimeInterval(cacheTokenParts['interval'])
        elif cacheTokenParts['STATIC_CACHE']:
            cacheInfo['type'] = STATIC_CACHE
        return cacheInfo # is empty if no cache
    def genCacheInfoFromArgList(self, argList):
        """Build a cacheInfo dict from a #cache directive's (key, value)
        argument list; a 'timer' argument becomes the refresh 'interval'.
        """
        cacheInfo = {'type':REFRESH_CACHE}
        for key, val in argList:
            if val[0] in '"\'':
                # strip surrounding quotes from string-literal values
                val = val[1:-1]
            if key == 'timer':
                key = 'interval'
                val = self.genTimeInterval(val)
            cacheInfo[key] = val
        return cacheInfo
    def genCheetahVar(self, nameChunks, plain=False):
        """Generate Python code for a Cheetah $var, dispatching to either the
        NameMapper-based or the plain-Python code generator.
        """
        if nameChunks[0][0] in self.setting('gettextTokens'):
            self.addGetTextVar(nameChunks)
        if self.setting('useNameMapper') and not plain:
            return self.genNameMapperVar(nameChunks)
        else:
            return self.genPlainVar(nameChunks)
    def addGetTextVar(self, nameChunks):
        """Output something that gettext can recognize.
        This is a harmless side effect necessary to make gettext work when it
        is scanning compiled templates for strings marked for translation.
        @@TR: another marginally more efficient approach would be to put the
        output in a dummy method that is never called.
        """
        # @@TR: this should be in the compiler not here
        self.addChunk("if False:")
        self.indent()
        self.addChunk(self.genPlainVar(nameChunks[:]))
        self.dedent()
    def genPlainVar(self, nameChunks):
        """Generate Python code for a Cheetah $var without using NameMapper
        (Unified Dotted Notation with the SearchList).
        """
        nameChunks.reverse()
        chunk = nameChunks.pop()
        pythonCode = chunk[0] + chunk[2]
        while nameChunks:
            chunk = nameChunks.pop()
            pythonCode = (pythonCode + '.' + chunk[0] + chunk[2])
        return pythonCode
    def genNameMapperVar(self, nameChunks):
        """Generate valid Python code for a Cheetah $var, using NameMapper
        (Unified Dotted Notation with the SearchList).
        nameChunks = list of var subcomponents represented as tuples
        [ (name,useAC,remainderOfExpr),
        ]
        where:
        name = the dotted name base
        useAC = where NameMapper should use autocalling on namemapperPart
        remainderOfExpr = any arglist, index, or slice
        If remainderOfExpr contains a call arglist (e.g. '(1234)') then useAC
        is False, otherwise it defaults to True. It is overridden by the global
        setting 'useAutocalling' if this setting is False.
        EXAMPLE
        ------------------------------------------------------------------------
        if the raw Cheetah Var is
        $a.b.c[1].d().x.y.z
        nameChunks is the list
        [ ('a.b.c',True,'[1]'), # A
        ('d',False,'()'), # B
        ('x.y.z',True,''), # C
        ]
        When this method is fed the list above it returns
        VFN(VFN(VFFSL(SL, 'a.b.c',True)[1], 'd',False)(), 'x.y.z',True)
        which can be represented as
        VFN(B`, name=C[0], executeCallables=(useAC and C[1]))C[2]
        where:
        VFN = NameMapper.valueForName
        VFFSL = NameMapper.valueFromFrameOrSearchList
        VFSL = NameMapper.valueFromSearchList # optionally used instead of VFFSL
        SL = self.searchList()
        useAC = self.setting('useAutocalling') # True in this example
        A = ('a.b.c',True,'[1]')
        B = ('d',False,'()')
        C = ('x.y.z',True,'')
        C` = VFN( VFN( VFFSL(SL, 'a.b.c',True)[1],
        'd',False)(),
        'x.y.z',True)
        = VFN(B`, name='x.y.z', executeCallables=True)
        B` = VFN(A`, name=B[0], executeCallables=(useAC and B[1]))B[2]
        A` = VFFSL(SL, name=A[0], executeCallables=(useAC and A[1]))A[2]
        Note, if the compiler setting useStackFrames=False (default is true)
        then
        A` = VFSL([locals()]+SL+[globals(), __builtin__], name=A[0], executeCallables=(useAC and A[1]))A[2]
        This option allows Cheetah to be used with Psyco, which doesn't support
        stack frame introspection.
        """
        defaultUseAC = self.setting('useAutocalling')
        useSearchList = self.setting('useSearchList')
        nameChunks.reverse()
        name, useAC, remainder = nameChunks.pop()
        # Generate the innermost lookup (A`) first; the form depends on
        # whether the searchList and stack-frame introspection are enabled.
        if not useSearchList:
            firstDotIdx = name.find('.')
            if firstDotIdx != -1 and firstDotIdx < len(name):
                beforeFirstDot, afterDot = name[:firstDotIdx], name[firstDotIdx+1:]
                pythonCode = ('VFN(' + beforeFirstDot +
                              ',"' + afterDot +
                              '",' + repr(defaultUseAC and useAC) + ')'
                              + remainder)
            else:
                pythonCode = name+remainder
        elif self.setting('useStackFrames'):
            pythonCode = ('VFFSL(SL,'
                          '"'+ name + '",'
                          + repr(defaultUseAC and useAC) + ')'
                          + remainder)
        else:
            pythonCode = ('VFSL([locals()]+SL+[globals(), __builtin__],'
                          '"'+ name + '",'
                          + repr(defaultUseAC and useAC) + ')'
                          + remainder)
        ##
        # Wrap each remaining chunk in a VFN() call, innermost-out.
        while nameChunks:
            name, useAC, remainder = nameChunks.pop()
            pythonCode = ('VFN(' + pythonCode +
                          ',"' + name +
                          '",' + repr(defaultUseAC and useAC) + ')'
                          + remainder)
        return pythonCode
##################################################
## METHOD COMPILERS
class MethodCompiler(GenUtils):
def __init__(self, methodName, classCompiler,
initialMethodComment=None,
decorator=None):
self._settingsManager = classCompiler
self._classCompiler = classCompiler
self._moduleCompiler = classCompiler._moduleCompiler
self._methodName = methodName
self._initialMethodComment = initialMethodComment
self._setupState()
self._decorator = decorator
def setting(self, key):
return self._settingsManager.setting(key)
def _setupState(self):
self._indent = self.setting('indentationStep')
self._indentLev = self.setting('initialMethIndentLevel')
self._pendingStrConstChunks = []
self._methodSignature = None
self._methodDef = None
self._docStringLines = []
self._methodBodyChunks = []
self._cacheRegionsStack = []
self._callRegionsStack = []
self._captureRegionsStack = []
self._filterRegionsStack = []
self._isErrorCatcherOn = False
self._hasReturnStatement = False
self._isGenerator = False
def cleanupState(self):
"""Called by the containing class compiler instance
"""
pass
def methodName(self):
return self._methodName
def setMethodName(self, name):
self._methodName = name
## methods for managing indentation
def indentation(self):
return self._indent * self._indentLev
def indent(self):
self._indentLev +=1
def dedent(self):
if self._indentLev:
self._indentLev -=1
else:
raise Error('Attempt to dedent when the indentLev is 0')
## methods for final code wrapping
def methodDef(self):
if self._methodDef:
return self._methodDef
else:
return self.wrapCode()
__str__ = methodDef
def wrapCode(self):
self.commitStrConst()
methodDefChunks = (
self.methodSignature(),
'\n',
self.docString(),
self.methodBody() )
methodDef = ''.join(methodDefChunks)
self._methodDef = methodDef
return methodDef
def methodSignature(self):
return self._indent + self._methodSignature + ':'
def setMethodSignature(self, signature):
self._methodSignature = signature
def methodBody(self):
return ''.join( self._methodBodyChunks )
def docString(self):
if not self._docStringLines:
return ''
ind = self._indent*2
docStr = (ind + '"""\n' + ind +
('\n' + ind).join([ln.replace('"""',"'''") for ln in self._docStringLines]) +
'\n' + ind + '"""\n')
return docStr
## methods for adding code
def addMethDocString(self, line):
self._docStringLines.append(line.replace('%','%%'))
def addChunk(self, chunk):
self.commitStrConst()
chunk = "\n" + self.indentation() + chunk
self._methodBodyChunks.append(chunk)
def appendToPrevChunk(self, appendage):
self._methodBodyChunks[-1] = self._methodBodyChunks[-1] + appendage
def addWriteChunk(self, chunk):
self.addChunk('write(' + chunk + ')')
def addFilteredChunk(self, chunk, filterArgs=None, rawExpr=None, lineCol=None):
if filterArgs is None:
filterArgs = ''
if self.setting('includeRawExprInFilterArgs') and rawExpr:
filterArgs += ', rawExpr=%s'%repr(rawExpr)
if self.setting('alwaysFilterNone'):
if rawExpr and rawExpr.find('\n')==-1 and rawExpr.find('\r')==-1:
self.addChunk("_v = %s # %r"%(chunk, rawExpr))
if lineCol:
self.appendToPrevChunk(' on line %s, col %s'%lineCol)
else:
self.addChunk("_v = %s"%chunk)
if self.setting('useFilters'):
self.addChunk("if _v is not None: write(_filter(_v%s))"%filterArgs)
else:
self.addChunk("if _v is not None: write(str(_v))")
else:
if self.setting('useFilters'):
self.addChunk("write(_filter(%s%s))"%(chunk,filterArgs))
else:
self.addChunk("write(str(%s))"%chunk)
def _appendToPrevStrConst(self, strConst):
if self._pendingStrConstChunks:
self._pendingStrConstChunks.append(strConst)
else:
self._pendingStrConstChunks = [strConst]
def _unescapeCheetahVars(self, theString):
"""Unescape any escaped Cheetah \$vars in the string.
"""
token = self.setting('cheetahVarStartToken')
return theString.replace('\\' + token, token)
def _unescapeDirectives(self, theString):
"""Unescape any escaped Cheetah \$vars in the string.
"""
token = self.setting('directiveStartToken')
return theString.replace('\\' + token, token)
def commitStrConst(self):
"""Add the code for outputting the pending strConst without chopping off
any whitespace from it.
"""
if self._pendingStrConstChunks:
strConst = self._unescapeCheetahVars(''.join(self._pendingStrConstChunks))
strConst = self._unescapeDirectives(strConst)
self._pendingStrConstChunks = []
if not strConst:
return
if self.setting('reprShortStrConstants') and \
strConst.count('\n') < self.setting('reprNewlineThreshold'):
self.addWriteChunk( repr(strConst).replace('\\012','\\n'))
else:
strConst = strConst.replace('\\','\\\\').replace("'''","'\'\'\'")
if strConst[0] == "'":
strConst = '\\' + strConst
if strConst[-1] == "'":
strConst = strConst[:-1] + '\\' + strConst[-1]
self.addWriteChunk("'''" + strConst + "'''" )
def handleWSBeforeDirective(self):
"""Truncate the pending strCont to the beginning of the current line.
"""
if self._pendingStrConstChunks:
src = self._pendingStrConstChunks[-1]
BOL = max(src.rfind('\n')+1, src.rfind('\r')+1, 0)
if BOL < len(src):
self._pendingStrConstChunks[-1] = src[:BOL]
def isErrorCatcherOn(self):
return self._isErrorCatcherOn
def turnErrorCatcherOn(self):
self._isErrorCatcherOn = True
def turnErrorCatcherOff(self):
self._isErrorCatcherOn = False
# @@TR: consider merging the next two methods into one
def addStrConst(self, strConst):
self._appendToPrevStrConst(strConst)
def addRawText(self, text):
self.addStrConst(text)
def addMethComment(self, comm):
offSet = self.setting('commentOffset')
self.addChunk('#' + ' '*offSet + comm)
def addPlaceholder(self, expr, filterArgs, rawPlaceholder,
cacheTokenParts, lineCol,
silentMode=False):
cacheInfo = self.genCacheInfo(cacheTokenParts)
if cacheInfo:
cacheInfo['ID'] = repr(rawPlaceholder)[1:-1]
self.startCacheRegion(cacheInfo, lineCol, rawPlaceholder=rawPlaceholder)
if self.isErrorCatcherOn():
methodName = self._classCompiler.addErrorCatcherCall(
expr, rawCode=rawPlaceholder, lineCol=lineCol)
expr = 'self.' + methodName + '(localsDict=locals())'
if silentMode:
self.addChunk('try:')
self.indent()
self.addFilteredChunk(expr, filterArgs, rawPlaceholder, lineCol=lineCol)
self.dedent()
self.addChunk('except NotFound: pass')
else:
self.addFilteredChunk(expr, filterArgs, rawPlaceholder, lineCol=lineCol)
if self.setting('outputRowColComments'):
self.appendToPrevChunk(' # from line %s, col %s' % lineCol + '.')
if cacheInfo:
self.endCacheRegion()
def addSilent(self, expr):
self.addChunk( expr )
def addEcho(self, expr, rawExpr=None):
self.addFilteredChunk(expr, rawExpr=rawExpr)
def addSet(self, expr, exprComponents, setStyle):
if setStyle is SET_GLOBAL:
(LVALUE, OP, RVALUE) = (exprComponents.LVALUE,
exprComponents.OP,
exprComponents.RVALUE)
# we need to split the LVALUE to deal with globalSetVars
splitPos1 = LVALUE.find('.')
splitPos2 = LVALUE.find('[')
if splitPos1 > 0 and splitPos2==-1:
splitPos = splitPos1
elif splitPos1 > 0 and splitPos1 < max(splitPos2,0):
splitPos = splitPos1
else:
splitPos = splitPos2
if splitPos >0:
primary = LVALUE[:splitPos]
secondary = LVALUE[splitPos:]
else:
primary = LVALUE
secondary = ''
LVALUE = 'self._CHEETAH__globalSetVars["' + primary + '"]' + secondary
expr = LVALUE + ' ' + OP + ' ' + RVALUE.strip()
if setStyle is SET_MODULE:
self._moduleCompiler.addModuleGlobal(expr)
else:
self.addChunk(expr)
def addInclude(self, sourceExpr, includeFrom, isRaw):
self.addChunk('self._handleCheetahInclude(' + sourceExpr +
', trans=trans, ' +
'includeFrom="' + includeFrom + '", raw=' +
repr(isRaw) + ')')
def addWhile(self, expr, lineCol=None):
self.addIndentingDirective(expr, lineCol=lineCol)
def addFor(self, expr, lineCol=None):
self.addIndentingDirective(expr, lineCol=lineCol)
def addRepeat(self, expr, lineCol=None):
#the _repeatCount stuff here allows nesting of #repeat directives
self._repeatCount = getattr(self, "_repeatCount", -1) + 1
self.addFor('for __i%s in range(%s)' % (self._repeatCount,expr), lineCol=lineCol)
def addIndentingDirective(self, expr, lineCol=None):
if expr and not expr[-1] == ':':
expr = expr + ':'
self.addChunk( expr )
if lineCol:
self.appendToPrevChunk(' # generated from line %s, col %s'%lineCol )
self.indent()
def addReIndentingDirective(self, expr, dedent=True, lineCol=None):
self.commitStrConst()
if dedent:
self.dedent()
if not expr[-1] == ':':
expr = expr + ':'
self.addChunk( expr )
if lineCol:
self.appendToPrevChunk(' # generated from line %s, col %s'%lineCol )
self.indent()
def addIf(self, expr, lineCol=None):
"""For a full #if ... #end if directive
"""
self.addIndentingDirective(expr, lineCol=lineCol)
def addOneLineIf(self, expr, lineCol=None):
"""For a full #if ... #end if directive
"""
self.addIndentingDirective(expr, lineCol=lineCol)
def addTernaryExpr(self, conditionExpr, trueExpr, falseExpr, lineCol=None):
"""For a single-lie #if ... then .... else ... directive
<condition> then <trueExpr> else <falseExpr>
"""
self.addIndentingDirective(conditionExpr, lineCol=lineCol)
self.addFilteredChunk(trueExpr)
self.dedent()
self.addIndentingDirective('else')
self.addFilteredChunk(falseExpr)
self.dedent()
def addElse(self, expr, dedent=True, lineCol=None):
expr = re.sub(r'else[ \f\t]+if','elif', expr)
self.addReIndentingDirective(expr, dedent=dedent, lineCol=lineCol)
def addElif(self, expr, dedent=True, lineCol=None):
self.addElse(expr, dedent=dedent, lineCol=lineCol)
def addUnless(self, expr, lineCol=None):
self.addIf('if not (' + expr + ')')
def addClosure(self, functionName, argsList, parserComment):
argStringChunks = []
for arg in argsList:
chunk = arg[0]
if not arg[1] == None:
chunk += '=' + arg[1]
argStringChunks.append(chunk)
signature = "def " + functionName + "(" + ','.join(argStringChunks) + "):"
self.addIndentingDirective(signature)
self.addChunk('#'+parserComment)
def addTry(self, expr, lineCol=None):
self.addIndentingDirective(expr, lineCol=lineCol)
def addExcept(self, expr, dedent=True, lineCol=None):
self.addReIndentingDirective(expr, dedent=dedent, lineCol=lineCol)
def addFinally(self, expr, dedent=True, lineCol=None):
self.addReIndentingDirective(expr, dedent=dedent, lineCol=lineCol)
def addReturn(self, expr):
assert not self._isGenerator
self.addChunk(expr)
self._hasReturnStatement = True
def addYield(self, expr):
assert not self._hasReturnStatement
self._isGenerator = True
if expr.replace('yield','').strip():
self.addChunk(expr)
else:
self.addChunk('if _dummyTrans:')
self.indent()
self.addChunk('yield trans.response().getvalue()')
self.addChunk('trans = DummyTransaction()')
self.addChunk('write = trans.response().write')
self.dedent()
self.addChunk('else:')
self.indent()
self.addChunk(
'raise TypeError("This method cannot be called with a trans arg")')
self.dedent()
def addPass(self, expr):
self.addChunk(expr)
def addDel(self, expr):
self.addChunk(expr)
def addAssert(self, expr):
self.addChunk(expr)
def addRaise(self, expr):
self.addChunk(expr)
def addBreak(self, expr):
self.addChunk(expr)
def addContinue(self, expr):
self.addChunk(expr)
def addPSP(self, PSP):
self.commitStrConst()
autoIndent = False
if PSP[0] == '=':
PSP = PSP[1:]
if PSP:
self.addWriteChunk('_filter(' + PSP + ')')
return
elif PSP.lower() == 'end':
self.dedent()
return
elif PSP[-1] == '$':
autoIndent = True
PSP = PSP[:-1]
elif PSP[-1] == ':':
autoIndent = True
for line in PSP.splitlines():
self.addChunk(line)
if autoIndent:
self.indent()
def nextCacheID(self):
return ('_'+str(random.randrange(100, 999))
+ str(random.randrange(10000, 99999)))
def startCacheRegion(self, cacheInfo, lineCol, rawPlaceholder=None):
# @@TR: we should add some runtime logging to this
ID = self.nextCacheID()
interval = cacheInfo.get('interval',None)
test = cacheInfo.get('test',None)
customID = cacheInfo.get('id',None)
if customID:
ID = customID
varyBy = cacheInfo.get('varyBy', repr(ID))
self._cacheRegionsStack.append(ID) # attrib of current methodCompiler
# @@TR: add this to a special class var as well
self.addChunk('')
self.addChunk('## START CACHE REGION: ID='+ID+
'. line %s, col %s'%lineCol + ' in the source.')
self.addChunk('_RECACHE_%(ID)s = False'%locals())
self.addChunk('_cacheRegion_%(ID)s = self.getCacheRegion(regionID='%locals()
+ repr(ID)
+ ', cacheInfo=%r'%cacheInfo
+ ')')
self.addChunk('if _cacheRegion_%(ID)s.isNew():'%locals())
self.indent()
self.addChunk('_RECACHE_%(ID)s = True'%locals())
self.dedent()
self.addChunk('_cacheItem_%(ID)s = _cacheRegion_%(ID)s.getCacheItem('%locals()
+varyBy+')')
self.addChunk('if _cacheItem_%(ID)s.hasExpired():'%locals())
self.indent()
self.addChunk('_RECACHE_%(ID)s = True'%locals())
self.dedent()
if test:
self.addChunk('if ' + test + ':')
self.indent()
self.addChunk('_RECACHE_%(ID)s = True'%locals())
self.dedent()
self.addChunk('if (not _RECACHE_%(ID)s) and _cacheItem_%(ID)s.getRefreshTime():'%locals())
self.indent()
#self.addChunk('print "DEBUG"+"-"*50')
self.addChunk('try:')
self.indent()
self.addChunk('_output = _cacheItem_%(ID)s.renderOutput()'%locals())
self.dedent()
self.addChunk('except KeyError:')
self.indent()
self.addChunk('_RECACHE_%(ID)s = True'%locals())
#self.addChunk('print "DEBUG"+"*"*50')
self.dedent()
self.addChunk('else:')
self.indent()
self.addWriteChunk('_output')
self.addChunk('del _output')
self.dedent()
self.dedent()
self.addChunk('if _RECACHE_%(ID)s or not _cacheItem_%(ID)s.getRefreshTime():'%locals())
self.indent()
self.addChunk('_orig_trans%(ID)s = trans'%locals())
self.addChunk('trans = _cacheCollector_%(ID)s = DummyTransaction()'%locals())
self.addChunk('write = _cacheCollector_%(ID)s.response().write'%locals())
if interval:
self.addChunk(("_cacheItem_%(ID)s.setExpiryTime(currentTime() +"%locals())
+ str(interval) + ")")
def endCacheRegion(self):
ID = self._cacheRegionsStack.pop()
self.addChunk('trans = _orig_trans%(ID)s'%locals())
self.addChunk('write = trans.response().write')
self.addChunk('_cacheData = _cacheCollector_%(ID)s.response().getvalue()'%locals())
self.addChunk('_cacheItem_%(ID)s.setData(_cacheData)'%locals())
self.addWriteChunk('_cacheData')
self.addChunk('del _cacheData')
self.addChunk('del _cacheCollector_%(ID)s'%locals())
self.addChunk('del _orig_trans%(ID)s'%locals())
self.dedent()
self.addChunk('## END CACHE REGION: '+ID)
self.addChunk('')
def nextCallRegionID(self):
return self.nextCacheID()
def startCallRegion(self, functionName, args, lineCol, regionTitle='CALL'):
class CallDetails: pass
callDetails = CallDetails()
callDetails.ID = ID = self.nextCallRegionID()
callDetails.functionName = functionName
callDetails.args = args
callDetails.lineCol = lineCol
callDetails.usesKeywordArgs = False
self._callRegionsStack.append((ID, callDetails)) # attrib of current methodCompiler
self.addChunk('## START %(regionTitle)s REGION: '%locals()
+ID
+' of '+functionName
+' at line %s, col %s'%lineCol + ' in the source.')
self.addChunk('_orig_trans%(ID)s = trans'%locals())
self.addChunk('_wasBuffering%(ID)s = self._CHEETAH__isBuffering'%locals())
self.addChunk('self._CHEETAH__isBuffering = True')
self.addChunk('trans = _callCollector%(ID)s = DummyTransaction()'%locals())
self.addChunk('write = _callCollector%(ID)s.response().write'%locals())
def setCallArg(self, argName, lineCol):
ID, callDetails = self._callRegionsStack[-1]
if callDetails.usesKeywordArgs:
self._endCallArg()
else:
callDetails.usesKeywordArgs = True
self.addChunk('_callKws%(ID)s = {}'%locals())
self.addChunk('_currentCallArgname%(ID)s = %(argName)r'%locals())
callDetails.currentArgname = argName
def _endCallArg(self):
ID, callDetails = self._callRegionsStack[-1]
currCallArg = callDetails.currentArgname
self.addChunk(('_callKws%(ID)s[%(currCallArg)r] ='
' _callCollector%(ID)s.response().getvalue()')%locals())
self.addChunk('del _callCollector%(ID)s'%locals())
self.addChunk('trans = _callCollector%(ID)s = DummyTransaction()'%locals())
self.addChunk('write = _callCollector%(ID)s.response().write'%locals())
def endCallRegion(self, regionTitle='CALL'):
ID, callDetails = self._callRegionsStack[-1]
functionName, initialKwArgs, lineCol = (
callDetails.functionName, callDetails.args, callDetails.lineCol)
def reset(ID=ID):
self.addChunk('trans = _orig_trans%(ID)s'%locals())
self.addChunk('write = trans.response().write')
self.addChunk('self._CHEETAH__isBuffering = _wasBuffering%(ID)s '%locals())
self.addChunk('del _wasBuffering%(ID)s'%locals())
self.addChunk('del _orig_trans%(ID)s'%locals())
if not callDetails.usesKeywordArgs:
reset()
self.addChunk('_callArgVal%(ID)s = _callCollector%(ID)s.response().getvalue()'%locals())
self.addChunk('del _callCollector%(ID)s'%locals())
if initialKwArgs:
initialKwArgs = ', '+initialKwArgs
self.addFilteredChunk('%(functionName)s(_callArgVal%(ID)s%(initialKwArgs)s)'%locals())
self.addChunk('del _callArgVal%(ID)s'%locals())
else:
if initialKwArgs:
initialKwArgs = initialKwArgs+', '
self._endCallArg()
reset()
self.addFilteredChunk('%(functionName)s(%(initialKwArgs)s**_callKws%(ID)s)'%locals())
self.addChunk('del _callKws%(ID)s'%locals())
self.addChunk('## END %(regionTitle)s REGION: '%locals()
+ID
+' of '+functionName
+' at line %s, col %s'%lineCol + ' in the source.')
self.addChunk('')
self._callRegionsStack.pop() # attrib of current methodCompiler
def nextCaptureRegionID(self):
return self.nextCacheID()
def startCaptureRegion(self, assignTo, lineCol):
class CaptureDetails: pass
captureDetails = CaptureDetails()
captureDetails.ID = ID = self.nextCaptureRegionID()
captureDetails.assignTo = assignTo
captureDetails.lineCol = lineCol
self._captureRegionsStack.append((ID,captureDetails)) # attrib of current methodCompiler
self.addChunk('## START CAPTURE REGION: '+ID
+' '+assignTo
+' at line %s, col %s'%lineCol + ' in the source.')
self.addChunk('_orig_trans%(ID)s = trans'%locals())
self.addChunk('_wasBuffering%(ID)s = self._CHEETAH__isBuffering'%locals())
self.addChunk('self._CHEETAH__isBuffering = True')
self.addChunk('trans = _captureCollector%(ID)s = DummyTransaction()'%locals())
self.addChunk('write = _captureCollector%(ID)s.response().write'%locals())
def endCaptureRegion(self):
ID, captureDetails = self._captureRegionsStack.pop()
assignTo, lineCol = (captureDetails.assignTo, captureDetails.lineCol)
self.addChunk('trans = _orig_trans%(ID)s'%locals())
self.addChunk('write = trans.response().write')
self.addChunk('self._CHEETAH__isBuffering = _wasBuffering%(ID)s '%locals())
self.addChunk('%(assignTo)s = _captureCollector%(ID)s.response().getvalue()'%locals())
self.addChunk('del _orig_trans%(ID)s'%locals())
self.addChunk('del _captureCollector%(ID)s'%locals())
self.addChunk('del _wasBuffering%(ID)s'%locals())
def setErrorCatcher(self, errorCatcherName):
    """Emit generated code that installs the named error catcher on the
    template instance, creating and caching an ErrorCatchers.<name>
    instance on first use.
    """
    self.turnErrorCatcherOn()

    name = errorCatcherName
    self.addChunk('if self._CHEETAH__errorCatchers.has_key("%s"):' % name)
    self.indent()
    self.addChunk(
        'self._CHEETAH__errorCatcher = self._CHEETAH__errorCatchers["%s"]' % name)
    self.dedent()
    self.addChunk('else:')
    self.indent()
    self.addChunk(
        'self._CHEETAH__errorCatcher = self._CHEETAH__errorCatchers["%s"]'
        ' = ErrorCatchers.%s(self)' % (name, name))
    self.dedent()
def nextFilterRegionID(self):
    # Filter regions also reuse the cache-region ID counter for uniqueness.
    return self.nextCacheID()
def setFilter(self, theFilter, isKlass):
    """Open a #filter block: emit generated code that installs a new
    output filter, saving the current one so closeFilterBlock can
    restore it.

    ``theFilter`` is either a class expression (when ``isKlass`` is true),
    the literal string 'none' (reset to the initial filter), or the name
    of a filter in the template's filters library.
    """
    class FilterDetails: pass
    filterDetails = FilterDetails()
    filterDetails.ID = ID = self.nextFilterRegionID()
    filterDetails.theFilter = theFilter
    filterDetails.isKlass = isKlass
    self._filterRegionsStack.append((ID, filterDetails)) # attrib of current methodCompiler
    # Save the active filter so the matching closeFilterBlock can restore it.
    self.addChunk('_orig_filter%(ID)s = _filter'%locals())
    if isKlass:
        # theFilter is a class expression: instantiate it with the template.
        self.addChunk('_filter = self._CHEETAH__currentFilter = ' + theFilter.strip() +
                      '(self).filter')
    else:
        if theFilter.lower() == 'none':
            self.addChunk('_filter = self._CHEETAH__initialFilter')
        else:
            # is string representing the name of a builtin filter
            self.addChunk('filterName = ' + repr(theFilter))
            self.addChunk('if self._CHEETAH__filters.has_key("' + theFilter + '"):')
            self.indent()
            self.addChunk('_filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]')
            self.dedent()
            self.addChunk('else:')
            self.indent()
            # Not cached yet: look it up in the filters library and cache it.
            self.addChunk('_filter = self._CHEETAH__currentFilter'
                          +' = \\\n\t\t\tself._CHEETAH__filters[filterName] = '
                          + 'getattr(self._CHEETAH__filtersLib, filterName)(self).filter')
            self.dedent()
def closeFilterBlock(self):
    """Close the innermost #filter block, restoring the filter that was
    active when the block was opened."""
    ID, filterDetails = self._filterRegionsStack.pop()
    #self.addChunk('_filter = self._CHEETAH__initialFilter')
    self.addChunk('_filter = _orig_filter%(ID)s'%locals())
class AutoMethodCompiler(MethodCompiler):
    """MethodCompiler that automatically wraps the generated method body
    with Cheetah's standard setup code (transaction/buffer handling,
    searchList and filter locals) and cleanup code (the final return).
    Used for all template-defined methods; the plain MethodCompiler is
    kept for __init__ and error-catcher methods.
    """

    def _setupState(self):
        MethodCompiler._setupState(self)
        # (name, defaultValueSource) pairs; 'self' is always first.
        self._argStringList = [ ("self",None) ]
        self._streamingEnabled = True

    def _useKWsDictArgForPassingTrans(self):
        # True when the transaction should be smuggled in via **KWS rather
        # than an explicit trans=None arg (never done for 'respond' itself).
        alreadyHasTransArg = [argname for argname,defval in self._argStringList
                              if argname=='trans']
        return (self.methodName()!='respond'
                and not alreadyHasTransArg
                and self.setting('useKWsDictArgForPassingTrans'))

    def cleanupState(self):
        MethodCompiler.cleanupState(self)
        self.commitStrConst()
        # Auto-close any regions the template left open.
        if self._cacheRegionsStack:
            self.endCacheRegion()
        if self._callRegionsStack:
            self.endCallRegion()

        if self._streamingEnabled:
            kwargsName = None
            positionalArgsListName = None
            for argname,defval in self._argStringList:
                if argname.strip().startswith('**'):
                    kwargsName = argname.strip().replace('**','')
                    break
                elif argname.strip().startswith('*'):
                    positionalArgsListName = argname.strip().replace('*','')
            if not kwargsName and self._useKWsDictArgForPassingTrans():
                kwargsName = 'KWS'
                self.addMethArg('**KWS', None)
            self._kwargsName = kwargsName

            if not self._useKWsDictArgForPassingTrans():
                if not kwargsName and not positionalArgsListName:
                    self.addMethArg('trans', 'None')
                else:
                    # User declared *args/**kwargs themselves; we can't
                    # safely add a trans arg, so disable streaming.
                    self._streamingEnabled = False

        # Sandwich the user's body between the auto setup/cleanup chunks.
        self._indentLev = self.setting('initialMethIndentLevel')
        mainBodyChunks = self._methodBodyChunks
        self._methodBodyChunks = []
        self._addAutoSetupCode()
        self._methodBodyChunks.extend(mainBodyChunks)
        self._addAutoCleanupCode()

    def _addAutoSetupCode(self):
        """Emit the standard preamble: resolve/ create the transaction,
        bind write/SL/_filter locals."""
        if self._initialMethodComment:
            self.addChunk(self._initialMethodComment)

        if self._streamingEnabled:
            if self._useKWsDictArgForPassingTrans() and self._kwargsName:
                self.addChunk('trans = %s.get("trans")'%self._kwargsName)
            self.addChunk('if (not trans and not self._CHEETAH__isBuffering'
                          ' and not callable(self.transaction)):')
            self.indent()
            self.addChunk('trans = self.transaction'
                          ' # is None unless self.awake() was called')
            self.dedent()
            self.addChunk('if not trans:')
            self.indent()
            self.addChunk('trans = DummyTransaction()')
            if self.setting('autoAssignDummyTransactionToSelf'):
                self.addChunk('self.transaction = trans')
            self.addChunk('_dummyTrans = True')
            self.dedent()
            self.addChunk('else: _dummyTrans = False')
        else:
            self.addChunk('trans = DummyTransaction()')
            self.addChunk('_dummyTrans = True')
        self.addChunk('write = trans.response().write')
        if self.setting('useNameMapper'):
            argNames = [arg[0] for arg in self._argStringList]
            allowSearchListAsMethArg = self.setting('allowSearchListAsMethArg')
            if allowSearchListAsMethArg and 'SL' in argNames:
                pass
            elif allowSearchListAsMethArg and 'searchList' in argNames:
                self.addChunk('SL = searchList')
            else:
                self.addChunk('SL = self._CHEETAH__searchList')
        if self.setting('useFilters'):
            self.addChunk('_filter = self._CHEETAH__currentFilter')
        self.addChunk('')
        self.addChunk("#" *40)
        self.addChunk('## START - generated method body')
        self.addChunk('')

    def _addAutoCleanupCode(self):
        """Emit the standard epilogue: closing banner and the return stop."""
        self.addChunk('')
        self.addChunk("#" *40)
        self.addChunk('## END - generated method body')
        self.addChunk('')
        if not self._isGenerator:
            self.addStop()
        self.addChunk('')

    def addStop(self, expr=None):
        # Return the buffered output only if we created the transaction here.
        self.addChunk('return _dummyTrans and trans.response().getvalue() or ""')

    def addMethArg(self, name, defVal=None):
        self._argStringList.append( (name,defVal) )

    def methodSignature(self):
        """Render the full 'def name(args):' line (plus any decorator)."""
        argStringChunks = []
        for arg in self._argStringList:
            chunk = arg[0]
            if not arg[1] == None:
                chunk += '=' + arg[1]
            argStringChunks.append(chunk)
        argString = (', ').join(argStringChunks)

        output = []
        if self._decorator:
            output.append(self._indent + self._decorator+'\n')
        output.append(self._indent + "def "
                      + self.methodName() + "(" +
                      argString + "):\n\n")
        return ''.join(output)
##################################################
## CLASS COMPILERS
_initMethod_initCheetah = """\
if not self._CHEETAH__instanceInitialized:
cheetahKWArgs = {}
allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
for k,v in KWs.items():
if k in allowedKWs: cheetahKWArgs[k] = v
self._initCheetahInstance(**cheetahKWArgs)
""".replace('\n','\n'+' '*8)
class ClassCompiler(GenUtils):
methodCompilerClass = AutoMethodCompiler
methodCompilerClassForInit = MethodCompiler
def __init__(self, className, mainMethodName='respond',
moduleCompiler=None,
fileName=None,
settingsManager=None):
self._settingsManager = settingsManager
self._fileName = fileName
self._className = className
self._moduleCompiler = moduleCompiler
self._mainMethodName = mainMethodName
self._setupState()
methodCompiler = self._spawnMethodCompiler(
mainMethodName,
initialMethodComment='## CHEETAH: main method generated for this template')
self._setActiveMethodCompiler(methodCompiler)
if fileName and self.setting('monitorSrcFile'):
self._addSourceFileMonitoring(fileName)
def setting(self, key):
return self._settingsManager.setting(key)
def __getattr__(self, name):
"""Provide access to the methods and attributes of the MethodCompiler
at the top of the activeMethods stack: one-way namespace sharing
WARNING: Use .setMethods to assign the attributes of the MethodCompiler
from the methods of this class!!! or you will be assigning to attributes
of this object instead."""
if self.__dict__.has_key(name):
return self.__dict__[name]
elif hasattr(self.__class__, name):
return getattr(self.__class__, name)
elif self._activeMethodsList and hasattr(self._activeMethodsList[-1], name):
return getattr(self._activeMethodsList[-1], name)
else:
raise AttributeError, name
def _setupState(self):
self._classDef = None
self._decoratorForNextMethod = None
self._activeMethodsList = [] # stack while parsing/generating
self._finishedMethodsList = [] # store by order
self._methodsIndex = {} # store by name
self._baseClass = 'Template'
self._classDocStringLines = []
# printed after methods in the gen class def:
self._generatedAttribs = ['_CHEETAH__instanceInitialized = False']
self._generatedAttribs.append('_CHEETAH_version = __CHEETAH_version__')
self._generatedAttribs.append(
'_CHEETAH_versionTuple = __CHEETAH_versionTuple__')
self._generatedAttribs.append('_CHEETAH_genTime = __CHEETAH_genTime__')
self._generatedAttribs.append('_CHEETAH_genTimestamp = __CHEETAH_genTimestamp__')
self._generatedAttribs.append('_CHEETAH_src = __CHEETAH_src__')
self._generatedAttribs.append(
'_CHEETAH_srcLastModified = __CHEETAH_srcLastModified__')
if self.setting('templateMetaclass'):
self._generatedAttribs.append('__metaclass__ = '+self.setting('templateMetaclass'))
self._initMethChunks = []
self._blockMetaData = {}
self._errorCatcherCount = 0
self._placeholderToErrorCatcherMap = {}
def cleanupState(self):
while self._activeMethodsList:
methCompiler = self._popActiveMethodCompiler()
self._swallowMethodCompiler(methCompiler)
self._setupInitMethod()
if self._mainMethodName == 'respond':
if self.setting('setup__str__method'):
self._generatedAttribs.append('def __str__(self): return self.respond()')
self.addAttribute('_mainCheetahMethod_for_' + self._className +
'= ' + repr(self._mainMethodName) )
def _setupInitMethod(self):
__init__ = self._spawnMethodCompiler('__init__',
klass=self.methodCompilerClassForInit)
__init__.setMethodSignature("def __init__(self, *args, **KWs)")
__init__.addChunk("%s.__init__(self, *args, **KWs)" % self._baseClass)
__init__.addChunk(_initMethod_initCheetah%{'className':self._className})
for chunk in self._initMethChunks:
__init__.addChunk(chunk)
__init__.cleanupState()
self._swallowMethodCompiler(__init__, pos=0)
def _addSourceFileMonitoring(self, fileName):
# @@TR: this stuff needs auditing for Cheetah 2.0
# the first bit is added to init
self.addChunkToInit('self._filePath = ' + repr(fileName))
self.addChunkToInit('self._fileMtime = ' + str(getmtime(fileName)) )
# the rest is added to the main output method of the class ('mainMethod')
self.addChunk('if exists(self._filePath) and ' +
'getmtime(self._filePath) > self._fileMtime:')
self.indent()
self.addChunk('self._compile(file=self._filePath, moduleName='+className + ')')
self.addChunk(
'write(getattr(self, self._mainCheetahMethod_for_' + self._className +
')(trans=trans))')
self.addStop()
self.dedent()
def setClassName(self, name):
self._className = name
def className(self):
return self._className
def setBaseClass(self, baseClassName):
self._baseClass = baseClassName
def setMainMethodName(self, methodName):
if methodName == self._mainMethodName:
return
## change the name in the methodCompiler and add new reference
mainMethod = self._methodsIndex[self._mainMethodName]
mainMethod.setMethodName(methodName)
self._methodsIndex[methodName] = mainMethod
## make sure that fileUpdate code still works properly:
chunkToChange = ('write(self.' + self._mainMethodName + '(trans=trans))')
chunks = mainMethod._methodBodyChunks
if chunkToChange in chunks:
for i in range(len(chunks)):
if chunks[i] == chunkToChange:
chunks[i] = ('write(self.' + methodName + '(trans=trans))')
## get rid of the old reference and update self._mainMethodName
del self._methodsIndex[self._mainMethodName]
self._mainMethodName = methodName
def setMainMethodArgs(self, argsList):
mainMethodCompiler = self._methodsIndex[self._mainMethodName]
for argName, defVal in argsList:
mainMethodCompiler.addMethArg(argName, defVal)
def _spawnMethodCompiler(self, methodName, klass=None,
initialMethodComment=None):
if klass is None:
klass = self.methodCompilerClass
decorator = None
if self._decoratorForNextMethod:
decorator = self._decoratorForNextMethod
self._decoratorForNextMethod = None
methodCompiler = klass(methodName, classCompiler=self,
decorator=decorator,
initialMethodComment=initialMethodComment)
self._methodsIndex[methodName] = methodCompiler
return methodCompiler
def _setActiveMethodCompiler(self, methodCompiler):
self._activeMethodsList.append(methodCompiler)
def _getActiveMethodCompiler(self):
return self._activeMethodsList[-1]
def _popActiveMethodCompiler(self):
return self._activeMethodsList.pop()
def _swallowMethodCompiler(self, methodCompiler, pos=None):
methodCompiler.cleanupState()
if pos==None:
self._finishedMethodsList.append( methodCompiler )
else:
self._finishedMethodsList.insert(pos, methodCompiler)
return methodCompiler
def startMethodDef(self, methodName, argsList, parserComment):
methodCompiler = self._spawnMethodCompiler(
methodName, initialMethodComment=parserComment)
self._setActiveMethodCompiler(methodCompiler)
for argName, defVal in argsList:
methodCompiler.addMethArg(argName, defVal)
def _finishedMethods(self):
return self._finishedMethodsList
def addDecorator(self, decoratorExpr):
"""Set the decorator to be used with the next method in the source.
See _spawnMethodCompiler() and MethodCompiler for the details of how
this is used.
"""
self._decoratorForNextMethod = decoratorExpr
def addClassDocString(self, line):
self._classDocStringLines.append( line.replace('%','%%'))
def addChunkToInit(self,chunk):
self._initMethChunks.append(chunk)
def addAttribute(self, attribExpr):
## first test to make sure that the user hasn't used any fancy Cheetah syntax
# (placeholders, directives, etc.) inside the expression
if attribExpr.find('VFN(') != -1 or attribExpr.find('VFFSL(') != -1:
raise ParseError(self,
'Invalid #attr directive.' +
' It should only contain simple Python literals.')
## now add the attribute
self._generatedAttribs.append(attribExpr)
def addErrorCatcherCall(self, codeChunk, rawCode='', lineCol=''):
if self._placeholderToErrorCatcherMap.has_key(rawCode):
methodName = self._placeholderToErrorCatcherMap[rawCode]
if not self.setting('outputRowColComments'):
self._methodsIndex[methodName].addMethDocString(
'plus at line %s, col %s'%lineCol)
return methodName
self._errorCatcherCount += 1
methodName = '__errorCatcher' + str(self._errorCatcherCount)
self._placeholderToErrorCatcherMap[rawCode] = methodName
catcherMeth = self._spawnMethodCompiler(
methodName,
klass=MethodCompiler,
initialMethodComment=('## CHEETAH: Generated from ' + rawCode +
' at line %s, col %s'%lineCol + '.')
)
catcherMeth.setMethodSignature('def ' + methodName +
'(self, localsDict={})')
# is this use of localsDict right?
catcherMeth.addChunk('try:')
catcherMeth.indent()
catcherMeth.addChunk("return eval('''" + codeChunk +
"''', globals(), localsDict)")
catcherMeth.dedent()
catcherMeth.addChunk('except self._CHEETAH__errorCatcher.exceptions(), e:')
catcherMeth.indent()
catcherMeth.addChunk("return self._CHEETAH__errorCatcher.warn(exc_val=e, code= " +
repr(codeChunk) + " , rawCode= " +
repr(rawCode) + " , lineCol=" + str(lineCol) +")")
catcherMeth.cleanupState()
self._swallowMethodCompiler(catcherMeth)
return methodName
def closeDef(self):
self.commitStrConst()
methCompiler = self._popActiveMethodCompiler()
self._swallowMethodCompiler(methCompiler)
def closeBlock(self):
self.commitStrConst()
methCompiler = self._popActiveMethodCompiler()
methodName = methCompiler.methodName()
if self.setting('includeBlockMarkers'):
endMarker = self.setting('blockMarkerEnd')
methCompiler.addStrConst(endMarker[0] + methodName + endMarker[1])
self._swallowMethodCompiler(methCompiler)
#metaData = self._blockMetaData[methodName]
#rawDirective = metaData['raw']
#lineCol = metaData['lineCol']
## insert the code to call the block, caching if #cache directive is on
codeChunk = 'self.' + methodName + '(trans=trans)'
self.addChunk(codeChunk)
#self.appendToPrevChunk(' # generated from ' + repr(rawDirective) )
#if self.setting('outputRowColComments'):
# self.appendToPrevChunk(' at line %s, col %s' % lineCol + '.')
## code wrapping methods
def classDef(self):
if self._classDef:
return self._classDef
else:
return self.wrapClassDef()
__str__ = classDef
def wrapClassDef(self):
ind = self.setting('indentationStep')
classDefChunks = [self.classSignature(),
self.classDocstring(),
]
def addMethods():
classDefChunks.extend([
ind + '#'*50,
ind + '## CHEETAH GENERATED METHODS',
'\n',
self.methodDefs(),
])
def addAttributes():
classDefChunks.extend([
ind + '#'*50,
ind + '## CHEETAH GENERATED ATTRIBUTES',
'\n',
self.attributes(),
])
if self.setting('outputMethodsBeforeAttributes'):
addMethods()
addAttributes()
else:
addAttributes()
addMethods()
classDef = '\n'.join(classDefChunks)
self._classDef = classDef
return classDef
def classSignature(self):
return "class %s(%s):" % (self.className(), self._baseClass)
def classDocstring(self):
if not self._classDocStringLines:
return ''
ind = self.setting('indentationStep')
docStr = ('%(ind)s"""\n%(ind)s' +
'\n%(ind)s'.join(self._classDocStringLines) +
'\n%(ind)s"""\n'
) % {'ind':ind}
return docStr
def methodDefs(self):
methodDefs = [str(methGen) for methGen in self._finishedMethods() ]
return '\n\n'.join(methodDefs)
def attributes(self):
attribs = [self.setting('indentationStep') + str(attrib)
for attrib in self._generatedAttribs ]
return '\n\n'.join(attribs)
class AutoClassCompiler(ClassCompiler):
    # Currently identical to ClassCompiler; kept as an extension point
    # mirroring the MethodCompiler/AutoMethodCompiler split.
    pass
##################################################
## MODULE COMPILERS
class ModuleCompiler(SettingsManager, GenUtils):
    """Top-level compiler: turns Cheetah template source (a string or a
    file) into the text of a complete Python module.

    Delegates class-body generation to ClassCompiler instances via
    one-way attribute forwarding (__getattr__).
    """

    parserClass = Parser
    classCompilerClass = AutoClassCompiler

    def __init__(self, source=None, file=None,
                 moduleName='DynamicallyCompiledCheetahTemplate',
                 mainClassName=None,          # string
                 mainMethodName=None,         # string
                 baseclassName=None,          # string
                 extraImportStatements=None,  # list of strings
                                              # NOTE(review): accepted but
                                              # never used in this body —
                                              # verify against upstream.
                 settings=None                # dict
                 ):
        SettingsManager.__init__(self)
        if settings:
            self.updateSettings(settings)
        # disable useStackFrames if the C version of NameMapper isn't compiled
        # it's painfully slow in the Python version and bites Windows users all
        # the time:
        if not NameMapper.C_VERSION:
            if not sys.platform.startswith('java'):
                warnings.warn(
                    "\nYou don't have the C version of NameMapper installed! "
                    "I'm disabling Cheetah's useStackFrames option as it is "
                    "painfully slow with the Python version of NameMapper. "
                    "You should get a copy of Cheetah with the compiled C version of NameMapper."
                    )
            self.setSetting('useStackFrames', False)

        self._compiled = False
        self._moduleName = moduleName
        if not mainClassName:
            self._mainClassName = moduleName
        else:
            self._mainClassName = mainClassName
        self._mainMethodNameArg = mainMethodName
        if mainMethodName:
            self.setSetting('mainMethodName', mainMethodName)
        self._baseclassName = baseclassName

        self._filePath = None
        self._fileMtime = None

        if source and file:
            raise TypeError("Cannot compile from a source string AND file.")
        elif isinstance(file, types.StringType) or isinstance(file, types.UnicodeType): # it's a filename.
            f = open(file) # Raises IOError.
            source = f.read()
            f.close()
            self._filePath = file
            self._fileMtime = os.path.getmtime(file)
        elif hasattr(file, 'read'):
            source = file.read() # Can't set filename or mtime--they're not accessible.
        elif file:
            raise TypeError("'file' argument must be a filename string or file-like object")

        if self._filePath:
            self._fileDirName, self._fileBaseName = os.path.split(self._filePath)
            self._fileBaseNameRoot, self._fileBaseNameExt = \
                os.path.splitext(self._fileBaseName)

        if not (isinstance(source, str) or isinstance(source, unicode)):
            source = str( source )
            # by converting to string here we allow objects such as other Templates
            # to be passed in

        # Handle the #indent directive by converting it to other directives.
        # (Over the long term we'll make it a real directive.)
        if source == "":
            warnings.warn("You supplied an empty string for the source!", )

        if source.find('#indent') != -1: #@@TR: undocumented hack
            source = indentize(source)

        self._parser = self.parserClass(source, filename=self._filePath, compiler=self)
        self._setupCompilerState()

    def __getattr__(self, name):
        """Provide one-way access to the methods and attributes of the
        ClassCompiler, and thereby the MethodCompilers as well.

        WARNING: Use .setMethods to assign the attributes of the ClassCompiler
        from the methods of this class!!! or you will be assigning to attributes
        of this object instead.
        """
        if self.__dict__.has_key(name):
            return self.__dict__[name]
        elif hasattr(self.__class__, name):
            return getattr(self.__class__, name)
        elif self._activeClassesList and hasattr(self._activeClassesList[-1], name):
            return getattr(self._activeClassesList[-1], name)
        else:
            raise AttributeError, name

    def _initializeSettings(self):
        self.updateSettings(copy.deepcopy(DEFAULT_COMPILER_SETTINGS))

    def _setupCompilerState(self):
        self._activeClassesList = []
        self._finishedClassesList = []      # listed by ordered
        self._finishedClassIndex = {}       # listed by name
        self._moduleDef = None
        self._moduleShBang = '#!/usr/bin/env python'
        self._moduleEncoding = 'ascii'
        self._moduleEncodingStr = ''
        self._moduleHeaderLines = []
        self._moduleDocStringLines = []
        self._specialVars = {}
        # Imports emitted at the top of every generated module.
        self._importStatements = [
            "import sys",
            "import os",
            "import os.path",
            "from os.path import getmtime, exists",
            "import time",
            "import types",
            "import __builtin__",
            "from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion",
            "from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple",
            "from Cheetah.Template import Template",
            "from Cheetah.DummyTransaction import DummyTransaction",
            "from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList",
            "from Cheetah.CacheRegion import CacheRegion",
            "import Cheetah.Filters as Filters",
            "import Cheetah.ErrorCatchers as ErrorCatchers",
            ]
        # Names considered already-imported by #extends auto-import logic.
        self._importedVarNames = ['sys',
                                  'os',
                                  'os.path',
                                  'time',
                                  'types',
                                  'Template',
                                  'DummyTransaction',
                                  'NotFound',
                                  'Filters',
                                  'ErrorCatchers',
                                  'CacheRegion',
                                  ]
        self._moduleConstants = [
            "try:",
            "    True, False",
            "except NameError:",
            "    True, False = (1==1), (1==0)",
            "VFFSL=valueFromFrameOrSearchList",
            "VFSL=valueFromSearchList",
            "VFN=valueForName",
            "currentTime=time.time",
            ]

    def compile(self):
        """Parse the template source, producing one finished ClassCompiler
        per generated class."""
        classCompiler = self._spawnClassCompiler(self._mainClassName)
        if self._baseclassName:
            classCompiler.setBaseClass(self._baseclassName)
        self._addActiveClassCompiler(classCompiler)
        self._parser.parse()
        self._swallowClassCompiler(self._popActiveClassCompiler())
        self._compiled = True
        self._parser.cleanup()

    def _spawnClassCompiler(self, className, klass=None):
        if klass is None:
            klass = self.classCompilerClass
        classCompiler = klass(className,
                              moduleCompiler=self,
                              mainMethodName=self.setting('mainMethodName'),
                              fileName=self._filePath,
                              settingsManager=self,
                              )
        return classCompiler

    def _addActiveClassCompiler(self, classCompiler):
        self._activeClassesList.append(classCompiler)

    def _getActiveClassCompiler(self):
        return self._activeClassesList[-1]

    def _popActiveClassCompiler(self):
        return self._activeClassesList.pop()

    def _swallowClassCompiler(self, classCompiler):
        classCompiler.cleanupState()
        self._finishedClassesList.append( classCompiler )
        self._finishedClassIndex[classCompiler.className()] = classCompiler
        return classCompiler

    def _finishedClasses(self):
        return self._finishedClassesList

    def importedVarNames(self):
        return self._importedVarNames

    def addImportedVarNames(self, varNames):
        self._importedVarNames.extend(varNames)

    ## methods for adding stuff to the module and class definitions

    def setBaseClass(self, baseClassName):
        """Handle the #extends directive, auto-importing the base class's
        module when needed."""
        if self._mainMethodNameArg:
            self.setMainMethodName(self._mainMethodNameArg)
        else:
            self.setMainMethodName(self.setting('mainMethodNameForSubclasses'))

        if self.setting('handlerForExtendsDirective'):
            handler = self.setting('handlerForExtendsDirective')
            baseClassName = handler(compiler=self, baseClassName=baseClassName)
            self._getActiveClassCompiler().setBaseClass(baseClassName)
        elif (not self.setting('autoImportForExtendsDirective')
              or baseClassName=='object' or baseClassName in self.importedVarNames()):
            self._getActiveClassCompiler().setBaseClass(baseClassName)
            # no need to import
        else:
            ##################################################
            ## If the #extends directive contains a classname or modulename that isn't
            #  in self.importedVarNames() already, we assume that we need to add
            #  an implied 'from ModName import ClassName' where ModName == ClassName.
            #  - This is the case in WebKit servlet modules.
            #  - We also assume that the final . separates the classname from the
            #    module name.  This might break if people do something really fancy
            #    with their dots and namespaces.
            chunks = baseClassName.split('.')
            if len(chunks)==1:
                self._getActiveClassCompiler().setBaseClass(baseClassName)
                if baseClassName not in self.importedVarNames():
                    modName = baseClassName
                    # we assume the class name to be the module name
                    # and that it's not a builtin:
                    importStatement = "from %s import %s" % (modName, baseClassName)
                    self.addImportStatement(importStatement)
                    self.addImportedVarNames( [baseClassName,] )
            else:
                needToAddImport = True
                modName = chunks[0]
                #print chunks, ':', self.importedVarNames()
                for chunk in chunks[1:-1]:
                    if modName in self.importedVarNames():
                        needToAddImport = False
                        finalBaseClassName = baseClassName.replace(modName+'.', '')
                        self._getActiveClassCompiler().setBaseClass(finalBaseClassName)
                        break
                    else:
                        modName += '.'+chunk
                if needToAddImport:
                    modName, finalClassName = '.'.join(chunks[:-1]), chunks[-1]
                    #if finalClassName != chunks[:-1][-1]:
                    if finalClassName != chunks[-2]:
                        # we assume the class name to be the module name
                        modName = '.'.join(chunks)
                    self._getActiveClassCompiler().setBaseClass(finalClassName)
                    importStatement = "from %s import %s" % (modName, finalClassName)
                    self.addImportStatement(importStatement)
                    self.addImportedVarNames( [finalClassName,] )

    def setCompilerSetting(self, key, valueExpr):
        # NOTE: valueExpr comes from the template's #compiler directive and
        # is eval'd — template sources are trusted code.
        self.setSetting(key, eval(valueExpr) )
        self._parser.configureParser()

    def setCompilerSettings(self, keywords, settingsStr):
        KWs = keywords
        merge = True
        if 'nomerge' in KWs:
            merge = False

        if 'reset' in KWs:
            # @@TR: this is actually caught by the parser at the moment.
            # subject to change in the future
            self._initializeSettings()
            self._parser.configureParser()
            return
        elif 'python' in KWs:
            settingsReader = self.updateSettingsFromPySrcStr
            # this comes from SettingsManager
        else:
            # this comes from SettingsManager
            settingsReader = self.updateSettingsFromConfigStr

        settingsReader(settingsStr)
        self._parser.configureParser()

    def setShBang(self, shBang):
        self._moduleShBang = shBang

    def setModuleEncoding(self, encoding):
        self._moduleEncoding = encoding
        self._moduleEncodingStr = '# -*- coding: %s -*-' %encoding

    def getModuleEncoding(self):
        return self._moduleEncoding

    def addModuleHeader(self, line):
        """Adds a header comment to the top of the generated module.
        """
        self._moduleHeaderLines.append(line)

    def addModuleDocString(self, line):
        """Adds a line to the generated module docstring.
        """
        self._moduleDocStringLines.append(line)

    def addModuleGlobal(self, line):
        """Adds a line of global module code.  It is inserted after the import
        statements and Cheetah default module constants.
        """
        self._moduleConstants.append(line)

    def addSpecialVar(self, basename, contents, includeUnderscores=True):
        """Adds module __specialConstant__ to the module globals.
        """
        name = includeUnderscores and '__'+basename+'__' or basename
        self._specialVars[name] = contents.strip()

    def addImportStatement(self, impStatement):
        self._importStatements.append(impStatement)

        #@@TR 2005-01-01: there's almost certainly a cleaner way to do this!
        importVarNames = impStatement[impStatement.find('import') + len('import'):].split(',')
        importVarNames = [var.split()[-1] for var in importVarNames] # handles aliases
        importVarNames = [var for var in importVarNames if var!='*']
        self.addImportedVarNames(importVarNames) #used by #extend for auto-imports

    def addAttribute(self, attribName, expr):
        self._getActiveClassCompiler().addAttribute(attribName + ' =' + expr)

    def addComment(self, comm):
        """Route a template comment to the right destination based on its
        'doc:'/'header:' style prefix (method docstring by default)."""
        if re.match(r'#+$',comm):      # skip bar comments
            return

        specialVarMatch = specialVarRE.match(comm)
        if specialVarMatch:
            # @@TR: this is a bit hackish and is being replaced with
            # #set module varName = ...
            return self.addSpecialVar(specialVarMatch.group(1),
                                      comm[specialVarMatch.end():])
        elif comm.startswith('doc:'):
            addLine = self.addMethDocString
            comm = comm[len('doc:'):].strip()
        elif comm.startswith('doc-method:'):
            addLine = self.addMethDocString
            comm = comm[len('doc-method:'):].strip()
        elif comm.startswith('doc-module:'):
            addLine = self.addModuleDocString
            comm = comm[len('doc-module:'):].strip()
        elif comm.startswith('doc-class:'):
            addLine = self.addClassDocString
            comm = comm[len('doc-class:'):].strip()
        elif comm.startswith('header:'):
            addLine = self.addModuleHeader
            comm = comm[len('header:'):].strip()
        else:
            addLine = self.addMethComment

        for line in comm.splitlines():
            addLine(line)

    ## methods for module code wrapping

    def getModuleCode(self):
        if not self._compiled:
            self.compile()
        if self._moduleDef:
            return self._moduleDef
        else:
            return self.wrapModuleDef()

    __str__ = getModuleCode

    def wrapModuleDef(self):
        """Assemble the complete generated-module source text."""
        self.addSpecialVar('CHEETAH_docstring', self.setting('defDocStrMsg'))
        self.addModuleGlobal('__CHEETAH_version__ = %r'%Version)
        self.addModuleGlobal('__CHEETAH_versionTuple__ = %r'%(VersionTuple,))
        self.addModuleGlobal('__CHEETAH_genTime__ = %r'%time.time())
        self.addModuleGlobal('__CHEETAH_genTimestamp__ = %r'%self.timestamp())
        if self._filePath:
            timestamp = self.timestamp(self._fileMtime)
            self.addModuleGlobal('__CHEETAH_src__ = %r'%self._filePath)
            self.addModuleGlobal('__CHEETAH_srcLastModified__ = %r'%timestamp)
        else:
            self.addModuleGlobal('__CHEETAH_src__ = None')
            self.addModuleGlobal('__CHEETAH_srcLastModified__ = None')

        moduleDef = """%(header)s
%(docstring)s

##################################################
## DEPENDENCIES
%(imports)s

##################################################
## MODULE CONSTANTS
%(constants)s
%(specialVars)s

if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
    raise AssertionError(
      'This template was compiled with Cheetah version'
      ' %%s. Templates compiled before version %%s must be recompiled.'%%(
         __CHEETAH_version__, RequiredCheetahVersion))

##################################################
## CLASSES

%(classes)s

## END CLASS DEFINITION
if not hasattr(%(mainClassName)s, '_initCheetahAttributes'):
    templateAPIClass = getattr(%(mainClassName)s, '_CHEETAH_templateClass', Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(%(mainClassName)s)

%(footer)s
""" % {'header':self.moduleHeader(),
       'docstring':self.moduleDocstring(),
       'specialVars':self.specialVars(),
       'imports':self.importStatements(),
       'constants':self.moduleConstants(),
       'classes':self.classDefs(),
       'footer':self.moduleFooter(),
       'mainClassName':self._mainClassName,
       }

        self._moduleDef = moduleDef
        return moduleDef

    def timestamp(self, theTime=None):
        if not theTime:
            theTime = time.time()
        return time.asctime(time.localtime(theTime))

    def moduleHeader(self):
        header = self._moduleShBang + '\n'
        header += self._moduleEncodingStr + '\n'
        if self._moduleHeaderLines:
            offSet = self.setting('commentOffset')
            header += (
                '#' + ' '*offSet +
                ('\n#'+ ' '*offSet).join(self._moduleHeaderLines) + '\n')
        return header

    def moduleDocstring(self):
        if not self._moduleDocStringLines:
            return ''
        return ('"""' +
                '\n'.join(self._moduleDocStringLines) +
                '\n"""\n')

    def specialVars(self):
        chunks = []
        theVars = self._specialVars
        keys = theVars.keys()
        keys.sort()
        for key in keys:
            chunks.append(key + ' = ' + repr(theVars[key]) )
        return '\n'.join(chunks)

    def importStatements(self):
        return '\n'.join(self._importStatements)

    def moduleConstants(self):
        return '\n'.join(self._moduleConstants)

    def classDefs(self):
        classDefs = [str(klass) for klass in self._finishedClasses() ]
        return '\n\n'.join(classDefs)

    def moduleFooter(self):
        return """
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/

##################################################
## if run from command line:
if __name__ == '__main__':
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=%(className)s()).run()
""" % {'className':self._mainClassName}
##################################################
## Make Compiler an alias for ModuleCompiler
# Public entry point kept under the historical name for backwards
# compatibility with code that does `from Cheetah.Compiler import Compiler`.
Compiler = ModuleCompiler
|
CymaticLabs/Unity3D.Amqp
|
lib/rabbitmq-dotnet-client-rabbitmq_v3_4_4/docs/pyle2-fcfcf7e/Cheetah/Compiler.py
|
Python
|
mit
| 78,232
|
[
"VisIt"
] |
395f77f98eb5f853f96bd312ee2eccd44e5f8b36c848f0e7d4c428f519c19ad1
|
# system modules
import os
from unittest import TestCase
# custom module
from iago import Reader
# Repository root: one directory above the directory containing this test file.
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
class TestReader(TestCase):
    """Unit tests for the iago Reader base class, EmptyUniverse, and the
    run-type auto-detection of the concrete CP2K/NAMD readers."""

    def test_emtpy_universe(self):
        # NOTE(review): method name has a typo ("emtpy" -> "empty"); kept
        # as-is since renaming would change the test id reported by runners.
        e = Reader.EmptyUniverse()
        self.assertEqual(len(e), 0)
        self.assertEqual(len(e.trajectory), 0)

    def test_reader(self):
        # The abstract base must refuse the operations subclasses implement,
        # but still provide safe empty defaults for universe/frames.
        r = Reader.Reader(os.getcwd())
        self.assertRaises(NotImplementedError, r.get_options)
        self.assertRaises(NotImplementedError, r.read)
        self.assertRaises(NotImplementedError, r.get_input)
        self.assertRaises(NotImplementedError, r.get_output)
        self.assertIsInstance(r.get_universe(), Reader.EmptyUniverse)
        self.assertEqual(r.get_trajectory_frames(), [])

    def test_reader_file_matching(self):
        r = Reader.Reader(os.getcwd())
        self.assertRaises(KeyError, r._get_first_file_matching, ['foo', ])
        # Both absolute and relative candidates should resolve to the
        # same absolute path.
        file_absolute = os.path.abspath(os.path.realpath(__file__))
        file_relative = os.path.relpath(file_absolute)
        self.assertEqual(r._get_first_file_matching([file_absolute, ]), file_absolute)
        self.assertEqual(r._get_first_file_matching([file_relative, ]), file_absolute)

    def test_reader_run_discovery(self):
        # Each concrete reader should claim only its own run directory.
        # CP2K run
        path = os.path.join(BASE_DIR, 'fixtures', 'debug-88ac57b3e437fa1d5d26d00d6c768324', 'run-1')
        r_cp2k = Reader.CP2KReader(path)
        r_namd = Reader.NAMDReader(path)
        self.assertTrue(r_cp2k.claims())
        self.assertFalse(r_namd.claims())

        # NAMD run
        path = os.path.join(BASE_DIR, 'fixtures', 'ubqTUTORIAL-1aa3d370076a9db6c69c587ba561ecd0', 'run-1')
        r_cp2k = Reader.CP2KReader(path)
        r_namd = Reader.NAMDReader(path)
        self.assertFalse(r_cp2k.claims())
        self.assertTrue(r_namd.claims())
|
ferchault/iago
|
tests/unit/test_reader.py
|
Python
|
mit
| 1,679
|
[
"CP2K",
"NAMD"
] |
fd3ae18e52e1b647136d98ea914605752f7dd3a44f8c6f99ea37a5f124d0415a
|
from __future__ import absolute_import
import os
import os.path as op
from copy import copy
import numpy as nm
from sfepy.base.base import (
dict_from_keys_init, select_by_names, is_string, is_integer, is_sequence,
output, get_default, Struct, IndexedStruct)
import sfepy.base.ioutils as io
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.base.conf import transform_variables, transform_materials
from sfepy.base.timing import Timer
from .functions import Functions
from sfepy.discrete.fem.mesh import Mesh
from sfepy.discrete.fem.meshio import check_format_suffix
from sfepy.discrete.fem.fields_base import set_mesh_coors
from sfepy.discrete.common.fields import fields_from_conf
from .variables import Variables, Variable
from .materials import Materials, Material
from .equations import Equations
from .integrals import Integrals
from sfepy.discrete.state import State
from sfepy.discrete.conditions import Conditions
from sfepy.discrete.evaluate import create_evaluable, eval_equations
from sfepy.solvers.ts import TimeStepper
from sfepy.discrete.evaluate import Evaluator
from sfepy.solvers import Solver, NonlinearSolver
from sfepy.solvers.solvers import use_first_available
from sfepy.solvers.ts_solvers import StationarySolver
import six
from six.moves import range
def make_is_save(options):
    """
    Given problem options, return a callable that determines whether to save
    results of a time step.

    The 'save_times' option can be: 'all' (save every step), an integer
    (that many save times, spread by ``linspace`` over the time interval),
    a sequence of explicit save times, or a callable taking the time
    stepper.
    """
    class IsSave(Struct):

        def __init__(self, save_times):
            if is_sequence(save_times):
                save_times = nm.asarray(save_times)
            self.save_times0 = save_times

            self.reset()

        def reset(self, ts=None):
            # Restart from the first scheduled save time.
            self.ilast = 0
            self.save_times = self.save_times0
            if ts is not None:
                if is_integer(self.save_times0):
                    # Expand "number of saves" into concrete save times.
                    self.save_times = nm.linspace(ts.t0, ts.t1,
                                                  self.save_times0)

        def __call__(self, ts):
            if is_string(self.save_times) and self.save_times == 'all':
                return True

            elif isinstance(self.save_times, nm.ndarray):
                # Save once the current time reaches the next scheduled
                # time; the 1e-14 * dt term guards against float round-off.
                if (self.ilast < len(self.save_times)
                    and (ts.time + (1e-14 * ts.dt)
                         >= self.save_times[self.ilast])):
                    self.ilast += 1
                    return True

            elif callable(self.save_times):
                return self.save_times(ts)

            return False

    save_times = options.get('save_times', 'all')
    is_save = IsSave(save_times)

    return is_save
def prepare_matrix(problem, state):
    """
    Pre-assemble tangent system matrix.
    """
    problem.update_materials()
    evaluator = problem.get_evaluator()
    try:
        matrix = evaluator.eval_tangent_matrix(state(), is_full=True)
    except ValueError:
        # Report before re-raising so the failure is visible in the log.
        output('matrix evaluation failed, giving up...')
        raise
    return matrix
##
# 29.01.2006, c
class Problem(Struct):
"""
Problem definition, the top-level class holding all data necessary to solve
a problem.
It can be constructed from a :class:`ProblemConf
<sfepy.base.conf.ProblemConf>` instance using `Problem.from_conf()` or
directly from a problem description file using `Problem.from_conf_file()`
For interactive use, the constructor requires only the `equations`,
`nls` and `ls` keyword arguments, see below.
Parameters
----------
name : str
The problem name.
conf : ProblemConf instance, optional
The :class:`ProblemConf <sfepy.base.conf.ProblemConf>` describing the
problem.
functions : Functions instance, optional
The user functions for boundary conditions, materials, etc.
domain : Domain instance, optional
The solution :class:`Domain <sfepy.discrete.common.domain.Domain>`.
fields : dict, optional
The dictionary of :class:`Field <sfepy.discrete.common.fields.Field>`
instances.
equations : Equations instance, optional
The :class:`Equations <sfepy.discrete.equations.Equations>` to solve.
This argument is required when `auto_conf` is True.
auto_conf : bool
If True, fields and domain are determined by `equations`.
active_only : bool
If True, the (tangent) matrices and residual vectors (right-hand sides)
contain only active DOFs, see below.
Notes
-----
The Problem is by default created with `active_only` set to True. Then the
(tangent) matrices and residual vectors (right-hand sides) have reduced
sizes and contain only the active DOFs, i.e., DOFs not constrained by EBCs
or EPBCs.
Setting `active_only` to False results in full-size vectors and
matrices. Then the matrix size non-zeros structure does not depend on the
actual E(P)BCs applied. It must be False when using parallel PETSc solvers.
The active DOF connectivities contain all DOFs, with the E(P)BC-constrained
ones stored as `-1 - <DOF number>`, so that the full connectivities can be
reconstructed for the matrix graph creation. However, the negative entries
mean that the assembled matrices/residuals have zero values at positions
corresponding to constrained DOFs.
The resulting linear system then provides a solution increment, that has to
be added to the initial guess used to compute the residual, just like in
the Newton iterations. The increment of the constrained DOFs is
automatically zero.
When solving with a direct solver, the diagonal entries of a matrix at
positions corresponding to constrained DOFs has to be set to ones, so that
the matrix is not singular, see
:func:`sfepy.discrete.evaluate.apply_ebc_to_matrix()`, which is called
automatically in
:func:`sfepy.discrete.evaluate.Evaluator.eval_tangent_matrix()`. It is
not called automatically in :func:`Problem.evaluate()`. Note that setting
the diagonal entries to one might not be necessary with iterative solvers,
as the zero matrix rows match the zero residual rows, i.e. if the reduced
matrix would be regular, then the right-hand side (the residual) is
orthogonal to the kernel of the matrix.
"""
@staticmethod
def from_conf_file(conf_filename, required=None, other=None,
                   init_fields=True, init_equations=True,
                   init_solvers=True):
    """Construct a Problem directly from a problem description file.

    Missing `required`/`other` keyword lists default to the standard
    sfepy keywords; the parsed configuration is then handed over to
    `Problem.from_conf()`.
    """
    default_required, default_other = get_standard_keywords()
    required = default_required if required is None else required
    other = default_other if other is None else other
    conf = ProblemConf.from_file(conf_filename, required, other)
    return Problem.from_conf(conf, init_fields=init_fields,
                             init_equations=init_equations,
                             init_solvers=init_solvers)
@staticmethod
def from_conf(conf, init_fields=True, init_equations=True,
              init_solvers=True):
    """Construct a Problem from a ProblemConf instance.

    Builds the domain (FEM mesh or IGA domain), regions and, optionally,
    fields, equations and solvers, as described by `conf`.
    """
    if conf.options.get('absolute_mesh_path', False):
        conf_dir = None
    else:
        # Resolve mesh paths relative to the problem description module.
        conf_dir = op.dirname(conf.funmod.__file__)

    functions = Functions.from_conf(conf.functions)

    if conf.get('filename_mesh') is not None:
        from sfepy.discrete.fem.domain import FEDomain

        mesh = Mesh.from_file(conf.filename_mesh, prefix_dir=conf_dir)
        domain = FEDomain(mesh.name, mesh)

        refine = conf.options.get('refinement_level', 0)
        if refine > 0:
            for ii in range(refine):
                output('refine %d...' % ii)
                domain = domain.refine()
                output('... %d nodes %d elements'
                       % (domain.shape.n_nod, domain.shape.n_el))

        if conf.options.get('ulf', False):
            # Updated Lagrangian formulation: keep the actual coordinates.
            domain.mesh.coors_act = domain.mesh.coors.copy()

        if conf.options.get('mesh_eps') is not None:
            import sfepy.discrete.fem.mesh as msh
            import sfepy.discrete.fem.periodic as per
            msh.set_accuracy(conf.options.mesh_eps)
            per.set_accuracy(conf.options.mesh_eps)

    elif conf.get('filename_domain') is not None:
        from sfepy.discrete.iga.domain import IGDomain
        domain = IGDomain.from_file(conf.filename_domain)

    else:
        raise ValueError('missing filename_mesh or filename_domain!')

    active_only = conf.options.get('active_only', True)

    obj = Problem('problem_from_conf', conf=conf, functions=functions,
                  domain=domain, auto_conf=False,
                  active_only=active_only)

    allow_empty = conf.options.get('allow_empty_regions', False)
    # NOTE(review): obj.functions is passed positionally into
    # set_regions()'s unused `conf_materials` slot; `functions` then falls
    # back to self.functions (the same object) -- confirm intent.
    obj.set_regions(conf.regions, obj.functions,
                    allow_empty=allow_empty)

    obj.clear_equations()

    if init_fields:
        obj.set_fields(conf.fields)

        if init_equations:
            obj.set_equations(conf.equations)

    if init_solvers:
        obj.set_conf_solvers(conf.solvers, conf.options)

    return obj
def __init__(self, name, conf=None, functions=None,
             domain=None, fields=None, equations=None, auto_conf=True,
             active_only=True):
    """Initialize the problem; see the class docstring for the parameters.

    In `auto_conf` mode, `equations` is mandatory and missing `fields`,
    `domain` and `conf` are derived from it.
    """
    self.active_only = active_only
    self.name = name
    self.conf = conf
    self.functions = functions

    self.reset()

    self.ls_conf = self.nls_conf = self.ts_conf = None
    self.conf_variables = self.conf_materials = None

    if auto_conf:
        if equations is None:
            raise ValueError('missing equations in auto_conf mode!')

        if fields is None:
            # Collect the fields of all variables of the equations.
            variables = equations.variables
            fields = {}
            for field in [var.get_field() for var in variables]:
                fields[field.name] = field

        if domain is None:
            # Any field's domain serves as the problem domain.
            domain = list(fields.values())[0].domain

        if conf is None:
            # Minimal configuration so that later conf look-ups succeed.
            self.conf = Struct(options={}, ics={},
                               ebcs={}, epbcs={}, lcbcs={}, materials={})

    self.equations = equations
    self.fields = fields
    self.domain = domain

    self.setup_output()
def reset(self):
    """Reset the problem: re-install hooks from the configuration, drop
    the matrix graph, solver and equations, restore the default time
    stepper and clear the restart file list."""
    options = getattr(self.conf, 'options', None)
    if options is not None:
        self.setup_hooks(options)
    else:
        self.setup_hooks()

    self.mtx_a = None
    self.solver = None
    self.ts = self.get_default_ts()
    self.clear_equations()
    self._restart_filenames = []
def setup_hooks(self, options=None):
    """
    Setup various hooks (user-defined functions), as given in `options`.

    Supported hooks:

    - `matrix_hook`

      - check/modify tangent matrix in each nonlinear solver
        iteration

    - `nls_iter_hook`

      - called prior to every iteration of nonlinear solver, if the
        solver supports that
      - takes the Problem instance (`self`) as the first
        argument
    """
    hook_names = ['nls_iter_hook', 'matrix_hook']
    for hook_name in hook_names:
        # Default every hook to None, then resolve a configured hook name
        # to the actual function via the configuration module.
        setattr(self, hook_name, None)
        if options is not None:
            hook = options.get(hook_name, None)
            if hook is not None:
                hook = self.conf.get_function(hook)
                setattr(self, hook_name, hook)
def copy(self, name=None):
    """
    Make a copy of Problem.

    The new instance shares the conf, functions, domain, fields, equations
    and condition objects with `self`; solvers and output options are set
    up anew from the shared configuration.
    """
    if name is None:
        name = self.name + '_copy'
    obj = self.__class__(name, conf=self.conf, functions=self.functions,
                         domain=self.domain, fields=self.fields,
                         equations=self.equations, auto_conf=False,
                         active_only=self.active_only)

    # Share the currently active conditions as well.
    obj.ebcs = self.ebcs
    obj.epbcs = self.epbcs
    obj.lcbcs = self.lcbcs
    obj.ics = self.ics

    obj.set_conf_solvers(self.conf.solvers, self.conf.options)

    obj.setup_output(output_filename_trunk=self.ofn_trunk,
                     output_dir=self.output_dir,
                     output_format=self.output_format,
                     file_format=self.file_format,
                     file_per_var=self.file_per_var,
                     linearization=self.linearization)

    return obj
def create_subproblem(self, var_names, known_var_names):
    """
    Create a sub-problem with equations containing only terms with the
    given virtual variables.

    Parameters
    ----------
    var_names : list
        The list of names of virtual variables.
    known_var_names : list
        The list of names of (already) known state variables.

    Returns
    -------
    subpb : Problem instance
        The sub-problem.
    """
    # The sub-problem shares conf, functions, domain and fields; only the
    # equations are restricted to the given virtual variables.
    subpb = Problem(self.name + '_' + '_'.join(var_names), conf=self.conf,
                    functions=self.functions, domain=self.domain,
                    fields=self.fields, auto_conf=False,
                    active_only=self.active_only)
    subpb.set_conf_solvers(self.conf.solvers, self.conf.options)

    subeqs = self.equations.create_subequations(var_names,
                                                known_var_names)
    subpb.set_equations_instance(subeqs, keep_solvers=True)

    return subpb
def setup_default_output(self, conf=None, options=None):
    """
    Provide default values to `Problem.setup_output()`
    from `conf.options` and `options`.

    Command-line-style `options` (filename trunk, output format) take
    precedence over the configuration options.
    """
    conf = get_default(conf, self.conf)

    if options and getattr(options, 'output_filename_trunk', None):
        # Split a user-given trunk into directory and base name.
        default_output_dir, of = op.split(options.output_filename_trunk)
        default_trunk = io.get_trunk(of)

    else:
        default_trunk = None
        default_output_dir = conf.options.get('output_dir', None)

    if options and getattr(options, 'output_format', None):
        default_output_format = options.output_format

    else:
        default_output_format = conf.options.get('output_format', None)

    default_file_format = conf.options.get('file_format', None)
    default_file_per_var = conf.options.get('file_per_var', None)
    default_float_format = conf.options.get('float_format', None)
    default_linearization = Struct(kind='strip')

    self.setup_output(output_filename_trunk=default_trunk,
                      output_dir=default_output_dir,
                      output_format=default_output_format,
                      file_format=default_file_format,
                      float_format=default_float_format,
                      file_per_var=default_file_per_var,
                      linearization=default_linearization)
def setup_output(self, output_filename_trunk=None, output_dir=None,
                 output_format=None, file_format=None, float_format=None,
                 file_per_var=None, linearization=None):
    """
    Sets output options to given values, or uses the defaults for
    each argument that is None.
    """
    self.output_modes = {'vtk' : 'sequence',
                         'h5' : 'single', 'h5x' : 'single',
                         'msh' : 'sequence'}

    self.ofn_trunk = get_default(output_filename_trunk,
                                 op.basename(self.domain.name))

    self.set_output_dir(output_dir)

    self.output_format = get_default(output_format, 'vtk')
    self.file_format = file_format
    if self.file_format is not None:
        check_format_suffix(self.file_format, self.output_format)

    self.float_format = get_default(float_format, None)
    self.file_per_var = get_default(file_per_var, False)
    self.linearization = get_default(linearization, Struct(kind='strip'))

    if ((self.output_format == 'h5') and
        (self.linearization.kind == 'adaptive')):
        # NOTE(review): adaptive linearization is disabled for HDF5
        # output -- presumably unsupported there; confirm.
        self.linearization.kind = None
def set_output_dir(self, output_dir=None):
    """
    Set the directory for output files.

    The directory is created if it does not exist.
    """
    self.output_dir = os.curdir if output_dir is None else output_dir
    if self.output_dir and not op.exists(self.output_dir):
        os.makedirs(self.output_dir)
def set_regions(self, conf_regions=None,
                conf_materials=None, functions=None, allow_empty=False):
    """Create domain regions from `conf_regions` (default:
    `self.conf.regions`) using `functions` (default: `self.functions`).

    NOTE(review): `conf_materials` is accepted but never used; some
    callers pass functions positionally into this slot and rely on the
    `functions=None` fallback -- do not remove it without auditing
    callers.
    """
    if conf_regions is None:
        conf_regions = self.conf.regions
    if functions is None:
        functions = self.functions

    self.domain.create_regions(conf_regions, functions,
                               allow_empty=allow_empty)
def set_materials(self, conf_materials=None):
    """
    Set definition of materials.

    A None argument falls back to `self.conf.materials`.
    """
    self.conf_materials = (self.conf.materials if conf_materials is None
                           else conf_materials)
def select_materials(self, material_names, only_conf=False):
    """Select material definitions by name, or transform a dict of raw
    definitions; unless `only_conf` is set, also install the selection
    via `set_materials()`.

    Uses ``isinstance`` instead of ``type(...) == dict`` so that dict
    subclasses (e.g. OrderedDict) are handled too.
    """
    if isinstance(material_names, dict):
        conf_materials = transform_materials(material_names)

    else:
        conf_materials = select_by_names(self.conf.materials, material_names)

    if not only_conf:
        self.set_materials(conf_materials)

    return conf_materials
def set_fields(self, conf_fields=None):
    # Build Field instances on the domain regions from the configuration
    # (default: `self.conf.fields`).
    conf_fields = get_default(conf_fields, self.conf.fields)
    self.fields = fields_from_conf(conf_fields, self.domain.regions)
def set_variables(self, conf_variables=None):
    """
    Set definition of variables.

    A None argument falls back to `self.conf.variables`; the problem is
    then reset so the new definition takes effect.
    """
    self.conf_variables = (self.conf.variables if conf_variables is None
                           else conf_variables)
    self.reset()
def select_variables(self, variable_names, only_conf=False):
    """Select variable definitions by name, or transform a dict of raw
    definitions; unless `only_conf` is set, also install the selection
    via `set_variables()`.

    Uses ``isinstance`` instead of ``type(...) == dict`` so that dict
    subclasses (e.g. OrderedDict) are handled too, mirroring
    `select_materials()`.
    """
    if isinstance(variable_names, dict):
        conf_variables = transform_variables(variable_names)

    else:
        conf_variables = select_by_names(self.conf.variables, variable_names)

    if not only_conf:
        self.set_variables(conf_variables)

    return conf_variables
def clear_equations(self):
    """Drop the equations and all related data (integrals, boundary and
    initial conditions)."""
    for attr in ('integrals', 'equations', 'ebcs', 'epbcs', 'lcbcs', 'ics'):
        setattr(self, attr, None)
def set_equations(self, conf_equations=None, user=None,
                  keep_solvers=False, make_virtual=False):
    """
    Set equations of the problem using the `equations` problem
    description entry.

    Fields and Regions have to be already set.

    NOTE(review): `make_virtual` is currently unused here -- confirm
    before removing it from the signature.
    """
    conf_equations = get_default(conf_equations,
                                 self.conf.get('equations', None))

    self.set_variables(self.conf_variables)
    variables = Variables.from_conf(self.conf_variables, self.fields)

    self.set_materials(self.conf_materials)
    materials = Materials.from_conf(self.conf_materials, self.functions)

    self.integrals = self.get_integrals()

    # User namespace for equation evaluation: the configuration module's
    # contents, optionally overridden by the `user` argument.
    # NOTE(review): vars() returns conf.__dict__ itself, so update()
    # mutates the configuration object -- confirm this is intended.
    default_user = vars(self.conf)
    if user is not None:
        default_user.update(user)
    user = default_user

    eterm_options = self.conf.options.get('eterm', {})
    equations = Equations.from_conf(conf_equations, variables,
                                    self.domain.regions,
                                    materials, self.integrals,
                                    user=user,
                                    eterm_options=eterm_options)

    self.equations = equations

    if not keep_solvers:
        self.solver = None
def set_equations_instance(self, equations, keep_solvers=False):
    """
    Set equations of the problem to `equations`.
    """
    # Invalidate the matrix graph and any previously set equations first.
    self.mtx_a = None
    self.clear_equations()
    self.equations = equations

    if keep_solvers:
        return
    self.solver = None
def get_integrals(self, names=None):
    """
    Get integrals, initialized from problem configuration if available.

    Parameters
    ----------
    names : list, optional
        If given, only the named integrals are returned.

    Returns
    -------
    integrals : Integrals instance
        The requested integrals.
    """
    conf_integrals = self.conf.get('integrals', {})
    integrals = Integrals.from_conf(conf_integrals)

    if names is not None:
        # NOTE(review): this updates the container with the named subset
        # rather than visibly discarding unnamed integrals -- confirm the
        # Integrals container semantics match the docstring.
        integrals.update([integrals[ii] for ii in names
                          if ii in integrals.names])

    return integrals
def update_materials(self, ts=None, mode='normal', verbose=True):
    """
    Update materials used in equations.

    Parameters
    ----------
    ts : TimeStepper instance
        The time stepper.
    mode : 'normal', 'update' or 'force'
        The update mode, see :func:`Material.time_update()
        <sfepy.discrete.materials.Material.time_update()>`.
    verbose : bool
        If False, reduce verbosity.
    """
    # No-op until equations are set.
    if self.equations is not None:
        self.update_time_stepper(ts)
        self.equations.time_update_materials(self.ts, mode=mode,
                                             problem=self, verbose=verbose)
def update_equations(self, ts=None, ebcs=None, epbcs=None,
                     lcbcs=None, functions=None, create_matrix=False,
                     is_matrix=True):
    """
    Update equations for current time step.

    The tangent matrix graph is automatically recomputed if the set
    of active essential or periodic boundary conditions changed
    w.r.t. the previous time step.

    Parameters
    ----------
    ts : TimeStepper instance, optional
        The time stepper. If not given, `self.ts` is used.
    ebcs : Conditions instance, optional
        The essential (Dirichlet) boundary conditions. If not given,
        `self.ebcs` are used.
    epbcs : Conditions instance, optional
        The periodic boundary conditions. If not given, `self.epbcs`
        are used.
    lcbcs : Conditions instance, optional
        The linear combination boundary conditions. If not given,
        `self.lcbcs` are used.
    functions : Functions instance, optional
        The user functions for boundary conditions, materials,
        etc. If not given, `self.functions` are used.
    create_matrix : bool
        If True, force the matrix graph computation.
    is_matrix : bool
        If False, the matrix is not created. Has precedence over
        `create_matrix`.
    """
    self.update_time_stepper(ts)
    functions = get_default(functions, self.functions)

    ac = self.active_only
    graph_changed = self.equations.time_update(
        self.ts,
        ebcs, epbcs, lcbcs,
        functions, self,
        active_only=ac,
        verbose=self.conf.get('verbose', True))
    self.graph_changed = graph_changed

    # Rebuild the matrix graph when forced, when no graph exists yet, or
    # when the active BC set changed (relevant in active-DOFs-only mode).
    if (is_matrix
        and ((self.active_only and graph_changed)
             or (self.mtx_a is None) or create_matrix)):
        self.mtx_a = self.equations.create_matrix_graph(active_only=ac)
        ## import sfepy.base.plotutils as plu
        ## plu.spy(self.mtx_a)
        ## plu.plt.show()
def set_bcs(self, ebcs=None, epbcs=None, lcbcs=None):
    """
    Update boundary conditions.

    Each argument can be a pre-built Conditions instance (used as-is) or a
    configuration (default: the corresponding `self.conf` entry). DG
    boundary conditions ('dgebcs', 'dgepbcs') from the configuration are
    appended to the respective condition sets.
    """
    if isinstance(ebcs, Conditions):
        self.ebcs = ebcs
    else:
        conf_ebc = get_default(ebcs, self.conf.ebcs)
        self.ebcs = Conditions.from_conf(conf_ebc, self.domain.regions)
        conf_dgebc = self.conf.get("dgebcs", {})
        self.ebcs.extend(Conditions.from_conf(conf_dgebc,
                                              self.domain.regions))

    if isinstance(epbcs, Conditions):
        self.epbcs = epbcs
    else:
        conf_epbc = get_default(epbcs, self.conf.epbcs)
        self.epbcs = Conditions.from_conf(conf_epbc, self.domain.regions)
        conf_dgepbc = self.conf.get("dgepbcs", {})
        # BUG FIX: the DG periodic conditions were previously extended
        # onto self.ebcs; they belong to the periodic set.
        self.epbcs.extend(Conditions.from_conf(conf_dgepbc,
                                               self.domain.regions))

    if isinstance(lcbcs, Conditions):
        self.lcbcs = lcbcs
    else:
        conf_lcbc = get_default(lcbcs, self.conf.lcbcs)
        self.lcbcs = Conditions.from_conf(conf_lcbc, self.domain.regions)
def time_update(self, ts=None,
                ebcs=None, epbcs=None, lcbcs=None,
                functions=None, create_matrix=False, is_matrix=True):
    """Refresh boundary conditions and update the equations for the
    current time step; arguments left as None fall back to the values
    already stored on the problem instance."""
    new_ebcs = self.ebcs if ebcs is None else ebcs
    new_epbcs = self.epbcs if epbcs is None else epbcs
    new_lcbcs = self.lcbcs if lcbcs is None else lcbcs
    self.set_bcs(new_ebcs, new_epbcs, new_lcbcs)

    self.update_equations(ts, self.ebcs, self.epbcs, self.lcbcs,
                          functions, create_matrix, is_matrix)
def set_ics(self, ics=None):
    """
    Set the initial conditions to use.

    `ics` can be a pre-built Conditions instance (used as-is) or a
    configuration (default: `self.conf.ics`).
    """
    if isinstance(ics, Conditions):
        self.ics = ics

    else:
        conf_ics = get_default(ics, self.conf.ics)
        self.ics = Conditions.from_conf(conf_ics, self.domain.regions)
def setup_ics(self, ics=None, functions=None):
    """
    Setup the initial conditions for use.

    Installs `ics` (default: the already-set `self.ics`) and applies them
    to the equations with the given `functions` (default:
    `self.functions`).
    """
    self.set_ics(self.ics if ics is None else ics)

    if functions is None:
        functions = self.functions
    self.equations.setup_initial_conditions(self.ics, functions)
def select_bcs(self, ebc_names=None, epbc_names=None,
               lcbc_names=None, create_matrix=False):
    """Activate only the named boundary conditions and update equations.

    A None name list means that all conditions of that kind from the
    configuration are used (via the fallback in `set_bcs()`).
    """
    if ebc_names is not None:
        conf_ebc = select_by_names(self.conf.ebcs, ebc_names)
    else:
        conf_ebc = None

    if epbc_names is not None:
        conf_epbc = select_by_names(self.conf.epbcs, epbc_names)
    else:
        conf_epbc = None

    if lcbc_names is not None:
        conf_lcbc = select_by_names(self.conf.lcbcs, lcbc_names)
    else:
        conf_lcbc = None

    self.set_bcs(conf_ebc, conf_epbc, conf_lcbc)
    self.update_equations(self.ts, self.ebcs, self.epbcs, self.lcbcs,
                          self.functions, create_matrix)
def create_state(self):
    """Return a new State built on the problem's variables."""
    variables = self.equations.variables
    return State(variables)
def get_mesh_coors(self, actual=False):
    """Return the mesh coordinates (the actual configuration ones when
    `actual` is True, else the undeformed ones)."""
    domain = self.domain
    return domain.get_mesh_coors(actual=actual)
def set_mesh_coors(self, coors, update_fields=False, actual=False,
                   clear_all=True, extra_dofs=False):
    """
    Set mesh coordinates.

    Parameters
    ----------
    coors : array
        The new coordinates.
    update_fields : bool
        If True, update also coordinates of fields.
    actual : bool
        If True, update the actual configuration coordinates,
        otherwise the undeformed configuration ones.
    clear_all : bool
        Passed through to the module-level ``set_mesh_coors()``.
    extra_dofs : bool
        Passed through to the module-level ``set_mesh_coors()``.
    """
    # Delegates to sfepy.discrete.fem.fields_base.set_mesh_coors().
    set_mesh_coors(self.domain, self.fields, coors,
                   update_fields=update_fields, actual=actual,
                   clear_all=clear_all, extra_dofs=extra_dofs)
def refine_uniformly(self, level):
    """
    Refine the mesh uniformly `level`-times.

    Notes
    -----
    This operation resets almost everything (fields, equations, ...)
    - it is roughly equivalent to creating a new Problem
    instance with the refined mesh.
    """
    if level == 0: return

    domain = self.domain
    for ii in range(level):
        domain = domain.refine()

    self.domain = domain
    # NOTE(review): self.functions is passed positionally into
    # set_regions()'s unused `conf_materials` slot; `functions` then
    # falls back to self.functions (the same object) -- confirm intent.
    self.set_regions(self.conf.regions, self.functions)
    self.clear_equations()

    self.set_fields(self.conf.fields)
    self.set_equations(self.conf.equations, user={'ts' : self.ts})
def get_dim(self, get_sym=False):
    """Return the mesh dimension; with `get_sym` set, also return the
    corresponding symmetric-tensor dimension dim * (dim + 1) / 2."""
    dim = self.domain.mesh.dim
    if not get_sym:
        return dim
    sym = dim * (dim + 1) // 2
    return dim, sym
def init_time(self, ts):
    """Initialize the time stepper and the equations for a new time loop;
    also clears the recorded restart file names."""
    self._restart_filenames = []
    self.update_time_stepper(ts)
    self.equations.init_time(ts)
def advance(self, ts=None):
    """Advance the equations to the next time step (the time stepper is
    updated from `ts` first, when given)."""
    self.update_time_stepper(ts)
    self.equations.advance(self.ts)
def save_state(self, filename, state=None, out=None,
               fill_value=None, post_process_hook=None,
               linearization=None, file_per_var=False, **kwargs):
    """
    Save a state (or a pre-built output dictionary) to file(s).

    Parameters
    ----------
    file_per_var : bool or None
        If True, data of each variable are stored in a separate
        file. If None, it is set to the application option value.
    linearization : Struct or None
        The linearization configuration for higher order
        approximations. If its kind is 'adaptive', `file_per_var` is
        assumed True.
    """
    linearization = get_default(linearization, self.linearization)
    if linearization.kind != 'adaptive':
        file_per_var = get_default(file_per_var, self.file_per_var)
    else:
        file_per_var = True

    extend = not file_per_var
    if (out is None) and (state is not None):
        # Build the output dictionary from the state on demand.
        out = state.create_output_dict(fill_value=fill_value,
                                       extend=extend,
                                       linearization=linearization)

        if post_process_hook is not None:
            out = post_process_hook(out, self, state, extend=extend)

    if linearization.kind == 'adaptive':
        # One file per output entry, each with its own (refined) mesh.
        for key, val in six.iteritems(out):
            mesh = val.get('mesh', self.domain.mesh)
            aux = io.edit_filename(filename, suffix='_' + val.var_name)
            mesh.write(aux, io='auto', out={key : val},
                       float_format=self.float_format, **kwargs)
            if hasattr(val, 'levels'):
                output('max. refinement per group:', val.levels)

    elif file_per_var:
        # One file per variable, on a mesh localized to its region
        # (meshes are cached per region name).
        meshes = {}

        if self.equations is None:
            # No equations: reconstruct variables from the output entries.
            varnames = {}
            for key, val in six.iteritems(out):
                varnames[val.var_name] = 1
            varnames = list(varnames.keys())
            outvars = self.create_variables(varnames)
            itervars = outvars.__iter__

        else:
            itervars = self.equations.variables.iter_state

        for var in itervars():
            rname = var.field.region.name
            if rname in meshes:
                mesh = meshes[rname]

            else:
                mesh = Mesh.from_region(var.field.region, self.domain.mesh,
                                        localize=True,
                                        is_surface=var.is_surface)
                meshes[rname] = mesh

            # Collect only the output entries belonging to this variable.
            vout = {}
            for key, val in six.iteritems(out):
                try:
                    if val.var_name == var.name:
                        vout[key] = val

                except AttributeError:
                    msg = 'missing var_name attribute in output!'
                    raise ValueError(msg)

            aux = io.edit_filename(filename, suffix='_' + var.name)
            mesh.write(aux, io='auto', out=vout,
                       float_format=self.float_format, **kwargs)

    else:
        # Single file; an explicit '__mesh__' entry overrides the domain
        # mesh.
        mesh = out.pop('__mesh__', self.domain.mesh)
        mesh.write(filename, io='auto', out=out,
                   float_format=self.float_format, **kwargs)
def save_ebc(self, filename, ebcs=None, epbcs=None,
             force=True, default=0.0):
    """
    Save essential boundary conditions as state variables.

    Parameters
    ----------
    filename : str
        The output file name.
    ebcs : Conditions instance, optional
        The essential (Dirichlet) boundary conditions. If not given,
        `self.conf.ebcs` are used.
    epbcs : Conditions instance, optional
        The periodic boundary conditions. If not given, `self.conf.epbcs`
        are used.
    force : bool
        If True, sequential nonzero values are forced to individual `ebcs`
        so that the conditions are visible even when zero.
    default : float
        The default constant value of state vector.
    """
    output('saving ebc...')
    variables = self.get_variables(auto_create=True)

    if ebcs is None:
        ebcs = Conditions.from_conf(self.conf.ebcs, self.domain.regions)

    if epbcs is None:
        epbcs = Conditions.from_conf(self.conf.epbcs, self.domain.regions)

    try:
        variables.equation_mapping(ebcs, epbcs, self.ts, self.functions,
                                   problem=self)
    except:
        # Broad catch is tolerable here: the failure is reported and the
        # exception is always re-raised.
        output('cannot make equation mapping!')
        raise

    state = State(variables)
    state.fill(default)

    if force:
        # Give each state variable a distinct nonzero marker so that
        # zero-valued conditions remain visible in the output.
        vals = dict_from_keys_init(variables.state)
        for ii, key in enumerate(six.iterkeys(vals)):
            vals[key] = ii + 1

        state.apply_ebc(force_values=vals)

    else:
        state.apply_ebc()

    out = state.create_output_dict(extend=True)
    self.save_state(filename, out=out, fill_value=default)
    output('...done')
def save_regions(self, filename_trunk, region_names=None):
    """
    Save regions as meshes.

    Parameters
    ----------
    filename_trunk : str
        The output filename without suffix.
    region_names : list, optional
        If given, only the listed regions are saved.
    """
    filename = filename_trunk + '.mesh'
    self.domain.save_regions(filename, region_names=region_names)
def save_regions_as_groups(self, filename_trunk, region_names=None):
    """
    Save regions in a single mesh but mark them by using different
    element/node group numbers.

    See :func:`Domain.save_regions_as_groups()
    <sfepy.discrete.fem.domain.Domain.save_regions_as_groups()>` for more
    details.

    Parameters
    ----------
    filename_trunk : str
        The output filename without suffix.
    region_names : list, optional
        If given, only the listed regions are saved.
    """
    # The suffix follows the configured output format (e.g. 'vtk').
    filename = '{}.{}'.format(filename_trunk, self.output_format)
    self.domain.save_regions_as_groups(filename,
                                       region_names=region_names)
def save_field_meshes(self, filename_trunk):
    """Write the mesh of every field; each field formats the
    '<trunk>_%s' template with its own identifier."""
    output('saving field meshes...')
    for field in self.fields:
        output(field.name)
        field.write_mesh(filename_trunk + '_%s')
    output('...done')
def get_evaluator(self, reuse=False):
    """
    Either create a new Evaluator instance (reuse == False),
    or return an existing instance, created in a preceding call to
    Problem.init_solvers().
    """
    if not reuse:
        # A user-supplied evaluator class from the options wins over the
        # default Evaluator.
        user_evaluator = self.conf.options.get('user_evaluator', None)
        eval_class = Evaluator if user_evaluator is None else user_evaluator
        self.evaluator = eval_class(self, matrix_hook=self.matrix_hook)
        return self.evaluator

    try:
        return self.evaluator
    except AttributeError:
        raise AttributeError('call Problem.init_solvers() or'
                             ' set reuse to False!')
def get_ebc_indices(self):
    """
    Get indices of E(P)BC-constrained DOFs in the full global state vector.

    Returns
    -------
    ebc_indx : ndarray
        Indices of the EBC-constrained DOFs.
    epbc_indx : ndarray
        Array of (master, slave) index pairs of the periodic DOFs,
        concatenated along axis 1.
    """
    variables = self.get_variables()

    ebc_indx = []
    epbc_indx = []
    for ii, variable in enumerate(variables.iter_state(ordered=True)):
        eq_map = variable.eq_map
        # Shift the per-variable DOF indices by the variable's offset
        # (variables.di.ptr) into the global state vector.
        ebc_indx.append(eq_map.eq_ebc + variables.di.ptr[ii])
        epbc_indx.append((eq_map.master + variables.di.ptr[ii],
                          eq_map.slave + variables.di.ptr[ii]))

    ebc_indx = nm.concatenate(ebc_indx)
    epbc_indx = nm.concatenate(epbc_indx, axis=1)

    return ebc_indx, epbc_indx
def set_conf_solvers(self, conf_solvers=None, options=None):
    """
    Choose which solvers should be used. If solvers are not set in
    `options`, use the ones named `ls`, `nls` or `ts`. If such solver names
    do not exist, use the first of each required solver kind listed in
    `conf_solvers`.
    """
    conf_solvers = get_default(conf_solvers, self.conf.solvers)
    self.solver_confs = {}
    for key, val in six.iteritems(conf_solvers):
        self.solver_confs[val.name] = val

    def _find_suitable(prefix):
        # Prefer a solver conventionally named after its kind (e.g. 'ls'
        # for 'ls.*' kinds); otherwise remember candidates in order.
        cands = []
        for key, val in six.iteritems(self.solver_confs):
            # startswith() replaces the equivalent `find(prefix) == 0`.
            if val.kind.startswith(prefix):
                if val.name == prefix[:-1]:
                    return val
                else:
                    cands.append(val)
        if len(cands) > 0:
            return cands[0]
        else:
            return None

    def _get_solver_conf(kind):
        try:
            key = options[kind]
            if key is None:
                conf = None
            else:
                conf = self.solver_confs[key]
        except Exception:
            # `options` may be None, not indexable, or lack the key; fall
            # back to kind-based lookup. (Was a bare `except:`, which
            # also swallowed KeyboardInterrupt/SystemExit.)
            conf = _find_suitable(kind + '.')
        return conf

    self.ts_conf = _get_solver_conf('ts')
    if self.ts_conf is None:
        # Default to a stationary (single step) time stepping.
        self.ts_conf = Struct(name='no ts', kind='ts.stationary')

    self.nls_conf = _get_solver_conf('nls')
    self.ls_conf = _get_solver_conf('ls')

    info = 'using solvers:'
    if self.ts_conf:
        info += '\n ts: %s' % self.ts_conf.name
    if self.nls_conf:
        info += '\n nls: %s' % self.nls_conf.name
    if self.ls_conf:
        info += '\n ls: %s' % self.ls_conf.name
    if info != 'using solvers:':
        output(info)
def get_solver_conf(self, name):
    """Return the solver configuration registered under `name`."""
    confs = self.solver_confs
    return confs[name]
def init_solvers(self, status=None, ls_conf=None, nls_conf=None,
                 ts_conf=None, force=False):
    """
    Create and initialize solver instances.

    Parameters
    ----------
    status : dict-like, IndexedStruct, optional
        The user-supplied object to hold the time-stepping/nonlinear solver
        convergence statistics.
    ls_conf : Struct, optional
        The linear solver options.
    nls_conf : Struct, optional
        The nonlinear solver options.
    ts_conf : Struct, optional
        The time-stepping solver options. If neither it nor
        `self.ts_conf` is set, only the nonlinear solver is installed.
    force : bool
        If True, re-create the solver instances even if they already exist
        in the `self.solver` attribute.
    """
    if (self.solver is None) or force:
        ls_conf = get_default(ls_conf, self.ls_conf,
                              'you must set linear solver!')
        nls_conf = get_default(nls_conf, self.nls_conf,
                               'you must set nonlinear solver!')

        # Collect the linear solver with its chain of fallbacks; the
        # range(100) bound prevents an infinite fallback loop.
        fb_list = []
        for ii in range(100):
            fb_list.append((ls_conf.kind, ls_conf))
            if hasattr(ls_conf, 'fallback'):
                ls_conf = self.solver_confs[ls_conf.fallback]
            else:
                break

        if len(fb_list) > 1:
            ls = use_first_available(fb_list, context=self)
        else:
            ls = Solver.any_from_conf(ls_conf, context=self)

        ev = self.get_evaluator()

        if self.conf.options.get('ulf', False):
            # Updated Lagrangian formulation needs its iteration hook.
            self.nls_iter_hook = ev.new_ulf_iteration

        if status is None:
            status = IndexedStruct()
        status.set_default('nls_status', IndexedStruct())

        nls = Solver.any_from_conf(nls_conf, fun=ev.eval_residual,
                                   fun_grad=ev.eval_tangent_matrix,
                                   lin_solver=ls,
                                   iter_hook=self.nls_iter_hook,
                                   status=status.nls_status, context=self)

        ts_conf = get_default(ts_conf, self.ts_conf)
        if ts_conf is None:
            self.set_solver(nls, status=status)
        else:
            tss = Solver.any_from_conf(ts_conf, nls=nls, context=self,
                                       status=status)
            self.set_solver(tss)
def get_default_ts(self, t0=None, t1=None, dt=None, n_step=None,
                   step=None):
    """Build a default TimeStepper spanning [0.0, 1.0] in a single step;
    each default can be overridden by the corresponding argument."""
    t0 = get_default(t0, 0.0)
    t1 = get_default(t1, 1.0)
    dt = get_default(dt, 1.0)
    n_step = get_default(n_step, 1)
    return TimeStepper(t0, t1, dt, n_step, step=step)
def update_time_stepper(self, ts):
    """Replace `self.ts` with `ts`; a None argument keeps the current one."""
    if ts is None:
        return
    self.ts = ts
def get_timestepper(self):
    """Return the problem's current time stepper (`self.ts`)."""
    return self.ts
def set_solver(self, solver, status=None):
    """
    Set a time-stepping or nonlinear solver to be used in
    :func:`Problem.solve()` call.

    Parameters
    ----------
    solver : NonlinearSolver or TimeSteppingSolver instance
        The nonlinear or time-stepping solver.
    status : dict-like, optional
        The object to hold the solver convergence statistics of a default
        StationarySolver created for a plain nonlinear solver.

    Notes
    -----
    A copy of the solver is used, and the nonlinear solver functions are
    set to those returned by :func:`Problem.get_nls_functions()`, if not
    set already. If a nonlinear solver is set, a default StationarySolver
    instance is created automatically as the time-stepping solver. Also
    sets `self.ts` attribute.
    """
    if isinstance(solver, NonlinearSolver):
        # Wrap a bare nonlinear solver in a stationary time-stepper.
        solver = StationarySolver({}, nls=solver.copy(),
                                  ts=self.get_default_ts(),
                                  status=status)

    self.solver = solver.copy()
    self.ts = solver.ts
    self.status = get_default(solver.status, IndexedStruct())

    # Assign the nonlinear solver functions only if not set already.
    nls = self.get_nls()
    if nls.fun is None:
        # Fixed misspelled local name (was `fun_grag`).
        fun, fun_grad, iter_hook = self.get_nls_functions()
        nls.fun = fun
        nls.fun_grad = fun_grad
        nls.iter_hook = iter_hook
def try_presolve(self, mtx):
    """Run the linear solver's presolve phase on the matrix `mtx` and
    print the elapsed wall-clock time."""
    ls = self.get_ls()

    timer = Timer(start=True)
    ls.presolve(mtx)
    tt = timer.stop()
    output('presolve: %.2f [s]' % tt)
def get_solver(self):
    """Return the top-level solver; alias of :func:`Problem.get_tss()`."""
    return self.get_tss()
def get_tss(self):
    """Return the top-level time-stepping solver, raising an error with
    'solver is not set!' if `self.solver` is None."""
    tss = get_default(None, self.solver, 'solver is not set!')
    return tss
def get_tss_functions(self, state0, update_bcs=True, update_materials=True,
                      save_results=True,
                      step_hook=None, post_process_hook=None):
    """
    Get the problem-dependent functions required by the time-stepping
    solver during the solution process.

    Parameters
    ----------
    state0 : State
        The state holding the problem variables.
    update_bcs : bool, optional
        If True, update the boundary conditions in each `prestep_fun` call.
    update_materials : bool, optional
        If True, update the values of material parameters in each
        `prestep_fun` call.
    save_results : bool, optional
        If True, save the results in each `poststep_fun` call.
    step_hook : callable, optional
        The optional user-defined function that is called in each
        `poststep_fun` call before saving the results.
    post_process_hook : callable, optional
        The optional user-defined function that is passed in each
        `poststep_fun` to :func:`Problem.save_state()`.

    Returns
    -------
    init_fun : callable
        The initialization function called before the actual time-stepping.
    prestep_fun : callable
        The function called in each time (sub-)step prior to the nonlinear
        solver call.
    poststep_fun : callable
        The function called at the end of each time step.
    """
    # Predicate deciding in which steps results are saved.
    is_save = make_is_save(self.conf.options)

    def init_fun(ts, vec0):
        # Called once before time-stepping; may replace vec0 on restart.
        if not ts.is_quasistatic:
            self.init_time(ts)

        is_save.reset(ts)

        restart_filename = self.conf.options.get('load_restart', None)
        if restart_filename is not None:
            # Resume from a restart file: reload state0/ts in place and
            # rebuild the initial DOF vector from a fresh state.
            self.load_restart(restart_filename, state=state0, ts=ts)
            self.advance(ts)
            ts.advance()

            state = self.create_state()
            vec0 = state.get_vec(self.active_only)

        return vec0

    def prestep_fun(ts, vec):
        # Called before each nonlinear solve.
        if update_bcs:
            self.time_update(ts)

            state = state0.copy()
            state.set_vec(vec, self.active_only)
            state.apply_ebc()

        if update_materials:
            self.update_materials(verbose=self.conf.get('verbose', True))

    def poststep_fun(ts, vec):
        # Called after each time step: hooks, restart and result saving.
        state = state0.copy(preserve_caches=True)
        state.set_vec(vec, self.active_only)
        if step_hook is not None:
            step_hook(self, ts, state)

        restart_filename = self.get_restart_filename(ts=ts)
        if restart_filename is not None:
            self.save_restart(restart_filename, state, ts=ts)

        if save_results and is_save(ts):
            # Stationary solves get no step suffix in the file name.
            if not isinstance(self.get_solver(), StationarySolver):
                suffix = ts.suffix % ts.step
            else:
                suffix = None
            filename = self.get_output_name(suffix=suffix)
            self.save_state(filename, state,
                            post_process_hook=post_process_hook,
                            file_per_var=None,
                            ts=ts,
                            file_format=self.file_format)

        self.advance(ts)

    return init_fun, prestep_fun, poststep_fun
def get_nls_functions(self):
    """
    Returns functions to be used by a nonlinear solver to evaluate the
    nonlinear function value (the residual) and its gradient (the tangent
    matrix) corresponding to the problem equations.

    Returns
    -------
    fun : function
        The function ``fun(x)`` for computing the residual.
    fun_grad : function
        The function ``fun_grad(x)`` for computing the tangent matrix.
    iter_hook : function
        The optional (user-defined) function to be called before each
        nonlinear solver iteration.
    """
    # Both functions are bound methods of the problem's evaluator.
    ev = self.get_evaluator()
    return ev.eval_residual, ev.eval_tangent_matrix, self.nls_iter_hook
def get_nls(self):
    """Return the nonlinear solver attached to the top-level solver."""
    return self.get_tss().nls
def get_ls(self):
    """Return the linear solver used by the nonlinear solver."""
    return self.get_nls().lin_solver
def is_linear(self):
    """Report whether the nonlinear solver is configured as linear
    (its configuration flag 'is_linear', defaulting to False)."""
    return self.get_nls().conf.get('is_linear', False)
def set_linear(self, is_linear):
    """Set the 'is_linear' flag on the nonlinear solver configuration."""
    self.get_nls().conf.is_linear = is_linear
def get_initial_state(self):
    """
    Create a zero state vector and apply initial conditions.
    """
    state = self.create_state()
    self.setup_ics()
    state.apply_ic()

    # Initialize variables with history.
    state.init_history()

    return state
def solve(self, state0=None, status=None, force_values=None,
          var_data=None, update_bcs=True, update_materials=True,
          save_results=True,
          step_hook=None, post_process_hook=None,
          post_process_hook_final=None, verbose=True):
    """
    Solve the problem equations by calling the top-level solver.

    Before calling this function the top-level solver has to be set, see
    :func:`Problem.set_solver()`. Also, the boundary conditions and the
    initial conditions (for time-dependent problems) has to be set, see
    :func:`Problem.set_bcs()`, :func:`Problem.set_ics()`.

    Parameters
    ----------
    state0 : State or array, optional
        If given, the initial state satisfying the initial conditions. By
        default, it is created and the initial conditions are applied
        automatically.
    status : dict-like, optional
        The user-supplied object to hold the solver convergence statistics.
    force_values : dict of floats or float, optional
        If given, the supplied values override the values of the essential
        boundary conditions.
    var_data : dict, optional
        A dictionary of {variable_name : data vector} used to initialize
        parameter variables.
    update_bcs : bool, optional
        If True, update the boundary conditions in each `prestep_fun` call.
        See :func:`Problem.get_tss_functions()`.
    update_materials : bool, optional
        If True, update the values of material parameters in each
        `prestep_fun` call. See :func:`Problem.get_tss_functions()`.
    save_results : bool, optional
        If True, save the results in each `poststep_fun` call. See
        :func:`Problem.get_tss_functions()`.
    step_hook : callable, optional
        The optional user-defined function that is called in each
        `poststep_fun` call before saving the results. See
        :func:`Problem.get_tss_functions()`.
    post_process_hook : callable, optional
        The optional user-defined function that is passed in each
        `poststep_fun` to :func:`Problem.save_state()`. See
        :func:`Problem.get_tss_functions()`.
    post_process_hook_final : callable, optional
        The optional user-defined function that is called after the
        top-level solver returns.
    verbose : bool, optional
        If False, reduce the verbosity of the solver output.

    Returns
    -------
    state : State
        The final state.
    """
    if status is None:
        status = IndexedStruct()

    # Create the solvers lazily on first use.
    if self.solver is None:
        self.init_solvers(status=status)

    tss = self.get_solver()

    self.equations.set_data(var_data, ignore_unknown=True)

    if state0 is None:
        state0 = self.get_initial_state()
    else:
        # Accept a raw DOF vector and wrap it into a State.
        if isinstance(state0, nm.ndarray):
            state0 = State(self.equations.variables, vec=state0)

    if self.conf.options.get('block_solve', False):
        # Solve the block-triangular sub-problems one by one.
        state = self.block_solve(state0, status=status,
                                 save_results=save_results,
                                 step_hook=step_hook,
                                 post_process_hook=post_process_hook,
                                 verbose=verbose)
    else:
        self.time_update(tss.ts)
        state0.apply_ebc(force_values=force_values)

        if self.is_linear():
            mtx = prepare_matrix(self, state0)  # Updates materials.
            self.try_presolve(mtx)

        init_fun, prestep_fun, poststep_fun = self.get_tss_functions(
            state0,
            update_bcs=update_bcs, update_materials=update_materials,
            save_results=save_results,
            step_hook=step_hook, post_process_hook=post_process_hook)

        # Run the time-stepping solver on the (active) DOF vector.
        vec = tss(state0.get_vec(self.active_only),
                  init_fun=init_fun,
                  prestep_fun=prestep_fun,
                  poststep_fun=poststep_fun,
                  status=status)

        output('solved in %d steps in %.2f seconds'
               % (status['n_step'], status['time']), verbose=verbose)

        state = state0.copy()
        state.set_vec(vec, self.active_only)

    if post_process_hook_final is not None:  # User postprocessing.
        post_process_hook_final(self, state)

    return state
def block_solve(self, state0=None, status=None, save_results=True,
                step_hook=None, post_process_hook=None,
                verbose=True):
    """
    Call :func:`Problem.solve()` sequentially for the individual matrix
    blocks of a block-triangular matrix. It is called by
    :func:`Problem.solve()` if the `'block_solve'` option is set to True.
    """
    from sfepy.base.base import invert_dict, get_subdict
    from sfepy.base.resolve_deps import resolve

    if not isinstance(self.get_solver(), StationarySolver):
        msg = 'The block solve can be used only for stationary problems!'
        raise ValueError(msg)

    def replace_virtuals(deps, pairs):
        # Rename dependency keys from virtual to state variable names.
        out = {}
        for key, val in six.iteritems(deps):
            out[pairs[key]] = val
        return out

    if state0 is None:
        state0 = self.get_initial_state()

    # Determine the solve order of variable blocks from their dependencies.
    variables = self.get_variables()
    vtos = variables.get_dual_names()
    vdeps = self.equations.get_variable_dependencies()
    sdeps = replace_virtuals(vdeps, vtos)
    sorder = resolve(sdeps)

    stov = invert_dict(vtos)
    vorder = [[stov[ii] for ii in block] for block in sorder]

    parts0 = state0.get_parts()
    state = state0.copy()
    solved = []
    for ib, block in enumerate(vorder):
        output('solving for %s...' % sorder[ib], verbose=verbose)

        # Build and solve a sub-problem for this block, with the already
        # solved blocks acting as known data.
        subpb = self.create_subproblem(block, solved)
        subpb.conf.options.block_solve = False
        subpb.equations.print_terms()

        substate0 = subpb.create_state()
        vals = get_subdict(parts0, block)
        substate0.set_parts(vals)

        substate = subpb.solve(state0=substate0, status=status,
                               save_results=False, step_hook=step_hook,
                               post_process_hook=post_process_hook,
                               verbose=verbose)

        state.set_parts(substate.get_parts())
        solved.extend(sorder[ib])
        output('...done', verbose=verbose)

    if step_hook is not None:
        step_hook(self, None, state)

    if save_results:
        self.save_state(self.get_output_name(), state,
                        post_process_hook=post_process_hook,
                        file_per_var=None)

    return state
def create_evaluable(self, expression, try_equations=True, auto_init=False,
                     preserve_caches=False, copy_materials=True,
                     integrals=None,
                     ebcs=None, epbcs=None, lcbcs=None,
                     ts=None, functions=None,
                     mode='eval', var_dict=None, strip_variables=True,
                     extra_args=None, active_only=True, verbose=True,
                     **kwargs):
    """
    Create evaluable object (equations and corresponding variables)
    from the `expression` string. Convenience function calling
    :func:`create_evaluable()
    <sfepy.discrete.evaluate.create_evaluable()>` with defaults provided
    by the Problem instance `self`.

    The evaluable can be repeatedly evaluated by calling
    :func:`eval_equations() <sfepy.discrete.evaluate.eval_equations()>`,
    e.g. for different values of variables.

    Parameters
    ----------
    expression : str
        The expression to evaluate.
    try_equations : bool
        Try to get variables from `self.equations`. If this fails,
        variables can either be provided in `var_dict`, as keyword
        arguments, or are created automatically according to the
        expression.
    auto_init : bool
        Set values of all variables to all zeros.
    preserve_caches : bool
        If True, do not invalidate evaluate caches of variables.
    copy_materials : bool
        Work with a copy of `self.equations.materials` instead of
        reusing them. Safe but can be slow.
    integrals : Integrals instance, optional
        The integrals to be used. Automatically created as needed if
        not given.
    ebcs : Conditions instance, optional
        The essential (Dirichlet) boundary conditions for 'weak'
        mode. If not given, `self.ebcs` are used.
    epbcs : Conditions instance, optional
        The periodic boundary conditions for 'weak'
        mode. If not given, `self.epbcs` are used.
    lcbcs : Conditions instance, optional
        The linear combination boundary conditions for 'weak'
        mode. If not given, `self.lcbcs` are used.
    ts : TimeStepper instance, optional
        The time stepper. If not given, `self.ts` is used.
    functions : Functions instance, optional
        The user functions for boundary conditions, materials
        etc. If not given, `self.functions` are used.
    mode : one of 'eval', 'el_avg', 'qp', 'weak'
        The evaluation mode - 'weak' means the finite element
        assembling, 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    var_dict : dict, optional
        The variables (dictionary of (variable name) : (Variable instance))
        to be used in the expression. Use this if the name of a variable
        conflicts with one of the parameters of this method.
    strip_variables : bool
        If False, the variables in `var_dict` or `kwargs` not present in
        the expression are added to the actual variables as a context.
    extra_args : dict, optional
        Extra arguments to be passed to terms in the expression.
    active_only : bool
        If True, in 'weak' mode, the (tangent) matrices and residual
        vectors (right-hand sides) contain only active DOFs.
    verbose : bool
        If False, reduce verbosity.
    **kwargs : keyword arguments
        Additional variables can be passed as keyword arguments, see
        `var_dict`.

    Returns
    -------
    equations : Equations instance
        The equations that can be evaluated.
    variables : Variables instance
        The corresponding variables. Set their values and use
        :func:`eval_equations() <sfepy.discrete.evaluate.eval_equations()>`.

    Examples
    --------
    `problem` is Problem instance.

    >>> out = problem.create_evaluable('ev_integrate.i1.Omega(u)')
    >>> equations, variables = out

    `vec` is a vector of coefficients compatible with the field
    of 'u' - let's use all ones.

    >>> vec = nm.ones((variables['u'].n_dof,), dtype=nm.float64)
    >>> variables['u'].set_data(vec)
    >>> vec_qp = eval_equations(equations, variables, mode='qp')

    Try another vector:

    >>> vec = 3 * nm.ones((variables['u'].n_dof,), dtype=nm.float64)
    >>> variables['u'].set_data(vec)
    >>> vec_qp = eval_equations(equations, variables, mode='qp')
    """
    from sfepy.discrete.equations import get_expression_arg_names

    variables = Variables(six.itervalues(get_default(var_dict, {})))
    var_context = get_default(var_dict, {})

    if try_equations and self.equations is not None:
        # Make a copy, so that possible variable caches are preserved.
        for key, var in six.iteritems(self.equations.variables.as_dict()):
            if key in variables:
                continue
            var = var.copy(name=key)
            if not preserve_caches:
                var.clear_evaluate_cache()
            variables[key] = var

    elif var_dict is None:
        # No equations and no explicit variables: derive them from the
        # argument names appearing in the expression.
        possible_var_names = get_expression_arg_names(expression)
        variables = self.create_variables(possible_var_names)

    materials = self.get_materials()
    if copy_materials or (materials is None):
        possible_mat_names = get_expression_arg_names(expression)
        materials = self.create_materials(possible_mat_names)

    else:
        materials = Materials(objs=materials._objs)

    # Extract Variable/Material instances passed as keyword arguments;
    # the remaining kwargs go to create_evaluable() unchanged.
    _kwargs = copy(kwargs)
    for key, val in six.iteritems(kwargs):
        if isinstance(val, Variable):
            if val.name != key:
                msg = 'inconsistent variable name! (%s == %s)' \
                      % (val.name, key)
                raise ValueError(msg)
            var_context[key] = variables[key] = val.copy(name=key)
            _kwargs.pop(key)

        elif isinstance(val, Material):
            if val.name != key:
                msg = 'inconsistent material name! (%s == %s)' \
                      % (val.name, key)
                raise ValueError(msg)
            materials[val.name] = val
            _kwargs.pop(key)

    kwargs = _kwargs

    ebcs = get_default(ebcs, self.ebcs)
    epbcs = get_default(epbcs, self.epbcs)
    lcbcs = get_default(lcbcs, self.lcbcs)
    ts = get_default(ts, self.get_timestepper())
    functions = get_default(functions, self.functions)
    integrals = get_default(integrals, self.get_integrals())

    out = create_evaluable(expression, self.fields, materials,
                           variables, integrals,
                           ebcs=ebcs, epbcs=epbcs, lcbcs=lcbcs,
                           ts=ts, functions=functions,
                           auto_init=auto_init,
                           mode=mode, extra_args=extra_args,
                           active_only=active_only,
                           verbose=verbose,
                           kwargs=kwargs)

    if not strip_variables:
        # Keep unused context variables alongside the actual ones.
        variables = out[1]
        variables.extend([var for var in six.itervalues(var_context)
                          if var not in variables])

    equations = out[0]
    mode = 'update' if not copy_materials else 'normal'
    equations.time_update_materials(self.ts, mode=mode, problem=self,
                                    verbose=verbose)

    return out
def evaluate(self, expression, try_equations=True, auto_init=False,
             preserve_caches=False, copy_materials=True, integrals=None,
             ebcs=None, epbcs=None, lcbcs=None, ts=None, functions=None,
             mode='eval', dw_mode='vector', term_mode=None,
             var_dict=None, strip_variables=True, ret_variables=False,
             active_only=True, verbose=True, extra_args=None, **kwargs):
    """
    Evaluate an expression, convenience wrapper of
    :func:`Problem.create_evaluable` and
    :func:`eval_equations() <sfepy.discrete.evaluate.eval_equations>`.

    Parameters
    ----------
    dw_mode : 'vector' or 'matrix'
        The assembling mode for 'weak' evaluation mode.
    term_mode : str
        The term call mode - some terms support different call modes
        and depending on the call mode different values are
        returned.
    ret_variables : bool
        If True, return the variables that were created to evaluate
        the expression.
    other : arguments
        See docstrings of :func:`Problem.create_evaluable()`.

    Returns
    -------
    out : array
        The result of the evaluation.
    variables : Variables instance
        The variables that were created to evaluate
        the expression. Only provided if `ret_variables` is True.
    """
    # Build the evaluable equations/variables pair...
    aux = self.create_evaluable(expression,
                                try_equations=try_equations,
                                auto_init=auto_init,
                                preserve_caches=preserve_caches,
                                copy_materials=copy_materials,
                                integrals=integrals,
                                ebcs=ebcs, epbcs=epbcs, lcbcs=lcbcs,
                                ts=ts, functions=functions,
                                mode=mode, var_dict=var_dict,
                                strip_variables=strip_variables,
                                extra_args=extra_args,
                                active_only=active_only,
                                verbose=verbose, **kwargs)
    equations, variables = aux

    # ...and evaluate it once.
    out = eval_equations(equations, variables,
                         preserve_caches=preserve_caches,
                         mode=mode, dw_mode=dw_mode, term_mode=term_mode,
                         active_only=active_only, verbose=verbose)

    if ret_variables:
        out = (out, variables)

    return out
def eval_equations(self, names=None, preserve_caches=False,
                   mode='eval', dw_mode='vector', term_mode=None,
                   active_only=True, verbose=True):
    """
    Evaluate (some of) the problem's equations, convenience wrapper of
    :func:`eval_equations() <sfepy.discrete.evaluate.eval_equations>`.

    Parameters
    ----------
    names : str or sequence of str, optional
        Evaluate only equations of the given name(s).
    preserve_caches : bool
        If True, do not invalidate evaluate caches of variables.
    mode : one of 'eval', 'el_avg', 'qp', 'weak'
        The evaluation mode - 'weak' means the finite element
        assembling, 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    dw_mode : 'vector' or 'matrix'
        The assembling mode for 'weak' evaluation mode.
    term_mode : str
        The term call mode - some terms support different call modes
        and depending on the call mode different values are
        returned.
    active_only : bool
        If True, in 'weak' mode, the (tangent) matrices and residual
        vectors contain only active DOFs.
    verbose : bool
        If False, reduce verbosity.

    Returns
    -------
    out : dict or result
        The evaluation result. In 'weak' mode it is the vector or sparse
        matrix, depending on `dw_mode`. Otherwise, it is a dict of results
        with equation names as keys or a single result for a single
        equation.
    """
    return eval_equations(self.equations, self.equations.variables,
                          names=names, preserve_caches=preserve_caches,
                          mode=mode, dw_mode=dw_mode, term_mode=term_mode,
                          active_only=active_only, verbose=verbose)
def get_materials(self):
    """Return the materials of the problem equations, or None when no
    equations are set."""
    eqs = self.equations
    return None if eqs is None else eqs.materials
def create_materials(self, mat_names=None):
    """
    Create materials with names in `mat_names`. Their definitions
    have to be present in `self.conf.materials`.

    Notes
    -----
    This method does not change `self.equations`, so it should not
    have any side effects.
    """
    if mat_names is not None:
        # Restrict to the selected material configurations only.
        conf_materials = self.select_materials(mat_names, only_conf=True)
    else:
        conf_materials = self.conf.materials

    materials = Materials.from_conf(conf_materials, self.functions)

    return materials
def get_variables(self, auto_create=False):
    """Return the variables of the problem equations. When no equations
    are set, create new variables if `auto_create` is True, else
    return None."""
    if self.equations is not None:
        return self.equations.variables
    if auto_create:
        return self.create_variables()
    return None
def create_variables(self, var_names=None):
    """
    Create variables with names in `var_names`. Their definitions
    have to be present in `self.conf.variables`.

    Notes
    -----
    This method does not change `self.equations`, so it should not
    have any side effects.
    """
    if var_names is not None:
        # Restrict to the selected variable configurations only.
        conf_variables = self.select_variables(var_names, only_conf=True)
    else:
        conf_variables = self.conf.variables

    variables = Variables.from_conf(conf_variables, self.fields)

    return variables
def get_output_name(self, suffix=None, extra=None, mode=None):
    """
    Return default output file name, based on the output directory,
    output format, step suffix and mode. If present, the extra
    string is put just before the output format suffix.
    """
    parts = [op.join(self.output_dir, self.ofn_trunk)]

    if suffix is not None:
        if mode is None:
            mode = self.output_modes[self.output_format]
        # Only 'sequence' output modes embed the step suffix in the name.
        if mode == 'sequence':
            parts.append(suffix)

    if extra is not None:
        parts.append(extra)
    parts.append(self.output_format)

    return '.'.join(parts)
def remove_bcs(self):
    """
    Convenience function to remove boundary conditions.

    Passes empty condition sets for all three kinds (essential, periodic
    and linear combination) to :func:`Problem.time_update()`.
    """
    self.time_update(ebcs={}, epbcs={}, lcbcs={})
def get_restart_filename(self, ts=None):
    """
    If restarts are allowed in problem definition options, return the
    restart file name, based on the output directory and time step.
    Returns None when the 'save_restart' option is not set.
    """
    if self.conf.options.get('save_restart', None) is None:
        return

    suffix = 'restart'
    if ts is not None:
        suffix += '-' + ts.suffix % ts.step

    # Replace the output-format extension with '.h5'.
    aux = self.get_output_name(extra=suffix)
    iext = len(aux) - len('.' + self.output_format)
    restart_filename = aux[:iext] + '.h5'

    return restart_filename
def save_restart(self, filename, state=None, ts=None):
    """
    Save the current state and time step to a restart file.

    Parameters
    ----------
    filename : str
        The restart file name.
    state : State instance, optional
        The state instance. If not given, a new state is created using the
        variables in problem equations.
    ts : TimeStepper instance, optional
        The time stepper. If not given, a default one is created.

    Notes
    -----
    Does not support terms with internal state.
    """
    import tables as pt

    if state is None:
        state = self.create_state()

    if ts is None:
        ts = self.get_default_ts()

    fd = pt.open_file(filename, mode='w', title='SfePy restart file')

    # Store the time stepper state as one array per entry.
    tgroup = fd.create_group('/', 'ts', 'ts')
    for key, val in six.iteritems(ts.get_state()):
        fd.create_array(tgroup, key, val, key)

    if state.r_vec is not None:
        fd.create_array('/', 'r_vec', state.r_vec, 'reduced state vector')

    # One group per state variable, with its full history.
    variables = state.variables
    for var in variables.iter_state():
        vgroup = fd.create_group('/', var.name, var.name)

        history_length = len(var.data)
        fd.create_array(vgroup, 'history_length', history_length,
                        'history length')
        for ii in range(history_length):
            data = var(step=-ii)
            fd.create_array(vgroup, 'data_%d' % ii, data, 'data')

    fd.close()

    # With 'save_restart' == -1 only the last restart file is kept.
    mode = self.conf.options.get('save_restart', None)
    if (mode == -1) and len(self._restart_filenames):
        last_filename = self._restart_filenames.pop()
        try:
            os.remove(last_filename)
        except OSError:
            pass

    self._restart_filenames.append(filename)
def load_restart(self, filename, state=None, ts=None):
    """
    Load the current state and time step from a restart file.

    Alternatively, a regular output file in the HDF5 format can be used in
    place of the restart file. In that case the restart is only
    approximate, because higher order field DOFs (if any) were stripped
    out. Files with the adaptive linearization are not supported. Use with
    caution!

    Parameters
    ----------
    filename : str
        The restart file name.
    state : State instance, optional
        The state instance. If not given, a new state is created using the
        variables in problem equations. Otherwise, its variables are
        modified in place.
    ts : TimeStepper instance, optional
        The time stepper. If not given, a default one is created.
        Otherwise, it is modified in place.

    Returns
    -------
    new_state : State instance
        The loaded state.
    """
    import tables as pt

    if state is None:
        state = self.create_state()

    if ts is None:
        ts = self.get_default_ts()

    variables = state.variables

    output('loading restart file "%s"...' % filename)

    fd = pt.open_file(filename, mode='r')
    if fd.title == 'SfePy restart file':
        # Restore the time stepper state from the '/ts' group.
        ts_state = {}
        for val in fd.root.ts._f_walknodes():
            ts_state[val.name] = val.read()
        ts.set_state(**ts_state)

        # Restore each variable's history (step 0 = current).
        for var in variables.iter_state():
            vgroup = fd.root._f_get_child(var.name)

            history_length = vgroup.history_length.read()
            for ii in range(0, history_length):
                data = vgroup._f_get_child('data_%d' % ii).read()
                var.set_data(data, step=-ii)

        new_state = State.from_variables(variables)
        if '/r_vec' in fd:
            r_vec = fd.root.r_vec.read()
            # NOTE(review): r_vec is assigned to `state`, not `new_state` —
            # presumably intentional as both share `variables`; confirm.
            state.r_vec = r_vec

        fd.close()

    elif fd.title == 'SfePy output file':
        from sfepy.discrete.fem.meshio import MeshIO

        output('WARNING: using a SfePy output file in place of a restart'
               ' file discards higher order DOFs! Use with caution!')

        fd.close()

        # Approximate restart: read vertex data at the current step.
        io = MeshIO.any_from_filename(filename)
        out = io.read_data(step=ts.step)

        for var in variables.iter_state():
            val = out[var.name]
            var.set_from_mesh_vertices(val.data)

        new_state = State.from_variables(variables)

    else:
        raise IOError('unknown file type! ("%s" in ("%s", "%s"))'
                      % (fd.title,
                         'SfePy restart file', 'SfePy output file'))

    output('...done')

    return new_state
|
vlukes/sfepy
|
sfepy/discrete/problem.py
|
Python
|
bsd-3-clause
| 75,579
|
[
"VTK"
] |
622fecedd560fb415f7442f0160ebf437e48f19b7c7065ef83824339c857e65f
|
""" An app to draw glacier geometry on top of a background image (local plotly)
"""
from outletglacierapp import app
import os
import warnings
import itertools
import json
import numpy as np
from flask import Flask, redirect, url_for, render_template, request, jsonify, flash, session, abort, make_response, send_from_directory
from forms import MapForm, FlowLineForm, ExtractForm, MeshForm
from config import glacier_choices, datadir
import dimarray as da
from models.greenmap import get_dict_data, get_json_data, _load_data, get_coords
from models.flowline import compute_one_flowline
from models.mesh import make_2d_grid_from_contours, Point, Line, extractglacier1d
from models.glacier1d import massbalance_diag
def flash_errors(form):
    """Flash one message per validation error of `form`, labeled with the
    human-readable field label."""
    for field, errors in form.errors.items():
        label = getattr(form, field).label.text
        for error in errors:
            flash(u"Error in the %s field: %s" % (label, error))
def getmeshpath(session):
    """Return the path of the session's 2D mesh file, initializing the
    session entry with the default file name on first use."""
    session.setdefault('mesh2d', 'mesh2d.nc')
    return os.path.join(datadir, session['mesh2d'])
def getglacierpath(session):
    """Return the path of the session's 1D glacier file, initializing the
    session entry with the default file name on first use."""
    session.setdefault('glacier1d', 'glacier1d.nc')
    return os.path.join(datadir, session['glacier1d'])
def getlinepath(session):
    """Return the path of the session's lines (JSON) file.

    Also repairs a bad historical session value where the line list itself
    was stored instead of a file name.
    """
    if 'lines' not in session:
        session['lines'] = 'lines.json'
    if type(session['lines']) is list:
        # Legacy sessions stored the lines directly; reset to the file name.
        warnings.warn('lines is a list for some reason')
        session['lines'] = 'lines.json'
    return os.path.join(datadir, session['lines'])
def get_map_form(session):
    """ instantiate and define MapForm based on session parameters
    (dataset, coordinates, glacier name and pixel limit, when present)
    """
    form = MapForm()
    # update form based on session parameters
    if 'variable' in session and 'dataset' in session:
        # The dataset field combines both as 'variable - dataset'.
        form.dataset.data = session['variable']+' - '+ session['dataset']
    if 'coords' in session:
        print 'document coords',session['coords']
        # coords order: [left, right, bottom, top]
        form.left.data = session['coords'][0]
        form.right.data = session['coords'][1]
        form.bottom.data = session['coords'][2]
        form.top.data = session['coords'][3]
    if 'glacier' in session:
        form.glacier.data = session['glacier']
    if 'maxpixels' in session:
        form.maxpixels.data = session['maxpixels']
    return form
def get_form(form, session):
    """ initialize Form with session parameters (be careful, risk of conflict)

    Session keys are namespaced as '<FormClassName>_<field>', mirroring
    set_form().
    """
    for k in form.data.keys():
        nm = form.__class__.__name__+'_'+k
        if nm in session:
            # form.data[k] = session[nm] # flask bug??? does not work
            getattr(form, k).data = session[nm]
    return form
def set_form(form, session):
    """Persist all form fields into the session, with keys namespaced as
    '<FormClassName>_<field>' (the inverse of get_form())."""
    prefix = form.__class__.__name__ + '_'
    for key in form.data.keys():
        session[prefix + key] = form.data[key]
@app.route('/')
def index():
    """Redirect the site root to the interactive drawing page."""
    # return redirect(url_for('draw_basin'))
    return redirect(url_for('drawing'))
@app.route('/basin')
def draw_basin():
    """Render the basin-drawing page with a map form pre-filled from the
    session."""
    form = get_map_form(session)
    return render_template('draw_basin.html', form=form)
@app.route('/drawing')
def drawing():
    """Render the main drawing page with the map, flowline and mesh forms."""
    #return redirect(url_for('map'))
    form = get_map_form(session)
    meshform = get_form(MeshForm(), session)
    # if 'variable' in session
    return render_template('drawing.html', form=form, flowline=FlowLineForm(), meshform=meshform, hidemeshform=True)
@app.route('/googlemap')
def googlemap():
    """Render the Google-map based drawing page (no map form needed)."""
    #return redirect(url_for('map'))
    # form = get_map_form(session)
    # if 'variable' in session
    meshform = get_form(MeshForm(), session)
    return render_template('googlemap.html', flowline=FlowLineForm(), meshform=meshform, hidemeshform=True)
@app.route('/reset', methods=["POST"])
def reset():
    """Clear the map-related session parameters and return to the drawing
    page. The 'mesh2d'/'lines' file-name entries are deliberately kept."""
    # dict.pop with a default removes the key only when present, replacing
    # the previous chain of `if key in session: del session[key]` checks.
    for key in ('variable', 'dataset', 'coords', 'glacier', 'maxpixels'):
        session.pop(key, None)
    return redirect(url_for('drawing'))
@app.route('/mapdata', methods=["GET"])
def mapdata():
    """ return json data to plot map on Greenland domain
    """
    form = MapForm(request.args)
    if not form.validate():
        flash_errors(form)

    # define session parameters
    variable, source = form.dataset.data.split('-')
    # save these parameters in session just in case, but is not used
    # but leave GET to make testing easier
    session['variable'] = variable.strip()
    session['dataset'] = source.strip()
    session['glacier'] = form.glacier.data
    session['maxpixels'] = form.maxpixels.data

    # update coordinates to get a fixed aspect ratio (r = width/height)
    r = 1
    # NOTE(review): `currentwidth` is unused since the centered adjustment
    # below was commented out — presumably left for reference.
    currentwidth = form.right.data - form.left.data
    width = r*(form.top.data - form.bottom.data)
    # form.right.data += (width-currentwidth)/2
    # form.left.data -= (width-currentwidth)/2
    form.right.data = form.left.data + width # maintain the left side...
    session['coords'] = [form.left.data, form.right.data, form.bottom.data, form.top.data]

    coords = session['coords'] # coordinates (can be custom)
    variable = session['variable'] # variable name
    dataset = session['dataset'] # dataset name
    maxshape = (session['maxpixels'],)*2
    data = get_json_data(variable, dataset, coords, maxshape=maxshape)
    return make_response(data) #, type='application/json')
@app.route('/glacierinfo')
def glacierinfo():
    """ provide glacier coordinate information from box and decker
    """
    # indicate the same list of glaciers as in settings, excluding the
    # 'custom' placeholder entry which has no fixed coordinates
    data = [{'name':nm, 'coords':get_coords(nm)} for nm in glacier_choices if nm.lower() != 'custom']
    return jsonify(glacierinfo=data)
@app.route('/flowline', methods=['GET'])
def flowline():
    """ compute flowline given a starting point

    Query parameters (via FlowLineForm): x, y (starting point, km),
    dx (step), maxdist (maximum length), dataset.
    """
    # Previous manual parsing of x/y/dx/maxdist/dataset from the request
    # was replaced by the FlowLineForm below.
    form = FlowLineForm(request.args)
    #TODO: remove maxshape argument (related to shape of loaded data) and write
    # a fortran routine !
    line = compute_one_flowline(form.x.data, form.y.data, dx=form.dx.data, maxdist=form.maxdist.data,
                                dataset=form.dataset.data, maxshape=(500,500))
    return jsonify(line=line)
@app.route('/lines', methods=['GET','POST'])
def lines():
    """GET: return the saved lines; POST: replace them with the request's
    JSON payload. Both respond with the resulting line list."""
    if request.method == 'GET':
        lines = _getlines(session)
        return jsonify(lines=lines)
    else:
        lines = request.json
        _setlines(session, lines)
        return jsonify(lines=lines)
def _getlines(session):
    """Load the saved line list from the session's line file; return an
    empty list when the file does not exist yet."""
    linepath = getlinepath(session)
    if not os.path.exists(linepath):
        return []
    with open(linepath, 'r') as f:
        return json.load(f)
def _setlines(session, lines):
    """Persist `lines` (a JSON-serializable list) to the session's line
    file."""
    linepath = getlinepath(session)
    with open(linepath, 'w') as f:
        # json.dump() returns None, so the previous `lines = json.dump(...)`
        # rebinding was pointless and misleading; just dump.
        json.dump(lines, f)
@app.route('/lineslonglat', methods=['GET','POST'])
def lineslonglat():
    """GET: return saved lines converted to lon/lat; POST: accept lon/lat
    lines, convert them to the map projection and persist them."""
    import cartopy.crs as ccrs
    from models.greenmap import CRS
    longlat = ccrs.PlateCarree()

    def transform_line(line, crs0, crs1):
        " transform a line between two coordinate systems "
        x, y = zip(*[(pt['x'], pt['y']) for pt in line['values']])
        x, y = np.array(x), np.array(y)
        # projected coordinates are stored in km; cartopy works in m
        if crs0 != longlat:
            x *= 1e3
            y *= 1e3
        pts_xyz = crs1.transform_points(crs0, x, y)
        if crs1 != longlat:
            pts_xyz /= 1e3
        lon, lat = pts_xyz[...,0], pts_xyz[...,1]
        newvalues = [{'x':lo, 'y':la} for lo, la in zip(lon, lat)]
        if np.any(~np.isfinite(pts_xyz)):
            raise RuntimeError("nan or inf in points !")
        return {'id':line['id'], 'values':newvalues}

    if request.method == 'GET':
        lines = _getlines(session)
        longlatlines = [transform_line(line, CRS, longlat) for line in lines]
        # lines = [transform_line(line, longlat, CRS) for line in longlatlines]
        return jsonify(longlatlines=longlatlines)
    else:
        print "received longlat", request.json
        #lines = [transform_line(line, longlat, CRS) for line in request.json]
        lines = [transform_line(line, longlat, CRS) for line in request.json]
        print "transformed xy", lines
        _setlines(session, lines)
        # return jsonify(msg='all good')
        return jsonify(lines=lines)
        # return jsonify(lines=lines)
@app.route('/mesh', methods=['GET', 'POST'])
def mesh():
    """GET: return the session's mesh as a list of sections (km units).

    POST: build a 2-D mesh from the three stored outline lines
    ('left', 'middle', 'right'), write it to disk, and redirect to GET.
    """
    meshpath = getmeshpath(session)
    if request.method == 'GET':
        try:
            ds = da.read_nc(meshpath)
        except Exception:
            # No mesh yet: point the client back to the drawing page.
            # (A leftover debugging `raise` previously made this branch
            # unreachable dead code.)
            flash("mesh file not found, create mesh via POST first (Save and Mesh button)")
            return jsonify(url=url_for('drawing'))
        # convert stored coordinates from meters to km for the client
        mesh = [[{'x': x*1e-3, 'y': y*1e-3, 's': s*1e-3}
                 for x, y in zip(xs_section, ys_section)]
                for xs_section, ys_section, s in zip(ds['x_coord'], ds['y_coord'], ds.x)]
        return jsonify(mesh=mesh)
    else:
        # compute mesh from the drawn lines and return the data extraction page
        lines = _getlines(session)
        meshform = MeshForm(request.form)
        set_form(meshform, session)  # make request persistent
        dx = meshform.data['dx']
        ny = meshform.data['ny']
        if len(lines) == 0:
            flash('no lines found !')
            return jsonify(url=url_for('drawing'))
        elif len(lines) != 3:
            flash('3 lines expected !')
            return jsonify(url=url_for('drawing'))
        linedict = {line['id'].lower(): line['values'] for line in lines}
        if set(linedict.keys()) != {'left', 'right', 'middle'}:
            flash('Unexpected line ids. Expected: {}, got: {}'.format(['left','right','middle'], linedict.keys()))
            return jsonify(url=url_for('drawing'))
        # convert each line (km) into a Line of Points in meters
        for nm in ['middle', 'left', 'right']:
            linedict[nm] = Line([Point(pt['x']*1e3, pt['y']*1e3) for pt in linedict[nm]])
        dima_mesh = make_2d_grid_from_contours(dx=dx, ny=ny, **linedict)
        dima_mesh.write_nc(meshpath, 'w')  # write mesh to disk
        return redirect(url_for('mesh'))
@app.route('/viewmesh')
def viewmesh():
    """Render the mesh / glacier view page."""
    return render_template(
        'mesh.html',
        form=get_map_form(session),
        extractform=get_form(ExtractForm(), session),
        meshform=get_form(MeshForm(), session),
    )
@app.route('/meshoutline', methods=['GET', 'POST'])
def meshoutline():
    """Extract glacier outline lines (left, middle, right) from an existing mesh.

    GET returns the three lines; POST additionally stores them as the
    session's current lines.

    Raises
    ------
    ValueError : if no mesh file exists for this session.
    """
    meshpath = getmeshpath(session)
    if not os.path.exists(meshpath):
        raise ValueError("mesh file unavailable: "+meshpath)
    # mesh coordinates are stored in meters; the client works in km
    x_coord = da.read_nc(meshpath, 'x_coord').values*1e-3
    y_coord = da.read_nc(meshpath, 'y_coord').values*1e-3
    ni, nj = x_coord.shape
    # (removed unused accumulators `left`, `middle`, `right` — the lines
    # below are what is actually returned)
    lines = [{'id':'middle', 'values':[]},
             {'id':'left', 'values':[]},
             {'id':'right', 'values':[]}]
    for i in range(ni):  # loop over sections
        # middle: central node; left/right: first and last nodes of each section
        lines[0]['values'].append({'x': x_coord[i][nj//2], 'y': y_coord[i][nj//2]})
        lines[1]['values'].append({'x': x_coord[i][0], 'y': y_coord[i][0]})
        lines[2]['values'].append({'x': x_coord[i][-1], 'y': y_coord[i][-1]})
    # if POST, make these the session's default lines
    if request.method == 'POST':
        _setlines(session, lines)
    return jsonify(lines=lines)
# @app.route('/data1d/<name:variable>/<name:dataset>', methods=['GET'])
# def extract_one_variable(variable, dataset):
# """ extract one variable from the netCDF file
# """
@app.route('/glacier1d', methods=['GET', 'POST'])
def make_glacier1d():
    """Extract the 1-D glacier dataset from the mesh (POST only).

    On success, redirects to the figure view; GET is not supported.
    """
    meshpath = getmeshpath(session)
    glacierpath = getglacierpath(session)
    if request.method == 'GET':
        raise ValueError("no GET route for /glacier1d, try /figure/glacier1d")
    # POST: extract the variables selected in the form along the mesh
    extractform = ExtractForm(request.form)
    mesh = da.read_nc(meshpath)
    glacier1d = extractglacier1d(mesh, extractform.data)
    glacier1d.write_nc(glacierpath, 'w')
    return redirect(url_for('vizualize_glacier1d'))  # get method
@app.route("/figure/glacier1d")
def vizualize_glacier1d():
    """Return JSON data (views + per-variable point series) to make a figure.

    Reads the extracted 1-D glacier file for this session, converts units
    for display, and groups variables into named plot views.
    """
    # read glacier data
    glacierpath = getglacierpath(session)
    glacier1d = da.read_nc(glacierpath)
    # for the diagnostic, also add velocity divergence near surface mass balance
    glacier1d = massbalance_diag(glacier1d)
    # rename variables and change units for the plotting
    fmt = dict(
        U='surf_velocity',
        hs='surface',
        hb='bottom',
        zb='bedrock',
        W='width',
    )
    # NOTE: fmt.pop(nm, nm) renames the known variables (mutating fmt as it
    # goes); variables not listed in fmt keep their original name.
    glacier1d = da.Dataset({fmt.pop(nm, nm): glacier1d[nm] for nm in glacier1d.keys()})
    # meters into km (in-place unit conversion for display)
    glacier1d.axes['x'].values *= 1e-3
    glacier1d.axes['x'].units = 'km'
    for nm in ['x_coord','y_coord','width']:
        glacier1d[nm].values *= 1e-3
        glacier1d[nm].units = 'km'
    # meters/seconds into meters/year
    for nm in ['surf_velocity','balance_velocity_obs','balance_velocity_mod3D','smb','runoff']:
        glacier1d[nm].values *= 24*3600*365.25
        glacier1d[nm].units = 'meters/year'
    # group data into various views
    views = [
        {
            'id': 'elevation',
            'names' : ['bedrock','bottom','surface'],
            'xlabel' : '',
            'ylabel' : 'elevation (m)',
        },
        {
            'id': 'width',
            'names' : ['width'],
            'xlabel' : '',
            'ylabel' : 'width (km)',
        },
        {
            'id': 'velocity',
            'names' : ['surf_velocity'],
            # 'names' : ['surf_velocity','balance_velocity_obs','balance_velocity_mod3D'],
            # 'xlabel' : '',
            'xlabel' : 'distance from ice divide(km)',
            'ylabel' : 'velocity (meters/year)',
        },
        # {
        #     'id': 'mass_balance',
        #     'names' : ['cumulative_smb','ice_flux_surf_obs','ice_flux_bal_mod3D'],
        #     'xlabel' : '',
        #     'ylabel' : 'mass balance (meters^3/second)',
        # },
        # {
        #     'id': 'smb',
        #     'names' : ['smb','runoff'],
        #     'xlabel' : 'distance from ice divide(km)',
        #     'ylabel' : 'SMB (meters/year)',
        # },
    ]
    # variables to plot: unique names across all views
    names = np.unique(list(itertools.chain(*[view['names'] for view in views]))).tolist()
    names += ['x_coord','y_coord'] # also pass along coordinates
    print names
    # replace all nan values with a sentinel the client understands
    missing_values = -99.99
    for k in glacier1d:
        glacier1d[k][np.isnan(glacier1d[k])] = missing_values
    # for simplicity, organize each line as a list of points with x, y property
    sources = {}
    for nm in names:
        sources[nm] = {
            'values':[{'x':glacier1d.x[i], 'y':val} for i, val in enumerate(glacier1d[nm].values)],
            'missing_values': missing_values,
        }
    # not used for now
    units = {k:glacier1d[k].units.strip() if hasattr(glacier1d[k], 'units') else '' for k in names}
    return jsonify(views=views, sources=sources, width=350, height=120)
@app.route('/download/glacier1d.nc')
def download():
    """Serve this session's 1-D glacier netCDF file for download."""
    path = getglacierpath(session)
    direc, filename = os.path.split(path)
    return send_from_directory(directory=direc, filename=filename)
|
perrette/webglacier1d
|
outletglacierapp/views.py
|
Python
|
mit
| 16,362
|
[
"NetCDF"
] |
ddf8441ee379c2ac23a7ebb254857d879890d149e686453f76e5951743b3e6ff
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.