text
stringlengths 4
1.02M
| meta
dict |
|---|---|
"""The Virtual File System (VFS) error classes."""
class Error(Exception):
"""Parent class for dfVFS specific errors."""
class AccessError(Error):
"""Error indicating that a resource could not be accessed."""
class BackEndError(Error):
"""Error indicating that a dependency has encountered a problem."""
class CacheFullError(Error):
"""Error indicating a cache is full."""
class FileFormatError(Error):
"""Error indicating a problem in the format of a file."""
class MountPointError(Error):
"""Error indicating a mount point does not exist."""
class NotSupported(Error):
"""Error indicating that unsupported functionality was requested."""
class PathSpecError(Error):
"""Error indicating a problem with a path specification."""
class ScannerError(Error):
"""Error indicating that an item could not be scanned."""
class UserAbort(Error):
"""Exception indicating that the user initiated an abort."""
class VolumeSystemError(Error):
"""Error indicating a problem with a volume system."""
|
{
"content_hash": "61d9b4cc7b237670b13eb75185d62c7f",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 70,
"avg_line_length": 22.866666666666667,
"alnum_prop": 0.7278911564625851,
"repo_name": "joachimmetz/dfvfs",
"id": "6f33847841954fcd14556cbd1f04106d32ea1af5",
"size": "1053",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "dfvfs/lib/errors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14212"
},
{
"name": "Makefile",
"bytes": "122"
},
{
"name": "PowerShell",
"bytes": "1021"
},
{
"name": "Python",
"bytes": "2176548"
},
{
"name": "Shell",
"bytes": "19355"
}
],
"symlink_target": ""
}
|
from atom.api import Bool, Enum, Unicode, Coerced, Typed, ForwardTyped, observe
from enaml.colors import ColorMember
from enaml.core.declarative import d_
from enaml.fonts import FontMember
from enaml.layout.geometry import Size
from .toolkit_object import ToolkitObject, ProxyToolkitObject
class ProxyWidget(ProxyToolkitObject):
    """ The abstract definition of a proxy Widget object.

    Concrete toolkit backends implement the setters below to push the
    declaration's state onto the underlying native widget.
    """
    #: A reference to the Widget declaration.
    declaration = ForwardTyped(lambda: Widget)

    def set_enabled(self, enabled):
        """Set the enabled state of the widget."""
        raise NotImplementedError

    def set_visible(self, visible):
        """Set the visibility of the widget."""
        raise NotImplementedError

    def set_background(self, background):
        """Set the background color of the widget."""
        raise NotImplementedError

    def set_foreground(self, foreground):
        """Set the foreground color of the widget."""
        raise NotImplementedError

    def set_font(self, font):
        """Set the font of the widget."""
        raise NotImplementedError

    def set_minimum_size(self, minimum_size):
        """Set the minimum size of the widget."""
        raise NotImplementedError

    def set_maximum_size(self, maximum_size):
        """Set the maximum size of the widget."""
        raise NotImplementedError

    def set_tool_tip(self, tool_tip):
        """Set the tool tip of the widget."""
        raise NotImplementedError

    def set_status_tip(self, status_tip):
        """Set the status tip of the widget."""
        raise NotImplementedError

    def set_show_focus_rect(self, show_focus_rect):
        """Set whether to show the focus rectangle for the widget."""
        raise NotImplementedError

    def ensure_visible(self):
        """Ensure the underlying toolkit widget is visible."""
        raise NotImplementedError

    def ensure_hidden(self):
        """Ensure the underlying toolkit widget is hidden."""
        raise NotImplementedError
# TODO remove in Enaml version 0.8.0
def _warn_prop(name, newname):
    """ Create a deprecation property which proxies a renamed attribute.

    Getting, setting, or deleting the returned property emits a
    FutureWarning and forwards the operation to ``newname`` on the
    instance.

    Parameters
    ----------
    name : str
        The old (removed) attribute name, used in the warning message.

    newname : str
        The new attribute name the property forwards to.

    Returns
    -------
    result : property
        A property whose getter, setter, and deleter proxy ``newname``.
    """
    # Fix: the original message concatenated to "...attributeinstead"
    # (missing space) and misspelled "Compatibility".
    msg = "The '%s' attribute has been removed. Use the '%s' attribute "
    msg += "instead. Compatibility will be removed in Enaml version 0.8.0"
    msg = msg % (name, newname)
    def _warn():
        import warnings
        # stacklevel=3 points the warning at the user's attribute access,
        # skipping the property machinery.
        warnings.warn(msg, FutureWarning, stacklevel=3)
    def getter(self):
        _warn()
        return getattr(self, newname)
    def setter(self, value):
        _warn()
        setattr(self, newname, value)
    def deleter(self):
        _warn()
        delattr(self, newname)
    return property(getter, setter, deleter)
class Widget(ToolkitObject):
    """ The base class of visible widgets in Enaml.
    """
    #: Whether or not the widget is enabled.
    enabled = d_(Bool(True))

    #: Whether or not the widget is visible.
    visible = d_(Bool(True))

    #: The background color of the widget.
    background = d_(ColorMember())

    # TODO remove in Enaml version 0.8.0
    # Deprecated alias for 'background'; access emits a FutureWarning.
    bgcolor = _warn_prop('bgcolor', 'background')

    #: The foreground color of the widget.
    foreground = d_(ColorMember())

    # TODO remove in Enaml version 0.8.0
    # Deprecated alias for 'foreground'; access emits a FutureWarning.
    fgcolor = _warn_prop('fgcolor', 'foreground')

    #: The font used for the widget.
    font = d_(FontMember())

    #: The minimum size for the widget. The default means that the
    #: client should determine an intelligent minimum size.
    minimum_size = d_(Coerced(Size, (-1, -1)))

    #: The maximum size for the widget. The default means that the
    #: client should determine an intelligent maximum size.
    maximum_size = d_(Coerced(Size, (-1, -1)))

    #: The tool tip to show when the user hovers over the widget.
    tool_tip = d_(Unicode())

    #: The status tip to show when the user hovers over the widget.
    status_tip = d_(Unicode())

    #: A flag indicating whether or not to show the focus rectangle for
    #: the given widget. This is not necessarily support by all widgets
    #: on all clients. A value of None indicates to use the default as
    #: supplied by the client.
    show_focus_rect = d_(Enum(None, True, False))

    #: A reference to the ProxyWidget object.
    proxy = Typed(ProxyWidget)

    #--------------------------------------------------------------------------
    # Observers
    #--------------------------------------------------------------------------
    @observe(('enabled', 'visible', 'background', 'foreground', 'font',
        'minimum_size', 'maximum_size', 'show_focus_rect', 'tool_tip',
        'status_tip'))
    def _update_proxy(self, change):
        """ Update the proxy widget when the Widget data changes.

        This method only updates the proxy when an attribute is updated;
        not when it is created or deleted.
        """
        # The superclass implementation is sufficient.
        super(Widget, self)._update_proxy(change)

    #--------------------------------------------------------------------------
    # Public API
    #--------------------------------------------------------------------------
    def show(self):
        """ Ensure the widget is shown.

        Calling this method will also set the widget visibility to True.
        """
        self.visible = True
        if self.proxy_is_active:
            self.proxy.ensure_visible()

    def hide(self):
        """ Ensure the widget is hidden.

        Calling this method will also set the widget visibility to False.
        """
        self.visible = False
        if self.proxy_is_active:
            self.proxy.ensure_hidden()
|
{
"content_hash": "703d0d9437f1ff7032b0a77e9aed3053",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 79,
"avg_line_length": 31.189873417721518,
"alnum_prop": 0.6124188311688312,
"repo_name": "ContinuumIO/ashiba",
"id": "18fa8f5f67484625d483f9f412d5c800b3677030",
"size": "5278",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enaml/enaml/widgets/widget.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4560"
},
{
"name": "C",
"bytes": "738"
},
{
"name": "C++",
"bytes": "77464"
},
{
"name": "CSS",
"bytes": "2286"
},
{
"name": "Emacs Lisp",
"bytes": "1210"
},
{
"name": "HTML",
"bytes": "4891"
},
{
"name": "JavaScript",
"bytes": "17243"
},
{
"name": "Makefile",
"bytes": "4590"
},
{
"name": "Python",
"bytes": "3241535"
},
{
"name": "Shell",
"bytes": "119"
},
{
"name": "VimL",
"bytes": "1821"
}
],
"symlink_target": ""
}
|
"""
Manage the master configuration file
"""
import logging
import os
import salt.config
import salt.utils.files
import salt.utils.verify
import salt.utils.yaml
log = logging.getLogger(__name__)
def values():
    """
    Return the parsed contents of the master configuration file.
    """
    # Delegate parsing entirely to salt's master config loader.
    return salt.config.master_config(__opts__["conf_file"])
def apply(key, value):
    """
    Set a single key in the master configuration file.

    .. note::
        This will strip comments from your config file, because the file
        is re-serialized from the parsed configuration data.
    """
    path = __opts__["conf_file"]
    # conf_file may point at a directory; the master config lives in
    # a file named "master" inside it.
    if os.path.isdir(path):
        path = os.path.join(path, "master")
    data = values()
    data[key] = value
    with salt.utils.files.fopen(path, "w+") as fp_:
        # Fix: pass the file object to safe_dump. Without the stream
        # argument the YAML was returned as a string and discarded,
        # leaving the config file truncated and empty.
        salt.utils.yaml.safe_dump(data, fp_, default_flow_style=False)
def update_config(file_name, yaml_contents):
    """
    Update master config with ``yaml_contents``.

    Writes ``yaml_contents`` to a file named ``file_name.conf`` under the
    folder specified by ``default_include``. This folder is named
    ``master.d`` by default. Please look at
    :conf_master:`include-configuration` for more information.

    Example low data:

    .. code-block:: python

        data = {
            'username': 'salt',
            'password': 'salt',
            'fun': 'config.update_config',
            'file_name': 'gui',
            'yaml_contents': {'id': 1},
            'client': 'wheel',
            'eauth': 'pam',
        }
    """
    file_name = "{}{}".format(file_name, ".conf")
    # Target directory: the include directory relative to the config dir
    # (e.g. /etc/salt/master.d).
    dir_path = os.path.join(
        __opts__["config_dir"], os.path.dirname(__opts__["default_include"])
    )
    try:
        yaml_out = salt.utils.yaml.safe_dump(
            yaml_contents,
            default_flow_style=False,
        )
        if not os.path.exists(dir_path):
            log.debug("Creating directory %s", dir_path)
            os.makedirs(dir_path, 0o755)
        file_path = os.path.join(dir_path, file_name)
        # Security: reject file names that would escape the include
        # directory (path traversal).
        if not salt.utils.verify.clean_path(dir_path, file_path):
            return "Invalid path"
        with salt.utils.files.fopen(file_path, "w") as fp_:
            fp_.write(yaml_out)
        return "Wrote {}".format(file_name)
    except (OSError, salt.utils.yaml.YAMLError, ValueError) as err:
        # Errors are reported as strings to the wheel caller rather
        # than raised.
        return str(err)
|
{
"content_hash": "ed5ccb2764cfe00532eb4be49c9b12a9",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 76,
"avg_line_length": 24.824175824175825,
"alnum_prop": 0.5745905267817618,
"repo_name": "saltstack/salt",
"id": "a5dfee0b602478d11bdb6d45e96ffbef0e919c4e",
"size": "2259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/wheel/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
}
|
"""Huawei LTE constants."""
DOMAIN = "huawei_lte"
DEFAULT_DEVICE_NAME = "LTE"
UPDATE_SIGNAL = f"{DOMAIN}_update"
UPDATE_OPTIONS_SIGNAL = f"{DOMAIN}_options_update"
UNIT_BYTES = "B"
UNIT_SECONDS = "s"
CONNECTION_TIMEOUT = 10
KEY_DEVICE_BASIC_INFORMATION = "device_basic_information"
KEY_DEVICE_INFORMATION = "device_information"
KEY_DEVICE_SIGNAL = "device_signal"
KEY_DIALUP_MOBILE_DATASWITCH = "dialup_mobile_dataswitch"
KEY_MONITORING_STATUS = "monitoring_status"
KEY_MONITORING_TRAFFIC_STATISTICS = "monitoring_traffic_statistics"
KEY_WLAN_HOST_LIST = "wlan_host_list"
BINARY_SENSOR_KEYS = {KEY_MONITORING_STATUS}
DEVICE_TRACKER_KEYS = {KEY_WLAN_HOST_LIST}
SENSOR_KEYS = {
KEY_DEVICE_INFORMATION,
KEY_DEVICE_SIGNAL,
KEY_MONITORING_TRAFFIC_STATISTICS,
}
SWITCH_KEYS = {KEY_DIALUP_MOBILE_DATASWITCH}
ALL_KEYS = BINARY_SENSOR_KEYS | DEVICE_TRACKER_KEYS | SENSOR_KEYS | SWITCH_KEYS
|
{
"content_hash": "2bf05e6bf3ba7dcd8d81b3805f81e17d",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 79,
"avg_line_length": 25.82857142857143,
"alnum_prop": 0.7433628318584071,
"repo_name": "joopert/home-assistant",
"id": "b6e079576ac13211d81cc2fad9d5d65685ead9d1",
"size": "904",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/huawei_lte/const.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18670593"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
import pytest
from unittest import mock
from mitmproxy.test import tflow
from mitmproxy import io
from mitmproxy import exceptions
from mitmproxy.addons import clientplayback
from mitmproxy.test import taddons
def tdump(path, flows):
    """Serialize the given flows to *path* using a FlowWriter."""
    with open(path, "wb") as handle:
        writer = io.FlowWriter(handle)
        for flow in flows:
            writer.add(flow)
class MockThread():
    """Stand-in for a replay thread that always reports itself finished."""

    def is_alive(self):
        # A mock thread is never running.
        return False
class TestClientPlayback:
    def test_playback(self):
        """End-to-end replay lifecycle: start, tick, finish, stop."""
        cp = clientplayback.ClientPlayback()
        with taddons.context() as tctx:
            assert cp.count() == 0
            f = tflow.tflow(resp=True)
            cp.start_replay([f])
            assert cp.count() == 1
            RP = "mitmproxy.proxy.protocol.http_replay.RequestReplayThread"
            with mock.patch(RP) as rp:
                assert not cp.current_thread
                cp.tick()
                assert rp.called
                assert cp.current_thread

                # With the queue drained and the (mocked) thread reporting
                # done, a tick should emit the completion events.
                cp.flows = []
                cp.current_thread.is_alive.return_value = False
                assert cp.count() == 1
                cp.tick()
                assert cp.count() == 0
                assert tctx.master.has_event("update")
                assert tctx.master.has_event("processing_complete")

                # A dead thread is cleared on the next tick.
                cp.current_thread = MockThread()
                cp.tick()
                assert cp.current_thread is None

                cp.start_replay([f])
                cp.stop_replay()
                assert not cp.flows

            # Live (in-flight) flows cannot be replayed.
            df = tflow.DummyFlow(tflow.tclient_conn(), tflow.tserver_conn(), True)
            with pytest.raises(exceptions.CommandError, match="Can't replay live flow."):
                cp.start_replay([df])

    def test_load_file(self, tmpdir):
        """load_file populates flows from disk; missing paths raise."""
        cp = clientplayback.ClientPlayback()
        with taddons.context():
            fpath = str(tmpdir.join("flows"))
            tdump(fpath, [tflow.tflow(resp=True)])
            cp.load_file(fpath)
            assert cp.flows
            with pytest.raises(exceptions.CommandError):
                cp.load_file("/nonexistent")

    def test_configure(self, tmpdir):
        """The client_replay option loads files; bad paths raise OptionsError."""
        cp = clientplayback.ClientPlayback()
        with taddons.context() as tctx:
            path = str(tmpdir.join("flows"))
            tdump(path, [tflow.tflow()])
            tctx.configure(cp, client_replay=[path])
            cp.configured = False
            tctx.configure(cp, client_replay=[])
            cp.configured = False
            tctx.configure(cp)
            cp.configured = False
            with pytest.raises(exceptions.OptionsError):
                tctx.configure(cp, client_replay=["nonexistent"])
|
{
"content_hash": "9dbd3864c1711e367c97430cbf3a1b3c",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 89,
"avg_line_length": 32.24691358024691,
"alnum_prop": 0.5704441041347627,
"repo_name": "MatthewShao/mitmproxy",
"id": "3f990668b7b8fced0643045267831c0977f9c3a5",
"size": "2612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/mitmproxy/addons/test_clientplayback.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "20941"
},
{
"name": "HTML",
"bytes": "14747"
},
{
"name": "JavaScript",
"bytes": "276302"
},
{
"name": "PowerShell",
"bytes": "494"
},
{
"name": "Python",
"bytes": "1726585"
},
{
"name": "Shell",
"bytes": "4644"
}
],
"symlink_target": ""
}
|
"""Scraper for Dept of Justice Office of Legal Counsel
CourtID: bia
Court Short Name: Dept of Justice OLC
Author: William Palin
Reviewer:
Type:
History:
2022-01-14: Created by William E. Palin
"""
from juriscraper.lib.html_utils import (
get_row_column_links,
get_row_column_text,
)
from juriscraper.OpinionSiteLinear import OpinionSiteLinear
class Site(OpinionSiteLinear):
    """Linear opinion scraper for the DOJ Office of Legal Counsel listing."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.court_id = self.__module__
        self.url = "https://www.justice.gov/olc/opinions?items_per_page=40"
        self.status = "Published"

    def _process_html(self):
        """Extract one case dict per table row of the opinions listing."""
        row_selector = (
            ".//tr[contains(@class , 'even')] | .//tr[contains(@class , 'odd')]"
        )
        for row in self.html.xpath(row_selector):
            issued = get_row_column_text(row, 1)
            # The cell may carry a "Date of Issuance" label; the date
            # itself is on the last line.
            if "Date of Issuance" in issued:
                issued = issued.split("\n")[-1].strip()
            case_name = get_row_column_text(row, 2)
            case_url = get_row_column_links(row, 2)
            case_summary = get_row_column_text(row, 3)
            self.cases.append(
                {
                    "date": issued,
                    "name": case_name,
                    "url": case_url,
                    "summary": case_summary,
                    "docket": "",  # Docket numbers don't appear to exist.
                }
            )
|
{
"content_hash": "96b700213ea7d8d39cdeefff5b35603e",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 80,
"avg_line_length": 30.977272727272727,
"alnum_prop": 0.5363169479090242,
"repo_name": "freelawproject/juriscraper",
"id": "48a86524b9b2ba991ef9806d89d08edefb3281c7",
"size": "1363",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "juriscraper/opinions/united_states/administrative_agency/olc.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "63242956"
},
{
"name": "Jinja",
"bytes": "2201"
},
{
"name": "Makefile",
"bytes": "75"
},
{
"name": "Python",
"bytes": "1059228"
}
],
"symlink_target": ""
}
|
import praw
import sqlite3
import time
import string
USERNAME = ""
#This is the bot's Username. In order to send mail, he must have some amount of Karma.
PASSWORD = ""
#This is the bot's Password.
USERAGENT = ""
#This is a short description of what the bot does. For example "/u/GoldenSights' Newsletter bot"
SUBREDDIT = "pics+gifs+funny+askreddit"
#This is the sub or list of subs to scan for new posts. For a single sub, use "sub1". For multiple subreddits, use "sub1+sub2+sub3+..."
WIKISUBREDDIT = "GoldTesting"
#This is the subreddit which owns the wikipage. Perhaps you wish to document posts on subs other than your own.
WIKIPAGE = "Gold"
#This is the page of the wiki that you will be editing
MAXPOSTS = 100
#This is how many posts you want to retrieve all at once. PRAW can download 100 at a time.
WAIT = 20
#This is how many seconds you will wait between cycles. The bot is completely inactive during this time.
VERBOSE = False
#IF Verbose is set to true, the console will spit out a lot more information. Use True or False (Use capitals! No quotations!)
'''All done!'''
WAITS = str(WAIT)
letters = string.ascii_uppercase
lets = string.ascii_letters
try:
import bot #This is a file in my python library which contains my Bot's username and password. I can push code to Git without showing credentials
USERNAME = bot.getuG()
PASSWORD = bot.getpG()
USERAGENT = bot.getaG()
except ImportError:
pass
sql = sqlite3.connect('sql.db')
print('Loaded SQL Database')
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(id TEXT)')
print('Loaded Oldposts')
sql.commit()
r = praw.Reddit(USERAGENT)
r.login(USERNAME, PASSWORD)
def scan():
    """Scan SUBREDDIT for new posts and refresh the author index wiki page.

    Reads the existing wiki page, collects one markdown link line per
    author, replaces an author's old entry when they post again, then
    rewrites the page with entries grouped alphabetically.

    Uses the module globals ``r`` (reddit session), ``cur``/``sql``
    (sqlite), and the configuration constants.
    """
    print('Reading Wiki')
    names = []
    finals = []
    wikisubreddit = r.get_subreddit(WIKISUBREDDIT)
    wikipage = r.get_wiki_page(wikisubreddit, WIKIPAGE)
    pcontent = wikipage.content_md
    print('Gathering names')
    # Each existing entry is a markdown link line containing "http://".
    pcontentsplit = pcontent.split('\n')
    for item in pcontentsplit:
        if 'http://' in item:
            names.append(item.replace('\r', ''))
    print('Scanning ' + SUBREDDIT)
    scansub = r.get_subreddit(SUBREDDIT)
    posts = scansub.get_new(limit=MAXPOSTS)
    for post in posts:
        pid = post.id
        plink = post.permalink
        cur.execute('SELECT * FROM oldposts WHERE id=?', [pid])
        if not cur.fetchone():
            try:
                # Raises AttributeError when the post author is deleted.
                pauthor = post.author.name
                print(pid + ': ' + pauthor)
                # Fix: iterate over a copy of the list. Removing from the
                # list while iterating it skips the element after each
                # removal, which could leave stale entries behind.
                for item in names[:]:
                    if pauthor in item:
                        print('\tDeleting old entry')
                        names.remove(item)
                print('\tAdding new entry')
                names.append('[' + pauthor + '](' + plink + ')')
            except AttributeError:
                print(pid + ': Post deleted')
            # Remember the post id either way so it is not reprocessed.
            cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
    sql.commit()
    names = sorted(names, key=str.lower)
    if VERBOSE:
        print(names)
    # Entries whose first character (after '[') is not a letter.
    finals.append('**0-9 and others**\n\n_____\n\n')
    for item in names:
        if item[1] not in lets:
            finals.append(item + '\n\n')
    # One alphabetical section per letter.
    for letter in letters:
        finals.append('**' + letter + '**\n\n_____\n\n')
        for item in names:
            if item[1].lower() == letter.lower():
                finals.append(item + '\n\n')
    if VERBOSE:
        print(finals)
    print('Saving wiki page')
    wikipage.edit(''.join(finals))
# Main loop: scan forever, sleeping WAIT seconds between cycles.
while True:
    try:
        scan()
        sql.commit()
    except Exception:
        # NOTE(review): the broad except keeps the bot alive on any
        # failure, but 'fail' alone gives no diagnostics — consider
        # printing the exception details as well.
        print('fail')
    print('Running again in ' + WAITS + ' seconds.\n')
    time.sleep(WAIT)
|
{
"content_hash": "e5594815039785196ed82f5294f3a167",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 149,
"avg_line_length": 27.65,
"alnum_prop": 0.6868595539481616,
"repo_name": "tehp/reddit",
"id": "7ccf8f9a6e313ce53bfbedeccadc1b96609ee5b7",
"size": "3335",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Wikiname/wikiname.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1078988"
},
{
"name": "PostScript",
"bytes": "235598"
},
{
"name": "Python",
"bytes": "508351"
},
{
"name": "Shell",
"bytes": "289"
}
],
"symlink_target": ""
}
|
import pytest # noqa: 401
import mock # noqa: 401
from backup.thinning import ThinningStrategy, LatestStrategy, ThinOutStrategy
from backup.utils import timestamp2date
from datetime import datetime, timedelta
from collections import namedtuple
from random import randint
def everyday(lastday):
    """Build a set of jittered datetimes walking backwards from *lastday*.

    The first 4000 steps go back six hours each with small random jitter;
    the following 2000 steps go back one day each with larger jitter.
    """
    result = set()
    current = lastday

    step = timedelta(hours=6)
    for _ in range(4000):
        result.add(current + timedelta(seconds=randint(-6000, +6000)))
        current = current - step

    step = timedelta(days=1)
    for _ in range(2000):
        result.add(current + timedelta(seconds=randint(-60000, +60000)))
        current = current - step

    return result
def somedays():
    """Return a fixed list of Day records parsed from literal timestamps.

    Each entry is a namedtuple with a single 'timestamp' field holding
    the datetime parsed by backup.utils.timestamp2date. The sample spans
    sparse yearly dates plus a dense run of weekly backups.
    """
    Day = namedtuple('Day', ['timestamp'])
    # Raw YYYYMMDDHHMMSS stamps, one per line.
    timestamps = """
20140201213106
20150208051039
20160207050939
20170219040403
20170716040403
20170723040402
20170730040403
20170806040403
20170813040402
20170820040402
20170827040403
20170903040402
20170910040403
20170917040404
20170924040403
20171001040402
20171008040402
20171015040402
20171022040403
20171029040403
20171105040402
20171112040402
20171119040402
20171126040403
20171203040403
20171210040402
20171217040403
20171224040402
20171231040402
20180107040403
20180114040402
20180121040403
20180128040403
20180204040402
20180211040402
20180218040403
20180225040403
20180304040403
20180311040403
20180312010603
20180318064806
""".strip().split('\n')
    return list(map(Day, map(timestamp2date, timestamps)))
def testArgumentFactory():
    """fromArgument rejects malformed specs and builds the right strategy."""
    # invalid parameters: unknown letter, zero count, negative number
    with pytest.raises(ValueError):
        ThinningStrategy.fromArgument("A")
    with pytest.raises(ValueError):
        ThinningStrategy.fromArgument("L0")
    with pytest.raises(ValueError):
        ThinningStrategy.fromArgument("-1")
    # valid parameters: "L<n>" yields a LatestStrategy
    s = ThinningStrategy.fromArgument("L17")
    assert type(s) is LatestStrategy
def assertThinningOn(dates, indates, outdates):
    """Assert that (indates, outdates) is a valid thinning of *dates*.

    The newest date must be kept, and the two sets must form a disjoint
    partition of the input.
    """
    if dates:
        # The most recent date must never be thinned away.
        assert max(dates) in indates
    assert len(indates) + len(outdates) == len(dates)
    assert indates.isdisjoint(outdates)
    assert set(dates) == indates.union(outdates)
def testThinningLatest():
    """LatestStrategy keeps exactly the N newest dates."""
    lastday = datetime(2222, 2, 2, 22, 22, 22)
    dates = everyday(lastday)
    # Keep 17: everything older than the kept set is thinned out.
    (indates, outdates) = LatestStrategy(17).executeOn(dates)
    assertThinningOn(dates, indates, outdates)
    assert len(indates) == 17
    assert len(outdates) == len(dates) - 17
    assert all(outdate < min(indates) for outdate in outdates)
    # Keep more than exist: nothing is thinned out.
    (indates, outdates) = LatestStrategy(17171717).executeOn(dates)
    assertThinningOn(dates, indates, outdates)
    assert len(indates) == len(dates)
    assert len(outdates) == 0
    assert all(outdate < min(indates) for outdate in outdates)
def testThinOut(caplog):
    """Exercise ThinOutStrategy on empty, generated, and fixed date sets,
    including idempotence and repeated fast-forwarding of the fix date."""
    lastday = datetime(2222, 2, 2, 22, 22, 22)
    fixdate = datetime(2222, 1, 31)
    # test empty
    (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn([], fix=fixdate)
    assertThinningOn([], indates, outdates)
    assert len(indates) == 0
    assert len(outdates) == 0
    (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn([], fix=fixdate, attr='timestamp')
    assertThinningOn([], indates, outdates)
    assert len(indates) == 0
    assert len(outdates) == 0
    dates = everyday(lastday)
    (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn(dates, fix=fixdate)
    assertThinningOn(dates, indates, outdates)
    # thin out again - must be the same result (idempotent)
    (indates2, outdates2) = ThinOutStrategy(2, 3, 2).executeOn(indates, fix=fixdate)
    assertThinningOn(indates, indates2, outdates2)
    assert len(indates2) == len(indates)
    assert len(outdates2) == 0
    # fast forward
    for x in range(0, 100):
        fixdate = fixdate + timedelta(weeks=1)
        (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn(indates, fix=fixdate)
    # there must always be 9 yearly dates + latest
    assert len(indates) == 10
    # fast forward again (add a date first)
    indates.add(fixdate)
    for x in range(0, 100):
        fixdate = fixdate + timedelta(weeks=1)
        (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn(indates, fix=fixdate)
    # there must always be 10 yearly dates now
    assert len(indates) == 10
    fixdate = datetime(2018, 3, 21)
    dates = somedays()
    (indates, outdates) = ThinOutStrategy(7, 7, 7).executeOn(
        dates, fix=fixdate, attr='timestamp'
    )
    assertThinningOn(dates, indates, outdates)
    assert len(indates) == 21
    # thin out again - must be the same result (idempotent)
    (indates2, outdates2) = ThinOutStrategy(7, 7, 7).executeOn(
        indates, fix=fixdate, attr='timestamp'
    )
    assertThinningOn(indates, indates2, outdates2)
    assert len(indates2) == len(indates)
    assert len(indates2) == 21
    assert len(outdates2) == 0
    # fast forward (days)
    for x in range(0, 100):
        fixdate = fixdate + timedelta(days=1)
        (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn(
            indates, fix=fixdate, attr='timestamp'
        )
    assert len(indates) == 6
    # fast forward (weeks)
    for x in range(0, 100):
        fixdate = fixdate + timedelta(weeks=1)
        (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn(
            indates, fix=fixdate, attr='timestamp'
        )
    assert len(indates) == 6
    # what happens if we go back in time
    fixdate = datetime(2018, 3, 21)
    for x in range(0, 100):
        fixdate = fixdate - timedelta(days=1)
        (indates, outdates) = ThinOutStrategy(2, 3, 2).executeOn(
            indates, fix=fixdate, attr='timestamp'
        )
    assert len(indates) == 6
|
{
"content_hash": "c1cd4bd3d5214976d4bf8f2515a3c386",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 95,
"avg_line_length": 30.906593406593405,
"alnum_prop": 0.688,
"repo_name": "edmw/site-backup",
"id": "4c5cc69f371524c0dbda06325a72bdbcd1612d8a",
"size": "5642",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_thinning.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "82910"
}
],
"symlink_target": ""
}
|
import numpy as np
import os
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.datasets.voc import voc_utils
from chainercv.utils import read_image
from chainercv.utils import read_label
class VOCSemanticSegmentationDataset(GetterDataset):
    """Semantic segmentation dataset for PASCAL `VOC2012`_.

    .. _`VOC2012`: http://host.robots.ox.ac.uk/pascal/VOC/voc2012/

    Args:
        data_dir (string): Path to the root of the training data. If this is
            :obj:`auto`, this class will automatically download data for you
            under :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/voc`.
        split ({'train', 'val', 'trainval'}): Select a split of the dataset.

    This dataset returns the following data.

    .. csv-table::
        :header: name, shape, dtype, format

        :obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
        "RGB, :math:`[0, 255]`"
        :obj:`label`, ":math:`(H, W)`", :obj:`int32`, \
        ":math:`[-1, \#class - 1]`"
    """

    def __init__(self, data_dir='auto', split='train'):
        super(VOCSemanticSegmentationDataset, self).__init__()
        if split not in ['train', 'trainval', 'val']:
            raise ValueError(
                'please pick split from \'train\', \'trainval\', \'val\'')
        if data_dir == 'auto':
            # Download (or locate) the VOC2012 data for the given split.
            data_dir = voc_utils.get_voc('2012', split)
        # The split's image ids, one per line.
        id_list_file = os.path.join(
            data_dir, 'ImageSets/Segmentation/{0}.txt'.format(split))
        self.ids = [id_.strip() for id_ in open(id_list_file)]
        self.data_dir = data_dir
        self.add_getter('img', self._get_image)
        self.add_getter('label', self._get_label)

    def __len__(self):
        """Return the number of examples in the selected split."""
        return len(self.ids)

    def _get_image(self, i):
        """Read and return the i-th image as a color array."""
        img_path = os.path.join(
            self.data_dir, 'JPEGImages', self.ids[i] + '.jpg')
        img = read_image(img_path, color=True)
        return img

    def _get_label(self, i):
        """Read the i-th label map, mapping value 255 to -1."""
        label_path = os.path.join(
            self.data_dir, 'SegmentationClass', self.ids[i] + '.png')
        label = read_label(label_path, dtype=np.int32)
        # Pixels valued 255 — presumably the VOC 'ignore' label — are
        # remapped to -1, matching the documented label range.
        label[label == 255] = -1
        # NOTE(review): an old comment here said "(1, H, W) -> (H, W)"
        # but no squeeze is performed; read_label presumably already
        # returns (H, W) — confirm.
        return label
|
{
"content_hash": "8dc50e134e07be21b6e97bd351dbc943",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 76,
"avg_line_length": 33.07462686567164,
"alnum_prop": 0.5843862815884476,
"repo_name": "chainer/chainercv",
"id": "75a036db5b6ebd845edcc09ac9ad9b95d10c01c3",
"size": "2216",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "chainercv/datasets/voc/voc_semantic_segmentation_dataset.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "3132"
},
{
"name": "Python",
"bytes": "1288391"
},
{
"name": "Shell",
"bytes": "11424"
}
],
"symlink_target": ""
}
|
import datetime
import logging
import webapp2
from google.appengine.ext import ndb
from google.appengine.ext.webapp import template
# A simple log server for rebaseline-o-matic.
#
# Accepts updates to the same log entry and shows a simple status page.
# Has a special state for the case where there are no NeedsRebaseline
# lines in TestExpectations to avoid cluttering the log with useless
# entries every 30 seconds.
#
# Other than that, new updatelog calls append to the most recent log
# entry until they have the newentry parameter, in which case, it
# starts a new log entry.
LOG_PARAM = "log"
NEW_ENTRY_PARAM = "newentry"
NO_NEEDS_REBASELINE_PARAM = "noneedsrebaseline"
NUM_LOGS_PARAM = "numlogs"
BEFORE_PARAM = "before"
class LogEntry(ndb.Model):
content = ndb.TextProperty()
date = ndb.DateTimeProperty(auto_now_add=True)
is_no_needs_rebaseline = ndb.BooleanProperty()
def logs_query():
return LogEntry.query().order(-LogEntry.date)
class UpdateLog(webapp2.RequestHandler):
    """Accepts posted log text and appends, overwrites, or creates entries."""

    def post(self):
        new_log_data = self.request.POST.get(LOG_PARAM)
        # This entry is set to on whenever a new auto-rebaseline run is going to
        # start logging entries. If this is not on, then the log will get appended
        # to the most recent log entry.
        new_entry = self.request.POST.get(NEW_ENTRY_PARAM) == "on"
        # The case of no NeedsRebaseline lines in TestExpectations is special-cased
        # to always overwrite the previous noneedsrebaseline entry in the log to
        # avoid cluttering the log with useless empty posts. It just updates the
        # date of the entry so that users can see that rebaseline-o-matic is still
        # running.
        no_needs_rebaseline = self.request.POST.get(NO_NEEDS_REBASELINE_PARAM) == "on"
        out = "Wrote new log entry."
        if not new_entry or no_needs_rebaseline:
            log_entries = logs_query().fetch(1)
            if log_entries:
                log_entry = log_entries[0]
                # Bump the timestamp so the entry reflects latest activity.
                log_entry.date = datetime.datetime.now()
                if no_needs_rebaseline:
                    # Don't write out a new log entry for repeated
                    # no_needs_rebaseline cases. The repeated entries just
                    # add noise to the logs.
                    if log_entry.is_no_needs_rebaseline:
                        out = "Overwrote existing no needs rebaseline log."
                    else:
                        out = "Wrote new no needs rebaseline log."
                        new_entry = True
                    new_log_data = ""
                elif log_entry.is_no_needs_rebaseline:
                    out = "Previous entry was a no need rebaseline log. Writing a new log."
                    new_entry = True
                else:
                    out = "Added to existing log entry."
                    log_entry.content = log_entry.content + "\n" + new_log_data
        # Note: when new_entry is True the 'or' short-circuits, so
        # log_entries may legitimately be unbound here.
        if new_entry or not log_entries:
            log_entry = LogEntry(content=new_log_data, is_no_needs_rebaseline=no_needs_rebaseline)
        log_entry.put()
        self.response.out.write(out)
class UploadForm(webapp2.RequestHandler):
    """Serves a simple HTML form for posting log updates manually."""

    def get(self):
        # The template receives the endpoint URLs and the form-field
        # names so the form stays in sync with UpdateLog.
        self.response.out.write(template.render("uploadform.html", {
            "update_log_url": "/updatelog",
            "set_no_needs_rebaseline_url": "/noneedsrebaselines",
            "log_param": LOG_PARAM,
            "new_entry_param": NEW_ENTRY_PARAM,
            "no_needs_rebaseline_param": NO_NEEDS_REBASELINE_PARAM,
        }))
class ShowLatest(webapp2.RequestHandler):
    """Renders the most recent log entries as the status page."""

    def get(self):
        query = logs_query()
        # Optional paging: only show entries older than 'before'.
        before = self.request.get(BEFORE_PARAM)
        if before:
            date = datetime.datetime.strptime(before, "%Y-%m-%dT%H:%M:%SZ")
            query = query.filter(LogEntry.date < date)
        # Default to showing the three most recent entries.
        num_logs = self.request.get(NUM_LOGS_PARAM)
        logs = query.fetch(int(num_logs) if num_logs else 3)
        self.response.out.write(template.render("logs.html", {
            "logs": logs,
            "num_logs_param": NUM_LOGS_PARAM,
            "before_param": BEFORE_PARAM,
        }))
# URL routing table: map each path to its request handler.
routes = [
    ('/uploadform', UploadForm),
    ('/updatelog', UpdateLog),
    ('/', ShowLatest),
]
# WSGI entry point; debug=True shows tracebacks in error responses.
app = webapp2.WSGIApplication(routes, debug=True)
|
{
"content_hash": "6332e0b465bfc37188e5365b418ffc92",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 98,
"avg_line_length": 37.10434782608696,
"alnum_prop": 0.62034216076869,
"repo_name": "lordmos/blink",
"id": "9211df34f97bd6efdcd43d0f1e1d7bfc6571c419",
"size": "5797",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Tools/RebaselineLogServer/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "6433"
},
{
"name": "C",
"bytes": "753714"
},
{
"name": "C++",
"bytes": "40028043"
},
{
"name": "CSS",
"bytes": "539440"
},
{
"name": "F#",
"bytes": "8755"
},
{
"name": "Java",
"bytes": "18650"
},
{
"name": "JavaScript",
"bytes": "25700387"
},
{
"name": "Objective-C",
"bytes": "426711"
},
{
"name": "PHP",
"bytes": "141755"
},
{
"name": "Perl",
"bytes": "901523"
},
{
"name": "Python",
"bytes": "3748305"
},
{
"name": "Ruby",
"bytes": "141818"
},
{
"name": "Shell",
"bytes": "9635"
},
{
"name": "XSLT",
"bytes": "49328"
}
],
"symlink_target": ""
}
|
import RPi.GPIO as GPIO
import time
import atexit
import sys
import re
import pygame.mixer
fd = None  # handle for /dev/servoblaster; opened in the main script below
PIN_ENABLE = 11 # GPIO pin to enable the motor driver
PIN_CLK = 15 # Must be same as "Using P1 pins" line above
# NOTE(review): PIN_CLK is never referenced in the visible code — confirm
# whether it is still needed.
def cleanup():
    # atexit hook: stop the audio, de-energize the motor driver, then
    # release all GPIO state. Order matters: disable power before cleanup.
    print "Cleaning up"
    pygame.mixer.music.stop()
    GPIO.output(PIN_ENABLE, False)
    GPIO.cleanup()
def position(angle):
    """Command servo 0 to the given position via the servoblaster device.

    ServoBlaster's protocol is one "<servo>=<value>\n" line per command; the
    explicit flush makes the move happen immediately instead of waiting on
    stdio buffering.
    """
    # Renamed from `str`, which shadowed the builtin.
    command = "0={0}\n".format(angle)
    fd.write(command)
    fd.flush()
# --- Script entry: play a music file while driving a servo from a cue file ---
if len(sys.argv) != 3:
    raise RuntimeError("usage: {0} musicfile servofile".format(sys.argv[0]))
musicfile = sys.argv[1]
servofile = sys.argv[2]
# Cue file: one "<raw-value> <seconds-from-start>" pair per line.
sfd = open(servofile, "r")
pygame.mixer.init(channels=2,frequency=48000,size=-16)
pygame.mixer.music.load(musicfile)
fd = open("/dev/servoblaster", "w")
GPIO.setmode(GPIO.BOARD)
GPIO.setup(PIN_ENABLE, GPIO.OUT) #Enable
atexit.register(cleanup)
GPIO.output(PIN_ENABLE, True)
pygame.mixer.music.play(0)
starttime = time.time()
prog = re.compile("(\d+)\s+([\d\.]+)")
for line in sfd:
    result = prog.match(line)
    # Lines that don't match the cue format are silently skipped.
    if result:
        angle = int(result.group(1)) * 120 / 500 + 50 # The numbers depend on the servo being used
        delta = float(result.group(2))
        position(angle)
        # delta is seconds from the begin of music
        nowtime = time.time()
        endtime = starttime + delta
        tosleep = endtime - nowtime
        #print "Sleep ", tosleep
        # Sleep until the cue's timestamp; skip the sleep if we're behind.
        if tosleep > 0:
            time.sleep(tosleep)
sfd.close()
fd.close()
|
{
"content_hash": "ff9139cbc88b02ccf64961356551a6f2",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 98,
"avg_line_length": 23.58730158730159,
"alnum_prop": 0.6460296096904441,
"repo_name": "HackaRobot/rpi",
"id": "01953ff4462b70eb932373d5bca1dc75a42cce3e",
"size": "3128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "servodance.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "62635"
},
{
"name": "Shell",
"bytes": "834"
}
],
"symlink_target": ""
}
|
from segwit import send_to_witness
from test_framework.test_framework import BitcoinTestFramework
from test_framework import blocktools
from test_framework.mininode import CTransaction
from test_framework.util import *
from test_framework.util import *
import io
import time
# Sequence number that is BIP 125 opt-in and BIP 68-compliant
BIP125_SEQUENCE_NUMBER = 0xfffffffd

# Passphrase used to encrypt node 1's wallet, and its unlock timeout (seconds).
WALLET_PASSPHRASE = "test"
WALLET_PASSPHRASE_TIMEOUT = 3600
class BumpFeeTest(BitcoinTestFramework):
    """Functional test for the bumpfee RPC (BIP 125 fee bumping)."""

    def __init__(self):
        super().__init__()
        self.num_nodes = 2
        self.setup_clean_chain = True

    def setup_network(self, split=False):
        # -walletrbf={i}: node 0 (peer_node) gets walletrbf=0 (no RBF opt-in),
        # node 1 (rbf_node) gets walletrbf=1.
        extra_args = [["-debug", "-prematurewitness", "-walletprematurewitness", "-walletrbf={}".format(i)]
                      for i in range(self.num_nodes)]
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, extra_args)

        # Encrypt wallet for test_locked_wallet_fails test
        self.nodes[1].encryptwallet(WALLET_PASSPHRASE)
        # encryptwallet shuts the node down; wait, restart, then unlock.
        bitcoind_processes[1].wait()
        self.nodes[1] = start_node(1, self.options.tmpdir, extra_args[1])
        self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)

        connect_nodes_bi(self.nodes, 0, 1)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        peer_node, rbf_node = self.nodes
        rbf_node_address = rbf_node.getnewaddress()

        # fund rbf node with 10 coins of 0.01 btc (1,000,000 satoshis)
        print("Mining blocks...")
        peer_node.generate(110)
        self.sync_all()
        for i in range(25):
            peer_node.sendtoaddress(rbf_node_address, 0.01)
        self.sync_all()
        peer_node.generate(1)
        self.sync_all()
        assert_equal(rbf_node.getbalance(), Decimal("0.25"))

        # Each sub-test below is a module-level function taking the nodes
        # and a fresh destination address on the peer.
        print("Running tests")
        dest_address = peer_node.getnewaddress()
        test_small_output_fails(rbf_node, dest_address)
        test_dust_to_fee(rbf_node, dest_address)
        test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address)
        # FIXME: Reenable when segwit is active for Namecoin.
        #test_segwit_bumpfee_succeeds(rbf_node, dest_address)
        test_nonrbf_bumpfee_fails(peer_node, dest_address)
        test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address)
        test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address)
        test_settxfee(rbf_node, dest_address)
        test_rebumping(rbf_node, dest_address)
        test_rebumping_not_replaceable(rbf_node, dest_address)
        test_unconfirmed_not_spendable(rbf_node, rbf_node_address)
        test_bumpfee_metadata(rbf_node, dest_address)
        test_locked_wallet_fails(rbf_node, dest_address)
        print("Success")
def test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address):
    """Bump a simple opt-in tx and verify replacement bookkeeping on both nodes."""
    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.00090000})
    rbftx = rbf_node.gettransaction(rbfid)
    sync_mempools((rbf_node, peer_node))
    assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool()
    bumped_tx = rbf_node.bumpfee(rbfid)
    # The replacement must pay strictly more fee than the original.
    assert bumped_tx["fee"] - abs(rbftx["fee"]) > 0
    # check that bumped_tx propogates, original tx was evicted and has a wallet conflict
    sync_mempools((rbf_node, peer_node))
    assert bumped_tx["txid"] in rbf_node.getrawmempool()
    assert bumped_tx["txid"] in peer_node.getrawmempool()
    assert rbfid not in rbf_node.getrawmempool()
    assert rbfid not in peer_node.getrawmempool()
    oldwtx = rbf_node.gettransaction(rbfid)
    assert len(oldwtx["walletconflicts"]) > 0
    # check wallet transaction replaces and replaced_by values
    bumpedwtx = rbf_node.gettransaction(bumped_tx["txid"])
    assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"])
    assert_equal(bumpedwtx["replaces_txid"], rbfid)
def test_segwit_bumpfee_succeeds(rbf_node, dest_address):
    # Create a transaction with segwit output, then create an RBF transaction
    # which spends it, and make sure bumpfee can be called on it.
    segwit_in = next(u for u in rbf_node.listunspent() if u["amount"] == Decimal("0.001"))
    segwit_out = rbf_node.validateaddress(rbf_node.getnewaddress())
    rbf_node.addwitnessaddress(segwit_out["address"])
    segwitid = send_to_witness(
        version=0,
        node=rbf_node,
        utxo=segwit_in,
        pubkey=segwit_out["pubkey"],
        encode_p2sh=False,
        amount=Decimal("0.0009"),
        sign=True)

    # Spend the segwit output with an opt-in (BIP 125) sequence number.
    rbfraw = rbf_node.createrawtransaction([{
        'txid': segwitid,
        'vout': 0,
        "sequence": BIP125_SEQUENCE_NUMBER
    }], {dest_address: Decimal("0.0005"),
         get_change_address(rbf_node): Decimal("0.0003")})
    rbfsigned = rbf_node.signrawtransaction(rbfraw)
    rbfid = rbf_node.sendrawtransaction(rbfsigned["hex"])
    assert rbfid in rbf_node.getrawmempool()

    # The bump must replace the original in the mempool.
    bumped_tx = rbf_node.bumpfee(rbfid)
    assert bumped_tx["txid"] in rbf_node.getrawmempool()
    assert rbfid not in rbf_node.getrawmempool()
def test_nonrbf_bumpfee_fails(peer_node, dest_address):
    """bumpfee must reject a transaction that did not opt in to RBF."""
    # peer_node runs with walletrbf disabled, so its transactions are final.
    txid = create_fund_sign_send(peer_node, {dest_address: 0.00090000})
    assert_raises_message(JSONRPCException, "not BIP 125 replaceable", peer_node.bumpfee, txid)
def test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address):
    # cannot bump fee unless the tx has only inputs that we own.
    # here, the rbftx has a peer_node coin and then adds a rbf_node input
    # Note that this test depends upon the RPC code checking input ownership prior to change outputs
    # (since it can't use fundrawtransaction, it lacks a proper change output)
    utxos = [node.listunspent()[-1] for node in (rbf_node, peer_node)]
    inputs = [{
        "txid": utxo["txid"],
        "vout": utxo["vout"],
        "address": utxo["address"],
        "sequence": BIP125_SEQUENCE_NUMBER
    } for utxo in utxos]
    # Everything minus 0.001 fee goes to the destination; no change output.
    output_val = sum(utxo["amount"] for utxo in utxos) - Decimal("0.001")
    rawtx = rbf_node.createrawtransaction(inputs, {dest_address: output_val})
    # Both wallets must sign, since each owns one input.
    signedtx = rbf_node.signrawtransaction(rawtx)
    signedtx = peer_node.signrawtransaction(signedtx["hex"])
    rbfid = rbf_node.sendrawtransaction(signedtx["hex"])
    assert_raises_message(JSONRPCException, "Transaction contains inputs that don't belong to this wallet",
                          rbf_node.bumpfee, rbfid)
def test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address):
    """bumpfee must refuse to bump a transaction that has an in-wallet child."""
    # parent is send-to-self, so we don't have to check which output is change
    # when creating the child tx
    parent_id = create_fund_sign_send(rbf_node, {rbf_node_address: 0.00050000})
    # Spend the parent's first output to create a descendant in the wallet.
    child_raw = rbf_node.createrawtransaction([{"txid": parent_id, "vout": 0}], {dest_address: 0.00020000})
    child_signed = rbf_node.signrawtransaction(child_raw)
    # The child's txid is not needed; only its existence matters (the original
    # bound it to an unused local).
    rbf_node.sendrawtransaction(child_signed["hex"])
    assert_raises_message(JSONRPCException, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
def test_small_output_fails(rbf_node, dest_address):
    # cannot bump fee with a too-small output
    # First: a bump with totalFee=200000 on an identical tx succeeds...
    rbfid = spend_one_input(rbf_node,
                            Decimal("0.01000000"),
                            {dest_address: 0.00800000,
                             get_change_address(rbf_node): Decimal("0.00100000")})
    rbf_node.bumpfee(rbfid, {"totalFee": 200000})

    # ...but one satoshi more (200001) would shrink the change output below
    # the allowed minimum, so it must fail.
    rbfid = spend_one_input(rbf_node,
                            Decimal("0.01000000"),
                            {dest_address: 0.00800000,
                             get_change_address(rbf_node): Decimal("0.00100000")})
    assert_raises_message(JSONRPCException, "Change output is too small", rbf_node.bumpfee, rbfid, {"totalFee": 200001})
def test_dust_to_fee(rbf_node, dest_address):
    # check that if output is reduced to dust, it will be converted to fee
    # the bumped tx requests totalFee=199900, but the dust remainder is folded
    # in and the final fee becomes 200000 (0.002), eliminating the change output
    rbfid = spend_one_input(rbf_node,
                            Decimal("0.01000000"),
                            {dest_address: 0.00800000,
                             get_change_address(rbf_node): Decimal("0.00100000")})
    fulltx = rbf_node.getrawtransaction(rbfid, 1)
    bumped_tx = rbf_node.bumpfee(rbfid, {"totalFee": 199900})
    full_bumped_tx = rbf_node.getrawtransaction(bumped_tx["txid"], 1)
    assert_equal(bumped_tx["fee"], Decimal("0.00200000"))
    assert_equal(len(fulltx["vout"]), 2)
    assert_equal(len(full_bumped_tx["vout"]), 1)  #change output is eliminated
def test_settxfee(rbf_node, dest_address):
    # check that bumpfee reacts correctly to the use of settxfee (paytxfee)
    # increase feerate by 2.5x, test that fee increased at least 2x
    rbf_node.settxfee(Decimal("0.00100000"))
    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.09000000})
    rbftx = rbf_node.gettransaction(rbfid)
    rbf_node.settxfee(Decimal("0.00250000"))
    bumped_tx = rbf_node.bumpfee(rbfid)
    assert bumped_tx["fee"] > 2 * abs(rbftx["fee"])
    rbf_node.settxfee(Decimal("0.00000000"))  # unset paytxfee
def test_rebumping(rbf_node, dest_address):
    # check that re-bumping the original tx fails, but bumping the bumper succeeds
    rbf_node.settxfee(Decimal("0.00010000"))
    rbfid = create_fund_sign_send(rbf_node, {dest_address: 0.00900000})
    bumped = rbf_node.bumpfee(rbfid, {"totalFee": 100000})
    # The original is already replaced, so a second bump of it must fail...
    assert_raises_message(JSONRPCException, "already bumped", rbf_node.bumpfee, rbfid, {"totalFee": 200000})
    # ...while bumping the replacement itself is allowed.
    rbf_node.bumpfee(bumped["txid"], {"totalFee": 200000})
def test_rebumping_not_replaceable(rbf_node, dest_address):
    """A bump created with replaceable=False cannot itself be bumped."""
    original = create_fund_sign_send(rbf_node, {dest_address: 0.00900000})
    final_bump = rbf_node.bumpfee(original, {"totalFee": 100000, "replaceable": False})
    assert_raises_message(JSONRPCException, "Transaction is not BIP 125 replaceable",
                          rbf_node.bumpfee, final_bump["txid"], {"totalFee": 200000})
def test_unconfirmed_not_spendable(rbf_node, rbf_node_address):
    # check that unconfirmed outputs from bumped transactions are not spendable
    rbfid = create_fund_sign_send(rbf_node, {rbf_node_address: 0.00090000})
    rbftx = rbf_node.gettransaction(rbfid)["hex"]
    assert rbfid in rbf_node.getrawmempool()
    bumpid = rbf_node.bumpfee(rbfid)["txid"]
    assert bumpid in rbf_node.getrawmempool()
    assert rbfid not in rbf_node.getrawmempool()

    # check that outputs from the bump transaction are not spendable
    # due to the replaces_txid check in CWallet::AvailableCoins
    assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == bumpid], [])

    # submit a block with the rbf tx to clear the bump tx out of the mempool,
    # then call abandon to make sure the wallet doesn't attempt to resubmit the
    # bump tx, then invalidate the block so the rbf tx will be put back in the
    # mempool. this makes it possible to check whether the rbf tx outputs are
    # spendable before the rbf tx is confirmed.
    block = submit_block_with_tx(rbf_node, rbftx)
    rbf_node.abandontransaction(bumpid)
    rbf_node.invalidateblock(block.hash)
    assert bumpid not in rbf_node.getrawmempool()
    assert rbfid in rbf_node.getrawmempool()

    # check that outputs from the rbf tx are not spendable before the
    # transaction is confirmed, due to the replaced_by_txid check in
    # CWallet::AvailableCoins
    assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == rbfid], [])

    # check that the main output from the rbf tx is spendable after confirmed
    rbf_node.generate(1)
    assert_equal(
        sum(1 for t in rbf_node.listunspent(minconf=0, include_unsafe=False)
            if t["txid"] == rbfid and t["address"] == rbf_node_address and t["spendable"]), 1)
def test_bumpfee_metadata(rbf_node, dest_address):
    """The comment/to metadata must carry over from the original to the bump."""
    original = rbf_node.sendtoaddress(dest_address, 0.00090000, "comment value", "to value")
    replacement = rbf_node.bumpfee(original)
    wtx = rbf_node.gettransaction(replacement["txid"])
    assert_equal(wtx["comment"], "comment value")
    assert_equal(wtx["to"], "to value")
def test_locked_wallet_fails(rbf_node, dest_address):
    """bumpfee must fail cleanly while the wallet is locked."""
    txid = create_fund_sign_send(rbf_node, {dest_address: 0.00090000})
    rbf_node.walletlock()
    assert_raises_message(JSONRPCException, "Please enter the wallet passphrase with walletpassphrase first.",
                          rbf_node.bumpfee, txid)
def create_fund_sign_send(node, outputs):
    """Build, fund, sign and broadcast a tx paying `outputs`; return its txid."""
    unfunded = node.createrawtransaction([], outputs)
    funded = node.fundrawtransaction(unfunded)
    signed = node.signrawtransaction(funded["hex"])
    return node.sendrawtransaction(signed["hex"])
def spend_one_input(node, input_amount, outputs):
    """Spend the single wallet UTXO of value `input_amount` to `outputs`.

    The input carries the BIP 125 opt-in sequence number so the resulting
    transaction is replaceable. Returns the broadcast txid. Raises
    StopIteration if no UTXO of the requested amount exists.
    """
    utxo = next(u for u in node.listunspent() if u["amount"] == input_amount)
    # Renamed from `input`, which shadowed the builtin.
    txin = dict(sequence=BIP125_SEQUENCE_NUMBER, **utxo)
    rawtx = node.createrawtransaction([txin], outputs)
    signedtx = node.signrawtransaction(rawtx)
    return node.sendrawtransaction(signedtx["hex"])
def get_change_address(node):
    """Get a wallet change address.

    There is no wallet RPC to access unused change addresses, so this creates
    a dummy transaction, calls fundrawtransaction to add an input and change
    output, then returns the change address.
    """
    dest_address = node.getnewaddress()
    dest_amount = Decimal("0.00012345")
    rawtx = node.createrawtransaction([], {dest_address: dest_amount})
    funded = node.fundrawtransaction(rawtx)
    decoded = node.decoderawtransaction(funded["hex"])
    # The change output is whichever output does not carry the dummy amount.
    change_addresses = (addr
                        for out in decoded["vout"] if out["value"] != dest_amount
                        for addr in out["scriptPubKey"]["addresses"])
    return next(change_addresses)
def submit_block_with_tx(node, tx):
    """Mine and submit a block containing only the coinbase plus `tx` (hex).

    Returns the block object. Raises if the node rejects the block.
    """
    ctx = CTransaction()
    ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))

    tip = node.getbestblockhash()
    height = node.getblockcount() + 1
    # Block time must exceed the median time of the previous 11 blocks.
    block_time = node.getblockheader(tip)["mediantime"] + 1
    block = blocktools.create_block(int(tip, 16), blocktools.create_coinbase(height), block_time)
    block.vtx.append(ctx)
    block.rehash()
    block.hashMerkleRoot = block.calc_merkle_root()
    block.solve()
    # submitblock returns None on success and an error string on rejection.
    error = node.submitblock(bytes_to_hex_str(block.serialize(True)))
    if error is not None:
        raise Exception(error)
    return block
# Script entry point: run the functional test when invoked directly.
if __name__ == "__main__":
    BumpFeeTest().main()
|
{
"content_hash": "9f1c895fbdc5f475511afcbf059ae6e3",
"timestamp": "",
"source": "github",
"line_count": 322,
"max_line_length": 120,
"avg_line_length": 45.046583850931675,
"alnum_prop": 0.6785246466735608,
"repo_name": "wiggi/huntercore",
"id": "4d3688a974e729958637c847ea1b62d6be41b796",
"size": "14715",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/bumpfee.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "708759"
},
{
"name": "C++",
"bytes": "6189673"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50622"
},
{
"name": "Java",
"bytes": "32388"
},
{
"name": "M4",
"bytes": "186077"
},
{
"name": "Makefile",
"bytes": "108278"
},
{
"name": "Objective-C",
"bytes": "4032"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "Protocol Buffer",
"bytes": "2328"
},
{
"name": "Python",
"bytes": "1168914"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Shell",
"bytes": "48970"
}
],
"symlink_target": ""
}
|
'''
thevideo urlresolver plugin
Copyright (C) 2014 Eldorado
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from lib import jsunpack
from urlresolver9 import common
from urlresolver9.resolver import UrlResolver, ResolverError
class VidAgResolver(UrlResolver):
    """URL resolver for videos hosted on vid.ag."""
    name = "vid.ag"
    domains = ["vid.ag"]
    pattern = '(?://|\.)(vid\.ag)/(?:embed-)?([0-9A-Za-z]+)'

    def __init__(self):
        self.net = common.Net()

    def get_media_url(self, host, media_id):
        page = self.net.http_GET(self.get_url(host, media_id)).content

        # The stream URL is usually hidden inside a packed (p.a.c.k.e.r)
        # script; unpack each one and look for a `file:"..."` entry.
        for packed in re.finditer('(eval\(function.*?)</script>', page, re.DOTALL):
            unpacked = jsunpack.unpack(packed.group(1))
            found = re.search('file\s*:\s*"([^"]+)', unpacked)
            if found:
                return found.group(1)

        # Fall back to a plain `file:"..."` reference in the raw page.
        found = re.search('file\s*:\s*"([^"]+)', page)
        if found:
            return found.group(1)

        raise ResolverError('File Not Found or removed')

    def get_url(self, host, media_id):
        return 'http://vid.ag/embed-%s.html' % media_id
|
{
"content_hash": "c075dd7e37cc43be674620087fffb7e9",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 82,
"avg_line_length": 34.208333333333336,
"alnum_prop": 0.6571254567600487,
"repo_name": "mrknow/filmkodi",
"id": "23517bf262b0eb29f11cbdbf8dde3c6f4783a2a6",
"size": "1642",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "script.mrknow.urlresolver/lib/urlresolver9/plugins/vidag.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7510"
},
{
"name": "Python",
"bytes": "8058464"
},
{
"name": "Shell",
"bytes": "18531"
}
],
"symlink_target": ""
}
|
from __future__ import division, unicode_literals, print_function
import glob
import itertools
import logging
import math
import os
import re
import warnings
import xml.etree.cElementTree as ET
from collections import defaultdict
from io import StringIO
import numpy as np
from monty.io import zopen, reverse_readfile
from monty.json import MSONable
from monty.json import jsanitize
from monty.re import regrep
from six import string_types
from six.moves import map, zip
from pymatgen.analysis.nmr import NMRChemicalShiftNotation
from pymatgen.core.composition import Composition
from pymatgen.core.lattice import Lattice
from pymatgen.core.periodic_table import Element
from pymatgen.core.structure import Structure
from pymatgen.core.units import unitized
from pymatgen.electronic_structure.bandstructure import BandStructure, \
BandStructureSymmLine, get_reconstructed_band_structure
from pymatgen.electronic_structure.core import Spin, Orbital, OrbitalType, Magmom
from pymatgen.electronic_structure.dos import CompleteDos, Dos
from pymatgen.entries.computed_entries import \
ComputedEntry, ComputedStructureEntry
from pymatgen.io.vasp.inputs import Incar, Kpoints, Poscar, Potcar
from pymatgen.util.io_utils import clean_lines, micro_pyawk
# NOTE(review): this string sits after the imports, so it is a no-op statement
# rather than the module __doc__; moving it above the imports would make it
# the real module docstring.
"""
Classes for reading/manipulating/writing VASP ouput files.
"""

__author__ = "Shyue Ping Ong, Geoffroy Hautier, Rickard Armiento, " + \
    "Vincent L Chevrier, Ioannis Petousis, Stephen Dacek"
__credits__ = "Anubhav Jain"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.2"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Nov 30, 2012"

# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
def _parse_parameters(val_type, val):
"""
Helper function to convert a Vasprun parameter into the proper type.
Boolean, int and float types are converted.
Args:
val_type: Value type parsed from vasprun.xml.
val: Actual string value parsed for vasprun.xml.
"""
if val_type == "logical":
return val == "T"
elif val_type == "int":
return int(val)
elif val_type == "string":
return val.strip()
else:
return float(val)
def _parse_v_parameters(val_type, val, filename, param_name):
"""
Helper function to convert a Vasprun array-type parameter into the proper
type. Boolean, int and float types are converted.
Args:
val_type: Value type parsed from vasprun.xml.
val: Actual string value parsed for vasprun.xml.
filename: Fullpath of vasprun.xml. Used for robust error handling.
E.g., if vasprun.xml contains \\*\\*\\* for some Incar parameters,
the code will try to read from an INCAR file present in the same
directory.
param_name: Name of parameter.
Returns:
Parsed value.
"""
if val_type == "logical":
val = [i == "T" for i in val.split()]
elif val_type == "int":
try:
val = [int(i) for i in val.split()]
except ValueError:
# Fix for stupid error in vasprun sometimes which displays
# LDAUL/J as 2****
val = _parse_from_incar(filename, param_name)
if val is None:
raise IOError("Error in parsing vasprun.xml")
elif val_type == "string":
val = val.split()
else:
try:
val = [float(i) for i in val.split()]
except ValueError:
# Fix for stupid error in vasprun sometimes which displays
# MAGMOM as 2****
val = _parse_from_incar(filename, param_name)
if val is None:
raise IOError("Error in parsing vasprun.xml")
return val
def _parse_varray(elem):
if elem.get("type", None) == 'logical':
m = [[True if i=='T' else False for i in v.text.split()] for v in elem]
else:
m = [[_vasprun_float(i) for i in v.text.split()] for v in elem]
return m
def _parse_from_incar(filename, key):
    """
    Helper function to parse a parameter from the INCAR.
    """
    dirname = os.path.dirname(filename)
    for candidate in os.listdir(dirname):
        if re.search(r"INCAR", candidate):
            warnings.warn("INCAR found. Using " + key + " from INCAR.")
            incar = Incar.from_file(os.path.join(dirname, candidate))
            # Only the first INCAR-like file is consulted, whether or not it
            # actually contains the key.
            return incar[key] if key in incar else None
    return None
def _vasprun_float(f):
"""
Large numbers are often represented as ********* in the vasprun.
This function parses these values as np.nan
"""
try:
return float(f)
except ValueError as e:
f = f.strip()
if f == '*' * len(f):
warnings.warn('Float overflow (*******) encountered in vasprun')
return np.nan
raise e
class Vasprun(MSONable):
"""
Vastly improved cElementTree-based parser for vasprun.xml files. Uses
iterparse to support incremental parsing of large files.
Speedup over Dom is at least 2x for smallish files (~1Mb) to orders of
magnitude for larger files (~10Mb).
Args:
filename (str): Filename to parse
ionic_step_skip (int): If ionic_step_skip is a number > 1,
only every ionic_step_skip ionic steps will be read for
structure and energies. This is very useful if you are parsing
very large vasprun.xml files and you are not interested in every
single ionic step. Note that the final energies may not be the
actual final energy in the vasprun.
ionic_step_offset (int): Used together with ionic_step_skip. If set,
the first ionic step read will be offset by the amount of
ionic_step_offset. For example, if you want to start reading
every 10th structure but only from the 3rd structure onwards,
set ionic_step_skip to 10 and ionic_step_offset to 3. Main use
case is when doing statistical structure analysis with
extremely long time scale multiple VASP calculations of
varying numbers of steps.
parse_dos (bool): Whether to parse the dos. Defaults to True. Set
to False to shave off significant time from the parsing if you
are not interested in getting those data.
parse_eigen (bool): Whether to parse the eigenvalues. Defaults to
True. Set to False to shave off significant time from the
parsing if you are not interested in getting those data.
parse_projected_eigen (bool): Whether to parse the projected
eigenvalues. Defaults to False. Set to True to obtain projected
eigenvalues. **Note that this can take an extreme amount of time
and memory.** So use this wisely.
parse_potcar_file (bool/str): Whether to parse the potcar file to read
the potcar hashes for the potcar_spec attribute. Defaults to True,
where no hashes will be determined and the potcar_spec dictionaries
will read {"symbol": ElSymbol, "hash": None}. By Default, looks in
the same directory as the vasprun.xml, with same extensions as
Vasprun.xml. If a string is provided, looks at that filepath.
occu_tol (float): Sets the minimum tol for the determination of the
vbm and cbm. Usually the default of 1e-8 works well enough,
but there may be pathological cases.
exception_on_bad_xml (bool): Whether to throw a ParseException if a
malformed XML is detected. Default to True, which ensures only
proper vasprun.xml are parsed. You can set to False if you want
partial results (e.g., if you are monitoring a calculation during a
run), but use the results with care. A warning is issued.
**Vasp results**
.. attribute:: ionic_steps
All ionic steps in the run as a list of
{"structure": structure at end of run,
"electronic_steps": {All electronic step data in vasprun file},
"stresses": stress matrix}
.. attribute:: structures
List of Structure objects for the structure at each ionic step.
.. attribute:: tdos
Total dos calculated at the end of run.
.. attribute:: idos
Integrated dos calculated at the end of run.
.. attribute:: pdos
List of list of PDos objects. Access as pdos[atomindex][orbitalindex]
.. attribute:: efermi
Fermi energy
.. attribute:: eigenvalues
Available only if parse_eigen=True. Final eigenvalues as a dict of
{(spin, kpoint index):[[eigenvalue, occu]]}.
This representation is based on actual ordering in VASP and is meant as
an intermediate representation to be converted into proper objects. The
kpoint index is 0-based (unlike the 1-based indexing in VASP).
.. attribute:: projected_eigenvalues
Final projected eigenvalues as a dict of {spin: nd-array}. To access
a particular value, you need to do
Vasprun.projected_eigenvalues[spin][kpoint index][band index][atom index][orbital_index]
This representation is based on actual ordering in VASP and is meant as
an intermediate representation to be converted into proper objects. The
kpoint, band and atom indices are 0-based (unlike the 1-based indexing
in VASP).
.. attribute:: dielectric
The real and imaginary part of the dielectric constant (e.g., computed
by RPA) in function of the energy (frequency). Optical properties (e.g.
absorption coefficient) can be obtained through this.
The data is given as a tuple of 3 values containing each of them
the energy, the real part tensor, and the imaginary part tensor
([energies],[[real_partxx,real_partyy,real_partzz,real_partxy,
real_partyz,real_partxz]],[[imag_partxx,imag_partyy,imag_partzz,
imag_partxy, imag_partyz, imag_partxz]])
.. attribute:: other_dielectric
Dictionary, with the tag comment as key, containing other variants of
the real and imaginary part of the dielectric constant (e.g., computed
by RPA) in function of the energy (frequency). Optical properties (e.g.
absorption coefficient) can be obtained through this.
The data is given as a tuple of 3 values containing each of them
the energy, the real part tensor, and the imaginary part tensor
([energies],[[real_partxx,real_partyy,real_partzz,real_partxy,
real_partyz,real_partxz]],[[imag_partxx,imag_partyy,imag_partzz,
imag_partxy, imag_partyz, imag_partxz]])
.. attribute:: epsilon_static
The static part of the dielectric constant. Present when it's a DFPT run
(LEPSILON=TRUE)
.. attribute:: epsilon_static_wolfe
The static part of the dielectric constant without any local field
effects. Present when it's a DFPT run (LEPSILON=TRUE)
.. attribute:: epsilon_ionic
The ionic part of the static dielectric constant. Present when it's a
DFPT run (LEPSILON=TRUE) and IBRION=5, 6, 7 or 8
.. attribute:: nionic_steps
The total number of ionic steps. This number is always equal
to the total number of steps in the actual run even if
ionic_step_skip is used.
.. attribute:: force_constants
Force constants computed in phonon DFPT run(IBRION = 8).
The data is a 4D numpy array of shape (natoms, natoms, 3, 3).
.. attribute:: normalmode_eigenvals
Normal mode frequencies.
1D numpy array of size 3*natoms.
.. attribute:: normalmode_eigenvecs
Normal mode eigen vectors.
3D numpy array of shape (3*natoms, natoms, 3).
**Vasp inputs**
.. attribute:: incar
Incar object for parameters specified in INCAR file.
.. attribute:: parameters
Incar object with parameters that vasp actually used, including all
defaults.
.. attribute:: kpoints
Kpoints object for KPOINTS specified in run.
.. attribute:: actual_kpoints
List of actual kpoints, e.g.,
[[0.25, 0.125, 0.08333333], [-0.25, 0.125, 0.08333333],
[0.25, 0.375, 0.08333333], ....]
.. attribute:: actual_kpoints_weights
List of kpoint weights, E.g.,
[0.04166667, 0.04166667, 0.04166667, 0.04166667, 0.04166667, ....]
.. attribute:: atomic_symbols
List of atomic symbols, e.g., ["Li", "Fe", "Fe", "P", "P", "P"]
.. attribute:: potcar_symbols
List of POTCAR symbols. e.g.,
["PAW_PBE Li 17Jan2003", "PAW_PBE Fe 06Sep2000", ..]
Author: Shyue Ping Ong
"""
    def __init__(self, filename, ionic_step_skip=None,
                 ionic_step_offset=0, parse_dos=True,
                 parse_eigen=True, parse_projected_eigen=False,
                 parse_potcar_file=True, occu_tol=1e-8,
                 exception_on_bad_xml=True):
        # See the class docstring for the meaning of each argument.
        self.filename = filename
        self.ionic_step_skip = ionic_step_skip
        self.ionic_step_offset = ionic_step_offset
        self.occu_tol = occu_tol
        self.exception_on_bad_xml = exception_on_bad_xml

        with zopen(filename, "rt") as f:
            if ionic_step_skip or ionic_step_offset:
                # remove parts of the xml file and parse the string
                run = f.read()
                steps = run.split("<calculation>")
                # The text before the first <calculation> is the preamble!
                preamble = steps.pop(0)
                self.nionic_steps = len(steps)
                new_steps = steps[ionic_step_offset::int(ionic_step_skip)]
                # add the tailing informat in the last step from the run
                to_parse = "<calculation>".join(new_steps)
                if steps[-1] != new_steps[-1]:
                    to_parse = "{}<calculation>{}{}".format(
                        preamble, to_parse,
                        steps[-1].split("</calculation>")[-1])
                else:
                    to_parse = "{}<calculation>{}".format(preamble, to_parse)
                self._parse(StringIO(to_parse), parse_dos=parse_dos,
                            parse_eigen=parse_eigen,
                            parse_projected_eigen=parse_projected_eigen)
            else:
                self._parse(f, parse_dos=parse_dos, parse_eigen=parse_eigen,
                            parse_projected_eigen=parse_projected_eigen)
                self.nionic_steps = len(self.ionic_steps)

        if parse_potcar_file:
            self.update_potcar_spec(parse_potcar_file)

        # Warn (don't fail) on unconverged, non-BSE runs.
        if self.incar.get("ALGO", "") != "BSE" and (not self.converged):
            msg = "%s is an unconverged VASP run.\n" % filename
            msg += "Electronic convergence reached: %s.\n" % \
                   self.converged_electronic
            msg += "Ionic convergence reached: %s." % self.converged_ionic
            warnings.warn(msg, UnconvergedVASPWarning)
    def _parse(self, stream, parse_dos, parse_eigen, parse_projected_eigen):
        """Incrementally parse the vasprun XML stream, populating attributes.

        Uses ET.iterparse so large files never need to be fully loaded; the
        header elements (generator/incar/kpoints/parameters/atominfo and the
        initial structure) are only parsed until the first <calculation> tag
        is seen.
        """
        self.efermi = None
        self.eigenvalues = None
        self.projected_eigenvalues = None
        self.other_dielectric = {}
        ionic_steps = []
        parsed_header = False
        try:
            for event, elem in ET.iterparse(stream):
                tag = elem.tag
                if not parsed_header:
                    if tag == "generator":
                        self.generator = self._parse_params(elem)
                    elif tag == "incar":
                        self.incar = self._parse_params(elem)
                    elif tag == "kpoints":
                        self.kpoints, self.actual_kpoints, \
                            self.actual_kpoints_weights = self._parse_kpoints(
                            elem)
                    elif tag == "parameters":
                        self.parameters = self._parse_params(elem)
                    elif tag == "structure" and elem.attrib.get("name") == \
                            "initialpos":
                        self.initial_structure = self._parse_structure(elem)
                    elif tag == "atominfo":
                        self.atomic_symbols, self.potcar_symbols = \
                            self._parse_atominfo(elem)
                        # Placeholder hashes; filled in by update_potcar_spec.
                        self.potcar_spec = [{"titel": p,
                                             "hash": None} for
                                            p in self.potcar_symbols]
                if tag == "calculation":
                    parsed_header = True
                    # LCHIMAG (chemical shift) runs produce several logical
                    # steps per <calculation> element.
                    if not self.parameters.get("LCHIMAG", False):
                        ionic_steps.append(self._parse_calculation(elem))
                    else:
                        ionic_steps.extend(self._parse_chemical_shift_calculation(elem))
                elif parse_dos and tag == "dos":
                    try:
                        self.tdos, self.idos, self.pdos = self._parse_dos(elem)
                        self.efermi = self.tdos.efermi
                        self.dos_has_errors = False
                    except Exception as ex:
                        # DOS parsing is best-effort; flag rather than fail.
                        self.dos_has_errors = True
                elif parse_eigen and tag == "eigenvalues":
                    self.eigenvalues = self._parse_eigen(elem)
                elif parse_projected_eigen and tag == "projected":
                    self.projected_eigenvalues = self._parse_projected_eigen(
                        elem)
                elif tag == "dielectricfunction":
                    # The uncommented (or RPA inverse-macroscopic) tensor is
                    # the primary one; all others are keyed by their comment.
                    if ("comment" not in elem.attrib) or \
                       elem.attrib["comment"] == "INVERSE MACROSCOPIC DIELECTRIC TENSOR (including local field effects in RPA (Hartree))":
                        self.dielectric = self._parse_diel(elem)
                    else:
                        comment = elem.attrib["comment"]
                        self.other_dielectric[comment] = self._parse_diel(elem)
                elif tag == "structure" and elem.attrib.get("name") == \
                        "finalpos":
                    self.final_structure = self._parse_structure(elem)
                elif tag == "dynmat":
                    # Phonon DFPT (IBRION=8) output: reshape the flat 3N x 3N
                    # hessian into per-atom-pair 3x3 force-constant blocks.
                    hessian, eigenvalues, eigenvectors = self._parse_dynmat(elem)
                    natoms = len(self.atomic_symbols)
                    hessian = np.array(hessian)
                    self.force_constants = np.zeros((natoms, natoms, 3, 3), dtype='double')
                    for i in range(natoms):
                        for j in range(natoms):
                            self.force_constants[i, j] = hessian[i*3:(i+1)*3,j*3:(j+1)*3]
                    phonon_eigenvectors = []
                    for ev in eigenvectors:
                        phonon_eigenvectors.append(np.array(ev).reshape(natoms, 3))
                    self.normalmode_eigenvals = np.array(eigenvalues)
                    self.normalmode_eigenvecs = np.array(phonon_eigenvectors)
        except ET.ParseError as ex:
            if self.exception_on_bad_xml:
                raise ex
            else:
                warnings.warn(
                    "XML is malformed. Parsing has stopped but partial data"
                    "is available.", UserWarning)
        self.ionic_steps = ionic_steps
        self.vasp_version = self.generator["version"]
@property
def structures(self):
return [step["structure"] for step in self.ionic_steps]
@property
def epsilon_static(self):
"""
Property only available for DFPT calculations.
"""
return self.ionic_steps[-1].get("epsilon", [])
@property
def epsilon_static_wolfe(self):
"""
Property only available for DFPT calculations.
"""
return self.ionic_steps[-1].get("epsilon_rpa", [])
@property
def epsilon_ionic(self):
"""
Property only available for DFPT calculations and when IBRION=5, 6, 7 or 8.
"""
return self.ionic_steps[-1].get("epsilon_ion", [])
@property
def lattice(self):
return self.final_structure.lattice
@property
def lattice_rec(self):
return self.final_structure.lattice.reciprocal_lattice
@property
def converged_electronic(self):
"""
Checks that electronic step convergence has been reached in the final
ionic step
"""
final_esteps = self.ionic_steps[-1]["electronic_steps"]
if 'LEPSILON' in self.incar and self.incar['LEPSILON']:
i = 1
to_check = set(['e_wo_entrp', 'e_fr_energy', 'e_0_energy'])
while set(final_esteps[i].keys()) == to_check:
i += 1
return i + 1 != self.parameters["NELM"]
return len(final_esteps) < self.parameters["NELM"]
@property
def converged_ionic(self):
"""
Checks that ionic step convergence has been reached, i.e. that vasp
exited before reaching the max ionic steps for a relaxation run
"""
nsw = self.parameters.get("NSW", 0)
return nsw <= 1 or len(self.ionic_steps) < nsw
@property
def converged(self):
"""
Returns true if a relaxation run is converged.
"""
return self.converged_electronic and self.converged_ionic
@property
@unitized("eV")
def final_energy(self):
"""
Final energy from the vasp run.
"""
try:
final_istep = self.ionic_steps[-1]
if final_istep["e_wo_entrp"] != final_istep[
'electronic_steps'][-1]["e_0_energy"]:
warnings.warn("Final e_wo_entrp differs from the final "
"electronic step. VASP may have included some "
"corrections, e.g., vdw. Vasprun will return "
"the final e_wo_entrp, i.e., including "
"corrections in such instances.")
return final_istep["e_wo_entrp"]
return final_istep['electronic_steps'][-1]["e_0_energy"]
except (IndexError, KeyError):
warnings.warn("Calculation does not have a total energy. "
"Possibly a GW or similar kind of run. A value of "
"infinity is returned.")
return float('inf')
@property
def complete_dos(self):
"""
A complete dos object which incorporates the total dos and all
projected dos.
"""
final_struct = self.final_structure
pdoss = {final_struct[i]: pdos for i, pdos in enumerate(self.pdos)}
return CompleteDos(self.final_structure, self.tdos, pdoss)
@property
def hubbards(self):
"""
Hubbard U values used if a vasprun is a GGA+U run. {} otherwise.
"""
symbols = [s.split()[1] for s in self.potcar_symbols]
symbols = [re.split(r"_", s)[0] for s in symbols]
if not self.incar.get("LDAU", False):
return {}
us = self.incar.get("LDAUU", self.parameters.get("LDAUU"))
js = self.incar.get("LDAUJ", self.parameters.get("LDAUJ"))
if len(js) != len(us):
js = [0] * len(us)
if len(us) == len(symbols):
return {symbols[i]: us[i] - js[i] for i in range(len(symbols))}
elif sum(us) == 0 and sum(js) == 0:
return {}
else:
raise VaspParserError("Length of U value parameters and atomic "
"symbols are mismatched")
@property
def run_type(self):
"""
Returns the run type. Currently supports LDA, GGA, vdW-DF and HF calcs.
TODO: Fix for other functional types like PW91, other vdW types, etc.
"""
if self.parameters.get("LHFCALC", False):
rt = "HF"
elif self.parameters.get("LUSE_VDW", False):
vdw_gga = {"RE": "DF", "OR": "optPBE", "BO": "optB88",
"MK": "optB86b", "ML": "DF2"}
gga = self.parameters.get("GGA").upper()
rt = "vdW-" + vdw_gga[gga]
elif self.potcar_symbols[0].split()[0] == 'PAW':
rt = "LDA"
else:
rt = "GGA"
if self.is_hubbard:
rt += "+U"
return rt
@property
def is_hubbard(self):
"""
True if run is a DFT+U run.
"""
if len(self.hubbards) == 0:
return False
return sum(self.hubbards.values()) > 1e-8
@property
def is_spin(self):
"""
True if run is spin-polarized.
"""
return self.parameters.get("ISPIN", 1) == 2
def get_computed_entry(self, inc_structure=True, parameters=None,
data=None):
"""
Returns a ComputedStructureEntry from the vasprun.
Args:
inc_structure (bool): Set to True if you want
ComputedStructureEntries to be returned instead of
ComputedEntries.
parameters (list): Input parameters to include. It has to be one of
the properties supported by the Vasprun object. If
parameters is None, a default set of parameters that are
necessary for typical post-processing will be set.
data (list): Output data to include. Has to be one of the properties
supported by the Vasprun object.
Returns:
ComputedStructureEntry/ComputedEntry
"""
param_names = {"is_hubbard", "hubbards", "potcar_symbols",
"potcar_spec", "run_type"}
if parameters:
param_names.update(parameters)
params = {p: getattr(self, p) for p in param_names}
data = {p: getattr(self, p) for p in data} if data is not None else {}
if inc_structure:
return ComputedStructureEntry(self.final_structure,
self.final_energy, parameters=params,
data=data)
else:
return ComputedEntry(self.final_structure.composition,
self.final_energy, parameters=params,
data=data)
    def get_band_structure(self, kpoints_filename=None, efermi=None,
                           line_mode=False):
        """
        Returns the band structure as a BandStructure object
        Args:
            kpoints_filename (str): Full path of the KPOINTS file from which
                the band structure is generated.
                If none is provided, the code will try to intelligently
                determine the appropriate KPOINTS file by substituting the
                filename of the vasprun.xml with KPOINTS.
                The latter is the default behavior.
            efermi (float): If you want to specify manually the fermi energy
                this is where you should do it. By default, the None value
                means the code will get it from the vasprun.
            line_mode (bool): Force the band structure to be considered as
                a run along symmetry lines.
        Returns:
            a BandStructure object (or more specifically a
            BandStructureSymmLine object if the run is detected to be a run
            along symmetry lines)
        Two types of runs along symmetry lines are accepted: non-sc with
        Line-Mode in the KPOINT file or hybrid, self-consistent with a
        uniform grid+a few kpoints along symmetry lines (explicit KPOINTS
        file) (it's not possible to run a non-sc band structure with hybrid
        functionals). The explicit KPOINTS file needs to have data on the
        kpoint label as commentary.
        """
        if not kpoints_filename:
            # Default: look for KPOINTS next to the vasprun.xml.
            kpoints_filename = self.filename.replace('vasprun.xml', 'KPOINTS')
        if not os.path.exists(kpoints_filename) and line_mode is True:
            raise VaspParserError('KPOINTS needed to obtain band structure '
                                  'along symmetry lines.')
        if efermi is None:
            efermi = self.efermi
        kpoint_file = None
        if os.path.exists(kpoints_filename):
            kpoint_file = Kpoints.from_file(kpoints_filename)
        lattice_new = Lattice(self.lattice_rec.matrix)
        kpoints = [np.array(self.actual_kpoints[i])
                   for i in range(len(self.actual_kpoints))]
        p_eigenvals = defaultdict(list)
        eigenvals = defaultdict(list)
        nkpts = len(kpoints)
        neigenvalues = [len(v) for v in self.eigenvalues[Spin.up]]
        min_eigenvalues = min(neigenvalues)
        for spin, v in self.eigenvalues.items():
            # self.eigenvalues axes are (kpoint, band, 2); BS input needs
            # (band, kpoint) of bare eigenvalues, so swap and drop occupancies.
            v = np.swapaxes(v, 0, 1)
            eigenvals[spin] = v[:, :, 0]
            if self.projected_eigenvalues:
                peigen = self.projected_eigenvalues[spin]
                # Original axes for self.projected_eigenvalues are kpoints,
                # band, ion, orb.
                # For BS input, we need band, kpoints, orb, ion.
                peigen = np.swapaxes(peigen, 0, 1)  # Swap kpoint and band axes
                peigen = np.swapaxes(peigen, 2, 3)  # Swap ion and orb axes
                p_eigenvals[spin] = peigen
                # for b in range(min_eigenvalues):
                #     p_eigenvals[spin].append(
                #         [{Orbital(orb): v for orb, v in enumerate(peigen[b, k])}
                #          for k in range(nkpts)])
        # check if we have an hybrid band structure computation
        # for this we look at the presence of the LHFCALC tag
        hybrid_band = False
        if self.parameters.get('LHFCALC', False):
            hybrid_band = True
        if kpoint_file is not None:
            if kpoint_file.style == Kpoints.supported_modes.Line_mode:
                line_mode = True
        if line_mode:
            labels_dict = {}
            if hybrid_band:
                # Hybrid runs mix a self-consistent grid (nonzero weights)
                # with zero-weight line-mode kpoints appended at the end;
                # keep only the zero-weight tail for the band structure.
                start_bs_index = 0
                for i in range(len(self.actual_kpoints)):
                    if self.actual_kpoints_weights[i] == 0.0:
                        start_bs_index = i
                        break
                for i in range(start_bs_index, len(kpoint_file.kpts)):
                    if kpoint_file.labels[i] is not None:
                        labels_dict[kpoint_file.labels[i]] = \
                            kpoint_file.kpts[i]
                # remake the data only considering line band structure k-points
                # (weight = 0.0 kpoints)
                nbands = len(eigenvals[Spin.up])
                kpoints = kpoints[start_bs_index:nkpts]
                up_eigen = [eigenvals[Spin.up][i][start_bs_index:nkpts]
                            for i in range(nbands)]
                if self.projected_eigenvalues:
                    p_eigenvals[Spin.up] = [p_eigenvals[Spin.up][i][
                                            start_bs_index:nkpts]
                                            for i in range(nbands)]
                if self.is_spin:
                    down_eigen = [eigenvals[Spin.down][i][start_bs_index:nkpts]
                                  for i in range(nbands)]
                    eigenvals = {Spin.up: up_eigen, Spin.down: down_eigen}
                    if self.projected_eigenvalues:
                        p_eigenvals[Spin.down] = [p_eigenvals[Spin.down][i][
                                                  start_bs_index:nkpts]
                                                  for i in range(nbands)]
                else:
                    eigenvals = {Spin.up: up_eigen}
            else:
                # Plain line-mode run: every kpoint must carry a label.
                if '' in kpoint_file.labels:
                    raise Exception("A band structure along symmetry lines "
                                    "requires a label for each kpoint. "
                                    "Check your KPOINTS file")
                labels_dict = dict(zip(kpoint_file.labels, kpoint_file.kpts))
                labels_dict.pop(None, None)
            return BandStructureSymmLine(kpoints, eigenvals, lattice_new,
                                         efermi, labels_dict,
                                         structure=self.final_structure,
                                         projections=p_eigenvals)
        else:
            return BandStructure(kpoints, eigenvals, lattice_new, efermi,
                                 structure=self.final_structure,
                                 projections=p_eigenvals)
@property
def eigenvalue_band_properties(self):
"""
Band properties from the eigenvalues as a tuple,
(band gap, cbm, vbm, is_band_gap_direct).
"""
vbm = -float("inf")
vbm_kpoint = None
cbm = float("inf")
cbm_kpoint = None
for spin, d in self.eigenvalues.items():
for k, val in enumerate(d):
for (eigenval, occu) in val:
if occu > self.occu_tol and eigenval > vbm:
vbm = eigenval
vbm_kpoint = k
elif occu <= self.occu_tol and eigenval < cbm:
cbm = eigenval
cbm_kpoint = k
return max(cbm - vbm, 0), cbm, vbm, vbm_kpoint == cbm_kpoint
    def update_potcar_spec(self, path):
        """
        Update self.potcar_spec with hashes computed from an actual POTCAR
        file matching this run's POTCAR symbols.
        Args:
            path: Either a string path (a POTCAR file directly, or a
                directory to search for a POTCAR* file), or True to search
                the directory containing this vasprun.xml. Any other value
                leaves potcar_spec unchanged.
        """
        def get_potcar_in_path(p):
            # Scan the directory for any POTCAR* file whose entries match
            # this run's symbols.
            # NOTE(review): this compares `d.header` while the direct-file
            # branch below compares `d.TITEL` — presumably equivalent
            # attributes on PotcarSingle; verify they stay in sync.
            for fn in os.listdir(os.path.abspath(p)):
                if fn.startswith('POTCAR'):
                    pc = Potcar.from_file(os.path.join(p, fn))
                    if {d.header for d in pc} == \
                            {sym for sym in self.potcar_symbols}:
                        return pc
            warnings.warn("No POTCAR file with matching TITEL fields"
                          " was found in {}".format(os.path.abspath(p)))
        if isinstance(path, string_types):
            if "POTCAR" in path:
                # Path points at a POTCAR file directly.
                potcar = Potcar.from_file(path)
                if {d.TITEL for d in potcar} != \
                        {sym for sym in self.potcar_symbols}:
                    raise ValueError("Potcar TITELs do not match Vasprun")
            else:
                potcar = get_potcar_in_path(path)
        elif isinstance(path, bool) and path:
            # path=True: search next to the vasprun.xml file.
            potcar = get_potcar_in_path(os.path.split(self.filename)[0])
        else:
            potcar = None
        if potcar:
            # Pair each symbol with the hash of the matching POTCAR entry.
            self.potcar_spec = [{"titel": sym, "hash": ps.get_potcar_hash()}
                                for sym in self.potcar_symbols
                                for ps in potcar if
                                ps.symbol == sym.split()[1]]
    def as_dict(self):
        """
        Json-serializable dict representation.
        """
        d = {"vasp_version": self.vasp_version,
             "has_vasp_completed": self.converged,
             "nsites": len(self.final_structure)}
        comp = self.final_structure.composition
        d["unit_cell_formula"] = comp.as_dict()
        d["reduced_cell_formula"] = Composition(comp.reduced_formula).as_dict()
        d["pretty_formula"] = comp.reduced_formula
        # Element symbols extracted from POTCAR symbols like "PAW_PBE Fe_pv".
        symbols = [s.split()[1] for s in self.potcar_symbols]
        symbols = [re.split(r"_", s)[0] for s in symbols]
        d["is_hubbard"] = self.is_hubbard
        d["hubbards"] = self.hubbards
        unique_symbols = sorted(list(set(self.atomic_symbols)))
        d["elements"] = unique_symbols
        d["nelements"] = len(unique_symbols)
        d["run_type"] = self.run_type
        # Input section: INCAR, initial structure, KPOINTS, POTCAR info.
        vin = {"incar": {k: v for k, v in self.incar.items()},
               "crystal": self.initial_structure.as_dict(),
               "kpoints": self.kpoints.as_dict()}
        actual_kpts = [{"abc": list(self.actual_kpoints[i]),
                        "weight": self.actual_kpoints_weights[i]}
                       for i in range(len(self.actual_kpoints))]
        vin["kpoints"]["actual_points"] = actual_kpts
        vin["potcar"] = [s.split(" ")[1] for s in self.potcar_symbols]
        vin["potcar_spec"] = self.potcar_spec
        vin["potcar_type"] = [s.split(" ")[0] for s in self.potcar_symbols]
        vin["parameters"] = {k: v for k, v in self.parameters.items()}
        vin["lattice_rec"] = self.lattice_rec.as_dict()
        d["input"] = vin
        nsites = len(self.final_structure)
        # Output section; energy-per-atom falls back to None if final_energy
        # is not a number (e.g. GW runs returning inf/None).
        try:
            vout = {"ionic_steps": self.ionic_steps,
                    "final_energy": self.final_energy,
                    "final_energy_per_atom": self.final_energy / nsites,
                    "crystal": self.final_structure.as_dict(),
                    "efermi": self.efermi}
        except (ArithmeticError, TypeError):
            vout = {"ionic_steps": self.ionic_steps,
                    "final_energy": self.final_energy,
                    "final_energy_per_atom": None,
                    "crystal": self.final_structure.as_dict(),
                    "efermi": self.efermi}
        if self.eigenvalues:
            eigen = {str(spin): v.tolist()
                     for spin, v in self.eigenvalues.items()}
            vout["eigenvalues"] = eigen
            (gap, cbm, vbm, is_direct) = self.eigenvalue_band_properties
            vout.update(dict(bandgap=gap, cbm=cbm, vbm=vbm,
                             is_gap_direct=is_direct))
            if self.projected_eigenvalues:
                vout['projected_eigenvalues'] = {
                    str(spin): v.tolist()
                    for spin, v in self.projected_eigenvalues.items()}
        vout['epsilon_static'] = self.epsilon_static
        vout['epsilon_static_wolfe'] = self.epsilon_static_wolfe
        vout['epsilon_ionic'] = self.epsilon_ionic
        d['output'] = vout
        return jsanitize(d, strict=True)
    def _parse_params(self, elem):
        """
        Recursively parse a <parameters>/<incar>-style element of <i> and <v>
        tags (plus nested parameter groups) into an Incar object.
        """
        params = {}
        for c in elem:
            name = c.attrib.get("name")
            if c.tag not in ("i", "v"):
                # Nested parameter group: recurse and merge.
                p = self._parse_params(c)
                if name == "response functions":
                    # Delete duplicate fields from "response functions",
                    # which overrides the values in the root params.
                    p = {k: v for k, v in p.items() if k not in params}
                params.update(p)
            else:
                ptype = c.attrib.get("type")
                val = c.text.strip() if c.text else ""
                if c.tag == "i":
                    # Scalar parameter.
                    params[name] = _parse_parameters(ptype, val)
                else:
                    # Vector parameter.
                    params[name] = _parse_v_parameters(ptype, val,
                                                      self.filename, name)
        elem.clear()
        return Incar(params)
    def _parse_atominfo(self, elem):
        """
        Parse the <atominfo> element into (atomic_symbols, potcar_symbols).
        """
        for a in elem.findall("array"):
            if a.attrib["name"] == "atoms":
                # One symbol per site.
                atomic_symbols = [rc.find("c").text.strip()
                                  for rc in a.find("set")]
            elif a.attrib["name"] == "atomtypes":
                # Fifth column of the atomtypes table is the POTCAR symbol.
                potcar_symbols = [rc.findall("c")[4].text.strip()
                                  for rc in a.find("set")]
        # ensure atomic symbols are valid elements
        def parse_atomic_symbol(symbol):
            try:
                return str(Element(symbol))
            # vasprun.xml uses X instead of Xe for xenon
            except ValueError as e:
                if symbol == "X":
                    return "Xe"
                elif symbol == "r":
                    # vasprun.xml can emit "r" for zirconium (Zr).
                    return "Zr"
                raise e
        elem.clear()
        return [parse_atomic_symbol(sym) for
                sym in atomic_symbols], potcar_symbols
def _parse_kpoints(self, elem):
e = elem
if elem.find("generation"):
e = elem.find("generation")
k = Kpoints("Kpoints from vasprun.xml")
k.style = Kpoints.supported_modes.from_string(
e.attrib["param"] if "param" in e.attrib else "Reciprocal")
for v in e.findall("v"):
name = v.attrib.get("name")
toks = v.text.split()
if name == "divisions":
k.kpts = [[int(i) for i in toks]]
elif name == "usershift":
k.kpts_shift = [float(i) for i in toks]
elif name in {"genvec1", "genvec2", "genvec3", "shift"}:
setattr(k, name, [float(i) for i in toks])
for va in elem.findall("varray"):
name = va.attrib["name"]
if name == "kpointlist":
actual_kpoints = _parse_varray(va)
elif name == "weights":
weights = [i[0] for i in _parse_varray(va)]
elem.clear()
if k.style == Kpoints.supported_modes.Reciprocal:
k = Kpoints(comment="Kpoints from vasprun.xml",
style=Kpoints.supported_modes.Reciprocal,
num_kpts=len(k.kpts),
kpts=actual_kpoints, kpts_weights=weights)
return k, actual_kpoints, weights
def _parse_structure(self, elem):
latt = _parse_varray(elem.find("crystal").find("varray"))
pos = _parse_varray(elem.find("varray"))
struct = Structure(latt, self.atomic_symbols, pos)
sdyn = elem.find("varray/[@name='selective']")
if sdyn:
struct.add_site_property('selective_dynamics',
_parse_varray(sdyn))
return struct
def _parse_diel(self, elem):
imag = [[float(l) for l in r.text.split()]
for r in elem.find("imag").find("array")
.find("set").findall("r")]
real = [[float(l) for l in r.text.split()]
for r in elem.find("real")
.find("array").find("set").findall("r")]
elem.clear()
return [e[0] for e in imag], \
[e[1:] for e in real], [e[1:] for e in imag]
    def _parse_chemical_shift_calculation(self, elem):
        """
        Parse a <calculation> element from an LCHIMAG (chemical shift) run,
        where one <calculation> holds several logical ionic/perturbation
        steps. Returns a list of step dicts.
        """
        calculation = []
        istep = {}
        try:
            s = self._parse_structure(elem.find("structure"))
        except AttributeError:  # not all calculations have a structure
            s = None
            pass
        for va in elem.findall("varray"):
            istep[va.attrib["name"]] = _parse_varray(va)
        istep["structure"] = s
        istep["electronic_steps"] = []
        calculation.append(istep)
        for scstep in elem.findall("scstep"):
            try:
                d = {i.attrib["name"]: _vasprun_float(i.text)
                     for i in scstep.find("energy").findall("i")}
                cur_ene = d['e_fr_energy']
                # NOTE(review): `calculation` always has >= 1 entry here
                # (istep was appended above), so min_steps is always 1 and
                # the NELMIN fallback looks unreachable — confirm intent.
                min_steps = 1 if len(calculation) >= 1 else self.parameters.get("NELMIN", 5)
                if len(calculation[-1]["electronic_steps"]) <= min_steps:
                    calculation[-1]["electronic_steps"].append(d)
                else:
                    # A large energy jump (>= 1 eV) signals the start of a
                    # new logical step; otherwise keep accumulating.
                    last_ene = calculation[-1]["electronic_steps"][-1]["e_fr_energy"]
                    if abs(cur_ene - last_ene) < 1.0:
                        calculation[-1]["electronic_steps"].append(d)
                    else:
                        calculation.append({"electronic_steps": [d]})
            except AttributeError:  # not all calculations have an energy
                pass
        # Promote the final electronic step's energies onto the last step.
        calculation[-1].update(calculation[-1]["electronic_steps"][-1])
        return calculation
    def _parse_calculation(self, elem):
        """
        Parse a standard <calculation> element into a dict with the step
        energies, varrays (forces, stress, ...), electronic steps and
        structure.
        """
        try:
            # Ionic-step-level energies.
            istep = {i.attrib["name"]: float(i.text)
                     for i in elem.find("energy").findall("i")}
        except AttributeError:  # not all calculations have an energy
            istep = {}
            pass
        esteps = []
        for scstep in elem.findall("scstep"):
            try:
                d = {i.attrib["name"]: _vasprun_float(i.text)
                     for i in scstep.find("energy").findall("i")}
                esteps.append(d)
            except AttributeError:  # not all calculations have an energy
                pass
        try:
            s = self._parse_structure(elem.find("structure"))
        except AttributeError:  # not all calculations have a structure
            s = None
            pass
        for va in elem.findall("varray"):
            istep[va.attrib["name"]] = _parse_varray(va)
        istep["electronic_steps"] = esteps
        istep["structure"] = s
        elem.clear()
        return istep
    def _parse_dos(self, elem):
        """
        Parse a <dos> element.
        Returns:
            (total Dos, integrated Dos, list of per-site pdos dicts keyed by
            orbital then spin).
        """
        efermi = float(elem.find("i").text)
        energies = None
        tdensities = {}
        idensities = {}
        # Total DOS: columns are (energy, density, integrated density),
        # one <set> per spin channel.
        for s in elem.find("total").find("array").find("set").findall("set"):
            data = np.array(_parse_varray(s))
            energies = data[:, 0]
            spin = Spin.up if s.attrib["comment"] == "spin 1" else Spin.down
            tdensities[spin] = data[:, 1]
            idensities[spin] = data[:, 2]
        pdoss = []
        partial = elem.find("partial")
        if partial is not None:
            orbs = [ss.text for ss in partial.find("array").findall("field")]
            orbs.pop(0)
            # "x" in orbital names (e.g. "px") means lm-decomposed output.
            lm = any(["x" in s for s in orbs])
            for s in partial.find("array").find("set").findall("set"):
                pdos = defaultdict(dict)
                for ss in s.findall("set"):
                    spin = Spin.up if ss.attrib["comment"] == "spin 1" else \
                        Spin.down
                    data = np.array(_parse_varray(ss))
                    nrow, ncol = data.shape
                    for j in range(1, ncol):
                        if lm:
                            orb = Orbital(j - 1)
                        else:
                            orb = OrbitalType(j - 1)
                        pdos[orb][spin] = data[:, j]
                pdoss.append(pdos)
        elem.clear()
        return Dos(efermi, energies, tdensities), \
            Dos(efermi, energies, idensities), pdoss
def _parse_eigen(self, elem):
eigenvalues = defaultdict(list)
for s in elem.find("array").find("set").findall("set"):
spin = Spin.up if s.attrib["comment"] == "spin 1" else Spin.down
for ss in s.findall("set"):
eigenvalues[spin].append(_parse_varray(ss))
eigenvalues = {spin: np.array(v) for spin, v in eigenvalues.items()}
elem.clear()
return eigenvalues
def _parse_projected_eigen(self, elem):
root = elem.find("array").find("set")
proj_eigen = defaultdict(list)
for s in root.findall("set"):
spin = int(re.match(r"spin(\d+)", s.attrib["comment"]).group(1))
# Force spin to be +1 or -1
spin = Spin.up if spin == 1 else Spin.down
for kpt, ss in enumerate(s.findall("set")):
dk = []
for band, sss in enumerate(ss.findall("set")):
db = _parse_varray(sss)
dk.append(db)
proj_eigen[spin].append(dk)
proj_eigen = {spin: np.array(v) for spin, v in proj_eigen.items()}
elem.clear()
return proj_eigen
def _parse_dynmat(self, elem):
hessian = []
eigenvalues = []
eigenvectors = []
for v in elem.findall("v"):
if v.attrib["name"] == "eigenvalues":
eigenvalues = [float(i) for i in v.text.split()]
for va in elem.findall("varray"):
if va.attrib["name"] == "hessian":
for v in va.findall("v"):
hessian.append([float(i) for i in v.text.split()])
elif va.attrib["name"] == "eigenvectors":
for v in va.findall("v"):
eigenvectors.append([float(i) for i in v.text.split()])
return hessian, eigenvalues, eigenvectors
class BSVasprun(Vasprun):
    """
    A highly optimized version of Vasprun that parses only eigenvalues for
    bandstructures. All other properties like structures, parameters,
    etc. are ignored.
    """

    def __init__(self, filename, parse_projected_eigen=False,
                 parse_potcar_file=False, occu_tol=1e-8):
        """
        Args:
            filename (str): vasprun.xml filename to parse.
            parse_projected_eigen (bool): Whether to parse orbital/site
                projected eigenvalues.
            parse_potcar_file: Passed to update_potcar_spec when truthy.
            occu_tol (float): Occupation tolerance used when locating
                band edges.
        """
        self.filename = filename
        self.occu_tol = occu_tol
        with zopen(filename, "rt") as f:
            self.efermi = None
            parsed_header = False
            self.eigenvalues = None
            self.projected_eigenvalues = None
            # Stream-parse the XML so only the needed elements are kept.
            for event, elem in ET.iterparse(f):
                tag = elem.tag
                if not parsed_header:
                    if tag == "generator":
                        self.generator = self._parse_params(elem)
                    elif tag == "incar":
                        self.incar = self._parse_params(elem)
                    elif tag == "kpoints":
                        self.kpoints, self.actual_kpoints, \
                            self.actual_kpoints_weights = self._parse_kpoints(
                            elem)
                    elif tag == "parameters":
                        self.parameters = self._parse_params(elem)
                    elif tag == "atominfo":
                        self.atomic_symbols, self.potcar_symbols = \
                            self._parse_atominfo(elem)
                        self.potcar_spec = [{"titel": p,
                                             "hash": None} for
                                            p in self.potcar_symbols]
                        parsed_header = True
                elif tag == "i" and elem.attrib.get("name") == "efermi":
                    self.efermi = float(elem.text)
                elif tag == "eigenvalues":
                    self.eigenvalues = self._parse_eigen(elem)
                elif parse_projected_eigen and tag == "projected":
                    self.projected_eigenvalues = self._parse_projected_eigen(
                        elem)
                elif tag == "structure" and elem.attrib.get("name") == \
                        "finalpos":
                    self.final_structure = self._parse_structure(elem)
        self.vasp_version = self.generator["version"]
        if parse_potcar_file:
            self.update_potcar_spec(parse_potcar_file)

    def as_dict(self):
        """
        Json-serializable dict representation.
        """
        d = {"vasp_version": self.vasp_version,
             "has_vasp_completed": True,
             "nsites": len(self.final_structure)}
        comp = self.final_structure.composition
        d["unit_cell_formula"] = comp.as_dict()
        d["reduced_cell_formula"] = Composition(comp.reduced_formula).as_dict()
        d["pretty_formula"] = comp.reduced_formula
        symbols = [s.split()[1] for s in self.potcar_symbols]
        symbols = [re.split(r"_", s)[0] for s in symbols]
        d["is_hubbard"] = self.is_hubbard
        d["hubbards"] = self.hubbards
        unique_symbols = sorted(list(set(self.atomic_symbols)))
        d["elements"] = unique_symbols
        d["nelements"] = len(unique_symbols)
        d["run_type"] = self.run_type
        vin = {"incar": {k: v for k, v in self.incar.items()},
               "crystal": self.final_structure.as_dict(),
               "kpoints": self.kpoints.as_dict()}
        actual_kpts = [{"abc": list(self.actual_kpoints[i]),
                        "weight": self.actual_kpoints_weights[i]}
                       for i in range(len(self.actual_kpoints))]
        vin["kpoints"]["actual_points"] = actual_kpts
        vin["potcar"] = [s.split(" ")[1] for s in self.potcar_symbols]
        vin["potcar_spec"] = self.potcar_spec
        vin["potcar_type"] = [s.split(" ")[0] for s in self.potcar_symbols]
        vin["parameters"] = {k: v for k, v in self.parameters.items()}
        vin["lattice_rec"] = self.lattice_rec.as_dict()
        d["input"] = vin
        vout = {"crystal": self.final_structure.as_dict(),
                "efermi": self.efermi}
        if self.eigenvalues:
            # Re-key eigenvalues by band index then spin string.
            eigen = defaultdict(dict)
            for spin, values in self.eigenvalues.items():
                for i, v in enumerate(values):
                    eigen[i][str(spin)] = v
            vout["eigenvalues"] = eigen
            (gap, cbm, vbm, is_direct) = self.eigenvalue_band_properties
            vout.update(dict(bandgap=gap, cbm=cbm, vbm=vbm,
                             is_gap_direct=is_direct))
            if self.projected_eigenvalues:
                # Group projected eigenvalues per kpoint, keyed by spin.
                peigen = []
                for i in range(len(eigen)):
                    peigen.append({})
                for spin, v in self.projected_eigenvalues.items():
                    for kpoint_index, vv in enumerate(v):
                        if str(spin) not in peigen[kpoint_index]:
                            peigen[kpoint_index][str(spin)] = vv
                vout['projected_eigenvalues'] = peigen
        d['output'] = vout
        return jsanitize(d, strict=True)
class Outcar(MSONable):
"""
Parser for data in OUTCAR that is not available in Vasprun.xml
Note, this class works a bit differently than most of the other
VaspObjects, since the OUTCAR can be very different depending on which
"type of run" performed.
Creating the OUTCAR class with a filename reads "regular parameters" that
are always present.
Args:
filename (str): OUTCAR filename to parse.
.. attribute:: magnetization
Magnetization on each ion as a tuple of dict, e.g.,
({"d": 0.0, "p": 0.003, "s": 0.002, "tot": 0.005}, ... )
Note that this data is not always present. LORBIT must be set to some
other value than the default.
.. attribute:: chemical_shifts
Chemical Shift on each ion as a tuple of ChemicalShiftNotation, e.g.,
(cs1, cs2, ...)
.. attribute:: unsym_cs_tensor
Unsymmetrized Chemical Shift tensor matrixes on each ion as a list.
e.g.,
[[[sigma11, sigma12, sigma13],
[sigma21, sigma22, sigma23],
[sigma31, sigma32, sigma33]],
...
[[sigma11, sigma12, sigma13],
[sigma21, sigma22, sigma23],
[sigma31, sigma32, sigma33]]]
    .. attribute:: cs_g0_contribution
        G=0 contribution to chemical shift. 2D rank 3 matrix
.. attribute:: cs_core_contribution
Core contribution to chemical shift. dict. e.g.,
{'Mg': -412.8, 'C': -200.5, 'O': -271.1}
.. attribute:: efg
Electric Field Gradient (EFG) tensor on each ion as a tuple of dict, e.g.,
        ({"cq": 0.1, "eta": 0.2, "nuclear_quadrupole_moment": 0.3},
         {"cq": 0.7, "eta": 0.8, "nuclear_quadrupole_moment": 0.9},
...)
.. attribute:: charge
Charge on each ion as a tuple of dict, e.g.,
({"p": 0.154, "s": 0.078, "d": 0.0, "tot": 0.232}, ...)
Note that this data is not always present. LORBIT must be set to some
other value than the default.
.. attribute:: is_stopped
True if OUTCAR is from a stopped run (using STOPCAR, see Vasp Manual).
.. attribute:: run_stats
Various useful run stats as a dict including "System time (sec)",
"Total CPU time used (sec)", "Elapsed time (sec)",
"Maximum memory used (kb)", "Average memory used (kb)",
"User time (sec)".
.. attribute:: elastic_tensor
Total elastic moduli (Kbar) is given in a 6x6 array matrix.
One can then call a specific reader depending on the type of run being
performed. These are currently: read_igpar(), read_lepsilon() and
read_lcalcpol(), read_core_state_eign(), read_avg_core_pot().
See the documentation of those methods for more documentation.
Authors: Rickard Armiento, Shyue Ping Ong
"""
def __init__(self, filename):
self.filename = filename
self.is_stopped = False
# data from end of OUTCAR
charge = []
mag_x = []
mag_y = []
mag_z = []
header = []
run_stats = {}
total_mag = None
nelect = None
efermi = None
total_energy = None
time_patt = re.compile(r"\((sec|kb)\)")
efermi_patt = re.compile(r"E-fermi\s*:\s*(\S+)")
nelect_patt = re.compile(r"number of electron\s+(\S+)\s+magnetization")
mag_patt = re.compile(r"number of electron\s+\S+\s+magnetization\s+("
r"\S+)")
toten_pattern = re.compile(r"free energy TOTEN\s+=\s+([\d\-\.]+)")
all_lines = []
for line in reverse_readfile(self.filename):
clean = line.strip()
all_lines.append(clean)
if clean.find("soft stop encountered! aborting job") != -1:
self.is_stopped = True
else:
if time_patt.search(line):
tok = line.strip().split(":")
run_stats[tok[0].strip()] = float(tok[1].strip())
continue
m = efermi_patt.search(clean)
if m:
try:
# try-catch because VASP sometimes prints
# 'E-fermi: ******** XC(G=0): -6.1327
# alpha+bet : -1.8238'
efermi = float(m.group(1))
continue
except ValueError:
efermi = None
continue
m = nelect_patt.search(clean)
if m:
nelect = float(m.group(1))
m = mag_patt.search(clean)
if m:
total_mag = float(m.group(1))
if total_energy is None:
m = toten_pattern.search(clean)
if m:
total_energy = float(m.group(1))
if all([nelect, total_mag is not None, efermi is not None,
run_stats]):
break
# For single atom systems, VASP doesn't print a total line, so
# reverse parsing is very difficult
read_charge = False
read_mag_x = False
read_mag_y = False # for SOC calculations only
read_mag_z = False
all_lines.reverse()
for clean in all_lines:
if read_charge or read_mag_x or read_mag_y or read_mag_z:
if clean.startswith("# of ion"):
header = re.split(r"\s{2,}", clean.strip())
header.pop(0)
else:
m = re.match(r"\s*(\d+)\s+(([\d\.\-]+)\s+)+", clean)
if m:
toks = [float(i)
for i in re.findall(r"[\d\.\-]+", clean)]
toks.pop(0)
if read_charge:
charge.append(dict(zip(header, toks)))
elif read_mag_x:
mag_x.append(dict(zip(header, toks)))
elif read_mag_y:
mag_y.append(dict(zip(header, toks)))
elif read_mag_z:
mag_z.append(dict(zip(header, toks)))
elif clean.startswith('tot'):
read_charge = False
read_mag_x = False
read_mag_y = False
read_mag_z = False
if clean == "total charge":
charge = []
read_charge = True
read_mag_x, read_mag_y, read_mag_z = False, False, False
elif clean == "magnetization (x)":
mag_x = []
read_mag_x = True
read_charge, read_mag_y, read_mag_z = False, False, False
elif clean == "magnetization (y)":
mag_y = []
read_mag_y = True
read_charge, read_mag_x, read_mag_z = False, False, False
elif clean == "magnetization (z)":
mag_z = []
read_mag_z = True
read_charge, read_mag_x, read_mag_y = False, False, False
# merge x, y and z components of magmoms if present (SOC calculation)
if mag_y and mag_z:
# TODO: detect spin axis
mag = []
for idx in range(len(mag_x)):
mag.append({
key: Magmom([mag_x[idx][key], mag_y[idx][key], mag_z[idx][key]])
for key in mag_x[0].keys()
})
else:
mag = mag_x
# data from beginning of OUTCAR
run_stats['cores'] = 0
with zopen(filename, "rt") as f:
for line in f:
if "running" in line:
run_stats['cores'] = line.split()[2]
break
self.run_stats = run_stats
self.magnetization = tuple(mag)
self.charge = tuple(charge)
self.efermi = efermi
self.nelect = nelect
self.total_mag = total_mag
self.final_energy = total_energy
self.data = {}
# Check if calculation is spin polarized
self.spin = False
self.read_pattern({'spin': 'ISPIN = 2'})
if self.data.get('spin',[]):
self.spin = True
# Check if calculation is noncollinear
self.noncollinear = False
self.read_pattern({'noncollinear': 'LNONCOLLINEAR = T'})
if self.data.get('noncollinear',[]):
self.noncollinear = False
# Check to see if LEPSILON is true and read piezo data if so
self.lepsilon = False
self.read_pattern({'epsilon': 'LEPSILON= T'})
if self.data.get('epsilon',[]):
self.lepsilon = True
self.read_lepsilon()
self.read_lepsilon_ionic()
# Check to see if LCALCPOL is true and read polarization data if so
self.lcalcpol = False
self.read_pattern({'calcpol': 'LCALCPOL = T'})
if self.data.get('calcpol',[]):
self.lcalcpol = True
self.read_lcalcpol()
self.read_pseudo_zval()
def read_pattern(self, patterns, reverse=False, terminate_on_match=False,
postprocess=str):
"""
General pattern reading. Uses monty's regrep method. Takes the same
arguments.
Args:
patterns (dict): A dict of patterns, e.g.,
{"energy": r"energy\\(sigma->0\\)\\s+=\\s+([\\d\\-.]+)"}.
reverse (bool): Read files in reverse. Defaults to false. Useful for
large files, esp OUTCARs, especially when used with
terminate_on_match.
terminate_on_match (bool): Whether to terminate when there is at
least one match in each key in pattern.
postprocess (callable): A post processing function to convert all
matches. Defaults to str, i.e., no change.
Renders accessible:
Any attribute in patterns. For example,
{"energy": r"energy\\(sigma->0\\)\\s+=\\s+([\\d\\-.]+)"} will set the
value of self.data["energy"] = [[-1234], [-3453], ...], to the
results from regex and postprocess. Note that the returned values
are lists of lists, because you can grep multiple items on one line.
"""
matches = regrep(self.filename, patterns, reverse=reverse,
terminate_on_match=terminate_on_match,
postprocess=postprocess)
for k in patterns.keys():
self.data[k] = [i[0] for i in matches.get(k, [])]
    def read_table_pattern(self, header_pattern, row_pattern, footer_pattern,
                           postprocess=str, attribute_name=None,
                           last_one_only=True):
        """
        Parse table-like data. A table composes of three parts: header,
        main body, footer. All the data matches "row pattern" in the main body
        will be returned.
        Args:
            header_pattern (str): The regular expression pattern matches the
                table header. This pattern should match all the text
                immediately before the main body of the table. For multiple
                sections table match the text until the section of
                interest. MULTILINE and DOTALL options are enforced, as a
                result, the "." meta-character will also match "\n" in this
                section.
            row_pattern (str): The regular expression matches a single line in
                the table. Capture interested field using regular expression
                groups.
            footer_pattern (str): The regular expression matches the end of the
                table. E.g. a long dash line.
            postprocess (callable): A post processing function to convert all
                matches. Defaults to str, i.e., no change.
            attribute_name (str): Name of this table. If present the parsed data
                will be attached to "data. e.g. self.data["efg"] = [...]
            last_one_only (bool): All the tables will be parsed, if this option
                is set to True, only the last table will be returned. The
                enclosing list will be removed. i.e. Only a single table will
                be returned. Default to be True.
        Returns:
            List of tables. 1) A table is a list of rows. 2) A row if either a list of
            attribute values in case the the capturing group is defined without name in
            row_pattern, or a dict in case that named capturing groups are defined by
            row_pattern.
        """
        with zopen(self.filename, 'rt') as f:
            text = f.read()
        # Full table = header + one-or-more rows (captured as "table_body")
        # + footer. DOTALL lets "." span newlines inside header/footer text.
        table_pattern_text = header_pattern + r"\s*^(?P<table_body>(?:\s+" + \
            row_pattern + r")+)\s+" + footer_pattern
        table_pattern = re.compile(table_pattern_text, re.MULTILINE | re.DOTALL)
        rp = re.compile(row_pattern)
        tables = []
        for mt in table_pattern.finditer(text):
            table_body_text = mt.group("table_body")
            table_contents = []
            for line in table_body_text.split("\n"):
                ml = rp.search(line)
                # Named capturing groups produce dict rows; unnamed groups
                # produce list rows.
                d = ml.groupdict()
                if len(d) > 0:
                    processed_line = {k: postprocess(v) for k, v in d.items()}
                else:
                    processed_line = [postprocess(v) for v in ml.groups()]
                table_contents.append(processed_line)
            tables.append(table_contents)
        if last_one_only:
            retained_data = tables[-1]
        else:
            retained_data = tables
        if attribute_name is not None:
            self.data[attribute_name] = retained_data
        return retained_data
    def read_freq_dielectric(self):
        """
        Parses the frequency dependent dielectric function (obtained with
        LOPTICS). Frequencies (in eV) are in self.frequencies, and dielectric
        tensor function is given as self.dielectric_tensor_function.
        """
        header_pattern = r"\s+frequency dependent\s+IMAGINARY " \
                         r"DIELECTRIC FUNCTION \(independent particle, " \
                         r"no local field effects\)(\sdensity-density)*$"
        row_pattern = r"\s+".join([r"([\.\-\d]+)"] * 7)
        lines = []
        # Read the file backwards so that only the last dielectric block is
        # collected; stop as soon as its header is reached.
        for l in reverse_readfile(self.filename):
            lines.append(l)
            if re.match(header_pattern, l):
                break
        freq = []
        data = {"REAL": [], "IMAGINARY": []}
        lines.reverse()
        count = 0
        component = "IMAGINARY"
        for l in lines[3:]:  # Skip the preamble.
            if re.match(row_pattern, l.strip()):
                toks = l.strip().split()
                if component == "IMAGINARY":
                    # Frequencies appear in both sub-tables; record them
                    # only once, while reading the imaginary part.
                    freq.append(float(toks[0]))
                xx, yy, zz, xy, yz, xz = [float(t) for t in toks[1:]]
                matrix = [[xx, xy, xz], [xy, yy, yz], [xz, yz, zz]]
                data[component].append(matrix)
            elif re.match(r"\s*-+\s*", l):
                # Dashed separator lines: the first switches from the
                # IMAGINARY to the REAL sub-table, the second ends the data.
                count += 1
                if count == 1:
                    component = "REAL"
                elif count == 2:
                    break
        self.frequencies = np.array(freq)
        self.dielectric_tensor_function = np.array(data["REAL"]) + \
            1j * np.array(data["IMAGINARY"])
    def read_chemical_shifts(self):
        """
        Parse the NMR chemical shifts data. Only the second part "absolute, valence and core"
        will be parsed. And only the three right most field (ISO_SHIFT, SPAN, SKEW) will be retrieved.
        Returns:
            List of chemical shifts in the order of atoms from the OUTCAR. Maryland notation is adopted.
        """
        header_pattern = r"\s+CSA tensor \(J\. Mason, Solid State Nucl\. Magn\. Reson\. 2, " \
                         r"285 \(1993\)\)\s+" \
                         r"\s+-{50,}\s+" \
                         r"\s+EXCLUDING G=0 CONTRIBUTION\s+INCLUDING G=0 CONTRIBUTION\s+" \
                         r"\s+-{20,}\s+-{20,}\s+" \
                         r"\s+ATOM\s+ISO_SHIFT\s+SPAN\s+SKEW\s+ISO_SHIFT\s+SPAN\s+SKEW\s+" \
                         r"-{50,}\s*$"
        first_part_pattern = r"\s+\(absolute, valence only\)\s+$"
        swallon_valence_body_pattern = r".+?\(absolute, valence and core\)\s+$"
        # Row: ion index + three skipped (G=0-excluded) fields, then the
        # three captured fields ISO_SHIFT, SPAN, SKEW.
        row_pattern = r"\d+(?:\s+[-]?\d+\.\d+){3}\s+" + r'\s+'.join(
            [r"([-]?\d+\.\d+)"] * 3)
        footer_pattern = r"-{50,}\s*$"
        h1 = header_pattern + first_part_pattern
        cs_valence_only = self.read_table_pattern(
            h1, row_pattern, footer_pattern, postprocess=float,
            last_one_only=True)
        h2 = header_pattern + swallon_valence_body_pattern
        cs_valence_and_core = self.read_table_pattern(
            h2, row_pattern, footer_pattern, postprocess=float,
            last_one_only=True)
        all_cs = {}
        for name, cs_table in [["valence_only", cs_valence_only],
                               ["valence_and_core", cs_valence_and_core]]:
            cs = []
            for sigma_iso, omega, kappa in cs_table:
                # Convert the Maryland-notation triple into a tensor object.
                tensor = NMRChemicalShiftNotation.from_maryland_notation(sigma_iso, omega, kappa)
                cs.append(tensor)
            all_cs[name] = tuple(cs)
        self.data["chemical_shifts"] = all_cs
def read_cs_g0_contribution(self):
"""
Parse the G0 contribution of NMR chemical shift.
Returns:
G0 contribution matrix as list of list.
"""
header_pattern = r'^\s+G\=0 CONTRIBUTION TO CHEMICAL SHIFT \(field along BDIR\)\s+$\n' \
r'^\s+-{50,}$\n' \
r'^\s+BDIR\s+X\s+Y\s+Z\s*$\n' \
r'^\s+-{50,}\s*$\n'
row_pattern = r'(?:\d+)\s+' + r'\s+'.join([r'([-]?\d+\.\d+)'] * 3)
footer_pattern = r'\s+-{50,}\s*$'
self.read_table_pattern(header_pattern, row_pattern, footer_pattern, postprocess=float,
last_one_only=True, attribute_name="cs_g0_contribution")
return self.data["cs_g0_contribution"]
def read_cs_core_contribution(self):
"""
Parse the core contribution of NMR chemical shift.
Returns:
G0 contribution matrix as list of list.
"""
header_pattern = r'^\s+Core NMR properties\s*$\n' \
r'\n' \
r'^\s+typ\s+El\s+Core shift \(ppm\)\s*$\n' \
r'^\s+-{20,}$\n'
row_pattern = r'\d+\s+(?P<element>[A-Z][a-z]?\w?)\s+(?P<shift>[-]?\d+\.\d+)'
footer_pattern = r'\s+-{20,}\s*$'
self.read_table_pattern(header_pattern, row_pattern, footer_pattern, postprocess=str,
last_one_only=True, attribute_name="cs_core_contribution")
core_contrib = {d['element']: float(d['shift'])
for d in self.data["cs_core_contribution"]}
self.data["cs_core_contribution"] = core_contrib
return self.data["cs_core_contribution"]
def read_cs_raw_symmetrized_tensors(self):
"""
Parse the matrix form of NMR tensor before corrected to table.
Returns:
nsymmetrized tensors list in the order of atoms.
"""
header_pattern = r"\s+-{50,}\s+" \
r"\s+Absolute Chemical Shift tensors\s+" \
r"\s+-{50,}$"
first_part_pattern = r"\s+UNSYMMETRIZED TENSORS\s+$"
row_pattern = r"\s+".join([r"([-]?\d+\.\d+)"]*3)
unsym_footer_pattern = "^\s+SYMMETRIZED TENSORS\s+$"
with zopen(self.filename, 'rt') as f:
text = f.read()
unsym_table_pattern_text = header_pattern + first_part_pattern + \
r"(?P<table_body>.+)" + unsym_footer_pattern
table_pattern = re.compile(unsym_table_pattern_text, re.MULTILINE | re.DOTALL)
rp = re.compile(row_pattern)
m = table_pattern.search(text)
if m:
table_text = m.group("table_body")
micro_header_pattern = r"ion\s+\d+"
micro_table_pattern_text = micro_header_pattern + r"\s*^(?P<table_body>(?:\s*" + \
row_pattern + r")+)\s+"
micro_table_pattern = re.compile(micro_table_pattern_text, re.MULTILINE | re.DOTALL)
unsym_tensors = []
for mt in micro_table_pattern.finditer(table_text):
table_body_text = mt.group("table_body")
tensor_matrix = []
for line in table_body_text.rstrip().split("\n"):
ml = rp.search(line)
processed_line = [float(v) for v in ml.groups()]
tensor_matrix.append(processed_line)
unsym_tensors.append(tensor_matrix)
self.data["unsym_cs_tensor"] = unsym_tensors
return unsym_tensors
else:
raise ValueError("NMR UNSYMMETRIZED TENSORS is not found")
def read_nmr_efg(self):
"""
Parse the NMR Electric Field Gradient tensors.
Returns:
Electric Field Gradient tensors as a list of dict in the order of atoms from OUTCAR.
Each dict key/value pair corresponds to a component of the tensors.
"""
header_pattern = r'^\s+NMR quadrupolar parameters\s+$\n' \
r'^\s+Cq : quadrupolar parameter\s+Cq=e[*]Q[*]V_zz/h$\n' \
r'^\s+eta: asymmetry parameters\s+\(V_yy - V_xx\)/ V_zz$\n' \
r'^\s+Q : nuclear electric quadrupole moment in mb \(millibarn\)$\n' \
r'^-{50,}$\n' \
r'^\s+ion\s+Cq\(MHz\)\s+eta\s+Q \(mb\)\s+$\n' \
r'^-{50,}\s*$\n'
row_pattern = r'\d+\s+(?P<cq>[-]?\d+\.\d+)\s+(?P<eta>[-]?\d+\.\d+)\s+' \
r'(?P<nuclear_quadrupole_moment>[-]?\d+\.\d+)'
footer_pattern = r'-{50,}\s*$'
self.read_table_pattern(header_pattern, row_pattern, footer_pattern, postprocess=float,
last_one_only=True, attribute_name="efg")
def read_elastic_tensor(self):
"""
Parse the elastic tensor data.
Returns:
6x6 array corresponding to the elastic tensor from the OUTCAR.
"""
header_pattern = r"TOTAL ELASTIC MODULI \(kBar\)\s+"\
r"Direction\s+([X-Z][X-Z]\s+)+"\
r"\-+"
row_pattern = r"[X-Z][X-Z]\s+"+r"\s+".join([r"(\-*[\.\d]+)"] * 6)
footer_pattern = r"\-+"
et_table = self.read_table_pattern(header_pattern, row_pattern,
footer_pattern, postprocess=float)
self.data["elastic_tensor"] = et_table
def read_piezo_tensor(self):
"""
Parse the piezo tensor data
"""
header_pattern = r"PIEZOELECTRIC TENSOR for field in x, y, " \
r"z\s+\(C/m\^2\)\s+([X-Z][X-Z]\s+)+\-+"
row_pattern = r"[x-z]\s+"+r"\s+".join([r"(\-*[\.\d]+)"] * 6)
footer_pattern = r"BORN EFFECTIVE"
pt_table = self.read_table_pattern(header_pattern, row_pattern,
footer_pattern, postprocess=float)
self.data["piezo_tensor"] = pt_table
def read_corrections(self, reverse=True, terminate_on_match=True):
patterns = {
"dipol_quadrupol_correction": r"dipol\+quadrupol energy "
r"correction\s+([\d\-\.]+)"
}
self.read_pattern(patterns, reverse=reverse,
terminate_on_match=terminate_on_match,
postprocess=float)
self.data["dipol_quadrupol_correction"] = self.data["dipol_quadrupol_correction"][0][0]
    def read_neb(self, reverse=True, terminate_on_match=True):
        """
        Reads NEB data. This only works with OUTCARs from both normal
        VASP NEB calculations or from the CI NEB method implemented by
        Henkelman et al.
        Args:
            reverse (bool): Read files in reverse. Defaults to false. Useful for
                large files, esp OUTCARs, especially when used with
                terminate_on_match. Defaults to True here since we usually
                want only the final value.
            terminate_on_match (bool): Whether to terminate when there is at
                least one match in each key in pattern. Defaults to True here
                since we usually want only the final value.
        Renders accessible:
            tangent_force - Final tangent force.
            energy - Final energy.
            These can be accessed under Outcar.data[key]
        """
        patterns = {
            "energy": r"energy\(sigma->0\)\s+=\s+([\d\-\.]+)",
            "tangent_force": r"(NEB: projections on to tangent \(spring, REAL\)\s+\S+|tangential force \(eV/A\))\s+([\d\-\.]+)"
        }
        self.read_pattern(patterns, reverse=reverse,
                          terminate_on_match=terminate_on_match,
                          postprocess=str)
        self.data["energy"] = float(self.data["energy"][0][0])
        if self.data.get("tangent_force"):
            # Group 1 of the pattern is the label text; group 2 (index 1)
            # holds the numeric tangent force.
            self.data["tangent_force"] = float(
                self.data["tangent_force"][0][1])
def read_igpar(self):
"""
Renders accessible:
er_ev = e<r>_ev (dictionary with Spin.up/Spin.down as keys)
er_bp = e<r>_bp (dictionary with Spin.up/Spin.down as keys)
er_ev_tot = spin up + spin down summed
er_bp_tot = spin up + spin down summed
p_elc = spin up + spin down summed
p_ion = spin up + spin down summed
(See VASP section "LBERRY, IGPAR, NPPSTR, DIPOL tags" for info on
what these are).
"""
# variables to be filled
self.er_ev = {} # will be dict (Spin.up/down) of array(3*float)
self.er_bp = {} # will be dics (Spin.up/down) of array(3*float)
self.er_ev_tot = None # will be array(3*float)
self.er_bp_tot = None # will be array(3*float)
self.p_elec = None
self.p_ion = None
try:
search = []
# Nonspin cases
def er_ev(results, match):
results.er_ev[Spin.up] = np.array(map(float,
match.groups()[1:4])) / 2
results.er_ev[Spin.down] = results.er_ev[Spin.up]
results.context = 2
search.append([r"^ *e<r>_ev=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *\)",
None, er_ev])
def er_bp(results, match):
results.er_bp[Spin.up] = np.array([float(match.group(i))
for i in range(1, 4)]) / 2
results.er_bp[Spin.down] = results.er_bp[Spin.up]
search.append([r"^ *e<r>_bp=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *\)",
lambda results, line: results.context == 2, er_bp])
# Spin cases
def er_ev_up(results, match):
results.er_ev[Spin.up] = np.array([float(match.group(i))
for i in range(1, 4)])
results.context = Spin.up
search.append([r"^.*Spin component 1 *e<r>_ev=\( *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *([-0-9.Ee+]*) *\)",
None, er_ev_up])
def er_bp_up(results, match):
results.er_bp[Spin.up] = np.array([float(match.group(1)),
float(match.group(2)),
float(match.group(3))])
search.append([r"^ *e<r>_bp=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *\)",
lambda results,
line: results.context == Spin.up, er_bp_up])
def er_ev_dn(results, match):
results.er_ev[Spin.down] = np.array([float(match.group(1)),
float(match.group(2)),
float(match.group(3))])
results.context = Spin.down
search.append([r"^.*Spin component 2 *e<r>_ev=\( *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *([-0-9.Ee+]*) *\)",
None, er_ev_dn])
def er_bp_dn(results, match):
results.er_bp[Spin.down] = np.array([float(match.group(i))
for i in range(1, 4)])
search.append([r"^ *e<r>_bp=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *\)",
lambda results,
line: results.context == Spin.down, er_bp_dn])
# Always present spin/non-spin
def p_elc(results, match):
results.p_elc = np.array([float(match.group(i))
for i in range(1, 4)])
search.append([r"^.*Total electronic dipole moment: "
r"*p\[elc\]=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *\)", None, p_elc])
def p_ion(results, match):
results.p_ion = np.array([float(match.group(i))
for i in range(1, 4)])
search.append([r"^.*ionic dipole moment: "
r"*p\[ion\]=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
r"*([-0-9.Ee+]*) *\)", None, p_ion])
self.context = None
self.er_ev = {Spin.up: None, Spin.down: None}
self.er_bp = {Spin.up: None, Spin.down: None}
micro_pyawk(self.filename, search, self)
if self.er_ev[Spin.up] is not None and \
self.er_ev[Spin.down] is not None:
self.er_ev_tot = self.er_ev[Spin.up] + self.er_ev[Spin.down]
if self.er_bp[Spin.up] is not None and \
self.er_bp[Spin.down] is not None:
self.er_bp_tot = self.er_bp[Spin.up] + self.er_bp[Spin.down]
except:
self.er_ev_tot = None
self.er_bp_tot = None
raise Exception("IGPAR OUTCAR could not be parsed.")
def read_lepsilon(self):
# variables to be filled
try:
search = []
def dielectric_section_start(results, match):
results.dielectric_index = -1
search.append([r"MACROSCOPIC STATIC DIELECTRIC TENSOR \(", None,
dielectric_section_start])
def dielectric_section_start2(results, match):
results.dielectric_index = 0
search.append(
[r"-------------------------------------",
lambda results, line: results.dielectric_index == -1,
dielectric_section_start2])
def dielectric_data(results, match):
results.dielectric_tensor[results.dielectric_index, :] = \
np.array([float(match.group(i)) for i in range(1, 4)])
results.dielectric_index += 1
search.append(
[r"^ *([-0-9.Ee+]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+) *$",
lambda results, line: results.dielectric_index >= 0
if results.dielectric_index is not None
else None,
dielectric_data])
def dielectric_section_stop(results, match):
results.dielectric_index = None
search.append(
[r"-------------------------------------",
lambda results, line: results.dielectric_index >= 1
if results.dielectric_index is not None
else None,
dielectric_section_stop])
self.dielectric_index = None
self.dielectric_tensor = np.zeros((3, 3))
def piezo_section_start(results, match):
results.piezo_index = 0
search.append([r"PIEZOELECTRIC TENSOR for field in x, y, z "
r"\(C/m\^2\)",
None, piezo_section_start])
def piezo_data(results, match):
results.piezo_tensor[results.piezo_index, :] = \
np.array([float(match.group(i)) for i in range(1, 7)])
results.piezo_index += 1
search.append(
[r"^ *[xyz] +([-0-9.Ee+]+) +([-0-9.Ee+]+)" +
r" +([-0-9.Ee+]+) *([-0-9.Ee+]+) +([-0-9.Ee+]+)" +
r" +([-0-9.Ee+]+)*$",
lambda results, line: results.piezo_index >= 0
if results.piezo_index is not None
else None,
piezo_data])
def piezo_section_stop(results, match):
results.piezo_index = None
search.append(
[r"-------------------------------------",
lambda results, line: results.piezo_index >= 1
if results.piezo_index is not None
else None,
piezo_section_stop])
self.piezo_index = None
self.piezo_tensor = np.zeros((3, 6))
def born_section_start(results, match):
results.born_ion = -1
search.append([r"BORN EFFECTIVE CHARGES " +
r"\(in e, cummulative output\)",
None, born_section_start])
def born_ion(results, match):
results.born_ion = int(match.group(1)) - 1
results.born.append(np.zeros((3, 3)))
search.append([r"ion +([0-9]+)", lambda results,
line: results.born_ion is not None, born_ion])
def born_data(results, match):
results.born[results.born_ion][int(match.group(1)) - 1, :] = \
np.array([float(match.group(i)) for i in range(2, 5)])
search.append(
[r"^ *([1-3]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+)$",
lambda results, line: results.born_ion >= 0
if results.born_ion is not None
else results.born_ion,
born_data])
def born_section_stop(results, match):
results.born_index = None
search.append(
[r"-------------------------------------",
lambda results, line: results.born_ion >= 1
if results.born_ion is not None
else results.born_ion,
born_section_stop])
self.born_ion = None
self.born = []
micro_pyawk(self.filename, search, self)
self.born = np.array(self.born)
self.dielectric_tensor = self.dielectric_tensor.tolist()
self.piezo_tensor = self.piezo_tensor.tolist()
except:
raise Exception("LEPSILON OUTCAR could not be parsed.")
    def read_lepsilon_ionic(self):
        # Parse the ionic contributions from a LEPSILON run: the ionic part
        # of the macroscopic static dielectric tensor
        # (self.dielectric_ionic_tensor, 3x3) and of the piezoelectric
        # tensor (self.piezo_ionic_tensor, 3x6).
        # variables to be filled
        try:
            search = []

            def dielectric_section_start(results, match):
                # -1 flags "header seen, waiting for the opening dash line".
                results.dielectric_ionic_index = -1
            search.append([r"MACROSCOPIC STATIC DIELECTRIC TENSOR IONIC", None,
                           dielectric_section_start])

            def dielectric_section_start2(results, match):
                results.dielectric_ionic_index = 0
            search.append(
                [r"-------------------------------------",
                 lambda results, line: results.dielectric_ionic_index == -1
                 if results.dielectric_ionic_index is not None
                 else results.dielectric_ionic_index,
                 dielectric_section_start2])

            def dielectric_data(results, match):
                results.dielectric_ionic_tensor[results.dielectric_ionic_index, :] = \
                    np.array([float(match.group(i)) for i in range(1, 4)])
                results.dielectric_ionic_index += 1
            search.append(
                [r"^ *([-0-9.Ee+]+) +([-0-9.Ee+]+) +([-0-9.Ee+]+) *$",
                 lambda results, line: results.dielectric_ionic_index >= 0
                 if results.dielectric_ionic_index is not None
                 else results.dielectric_ionic_index,
                 dielectric_data])

            def dielectric_section_stop(results, match):
                # None disables the row-matching callbacks above.
                results.dielectric_ionic_index = None
            search.append(
                [r"-------------------------------------",
                 lambda results, line: results.dielectric_ionic_index >= 1
                 if results.dielectric_ionic_index is not None
                 else results.dielectric_ionic_index,
                 dielectric_section_stop])

            self.dielectric_ionic_index = None
            self.dielectric_ionic_tensor = np.zeros((3, 3))

            def piezo_section_start(results, match):
                results.piezo_ionic_index = 0
            search.append([r"PIEZOELECTRIC TENSOR IONIC CONTR for field in "
                           r"x, y, z ",
                           None, piezo_section_start])

            def piezo_data(results, match):
                results.piezo_ionic_tensor[results.piezo_ionic_index, :] = \
                    np.array([float(match.group(i)) for i in range(1, 7)])
                results.piezo_ionic_index += 1
            search.append(
                [r"^ *[xyz] +([-0-9.Ee+]+) +([-0-9.Ee+]+)" +
                 r" +([-0-9.Ee+]+) *([-0-9.Ee+]+) +([-0-9.Ee+]+)" +
                 r" +([-0-9.Ee+]+)*$",
                 lambda results, line: results.piezo_ionic_index >= 0
                 if results.piezo_ionic_index is not None
                 else results.piezo_ionic_index,
                 piezo_data])

            def piezo_section_stop(results, match):
                results.piezo_ionic_index = None
            search.append(
                ["-------------------------------------",
                 lambda results, line: results.piezo_ionic_index >= 1
                 if results.piezo_ionic_index is not None
                 else results.piezo_ionic_index,
                 piezo_section_stop])

            self.piezo_ionic_index = None
            self.piezo_ionic_tensor = np.zeros((3, 6))

            micro_pyawk(self.filename, search, self)

            self.dielectric_ionic_tensor = self.dielectric_ionic_tensor.tolist()
            self.piezo_ionic_tensor = self.piezo_ionic_tensor.tolist()

        except:
            raise Exception(
                "ionic part of LEPSILON OUTCAR could not be parsed.")
    def read_lcalcpol(self):
        # Parse LCALCPOL polarization output: total electronic dipole
        # moment (self.p_elec), spin-resolved moments (self.p_sp1/p_sp2,
        # spin-polarized runs only) and the ionic dipole moment (self.p_ion).
        # variables to be filled
        self.p_elec = None
        self.p_sp1 = None
        self.p_sp2 = None
        self.p_ion = None
        try:
            search = []

            # Always present spin/non-spin
            def p_elec(results, match):
                results.p_elec = np.array([float(match.group(1)),
                                           float(match.group(2)),
                                           float(match.group(3))])
            search.append([r"^.*Total electronic dipole moment: "
                           r"*p\[elc\]=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
                           r"*([-0-9.Ee+]*) *\)",
                           None, p_elec])

            # If spin-polarized (and not noncollinear)
            # save spin-polarized electronic values
            if self.spin and not self.noncollinear:
                def p_sp1(results, match):
                    results.p_sp1 = np.array([float(match.group(1)),
                                              float(match.group(2)),
                                              float(match.group(3))])
                search.append([r"^.*p\[sp1\]=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
                               r"*([-0-9.Ee+]*) *\)",
                               None, p_sp1])

                def p_sp2(results, match):
                    results.p_sp2 = np.array([float(match.group(1)),
                                              float(match.group(2)),
                                              float(match.group(3))])
                search.append([r"^.*p\[sp2\]=\( *([-0-9.Ee+]*) *([-0-9.Ee+]*) "
                               r"*([-0-9.Ee+]*) *\)",
                               None, p_sp2])

            def p_ion(results, match):
                results.p_ion = np.array([float(match.group(1)),
                                          float(match.group(2)),
                                          float(match.group(3))])
            search.append([r"^.*Ionic dipole moment: *p\[ion\]="
                           r"\( *([-0-9.Ee+]*)"
                           r" *([-0-9.Ee+]*) *([-0-9.Ee+]*) *\)",
                           None, p_ion])

            micro_pyawk(self.filename, search, self)

        except:
            raise Exception("LCALCPOL OUTCAR could not be parsed.")
def read_pseudo_zval(self):
"""
Create pseudopotential ZVAL dictionary.
"""
try:
def poscar_line(results, match):
poscar_line = match.group(1)
results.poscar_line = re.findall(r'[A-Z][a-z]?', poscar_line)
def zvals(results, match):
zvals = match.group(1)
results.zvals = map(float, re.findall(r'-?\d+\.\d*', zvals))
search = []
search.append([r'^.*POSCAR.*=(.*)', None, poscar_line])
search.append([r'^\s+ZVAL.*=(.*)', None, zvals])
micro_pyawk(self.filename, search, self)
zval_dict = {}
for x,y in zip(self.poscar_line, self.zvals):
zval_dict.update({x:y})
self.zval_dict = zval_dict
# Clean-up
del(self.poscar_line)
del(self.zvals)
except:
raise Exception("ZVAL dict could not be parsed.")
    def read_core_state_eigen(self):
        """
        Read the core state eigenenergies at each ionic step.
        Returns:
            A list of dict over the atom such as [{"AO":[core state eig]}].
            The core state eigenenergie list for each AO is over all ionic
            step.
        Example:
            The core state eigenenergie of the 2s AO of the 6th atom of the
            structure at the last ionic step is [5]["2s"][-1]
        """
        with zopen(self.filename, "rt") as foutcar:
            line = foutcar.readline()
            while line != "":
                line = foutcar.readline()
                if "NIONS =" in line:
                    # One accumulator dict per atom; AO label -> list of
                    # eigenenergies (one entry per ionic step).
                    natom = int(line.split("NIONS =")[1])
                    cl = [defaultdict(list) for i in range(natom)]
                if "the core state eigen" in line:
                    iat = -1
                    while line != "":
                        line = foutcar.readline()
                        # don't know number of lines to parse without knowing
                        # specific species, so stop parsing when we reach
                        # "E-fermi" instead
                        if "E-fermi" in line:
                            break
                        data = line.split()
                        # data will contain odd number of elements if it is
                        # the start of a new entry, or even number of elements
                        # if it continues the previous entry
                        if len(data) % 2 == 1:
                            iat += 1  # started parsing a new ion
                            data = data[1:]  # remove element with ion number
                        # Remaining tokens alternate: AO label, eigenvalue.
                        for i in range(0, len(data), 2):
                            cl[iat][data[i]].append(float(data[i + 1]))
        return cl
    def read_avg_core_poten(self):
        """
        Read the core potential at each ionic step.
        Returns:
            A list for each ionic step containing a list of the average core
            potentials for each atom: [[avg core pot]].
        Example:
            The average core potential of the 2nd atom of the structure at the
            last ionic step is: [-1][1]
        """
        def pairwise(iterable):
            "s -> (s0,s1), (s1,s2), (s2, s3), ..."
            # NOTE: consumes one iterator from both zip arguments, so this
            # actually yields NON-overlapping pairs (s0,s1), (s2,s3), ...
            # -- exactly what the "atom-index, potential" columns need.
            a = iter(iterable)
            return zip(a, a)

        with zopen(self.filename, "rt") as foutcar:
            line = foutcar.readline()
            aps = []
            while line != "":
                line = foutcar.readline()
                if "the norm of the test charge is" in line:
                    ap = []
                    while line != "":
                        line = foutcar.readline()
                        # don't know number of lines to parse without knowing
                        # specific species, so stop parsing when we reach
                        # "E-fermi" instead
                        if "E-fermi" in line:
                            aps.append(ap)
                            break
                        data = line.split()
                        # the average core potentials of up to 5 elements are
                        # given per line
                        for i, pot in pairwise(data):
                            ap.append(float(pot))
        return aps
def as_dict(self):
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__, "efermi": self.efermi,
"run_stats": self.run_stats, "magnetization": self.magnetization,
"charge": self.charge, "total_magnetization": self.total_mag,
"nelect": self.nelect, "is_stopped": self.is_stopped}
if self.lepsilon:
d.update({'piezo_tensor': self.piezo_tensor,
'piezo_ionic_tensor': self.piezo_ionic_tensor,
'dielectric_tensor': self.dielectric_tensor,
'dielectric_ionic_tensor': self.dielectric_ionic_tensor,
'born_ion': self.born_ion,
'born': self.born})
if self.lcalcpol:
d.update({'p_elec': self.p_elec,
'p_ion': self.p_ion})
if self.spin and not self.noncollinear:
d.update({'p_sp1': self.p_sp1,
'p_sp2': self.p_sp2})
d.update({'zval_dict': self.zval_dict})
return d
    def read_fermi_contact_shift(self):
        '''
        Parse the hyperfine coupling tables into
        self.data["fermi_contact_shift"] = {"fch": ..., "dh": ..., "th": ...}.
        output example:
        Fermi contact (isotropic) hyperfine coupling parameter (MHz)
        -------------------------------------------------------------
        ion      A_pw      A_1PS     A_1AE     A_1c      A_tot
        -------------------------------------------------------------
        1      -0.002    -0.002    -0.051     0.000    -0.052
        2      -0.002    -0.002    -0.051     0.000    -0.052
        3       0.056     0.056     0.321    -0.048     0.321
        -------------------------------------------------------------
        , which corresponds to
        [[-0.002, -0.002, -0.051, 0.0, -0.052],
        [-0.002, -0.002, -0.051, 0.0, -0.052],
        [0.056, 0.056, 0.321, -0.048, 0.321]] from 'fch' data
        '''
        # Fermi contact (isotropic) hyperfine coupling parameter (MHz)
        header_pattern1 = r"\s*Fermi contact \(isotropic\) hyperfine coupling parameter \(MHz\)\s+" \
                          r"\s*\-+" \
                          r"\s*ion\s+A_pw\s+A_1PS\s+A_1AE\s+A_1c\s+A_tot\s+" \
                          r"\s*\-+"
        row_pattern1 = r'(?:\d+)\s+' + r'\s+'.join([r'([-]?\d+\.\d+)'] * 5)
        footer_pattern = r"\-+"
        fch_table = self.read_table_pattern(header_pattern1, row_pattern1,
                                            footer_pattern, postprocess=float,
                                            last_one_only=True)
        # Dipolar hyperfine coupling parameters (MHz)
        header_pattern2 = r"\s*Dipolar hyperfine coupling parameters \(MHz\)\s+" \
                          r"\s*\-+" \
                          r"\s*ion\s+A_xx\s+A_yy\s+A_zz\s+A_xy\s+A_xz\s+A_yz\s+" \
                          r"\s*\-+"
        row_pattern2 = r'(?:\d+)\s+' + r'\s+'.join([r'([-]?\d+\.\d+)'] * 6)
        dh_table = self.read_table_pattern(header_pattern2, row_pattern2,
                                           footer_pattern, postprocess=float,
                                           last_one_only=True)
        # Total hyperfine coupling parameters after diagonalization (MHz)
        header_pattern3 = r"\s*Total hyperfine coupling parameters after diagonalization \(MHz\)\s+" \
                          r"\s*\(convention: \|A_zz\| > \|A_xx\| > \|A_yy\|\)\s+" \
                          r"\s*\-+" \
                          r"\s*ion\s+A_xx\s+A_yy\s+A_zz\s+asymmetry \(A_yy - A_xx\)/ A_zz\s+" \
                          r"\s*\-+"
        row_pattern3 = r'(?:\d+)\s+' + r'\s+'.join([r'([-]?\d+\.\d+)'] * 4)
        th_table = self.read_table_pattern(header_pattern3, row_pattern3,
                                           footer_pattern, postprocess=float,
                                           last_one_only=True)
        fc_shift_table = {'fch': fch_table, 'dh': dh_table, 'th': th_table}
        self.data["fermi_contact_shift"] = fc_shift_table
class VolumetricData(object):
"""
Simple volumetric object for reading LOCPOT and CHGCAR type files.
.. attribute:: structure
Structure associated with the Volumetric Data object
..attribute:: is_spin_polarized
True if run is spin polarized
..attribute:: dim
Tuple of dimensions of volumetric grid in each direction (nx, ny, nz).
..attribute:: data
Actual data as a dict of {string: np.array}. The string are "total"
and "diff", in accordance to the output format of vasp LOCPOT and
CHGCAR files where the total spin density is written first, followed
by the difference spin density.
.. attribute:: ngridpts
Total number of grid points in volumetric data.
"""
    def __init__(self, structure, data, distance_matrix=None, data_aug=None):
        """
        Typically, this constructor is not used directly and the static
        from_file constructor is used. This constructor is designed to allow
        summation and other operations between VolumetricData objects.
        Args:
            structure: Structure associated with the volumetric data
            data: Actual volumetric data.
            data_aug: Any extra information associated with volumetric data
                (typically augmentation charges)
            distance_matrix: A pre-computed distance matrix if available.
                Useful so pass distance_matrices between sums,
                shortcircuiting an otherwise expensive operation.
        """
        self.structure = structure
        # 2 keys ("total" + "diff") => spin-polarized; 4 keys ("total" +
        # three diff components) => noncollinear / SOC.
        self.is_spin_polarized = len(data) >= 2
        self.is_soc = len(data) >= 4
        self.dim = data["total"].shape
        self.data = data
        self.data_aug = data_aug if data_aug else {}
        self.ngridpts = self.dim[0] * self.dim[1] * self.dim[2]
        # lazy init the spin data since this is not always needed.
        self._spin_data = {}
        self._distance_matrix = {} if not distance_matrix else distance_matrix
@property
def spin_data(self):
"""
The data decomposed into actual spin data as {spin: data}.
Essentially, this provides the actual Spin.up and Spin.down data
instead of the total and diff. Note that by definition, a
non-spin-polarized run would have Spin.up data == Spin.down data.
"""
if not self._spin_data:
spin_data = dict()
spin_data[Spin.up] = 0.5 * (self.data["total"] +
self.data.get("diff", 0))
spin_data[Spin.down] = 0.5 * (self.data["total"] -
self.data.get("diff", 0))
self._spin_data = spin_data
return self._spin_data
def get_axis_grid(self, ind):
"""
Returns the grid for a particular axis.
Args:
ind (int): Axis index.
"""
ng = self.dim
num_pts = ng[ind]
lengths = self.structure.lattice.abc
return [i / num_pts * lengths[ind] for i in range(num_pts)]
    def __add__(self, other):
        # self + other == linear_add with scale factor +1.
        return self.linear_add(other, 1.0)
    def __sub__(self, other):
        # self - other == linear_add with scale factor -1.
        return self.linear_add(other, -1.0)
def linear_add(self, other, scale_factor=1.0):
"""
Method to do a linear sum of volumetric objects. Used by + and -
operators as well. Returns a VolumetricData object containing the
linear sum.
Args:
other (VolumetricData): Another VolumetricData object
scale_factor (float): Factor to scale the other data by.
Returns:
VolumetricData corresponding to self + scale_factor * other.
"""
if self.structure != other.structure:
raise ValueError("Adding or subtraction operations can only be "
"performed for volumetric data with the exact "
"same structure.")
# To add checks
data = {}
for k in self.data.keys():
data[k] = self.data[k] + scale_factor * other.data[k]
return VolumetricData(self.structure, data, self._distance_matrix)
    @staticmethod
    def parse_file(filename):
        """
        Convenience method to parse a generic volumetric data file in the vasp
        like format. Used by subclasses for parsing file.
        Args:
            filename (str): Path of file to parse
        Returns:
            (poscar, data, data_aug)
        """
        poscar_read = False
        poscar_string = []
        dataset = []
        all_dataset = []
        # for holding any strings in input that are not Poscar
        # or VolumetricData (typically augmentation charges)
        all_dataset_aug = {}
        dim = None
        dimline = None
        read_dataset = False
        ngrid_pts = 0
        data_count = 0
        poscar = None
        with zopen(filename, "rt") as f:
            for line in f:
                original_line = line
                line = line.strip()
                if read_dataset:
                    toks = line.split()
                    for tok in toks:
                        if data_count < ngrid_pts:
                            # This complicated procedure is necessary because
                            # vasp outputs x as the fastest index, followed by y
                            # then z.
                            x = data_count % dim[0]
                            y = int(math.floor(data_count / dim[0])) % dim[1]
                            z = int(math.floor(data_count / dim[0] / dim[1]))
                            dataset[x, y, z] = float(tok)
                            data_count += 1
                    if data_count >= ngrid_pts:
                        # Grid complete; following lines are either aug
                        # charges or the next spin component's header.
                        read_dataset = False
                        data_count = 0
                        all_dataset.append(dataset)
                elif not poscar_read:
                    # Accumulate POSCAR header lines until the first blank
                    # line, which terminates the POSCAR section.
                    if line != "" or len(poscar_string) == 0:
                        poscar_string.append(line)
                    elif line == "":
                        poscar = Poscar.from_string("\n".join(poscar_string))
                        poscar_read = True
                elif not dim:
                    # First grid-dimension line (e.g. "NX NY NZ").
                    dim = [int(i) for i in line.split()]
                    ngrid_pts = dim[0] * dim[1] * dim[2]
                    dimline = line
                    read_dataset = True
                    dataset = np.zeros(dim)
                elif line == dimline:
                    # when line == dimline, expect volumetric data to follow
                    # so set read_dataset to True
                    read_dataset = True
                    dataset = np.zeros(dim)
                else:
                    # store any extra lines that were not part of the volumetric data
                    key = len(all_dataset)-1  # so we know which set of data the extra lines are associated with
                    if key not in all_dataset_aug:
                        all_dataset_aug[key] = []
                    all_dataset_aug[key].append(original_line)
            if len(all_dataset) == 4:
                # Noncollinear run: total + three magnetization components.
                data = {"total": all_dataset[0], "diff_x": all_dataset[1],
                        "diff_y": all_dataset[2], "diff_z": all_dataset[3]}
                data_aug = {"total": all_dataset_aug.get(0, None), "diff_x": all_dataset_aug.get(1, None),
                            "diff_y": all_dataset_aug.get(2, None), "diff_z": all_dataset_aug.get(3, None)}
                # construct a "diff" dict for scalar-like magnetization density,
                # referenced to an arbitrary direction (using same method as
                # pymatgen.electronic_structure.core.Magmom, see
                # Magmom documentation for justification for this)
                # TODO: re-examine this, and also similar behavior in Magmom - @mkhorton
                # TODO: does CHGCAR change with different SAXIS?
                diff_xyz = np.array([data["diff_x"], data["diff_y"], data["diff_z"]])
                diff_xyz = diff_xyz.reshape((3, dim[0]*dim[1]*dim[2]))
                ref_direction = np.array([1.01, 1.02, 1.03])
                ref_sign = np.sign(np.dot(ref_direction, diff_xyz))
                diff = np.multiply(np.linalg.norm(diff_xyz, axis=0), ref_sign)
                data["diff"] = diff.reshape((dim[0], dim[1], dim[2]))
            elif len(all_dataset) == 2:
                # Spin-polarized run: total + spin-difference density.
                data = {"total": all_dataset[0], "diff": all_dataset[1]}
                data_aug = {"total": all_dataset_aug.get(0, None), "diff": all_dataset_aug.get(1, None)}
            else:
                data = {"total": all_dataset[0]}
                data_aug = {"total": all_dataset_aug.get(0, None)}
            return poscar, data, data_aug
def write_file(self, file_name, vasp4_compatible=False):
    """
    Write the VolumetricData object to a vasp compatible file.

    The output mirrors VASP's own CHGCAR/LOCPOT layout: POSCAR-style
    header, a blank line, the grid dimensions, then the data in
    Fortran (x-fastest) order, five values per line.

    Args:
        file_name (str): Path to a file
        vasp4_compatible (bool): True if the format is vasp4 compatible
            (vasp4 omits the element-symbol line of the header).
    """

    def _print_fortran_float(f):
        '''
        Fortran codes print floats with a leading zero in scientific
        notation. When writing CHGCAR files, we adopt this convention
        to ensure written CHGCAR files are byte-to-byte identical to
        their input files as far as possible.
        :param f: float
        :return: str
        '''
        # "{:.10E}" gives e.g. "1.2345678901E+05"; the slices below shift
        # the decimal point one place left ("0.12345678901E+06") and bump
        # the exponent by one to compensate, matching Fortran output.
        s = "{:.10E}".format(f)
        if f > 0:
            return "0."+s[0]+s[2:12]+'E'+"{:+03}".format(int(s[13:])+1)
        else:
            # negative numbers carry a leading '-', so all slices move by one
            return "-."+s[1]+s[3:13]+'E'+"{:+03}".format(int(s[14:])+1)

    with zopen(file_name, "wt") as f:
        p = Poscar(self.structure)

        # use original name if it's been set (e.g. from Chgcar)
        comment = getattr(self, 'name', p.comment)

        lines = comment + "\n"
        lines += " 1.00000000000000\n"
        latt = self.structure.lattice.matrix
        lines += " %12.6f%12.6f%12.6f\n" % tuple(latt[0, :])
        lines += " %12.6f%12.6f%12.6f\n" % tuple(latt[1, :])
        lines += " %12.6f%12.6f%12.6f\n" % tuple(latt[2, :])
        if not vasp4_compatible:
            # element-symbol line is only understood by vasp 5+
            lines += "".join(["%5s" % s for s in p.site_symbols]) + "\n"
        lines += "".join(["%6d" % x for x in p.natoms]) + "\n"
        lines += "Direct\n"
        for site in self.structure:
            lines += "%10.6f%10.6f%10.6f\n" % tuple(site.frac_coords)
        lines += " \n"
        f.write(lines)
        a = self.dim

        def write_spin(data_type):
            # Emit one volumetric dataset: dimension line, then values in
            # Fortran order (x index varies fastest), 5 values per line.
            lines = []
            count = 0
            f.write("   {}   {}   {}\n".format(a[0], a[1], a[2]))
            for (k, j, i) in itertools.product(list(range(a[2])), list(range(a[1])),
                                               list(range(a[0]))):
                lines.append(_print_fortran_float(self.data[data_type][i, j, k]))
                count += 1
                if count % 5 == 0:
                    f.write(" " + "".join(lines) + "\n")
                    lines = []
                else:
                    # single space separates values within a line
                    lines.append(" ")
            # flush any partial final line
            f.write(" " + "".join(lines) + " \n")
            # write back any augmentation-occupancy lines captured on parse
            f.write("".join(self.data_aug.get(data_type, [])))

        write_spin("total")
        if self.is_spin_polarized and self.is_soc:
            # non-collinear runs carry three magnetization components
            write_spin("diff_x")
            write_spin("diff_y")
            write_spin("diff_z")
        elif self.is_spin_polarized:
            write_spin("diff")
def get_integrated_diff(self, ind, radius, nbins=1):
    """
    Get integrated difference of atom index ind up to radius. This can be
    an extremely computationally intensive process, depending on how many
    grid points are in the VolumetricData.

    Args:
        ind (int): Index of atom.
        radius (float): Radius of integration.
        nbins (int): Number of bins. Defaults to 1. This allows one to
            obtain the charge integration up to a list of the cumulative
            charge integration values for radii for [radius/nbins,
            2 * radius/nbins, ....].

    Returns:
        Differential integrated charge as a np array of [[radius, value],
        ...]. Format is for ease of plotting. E.g., plt.plot(data[:,0],
        data[:,1])
    """
    # For non-spin-polarized runs, this is zero by definition.
    if not self.is_spin_polarized:
        radii = [radius / nbins * (i + 1) for i in range(nbins)]
        data = np.zeros((nbins, 2))
        data[:, 0] = radii
        return data

    struct = self.structure
    a = self.dim
    # Cache the (frac_coord, distance) pairs per atom; recompute only if
    # this atom has never been queried or a larger radius is requested.
    if ind not in self._distance_matrix or\
            self._distance_matrix[ind]["max_radius"] < radius:
        coords = []
        for (x, y, z) in itertools.product(*[list(range(i)) for i in a]):
            # fractional coordinates of every grid point
            coords.append([x / a[0], y / a[1], z / a[2]])
        sites_dist = struct.lattice.get_points_in_sphere(
            coords, struct[ind].coords, radius)
        self._distance_matrix[ind] = {"max_radius": radius,
                                      "data": np.array(sites_dist)}

    data = self._distance_matrix[ind]["data"]

    # Use boolean indexing to find all charges within the desired distance.
    inds = data[:, 1] <= radius
    dists = data[inds, 1]
    # Map fractional coordinates (wrapped into [0, 1)) back to integer
    # grid indices so the stored values can be looked up.
    data_inds = np.rint(np.mod(list(data[inds, 0]), 1) *
                        np.tile(a, (len(dists), 1))).astype(int)
    vals = [self.data["diff"][x, y, z] for x, y, z in data_inds]

    # Histogram by distance, then cumulative-sum the bins and normalize by
    # the total number of grid points to get integrated charge per radius.
    hist, edges = np.histogram(dists, bins=nbins,
                               range=[0, radius],
                               weights=vals)
    data = np.zeros((nbins, 2))
    data[:, 0] = edges[1:]
    data[:, 1] = [sum(hist[0:i + 1]) / self.ngridpts
                  for i in range(nbins)]
    return data
def get_average_along_axis(self, ind):
    """
    Get the averaged total of the volumetric data a certain axis direction.
    For example, useful for visualizing Hartree Potentials from a LOCPOT
    file.

    Args:
        ind (int): Index of axis (0, 1 or 2).

    Returns:
        Average total along axis as a 1D array of length ``self.dim[ind]``.
    """
    grid = self.data["total"]
    ng = self.dim
    # Collapse the two axes perpendicular to ``ind`` in a single call.
    if ind == 0:
        total = np.sum(grid, axis=(1, 2))
    elif ind == 1:
        total = np.sum(grid, axis=(0, 2))
    else:
        total = np.sum(grid, axis=(0, 1))
    # Divide by the number of summed grid points to turn the sum into a mean.
    return total / ng[(ind + 1) % 3] / ng[(ind + 2) % 3]
class Locpot(VolumetricData):
    """
    Simple object representing a LOCPOT file (local potential on the grid).

    Args:
        poscar (Poscar): Poscar object containing structure.
        data: Actual data.
    """

    def __init__(self, poscar, data):
        super(Locpot, self).__init__(poscar.structure, data)
        # keep the POSCAR comment as the name of this dataset
        self.name = poscar.comment

    @staticmethod
    def from_file(filename):
        """Read a LOCPOT file and return a Locpot object."""
        parsed = VolumetricData.parse_file(filename)
        poscar, data = parsed[0], parsed[1]
        return Locpot(poscar, data)
class Chgcar(VolumetricData):
    """
    Simple object representing a CHGCAR file (charge density on the grid).

    Args:
        poscar (Poscar): Poscar object containing structure.
        data: Actual data.
        data_aug: Augmentation-occupancy data, if present.
    """

    def __init__(self, poscar, data, data_aug=None):
        super(Chgcar, self).__init__(poscar.structure, data, data_aug=data_aug)
        self.poscar = poscar
        # keep the POSCAR comment as the name of this dataset
        self.name = poscar.comment
        # lazily populated cache used by get_integrated_diff
        self._distance_matrix = {}

    @staticmethod
    def from_file(filename):
        """Read a CHGCAR file and return a Chgcar object."""
        poscar, data, data_aug = VolumetricData.parse_file(filename)
        return Chgcar(poscar, data, data_aug=data_aug)

    @property
    def net_magnetization(self):
        """Total magnetization (sum of the 'diff' grid), or None if
        the run is not spin-polarized."""
        if not self.is_spin_polarized:
            return None
        return np.sum(self.data['diff'])
class Procar(object):
    """
    Object for reading a PROCAR file.

    Args:
        filename: Name of file containing PROCAR.

    .. attribute:: data

        The PROCAR data of the form below. Note that VASP uses 1-based
        indexing, but all indices are converted to 0-based here.::

            {
                spin: nd.array accessed with (k-point index, band index, ion index, orbital index)
            }

    .. attribute:: weights

        The weights associated with each k-point as an nd.array of length
        nkpoints.

    ..attribute:: phase_factors

        Phase factors, where present (e.g. LORBIT = 12). A dict of the form:
        {
            spin: complex nd.array accessed with (k-point index, band index, ion index, orbital index)
        }

    ..attribute:: nbands

        Number of bands

    ..attribute:: nkpoints

        Number of k-points

    ..attribute:: nions

        Number of ions
    """

    def __init__(self, filename):
        headers = None

        with zopen(filename, "rt") as f:
            # preamble gives the array dimensions; the other patterns drive
            # a small state machine over the per-kpoint / per-band sections
            preambleexpr = re.compile(
                r"# of k-points:\s*(\d+)\s+# of bands:\s*(\d+)\s+# of "
                r"ions:\s*(\d+)")
            kpointexpr = re.compile(r"^k-point\s+(\d+).*weight = ([0-9\.]+)")
            bandexpr = re.compile(r"^band\s+(\d+)")
            ionexpr = re.compile(r"^ion.*")
            expr = re.compile(r"^([0-9]+)\s+")
            current_kpoint = 0
            current_band = 0
            # ``done`` flips after a "tot" line: subsequent numeric rows
            # belong to the phase-factor section, not the projections
            done = False
            # spin toggles each time the k-point counter restarts at 1,
            # so starting at down means the first pass is Spin.up
            spin = Spin.down

            for l in f:
                l = l.strip()
                if bandexpr.match(l):
                    m = bandexpr.match(l)
                    # convert VASP's 1-based band index to 0-based
                    current_band = int(m.group(1)) - 1
                    done = False
                elif kpointexpr.match(l):
                    m = kpointexpr.match(l)
                    current_kpoint = int(m.group(1)) - 1
                    weights[current_kpoint] = float(m.group(2))
                    if current_kpoint == 0:
                        # k-point list restarted => next spin channel
                        spin = Spin.up if spin == Spin.down else Spin.down
                    done = False
                elif headers is None and ionexpr.match(l):
                    # first "ion ..." row: orbital labels, minus the leading
                    # "ion" and trailing "tot" columns
                    headers = l.split()
                    headers.pop(0)
                    headers.pop(-1)

                    # default factories capture nkpoints/nbands/nions parsed
                    # from the preamble (note: ``f`` here shadows the file
                    # handle name, but iteration is already bound above)
                    def f():
                        return np.zeros((nkpoints, nbands, nions, len(headers)))

                    data = defaultdict(f)

                    def f2():
                        return np.full((nkpoints, nbands, nions, len(headers)),
                                       np.NaN, dtype=np.complex128)

                    phase_factors = defaultdict(f2)
                elif expr.match(l):
                    toks = l.split()
                    index = int(toks.pop(0)) - 1
                    num_data = np.array([float(t)
                                         for t in toks[:len(headers)]])
                    if not done:
                        # projection magnitudes section
                        data[spin][current_kpoint, current_band,
                                   index, :] = num_data
                    else:
                        # phase factors: first pass stores the real parts,
                        # second pass (same indices) adds the imaginary parts
                        if np.isnan(phase_factors[spin][
                                current_kpoint, current_band, index, 0]):
                            phase_factors[spin][current_kpoint, current_band,
                                                index, :] = num_data
                        else:
                            phase_factors[spin][current_kpoint, current_band,
                                                index, :] += 1j * num_data
                elif l.startswith("tot"):
                    done = True
                elif preambleexpr.match(l):
                    m = preambleexpr.match(l)
                    nkpoints = int(m.group(1))
                    nbands = int(m.group(2))
                    nions = int(m.group(3))
                    weights = np.zeros(nkpoints)

            self.nkpoints = nkpoints
            self.nbands = nbands
            self.nions = nions
            self.weights = weights
            self.orbitals = headers
            self.data = data
            self.phase_factors = phase_factors

    def get_projection_on_elements(self, structure):
        """
        Method returning a dictionary of projections on elements.

        Args:
            structure (Structure): Input structure.

        Returns:
            a dict of projections summed over orbitals, indexed as
            dico[spin][band index][kpoint index][element symbol]
        """
        dico = {}
        for spin in self.data.keys():
            # outer list over bands, inner over k-points
            dico[spin] = [[defaultdict(float)
                           for i in range(self.nkpoints)]
                          for j in range(self.nbands)]

        for iat in range(self.nions):
            name = structure.species[iat].symbol
            for spin, d in self.data.items():
                for k, b in itertools.product(range(self.nkpoints),
                                              range(self.nbands)):
                    dico[spin][b][k][name] = np.sum(d[k, b, iat, :])

        return dico

    def get_occupation(self, atom_index, orbital):
        """
        Returns the occupation for a particular orbital of a particular atom.

        Args:
            atom_index (int): Index of atom in the PROCAR. It should be noted
                that VASP uses 1-based indexing for atoms, but this is
                converted to 0-based indexing in this parser to be
                consistent with representation of structures in pymatgen.
            orbital (str): An orbital. If it is a single character, e.g., s,
                p, d or f, the sum of all s-type, p-type, d-type or f-type
                orbitals occupations are returned respectively. If it is a
                specific orbital, e.g., px, dxy, etc., only the occupation
                of that orbital is returned.

        Returns:
            Sum occupation of orbital of atom, keyed by spin.
        """
        orbital_index = self.orbitals.index(orbital)
        # weight each k-point's contribution by its BZ weight
        return {spin: np.sum(d[:, :, atom_index, orbital_index] * self.weights[:, None])
                for spin, d in self.data.items()}
class Oszicar(object):
    """
    A basic parser for an OSZICAR output from VASP. In general, while the
    OSZICAR is useful for a quick look at the output from a VASP run, we
    recommend that you use the Vasprun parser instead, which gives far richer
    information about a run.

    Args:
        filename (str): Filename of file to parse

    .. attribute:: electronic_steps

            All electronic steps as a list of list of dict. e.g.,
            [[{"rms": 160.0, "E": 4507.24605593, "dE": 4507.2, "N": 1,
            "deps": -17777.0, "ncg": 16576}, ...], [....]
            where electronic_steps[index] refers the list of electronic steps
            in one ionic_step, electronic_steps[index][subindex] refers to a
            particular electronic step at subindex in ionic step at index. The
            dict of properties depends on the type of VASP run, but in general,
            "E", "dE" and "rms" should be present in almost all runs.

    .. attribute:: ionic_steps:

            All ionic_steps as a list of dict, e.g.,
            [{"dE": -526.36, "E0": -526.36024, "mag": 0.0, "F": -526.36024},
            ...]
            This is the typical output from VASP at the end of each ionic step.
    """

    def __init__(self, filename):
        electronic_steps = []
        ionic_steps = []
        # three flavors of ionic-step summary lines: plain relaxation,
        # spin-polarized (extra mag= field), and molecular dynamics
        ionic_pattern = re.compile(r"(\d+)\s+F=\s*([\d\-\.E\+]+)\s+"
                                   r"E0=\s*([\d\-\.E\+]+)\s+"
                                   r"d\s*E\s*=\s*([\d\-\.E\+]+)$")
        ionic_mag_pattern = re.compile(r"(\d+)\s+F=\s*([\d\-\.E\+]+)\s+"
                                       r"E0=\s*([\d\-\.E\+]+)\s+"
                                       r"d\s*E\s*=\s*([\d\-\.E\+]+)\s+"
                                       r"mag=\s*([\d\-\.E\+]+)")
        ionic_MD_pattern = re.compile(r"(\d+)\s+T=\s*([\d\-\.E\+]+)\s+"
                                      r"E=\s*([\d\-\.E\+]+)\s+"
                                      r"F=\s*([\d\-\.E\+]+)\s+"
                                      r"E0=\s*([\d\-\.E\+]+)\s+"
                                      r"EK=\s*([\d\-\.E\+]+)\s+"
                                      r"SP=\s*([\d\-\.E\+]+)\s+"
                                      r"SK=\s*([\d\-\.E\+]+)")
        # electronic SCF lines start with the algorithm tag, e.g. "DAV:"
        electronic_pattern = re.compile(r"\s*\w+\s*:(.*)")

        def smart_convert(header, num):
            # N and ncg columns are integers; everything else is float;
            # unparseable tokens (e.g. "*****" overflow) become "--"
            try:
                if header == "N" or header == "ncg":
                    v = int(num)
                    return v
                v = float(num)
                return v
            except ValueError:
                return "--"

        header = []
        with zopen(filename, "rt") as fid:
            for line in fid:
                line = line.strip()
                m = electronic_pattern.match(line)
                if m:
                    toks = m.group(1).split()
                    # zip column values with the most recent header row
                    data = {header[i]: smart_convert(header[i], toks[i])
                            for i in range(len(toks))}
                    if toks[0] == "1":
                        # SCF counter restarted => new ionic step begins
                        electronic_steps.append([data])
                    else:
                        electronic_steps[-1].append(data)
                elif ionic_pattern.match(line.strip()):
                    m = ionic_pattern.match(line.strip())
                    ionic_steps.append({"F": float(m.group(2)),
                                        "E0": float(m.group(3)),
                                        "dE": float(m.group(4))})
                elif ionic_mag_pattern.match(line.strip()):
                    m = ionic_mag_pattern.match(line.strip())
                    ionic_steps.append({"F": float(m.group(2)),
                                        "E0": float(m.group(3)),
                                        "dE": float(m.group(4)),
                                        "mag": float(m.group(5))})
                elif ionic_MD_pattern.match(line.strip()):
                    m = ionic_MD_pattern.match(line.strip())
                    ionic_steps.append({"T": float(m.group(2)),
                                        "E": float(m.group(3)),
                                        "F": float(m.group(4)),
                                        "E0": float(m.group(5)),
                                        "EK": float(m.group(6)),
                                        "SP": float(m.group(7)),
                                        "SK": float(m.group(8))})
                elif re.match(r"^\s*N\s+E\s*", line):
                    # column-header row; normalize "d eps" to one token so
                    # headers align 1:1 with the value columns
                    header = line.strip().replace("d eps", "deps").split()
        self.electronic_steps = electronic_steps
        self.ionic_steps = ionic_steps

    @property
    def all_energies(self):
        """
        Compilation of all energies from all electronic steps and ionic steps
        as a tuple of list of energies, e.g.,
        ((4507.24605593, 143.824705755, -512.073149912, ...), ...)
        """
        all_energies = []
        for i in range(len(self.electronic_steps)):
            energies = [step["E"] for step in self.electronic_steps[i]]
            # append the converged free energy of the ionic step last
            energies.append(self.ionic_steps[i]["F"])
            all_energies.append(tuple(energies))
        return tuple(all_energies)

    @property
    @unitized("eV")
    def final_energy(self):
        """
        Final energy from run (E0 of the last ionic step).
        """
        return self.ionic_steps[-1]["E0"]

    def as_dict(self):
        """Returns a JSON-serializable dict representation."""
        return {"electronic_steps": self.electronic_steps,
                "ionic_steps": self.ionic_steps}
class VaspParserError(Exception):
    """Exception raised when a VASP output file cannot be parsed."""
def get_band_structure_from_vasp_multiple_branches(dir_name, efermi=None,
                                                   projections=False):
    """
    This method is used to get band structure info from a VASP directory. It
    takes into account that the run can be divided in several branches named
    "branch_x". If the run has not been divided in branches the method will
    turn to parsing vasprun.xml directly.

    The method returns None if there is a parsing error.

    Args:
        dir_name: Directory containing all bandstructure runs.
        efermi: Efermi for bandstructure.
        projections: True if you want to get the data on site projections if
            any. Note that this is sometimes very large

    Returns:
        A BandStructure Object, or None if no vasprun.xml is found.
    """
    # TODO: Add better error handling!!!
    if os.path.exists(os.path.join(dir_name, "branch_0")):
        # get all branch dir names
        branch_dir_names = [os.path.abspath(d)
                            for d in glob.glob("{i}/branch_*"
                                               .format(i=dir_name))
                            if os.path.isdir(d)]

        # sort by the branch number in the directory name (e.g, branch_10)
        sorted_branch_dir_names = sorted(branch_dir_names,
                                         key=lambda x: int(x.split("_")[-1]))

        # populate branches with Bandstructure instances.  Use a distinct
        # loop variable so the ``dir_name`` argument is not shadowed.
        branches = []
        for branch_dir in sorted_branch_dir_names:
            xml_file = os.path.join(branch_dir, "vasprun.xml")
            if os.path.exists(xml_file):
                run = Vasprun(xml_file, parse_projected_eigen=projections)
                branches.append(run.get_band_structure(efermi=efermi))
            else:
                # It might be better to throw an exception.
                # BUG FIX: the original passed keyword args (d=..., f=...) to
                # positional "{}" placeholders, which raises IndexError.
                warnings.warn("Skipping {d}. Unable to find {f}"
                              .format(d=branch_dir, f=xml_file))

        return get_reconstructed_band_structure(branches, efermi)
    else:
        xml_file = os.path.join(dir_name, "vasprun.xml")
        # Better handling of Errors
        if os.path.exists(xml_file):
            return Vasprun(xml_file, parse_projected_eigen=projections)\
                .get_band_structure(kpoints_filename=None, efermi=efermi)
        else:
            return None
class Xdatcar(object):
    """
    Class representing an XDATCAR file. Only tested with VASP 5.x files.

    .. attribute:: structures

        List of structures parsed from XDATCAR.

    .. attribute:: comment

        Optional comment string.

    Authors: Ram Balachandran
    """

    def __init__(self, filename, ionicstep_start=1,
                 ionicstep_end=None, comment=None):
        """
        Init a Xdatcar.

        Args:
            filename (str): Filename of input XDATCAR file.
            ionicstep_start (int): Starting number of ionic step.
            ionicstep_end (int): Ending number of ionic step (exclusive).
            comment (str): Optional comment; defaults to the formula of the
                first parsed structure.

        Raises:
            Exception: If either ionic step bound is less than 1.
        """
        preamble = None
        coords_str = []
        structures = []
        preamble_done = False
        if ionicstep_start < 1:
            raise Exception('Start ionic step cannot be less than 1')
        # BUG FIX: the original re-validated ionicstep_start here, so an
        # invalid ionicstep_end was silently accepted.
        if (ionicstep_end is not None and
                ionicstep_end < 1):
            raise Exception('End ionic step cannot be less than 1')

        ionicstep_cnt = 1
        with zopen(filename, "rt") as f:
            for l in f:
                l = l.strip()
                if preamble is None:
                    preamble = [l]
                elif not preamble_done:
                    if l == "" or "Direct configuration=" in l:
                        preamble_done = True
                        # Drop a duplicated first preamble line (can occur
                        # in concatenated XDATCAR files).
                        tmp_preamble = [preamble[0]]
                        for i in range(1, len(preamble)):
                            if preamble[0] != preamble[i]:
                                tmp_preamble.append(preamble[i])
                            else:
                                break
                        preamble = tmp_preamble
                    else:
                        preamble.append(l)
                elif l == "" or "Direct configuration=" in l:
                    # End of one configuration block: build a structure and
                    # keep it if it falls inside the requested step window.
                    p = Poscar.from_string("\n".join(preamble +
                                                     ["Direct"] + coords_str))
                    if ionicstep_end is None:
                        if ionicstep_cnt >= ionicstep_start:
                            structures.append(p.structure)
                    else:
                        if ionicstep_start <= ionicstep_cnt < ionicstep_end:
                            structures.append(p.structure)
                    ionicstep_cnt += 1
                    coords_str = []
                else:
                    coords_str.append(l)
        # The file does not end with a configuration header, so the last
        # block still has to be parsed here.
        p = Poscar.from_string("\n".join(preamble +
                                         ["Direct"] + coords_str))
        if ionicstep_end is None:
            if ionicstep_cnt >= ionicstep_start:
                structures.append(p.structure)
        else:
            if ionicstep_start <= ionicstep_cnt < ionicstep_end:
                structures.append(p.structure)
        self.structures = structures
        self.comment = comment or self.structures[0].formula

    @property
    def site_symbols(self):
        """
        Sequence of symbols associated with the Xdatcar. Similar to 6th line in
        vasp 5+ Xdatcar.
        """
        syms = [site.specie.symbol for site in self.structures[0]]
        return [a[0] for a in itertools.groupby(syms)]

    @property
    def natoms(self):
        """
        Sequence of number of sites of each type associated with the Poscar.
        Similar to 7th line in vasp 5+ Xdatcar.
        """
        syms = [site.specie.symbol for site in self.structures[0]]
        return [len(tuple(a[1])) for a in itertools.groupby(syms)]

    def concatenate(self, filename, ionicstep_start=1,
                    ionicstep_end=None):
        """
        Concatenate structures in file to Xdatcar.

        Args:
            filename (str): Filename of XDATCAR file to be concatenated.
            ionicstep_start (int): Starting number of ionic step.
            ionicstep_end (int): Ending number of ionic step (exclusive).

        Raises:
            Exception: If either ionic step bound is less than 1.

        TODO(rambalachandran):
           Requires a check to ensure if the new concatenating file has the
           same lattice structure and atoms as the Xdatcar class.
        """
        preamble = None
        coords_str = []
        structures = self.structures
        preamble_done = False
        if ionicstep_start < 1:
            raise Exception('Start ionic step cannot be less than 1')
        # BUG FIX: validate ionicstep_end, not ionicstep_start again.
        if (ionicstep_end is not None and
                ionicstep_end < 1):
            raise Exception('End ionic step cannot be less than 1')

        ionicstep_cnt = 1
        with zopen(filename, "rt") as f:
            for l in f:
                l = l.strip()
                if preamble is None:
                    preamble = [l]
                elif not preamble_done:
                    if l == "" or "Direct configuration=" in l:
                        preamble_done = True
                        # Drop a duplicated first preamble line (see __init__).
                        tmp_preamble = [preamble[0]]
                        for i in range(1, len(preamble)):
                            if preamble[0] != preamble[i]:
                                tmp_preamble.append(preamble[i])
                            else:
                                break
                        preamble = tmp_preamble
                    else:
                        preamble.append(l)
                elif l == "" or "Direct configuration=" in l:
                    p = Poscar.from_string("\n".join(preamble +
                                                     ["Direct"] + coords_str))
                    if ionicstep_end is None:
                        if ionicstep_cnt >= ionicstep_start:
                            structures.append(p.structure)
                    else:
                        if ionicstep_start <= ionicstep_cnt < ionicstep_end:
                            structures.append(p.structure)
                    ionicstep_cnt += 1
                    coords_str = []
                else:
                    coords_str.append(l)
        # Parse the trailing configuration block.
        p = Poscar.from_string("\n".join(preamble +
                                         ["Direct"] + coords_str))
        if ionicstep_end is None:
            if ionicstep_cnt >= ionicstep_start:
                structures.append(p.structure)
        else:
            if ionicstep_start <= ionicstep_cnt < ionicstep_end:
                structures.append(p.structure)
        self.structures = structures

    def get_string(self, ionicstep_start=1,
                   ionicstep_end=None,
                   significant_figures=8):
        """
        Return the Xdatcar as a string in XDATCAR format.

        Args:
            ionicstep_start (int): Starting number of ionic step.
            ionicstep_end (int): Ending number of ionic step (exclusive).
            significant_figures (int): Decimal places for coordinates.

        Raises:
            Exception: If either ionic step bound is less than 1.
        """
        if ionicstep_start < 1:
            raise Exception('Start ionic step cannot be less than 1')
        # BUG FIX: validate ionicstep_end, not ionicstep_start again.
        if (ionicstep_end is not None and
                ionicstep_end < 1):
            raise Exception('End ionic step cannot be less than 1')
        latt = self.structures[0].lattice
        if np.linalg.det(latt.matrix) < 0:
            # ensure a right-handed lattice in the output
            latt = Lattice(-latt.matrix)
        lines = [self.comment, "1.0", str(latt)]
        lines.append(" ".join(self.site_symbols))
        lines.append(" ".join([str(x) for x in self.natoms]))
        format_str = "{{:.{0}f}}".format(significant_figures)
        ionicstep_cnt = 1
        output_cnt = 1
        for cnt, structure in enumerate(self.structures):
            ionicstep_cnt = cnt + 1
            if ionicstep_end is None:
                if ionicstep_cnt >= ionicstep_start:
                    lines.append("Direct configuration=" +
                                 ' ' * (7 - len(str(output_cnt))) + str(output_cnt))
                    for (i, site) in enumerate(structure):
                        coords = site.frac_coords
                        line = " ".join([format_str.format(c) for c in coords])
                        lines.append(line)
                    output_cnt += 1
            else:
                if ionicstep_start <= ionicstep_cnt < ionicstep_end:
                    lines.append("Direct configuration=" +
                                 ' ' * (7 - len(str(output_cnt))) + str(output_cnt))
                    for (i, site) in enumerate(structure):
                        coords = site.frac_coords
                        line = " ".join([format_str.format(c) for c in coords])
                        lines.append(line)
                    output_cnt += 1
        return "\n".join(lines) + "\n"

    def write_file(self, filename, **kwargs):
        """
        Write Xdatcar class into a file.

        Args:
            filename (str): Filename of output XDATCAR file.

        The supported kwargs are the same as those for the
        Xdatcar.get_string method and are passed through directly.
        """
        with zopen(filename, "wt") as f:
            f.write(self.get_string(**kwargs))

    def __str__(self):
        return self.get_string()
class Dynmat(object):
    """
    Object for reading a DYNMAT file.

    Args:
        filename: Name of file containing DYNMAT.

    .. attribute:: data

        A nested dict containing the DYNMAT data of the form::
        [atom <int>][disp <int>]['dispvec'] =
            displacement vector (part of first line in dynmat block, e.g. "0.01 0 0")
        [atom <int>][disp <int>]['dynmat'] =
                <list> list of dynmat lines for this atom and this displacement

    Authors: Patrick Huck
    """

    def __init__(self, filename):
        with zopen(filename, "rt") as f:
            lines = list(clean_lines(f.readlines()))
            self._nspecs, self._natoms, self._ndisps = map(int, lines[
                0].split())
            # BUG FIX: was stored as a lazy ``map`` object, which in
            # Python 3 is exhausted after the first read of ``masses``.
            self._masses = [float(x) for x in lines[1].split()]
            self.data = defaultdict(dict)
            atom, disp = None, None
            for i, l in enumerate(lines[2:]):
                v = list(map(float, l.split()))
                if not i % (self._natoms + 1):
                    # header line of a block: atom index, displacement
                    # index, then the displacement vector
                    atom, disp = map(int, v[:2])
                    if atom not in self.data:
                        self.data[atom] = {}
                    if disp not in self.data[atom]:
                        self.data[atom][disp] = {}
                    self.data[atom][disp]['dispvec'] = v[2:]
                else:
                    if 'dynmat' not in self.data[atom][disp]:
                        self.data[atom][disp]['dynmat'] = []
                    self.data[atom][disp]['dynmat'].append(v)

    def get_phonon_frequencies(self):
        """calculate phonon frequencies"""
        # TODO: the following is most likely not correct or suboptimal
        # hence for demonstration purposes only
        frequencies = []
        # BUG FIX: dict.iteritems()/itervalues() do not exist in Python 3.
        for k, v0 in self.data.items():
            for v1 in v0.values():
                vec = map(abs, v1['dynmat'][k - 1])
                frequency = math.sqrt(sum(vec)) * 2. * \
                    math.pi * 15.633302  # THz
                frequencies.append(frequency)
        return frequencies

    @property
    def nspecs(self):
        """returns the number of species"""
        return self._nspecs

    @property
    def natoms(self):
        """returns the number of atoms"""
        return self._natoms

    @property
    def ndisps(self):
        """returns the number of displacements"""
        return self._ndisps

    @property
    def masses(self):
        """returns the list of atomic masses"""
        return list(self._masses)
def get_adjusted_fermi_level(efermi, cbm, band_structure):
    """
    When running a band structure computation the fermi level needs to be
    taken from the static run that gave the charge density used for the
    non-self-consistent band structure run. Sometimes this fermi level is
    however a little too low because of the mismatch between the uniform grid
    used in the static run and the band structure k-points (e.g., the VBM is
    on Gamma and the Gamma point is not in the uniform mesh). Here we use a
    procedure consisting in looking for energy levels higher than the static
    fermi level (but lower than the LUMO); if any of these levels make the
    band structure appear insulating and not metallic anymore, we keep this
    adjusted fermi level. This procedure has been shown to detect correctly
    most insulators.

    Args:
        efermi (float): Fermi energy of the static run
        cbm (float): Conduction band minimum of the static run
        band_structure: a band_structure object

    Returns:
        a new adjusted fermi level
    """
    # Work on a deep copy so the caller's band structure is untouched.
    working_bs = BandStructureSymmLine.from_dict(band_structure.as_dict())
    if not working_bs.is_metal():
        # Already insulating at the static fermi level; nothing to adjust.
        return efermi
    e = efermi
    while e < cbm:
        e += 0.01
        working_bs._efermi = e
        if not working_bs.is_metal():
            return e
    return efermi
class Wavederf(object):
    """
    Object for reading a WAVEDERF file.

    Note: This file is only produced when LOPTICS is true AND vasp has been
    recompiled after uncommenting the line that calls
    WRT_CDER_BETWEEN_STATES_FORMATTED in linear_optics.F

    Args:
        filename: Name of file containing WAVEDERF.

    .. attribute:: data

        A numpy array with shape (nb_kpoints, nb_bands, nb_bands, 6)
        holding, for each k-point (in IBZKPT order) and each ordered pair of
        bands, the values
        [cdum_x_real, cdum_x_imag, cdum_y_real, cdum_y_imag,
         cdum_z_real, cdum_z_imag].
        VASP uses 1-based band indexing in the file; the array is 0-based.
        Numpy indexing/slicing can be used to fetch e.g. all matrix elements
        between two given bands, x/y/z components, real/imaginary parts,
        abs/phase, etc.

    Author: Miguel Dias Costa
    """

    def __init__(self, filename):
        with zopen(filename, "rt") as f:
            header_tokens = f.readline().split()
            ispin = int(header_tokens[0])
            nb_kpoints = int(header_tokens[1])
            nb_bands = int(header_tokens[2])
            data = np.zeros((nb_kpoints, nb_bands, nb_bands, 6))
            for ik in range(nb_kpoints):
                for ib1 in range(nb_bands):
                    for ib2 in range(nb_bands):
                        # Each line starts with 6 redundant values (band
                        # indices, energies and occupations, all available
                        # elsewhere); only the 6 matrix elements that follow
                        # are stored.
                        row = f.readline().split()
                        data[ik][ib1][ib2] = [float(t) for t in row[6:]]
            self.data = data
            self._nb_kpoints = nb_kpoints
            self._nb_bands = nb_bands

    @property
    def nb_bands(self):
        """Number of bands in the band structure."""
        return self._nb_bands

    @property
    def nb_kpoints(self):
        """Number of k-points in the band structure calculation."""
        return self._nb_kpoints

    def get_elements_between_bands(self, band_i, band_j):
        """
        Method returning a numpy array with elements
        [cdum_x_real, cdum_x_imag, cdum_y_real, cdum_y_imag,
         cdum_z_real, cdum_z_imag]
        between bands band_i and band_j (vasp 1-based indexing) for all
        kpoints.

        Args:
            band_i (Integer): Index of band i
            band_j (Integer): Index of band j

        Returns:
            a numpy list of elements for each kpoint

        Raises:
            ValueError: If either band index is out of bounds.
        """
        nb = self.nb_bands
        if not (1 <= band_i <= nb) or not (1 <= band_j <= nb):
            raise ValueError("Band index out of bounds")
        return self.data[:, band_i - 1, band_j - 1, :]
class UnconvergedVASPWarning(Warning):
    """Warning issued for an unconverged vasp run."""
|
{
"content_hash": "718be9784deff0c50c5ed308f7564896",
"timestamp": "",
"source": "github",
"line_count": 3618,
"max_line_length": 138,
"avg_line_length": 41.202045328911,
"alnum_prop": 0.5089119803580892,
"repo_name": "matk86/pymatgen",
"id": "d6ecd719a73b9fe40178f53469251f2f35e54c52",
"size": "149179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymatgen/io/vasp/outputs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5938"
},
{
"name": "CSS",
"bytes": "7550"
},
{
"name": "Common Lisp",
"bytes": "3029065"
},
{
"name": "HTML",
"bytes": "4886182"
},
{
"name": "Makefile",
"bytes": "5573"
},
{
"name": "Perl",
"bytes": "229104"
},
{
"name": "Propeller Spin",
"bytes": "4026362"
},
{
"name": "Python",
"bytes": "6054951"
},
{
"name": "Roff",
"bytes": "868"
}
],
"symlink_target": ""
}
|
PYTHON_VERSION_COMPATIBILITY = "PY3"

# Recipe modules this recipe depends on.
DEPS = [
    "crosvm",
]


def RunSteps(api):
    """Runs a trivial command inside the ChromeOS container build context."""
    with api.crosvm.cros_container_build_context():
        api.crosvm.step_in_container("true", ["true"], cros=True)


def GenTests(api):
    """Yields the recipe expectation tests."""
    yield api.test("basic")
|
{
"content_hash": "d2e9665ad9d655c7f52ba1c408d6a8a5",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 65,
"avg_line_length": 17.928571428571427,
"alnum_prop": 0.6454183266932271,
"repo_name": "google/crosvm",
"id": "e6fc32fb1139af00ff6bb832dc2f1a533d907265",
"size": "394",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "infra/recipe_modules/crosvm/examples/cros_container_build_context.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1648"
},
{
"name": "Batchfile",
"bytes": "108"
},
{
"name": "C",
"bytes": "217997"
},
{
"name": "Dockerfile",
"bytes": "4659"
},
{
"name": "Makefile",
"bytes": "7446"
},
{
"name": "Python",
"bytes": "205322"
},
{
"name": "Rust",
"bytes": "12683982"
},
{
"name": "Shell",
"bytes": "57495"
},
{
"name": "Starlark",
"bytes": "10836"
}
],
"symlink_target": ""
}
|
import json
import unittest
from service.LocalizationService import LocalizationService
from service.LocalizationService import LocalizationService
from service.LocalizationService import getErrorMessage
from service.LocalizationService import getTelegramMessage
from service.LocalizationService import COUNTRY
from service.LocalizationService import SPACE
from service.LocalizationService import IP
from service.LocalizationService import CITY
# Dummy error code used to exercise getErrorMessage.
CODE = '123456'

# Canned ipinfo-style JSON payload used as a test fixture.
JSON = json.dumps({"ip": "31.172.182.114",
                   "hostname": "No Hostname",
                   "city": "Wawel",
                   "region": "Malopolskie",
                   "country": "PL",
                   "loc": "50.0500,19.9333",
                   "org": "AS50481 Fibertech Networks Sp. z o.o.",
                   "postal": "30-811"
                   })


class LocalizationServiceTest(unittest.TestCase):
    """Unit tests for the LocalizationService message helpers."""

    # NOTE(review): instantiated at class-definition time and never used by
    # the tests below -- presumably has no side effects; confirm.
    localizationService = LocalizationService();

    def testGetTelegramMessage(self):
        # The telegram message should embed country, ip and city from the payload.
        resp = json.loads(JSON)
        self.assertEqual(getTelegramMessage(JSON),
                         "Your vpn connection is broken please reconnect. " + resp[COUNTRY] + SPACE + resp[IP] + SPACE + resp[CITY])

    def testGetErrorMessage(self):
        # The error message should end with the provided error code.
        self.assertEqual(getErrorMessage(CODE),
                         "If you need to make more requests or custom data, see our paid plans, which all have soft limits. " + CODE)


if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "4c3dcc1e17d7e0cb308fd6504d32c721",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 116,
"avg_line_length": 37.578947368421055,
"alnum_prop": 0.665266106442577,
"repo_name": "vermer/RASPBERRY-PI-scripts",
"id": "854a7a62a718158ef06976c595911b7eaaace6ee",
"size": "1450",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/LocalizationServiceTest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12171"
},
{
"name": "Shell",
"bytes": "477"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from photoboothapp import PhotoBoothApp
from imutils.video import VideoStream
import argparse
import time
# Command-line interface: --output is required; --picamera > 0 selects the
# Raspberry Pi camera module instead of a USB webcam.
ap = argparse.ArgumentParser()
ap.add_argument("-o", "--output", required=True, help="path to output directory to store snapshots")
# NOTE(review): this help text looks copy-pasted from --output -- presumably
# it should describe the picamera switch instead; confirm.
ap.add_argument("-p", "--picamera", type=int, default=-1, help="path to output directory to store snapshots")
args = vars(ap.parse_args())

# Let the camera sensor warm up before the UI starts grabbing frames.
print("[INFO] warming up camera...")
vs = VideoStream(usePiCamera=args["picamera"]>0).start()
time.sleep(2.0)

# Start the photo booth UI; mainloop blocks until the window is closed.
pba = PhotoBoothApp(vs,args["output"])
pba.root.mainloop()
|
{
"content_hash": "622e17c56a180749db8615396c35fbce",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 109,
"avg_line_length": 33.55555555555556,
"alnum_prop": 0.7218543046357616,
"repo_name": "AntonioQu/pythontest",
"id": "ad49bac2b9749aae419f8f77143d7458759ce786",
"size": "604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "photo_booth.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14482"
}
],
"symlink_target": ""
}
|
import threading
import Queue
import json
from gluon.common.particleGenerator.generator import getDataBaseGeneratorInstance as getDBGen
from gluon.db import api as dbapi
from oslo_log import log as logging
import etcd
LOG = logging.getLogger(__name__)

# etcd keys are written as <service>/<source>/<table>/<object-key>.
service = "net-l3vpn"
source = "proton"
# etcd client port (etcd default).
port = 2379


class MyData:
    """Simple namespace object holding module-level mutable state."""
    pass


# Shared state: the background sync thread, its running flag and work queue.
SyncData = MyData()
SyncData.sync_thread_running = False
SyncData.sync_queue = Queue.Queue()
class SyncThread(threading.Thread):
    """ A worker thread that takes takes commands to
        update etcd with table changes.

    Messages on ``input_q`` are dicts with keys "table", "key" and
    "operation" ("update" or "delete"). NOTE: this module targets
    Python 2 (``Queue`` module, ``iterkeys``).
    """

    def __init__(self, input_q):
        super(SyncThread, self).__init__()
        self.input_q = input_q
        self.db_instance = dbapi.get_instance()
        self.etcd_client = etcd.Client(port=port)
        LOG.info("SyncThread starting")

    def proc_sync_msg(self, msg):
        # Mirror one DB change into etcd; raises ValueError on any failure
        # so the run loop can terminate.
        try:
            obj_key = "_".join(msg["key"].split())  # Get rid of spaces
            etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format(service, source, msg["table"], obj_key)
            if msg["operation"] == "update":
                # Re-read the full row and store it as a JSON document with
                # all values stringified.
                table_class = getDBGen().get_table_class(msg["table"])
                data = self.db_instance.get_by_primary_key(table_class, msg["key"])
                values = data.as_dict()
                d = {}
                for key in values.iterkeys():
                    d[key] = str(values[key])
                json_str = json.dumps(d)
                self.etcd_client.write(etcd_key, json_str)
            elif msg["operation"] == "delete":
                self.etcd_client.delete(etcd_key)
            else:
                LOG.error("Unkown operation in msg %s" % (msg["operation"]))
        except Exception as e:
            print(e.__doc__)
            print(e.message)
            LOG.error("Error writing to etcd %s, %s" % (e.__doc__, e.message))
            raise ValueError

    def run(self):
        # Consume messages until a processing error occurs; the periodic
        # queue timeout keeps the thread responsive.
        while 1:
            try:
                msg = self.input_q.get(True, 10.0)
                LOG.info("SyncThread: received message %s " % msg)
                self.proc_sync_msg(msg)
            except Queue.Empty:
                LOG.debug("SyncThread: Queue timeout")
            except ValueError:
                LOG.error("Error processing sync message")
                break
        LOG.error("SyncThread exiting")
        SyncData.sync_thread_running = False
def start_sync_thread():
    """Create the etcd sync worker, record it in SyncData and start it."""
    worker = SyncThread(SyncData.sync_queue)
    SyncData.sync_thread = worker
    SyncData.sync_thread_running = True
    worker.start()
|
{
"content_hash": "92f6da6c9818689f21c593121f97aa02",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 95,
"avg_line_length": 33.64,
"alnum_prop": 0.5719381688466112,
"repo_name": "iawells/gluon",
"id": "ad0e0eaf392f15d6249dd01a74d582bd4ad15e75",
"size": "2523",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gluon/sync_etcd/thread.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34035"
}
],
"symlink_target": ""
}
|
from .BuildCookRun import BuildCookRun, BuildCookRunLogLineObserver
|
{
"content_hash": "00bd421f79570acb8c25a2299a3afdb6",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 67,
"avg_line_length": 68,
"alnum_prop": 0.8970588235294118,
"repo_name": "pampersrocker/buildbot-UnrealEngine",
"id": "f691272726aac5ea566a46390cfa9b2f7ffeadd7",
"size": "68",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "buildbot_UnrealEngine/AutomationTool/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "59901"
}
],
"symlink_target": ""
}
|
import unittest
from unittest.mock import patch
import backup_database
from lib import config
class StandardBackupTestCase(unittest.TestCase):
    """Dummy test for learning."""
    def testStandardBackup(self):
        # Replace the module's copyfile reference with a mock so the
        # backup runs without touching the filesystem.
        with patch("shutil.copyfile") as copy_mock:
            backup_database.copyfile = copy_mock
            backup_database.backup()
            self.assertTrue(copy_mock.called)
if __name__ == "__main__":
    # Bug fix: unittest.__main__ is a submodule, not a callable, so the
    # original call raised TypeError. unittest.main() is the documented
    # command-line entry point for running this test module.
    unittest.main()
|
{
"content_hash": "8d5f1d22da4ffa1865259083c9389a2c",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 48,
"avg_line_length": 23.941176470588236,
"alnum_prop": 0.7395577395577395,
"repo_name": "Varabe/Guild-Manager",
"id": "d12a353731d910c5107a115c54c4e77972e659e2",
"size": "407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/tests/test_backup_database.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "70123"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from exileui.admin import exileui, ExStacked, ExTabular, DateRangeEX
import models
import nested_admin
import forms
class OperarioAdmin(nested_admin.NestedModelAdmin):
    """Admin for Empleado (operario) records."""
    list_display = ('identificacion', 'email', 'first_name', 'last_name',
                    'direccion', 'telefono', 'nacimiento')
    list_filter = [('nacimiento', DateRangeEX)]
    search_fields = list_display
    form = forms.OperarioForm

    def get_form(self, request, obj=None, *args, **kwargs):
        """Use the edit variant of the form when changing an existing record."""
        if obj:
            kwargs['form'] = forms.OperarioFormEdit
        return super(OperarioAdmin, self).get_form(request, obj, *args, **kwargs)

    class Media:
        # Extra admin-page behaviour lives in this script.
        js = ('/static/empleados/js/empleados.js',)
class RecepcionistaAdmin(nested_admin.NestedModelAdmin):
    """Admin for Recepcionista records."""
    list_display = ('username', 'email', 'first_name', 'last_name',
                    'direccion', 'telefono', 'nacimiento')
    search_fields = list_display
    form = forms.RecepcionistaForm

    def get_form(self, request, obj=None, *args, **kwargs):
        """Use the edit variant of the form when changing an existing record."""
        if obj:
            kwargs['form'] = forms.RecepcionistaFormEdit
        return super(RecepcionistaAdmin, self).get_form(request, obj, *args, **kwargs)
class CajeroAdmin(nested_admin.NestedModelAdmin):
    """Admin for Cajero records."""
    list_display = ('username', 'email', 'first_name', 'last_name',
                    'direccion', 'telefono', 'nacimiento')
    search_fields = list_display
    form = forms.CajeroForm

    def get_form(self, request, obj=None, *args, **kwargs):
        """Use the edit variant of the form when changing an existing record."""
        if obj:
            kwargs['form'] = forms.CajeroFormEdit
        return super(CajeroAdmin, self).get_form(request, obj, *args, **kwargs)
# Hook each employee model up to the custom exileui admin site with its
# corresponding admin class defined above.
exileui.register(models.Empleado, OperarioAdmin)
exileui.register(models.Recepcionista, RecepcionistaAdmin)
exileui.register(models.Cajero, CajeroAdmin)
|
{
"content_hash": "0574d661163033c4d4c18b61b478538d",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 86,
"avg_line_length": 32.459016393442624,
"alnum_prop": 0.6525252525252525,
"repo_name": "exildev/AutoLavadox",
"id": "8549d4d80dcba6189fee632f03dea955253c5242",
"size": "1980",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "empleados/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "238948"
},
{
"name": "HTML",
"bytes": "38341"
},
{
"name": "JavaScript",
"bytes": "620499"
},
{
"name": "PLpgSQL",
"bytes": "19460"
},
{
"name": "Python",
"bytes": "226893"
},
{
"name": "Shell",
"bytes": "3252"
}
],
"symlink_target": ""
}
|
"""The hypervisors admin extension."""
from oslo_log import log as logging
from oslo_serialization import jsonutils
import webob.exc
from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.api.openstack.compute.views import hypervisors as hyper_view
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova.i18n import _
from nova.policies import hypervisors as hv_policies
from nova import servicegroup
LOG = logging.getLogger(__name__)
ALIAS = "os-hypervisors"
class HypervisorsController(wsgi.Controller):
    """The Hypervisors API controller for the OpenStack API."""

    _view_builder_class = hyper_view.ViewBuilder

    def __init__(self):
        self.host_api = compute.HostAPI()
        self.servicegroup_api = servicegroup.API()
        super(HypervisorsController, self).__init__()

    def _view_hypervisor(self, hypervisor, service, detail, req, servers=None,
                         **kwargs):
        """Build the dict view of a single hypervisor.

        :param hypervisor: ComputeNode object to render
        :param service: Service object hosting the compute node
        :param detail: include the extended resource and service fields
        :param req: the API request (used for microversion checks)
        :param servers: optional list of instances to embed in the view
        :param kwargs: extra key/value pairs merged into the view
        """
        alive = self.servicegroup_api.service_is_up(service)
        hyp_dict = {
            'id': hypervisor.id,
            'hypervisor_hostname': hypervisor.hypervisor_hostname,
            'state': 'up' if alive else 'down',
            'status': ('disabled' if service.disabled
                       else 'enabled'),
        }
        if detail and not servers:
            for field in ('vcpus', 'memory_mb', 'local_gb', 'vcpus_used',
                          'memory_mb_used', 'local_gb_used',
                          'hypervisor_type', 'hypervisor_version',
                          'free_ram_mb', 'free_disk_gb', 'current_workload',
                          'running_vms', 'disk_available_least', 'host_ip'):
                hyp_dict[field] = getattr(hypervisor, field)
            hyp_dict['service'] = {
                'id': service.id,
                'host': hypervisor.host,
                'disabled_reason': service.disabled_reason,
            }
            # Microversion 2.28 changed cpu_info from a raw string to a dict.
            if api_version_request.is_supported(req, min_version='2.28'):
                if hypervisor.cpu_info:
                    hyp_dict['cpu_info'] = jsonutils.loads(hypervisor.cpu_info)
                else:
                    hyp_dict['cpu_info'] = {}
            else:
                hyp_dict['cpu_info'] = hypervisor.cpu_info
        if servers:
            hyp_dict['servers'] = [dict(name=serv['name'], uuid=serv['uuid'])
                                   for serv in servers]
        # Add any additional info
        if kwargs:
            hyp_dict.update(kwargs)
        return hyp_dict

    def _list_hypervisors(self, req, detail, limit=None, marker=None,
                          links=False):
        """Shared implementation behind index and detail.

        The previous _index/_detail bodies were ~30 duplicated lines that
        differed only in the detail flag and the get_links call; this helper
        consolidates them while preserving both call shapes exactly.
        """
        context = req.environ['nova.context']
        context.can(hv_policies.BASE_POLICY_NAME)
        try:
            compute_nodes = self.host_api.compute_node_get_all(
                context, limit=limit, marker=marker)
        except exception.MarkerNotFound:
            msg = _('marker [%s] not found') % marker
            raise webob.exc.HTTPBadRequest(explanation=msg)
        req.cache_db_compute_nodes(compute_nodes)
        hypervisors_list = []
        for hyp in compute_nodes:
            try:
                service = self.host_api.service_get_by_compute_host(
                    context, hyp.host)
                hypervisors_list.append(
                    self._view_hypervisor(
                        hyp, service, detail, req))
            except exception.ComputeHostNotFound:
                # The compute service could be deleted which doesn't delete
                # the compute node record, that has to be manually removed
                # from the database so we just ignore it when listing nodes.
                LOG.debug('Unable to find service for compute node %s. The '
                          'service may be deleted and compute nodes need to '
                          'be manually cleaned up.', hyp.host)
        hypervisors_dict = dict(hypervisors=hypervisors_list)
        if links:
            # Preserve the original call shapes: index did not pass the
            # detail keyword, detail passed detail=True.
            if detail:
                hypervisors_links = self._view_builder.get_links(
                    req, hypervisors_list, detail=True)
            else:
                hypervisors_links = self._view_builder.get_links(
                    req, hypervisors_list)
            if hypervisors_links:
                hypervisors_dict['hypervisors_links'] = hypervisors_links
        return hypervisors_dict

    def _get_compute_node(self, req, context, id):
        """Look up one compute node, translating failure into HTTP 404."""
        try:
            hyp = self.host_api.compute_node_get(context, id)
            req.cache_db_compute_node(hyp)
        except (ValueError, exception.ComputeHostNotFound):
            msg = _("Hypervisor with ID '%s' could not be found.") % id
            raise webob.exc.HTTPNotFound(explanation=msg)
        return hyp

    @wsgi.Controller.api_version("2.33")  # noqa
    @extensions.expected_errors((400))
    def index(self, req):
        """Paginated hypervisor listing (microversion >= 2.33)."""
        limit, marker = common.get_limit_and_marker(req)
        return self._index(req, limit=limit, marker=marker, links=True)

    @wsgi.Controller.api_version("2.1", "2.32")  # noqa
    @extensions.expected_errors(())
    def index(self, req):
        """Unpaginated hypervisor listing (microversions 2.1 - 2.32)."""
        return self._index(req)

    def _index(self, req, limit=None, marker=None, links=False):
        return self._list_hypervisors(req, False, limit=limit, marker=marker,
                                      links=links)

    @wsgi.Controller.api_version("2.33")  # noqa
    @extensions.expected_errors((400))
    def detail(self, req):
        """Paginated detailed listing (microversion >= 2.33)."""
        limit, marker = common.get_limit_and_marker(req)
        return self._detail(req, limit=limit, marker=marker, links=True)

    @wsgi.Controller.api_version("2.1", "2.32")  # noqa
    @extensions.expected_errors(())
    def detail(self, req):
        """Unpaginated detailed listing (microversions 2.1 - 2.32)."""
        return self._detail(req)

    def _detail(self, req, limit=None, marker=None, links=False):
        return self._list_hypervisors(req, True, limit=limit, marker=marker,
                                      links=links)

    @extensions.expected_errors(404)
    def show(self, req, id):
        """Return detailed information about a single hypervisor."""
        context = req.environ['nova.context']
        context.can(hv_policies.BASE_POLICY_NAME)
        hyp = self._get_compute_node(req, context, id)
        service = self.host_api.service_get_by_compute_host(
            context, hyp.host)
        return dict(hypervisor=self._view_hypervisor(
            hyp, service, True, req))

    @extensions.expected_errors((400, 404, 501))
    def uptime(self, req, id):
        """Return the uptime reported by a hypervisor's host."""
        context = req.environ['nova.context']
        context.can(hv_policies.BASE_POLICY_NAME)
        hyp = self._get_compute_node(req, context, id)
        # Get the uptime
        try:
            host = hyp.host
            uptime = self.host_api.get_host_uptime(context, host)
        except NotImplementedError:
            common.raise_feature_not_supported()
        except exception.ComputeServiceUnavailable as e:
            raise webob.exc.HTTPBadRequest(explanation=e.format_message())
        service = self.host_api.service_get_by_compute_host(context, host)
        return dict(hypervisor=self._view_hypervisor(hyp, service, False, req,
                                                     uptime=uptime))

    @extensions.expected_errors(404)
    def search(self, req, id):
        """List hypervisors whose hostname matches the given substring."""
        context = req.environ['nova.context']
        context.can(hv_policies.BASE_POLICY_NAME)
        hypervisors = self.host_api.compute_node_search_by_hypervisor(
            context, id)
        if hypervisors:
            return dict(hypervisors=[self._view_hypervisor(
                hyp,
                self.host_api.service_get_by_compute_host(
                    context, hyp.host),
                False, req)
                for hyp in hypervisors])
        else:
            msg = _("No hypervisor matching '%s' could be found.") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

    @extensions.expected_errors(404)
    def servers(self, req, id):
        """List matching hypervisors, each with its hosted instances."""
        context = req.environ['nova.context']
        context.can(hv_policies.BASE_POLICY_NAME)
        compute_nodes = self.host_api.compute_node_search_by_hypervisor(
            context, id)
        if not compute_nodes:
            msg = _("No hypervisor matching '%s' could be found.") % id
            raise webob.exc.HTTPNotFound(explanation=msg)
        hypervisors = []
        for compute_node in compute_nodes:
            instances = self.host_api.instance_get_all_by_host(
                context, compute_node.host)
            service = self.host_api.service_get_by_compute_host(
                context, compute_node.host)
            hyp = self._view_hypervisor(compute_node, service, False, req,
                                        instances)
            hypervisors.append(hyp)
        return dict(hypervisors=hypervisors)

    @extensions.expected_errors(())
    def statistics(self, req):
        """Return aggregate resource statistics across all hypervisors."""
        context = req.environ['nova.context']
        context.can(hv_policies.BASE_POLICY_NAME)
        stats = self.host_api.compute_node_statistics(context)
        return dict(hypervisor_statistics=stats)
class Hypervisors(extensions.V21APIExtensionBase):
    """Admin-only hypervisor administration."""
    name = "Hypervisors"
    alias = ALIAS
    version = 1

    def get_resources(self):
        """Expose the os-hypervisors resource with its extra actions."""
        collection = {'detail': 'GET', 'statistics': 'GET'}
        member = {'uptime': 'GET', 'search': 'GET', 'servers': 'GET'}
        resource = extensions.ResourceExtension(
            ALIAS,
            HypervisorsController(),
            collection_actions=collection,
            member_actions=member)
        return [resource]

    def get_controller_extensions(self):
        """This extension adds no controller extensions."""
        return []
|
{
"content_hash": "a413f83d551637735190c644c31b80a5",
"timestamp": "",
"source": "github",
"line_count": 270,
"max_line_length": 79,
"avg_line_length": 41.111111111111114,
"alnum_prop": 0.5736936936936937,
"repo_name": "hanlind/nova",
"id": "ce28fe2d8f667cdc71332d2d9555c99fe69b2ad3",
"size": "11740",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/hypervisors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "3325"
},
{
"name": "Python",
"bytes": "18681206"
},
{
"name": "Shell",
"bytes": "32127"
},
{
"name": "Smarty",
"bytes": "306159"
}
],
"symlink_target": ""
}
|
import touchdown.aws # noqa
import touchdown.config # noqa
import touchdown.goals # noqa
import touchdown.gpg # noqa
import touchdown.local # noqa
import touchdown.notifications # noqa
import touchdown.provisioner # noqa
import touchdown.ssh # noqa
import touchdown.template # noqa
from touchdown.core import Workspace
__all__ = ["Workspace"]
|
{
"content_hash": "28e3ae4cde18e0563735a5ec5ba17eec",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 38,
"avg_line_length": 29.416666666666668,
"alnum_prop": 0.7705382436260623,
"repo_name": "yaybu/touchdown",
"id": "743a348b49ffb0ee6911baef80148b7d90697273",
"size": "931",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "touchdown/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "760"
},
{
"name": "Python",
"bytes": "1047173"
}
],
"symlink_target": ""
}
|
import logging
from django.core.management.base import BaseCommand
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Permission
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    This management command creates non-standard Django permissions
    """
    help = 'Usage: manage.py initialize_permissions'

    def handle(self, *args, **options):
        """Create the custom 'change_google_sheet' permission if missing."""
        try:
            content_type_system_settings = ContentType.objects.get(
                app_label='dojo', model='system_settings')
            # exists() issues a cheap presence query instead of counting
            # every matching row just to compare the count to zero.
            google_permission_exists = Permission.objects.filter(
                content_type=content_type_system_settings,
                codename='change_google_sheet').exists()
            if not google_permission_exists:
                Permission.objects.create(
                    name='Can change Google Sheet',
                    content_type=content_type_system_settings,
                    codename='change_google_sheet'
                )
            logger.info('Non-standard permissions have been created')
        except ContentType.DoesNotExist:
            logger.warning('No content type found for dojo.system_settings')
        except ContentType.MultipleObjectsReturned:
            logger.warning('Multiple content types found for dojo.system_settings')
|
{
"content_hash": "1ded3b5bb18cd6318594d85f7fd915a0",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 109,
"avg_line_length": 40.4375,
"alnum_prop": 0.6638330757341576,
"repo_name": "rackerlabs/django-DefectDojo",
"id": "61630eef2f10fb8782eca6cdeae51d8eca47df49",
"size": "1294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dojo/management/commands/initialize_permissions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "18132"
},
{
"name": "Groff",
"bytes": "91"
},
{
"name": "HTML",
"bytes": "666571"
},
{
"name": "JavaScript",
"bytes": "6393"
},
{
"name": "Python",
"bytes": "524728"
},
{
"name": "Shell",
"bytes": "20558"
},
{
"name": "XSLT",
"bytes": "6624"
}
],
"symlink_target": ""
}
|
"""Generic Z-Wave Entity Classes."""
import copy
import logging
from openzwavemqtt.const import (
EVENT_INSTANCE_STATUS_CHANGED,
EVENT_VALUE_CHANGED,
OZW_READY_STATES,
CommandClass,
ValueIndex,
)
from openzwavemqtt.models.node import OZWNode
from openzwavemqtt.models.value import OZWValue
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from . import const
from .const import DOMAIN, PLATFORMS
from .discovery import check_node_schema, check_value_schema
_LOGGER = logging.getLogger(__name__)
OZW_READY_STATES_VALUES = {st.value for st in OZW_READY_STATES}
class ZWaveDeviceEntityValues:
    """Manages entity access to the underlying Z-Wave value objects.

    Collects the OZWValues named in a discovery schema; once every
    required value is present, dispatches the collection to the matching
    platform so the actual entity gets created.
    """
    def __init__(self, hass, options, schema, primary_value):
        """Initialize the values object with the passed entity schema.

        hass: the Home Assistant instance.
        options: OZW options object used to listen for OZW events.
        schema: discovery schema describing which values belong together.
        primary_value: the OZWValue whose discovery created this collection.
        """
        self._hass = hass
        self._entity_created = False
        # Deep copy: the per-instance edits below must not leak back into
        # the shared schema definition used by other discoveries.
        self._schema = copy.deepcopy(schema)
        self._values = {}
        self.options = options
        # Go through values listed in the discovery schema, initialize them,
        # and add a check to the schema to make sure the Instance matches.
        for name, disc_settings in self._schema[const.DISC_VALUES].items():
            self._values[name] = None
            disc_settings[const.DISC_INSTANCE] = (primary_value.instance,)
        self._values[const.DISC_PRIMARY] = primary_value
        self._node = primary_value.node
        self._schema[const.DISC_NODE_ID] = [self._node.node_id]
    def async_setup(self):
        """Set up values instance."""
        # Check values that have already been discovered for node
        # and see if they match the schema and need added to the entity.
        for value in self._node.values():
            self.async_check_value(value)
        # Check if all the _required_ values in the schema are present and
        # create the entity.
        self._async_check_entity_ready()
    def __getattr__(self, name):
        """Get the specified value for this entity."""
        # Only invoked for attributes not found via normal lookup, so
        # internal attributes like self._values are unaffected.
        return self._values.get(name, None)
    def __iter__(self):
        """Allow iteration over all values."""
        return iter(self._values.values())
    def __contains__(self, name):
        """Check if the specified name/key exists in the values."""
        return name in self._values
    @callback
    def async_check_value(self, value):
        """Check if the new value matches a missing value for this entity.
        If a match is found, it is added to the values mapping.
        """
        # Make sure the node matches the schema for this entity.
        if not check_node_schema(value.node, self._schema):
            return
        # Go through the possible values for this entity defined by the schema.
        for name in self._values:
            # Skip if it's already been added.
            if self._values[name] is not None:
                continue
            # Skip if the value doesn't match the schema.
            if not check_value_schema(value, self._schema[const.DISC_VALUES][name]):
                continue
            # Add value to mapping.
            self._values[name] = value
            # If the entity has already been created, notify it of the new value.
            if self._entity_created:
                async_dispatcher_send(
                    self._hass, f"{DOMAIN}_{self.values_id}_value_added"
                )
            # Check if entity has all required values and create the entity if needed.
            self._async_check_entity_ready()
    @callback
    def _async_check_entity_ready(self):
        """Check if all required values are discovered and create entity."""
        # Abort if the entity has already been created
        if self._entity_created:
            return
        # Go through values defined in the schema and abort if a required value is missing.
        for name, disc_settings in self._schema[const.DISC_VALUES].items():
            if self._values[name] is None and not disc_settings.get(
                const.DISC_OPTIONAL
            ):
                return
        # We have all the required values, so create the entity.
        component = self._schema[const.DISC_COMPONENT]
        _LOGGER.debug(
            "Adding Node_id=%s Generic_command_class=%s, "
            "Specific_command_class=%s, "
            "Command_class=%s, Index=%s, Value type=%s, "
            "Genre=%s as %s",
            self._node.node_id,
            self._node.node_generic,
            self._node.node_specific,
            self.primary.command_class,
            self.primary.index,
            self.primary.type,
            self.primary.genre,
            component,
        )
        # Mark created before dispatching so late-arriving values take the
        # "notify existing entity" path in async_check_value.
        self._entity_created = True
        if component in PLATFORMS:
            async_dispatcher_send(self._hass, f"{DOMAIN}_new_{component}", self)
    @property
    def values_id(self):
        """Identification for this values collection."""
        return create_value_id(self.primary)
class ZWaveDeviceEntity(Entity):
    """Generic Entity Class for a Z-Wave Device."""
    def __init__(self, values):
        """Initialize a generic Z-Wave device entity.

        values: the ZWaveDeviceEntityValues collection backing this entity.
        """
        self.values = values
        self.options = values.options
    @callback
    def on_value_update(self):
        """Call when a value is added/updated in the entity EntityValues Collection.
        To be overridden by platforms needing this event.
        """
    async def async_added_to_hass(self):
        """Call when entity is added."""
        # Add dispatcher and OZW listeners callbacks.
        # Add to on_remove so they will be cleaned up on entity removal.
        self.async_on_remove(
            self.options.listen(EVENT_VALUE_CHANGED, self._value_changed)
        )
        self.async_on_remove(
            self.options.listen(EVENT_INSTANCE_STATUS_CHANGED, self._instance_updated)
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass, const.SIGNAL_DELETE_ENTITY, self._delete_callback
            )
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{DOMAIN}_{self.values.values_id}_value_added",
                self._value_added,
            )
        )
    @property
    def device_info(self):
        """Return device information for the device registry."""
        node = self.values.primary.node
        node_instance = self.values.primary.instance
        dev_id = create_device_id(node, self.values.primary.instance)
        node_firmware = node.get_value(
            CommandClass.VERSION, ValueIndex.VERSION_APPLICATION
        )
        device_info = {
            "identifiers": {(DOMAIN, dev_id)},
            "name": create_device_name(node),
            "manufacturer": node.node_manufacturer_name,
            "model": node.node_product_name,
        }
        if node_firmware is not None:
            device_info["sw_version"] = node_firmware.value
        # device with multiple instances is split up into virtual devices for each instance
        if node_instance > 1:
            parent_dev_id = create_device_id(node)
            device_info["name"] += f" - Instance {node_instance}"
            device_info["via_device"] = (DOMAIN, parent_dev_id)
        return device_info
    @property
    def extra_state_attributes(self):
        """Return the device specific state attributes."""
        return {const.ATTR_NODE_ID: self.values.primary.node.node_id}
    @property
    def name(self):
        """Return the name of the entity."""
        node = self.values.primary.node
        return f"{create_device_name(node)}: {self.values.primary.label}"
    @property
    def unique_id(self):
        """Return the unique_id of the entity."""
        return self.values.values_id
    @property
    def available(self) -> bool:
        """Return entity availability."""
        # Use OZW Daemon status for availability.
        instance_status = self.values.primary.ozw_instance.get_status()
        return instance_status and instance_status.status in OZW_READY_STATES_VALUES
    @callback
    def _value_changed(self, value):
        """Call when a value from ZWaveDeviceEntityValues is changed.
        Should not be overridden by subclasses.
        """
        # Ignore changes to values that are not part of this entity.
        if value.value_id_key in (v.value_id_key for v in self.values if v):
            self.on_value_update()
            self.async_write_ha_state()
    @callback
    def _value_added(self):
        """Call when a value from ZWaveDeviceEntityValues is added.
        Should not be overridden by subclasses.
        """
        self.on_value_update()
    @callback
    def _instance_updated(self, new_status):
        """Call when the instance status changes.
        Should not be overridden by subclasses.
        """
        # new_status is unused directly; availability is recomputed from the
        # instance status when the state is written.
        self.on_value_update()
        self.async_write_ha_state()
    @property
    def should_poll(self):
        """No polling needed."""
        return False
    async def _delete_callback(self, values_id):
        """Remove this entity."""
        if not self.values:
            return  # race condition: delete already requested
        if values_id == self.values.values_id:
            await self.async_remove(force_remove=True)
def create_device_name(node: OZWNode):
    """Generate sensible (short) default device name from a OZWNode."""
    # Prefer a custom name set via OZWAdmin, if present.
    custom_name = node.node_name
    if custom_name:
        return custom_name
    # Then the short device name from the node metadata.
    meta = node.meta_data
    if meta and meta.get("Name"):
        return meta["Name"]
    # Otherwise fall back through product/device-type strings.
    for fallback in (node.node_product_name,
                     node.node_device_type_string,
                     node.node_specific_string):
        if fallback:
            return fallback
    # Last resort: use Node id (should never happen, but just in case).
    return f"Node {node.id}"
def create_device_id(node: OZWNode, node_instance: int = 1):
    """Generate unique device_id from a OZWNode."""
    # <ozw_instance>.<node_id>.<node_instance>
    return f"{node.parent.id}.{node.node_id}.{node_instance}"
def create_value_id(value: OZWValue):
    """Generate unique value_id from an OZWValue."""
    # [OZW_INSTANCE_ID]-[NODE_ID]-[VALUE_ID_KEY]
    node = value.node
    parts = (node.parent.id, node.id, value.value_id_key)
    return "-".join(str(part) for part in parts)
|
{
"content_hash": "dfe1b09d3d8bfe059fbe134723a61caa",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 91,
"avg_line_length": 34.96369636963696,
"alnum_prop": 0.6181801019444969,
"repo_name": "kennedyshead/home-assistant",
"id": "305601a2333e0b6e4adad4bc92ae8f3648e785ba",
"size": "10594",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/ozw/entity.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "33970989"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
|
"""Hook for HashiCorp Vault"""
import json
from typing import Optional, Tuple
import hvac
from hvac.exceptions import VaultError
from requests import Response
from airflow.hooks.base import BaseHook
from airflow.providers.hashicorp._internal_client.vault_client import ( # noqa
DEFAULT_KUBERNETES_JWT_PATH,
DEFAULT_KV_ENGINE_VERSION,
_VaultClient,
)
class VaultHook(BaseHook):
"""
Hook to Interact with HashiCorp Vault KeyValue Secret engine.
HashiCorp hvac documentation:
* https://hvac.readthedocs.io/en/stable/
You connect to the host specified as host in the connection. The login/password from the connection
are used as credentials usually and you can specify different authentication parameters
via init params or via corresponding extras in the connection.
The mount point should be placed as a path in the URL - similarly to Vault's URL schema:
This indicates the "path" the secret engine is mounted on. Default id not specified is "secret".
Note that this ``mount_point`` is not used for authentication if authentication is done via a
different engines. Each engine uses it's own engine-specific authentication mount_point.
The extras in the connection are named the same as the parameters ('kv_engine_version', 'auth_type', ...).
You can also use gcp_keyfile_dict extra to pass json-formatted dict in case of 'gcp' authentication.
The URL schemas supported are "vault", "http" (using http to connect to the vault) or
"vaults" and "https" (using https to connect to the vault).
Example URL:
.. code-block::
vault://user:password@host:port/mount_point?kv_engine_version=1&auth_type=github
Login/Password are used as credentials:
* approle: password -> secret_id
* github: password -> token
* token: password -> token
* aws_iam: login -> key_id, password -> secret_id
* azure: login -> client_id, password -> client_secret
* ldap: login -> username, password -> password
* userpass: login -> username, password -> password
* radius: password -> radius_secret
:param vault_conn_id: The id of the connection to use
:type vault_conn_id: str
:param auth_type: Authentication Type for the Vault. Default is ``token``. Available values are:
('approle', 'github', 'gcp', 'kubernetes', 'ldap', 'token', 'userpass')
:type auth_type: str
:param auth_mount_point: It can be used to define mount_point for authentication chosen
Default depends on the authentication method used.
:type auth_mount_point: str
:param kv_engine_version: Select the version of the engine to run (``1`` or ``2``). Defaults to
version defined in connection or ``2`` if not defined in connection.
:type kv_engine_version: int
:param role_id: Role ID for Authentication (for ``approle``, ``aws_iam`` auth_types)
:type role_id: str
:param kubernetes_role: Role for Authentication (for ``kubernetes`` auth_type)
:type kubernetes_role: str
:param kubernetes_jwt_path: Path for kubernetes jwt token (for ``kubernetes`` auth_type, default:
``/var/run/secrets/kubernetes.io/serviceaccount/token``)
:type kubernetes_jwt_path: str
:param token_path: path to file containing authentication token to include in requests sent to Vault
(for ``token`` and ``github`` auth_type).
:type token_path: str
:param gcp_key_path: Path to Google Cloud Service Account key file (JSON) (for ``gcp`` auth_type)
Mutually exclusive with gcp_keyfile_dict
:type gcp_key_path: str
:param gcp_scopes: Comma-separated string containing OAuth2 scopes (for ``gcp`` auth_type)
:type gcp_scopes: str
:param azure_tenant_id: The tenant id for the Azure Active Directory (for ``azure`` auth_type)
:type azure_tenant_id: str
:param azure_resource: The configured URL for the application registered in Azure Active Directory
(for ``azure`` auth_type)
:type azure_resource: str
:param radius_host: Host for radius (for ``radius`` auth_type)
:type radius_host: str
:param radius_port: Port for radius (for ``radius`` auth_type)
:type radius_port: int
"""
conn_name_attr = 'vault_conn_id'
default_conn_name = 'imap_default'
conn_type = 'vault'
hook_name = 'Hashicorp Vault'
    def __init__(  # pylint: disable=too-many-arguments
        self,
        vault_conn_id: str = default_conn_name,
        auth_type: Optional[str] = None,
        auth_mount_point: Optional[str] = None,
        kv_engine_version: Optional[int] = None,
        role_id: Optional[str] = None,
        kubernetes_role: Optional[str] = None,
        kubernetes_jwt_path: Optional[str] = None,
        token_path: Optional[str] = None,
        gcp_key_path: Optional[str] = None,
        gcp_scopes: Optional[str] = None,
        azure_tenant_id: Optional[str] = None,
        azure_resource: Optional[str] = None,
        radius_host: Optional[str] = None,
        radius_port: Optional[int] = None,
    ):
        """Build the internal ``_VaultClient`` from the Airflow connection.

        Explicit constructor arguments take precedence; any parameter left
        unset is resolved from the connection's extra JSON.
        """
        super().__init__()
        self.connection = self.get_connection(vault_conn_id)
        # Auth method: explicit argument, then connection extras, then 'token'.
        if not auth_type:
            auth_type = self.connection.extra_dejson.get('auth_type') or "token"
        if not auth_mount_point:
            auth_mount_point = self.connection.extra_dejson.get('auth_mount_point')
        # KV engine version: explicit argument, then extras, then the default.
        if not kv_engine_version:
            conn_version = self.connection.extra_dejson.get("kv_engine_version")
            try:
                kv_engine_version = int(conn_version) if conn_version else DEFAULT_KV_ENGINE_VERSION
            except ValueError:
                raise VaultError(f"The version is not an int: {conn_version}. ")
        if auth_type in ["approle", "aws_iam"]:
            if not role_id:
                role_id = self.connection.extra_dejson.get('role_id')
        # Each auth method resolves only its own parameters; parameters of all
        # other methods are forced to None so stale values are never passed on.
        azure_resource, azure_tenant_id = (
            self._get_azure_parameters_from_connection(azure_resource, azure_tenant_id)
            if auth_type == 'azure'
            else (None, None)
        )
        gcp_key_path, gcp_keyfile_dict, gcp_scopes = (
            self._get_gcp_parameters_from_connection(gcp_key_path, gcp_scopes)
            if auth_type == 'gcp'
            else (None, None, None)
        )
        kubernetes_jwt_path, kubernetes_role = (
            self._get_kubernetes_parameters_from_connection(kubernetes_jwt_path, kubernetes_role)
            if auth_type == 'kubernetes'
            else (None, None)
        )
        radius_host, radius_port = (
            self._get_radius_parameters_from_connection(radius_host, radius_port)
            if auth_type == 'radius'
            else (None, None)
        )
        # Map the connection type to a URL scheme; 'vault'/'vaults' are
        # aliases for http/https.
        if self.connection.conn_type == 'vault':
            conn_protocol = 'http'
        elif self.connection.conn_type == 'vaults':
            conn_protocol = 'https'
        elif self.connection.conn_type == 'http':
            conn_protocol = 'http'
        elif self.connection.conn_type == 'https':
            conn_protocol = 'https'
        else:
            raise VaultError("The url schema must be one of ['http', 'https', 'vault', 'vaults' ]")
        url = f"{conn_protocol}://{self.connection.host}"
        if self.connection.port:
            url += f":{self.connection.port}"
        # Schema is really path in the Connection definition. This is pretty confusing because of URL schema
        mount_point = self.connection.schema if self.connection.schema else 'secret'
        # NOTE(review): login/password are fanned out to every credential-like
        # field; presumably _VaultClient uses only the ones relevant to the
        # chosen auth_type — confirm against _VaultClient.
        self.vault_client = _VaultClient(
            url=url,
            auth_type=auth_type,
            auth_mount_point=auth_mount_point,
            mount_point=mount_point,
            kv_engine_version=kv_engine_version,
            token=self.connection.password,
            token_path=token_path,
            username=self.connection.login,
            password=self.connection.password,
            key_id=self.connection.login,
            secret_id=self.connection.password,
            role_id=role_id,
            kubernetes_role=kubernetes_role,
            kubernetes_jwt_path=kubernetes_jwt_path,
            gcp_key_path=gcp_key_path,
            gcp_keyfile_dict=gcp_keyfile_dict,
            gcp_scopes=gcp_scopes,
            azure_tenant_id=azure_tenant_id,
            azure_resource=azure_resource,
            radius_host=radius_host,
            radius_secret=self.connection.password,
            radius_port=radius_port,
        )
def _get_kubernetes_parameters_from_connection(
self, kubernetes_jwt_path: Optional[str], kubernetes_role: Optional[str]
) -> Tuple[str, Optional[str]]:
if not kubernetes_jwt_path:
kubernetes_jwt_path = self.connection.extra_dejson.get("kubernetes_jwt_path")
if not kubernetes_jwt_path:
kubernetes_jwt_path = DEFAULT_KUBERNETES_JWT_PATH
if not kubernetes_role:
kubernetes_role = self.connection.extra_dejson.get("kubernetes_role")
return kubernetes_jwt_path, kubernetes_role
def _get_gcp_parameters_from_connection(
self,
gcp_key_path: Optional[str],
gcp_scopes: Optional[str],
) -> Tuple[Optional[str], Optional[dict], Optional[str]]:
if not gcp_scopes:
gcp_scopes = self.connection.extra_dejson.get("gcp_scopes")
if not gcp_key_path:
gcp_key_path = self.connection.extra_dejson.get("gcp_key_path")
string_keyfile_dict = self.connection.extra_dejson.get("gcp_keyfile_dict")
gcp_keyfile_dict = json.loads(string_keyfile_dict) if string_keyfile_dict else None
return gcp_key_path, gcp_keyfile_dict, gcp_scopes
def _get_azure_parameters_from_connection(
self, azure_resource: Optional[str], azure_tenant_id: Optional[str]
) -> Tuple[Optional[str], Optional[str]]:
if not azure_tenant_id:
azure_tenant_id = self.connection.extra_dejson.get("azure_tenant_id")
if not azure_resource:
azure_resource = self.connection.extra_dejson.get("azure_resource")
return azure_resource, azure_tenant_id
def _get_radius_parameters_from_connection(
self, radius_host: Optional[str], radius_port: Optional[int]
) -> Tuple[Optional[str], Optional[int]]:
if not radius_port:
radius_port_str = self.connection.extra_dejson.get("radius_port")
if radius_port_str:
try:
radius_port = int(radius_port_str)
except ValueError:
raise VaultError(f"Radius port was wrong: {radius_port_str}")
if not radius_host:
radius_host = self.connection.extra_dejson.get("radius_host")
return radius_host, radius_port
    def get_conn(self) -> hvac.Client:
        """
        Retrieves connection to Vault.

        The returned object is the ``hvac.Client`` exposed by the internal
        ``_VaultClient`` wrapper built in ``__init__``.

        :rtype: hvac.Client
        :return: connection used.
        """
        return self.vault_client.client
    def get_secret(self, secret_path: str, secret_version: Optional[int] = None) -> Optional[dict]:
        """
        Get secret value from the engine.

        See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v1.html
        and https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.

        :param secret_path: Path of the secret
        :type secret_path: str
        :param secret_version: Optional version of key to read - can only be used in case of version 2 of KV
        :type secret_version: int
        :rtype: dict
        :return: secret stored in the vault as a dictionary
        """
        return self.vault_client.get_secret(secret_path=secret_path, secret_version=secret_version)
    def get_secret_metadata(self, secret_path: str) -> Optional[dict]:
        """
        Reads secret metadata (including versions) from the engine. It is only valid for KV version 2.

        See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.

        :param secret_path: Path to read from
        :type secret_path: str
        :rtype: dict
        :return: secret metadata. This is a Dict containing metadata for the secret.
        """
        return self.vault_client.get_secret_metadata(secret_path=secret_path)
    def get_secret_including_metadata(
        self, secret_path: str, secret_version: Optional[int] = None
    ) -> Optional[dict]:
        """
        Reads secret including metadata. It is only valid for KV version 2.

        See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.

        :param secret_path: Path of the secret
        :type secret_path: str
        :param secret_version: Optional version of key to read - can only be used in case of version 2 of KV
        :type secret_version: int
        :rtype: dict
        :return: key info. This is a Dict with "data" mapping keeping secret
            and "metadata" mapping keeping metadata of the secret.
        """
        return self.vault_client.get_secret_including_metadata(
            secret_path=secret_path, secret_version=secret_version
        )
    def create_or_update_secret(
        self, secret_path: str, secret: dict, method: Optional[str] = None, cas: Optional[int] = None
    ) -> Response:
        """
        Creates or updates secret.

        See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v1.html
        and https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.

        :param secret_path: Path to read from
        :type secret_path: str
        :param secret: Secret to create or update for the path specified
        :type secret: dict
        :param method: Optional parameter to explicitly request a POST (create) or PUT (update) request to
            the selected kv secret engine. If no argument is provided for this parameter, hvac attempts to
            intelligently determine which method is appropriate. Only valid for KV engine version 1
        :type method: str
        :param cas: Set the "cas" value to use a Check-And-Set operation. If not set the write will be
            allowed. If set to 0 a write will only be allowed if the key doesn't exist.
            If the index is non-zero the write will only be allowed if the key's current version
            matches the version specified in the cas parameter. Only valid for KV engine version 2.
        :type cas: int
        :rtype: requests.Response
        :return: The response of the create_or_update_secret request.
        """
        return self.vault_client.create_or_update_secret(
            secret_path=secret_path, secret=secret, method=method, cas=cas
        )
|
{
"content_hash": "32e922d484153f5dd90a6bfff054fd5b",
"timestamp": "",
"source": "github",
"line_count": 340,
"max_line_length": 110,
"avg_line_length": 43.529411764705884,
"alnum_prop": 0.6396621621621622,
"repo_name": "DinoCow/airflow",
"id": "f14c022e2e571a0800af3b7dd24da40bc258b5d8",
"size": "15586",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "airflow/providers/hashicorp/hooks/vault.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "56963"
},
{
"name": "HTML",
"bytes": "140781"
},
{
"name": "JavaScript",
"bytes": "1370838"
},
{
"name": "Mako",
"bytes": "1037"
},
{
"name": "Python",
"bytes": "1473771"
},
{
"name": "Shell",
"bytes": "18638"
}
],
"symlink_target": ""
}
|
"""Energy for a two-phase, liquid+water vapor evaluator."""
import sys, os
sys.path.append(os.path.join(os.environ['ATS_SRC_DIR'], "tools", "evaluator_generator"))
from evaluator_generator import generate_evaluator
deps = [("porosity", "phi"),
("base_porosity", "phi0"),
("saturation_liquid", "sl"),
("molar_density_liquid", "nl"),
("internal_energy_liquid", "ul"),
("saturation_gas", "sg"),
("molar_density_gas", "ng"),
("internal_energy_gas", "ug"),
("density_rock", "rho_r"),
("internal_energy_rock", "ur"),
("cell_volume", "cv")
]
params = []
import sympy
phi, phi0, sl, nl, ul, sg, ng, ug, rho_r, ur, cv = sympy.var("phi, phi0, sl, nl, ul, sg, ng, ug, rho_r, ur, cv")
expression = (phi*(sl*nl*ul + sg*ng*ug) + (1-phi0)*rho_r*ur) * cv;
generate_evaluator("liquid_gas_energy", "Energy",
"liquid+gas energy", "energy",
deps, params, expression=expression, doc=__doc__)
|
{
"content_hash": "fbe9ddb22d7c6e21565e9ec6b4592005",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 112,
"avg_line_length": 37.074074074074076,
"alnum_prop": 0.5634365634365635,
"repo_name": "amanzi/ats-dev",
"id": "a89d4baf300392ef68d14083595f05fc491ad1df",
"size": "1001",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/pks/energy/constitutive_relations/energy/liquid_gas_energy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3360"
},
{
"name": "C++",
"bytes": "2842879"
},
{
"name": "CMake",
"bytes": "97837"
},
{
"name": "Fortran",
"bytes": "8905"
},
{
"name": "Python",
"bytes": "6272"
}
],
"symlink_target": ""
}
|
import errno
import sys
import urlparse
import archiveinstaller.downloader
import archiveinstaller.extractor
import os
from abc import abstractmethod, ABCMeta
from os.path import expanduser
from os.path import join
from archiveinstaller.downloader import ArchivingDownloader
def create_installer(app_directory, download_cache_directory=None):
    """Create an ApplicationInstaller rooted at *app_directory*.

    When a cache directory is given it is created (if missing) and the
    downloader is wrapped in an ArchivingDownloader that keeps archives there.
    """
    dl = archiveinstaller.downloader.Downloader()
    if download_cache_directory:
        mkdir_p(download_cache_directory)
        dl = ArchivingDownloader(download_cache_directory, dl)
    return ApplicationInstaller(app_directory, dl)
def _search_path_for(pathname_suffix):
    """Return the first existing ``<sys.path entry>/<pathname_suffix>``.

    :param pathname_suffix: relative path to look for under each sys.path entry
    :return: the first matching absolute path, or None when nothing matches
    """
    candidates = (os.path.join(directory, pathname_suffix) for directory in sys.path)
    # BUG FIX: the original indexed `filter(...)[0]`, which only works on
    # Python 2 (a py3 filter object is not subscriptable). next() with a
    # default works on both and stops at the first existing candidate.
    return next((candidate for candidate in candidates if os.path.exists(candidate)), None)
def mkdir_p(path):
    """Create *path* and any missing parents, like ``mkdir -p``.

    An OSError is swallowed only when the path already exists as a
    directory; every other failure propagates.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
class EnvironmentConfiguration:
    """Read-only view of the environment section of an application config."""

    def __init__(self, dictionary):
        self.dictionary = dictionary

    def path_element(self):
        """Return the configured PATH entry, or None when not configured."""
        return self.dictionary.get('path', None)

    def environment_variables(self):
        """Return the mapping of environment variables (empty when absent)."""
        return self.dictionary.get('env', {})
class ArchiveConfiguration:
    """Read-only view of the archive section of an application config."""

    def __init__(self, dictionary):
        self.dictionary = dictionary

    def url(self):
        """Return the download URL template; a missing key raises KeyError."""
        return self.dictionary['url']

    def nesting_level(self):
        """Return the archive nesting level (defaults to 0); used by the extractor."""
        return self.dictionary.get('nesting_level', 0)
class Application:
    """An installable application: a name, a version, and its configuration."""

    def __init__(self, name, version, archive_configuration, environment_configuration):
        self.name = name
        self.version = version
        self.environment_configuration = environment_configuration
        self.archive_configuration = archive_configuration

    def filename(self):
        """Return the archive basename, derived from the download URL's path."""
        return os.path.basename(urlparse.urlparse(self.url()).path)

    def archive(self):
        return self.archive_configuration

    def environment(self):
        return self.environment_configuration

    def url(self):
        """Return the download URL with the '%(version)s' placeholder filled in."""
        return self.archive_configuration.url() % {'version': self.version}
class InstallationStep(object):
    """One step of an application installation.

    Concrete steps implement ``install`` and are invoked in sequence by
    ``ApplicationInstaller.install``.
    """
    # Python 2 style metaclass declaration; makes @abstractmethod effective.
    __metaclass__ = ABCMeta
    def __init__(self):
        pass
    # Perform this step for *application* within *directory_structure*;
    # *template_data* holds substitution values for '%(...)s' templates.
    @abstractmethod
    def install(self, directory_structure, application, template_data): pass
class FileWriter:
    """Tiny wrapper around open()+write(), kept as a class so it can be swapped out."""

    def __init__(self):
        pass

    # noinspection PyMethodMayBeStatic
    def write_to(self, path, mode, content):
        """Open *path* with *mode*, write *content*, and close the file."""
        with open(path, mode) as handle:
            handle.write(content)
class WriteEnvironmentVariableFile(InstallationStep):
    """Installation step: write '<name>.env' holding the app's env variables."""

    def install(self, directory_structure, application, template_data):
        variables = application.environment().environment_variables()
        if not variables:
            # Nothing configured: do not create an empty file.
            return
        target = os.path.join(directory_structure.configuration_path, application.name + '.env')
        # Render KEY="value" lines, then substitute '%(...)s' template values.
        rendered = '\n'.join(
            '%s="%s"' % (key, value) for key, value in variables.items()
        ) % template_data
        FileWriter().write_to(target, 'wt', rendered)
class WritePathAdditionFile(InstallationStep):
    """Installation step: write '<name>.path' holding the PATH addition."""

    def install(self, directory_structure, application, template_data):
        path_element = application.environment().path_element()
        if path_element is None:
            return
        target = os.path.join(directory_structure.configuration_path, application.name + '.path')
        with open(target, 'wt') as handle:
            # Substitute '%(...)s' template values before writing.
            handle.write(path_element % template_data)
class PointCurrentSymLinkToApplication(InstallationStep):
    """Installation step: repoint the 'current' symlink at the installed version."""

    def install(self, directory_structure, application, template_data):
        link_path = directory_structure.current_symlink_path_for(application)
        # Replace any existing link: unlink first, then create the new one.
        if os.path.islink(link_path):
            os.unlink(link_path)
        target = directory_structure.directory_for(application)
        os.symlink(target + '/', link_path)
class ApplicationInstaller:
    """Downloads, extracts and wires up applications under a root directory."""
    def __init__(self, path, downloader):
        # *path* may contain '~'; it is expanded before building the layout.
        self.directory_structure = DirectoryStructure(expanduser(path))
        self.downloader = downloader
    def ensure_environment_is_setup(self):
        """Create the directory layout and install the shell rc helper."""
        self.directory_structure.ensure_directories_are_setup()
        self._write_rc_file()
    def install(self, application):
        """Download, extract and register *application*.

        Steps run in order: download archive, extract it, point the 'current'
        symlink at the new version, then write the .path and .env files.
        """
        template_data = self._template_data_for(application)
        self.directory_structure.ensure_installation_directory_exists(application)
        self._ensure_archive_was_downloaded(application)
        self._extract_archive(application)
        PointCurrentSymLinkToApplication().install(self.directory_structure, application, template_data)
        WritePathAdditionFile().install(self.directory_structure, application, template_data)
        WriteEnvironmentVariableFile().install(self.directory_structure, application, template_data)
    def _write_rc_file(self):
        # Copy the bundled shell script, rewriting its placeholder
        # application_directory to our actual root path.
        path_to_rc_script = _search_path_for('archiveinstaller/shell/application.sh')
        with open(path_to_rc_script, 'r') as rc_file:
            rc_file_template = rc_file.read()
        path_to_destination = os.path.join(self.directory_structure.path, 'application.rc')
        with open(path_to_destination, 'w')as rc_file_installed:
            replacement = "application_directory='%(path)s'" % {'path': self.directory_structure.path}
            rc_file_installed.write(rc_file_template.replace("application_directory='/tmp'", replacement))
    def _ensure_archive_was_downloaded(self, application):
        # The downloader decides whether a (cached) download is needed.
        self.downloader.download(application, self.directory_structure.archive_path_for(application))
    def _template_data_for(self, application):
        # Values substituted into '%(...)s' templates in .path/.env files.
        return {'installation_directory': self.directory_structure.current_symlink_path_for(application)}
    def _extract_archive(self, application):
        # Skip extraction when the version directory already exists.
        if self.directory_structure.archive_already_extracted(application):
            print('already extracted ' + application.filename())
            return
        archive_path = self.directory_structure.archive_path_for(application)
        target_directory_path = self.directory_structure.directory_for(application)
        nesting_level = application.archive().nesting_level()
        archiveinstaller.extractor.ArchiveExtractor().extract(archive_path, target_directory_path, nesting_level)
class DirectoryStructure:
    """Computes and creates the on-disk layout under a root *path*.

    Layout: ``<path>/etc`` for configuration files, and per application
    ``<path>/<name>/<version>`` (extracted archive), ``<path>/<name>/current``
    (symlink) and ``<path>/<name>/<archive filename>``.
    """

    def __init__(self, path):
        self.path = path
        self.configuration_path = os.path.join(self.path, "etc")

    def ensure_directories_are_setup(self):
        """Create the root and configuration directories if missing."""
        mkdir_p(self.path)
        mkdir_p(self.configuration_path)

    def ensure_installation_directory_exists(self, application):
        mkdir_p(self._parent_directory_for(application))

    def current_symlink_path_for(self, application):
        return os.path.join(self._parent_directory_for(application), 'current')

    def archive_path_for(self, application):
        return os.path.join(self._parent_directory_for(application), application.filename())

    def directory_for(self, application):
        return os.path.join(self._parent_directory_for(application), application.version)

    def archive_already_extracted(self, application):
        return os.path.isdir(self.directory_for(application))

    def _parent_directory_for(self, application):
        return os.path.join(self.path, application.name)
|
{
"content_hash": "6c35ab2c1e1f51209384cb33512aa6cb",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 113,
"avg_line_length": 35.691943127962084,
"alnum_prop": 0.6924711193732572,
"repo_name": "signed/archiveinstaller",
"id": "19ee494dad5bc9762c4490cb065562be0e10a61e",
"size": "7531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/archiveinstaller/installer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25139"
},
{
"name": "Shell",
"bytes": "710"
}
],
"symlink_target": ""
}
|
import unittest
import pytest
import azure.mgmt.network
import azure.mgmt.network
from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy
AZURE_LOCATION = 'eastus'
@pytest.mark.live_test_only
class TestMgmtNetwork(AzureMgmtRecordedTestCase):
    """Live (recorded) tests for NetworkManagementClient IP groups.

    Most IpAllocation calls are kept commented out; only the IpGroups
    lifecycle (create, get, list, delete) is currently exercised.
    """
    def setup_method(self, method):
        # Fresh management client for each test method.
        self.mgmt_client = self.create_mgmt_client(
            azure.mgmt.network.NetworkManagementClient
        )
    def create_virtual_network(self, group_name, location, network_name):
        """Create a 10.0.0.0/16 virtual network and wait for completion."""
        result = self.mgmt_client.virtual_networks.begin_create_or_update(
            group_name,
            network_name,
            {
                'location': location,
                'address_space': {
                    'address_prefixes': ['10.0.0.0/16']
                }
            },
        )
        return result.result()
    @RandomNameResourceGroupPreparer(location=AZURE_LOCATION)
    @recorded_by_proxy
    def test_network(self, resource_group):
        """Exercise the IpGroups create/get/list/delete lifecycle."""
        SUBSCRIPTION_ID = self.get_settings_value("SUBSCRIPTION_ID")
        RESOURCE_GROUP = resource_group.name
        VIRTUAL_NETWORK_NAME = "virtualnetworkx"
        IP_GROUPS_NAME = "myIpGroups"
        IP_ALLOCATION_NAME = "myIpAllocation"
        # self.create_virtual_network(RESOURCE_GROUP, AZURE_LOCATION, VIRTUAL_NETWORK_NAME)
        # /IpGroups/put/CreateOrUpdate_IpGroups[put]
        BODY = {
            "tags": {
                "key1": "value1"
            },
            "location": "West US",
            "ip_addresses": [
                "13.64.39.16/32",
                "40.74.146.80/31",
                "40.74.147.32/28"
            ]
        }
        result = self.mgmt_client.ip_groups.begin_create_or_update(resource_group_name=RESOURCE_GROUP, ip_groups_name=IP_GROUPS_NAME, parameters=BODY)
        result = result.result()
        # /IpAllocations/put/Create IpAllocation[put]
        # BODY = {
        #   "location": "centraluseuap",
        #   "type": "Undefined",
        #   "prefix": "3.2.5.0/24",
        #   "allocation_tags": {
        #     "vnet_id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Network/virtualNetworks/" + VIRTUAL_NETWORK_NAME
        #   }
        # }
        # result = self.mgmt_client.ip_allocations.begin_create_or_update(resource_group_name=RESOURCE_GROUP, ip_allocation_name=IP_ALLOCATION_NAME, parameters=BODY)
        # result = result.result()
        # /IpAllocations/get/Get IpAllocation[get]
        # result = self.mgmt_client.ip_allocations.get(resource_group_name=RESOURCE_GROUP, ip_allocation_name=IP_ALLOCATION_NAME)
        # /IpGroups/get/Get_IpGroups[get]
        result = self.mgmt_client.ip_groups.get(resource_group_name=RESOURCE_GROUP, ip_groups_name=IP_GROUPS_NAME)
        # /IpAllocations/get/List IpAllocations in resource group[get]
        # result = self.mgmt_client.ip_allocations.list_by_resource_group(resource_group_name=RESOURCE_GROUP)
        # /IpGroups/get/ListByResourceGroup_IpGroups[get]
        result = self.mgmt_client.ip_groups.list_by_resource_group(resource_group_name=RESOURCE_GROUP)
        # /IpAllocations/get/List all IpAllocations[get]
        # result = self.mgmt_client.ip_allocations.list()
        # /IpGroups/get/List_IpGroups[get]
        result = self.mgmt_client.ip_groups.list()
        # /IpAllocations/patch/Update virtual network tags[patch]
        # BODY = {
        #   "tags": {
        #     "tag1": "value1",
        #     "tag2": "value2"
        #   }
        # }
        # result = self.mgmt_client.ip_allocations.update_tags(resource_group_name=RESOURCE_GROUP, ip_allocation_name=IP_ALLOCATION_NAME, parameters=BODY)
        # TODO: The requested resource does not support http method 'PATCH'.
        # /IpGroups/patch/Update_IpGroups[patch]
        # BODY = {
        #   "tags": {
        #     "key1": "value1",
        #     "key2": "value2"
        #   }
        # }
        # result = self.mgmt_client.ip_groups.update_groups(resource_group_name=RESOURCE_GROUP, ip_groups_name=IP_GROUPS_NAME, parameters=BODY)
        # /IpAllocations/delete/Delete IpAllocation[delete]
        # result = self.mgmt_client.ip_allocations.begin_delete(resource_group_name=RESOURCE_GROUP, ip_allocation_name=IP_ALLOCATION_NAME)
        # result = result.result()
        # /IpGroups/delete/Delete_IpGroups[delete]
        result = self.mgmt_client.ip_groups.begin_delete(resource_group_name=RESOURCE_GROUP, ip_groups_name=IP_GROUPS_NAME)
        result = result.result()
#------------------------------------------------------------------------------
# Allow running this test module directly (outside of pytest).
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "99801903625d3583719909e09cd6e8e0",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 171,
"avg_line_length": 38.96666666666667,
"alnum_prop": 0.6120615911035072,
"repo_name": "Azure/azure-sdk-for-python",
"id": "90811e3f9ced0107cf5c24bec19ca351a3c07c14",
"size": "5331",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/network/azure-mgmt-network/tests/test_cli_mgmt_network_ip.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
from .select_fittest import SelectFittest # noqa: F401
|
{
"content_hash": "805da9f7d35b1df0326f27635e21bacb",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 56,
"avg_line_length": 57,
"alnum_prop": 0.7719298245614035,
"repo_name": "google-research/evoflow",
"id": "78e2fdb8d785a5cf7cb4191197836f58e8713c07",
"size": "633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "evoflow/selection/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "4192603"
},
{
"name": "Python",
"bytes": "268137"
}
],
"symlink_target": ""
}
|
from flask import Blueprint
# Blueprint for the "home" section; view functions attach to it in routes.py.
home = Blueprint('home', __name__)
# Imported for its side effect of registering routes on the blueprint above.
from . import routes
|
{
"content_hash": "0e0f13a71f34bd0cc14368d5b1a645f7",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 34,
"avg_line_length": 14.5,
"alnum_prop": 0.7011494252873564,
"repo_name": "benjaminhuanghuang/math_clone",
"id": "dabd300e02cb933206619790611bd7b89f7c1b62",
"size": "87",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/home/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8262"
},
{
"name": "HTML",
"bytes": "23588"
},
{
"name": "JavaScript",
"bytes": "2755"
},
{
"name": "Python",
"bytes": "26819"
}
],
"symlink_target": ""
}
|
"""OAI-PMH 2.0 response generator."""
from datetime import MINYEAR, datetime, timedelta
import arrow
from flask import current_app, url_for
from lxml import etree
from lxml.etree import Element, ElementTree, SubElement
from invenio_oaiserver.percolator import sets_search_all
from .models import OAISet
from .provider import OAIIDProvider
from .proxies import current_oaiserver
from .query import get_records
from .resumption_token import serialize
from .utils import datetime_to_datestamp, sanitize_unicode, serializer
# XML namespaces used throughout OAI-PMH responses.
NS_OAIPMH = "http://www.openarchives.org/OAI/2.0/"
NS_OAIPMH_XSD = "http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd"
NS_XSI = "http://www.w3.org/2001/XMLSchema-instance"
NS_OAIDC = "http://www.openarchives.org/OAI/2.0/oai_dc/"
NS_DC = "http://purl.org/dc/elements/1.1/"
# Default namespace map for response elements.
NSMAP = {
    None: NS_OAIPMH,
}
# Namespace map for Dublin Core <setDescription> payloads.
NSMAP_DESCRIPTION = {
    "oai_dc": NS_OAIDC,
    "dc": NS_DC,
    "xsi": NS_XSI,
}
# Supported datestamp granularities: OAI-PMH name -> strftime format.
DATETIME_FORMATS = {
    "YYYY-MM-DDThh:mm:ssZ": "%Y-%m-%dT%H:%M:%SZ",
    "YYYY-MM-DD": "%Y-%m-%d",
}
def envelope(**kwargs):
    """Create the OAI-PMH envelope for a response.

    Returns ``(tree, root)`` — the ElementTree and its ``<OAI-PMH>`` root.
    Request arguments are echoed back as attributes of ``<request>``.
    """
    root = Element(etree.QName(NS_OAIPMH, "OAI-PMH"), nsmap=NSMAP)
    root.set(
        etree.QName(NS_XSI, "schemaLocation"),
        "{0} {1}".format(NS_OAIPMH, NS_OAIPMH_XSD),
    )
    tree = ElementTree(element=root)
    xsl_url = current_app.config["OAISERVER_XSL_URL"]
    if xsl_url:
        # Attach an xml-stylesheet processing instruction before the root.
        root.addprevious(
            etree.ProcessingInstruction(
                "xml-stylesheet",
                'type="text/xsl" href="{0}"'.format(xsl_url),
            )
        )
    e_responseDate = SubElement(root, etree.QName(NS_OAIPMH, "responseDate"))
    # date should be first possible moment
    e_responseDate.text = datetime_to_datestamp(datetime.utcnow())
    e_request = SubElement(root, etree.QName(NS_OAIPMH, "request"))
    for name, value in kwargs.items():
        if name in ("from_", "until"):
            value = datetime_to_datestamp(value)
        elif name == "resumptionToken":
            value = value["token"]
        e_request.set(name, value)
    e_request.text = url_for("invenio_oaiserver.response", _external=True)
    return tree, root
def error(errors):
    """Build an OAI-PMH error response from an iterable of (code, message)."""
    tree, root = envelope()
    for code, message in errors:
        el = SubElement(root, etree.QName(NS_OAIPMH, "error"))
        el.set("code", code)
        el.text = message
    return tree
def verb(**kwargs):
    """Create the response envelope plus the child element for the verb."""
    tree, root = envelope(**kwargs)
    verb_element = SubElement(root, etree.QName(NS_OAIPMH, kwargs["verb"]))
    return tree, verb_element
def identify(**kwargs):
    """Create OAI-PMH response for verb Identify."""
    cfg = current_app.config
    e_tree, e_identify = verb(**kwargs)
    e_repositoryName = SubElement(e_identify, etree.QName(NS_OAIPMH, "repositoryName"))
    e_repositoryName.text = cfg["OAISERVER_REPOSITORY_NAME"]
    e_baseURL = SubElement(e_identify, etree.QName(NS_OAIPMH, "baseURL"))
    e_baseURL.text = url_for("invenio_oaiserver.response", _external=True)
    e_protocolVersion = SubElement(
        e_identify, etree.QName(NS_OAIPMH, "protocolVersion")
    )
    e_protocolVersion.text = cfg["OAISERVER_PROTOCOL_VERSION"]
    for adminEmail in cfg["OAISERVER_ADMIN_EMAILS"]:
        e = SubElement(e_identify, etree.QName(NS_OAIPMH, "adminEmail"))
        e.text = adminEmail
    e_earliestDatestamp = SubElement(
        e_identify, etree.QName(NS_OAIPMH, "earliestDatestamp")
    )
    # Fallback when the index holds no records at all.
    earliest_date = datetime(MINYEAR, 1, 1)
    # Ask the search index for the single oldest record: ascending sort on the
    # creation field, first hit only.
    earliest_record = (
        current_oaiserver.search_cls(index=current_app.config["OAISERVER_RECORD_INDEX"])
        .sort({current_oaiserver.created_key: {"order": "asc"}})[0:1]
        .execute()
    )
    if len(earliest_record.hits.hits) > 0:
        hit = earliest_record.hits.hits[0]
        hit = hit.to_dict()
        created_date_str = hit.get("_source", {}).get(current_oaiserver.created_key)
        if created_date_str:
            # Normalize to a naive UTC datetime for datestamp formatting.
            earliest_date = (
                arrow.get(created_date_str).to("utc").datetime.replace(tzinfo=None)
            )
    e_earliestDatestamp.text = datetime_to_datestamp(earliest_date)
    e_deletedRecord = SubElement(e_identify, etree.QName(NS_OAIPMH, "deletedRecord"))
    e_deletedRecord.text = "no"
    e_granularity = SubElement(e_identify, etree.QName(NS_OAIPMH, "granularity"))
    assert cfg["OAISERVER_GRANULARITY"] in DATETIME_FORMATS
    e_granularity.text = cfg["OAISERVER_GRANULARITY"]
    compressions = cfg["OAISERVER_COMPRESSIONS"]
    # A bare ['identity'] means no compression support is advertised.
    if compressions != ["identity"]:
        for compression in compressions:
            e_compression = SubElement(
                e_identify, etree.QName(NS_OAIPMH, "compression")
            )
            e_compression.text = compression
    for description in cfg.get("OAISERVER_DESCRIPTIONS", []):
        e_description = SubElement(e_identify, etree.QName(NS_OAIPMH, "description"))
        e_description.append(etree.fromstring(description))
    return e_tree
def resumption_token(parent, pagination, **kwargs):
    """Attach a <resumptionToken> element to *parent* when needed.

    Nothing is attached when the whole result set fits on the first page.
    """
    if pagination.page == 1 and not pagination.has_next:
        return
    token = serialize(pagination, **kwargs)
    element = SubElement(parent, etree.QName(NS_OAIPMH, "resumptionToken"))
    if pagination.total:
        expires_at = datetime.utcnow() + timedelta(
            seconds=current_app.config["OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME"]
        )
        element.set("expirationDate", datetime_to_datestamp(expires_at))
        element.set("cursor", str(pagination.per_page * (pagination.page - 1)))
        element.set("completeListSize", str(pagination.total))
    if token:
        element.text = token
def listsets(**kwargs):
    """Create OAI-PMH response for ListSets verb."""
    e_tree, e_listsets = verb(**kwargs)
    # Page number comes from the resumption token; defaults to the first page.
    page = kwargs.get("resumptionToken", {}).get("page", 1)
    size = current_app.config["OAISERVER_PAGE_SIZE"]
    oai_sets = OAISet.query.paginate(page=page, per_page=size, error_out=False)
    for oai_set in oai_sets.items:
        e_set = SubElement(e_listsets, etree.QName(NS_OAIPMH, "set"))
        e_setSpec = SubElement(e_set, etree.QName(NS_OAIPMH, "setSpec"))
        e_setSpec.text = oai_set.spec
        e_setName = SubElement(e_set, etree.QName(NS_OAIPMH, "setName"))
        e_setName.text = sanitize_unicode(oai_set.name)
        if oai_set.description:
            # Optional description is emitted as an oai_dc Dublin Core payload.
            e_setDescription = SubElement(
                e_set, etree.QName(NS_OAIPMH, "setDescription")
            )
            e_dc = SubElement(
                e_setDescription,
                etree.QName(NS_OAIDC, "dc"),
                nsmap=NSMAP_DESCRIPTION,
            )
            e_dc.set(etree.QName(NS_XSI, "schemaLocation"), NS_OAIDC)
            e_description = SubElement(e_dc, etree.QName(NS_DC, "description"))
            e_description.text = sanitize_unicode(oai_set.description)
    resumption_token(e_listsets, oai_sets, **kwargs)
    return e_tree
def listmetadataformats(**kwargs):
    """Create OAI-PMH response for the ListMetadataFormats verb."""
    cfg = current_app.config
    tree, e_list = verb(**kwargs)
    if "identifier" in kwargs:
        # Test that a record exists; the provider raises when it does not.
        OAIIDProvider.get(pid_value=kwargs["identifier"])
    for prefix, metadata in cfg.get("OAISERVER_METADATA_FORMATS", {}).items():
        fmt = SubElement(e_list, etree.QName(NS_OAIPMH, "metadataFormat"))
        SubElement(fmt, etree.QName(NS_OAIPMH, "metadataPrefix")).text = prefix
        SubElement(fmt, etree.QName(NS_OAIPMH, "schema")).text = metadata["schema"]
        SubElement(
            fmt, etree.QName(NS_OAIPMH, "metadataNamespace")
        ).text = metadata["namespace"]
    return tree
def header(parent, identifier, datestamp, sets=None, deleted=False):
    """Attach a ``<header/>`` element to *parent* and return it."""
    e_header = SubElement(parent, etree.QName(NS_OAIPMH, "header"))
    if deleted:
        e_header.set("status", "deleted")
    SubElement(e_header, etree.QName(NS_OAIPMH, "identifier")).text = identifier
    SubElement(
        e_header, etree.QName(NS_OAIPMH, "datestamp")
    ).text = datetime_to_datestamp(datestamp)
    for spec in sets or []:
        SubElement(e_header, etree.QName(NS_OAIPMH, "setSpec")).text = spec
    return e_header
def getrecord(**kwargs):
    """Create OAI-PMH response for the GetRecord verb."""
    dump = serializer(kwargs["metadataPrefix"])
    pid = OAIIDProvider.get(pid_value=kwargs["identifier"]).pid
    record = current_oaiserver.record_fetcher(pid.object_uuid)
    e_tree, e_getrecord = verb(**kwargs)
    e_record = SubElement(e_getrecord, etree.QName(NS_OAIPMH, "record"))
    # Header first, then the serialized metadata payload.
    header(
        e_record,
        identifier=pid.pid_value,
        datestamp=record["updated"],
        sets=current_oaiserver.record_sets_fetcher(record),
    )
    e_metadata = SubElement(e_record, etree.QName(NS_OAIPMH, "metadata"))
    e_metadata.append(dump(pid, {"_source": record}))
    return e_tree
def listidentifiers(**kwargs):
    """Create OAI-PMH response for verb ListIdentifiers."""
    e_tree, e_listidentifiers = verb(**kwargs)
    result = get_records(**kwargs)
    records = list(result.items)
    # One sets-lookup for the whole page instead of one query per record.
    sets_per_record = sets_search_all([r["json"]["_source"] for r in records])
    for record, record_sets in zip(records, sets_per_record):
        pid = current_oaiserver.oaiid_fetcher(
            record["id"], record["json"]["_source"])
        header(
            e_listidentifiers,
            identifier=pid.pid_value,
            datestamp=record["updated"],
            sets=record_sets,
        )
    resumption_token(e_listidentifiers, result, **kwargs)
    return e_tree
def listrecords(**kwargs):
    """Create OAI-PMH response for verb ListRecords."""
    # The prefix travels inside the resumption token on follow-up pages.
    token = kwargs.get("resumptionToken")
    if token:
        metadata_prefix = token.get("metadataPrefix")
    else:
        metadata_prefix = kwargs["metadataPrefix"]
    dump = serializer(metadata_prefix)
    e_tree, e_listrecords = verb(**kwargs)
    result = get_records(**kwargs)
    records = list(result.items)
    # One sets-lookup for the whole page instead of one query per record.
    sets_per_record = sets_search_all([r["json"]["_source"] for r in records])
    for record, record_sets in zip(records, sets_per_record):
        pid = current_oaiserver.oaiid_fetcher(
            record["id"], record["json"]["_source"])
        e_record = SubElement(e_listrecords, etree.QName(NS_OAIPMH, "record"))
        header(
            e_record,
            identifier=pid.pid_value,
            datestamp=record["updated"],
            sets=record_sets,
        )
        e_metadata = SubElement(e_record, etree.QName(NS_OAIPMH, "metadata"))
        e_metadata.append(dump(pid, record["json"]))
    resumption_token(e_listrecords, result, **kwargs)
    return e_tree
|
{
"content_hash": "d173c2e32cc81ac1776a05ff6dd737df",
"timestamp": "",
"source": "github",
"line_count": 320,
"max_line_length": 88,
"avg_line_length": 35.73125,
"alnum_prop": 0.6431694944901172,
"repo_name": "inveniosoftware/invenio-oaiserver",
"id": "f78eef68a56c45d6d13e77a184c6baff2f6a8b49",
"size": "11721",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "invenio_oaiserver/response.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "126413"
},
{
"name": "Shell",
"bytes": "745"
},
{
"name": "XSLT",
"bytes": "8862"
}
],
"symlink_target": ""
}
|
from oslo_config import cfg
import sqlalchemy
from sqlalchemy.sql.expression import false
from keystone import assignment as keystone_assignment
from keystone.common import sql
from keystone import exception
from keystone.i18n import _
CONF = cfg.CONF
class AssignmentType(object):
    """Enumerates the four (actor, target) role-assignment combinations."""
    USER_PROJECT = 'UserProject'
    GROUP_PROJECT = 'GroupProject'
    USER_DOMAIN = 'UserDomain'
    GROUP_DOMAIN = 'GroupDomain'
    @classmethod
    def calculate_type(cls, user_id, group_id, project_id, domain_id):
        """Map a populated actor/target pair to its assignment type.

        A user actor takes precedence over a group actor, and a project
        target over a domain target, when several are supplied.
        """
        if user_id and project_id:
            return cls.USER_PROJECT
        if user_id and domain_id:
            return cls.USER_DOMAIN
        if group_id and project_id:
            return cls.GROUP_PROJECT
        if group_id and domain_id:
            return cls.GROUP_DOMAIN
        # No valid actor/target combination was supplied.
        raise exception.AssignmentTypeCalculationError(**locals())
class Assignment(keystone_assignment.AssignmentDriverV8):
    """SQL backend for role assignments (V8 driver interface).

    All four assignment combinations (user/group actor against
    project/domain target) are stored in the single ``assignment`` table;
    the ``type`` column (see ``AssignmentType``) disambiguates the rows.
    """
    def default_role_driver(self):
        # Companion role driver to use when none is configured.
        return 'sql'
    def default_resource_driver(self):
        # Companion resource driver to use when none is configured.
        return 'sql'
    def list_user_ids_for_project(self, tenant_id):
        # Distinct IDs of users holding any direct role on the project.
        with sql.session_for_read() as session:
            query = session.query(RoleAssignment.actor_id)
            query = query.filter_by(type=AssignmentType.USER_PROJECT)
            query = query.filter_by(target_id=tenant_id)
            query = query.distinct('actor_id')
            assignments = query.all()
            return [assignment.actor_id for assignment in assignments]
    def create_grant(self, role_id, user_id=None, group_id=None,
                     domain_id=None, project_id=None,
                     inherited_to_projects=False):
        # Derives the row type from whichever actor/target pair is set;
        # raises AssignmentTypeCalculationError for invalid combinations.
        assignment_type = AssignmentType.calculate_type(
            user_id, group_id, project_id, domain_id)
        try:
            with sql.session_for_write() as session:
                session.add(RoleAssignment(
                    type=assignment_type,
                    actor_id=user_id or group_id,
                    target_id=project_id or domain_id,
                    role_id=role_id,
                    inherited=inherited_to_projects))
        except sql.DBDuplicateEntry:  # nosec : The v3 grant APIs are silent if
            # the assignment already exists
            pass
    def list_grant_role_ids(self, user_id=None, group_id=None,
                            domain_id=None, project_id=None,
                            inherited_to_projects=False):
        # Role IDs granted to the given actor on the given target.
        with sql.session_for_read() as session:
            q = session.query(RoleAssignment.role_id)
            q = q.filter(RoleAssignment.actor_id == (user_id or group_id))
            q = q.filter(RoleAssignment.target_id == (project_id or domain_id))
            q = q.filter(RoleAssignment.inherited == inherited_to_projects)
            return [x.role_id for x in q.all()]
    def _build_grant_filter(self, session, role_id, user_id, group_id,
                            domain_id, project_id, inherited_to_projects):
        # Shared query builder for check_grant_role_id / delete_grant.
        q = session.query(RoleAssignment)
        q = q.filter_by(actor_id=user_id or group_id)
        q = q.filter_by(target_id=project_id or domain_id)
        q = q.filter_by(role_id=role_id)
        q = q.filter_by(inherited=inherited_to_projects)
        return q
    def check_grant_role_id(self, role_id, user_id=None, group_id=None,
                            domain_id=None, project_id=None,
                            inherited_to_projects=False):
        # Raises RoleAssignmentNotFound unless exactly this grant exists.
        with sql.session_for_read() as session:
            try:
                q = self._build_grant_filter(
                    session, role_id, user_id, group_id, domain_id, project_id,
                    inherited_to_projects)
                q.one()
            except sql.NotFound:
                actor_id = user_id or group_id
                target_id = domain_id or project_id
                raise exception.RoleAssignmentNotFound(role_id=role_id,
                                                       actor_id=actor_id,
                                                       target_id=target_id)
    def delete_grant(self, role_id, user_id=None, group_id=None,
                     domain_id=None, project_id=None,
                     inherited_to_projects=False):
        # Deletes the grant; raises RoleAssignmentNotFound when nothing
        # matched (q.delete returns the number of rows removed).
        with sql.session_for_write() as session:
            q = self._build_grant_filter(
                session, role_id, user_id, group_id, domain_id, project_id,
                inherited_to_projects)
            if not q.delete(False):
                actor_id = user_id or group_id
                target_id = domain_id or project_id
                raise exception.RoleAssignmentNotFound(role_id=role_id,
                                                       actor_id=actor_id,
                                                       target_id=target_id)
    def _list_project_ids_for_actor(self, actors, hints, inherited,
                                    group_only=False):
        # TODO(henry-nash): Now that we have a single assignment table, we
        # should be able to honor the hints list that is provided.
        assignment_type = [AssignmentType.GROUP_PROJECT]
        if not group_only:
            assignment_type.append(AssignmentType.USER_PROJECT)
        sql_constraints = sqlalchemy.and_(
            RoleAssignment.type.in_(assignment_type),
            RoleAssignment.inherited == inherited,
            RoleAssignment.actor_id.in_(actors))
        with sql.session_for_read() as session:
            query = session.query(RoleAssignment.target_id).filter(
                sql_constraints).distinct()
        return [x.target_id for x in query.all()]
    def list_project_ids_for_user(self, user_id, group_ids, hints,
                                  inherited=False):
        # The user's own assignments plus those of any of their groups.
        actor_list = [user_id]
        if group_ids:
            actor_list = actor_list + group_ids
        return self._list_project_ids_for_actor(actor_list, hints, inherited)
    def list_domain_ids_for_user(self, user_id, group_ids, hints,
                                 inherited=False):
        # Domains reachable either directly or through group membership;
        # the two constraint sets are OR-ed into one query.
        with sql.session_for_read() as session:
            query = session.query(RoleAssignment.target_id)
            filters = []
            if user_id:
                sql_constraints = sqlalchemy.and_(
                    RoleAssignment.actor_id == user_id,
                    RoleAssignment.inherited == inherited,
                    RoleAssignment.type == AssignmentType.USER_DOMAIN)
                filters.append(sql_constraints)
            if group_ids:
                sql_constraints = sqlalchemy.and_(
                    RoleAssignment.actor_id.in_(group_ids),
                    RoleAssignment.inherited == inherited,
                    RoleAssignment.type == AssignmentType.GROUP_DOMAIN)
                filters.append(sql_constraints)
            if not filters:
                return []
            query = query.filter(sqlalchemy.or_(*filters)).distinct()
            return [assignment.target_id for assignment in query.all()]
    def list_role_ids_for_groups_on_domain(self, group_ids, domain_id):
        if not group_ids:
            # If there's no groups then there will be no domain roles.
            return []
        sql_constraints = sqlalchemy.and_(
            RoleAssignment.type == AssignmentType.GROUP_DOMAIN,
            RoleAssignment.target_id == domain_id,
            RoleAssignment.inherited == false(),
            RoleAssignment.actor_id.in_(group_ids))
        with sql.session_for_read() as session:
            query = session.query(RoleAssignment.role_id).filter(
                sql_constraints).distinct()
        return [role.role_id for role in query.all()]
    def list_role_ids_for_groups_on_project(
            self, group_ids, project_id, project_domain_id, project_parents):
        if not group_ids:
            # If there's no groups then there will be no project roles.
            return []
        # NOTE(rodrigods): First, we always include projects with
        # non-inherited assignments
        sql_constraints = sqlalchemy.and_(
            RoleAssignment.type == AssignmentType.GROUP_PROJECT,
            RoleAssignment.inherited == false(),
            RoleAssignment.target_id == project_id)
        if CONF.os_inherit.enabled:
            # Inherited roles from domains
            sql_constraints = sqlalchemy.or_(
                sql_constraints,
                sqlalchemy.and_(
                    RoleAssignment.type == AssignmentType.GROUP_DOMAIN,
                    RoleAssignment.inherited,
                    RoleAssignment.target_id == project_domain_id))
            # Inherited roles from projects
            if project_parents:
                sql_constraints = sqlalchemy.or_(
                    sql_constraints,
                    sqlalchemy.and_(
                        RoleAssignment.type == AssignmentType.GROUP_PROJECT,
                        RoleAssignment.inherited,
                        RoleAssignment.target_id.in_(project_parents)))
        sql_constraints = sqlalchemy.and_(
            sql_constraints, RoleAssignment.actor_id.in_(group_ids))
        with sql.session_for_read() as session:
            # NOTE(morganfainberg): Only select the columns we actually care
            # about here, in this case role_id.
            query = session.query(RoleAssignment.role_id).filter(
                sql_constraints).distinct()
        return [result.role_id for result in query.all()]
    def list_project_ids_for_groups(self, group_ids, hints,
                                    inherited=False):
        return self._list_project_ids_for_actor(
            group_ids, hints, inherited, group_only=True)
    def list_domain_ids_for_groups(self, group_ids, inherited=False):
        if not group_ids:
            # If there's no groups then there will be no domains.
            return []
        group_sql_conditions = sqlalchemy.and_(
            RoleAssignment.type == AssignmentType.GROUP_DOMAIN,
            RoleAssignment.inherited == inherited,
            RoleAssignment.actor_id.in_(group_ids))
        with sql.session_for_read() as session:
            query = session.query(RoleAssignment.target_id).filter(
                group_sql_conditions).distinct()
        return [x.target_id for x in query.all()]
    def add_role_to_user_and_project(self, user_id, tenant_id, role_id):
        # Unlike create_grant, a duplicate here is a caller error (v2 API).
        try:
            with sql.session_for_write() as session:
                session.add(RoleAssignment(
                    type=AssignmentType.USER_PROJECT,
                    actor_id=user_id, target_id=tenant_id,
                    role_id=role_id, inherited=False))
        except sql.DBDuplicateEntry:
            msg = ('User %s already has role %s in tenant %s'
                   % (user_id, role_id, tenant_id))
            raise exception.Conflict(type='role grant', details=msg)
    def remove_role_from_user_and_project(self, user_id, tenant_id, role_id):
        with sql.session_for_write() as session:
            q = session.query(RoleAssignment)
            q = q.filter_by(actor_id=user_id)
            q = q.filter_by(target_id=tenant_id)
            q = q.filter_by(role_id=role_id)
            if q.delete() == 0:
                raise exception.RoleNotFound(message=_(
                    'Cannot remove role that has not been granted, %s') %
                    role_id)
    def _get_user_assignment_types(self):
        return [AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN]
    def _get_group_assignment_types(self):
        return [AssignmentType.GROUP_PROJECT, AssignmentType.GROUP_DOMAIN]
    def _get_project_assignment_types(self):
        return [AssignmentType.USER_PROJECT, AssignmentType.GROUP_PROJECT]
    def _get_domain_assignment_types(self):
        return [AssignmentType.USER_DOMAIN, AssignmentType.GROUP_DOMAIN]
    def _get_assignment_types(self, user, group, project, domain):
        """Returns a list of role assignment types based on provided entities
        If one of user or group (the "actor") as well as one of project or
        domain (the "target") are provided, the list will contain the role
        assignment type for that specific pair of actor and target.
        If only an actor or target is provided, the list will contain the
        role assignment types that satisfy the specified entity.
        For example, if user and project are provided, the return will be:
        [AssignmentType.USER_PROJECT]
        However, if only user was provided, the return would be:
        [AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN]
        It is not expected that user and group (or project and domain) are
        specified - but if they are, the most fine-grained value will be
        chosen (i.e. user over group, project over domain).
        """
        actor_types = []
        if user:
            actor_types = self._get_user_assignment_types()
        elif group:
            actor_types = self._get_group_assignment_types()
        target_types = []
        if project:
            target_types = self._get_project_assignment_types()
        elif domain:
            target_types = self._get_domain_assignment_types()
        if actor_types and target_types:
            return list(set(actor_types).intersection(target_types))
        return actor_types or target_types
    def list_role_assignments(self, role_id=None,
                              user_id=None, group_ids=None,
                              domain_id=None, project_ids=None,
                              inherited_to_projects=None):
        # Fetches matching rows and converts each back into the
        # user_id/group_id + project_id/domain_id dict form callers expect.
        def denormalize_role(ref):
            assignment = {}
            if ref.type == AssignmentType.USER_PROJECT:
                assignment['user_id'] = ref.actor_id
                assignment['project_id'] = ref.target_id
            elif ref.type == AssignmentType.USER_DOMAIN:
                assignment['user_id'] = ref.actor_id
                assignment['domain_id'] = ref.target_id
            elif ref.type == AssignmentType.GROUP_PROJECT:
                assignment['group_id'] = ref.actor_id
                assignment['project_id'] = ref.target_id
            elif ref.type == AssignmentType.GROUP_DOMAIN:
                assignment['group_id'] = ref.actor_id
                assignment['domain_id'] = ref.target_id
            else:
                raise exception.Error(message=_(
                    'Unexpected assignment type encountered, %s') %
                    ref.type)
            assignment['role_id'] = ref.role_id
            if ref.inherited:
                assignment['inherited_to_projects'] = 'projects'
            return assignment
        with sql.session_for_read() as session:
            assignment_types = self._get_assignment_types(
                user_id, group_ids, project_ids, domain_id)
            targets = None
            if project_ids:
                targets = project_ids
            elif domain_id:
                targets = [domain_id]
            actors = None
            if group_ids:
                actors = group_ids
            elif user_id:
                actors = [user_id]
            query = session.query(RoleAssignment)
            if role_id:
                query = query.filter_by(role_id=role_id)
            if actors:
                query = query.filter(RoleAssignment.actor_id.in_(actors))
            if targets:
                query = query.filter(RoleAssignment.target_id.in_(targets))
            if assignment_types:
                query = query.filter(RoleAssignment.type.in_(assignment_types))
            if inherited_to_projects is not None:
                query = query.filter_by(inherited=inherited_to_projects)
            return [denormalize_role(ref) for ref in query.all()]
    def delete_project_assignments(self, project_id):
        with sql.session_for_write() as session:
            q = session.query(RoleAssignment)
            q = q.filter_by(target_id=project_id)
            q.delete(False)
    def delete_role_assignments(self, role_id):
        with sql.session_for_write() as session:
            q = session.query(RoleAssignment)
            q = q.filter_by(role_id=role_id)
            q.delete(False)
    def delete_user_assignments(self, user_id):
        with sql.session_for_write() as session:
            q = session.query(RoleAssignment)
            q = q.filter_by(actor_id=user_id)
            q.delete(False)
    def delete_group_assignments(self, group_id):
        with sql.session_for_write() as session:
            q = session.query(RoleAssignment)
            q = q.filter_by(actor_id=group_id)
            q.delete(False)
class RoleAssignment(sql.ModelBase, sql.DictBase):
    """ORM model for one row of the unified ``assignment`` table."""
    __tablename__ = 'assignment'
    attributes = ['type', 'actor_id', 'target_id', 'role_id', 'inherited']
    # NOTE(henry-nash); Postgres requires a name to be defined for an Enum
    type = sql.Column(
        sql.Enum(AssignmentType.USER_PROJECT, AssignmentType.GROUP_PROJECT,
                 AssignmentType.USER_DOMAIN, AssignmentType.GROUP_DOMAIN,
                 name='type'),
        nullable=False)
    # actor is a user or group ID; target is a project or domain ID —
    # which one is stored depends on the ``type`` column.
    actor_id = sql.Column(sql.String(64), nullable=False)
    target_id = sql.Column(sql.String(64), nullable=False)
    role_id = sql.Column(sql.String(64), nullable=False)
    inherited = sql.Column(sql.Boolean, default=False, nullable=False)
    # The full tuple is the primary key, so duplicate grants raise
    # DBDuplicateEntry at insert time.
    __table_args__ = (
        sql.PrimaryKeyConstraint('type', 'actor_id', 'target_id', 'role_id',
                                 'inherited'),
        sql.Index('ix_actor_id', 'actor_id'),
    )
    def to_dict(self):
        """Override parent method with a simpler implementation.
        RoleAssignment doesn't have non-indexed 'extra' attributes, so the
        parent implementation is not applicable.
        """
        return dict(self.items())
|
{
"content_hash": "d19c1a61c3ae9b9433e441fb1c963b6a",
"timestamp": "",
"source": "github",
"line_count": 438,
"max_line_length": 79,
"avg_line_length": 40.82876712328767,
"alnum_prop": 0.5750153777330426,
"repo_name": "himanshu-setia/keystone",
"id": "88c10a6a9a990a4177f4cadebc0abb4dd124bc77",
"size": "18472",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keystone/assignment/V8_backends/sql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "665"
},
{
"name": "Python",
"bytes": "4415061"
}
],
"symlink_target": ""
}
|
"""Manage indexing for snapshotter service"""
from sqlalchemy.sql.expression import tuple_
from ggrc import db
from ggrc import models
from ggrc.fulltext.mysql import MysqlRecordProperty as Record
from ggrc.models.reflection import AttributeInfo
from ggrc.snapshotter.rules import Types
from ggrc.snapshotter.datastructures import Pair
def _get_tag(pair):
return u"{parent_type}-{parent_id}-{child_type}".format(
parent_type=pair.parent.type,
parent_id=pair.parent.id,
child_type=pair.child.type
)
def _get_parent_property(pair):
return u"{parent_type}-{parent_id}".format(
parent_type=pair.parent.type,
parent_id=pair.parent.id
)
def _get_child_property(pair):
return u"{child_type}-{child_id}".format(
child_type=pair.child.type,
child_id=pair.child.id
)
def _get_columns():
    """Get common columns for snapshots and revisions tables."""
    snapshot_columns = db.session.query(
        models.Snapshot.id,
        models.Snapshot.context_id,
        models.Snapshot.parent_type,
        models.Snapshot.parent_id,
        models.Snapshot.child_type,
        models.Snapshot.child_id,
        models.Snapshot.revision_id,
    )
    revision_columns = db.session.query(
        models.Revision.id,
        models.Revision.resource_type,
        models.Revision.content,
    )
    return snapshot_columns, revision_columns
def _get_revision_content(revision_ids=None):
    """Get content for provided revision_ids.

    Args:
      revision_ids: An iterable of revision IDs, or None.
    Returns:
      Dictionary mapping revision_ids to content; empty when no IDs given.
    """
    if not revision_ids:
        return {}
    query = db.session.query(
        models.Revision.id,
        models.Revision.content,
    ).filter(models.Revision.id.in_(revision_ids))
    return dict(query)
def _get_model_properties():
    """Get indexable properties for all snapshottable objects.

    Returns:
      tuple(class_properties dict, custom_attribute_definitions dict) -
      searchable attributes for every snapshottable model, and a mapping
      of custom attribute definition ID -> title for the indexable
      attribute types.
    """
    # pylint: disable=protected-access
    from ggrc.models import all_models
    # Singular table names are how CAD rows reference their model.
    singular_names = {
        getattr(all_models, name)._inflector.table_singular
        for name in Types.all
    }
    cad_query = db.session.query(
        models.CustomAttributeDefinition.id,
        models.CustomAttributeDefinition.title,
    ).filter(
        models.CustomAttributeDefinition.definition_type.in_(singular_names),
        models.CustomAttributeDefinition.attribute_type.in_(
            ["Text", "Rich Text", "Date"])
    )
    custom_attribute_definitions = dict(cad_query)
    class_properties = {
        name: AttributeInfo.gather_attrs(
            getattr(all_models, name), '_fulltext_attrs')
        for name in Types.all
    }
    return class_properties, custom_attribute_definitions
def get_searchable_attributes(attributes, cad_keys,
                              ca_definitions, content):
    """Get all searchable attributes for a given object that should be indexed.

    Args:
      attributes: Attributes that should be extracted from some model.
      cad_keys: IDs of custom attribute definitions.
      ca_definitions: Dictionary of "CAD ID" -> "CAD title".
      content: dictionary (JSON) representation of an object.
    Return:
      Dict of "key": "value" from the object's revision.
    """
    values = dict()
    for attr in attributes:
        values[attr] = content.get(attr)
    # Custom attribute values are keyed by their definition's title.
    for cav in content.get("custom_attributes") or []:
        cav_id = cav["custom_attribute_id"]
        if cav_id in cad_keys:
            values[ca_definitions[cav_id]] = cav["attribute_value"]
    return values
def reindex(parents=None):
    """Reindex all snapshots or limit to a subset of certain parents.

    Args:
      parents: An iterable of parents for which to reindex their scopes.
    Returns:
      Set of parent-child pairs that were reindexed.
    """
    query = db.session.query(
        models.Snapshot.parent_type,
        models.Snapshot.parent_id,
        models.Snapshot.child_type,
        models.Snapshot.child_id,
    )
    if parents:
        parent_keys = {(parent.type, parent.id) for parent in parents}
        query = query.filter(
            tuple_(
                models.Snapshot.parent_type,
                models.Snapshot.parent_id,
            ).in_(parent_keys))
    pairs = {Pair.from_4tuple(row) for row in query}
    reindex_pairs(pairs)
    return pairs
def delete_records(snapshot_ids):
    """Delete all fulltext records for the given snapshots.

    Args:
      snapshot_ids: An iterable with snapshot IDs whose full text records
        should be deleted.
    """
    keys = [("Snapshot", sid) for sid in snapshot_ids]
    db.session.query(Record).filter(
        tuple_(Record.type, Record.key).in_(keys)
    ).delete(synchronize_session=False)
    db.session.commit()
def insert_records(payload):
    """Bulk-insert entries into the full text table.

    Args:
      payload: List of dictionaries that represent record entries.
    """
    db.engine.execute(Record.__table__.insert(), payload)
    db.session.commit()
def reindex_pairs(pairs):
    """Reindex selected snapshots.

    Args:
      pairs: A list of parent-child pairs that uniquely represent snapshot
        objects whose properties should be reindexed. An empty/falsy value
        reindexes every snapshot.
    """
    snapshots = dict()
    object_properties, custom_attr_def_properties = _get_model_properties()
    ca_def_keys = set(custom_attr_def_properties.keys())
    snapshot_columns, revision_columns = _get_columns()
    snapshot_query = snapshot_columns
    if pairs:
        pairs_filter = tuple_(
            models.Snapshot.parent_type,
            models.Snapshot.parent_id,
            models.Snapshot.child_type,
            models.Snapshot.child_id,
        ).in_({pair.to_4tuple() for pair in pairs})
        snapshot_query = snapshot_columns.filter(pairs_filter)
    for _id, ctx_id, ptype, pid, ctype, cid, revid in snapshot_query:
        pair = Pair.from_4tuple((ptype, pid, ctype, cid))
        snapshots[pair] = (_id, ctx_id, revid)
    # Fetch searchable content once per distinct revision.
    revisions = dict()
    revision_ids = {revid for _, _, revid in snapshots.values()}
    revision_query = revision_columns.filter(
        models.Revision.id.in_(revision_ids)
    )
    for _id, _type, content in revision_query:
        revisions[_id] = get_searchable_attributes(
            object_properties[_type], ca_def_keys, custom_attr_def_properties,
            content)
    snapshot_ids = set()
    search_payload = list()
    for pair, (snapshot_id, ctx_id, revision_id) in snapshots.items():
        snapshot_ids.add(snapshot_id)
        # Copy so the cached revision attributes are not mutated in place:
        # several snapshot pairs can share one revision (the same child
        # snapshotted under multiple parents), and updating the shared dict
        # would leak one pair's parent/child keys into the next iteration.
        properties = dict(revisions[revision_id])
        properties.update({
            "parent": _get_parent_property(pair),
            "child": _get_child_property(pair),
            "child_type": pair.child.type,
            "child_id": pair.child.id
        })
        for prop, val in properties.items():
            # Skip empty keys/values - they carry nothing searchable.
            if prop and val:
                search_payload.append({
                    "key": snapshot_id,
                    "type": "Snapshot",
                    "context_id": ctx_id,
                    "tags": _get_tag(pair),
                    "property": prop,
                    "content": val,
                })
    delete_records(snapshot_ids)
    insert_records(search_payload)
|
{
"content_hash": "27d0a1528ef62d0b799dccdada9a1956",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 78,
"avg_line_length": 29.396887159533073,
"alnum_prop": 0.671872931833223,
"repo_name": "josthkko/ggrc-core",
"id": "e1ac91a2d14301e3270010b4188f2604ef7c9c93",
"size": "7668",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/ggrc/snapshotter/indexer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "163629"
},
{
"name": "Cucumber",
"bytes": "136321"
},
{
"name": "HTML",
"bytes": "1057288"
},
{
"name": "JavaScript",
"bytes": "1492054"
},
{
"name": "Makefile",
"bytes": "6161"
},
{
"name": "Mako",
"bytes": "2178"
},
{
"name": "Python",
"bytes": "2148568"
},
{
"name": "Shell",
"bytes": "29929"
}
],
"symlink_target": ""
}
|
class BrowserDetect:
    """Identify the runtime this example build targets."""

    _RUNTIME = 'Pyjd-hulahop'

    def i_am(self):
        """Return the name of the current runtime."""
        return self._RUNTIME
|
{
"content_hash": "3b205ce07b8a38de7fe742e3cc924a20",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 29,
"avg_line_length": 23.666666666666668,
"alnum_prop": 0.6338028169014085,
"repo_name": "pombredanne/pyjs",
"id": "f9f4c6feba9f4b7bdec754a5050ba0e7c5824fff",
"size": "71",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "examples/browserdetect/BrowserDetect.hulahop.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4640"
},
{
"name": "Groff",
"bytes": "6633"
},
{
"name": "HTML",
"bytes": "10106"
},
{
"name": "JavaScript",
"bytes": "63385"
},
{
"name": "Makefile",
"bytes": "453"
},
{
"name": "Python",
"bytes": "5515375"
},
{
"name": "Shell",
"bytes": "4264"
}
],
"symlink_target": ""
}
|
from distutils.version import LooseVersion
from itertools import product
import operator
import numpy as np
from numpy import nan
import pytest
from pandas.compat import PY2, PY35, is_platform_windows, lrange, range
import pandas.util._test_decorators as td
import pandas as pd
from pandas import (
Categorical, CategoricalIndex, DataFrame, Series, compat, date_range, isna,
notna)
from pandas.api.types import is_scalar
from pandas.core.index import MultiIndex
from pandas.core.indexes.datetimes import Timestamp
import pandas.util.testing as tm
from pandas.util.testing import (
assert_almost_equal, assert_frame_equal, assert_index_equal,
assert_series_equal)
class TestSeriesAnalytics(object):
    def test_describe(self):
        """describe() summaries for numeric, boolean and object Series."""
        s = Series([0, 1, 2, 3, 4], name='int_data')
        result = s.describe()
        expected = Series([5, 2, s.std(), 0, 1, 2, 3, 4],
                          name='int_data',
                          index=['count', 'mean', 'std', 'min', '25%',
                                 '50%', '75%', 'max'])
        tm.assert_series_equal(result, expected)
        # Non-numeric data is summarized with unique/top/freq instead.
        s = Series([True, True, False, False, False], name='bool_data')
        result = s.describe()
        expected = Series([5, 2, False, 3], name='bool_data',
                          index=['count', 'unique', 'top', 'freq'])
        tm.assert_series_equal(result, expected)
        s = Series(['a', 'a', 'b', 'c', 'd'], name='str_data')
        result = s.describe()
        expected = Series([5, 4, 'a', 2], name='str_data',
                          index=['count', 'unique', 'top', 'freq'])
        tm.assert_series_equal(result, expected)
    def test_describe_with_tz(self, tz_naive_fixture):
        """describe() on tz-aware datetimes keeps the timezone in first/last."""
        # GH 21332
        tz = tz_naive_fixture
        name = str(tz_naive_fixture)
        start = Timestamp(2018, 1, 1)
        end = Timestamp(2018, 1, 5)
        s = Series(date_range(start, end, tz=tz), name=name)
        result = s.describe()
        expected = Series(
            [5, 5, s.value_counts().index[0], 1, start.tz_localize(tz),
             end.tz_localize(tz)
             ],
            name=name,
            index=['count', 'unique', 'top', 'freq', 'first', 'last']
        )
        tm.assert_series_equal(result, expected)
    def test_argsort(self, datetime_series):
        """argsort matches numpy and maps NaT to the -1 sentinel."""
        self._check_accum_op('argsort', datetime_series, check_dtype=False)
        argsorted = datetime_series.argsort()
        assert issubclass(argsorted.dtype.type, np.integer)
        # GH 2967 (introduced bug in 0.11-dev I think)
        s = Series([Timestamp('201301%02d' % (i + 1)) for i in range(5)])
        assert s.dtype == 'datetime64[ns]'
        shifted = s.shift(-1)
        assert shifted.dtype == 'datetime64[ns]'
        assert isna(shifted[4])
        result = s.argsort()
        expected = Series(lrange(5), dtype='int64')
        assert_series_equal(result, expected)
        # The shifted series ends in NaT, which argsorts to -1.
        result = shifted.argsort()
        expected = Series(lrange(4) + [-1], dtype='int64')
        assert_series_equal(result, expected)
    def test_argsort_stable(self):
        """argsort(kind='mergesort') is stable and agrees with numpy."""
        s = Series(np.random.randint(0, 100, size=10000))
        mindexer = s.argsort(kind='mergesort')
        qindexer = s.argsort()
        mexpected = np.argsort(s.values, kind='mergesort')
        qexpected = np.argsort(s.values, kind='quicksort')
        tm.assert_series_equal(mindexer, Series(mexpected),
                               check_dtype=False)
        tm.assert_series_equal(qindexer, Series(qexpected),
                               check_dtype=False)
        # With duplicates, stable and unstable sorts produce different
        # indexers, so comparing them must raise.
        msg = (r"ndarray Expected type <(class|type) 'numpy\.ndarray'>,"
               r" found <class 'pandas\.core\.series\.Series'> instead")
        with pytest.raises(AssertionError, match=msg):
            tm.assert_numpy_array_equal(qindexer, mindexer)
    def test_cumsum(self, datetime_series):
        """cumsum matches np.cumsum, including NaN handling."""
        self._check_accum_op('cumsum', datetime_series)
    def test_cumprod(self, datetime_series):
        """cumprod matches np.cumprod, including NaN handling."""
        self._check_accum_op('cumprod', datetime_series)
    def test_cummin(self, datetime_series):
        """cummin matches np.minimum.accumulate and skips NaN entries."""
        tm.assert_numpy_array_equal(datetime_series.cummin().values,
                                    np.minimum
                                    .accumulate(np.array(datetime_series)))
        # With NaNs interleaved, the non-NaN positions must still match
        # the accumulation over the NaN-free series.
        ts = datetime_series.copy()
        ts[::2] = np.NaN
        result = ts.cummin()[1::2]
        expected = np.minimum.accumulate(ts.dropna())
        tm.assert_series_equal(result, expected)
    def test_cummax(self, datetime_series):
        """cummax matches np.maximum.accumulate and skips NaN entries."""
        tm.assert_numpy_array_equal(datetime_series.cummax().values,
                                    np.maximum
                                    .accumulate(np.array(datetime_series)))
        # With NaNs interleaved, the non-NaN positions must still match
        # the accumulation over the NaN-free series.
        ts = datetime_series.copy()
        ts[::2] = np.NaN
        result = ts.cummax()[1::2]
        expected = np.maximum.accumulate(ts.dropna())
        tm.assert_series_equal(result, expected)
    def test_cummin_datetime64(self):
        """cummin on datetime64: NaT preserved with skipna, propagated without."""
        s = pd.Series(pd.to_datetime(['NaT', '2000-1-2', 'NaT', '2000-1-1',
                                      'NaT', '2000-1-3']))
        expected = pd.Series(pd.to_datetime(['NaT', '2000-1-2', 'NaT',
                                             '2000-1-1', 'NaT', '2000-1-1']))
        result = s.cummin(skipna=True)
        tm.assert_series_equal(expected, result)
        # skipna=False: once a value is seen it is carried forward
        # through subsequent NaT positions.
        expected = pd.Series(pd.to_datetime(
            ['NaT', '2000-1-2', '2000-1-2', '2000-1-1', '2000-1-1', '2000-1-1'
             ]))
        result = s.cummin(skipna=False)
        tm.assert_series_equal(expected, result)
    def test_cummax_datetime64(self):
        """cummax on datetime64: NaT preserved with skipna, propagated without."""
        s = pd.Series(pd.to_datetime(['NaT', '2000-1-2', 'NaT', '2000-1-1',
                                      'NaT', '2000-1-3']))
        expected = pd.Series(pd.to_datetime(['NaT', '2000-1-2', 'NaT',
                                             '2000-1-2', 'NaT', '2000-1-3']))
        result = s.cummax(skipna=True)
        tm.assert_series_equal(expected, result)
        # skipna=False: once a value is seen it is carried forward
        # through subsequent NaT positions.
        expected = pd.Series(pd.to_datetime(
            ['NaT', '2000-1-2', '2000-1-2', '2000-1-2', '2000-1-2', '2000-1-3'
             ]))
        result = s.cummax(skipna=False)
        tm.assert_series_equal(expected, result)
    def test_cummin_timedelta64(self):
        """cummin on timedelta64: NaT preserved with skipna, propagated without."""
        s = pd.Series(pd.to_timedelta(['NaT',
                                       '2 min',
                                       'NaT',
                                       '1 min',
                                       'NaT',
                                       '3 min', ]))
        expected = pd.Series(pd.to_timedelta(['NaT',
                                              '2 min',
                                              'NaT',
                                              '1 min',
                                              'NaT',
                                              '1 min', ]))
        result = s.cummin(skipna=True)
        tm.assert_series_equal(expected, result)
        # skipna=False fills trailing NaT positions with the running minimum.
        expected = pd.Series(pd.to_timedelta(['NaT',
                                              '2 min',
                                              '2 min',
                                              '1 min',
                                              '1 min',
                                              '1 min', ]))
        result = s.cummin(skipna=False)
        tm.assert_series_equal(expected, result)
    def test_cummax_timedelta64(self):
        """cummax on timedelta64: NaT preserved with skipna, propagated without."""
        s = pd.Series(pd.to_timedelta(['NaT',
                                       '2 min',
                                       'NaT',
                                       '1 min',
                                       'NaT',
                                       '3 min', ]))
        expected = pd.Series(pd.to_timedelta(['NaT',
                                              '2 min',
                                              'NaT',
                                              '2 min',
                                              'NaT',
                                              '3 min', ]))
        result = s.cummax(skipna=True)
        tm.assert_series_equal(expected, result)
        # skipna=False fills trailing NaT positions with the running maximum.
        expected = pd.Series(pd.to_timedelta(['NaT',
                                              '2 min',
                                              '2 min',
                                              '2 min',
                                              '2 min',
                                              '3 min', ]))
        result = s.cummax(skipna=False)
        tm.assert_series_equal(expected, result)
    def test_npdiff(self):
        """Skipped: np.diff on a Series now returns an ndarray."""
        pytest.skip("skipping due to Series no longer being an "
                    "ndarray")
        # no longer works as the return type of np.diff is now nd.array
        s = Series(np.arange(5))
        r = np.diff(s)
        assert_series_equal(Series([nan, 0, 0, 0, nan]), r)
    def _check_accum_op(self, name, datetime_series_, check_dtype=True):
        """Compare the Series accumulation op *name* against its numpy twin."""
        func = getattr(np, name)
        tm.assert_numpy_array_equal(func(datetime_series_).values,
                                    func(np.array(datetime_series_)),
                                    check_dtype=check_dtype)
        # with missing values
        ts = datetime_series_.copy()
        ts[::2] = np.NaN
        result = func(ts)[1::2]
        expected = func(np.array(ts.dropna()))
        tm.assert_numpy_array_equal(result.values, expected,
                                    check_dtype=False)
    def test_compress(self):
        """Series.compress works but is deprecated (emits FutureWarning)."""
        cond = [True, False, True, False, False]
        s = Series([1, -1, 5, 8, 7],
                   index=list('abcde'), name='foo')
        expected = Series(s.values.compress(cond),
                          index=list('ac'), name='foo')
        with tm.assert_produces_warning(FutureWarning):
            result = s.compress(cond)
        tm.assert_series_equal(result, expected)
def test_numpy_compress(self):
    """np.compress dispatches to the deprecated Series.compress; the
    unsupported 'axis'/'out' parameters must raise ValueError.

    check_stacklevel=False because the warning is raised from inside
    numpy's dispatch, not directly at the call site.
    """
    cond = [True, False, True, False, False]
    s = Series([1, -1, 5, 8, 7],
               index=list('abcde'), name='foo')
    expected = Series(s.values.compress(cond),
                      index=list('ac'), name='foo')
    with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
        tm.assert_series_equal(np.compress(cond, s), expected)

    with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
        msg = "the 'axis' parameter is not supported"
        with pytest.raises(ValueError, match=msg):
            np.compress(cond, s, axis=1)

        msg = "the 'out' parameter is not supported"
        with pytest.raises(ValueError, match=msg):
            np.compress(cond, s, out=s)
def test_round(self, datetime_series):
    """Series.round matches np.round on the values and preserves index
    and name.
    """
    datetime_series.index.name = "index_name"
    result = datetime_series.round(2)
    expected = Series(np.round(datetime_series.values, 2),
                      index=datetime_series.index, name='ts')
    assert_series_equal(result, expected)
    assert result.name == datetime_series.name
def test_numpy_round(self):
    """np.round on a Series works with `decimals` but rejects the
    unsupported 'out' parameter.
    """
    # See gh-12600
    s = Series([1.53, 1.36, 0.06])
    out = np.round(s, decimals=0)
    expected = Series([2., 1., 0.])
    assert_series_equal(out, expected)

    msg = "the 'out' parameter is not supported"
    with pytest.raises(ValueError, match=msg):
        np.round(s, decimals=0, out=s)
@pytest.mark.xfail(
    PY2 and is_platform_windows(), reason="numpy/numpy#7882",
    raises=AssertionError, strict=True)
def test_numpy_round_nan(self):
    """Series.round propagates NaN without emitting any warning."""
    # See gh-14197
    s = Series([1.53, np.nan, 0.06])
    with tm.assert_produces_warning(None):
        result = s.round()
    expected = Series([2., np.nan, 0.])
    assert_series_equal(result, expected)
def test_built_in_round(self):
    """The builtin round() dispatches to Series.__round__ (Python 3 only),
    with and without an explicit ndigits argument.
    """
    if not compat.PY3:
        pytest.skip(
            'build in round cannot be overridden prior to Python 3')
    s = Series([1.123, 2.123, 3.123], index=lrange(3))
    result = round(s)
    expected_rounded0 = Series([1., 2., 3.], index=lrange(3))
    tm.assert_series_equal(result, expected_rounded0)

    decimals = 2
    expected_rounded = Series([1.12, 2.12, 3.12], index=lrange(3))
    result = round(s, decimals)
    tm.assert_series_equal(result, expected_rounded)
def test_prod_numpy16_bug(self):
    """Series.prod returns a scalar, never a Series (numpy 1.6
    regression guard).
    """
    ones = Series([1., 1., 1.], index=lrange(3))
    prod_result = ones.prod()
    assert not isinstance(prod_result, Series)
@td.skip_if_no_scipy
def test_corr(self, datetime_series):
    """Pearson correlation: full/partial overlap, min_periods cutoff,
    no-overlap and all-NA cases, and agreement with scipy.stats.pearsonr.
    """
    import scipy.stats as stats

    # full overlap: a series is perfectly correlated with itself
    tm.assert_almost_equal(datetime_series.corr(datetime_series), 1)

    # partial overlap
    tm.assert_almost_equal(datetime_series[:15].corr(datetime_series[5:]),
                           1)

    # min_periods larger than the 10-element overlap -> NaN
    assert isna(datetime_series[:15].corr(datetime_series[5:],
                                          min_periods=12))

    ts1 = datetime_series[:15].reindex(datetime_series.index)
    ts2 = datetime_series[5:].reindex(datetime_series.index)
    assert isna(ts1.corr(ts2, min_periods=12))

    # No overlap
    assert np.isnan(datetime_series[::2].corr(datetime_series[1::2]))

    # all NA
    cp = datetime_series[:10].copy()
    cp[:] = np.nan
    assert isna(cp.corr(cp))

    A = tm.makeTimeSeries()
    B = tm.makeTimeSeries()
    result = A.corr(B)
    expected, _ = stats.pearsonr(A, B)
    tm.assert_almost_equal(result, expected)
@td.skip_if_no_scipy
def test_corr_rank(self):
    """Kendall and Spearman correlations agree with scipy and with
    reference values computed in R.
    """
    import scipy
    import scipy.stats as stats

    # kendall and spearman
    A = tm.makeTimeSeries()
    B = tm.makeTimeSeries()
    A[-5:] = A[:5]
    result = A.corr(B, method='kendall')
    expected = stats.kendalltau(A, B)[0]
    tm.assert_almost_equal(result, expected)

    result = A.corr(B, method='spearman')
    expected = stats.spearmanr(A, B)[0]
    tm.assert_almost_equal(result, expected)

    # these methods got rewritten in 0.8
    if LooseVersion(scipy.__version__) < LooseVersion('0.9'):
        pytest.skip("skipping corr rank because of scipy version "
                    "{0}".format(scipy.__version__))

    # results from R
    A = Series(
        [-0.89926396, 0.94209606, -1.03289164, -0.95445587, 0.76910310, -
         0.06430576, -2.09704447, 0.40660407, -0.89926396, 0.94209606])
    B = Series(
        [-1.01270225, -0.62210117, -1.56895827, 0.59592943, -0.01680292,
         1.17258718, -1.06009347, -0.10222060, -0.89076239, 0.89372375])
    kexp = 0.4319297
    sexp = 0.5853767
    tm.assert_almost_equal(A.corr(B, method='kendall'), kexp)
    tm.assert_almost_equal(A.corr(B, method='spearman'), sexp)
def test_corr_invalid_method(self):
    """An unknown correlation method name raises ValueError."""
    # GH PR #22298
    left = pd.Series(np.random.randn(10))
    right = pd.Series(np.random.randn(10))
    msg = ("method must be either 'pearson', 'spearman', "
           "or 'kendall'")
    with pytest.raises(ValueError, match=msg):
        left.corr(right, method="____")
def test_corr_callable_method(self, datetime_series):
    """corr accepts a user-supplied callable as `method`, for Series and
    (via transpose) DataFrame correlation.
    """
    # simple correlation example
    # returns 1 if exact equality, 0 otherwise
    my_corr = lambda a, b: 1. if (a == b).all() else 0.

    # simple example
    s1 = Series([1, 2, 3, 4, 5])
    s2 = Series([5, 4, 3, 2, 1])
    expected = 0
    tm.assert_almost_equal(
        s1.corr(s2, method=my_corr),
        expected)

    # full overlap
    tm.assert_almost_equal(datetime_series.corr(
        datetime_series, method=my_corr), 1.)

    # partial overlap
    tm.assert_almost_equal(datetime_series[:15].corr(
        datetime_series[5:], method=my_corr), 1.)

    # No overlap
    assert np.isnan(datetime_series[::2].corr(
        datetime_series[1::2], method=my_corr))

    # dataframe example
    df = pd.DataFrame([s1, s2])
    expected = pd.DataFrame([
        {0: 1., 1: 0}, {0: 0, 1: 1.}])
    tm.assert_almost_equal(
        df.transpose().corr(method=my_corr), expected)
def test_cov(self, datetime_series):
    """Series.cov: full/partial overlap (cov(x, x) == var(x)),
    no-overlap, all-NA, and the min_periods cutoff.
    """
    # full overlap
    tm.assert_almost_equal(datetime_series.cov(datetime_series),
                           datetime_series.std() ** 2)

    # partial overlap
    tm.assert_almost_equal(datetime_series[:15].cov(datetime_series[5:]),
                           datetime_series[5:15].std() ** 2)

    # No overlap
    assert np.isnan(datetime_series[::2].cov(datetime_series[1::2]))

    # all NA
    cp = datetime_series[:10].copy()
    cp[:] = np.nan
    assert isna(cp.cov(cp))

    # min_periods
    assert isna(datetime_series[:15].cov(datetime_series[5:],
                min_periods=12))

    ts1 = datetime_series[:15].reindex(datetime_series.index)
    ts2 = datetime_series[5:].reindex(datetime_series.index)
    assert isna(ts1.cov(ts2, min_periods=12))
def test_count(self, datetime_series):
    """Series.count ignores NaN; count(level=...) groups a MultiIndex
    level (a NaN label forms its own group, then loses counts as values
    are set to NaN).
    """
    assert datetime_series.count() == len(datetime_series)

    datetime_series[::2] = np.NaN

    assert datetime_series.count() == np.isfinite(datetime_series).sum()

    mi = MultiIndex.from_arrays([list('aabbcc'), [1, 2, 2, nan, 1, 2]])
    ts = Series(np.arange(len(mi)), index=mi)

    left = ts.count(level=1)
    right = Series([2, 3, 1], index=[1, 2, nan])
    assert_series_equal(left, right)

    # setting one value per level-1 group to NaN drops each count by one
    ts.iloc[[0, 3, 5]] = nan
    assert_series_equal(ts.count(level=1), right - 1)
def test_dot(self):
    """Series.dot against DataFrame, ndarray and Series arguments, plus
    shape-mismatch and misalignment errors.
    """
    a = Series(np.random.randn(4), index=['p', 'q', 'r', 's'])
    b = DataFrame(np.random.randn(3, 4), index=['1', '2', '3'],
                  columns=['p', 'q', 'r', 's']).T

    result = a.dot(b)
    expected = Series(np.dot(a.values, b.values), index=['1', '2', '3'])
    assert_series_equal(result, expected)

    # Check index alignment: dot aligns on the index, so a frame with the
    # same labels in reversed order must give the same answer.
    # BUG FIX: this previously called a.dot(b), so b2 was never used and
    # the alignment path went untested.
    b2 = b.reindex(index=reversed(b.index))
    result = a.dot(b2)
    assert_series_equal(result, expected)

    # Check ndarray argument (no alignment possible, positional only)
    result = a.dot(b.values)
    assert np.all(result == expected.values)
    assert_almost_equal(a.dot(b['2'].values), expected['2'])

    # Check series argument (aligned, including the reordered b2)
    assert_almost_equal(a.dot(b['1']), expected['1'])
    assert_almost_equal(a.dot(b2['1']), expected['1'])

    msg = r"Dot product shape mismatch, \(4L?,\) vs \(3L?,\)"
    # exception raised is of type Exception
    with pytest.raises(Exception, match=msg):
        a.dot(a.values[:3])
    msg = "matrices are not aligned"
    with pytest.raises(ValueError, match=msg):
        a.dot(b.T)
@pytest.mark.skipif(not PY35,
                    reason='matmul supported for Python>=3.5')
def test_matmul(self):
    """The @ operator (operator.matmul) on Series/DataFrame combinations,
    including __rmatmul__ with ndarray/list left operands, mixed dtypes,
    and the same error cases as Series.dot.
    """
    # matmul test is for GH #10259
    a = Series(np.random.randn(4), index=['p', 'q', 'r', 's'])
    b = DataFrame(np.random.randn(3, 4), index=['1', '2', '3'],
                  columns=['p', 'q', 'r', 's']).T

    # Series @ DataFrame
    result = operator.matmul(a, b)
    expected = Series(np.dot(a.values, b.values), index=['1', '2', '3'])
    assert_series_equal(result, expected)

    # DataFrame @ Series
    result = operator.matmul(b.T, a)
    expected = Series(np.dot(b.T.values, a.T.values),
                      index=['1', '2', '3'])
    assert_series_equal(result, expected)

    # Series @ Series
    result = operator.matmul(a, a)
    expected = np.dot(a.values, a.values)
    assert_almost_equal(result, expected)

    # GH 21530
    # vector (1D np.array) @ Series (__rmatmul__)
    result = operator.matmul(a.values, a)
    expected = np.dot(a.values, a.values)
    assert_almost_equal(result, expected)

    # GH 21530
    # vector (1D list) @ Series (__rmatmul__)
    result = operator.matmul(a.values.tolist(), a)
    expected = np.dot(a.values, a.values)
    assert_almost_equal(result, expected)

    # GH 21530
    # matrix (2D np.array) @ Series (__rmatmul__)
    result = operator.matmul(b.T.values, a)
    expected = np.dot(b.T.values, a.values)
    assert_almost_equal(result, expected)

    # GH 21530
    # matrix (2D nested lists) @ Series (__rmatmul__)
    result = operator.matmul(b.T.values.tolist(), a)
    expected = np.dot(b.T.values, a.values)
    assert_almost_equal(result, expected)

    # mixed dtype DataFrame @ Series
    a['p'] = int(a.p)
    result = operator.matmul(b.T, a)
    expected = Series(np.dot(b.T.values, a.T.values),
                      index=['1', '2', '3'])
    assert_series_equal(result, expected)

    # different dtypes DataFrame @ Series
    a = a.astype(int)
    result = operator.matmul(b.T, a)
    expected = Series(np.dot(b.T.values, a.T.values),
                      index=['1', '2', '3'])
    assert_series_equal(result, expected)

    msg = r"Dot product shape mismatch, \(4,\) vs \(3,\)"
    # exception raised is of type Exception
    with pytest.raises(Exception, match=msg):
        a.dot(a.values[:3])
    msg = "matrices are not aligned"
    with pytest.raises(ValueError, match=msg):
        a.dot(b.T)
def test_clip(self, datetime_series):
    """clip/clip_lower/clip_upper bound values; the *_lower/*_upper
    variants are deprecated and must emit a FutureWarning.
    """
    val = datetime_series.median()

    with tm.assert_produces_warning(FutureWarning):
        assert datetime_series.clip_lower(val).min() == val
    with tm.assert_produces_warning(FutureWarning):
        assert datetime_series.clip_upper(val).max() == val

    assert datetime_series.clip(lower=val).min() == val
    assert datetime_series.clip(upper=val).max() == val

    result = datetime_series.clip(-0.5, 0.5)
    expected = np.clip(datetime_series, -0.5, 0.5)
    assert_series_equal(result, expected)
    assert isinstance(expected, Series)
def test_clip_types_and_nulls(self):
    """clip_lower/clip_upper (deprecated) bound float, object and
    datetime Series while leaving null positions untouched.
    """
    sers = [Series([np.nan, 1.0, 2.0, 3.0]), Series([None, 'a', 'b', 'c']),
            Series(pd.to_datetime(
                [np.nan, 1, 2, 3], unit='D'))]

    for s in sers:
        thresh = s[2]
        with tm.assert_produces_warning(FutureWarning):
            lower = s.clip_lower(thresh)
        with tm.assert_produces_warning(FutureWarning):
            upper = s.clip_upper(thresh)
        assert lower[notna(lower)].min() == thresh
        assert upper[notna(upper)].max() == thresh
        # null positions must survive clipping unchanged
        assert list(isna(s)) == list(isna(lower))
        assert list(isna(s)) == list(isna(upper))
def test_clip_with_na_args(self):
    """A scalar np.nan bound is treated as None (no clipping), while a
    list-like bound containing NaN clips element-wise.
    """
    # GH # 17276
    s = Series([1, 2, 3])

    assert_series_equal(s.clip(np.nan), Series([1, 2, 3]))
    assert_series_equal(s.clip(upper=np.nan, lower=np.nan),
                        Series([1, 2, 3]))

    # GH #19992
    assert_series_equal(s.clip(lower=[0, 4, np.nan]),
                        Series([1, 4, np.nan]))
    assert_series_equal(s.clip(upper=[1, np.nan, 1]),
                        Series([1, np.nan, 1]))
def test_clip_against_series(self):
    """Clipping element-wise against Series thresholds, via the
    deprecated clip_lower/clip_upper and via clip(lower, upper).
    """
    # GH #6966

    s = Series([1.0, 1.0, 4.0])
    threshold = Series([1.0, 2.0, 3.0])

    with tm.assert_produces_warning(FutureWarning):
        assert_series_equal(s.clip_lower(threshold),
                            Series([1.0, 2.0, 4.0]))
    with tm.assert_produces_warning(FutureWarning):
        assert_series_equal(s.clip_upper(threshold),
                            Series([1.0, 1.0, 3.0]))

    lower = Series([1.0, 2.0, 3.0])
    upper = Series([1.5, 2.5, 3.5])

    assert_series_equal(s.clip(lower, upper), Series([1.0, 2.0, 3.5]))
    assert_series_equal(s.clip(1.5, upper), Series([1.5, 1.5, 3.5]))
@pytest.mark.parametrize("inplace", [True, False])
@pytest.mark.parametrize("upper", [[1, 2, 3], np.asarray([1, 2, 3])])
def test_clip_against_list_like(self, inplace, upper):
    """Element-wise clipping against list/ndarray upper bounds, with and
    without inplace=True.
    """
    # GH #15390
    original = pd.Series([5, 6, 7])
    result = original.clip(upper=upper, inplace=inplace)
    expected = pd.Series([1, 2, 3])

    if inplace:
        # inplace clip returns None; the mutation happened on `original`
        result = original
    tm.assert_series_equal(result, expected, check_exact=True)
def test_clip_with_datetimes(self):
    """clip with a Timestamp upper bound works for both naive and
    tz-aware datetime Series.
    """
    # GH 11838
    # naive and tz-aware datetimes

    t = Timestamp('2015-12-01 09:30:30')
    s = Series([Timestamp('2015-12-01 09:30:00'),
                Timestamp('2015-12-01 09:31:00')])
    result = s.clip(upper=t)
    expected = Series([Timestamp('2015-12-01 09:30:00'),
                       Timestamp('2015-12-01 09:30:30')])
    assert_series_equal(result, expected)

    t = Timestamp('2015-12-01 09:30:30', tz='US/Eastern')
    s = Series([Timestamp('2015-12-01 09:30:00', tz='US/Eastern'),
                Timestamp('2015-12-01 09:31:00', tz='US/Eastern')])
    result = s.clip(upper=t)
    expected = Series([Timestamp('2015-12-01 09:30:00', tz='US/Eastern'),
                       Timestamp('2015-12-01 09:30:30', tz='US/Eastern')])
    assert_series_equal(result, expected)
def test_cummethods_bool(self):
    """Cumulative methods on boolean Series match the numpy accumulators;
    a NaN entry forces object dtype with NaN propagated in place.
    """
    # GH 6270

    a = pd.Series([False, False, False, True, True, False, False])
    b = ~a
    c = pd.Series([False] * len(b))
    d = ~c
    methods = {'cumsum': np.cumsum,
               'cumprod': np.cumprod,
               'cummin': np.minimum.accumulate,
               'cummax': np.maximum.accumulate}
    args = product((a, b, c, d), methods)
    for s, method in args:
        expected = Series(methods[method](s.values))
        result = getattr(s, method)()
        assert_series_equal(result, expected)

    e = pd.Series([False, True, nan, False])
    cse = pd.Series([0, 1, nan, 1], dtype=object)
    cpe = pd.Series([False, 0, nan, 0])
    cmin = pd.Series([False, False, nan, False])
    cmax = pd.Series([False, True, nan, True])
    expecteds = {'cumsum': cse,
                 'cumprod': cpe,
                 'cummin': cmin,
                 'cummax': cmax}

    for method in methods:
        res = getattr(e, method)()
        assert_series_equal(res, expecteds[method])
def test_isin(self):
    """Basic isin membership, plus the large-input case (>1e6 rows) where
    numpy's mixed-type comparison path would otherwise raise.
    """
    s = Series(['A', 'B', 'C', 'a', 'B', 'B', 'A', 'C'])

    result = s.isin(['A', 'C'])
    expected = Series([True, False, True, False, False, False, True, True])
    assert_series_equal(result, expected)

    # GH: 16012
    # This specific issue has to have a series over 1e6 in len, but the
    # comparison array (in_list) must be large enough so that numpy doesn't
    # do a manual masking trick that will avoid this issue altogether
    s = Series(list('abcdefghijk' * 10 ** 5))
    # If numpy doesn't do the manual comparison/mask, these
    # unorderable mixed types are what cause the exception in numpy
    in_list = [-1, 'a', 'b', 'G', 'Y', 'Z', 'E',
               'K', 'E', 'S', 'I', 'R', 'R'] * 6

    assert s.isin(in_list).sum() == 200000
def test_isin_with_string_scalar(self):
    """Passing a bare string (not a list-like) to isin raises TypeError."""
    # GH4763
    s = Series(['A', 'B', 'C', 'a', 'B', 'B', 'A', 'C'])
    msg = (r"only list-like objects are allowed to be passed to isin\(\),"
           r" you passed a \[str\]")
    with pytest.raises(TypeError, match=msg):
        s.isin('a')

    s = Series(['aaa', 'b', 'c'])
    with pytest.raises(TypeError, match=msg):
        s.isin('aaa')
def test_isin_with_i8(self):
    """isin on datetime64/timedelta64 Series accepts Series, ndarray
    (including a coarser datetime64[D] unit), lists and sets of values.
    """
    # GH 5021

    expected = Series([True, True, False, False, False])
    expected2 = Series([False, True, False, False, False])

    # datetime64[ns]
    s = Series(date_range('jan-01-2013', 'jan-05-2013'))

    result = s.isin(s[0:2])
    assert_series_equal(result, expected)

    result = s.isin(s[0:2].values)
    assert_series_equal(result, expected)

    # fails on dtype conversion in the first place
    result = s.isin(s[0:2].values.astype('datetime64[D]'))
    assert_series_equal(result, expected)

    result = s.isin([s[1]])
    assert_series_equal(result, expected2)

    result = s.isin([np.datetime64(s[1])])
    assert_series_equal(result, expected2)

    result = s.isin(set(s[0:2]))
    assert_series_equal(result, expected)

    # timedelta64[ns]
    s = Series(pd.to_timedelta(lrange(5), unit='d'))
    result = s.isin(s[0:2])
    assert_series_equal(result, expected)
@pytest.mark.parametrize("empty", [[], Series(), np.array([])])
def test_isin_empty(self, empty):
    """isin against any empty collection is all-False."""
    # see gh-16991
    values = Series(["a", "b"])
    all_false = Series([False, False])

    tm.assert_series_equal(all_false, values.isin(empty))
def test_ptp(self):
    """Series.ptp is deprecated (FutureWarning) but still computes
    max - min, supports skipna and MultiIndex levels, and rejects
    axis=1 / non-numeric dtypes / numeric_only.
    """
    # GH21614
    N = 1000
    arr = np.random.randn(N)
    ser = Series(arr)
    with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
        assert np.ptp(ser) == np.ptp(arr)

    # GH11163
    s = Series([3, 5, np.nan, -3, 10])
    with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
        assert s.ptp() == 13
        assert pd.isna(s.ptp(skipna=False))

    mi = pd.MultiIndex.from_product([['a', 'b'], [1, 2, 3]])
    s = pd.Series([1, np.nan, 7, 3, 5, np.nan], index=mi)

    expected = pd.Series([6, 2], index=['a', 'b'], dtype=np.float64)
    with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
        tm.assert_series_equal(s.ptp(level=0), expected)

    expected = pd.Series([np.nan, np.nan], index=['a', 'b'])
    with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
        tm.assert_series_equal(s.ptp(level=0, skipna=False), expected)

    msg = r"No axis named 1 for object type <(class|type) 'type'>"
    with pytest.raises(ValueError, match=msg):
        with tm.assert_produces_warning(FutureWarning,
                                        check_stacklevel=False):
            s.ptp(axis=1)

    s = pd.Series(['a', 'b', 'c', 'd', 'e'])
    msg = r"unsupported operand type\(s\) for -: 'str' and 'str'"
    with pytest.raises(TypeError, match=msg):
        with tm.assert_produces_warning(FutureWarning,
                                        check_stacklevel=False):
            s.ptp()

    msg = r"Series\.ptp does not implement numeric_only\."
    with pytest.raises(NotImplementedError, match=msg):
        with tm.assert_produces_warning(FutureWarning,
                                        check_stacklevel=False):
            s.ptp(numeric_only=True)
def test_repeat(self):
    """Series.repeat with a scalar count and with per-element counts;
    the index is repeated in lockstep with the values.
    """
    s = Series(np.random.randn(3), index=['a', 'b', 'c'])

    reps = s.repeat(5)
    exp = Series(s.values.repeat(5), index=s.index.values.repeat(5))
    assert_series_equal(reps, exp)

    to_rep = [2, 3, 4]
    reps = s.repeat(to_rep)
    exp = Series(s.values.repeat(to_rep),
                 index=s.index.values.repeat(to_rep))
    assert_series_equal(reps, exp)
def test_numpy_repeat(self):
    """np.repeat dispatches to Series.repeat; the unsupported 'axis'
    parameter raises ValueError.
    """
    s = Series(np.arange(3), name='x')
    expected = Series(s.values.repeat(2), name='x',
                      index=s.index.values.repeat(2))
    assert_series_equal(np.repeat(s, 2), expected)

    msg = "the 'axis' parameter is not supported"
    with pytest.raises(ValueError, match=msg):
        np.repeat(s, 2, axis=0)
def test_searchsorted(self):
s = Series([1, 2, 3])
result = s.searchsorted(1, side='left')
assert is_scalar(result)
assert result == 0
result = s.searchsorted(1, side='right')
assert is_scalar(result)
assert result == 1
def test_searchsorted_numeric_dtypes_scalar(self):
    """A scalar needle yields a scalar position; a one-element list
    needle yields a one-element intp ndarray.
    """
    ser = Series([1, 2, 90, 1000, 3e9])

    scalar_pos = ser.searchsorted(30)
    assert is_scalar(scalar_pos)
    assert scalar_pos == 2

    array_pos = ser.searchsorted([30])
    tm.assert_numpy_array_equal(array_pos, np.array([2], dtype=np.intp))
def test_searchsorted_numeric_dtypes_vector(self):
    """A list of needles yields an intp ndarray of insertion points."""
    ser = Series([1, 2, 90, 1000, 3e9])

    positions = ser.searchsorted([91, 2e6])
    tm.assert_numpy_array_equal(positions,
                                np.array([3, 4], dtype=np.intp))
def test_search_sorted_datetime64_scalar(self):
s = Series(pd.date_range('20120101', periods=10, freq='2D'))
v = pd.Timestamp('20120102')
r = s.searchsorted(v)
assert is_scalar(r)
assert r == 1
def test_search_sorted_datetime64_list(self):
    """A list of Timestamp needles on a datetime64 Series yields an
    intp ndarray of insertion points.
    """
    dates = Series(pd.date_range('20120101', periods=10, freq='2D'))
    needles = [pd.Timestamp('20120102'), pd.Timestamp('20120104')]

    positions = dates.searchsorted(needles)
    tm.assert_numpy_array_equal(positions,
                                np.array([1, 2], dtype=np.intp))
def test_searchsorted_sorter(self):
    """searchsorted accepts a `sorter` permutation for an unsorted
    Series, mirroring np.searchsorted.
    """
    # GH8490
    unsorted = Series([3, 1, 2])

    positions = unsorted.searchsorted([0, 3], sorter=np.argsort(unsorted))
    tm.assert_numpy_array_equal(positions,
                                np.array([0, 2], dtype=np.intp))
def test_is_monotonic(self):
    """is_monotonic / is_monotonic_increasing / is_monotonic_decreasing
    on numeric and datetime Series.
    """
    rand = Series(np.random.randint(0, 10, size=1000))
    assert not rand.is_monotonic

    ascending = Series(np.arange(1000))
    assert ascending.is_monotonic is True
    assert ascending.is_monotonic_increasing is True

    descending = Series(np.arange(1000, 0, -1))
    assert descending.is_monotonic_decreasing is True

    dates = Series(pd.date_range('20130101', periods=10))
    assert dates.is_monotonic is True
    assert dates.is_monotonic_increasing is True

    reversed_dates = Series(list(reversed(dates.tolist())))
    assert reversed_dates.is_monotonic is False
    assert reversed_dates.is_monotonic_decreasing is True
def test_sort_index_level(self):
    """sort_index(level=...) sorts the requested MultiIndex level(s);
    sort_remaining=False leaves the remaining levels' order intact.
    """
    mi = MultiIndex.from_tuples([[1, 1, 3], [1, 1, 1]], names=list('ABC'))
    s = Series([1, 2], mi)
    backwards = s.iloc[[1, 0]]

    # sorting by 'A' alone or by ['A', 'B'] reverses the two rows;
    # with sort_remaining=False the original order is preserved
    for level in ('A', ['A', 'B']):
        assert_series_equal(backwards, s.sort_index(level=level))
        assert_series_equal(s, s.sort_index(level=level,
                                            sort_remaining=False))
def test_apply_categorical(self):
    """apply on a categorical Series keeps categorical dtype when the
    mapped categories stay distinct, and falls back to object dtype when
    they collapse.
    """
    values = pd.Categorical(list('ABBABCD'), categories=list('DCBA'),
                            ordered=True)
    s = pd.Series(values, name='XX', index=list('abcdefg'))
    result = s.apply(lambda x: x.lower())

    # should be categorical dtype when the number of categories are
    # the same
    values = pd.Categorical(list('abbabcd'), categories=list('dcba'),
                            ordered=True)
    exp = pd.Series(values, name='XX', index=list('abcdefg'))
    tm.assert_series_equal(result, exp)
    tm.assert_categorical_equal(result.values, exp.values)

    result = s.apply(lambda x: 'A')
    exp = pd.Series(['A'] * 7, name='XX', index=list('abcdefg'))
    tm.assert_series_equal(result, exp)
    assert result.dtype == np.object
def test_shift_int(self, datetime_series):
    """Shifting an int Series matches an explicit float cast + shift
    (the NaN fill forces float dtype).
    """
    as_int = datetime_series.astype(int)
    assert_series_equal(as_int.shift(1),
                        as_int.astype(float).shift(1))
def test_shift_categorical(self):
    """shift on a categorical Series introduces -1 codes (NaN) at the
    shifted-in positions and preserves the categories.
    """
    # GH 9416
    s = pd.Series(['a', 'b', 'c', 'd'], dtype='category')

    # round-trip: shift forward then back drops the shifted-off edge
    assert_series_equal(s.iloc[:-1], s.shift(1).shift(-1).dropna())

    sp1 = s.shift(1)
    assert_index_equal(s.index, sp1.index)
    assert np.all(sp1.values.codes[:1] == -1)
    assert np.all(s.values.codes[:-1] == sp1.values.codes[1:])

    sn2 = s.shift(-2)
    assert_index_equal(s.index, sn2.index)
    assert np.all(sn2.values.codes[-2:] == -1)
    assert np.all(s.values.codes[2:] == sn2.values.codes[:-2])

    assert_index_equal(s.values.categories, sp1.values.categories)
    assert_index_equal(s.values.categories, sn2.values.categories)
def test_unstack(self):
    """Series.unstack on MultiIndex input: default and explicit level,
    plus NaN labels in the index (GH5873).
    """
    from numpy import nan

    index = MultiIndex(levels=[['bar', 'foo'], ['one', 'three', 'two']],
                       codes=[[1, 1, 0, 0], [0, 1, 0, 2]])

    s = Series(np.arange(4.), index=index)
    unstacked = s.unstack()

    expected = DataFrame([[2., nan, 3.], [0., 1., nan]],
                         index=['bar', 'foo'],
                         columns=['one', 'three', 'two'])

    assert_frame_equal(unstacked, expected)

    # unstacking level 0 is the transpose of unstacking the last level
    unstacked = s.unstack(level=0)
    assert_frame_equal(unstacked, expected.T)

    index = MultiIndex(levels=[['bar'], ['one', 'two', 'three'], [0, 1]],
                       codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2],
                              [0, 1, 0, 1, 0, 1]])
    s = Series(np.random.randn(6), index=index)
    exp_index = MultiIndex(levels=[['one', 'two', 'three'], [0, 1]],
                           codes=[[0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]])
    expected = DataFrame({'bar': s.values},
                         index=exp_index).sort_index(level=0)
    unstacked = s.unstack(0).sort_index()
    assert_frame_equal(unstacked, expected)

    # GH5873: NaN labels in the index become NaN column/row labels
    idx = pd.MultiIndex.from_arrays([[101, 102], [3.5, np.nan]])
    ts = pd.Series([1, 2], index=idx)
    left = ts.unstack()
    right = DataFrame([[nan, 1], [2, nan]], index=[101, 102],
                      columns=[nan, 3.5])
    assert_frame_equal(left, right)

    idx = pd.MultiIndex.from_arrays([['cat', 'cat', 'cat', 'dog', 'dog'
                                      ], ['a', 'a', 'b', 'a', 'b'],
                                     [1, 2, 1, 1, np.nan]])
    ts = pd.Series([1.0, 1.1, 1.2, 1.3, 1.4], index=idx)
    right = DataFrame([[1.0, 1.3], [1.1, nan], [nan, 1.4], [1.2, nan]],
                      columns=['cat', 'dog'])
    tpls = [('a', 1), ('a', 2), ('b', nan), ('b', 1)]
    right.index = pd.MultiIndex.from_tuples(tpls)
    assert_frame_equal(ts.unstack(level=0), right)
def test_value_counts_datetime(self):
    """value_counts on datetime values, for both Series and
    DatetimeIndex, including normalize=True.
    """
    # most dtypes are tested in test_base.py
    values = [pd.Timestamp('2011-01-01 09:00'),
              pd.Timestamp('2011-01-01 10:00'),
              pd.Timestamp('2011-01-01 11:00'),
              pd.Timestamp('2011-01-01 09:00'),
              pd.Timestamp('2011-01-01 09:00'),
              pd.Timestamp('2011-01-01 11:00')]

    exp_idx = pd.DatetimeIndex(['2011-01-01 09:00', '2011-01-01 11:00',
                                '2011-01-01 10:00'])
    exp = pd.Series([3, 2, 1], index=exp_idx, name='xxx')

    s = pd.Series(values, name='xxx')
    tm.assert_series_equal(s.value_counts(), exp)
    # check DatetimeIndex outputs the same result
    idx = pd.DatetimeIndex(values, name='xxx')
    tm.assert_series_equal(idx.value_counts(), exp)

    # normalize
    exp = pd.Series(np.array([3., 2., 1]) / 6.,
                    index=exp_idx, name='xxx')
    tm.assert_series_equal(s.value_counts(normalize=True), exp)
    tm.assert_series_equal(idx.value_counts(normalize=True), exp)
def test_value_counts_datetime_tz(self):
    """Same as test_value_counts_datetime, but with tz-aware values:
    the timezone must be preserved on the result's index.
    """
    values = [pd.Timestamp('2011-01-01 09:00', tz='US/Eastern'),
              pd.Timestamp('2011-01-01 10:00', tz='US/Eastern'),
              pd.Timestamp('2011-01-01 11:00', tz='US/Eastern'),
              pd.Timestamp('2011-01-01 09:00', tz='US/Eastern'),
              pd.Timestamp('2011-01-01 09:00', tz='US/Eastern'),
              pd.Timestamp('2011-01-01 11:00', tz='US/Eastern')]

    exp_idx = pd.DatetimeIndex(['2011-01-01 09:00', '2011-01-01 11:00',
                                '2011-01-01 10:00'], tz='US/Eastern')
    exp = pd.Series([3, 2, 1], index=exp_idx, name='xxx')

    s = pd.Series(values, name='xxx')
    tm.assert_series_equal(s.value_counts(), exp)
    idx = pd.DatetimeIndex(values, name='xxx')
    tm.assert_series_equal(idx.value_counts(), exp)

    exp = pd.Series(np.array([3., 2., 1]) / 6.,
                    index=exp_idx, name='xxx')
    tm.assert_series_equal(s.value_counts(normalize=True), exp)
    tm.assert_series_equal(idx.value_counts(normalize=True), exp)
def test_value_counts_period(self):
    """value_counts on Period values, for both Series and PeriodIndex,
    including normalize=True.
    """
    values = [pd.Period('2011-01', freq='M'),
              pd.Period('2011-02', freq='M'),
              pd.Period('2011-03', freq='M'),
              pd.Period('2011-01', freq='M'),
              pd.Period('2011-01', freq='M'),
              pd.Period('2011-03', freq='M')]

    exp_idx = pd.PeriodIndex(['2011-01', '2011-03', '2011-02'], freq='M')
    exp = pd.Series([3, 2, 1], index=exp_idx, name='xxx')

    s = pd.Series(values, name='xxx')
    tm.assert_series_equal(s.value_counts(), exp)
    # check DatetimeIndex outputs the same result
    idx = pd.PeriodIndex(values, name='xxx')
    tm.assert_series_equal(idx.value_counts(), exp)

    # normalize
    exp = pd.Series(np.array([3., 2., 1]) / 6.,
                    index=exp_idx, name='xxx')
    tm.assert_series_equal(s.value_counts(normalize=True), exp)
    tm.assert_series_equal(idx.value_counts(normalize=True), exp)
def test_value_counts_categorical_ordered(self):
    """value_counts on ordered categorical values keeps a CategoricalIndex
    with ordered=True, for Series and CategoricalIndex alike.
    """
    # most dtypes are tested in test_base.py
    values = pd.Categorical([1, 2, 3, 1, 1, 3], ordered=True)

    exp_idx = pd.CategoricalIndex([1, 3, 2], categories=[1, 2, 3],
                                  ordered=True)
    exp = pd.Series([3, 2, 1], index=exp_idx, name='xxx')

    s = pd.Series(values, name='xxx')
    tm.assert_series_equal(s.value_counts(), exp)
    # check CategoricalIndex outputs the same result
    idx = pd.CategoricalIndex(values, name='xxx')
    tm.assert_series_equal(idx.value_counts(), exp)

    # normalize
    exp = pd.Series(np.array([3., 2., 1]) / 6.,
                    index=exp_idx, name='xxx')
    tm.assert_series_equal(s.value_counts(normalize=True), exp)
    tm.assert_series_equal(idx.value_counts(normalize=True), exp)
def test_value_counts_categorical_not_ordered(self):
    """Same as the ordered variant, but with ordered=False preserved on
    the resulting CategoricalIndex.
    """
    values = pd.Categorical([1, 2, 3, 1, 1, 3], ordered=False)

    exp_idx = pd.CategoricalIndex([1, 3, 2], categories=[1, 2, 3],
                                  ordered=False)
    exp = pd.Series([3, 2, 1], index=exp_idx, name='xxx')

    s = pd.Series(values, name='xxx')
    tm.assert_series_equal(s.value_counts(), exp)
    # check CategoricalIndex outputs the same result
    idx = pd.CategoricalIndex(values, name='xxx')
    tm.assert_series_equal(idx.value_counts(), exp)

    # normalize
    exp = pd.Series(np.array([3., 2., 1]) / 6.,
                    index=exp_idx, name='xxx')
    tm.assert_series_equal(s.value_counts(normalize=True), exp)
    tm.assert_series_equal(idx.value_counts(normalize=True), exp)
@pytest.mark.parametrize("func", [np.any, np.all])
@pytest.mark.parametrize("kwargs", [
    dict(keepdims=True),
    dict(out=object()),
])
@td.skip_if_np_lt_115
def test_validate_any_all_out_keepdims_raises(self, kwargs, func):
    """np.any/np.all on a Series reject numpy-only kwargs ('keepdims',
    'out') with a ValueError naming the offending parameter.
    """
    s = pd.Series([1, 2])
    param = list(kwargs)[0]
    name = func.__name__

    msg = (r"the '{arg}' parameter is not "
           r"supported in the pandas "
           r"implementation of {fname}\(\)").format(arg=param, fname=name)
    with pytest.raises(ValueError, match=msg):
        func(s, **kwargs)
@td.skip_if_np_lt_115
def test_validate_sum_initial(self):
    """np.sum on a Series rejects the numpy-only 'initial' parameter."""
    s = pd.Series([1, 2])
    msg = (r"the 'initial' parameter is not "
           r"supported in the pandas "
           r"implementation of sum\(\)")
    with pytest.raises(ValueError, match=msg):
        np.sum(s, initial=10)
def test_validate_median_initial(self):
    """Series.median rejects the numpy-only 'overwrite_input' parameter."""
    s = pd.Series([1, 2])
    msg = (r"the 'overwrite_input' parameter is not "
           r"supported in the pandas "
           r"implementation of median\(\)")
    with pytest.raises(ValueError, match=msg):
        # It seems like np.median doesn't dispatch, so we use the
        # method instead of the ufunc.
        s.median(overwrite_input=True)
@td.skip_if_np_lt_115
def test_validate_stat_keepdims(self):
    """np.sum on a Series rejects the numpy-only 'keepdims' parameter."""
    s = pd.Series([1, 2])
    msg = (r"the 'keepdims' parameter is not "
           r"supported in the pandas "
           r"implementation of sum\(\)")
    with pytest.raises(ValueError, match=msg):
        np.sum(s, keepdims=True)
# Column names / dtypes produced by the s_main_dtypes fixture; used to
# parametrize s_main_dtypes_split.
main_dtypes = [
    'datetime',
    'datetimetz',
    'timedelta',
    'int8',
    'int16',
    'int32',
    'int64',
    'float32',
    'float64',
    'uint8',
    'uint16',
    'uint32',
    'uint64'
]
@pytest.fixture
def s_main_dtypes():
    """A DataFrame with many dtypes

    * datetime
    * datetimetz
    * timedelta
    * [u]int{8,16,32,64}
    * float{32,64}

    The columns are the name of the dtype.  Every column holds the same
    five ordered values (3, 2, 1, 2, 5 / their datetime analogues) so
    nlargest/nsmallest expectations are identical across dtypes.
    """
    df = pd.DataFrame(
        {'datetime': pd.to_datetime(['2003', '2002',
                                     '2001', '2002',
                                     '2005']),
         'datetimetz': pd.to_datetime(
             ['2003', '2002',
              '2001', '2002',
              '2005']).tz_localize('US/Eastern'),
         'timedelta': pd.to_timedelta(['3d', '2d', '1d',
                                       '2d', '5d'])})

    for dtype in ['int8', 'int16', 'int32', 'int64',
                  'float32', 'float64',
                  'uint8', 'uint16', 'uint32', 'uint64']:
        df[dtype] = Series([3, 2, 1, 2, 5], dtype=dtype)
    return df
@pytest.fixture(params=main_dtypes)
def s_main_dtypes_split(request, s_main_dtypes):
    """Each series in s_main_dtypes, one dtype column per parametrized run."""
    return s_main_dtypes[request.param]
def assert_check_nselect_boundary(vals, dtype, method):
    """Helper for the 'test_boundary_{dtype}' tests: `vals` must be given
    in ascending order so nsmallest(3)/nlargest(3) map to fixed positions.
    """
    s = Series(vals, dtype=dtype)
    result = getattr(s, method)(3)
    expected_idxr = [0, 1, 2] if method == 'nsmallest' else [3, 2, 1]
    expected = s.loc[expected_idxr]
    tm.assert_series_equal(result, expected)
class TestNLargestNSmallest(object):
@pytest.mark.parametrize(
"r", [Series([3., 2, 1, 2, '5'], dtype='object'),
Series([3., 2, 1, 2, 5], dtype='object'),
# not supported on some archs
# Series([3., 2, 1, 2, 5], dtype='complex256'),
Series([3., 2, 1, 2, 5], dtype='complex128'),
Series(list('abcde')),
Series(list('abcde'), dtype='category')])
def test_error(self, r):
dt = r.dtype
msg = ("Cannot use method 'n(larg|small)est' with "
"dtype {dt}".format(dt=dt))
args = 2, len(r), 0, -1
methods = r.nlargest, r.nsmallest
for method, arg in product(methods, args):
with pytest.raises(TypeError, match=msg):
method(arg)
def test_nsmallest_nlargest(self, s_main_dtypes_split):
# float, int, datetime64 (use i8), timedelts64 (same),
# object that are numbers, object that are strings
s = s_main_dtypes_split
assert_series_equal(s.nsmallest(2), s.iloc[[2, 1]])
assert_series_equal(s.nsmallest(2, keep='last'), s.iloc[[2, 3]])
empty = s.iloc[0:0]
assert_series_equal(s.nsmallest(0), empty)
assert_series_equal(s.nsmallest(-1), empty)
assert_series_equal(s.nlargest(0), empty)
assert_series_equal(s.nlargest(-1), empty)
assert_series_equal(s.nsmallest(len(s)), s.sort_values())
assert_series_equal(s.nsmallest(len(s) + 1), s.sort_values())
assert_series_equal(s.nlargest(len(s)), s.iloc[[4, 0, 1, 3, 2]])
assert_series_equal(s.nlargest(len(s) + 1),
s.iloc[[4, 0, 1, 3, 2]])
def test_misc(self):
s = Series([3., np.nan, 1, 2, 5])
assert_series_equal(s.nlargest(), s.iloc[[4, 0, 3, 2]])
assert_series_equal(s.nsmallest(), s.iloc[[2, 3, 0, 4]])
msg = 'keep must be either "first", "last"'
with pytest.raises(ValueError, match=msg):
s.nsmallest(keep='invalid')
with pytest.raises(ValueError, match=msg):
s.nlargest(keep='invalid')
# GH 15297
s = Series([1] * 5, index=[1, 2, 3, 4, 5])
expected_first = Series([1] * 3, index=[1, 2, 3])
expected_last = Series([1] * 3, index=[5, 4, 3])
result = s.nsmallest(3)
assert_series_equal(result, expected_first)
result = s.nsmallest(3, keep='last')
assert_series_equal(result, expected_last)
result = s.nlargest(3)
assert_series_equal(result, expected_first)
result = s.nlargest(3, keep='last')
assert_series_equal(result, expected_last)
@pytest.mark.parametrize('n', range(1, 5))
def test_n(self, n):
# GH 13412
s = Series([1, 4, 3, 2], index=[0, 0, 1, 1])
result = s.nlargest(n)
expected = s.sort_values(ascending=False).head(n)
assert_series_equal(result, expected)
result = s.nsmallest(n)
expected = s.sort_values().head(n)
assert_series_equal(result, expected)
def test_boundary_integer(self, nselect_method, any_int_dtype):
# GH 21426
dtype_info = np.iinfo(any_int_dtype)
min_val, max_val = dtype_info.min, dtype_info.max
vals = [min_val, min_val + 1, max_val - 1, max_val]
assert_check_nselect_boundary(vals, any_int_dtype, nselect_method)
def test_boundary_float(self, nselect_method, float_dtype):
# GH 21426
dtype_info = np.finfo(float_dtype)
min_val, max_val = dtype_info.min, dtype_info.max
min_2nd, max_2nd = np.nextafter(
[min_val, max_val], 0, dtype=float_dtype)
vals = [min_val, min_2nd, max_2nd, max_val]
assert_check_nselect_boundary(vals, float_dtype, nselect_method)
    @pytest.mark.parametrize('dtype', ['datetime64[ns]', 'timedelta64[ns]'])
    def test_boundary_datetimelike(self, nselect_method, dtype):
        # GH 21426
        # use int64 bounds and +1 to min_val since true minimum is NaT
        # (include min_val/NaT at end to maintain same expected_idxr)
        dtype_info = np.iinfo('int64')
        min_val, max_val = dtype_info.min, dtype_info.max
        vals = [min_val + 1, min_val + 2, max_val - 1, max_val, min_val]
        assert_check_nselect_boundary(vals, dtype, nselect_method)
    def test_duplicate_keep_all_ties(self):
        # see gh-16818
        # keep='all' returns every row tied with the cut-off value, so
        # the result may be longer than the requested n.
        s = Series([10, 9, 8, 7, 7, 7, 7, 6])
        result = s.nlargest(4, keep='all')
        expected = Series([10, 9, 8, 7, 7, 7, 7])
        assert_series_equal(result, expected)
        result = s.nsmallest(2, keep='all')
        expected = Series([6, 7, 7, 7, 7], index=[7, 3, 4, 5, 6])
        assert_series_equal(result, expected)
class TestCategoricalSeriesAnalytics(object):
    """Reductions, value_counts and dedup behaviour on Categorical-dtype
    Series."""

    def test_count(self):
        # count() tallies only the non-NaN entries (two here).
        s = Series(Categorical([np.nan, 1, 2, np.nan],
                               categories=[5, 4, 3, 2, 1], ordered=True))
        result = s.count()
        assert result == 2

    def test_value_counts(self):
        """value_counts on categoricals includes unobserved categories
        with a count of 0 (GH 12835)."""
        # GH 12835
        cats = Categorical(list('abcccb'), categories=list('cabd'))
        s = Series(cats, name='xxx')
        res = s.value_counts(sort=False)
        # sort=False: counts follow category order; zero counts kept.
        exp_index = CategoricalIndex(list('cabd'), categories=cats.categories)
        exp = Series([3, 1, 2, 0], name='xxx', index=exp_index)
        tm.assert_series_equal(res, exp)
        res = s.value_counts(sort=True)
        # sort=True: ordered by descending count.
        exp_index = CategoricalIndex(list('cbad'), categories=cats.categories)
        exp = Series([3, 2, 1, 0], name='xxx', index=exp_index)
        tm.assert_series_equal(res, exp)
        # check object dtype handles the Series.name as the same
        # (tested in test_base.py)
        s = Series(["a", "b", "c", "c", "c", "b"], name='xxx')
        res = s.value_counts()
        exp = Series([3, 2, 1], name='xxx', index=["c", "b", "a"])
        tm.assert_series_equal(res, exp)

    def test_value_counts_with_nan(self):
        """NaN handling in value_counts for categoricals (GH 9443)."""
        # see gh-9443
        # sanity check
        s = Series(["a", "b", "a"], dtype="category")
        exp = Series([2, 1], index=CategoricalIndex(["a", "b"]))
        res = s.value_counts(dropna=True)
        tm.assert_series_equal(res, exp)
        res = s.value_counts(dropna=True)
        tm.assert_series_equal(res, exp)
        # same Series via two different constructions --> same behaviour
        series = [
            Series(["a", "b", None, "a", None, None], dtype="category"),
            Series(Categorical(["a", "b", None, "a", None, None],
                               categories=["a", "b"]))
        ]
        for s in series:
            # None is a NaN value, so we exclude its count here
            exp = Series([2, 1], index=CategoricalIndex(["a", "b"]))
            res = s.value_counts(dropna=True)
            tm.assert_series_equal(res, exp)
            # we don't exclude the count of None and sort by counts
            exp = Series([3, 2, 1], index=CategoricalIndex([np.nan, "a", "b"]))
            res = s.value_counts(dropna=False)
            tm.assert_series_equal(res, exp)
            # When we aren't sorting by counts, and np.nan isn't a
            # category, it should be last.
            exp = Series([2, 1, 3], index=CategoricalIndex(["a", "b", np.nan]))
            res = s.value_counts(dropna=False, sort=False)
            tm.assert_series_equal(res, exp)

    @pytest.mark.parametrize(
        "dtype",
        ["int_", "uint", "float_", "unicode_", "timedelta64[h]",
         pytest.param("datetime64[D]",
                      marks=pytest.mark.xfail(reason="GH#7996"))]
    )
    @pytest.mark.parametrize("is_ordered", [True, False])
    def test_drop_duplicates_categorical_non_bool(self, dtype, is_ordered):
        """duplicated/drop_duplicates for non-bool categorical dtypes,
        for all three ``keep`` modes and both in/out-of-place."""
        cat_array = np.array([1, 2, 3, 4, 5], dtype=np.dtype(dtype))
        # Test case 1
        input1 = np.array([1, 2, 3, 3], dtype=np.dtype(dtype))
        tc1 = Series(Categorical(input1, categories=cat_array,
                                 ordered=is_ordered))
        # keep='first' (default): only the second 3 is a duplicate.
        expected = Series([False, False, False, True])
        tm.assert_series_equal(tc1.duplicated(), expected)
        tm.assert_series_equal(tc1.drop_duplicates(), tc1[~expected])
        sc = tc1.copy()
        sc.drop_duplicates(inplace=True)
        tm.assert_series_equal(sc, tc1[~expected])
        # keep='last': the first 3 is the duplicate instead.
        expected = Series([False, False, True, False])
        tm.assert_series_equal(tc1.duplicated(keep='last'), expected)
        tm.assert_series_equal(tc1.drop_duplicates(keep='last'),
                               tc1[~expected])
        sc = tc1.copy()
        sc.drop_duplicates(keep='last', inplace=True)
        tm.assert_series_equal(sc, tc1[~expected])
        # keep=False: every occurrence of a duplicated value is dropped.
        expected = Series([False, False, True, True])
        tm.assert_series_equal(tc1.duplicated(keep=False), expected)
        tm.assert_series_equal(tc1.drop_duplicates(keep=False), tc1[~expected])
        sc = tc1.copy()
        sc.drop_duplicates(keep=False, inplace=True)
        tm.assert_series_equal(sc, tc1[~expected])
        # Test case 2
        input2 = np.array([1, 2, 3, 5, 3, 2, 4], dtype=np.dtype(dtype))
        tc2 = Series(Categorical(
            input2, categories=cat_array, ordered=is_ordered)
        )
        expected = Series([False, False, False, False, True, True, False])
        tm.assert_series_equal(tc2.duplicated(), expected)
        tm.assert_series_equal(tc2.drop_duplicates(), tc2[~expected])
        sc = tc2.copy()
        sc.drop_duplicates(inplace=True)
        tm.assert_series_equal(sc, tc2[~expected])
        expected = Series([False, True, True, False, False, False, False])
        tm.assert_series_equal(tc2.duplicated(keep='last'), expected)
        tm.assert_series_equal(tc2.drop_duplicates(keep='last'),
                               tc2[~expected])
        sc = tc2.copy()
        sc.drop_duplicates(keep='last', inplace=True)
        tm.assert_series_equal(sc, tc2[~expected])
        expected = Series([False, True, True, False, True, True, False])
        tm.assert_series_equal(tc2.duplicated(keep=False), expected)
        tm.assert_series_equal(tc2.drop_duplicates(keep=False), tc2[~expected])
        sc = tc2.copy()
        sc.drop_duplicates(keep=False, inplace=True)
        tm.assert_series_equal(sc, tc2[~expected])

    @pytest.mark.parametrize("is_ordered", [True, False])
    def test_drop_duplicates_categorical_bool(self, is_ordered):
        """duplicated/drop_duplicates for boolean categoricals."""
        tc = Series(Categorical([True, False, True, False],
                                categories=[True, False], ordered=is_ordered))
        expected = Series([False, False, True, True])
        tm.assert_series_equal(tc.duplicated(), expected)
        tm.assert_series_equal(tc.drop_duplicates(), tc[~expected])
        sc = tc.copy()
        sc.drop_duplicates(inplace=True)
        tm.assert_series_equal(sc, tc[~expected])
        expected = Series([True, True, False, False])
        tm.assert_series_equal(tc.duplicated(keep='last'), expected)
        tm.assert_series_equal(tc.drop_duplicates(keep='last'), tc[~expected])
        sc = tc.copy()
        sc.drop_duplicates(keep='last', inplace=True)
        tm.assert_series_equal(sc, tc[~expected])
        # keep=False: with only two distinct values, both duplicated,
        # everything is dropped.
        expected = Series([True, True, True, True])
        tm.assert_series_equal(tc.duplicated(keep=False), expected)
        tm.assert_series_equal(tc.drop_duplicates(keep=False), tc[~expected])
        sc = tc.copy()
        sc.drop_duplicates(keep=False, inplace=True)
        tm.assert_series_equal(sc, tc[~expected])
|
{
"content_hash": "62f89fa966e4743b9295cabf0709a066",
"timestamp": "",
"source": "github",
"line_count": 1507,
"max_line_length": 79,
"avg_line_length": 38.650298606502986,
"alnum_prop": 0.5384060708031453,
"repo_name": "MJuddBooth/pandas",
"id": "1f265d574da15ba18050b10a1d86dfc6e184a9f4",
"size": "58296",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/tests/series/test_analytics.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4879"
},
{
"name": "C",
"bytes": "406766"
},
{
"name": "C++",
"bytes": "17248"
},
{
"name": "HTML",
"bytes": "606963"
},
{
"name": "Makefile",
"bytes": "529"
},
{
"name": "Python",
"bytes": "14858932"
},
{
"name": "Shell",
"bytes": "29575"
},
{
"name": "Smarty",
"bytes": "2040"
}
],
"symlink_target": ""
}
|
import os
import re
import dateutil.parser
import textwrap
from datetime import datetime
from flask.ext.misaka import markdown
from glob2 import glob
from logging import error
from logging import info
from titlecase import titlecase
class Documents():
    """Compile a tree of markdown documents into Jinja templates.

    On construction the ``target`` tree is wiped and rebuilt from
    ``source``: every markdown file is split into a metadata header and
    a body, the body is rendered to HTML wrapped in a Jinja ``extends``
    skeleton, and the parsed metadata is cached in ``self.meta`` keyed
    by the file's extension-less relative path.
    """

    # NOTE(review): class-level (shared) attributes; ``meta`` is
    # mutated in place, so all instances share one registry — confirm
    # only a single Documents instance is ever created.
    meta = {}
    source = ''
    target = ''

    def __init__(self, source, target):
        self.source = source
        self.target = target
        # Rebuild the target tree from scratch.
        self.remove()
        self.update()

    def _parse_metadata(self, lines):
        """Split ``lines`` into ``(metadata, content)``.

        Leading ``key: value`` lines are consumed as metadata; parsing
        stops at the first non-matching line.  Returns the normalised
        metadata dict and the remaining lines joined into one string.
        """
        meta = {}
        # Default to "no content": fixes two edge cases of the old
        # code, which raised NameError on an empty file (loop variable
        # unbound) and leaked the last header line into the content
        # when every line matched (no break taken).
        content_start = len(lines)
        for index, line in enumerate(lines):
            result = re.match(r'^(\w*)[ \t]*:[ \t]*(.*)?[ \t]*$', line)
            if not result:
                content_start = index
                break
            meta[result.group(1).lower()] = result.group(2)
        return (self._normal_metadata(meta), ''.join(lines[content_start:]))

    def _normal_metadata(self, meta):
        """Validate and normalise raw metadata values in place.

        Raises ValueError when a required key (title, date) is missing
        or a value cannot be parsed.
        """
        def check(key):
            return key in meta and meta[key]

        def clean(key, action, default):
            meta[key] = action(meta[key]) if check(key) else default()

        def required():
            raise KeyError("Missing required value.")

        try:
            clean('title', titlecase, required)
            clean('author', titlecase, lambda: None)
            clean('date', dateutil.parser.parse, required)
            clean('keywords', lambda x: x.lower().split(), lambda: [])
            clean('template', lambda x: x, lambda: 'default')
            clean('prev', lambda x: x, lambda: None)
            clean('next', lambda x: x, lambda: None)
        except (KeyError, ValueError, OverflowError) as e:
            raise ValueError("Unable to parse meta information: {}".format(e))
        return meta

    def _embed_templating(self, html, metadata):
        """Wrap rendered ``html`` in a Jinja ``extends`` skeleton using
        the document's template name."""
        return textwrap.dedent('''\
            {{% extends '{template}.jinja' %}}
            {{% block content %}}
            {content}{{% endblock %}}
            ''').format(content=html, template=metadata['template'])

    def _build(self, path):
        """Parse and render one markdown file to ``(meta, jinja_src)``.

        Raises ValueError for files that should be skipped: wrong
        extension, future-dated (unpublished), or bad metadata.
        """
        # TODO: Allow other file types
        if not path.endswith('.md'):
            raise ValueError('Skipping non-markdown file.')
        with open(path) as f:
            meta, content = self._parse_metadata(f.readlines())
        # Posts dated in the future are withheld until published.
        if datetime.now() < meta['date']:
            raise ValueError("Skipping until published")
        # TODO: Pass config[markdown] to the markdown parser
        return (meta, self._embed_templating(markdown(content), meta))

    # TODO: Restrict to not go up directories
    def _trie_glob(self, path):
        """Return every entry under ``path``'s directory whose full
        path starts with ``path`` (prefix match, like a trie walk)."""
        glob_path = os.path.join(os.path.split(path)[0], '**')
        return [e for e in glob(glob_path) if e.startswith(path)]

    def update(self, path=''):
        """(Re)build every document under ``source``/``path``."""
        for item in self._trie_glob(os.path.join(self.source, path)):
            try:
                rel = os.path.splitext(os.path.relpath(item, self.source))[0]
                target = os.path.join(self.target, rel)
                if os.path.isdir(item):
                    os.makedirs(target, exist_ok=True)
                else:
                    meta, html = self._build(item)
                    with open(target + '.jinja', 'w+') as f:
                        f.write(html)
                    self.meta[rel] = meta
            except ValueError as e:
                # Expected skip conditions are informational only.
                info("{}: {}".format(rel, e))
            except OSError as e:
                error("{}: {}".format(rel, e))

    def remove(self, path=''):
        """Delete generated files under ``target``/``path`` and drop
        their metadata entries."""
        # Reversed so files are removed before their parent directories.
        for item in reversed(self._trie_glob(os.path.join(self.target, path))):
            try:
                os.remove(item) if os.path.isfile(item) else os.rmdir(item)
            except OSError as e:
                error("{}: {}".format(item, e))
            self.meta.pop(os.path.splitext(os.path.relpath(item, self.target))[0], None)
|
{
"content_hash": "f2d34c78278ab73c9b639f7f0ca4f806",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 88,
"avg_line_length": 33.73873873873874,
"alnum_prop": 0.5476635514018692,
"repo_name": "dallbee/Bytelog",
"id": "2d387058b8dfb9f63b6bca165d3843c47d63cdd4",
"size": "3745",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "service/document.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "2054"
},
{
"name": "Python",
"bytes": "8317"
}
],
"symlink_target": ""
}
|
from util import *
from settings import bgp_validator_server, validator_path, \
maintenance_timeout, maintenance_log, thread_max_errors
import json
import Queue
import socket
import sys
import traceback
from datetime import datetime, timedelta
from subprocess import PIPE, Popen
from threading import Lock, Thread
#from thread import start_new_thread
from time import sleep
validator_threads_lock = Lock()
validator_threads = dict()
maintenance_thread = None
maintenance_thread_queue = Queue.Queue()
mlog_lines = 0
## private functions ##
"""
_restart_validator_thread
- helper function to restart a validator thread after an error
"""
def _restart_validator_thread(cache_server):
    """Replace the validator thread for ``cache_server`` with a new one.

    Sends "STOP" into the old thread's queue, installs a fresh queue
    and worker thread, and clears the recorded error list.  Called
    after subprocess I/O failures or too many validation errors.
    """
    print_log("restarting validator thread (%s)" % cache_server)
    validator_threads_lock.acquire()
    try:
        global validator_threads
        #validator_threads[cache_server]['thread'] = \
        #    start_new_thread(validator_thread,
        #                     (validator_threads[cache_server]['queue'],
        #                      cache_server))
        # Old thread drains its old queue until it sees "STOP"; new
        # requests go to the replacement queue installed below.
        validator_threads[cache_server]['queue'].put("STOP")
        new_queue = Queue.Queue()
        validator_threads[cache_server]['queue'] = new_queue
        vt = Thread(target=validator_thread,
                    args=(validator_threads[cache_server]['queue'],
                          cache_server))
        vt.start()
        validator_threads[cache_server]['thread'] = vt
        validator_threads[cache_server]['errors'] = list()
    except Exception, e:
        print_error("Error restarting validator thread (%s), failed with %s" %
                    (cache_server,e.message))
    finally:
        validator_threads_lock.release()
"""
_stop_validator_thread
- helper function to stop a validator thread after an error
"""
def _stop_validator_thread(cache_server):
    """Stop and remove the validator thread for ``cache_server``.

    Signals the worker via its queue, joins it if still alive, and
    deletes its bookkeeping entry from ``validator_threads``.
    """
    print_log("stopping validator thread (%s)" % cache_server)
    validator_threads_lock.acquire()
    try:
        global validator_threads
        validator_threads[cache_server]['queue'].put("STOP")
        if validator_threads[cache_server]['thread'].is_alive():
            validator_threads[cache_server]['thread'].join()
        del validator_threads[cache_server]
    except Exception, e:
        print_error("Error stopping validator thread (%s), failed with %s" %
                    (cache_server,e.message))
    finally:
        validator_threads_lock.release()
def _get_validity(validation_result_string):
    """Parse the external validator's one-line output into a dict.

    Success format: ``"<query>|<reasons>|<code>"`` where query is
    ``"<prefix> <masklen> <asn>"`` and reasons is a comma-separated list
    of ``"<asn> <prefix> <min_len> <max_len>"`` VRP entries.  The error
    strings "error"/"timeout"/"input error" and malformed output map to
    codes 101-104; the final state/description strings are looked up in
    ``validity_state``/``validity_descr`` (from util).
    """
    validity = dict()
    validity['code'] = 100
    validity['state'] = 'Error'
    validity['description'] = 'Unknown validation error.'
    # check validation result
    validation_result_array = validation_result_string.split("|")
    if validation_result_string == "error":
        validity['code'] = 101
        validity['description'] = 'RPKI cache-server connection failure!'
    elif validation_result_string == "timeout":
        validity['code'] = 102
        validity['description'] = 'RPKI cache-server connection timeout!'
    elif validation_result_string == "input error":
        validity['code'] = 103
        validity['description'] = 'RPKI cache-server input error!'
    elif len(validation_result_array) != 3:
        validity['code'] = 104
        validity['description'] = 'RPKI cache-server output error!'
    else:  # looks like a valid validation result string
        query = validation_result_array[0]
        reasons = validation_result_array[1]
        validity['code'] = int(validation_result_array[2])
        validity['VRPs'] = dict()
        validity['VRPs']['matched'] = list()
        validity['VRPs']['unmatched_as'] = list()
        validity['VRPs']['unmatched_length'] = list()
        # Code 1 (valid) carries no reasons worth classifying.
        if validity['code'] != 1:
            reasons_array = reasons.split(',')
            vprefix, vlength, vasn = query.split()
            for r in reasons_array:
                rasn, rprefix, rmin_len, rmax_len = r.split()
                vrp = dict()
                vrp['asn'] = "AS"+rasn
                vrp['prefix'] = rprefix+"/"+rmin_len
                vrp['max_length'] = rmax_len
                match = True
                if vasn != rasn:
                    validity['VRPs']['unmatched_as'].append(vrp)
                    match = False
                # BUGFIX: compare prefix lengths numerically.  The old
                # code compared the raw strings, so e.g. "9" > "24" was
                # True and VRPs were misclassified.
                if int(vlength) > int(rmax_len):
                    validity['VRPs']['unmatched_length'].append(vrp)
                    match = False
                if match:
                    validity['VRPs']['matched'].append(vrp)
            # END (for r in reasons_array)
            # Refine a generic "invalid" (2) when every reason failed
            # for the same cause: 3 = wrong origin AS, 4 = too specific.
            if validity['code'] == 2:
                if len(reasons_array) == len(validity['VRPs']['unmatched_as']):
                    validity['code'] = 3
                    validity['reason'] = 'as'
                elif len(reasons_array) == len(validity['VRPs']['unmatched_length']):
                    validity['code'] = 4
                    validity['reason'] = 'length'
            # END (if validity['code'] == 2)
        # END (if validity['code'] != 1)
        validity['state'] = validity_state[validity['code']]
        validity['description'] = validity_descr[validity['code']]
    # END (if elif else)
    return validity
## threads ##
"""
maintenance_thread
- periodically checks all running validation threads
"""
def maintenance_thread(mtq):
    """Periodic housekeeping over all validator threads.

    Every ``maintenance_timeout`` seconds: logs one status line per
    cache server, optionally appends it to (and rotates) the
    maintenance log file, and schedules a restart for threads whose
    error count exceeds ``thread_max_errors``.  Any item appearing on
    ``mtq`` terminates the loop.
    """
    print_log("CALL maintenance_thread")
    timeout = datetime.now() + timedelta(0,maintenance_timeout)
    while True:
        now = datetime.now()
        restart_threads = list()
        # Any queue entry is a shutdown signal.
        if not mtq.empty():
            break
        if now < timeout:
            sleep(1)
            continue
        try:
            validator_threads_lock.acquire()
            for cs in validator_threads:
                dt_now = datetime.now()
                dt_start = validator_threads[cs]['start']
                dt_access = validator_threads[cs]['access']
                # NOTE(review): runtime_str is computed but never used.
                runtime_str = str( int((dt_now - dt_start).total_seconds()) )
                errors_str = str( len(validator_threads[cs]['errors']) )
                count_str = str( validator_threads[cs]['count'] )
                dt_start_str = dt_start.strftime("%Y-%m-%d %H:%M:%S")
                dt_now_str = dt_now.strftime("%Y-%m-%d %H:%M:%S")
                dt_access_str = dt_access.strftime("%Y-%m-%d %H:%M:%S")
                # timestamp;start time;last access;cache-server;counter;errors
                mnt_str = ';'.join([dt_now_str,dt_start_str,dt_access_str,cs,count_str,errors_str])
                print_log(mnt_str)
                global mlog_lines
                if maintenance_log['enabled']:
                    # Rotate on first write and whenever maxlines is hit.
                    if (maintenance_log['rotate'] and
                            os.path.isfile(maintenance_log['file']) and
                            (mlog_lines==0 or mlog_lines==maintenance_log['maxlines'])):
                        log_rotate(maintenance_log['file'])
                        mlog_lines = 0
                    with open(maintenance_log['file'],"ab") as f:
                        f.write(mnt_str+'\n')
                    mlog_lines = mlog_lines+1
                if thread_max_errors > 0:
                    if len(validator_threads[cs]['errors']) > thread_max_errors:
                        print_log("RESTART thread (%s) due to errors!" % cs)
                        restart_threads.append(cs)
        except Exception, e:
            print_error("Error during maintenance! Failed with %s" % e.message)
        finally:
            validator_threads_lock.release()
        # Restart outside the lock: _restart_validator_thread acquires
        # validator_threads_lock itself.
        for r in restart_threads:
            _restart_validator_thread(r)
        timeout = datetime.now() + timedelta(0,maintenance_timeout)
        print_info("maintenance_thread sleeps until: " + timeout.strftime("%Y-%m-%d %H:%M:%S") )
"""
client_thread
- handels incoming client connections and queries
- starts validation_thread if necessary
"""
def client_thread(conn):
    """Parse one client request and dispatch it to a validator thread.

    Reads JSON from ``conn``, validates the requested cache server,
    lazily creates the per-cache-server validator thread, updates its
    access/count bookkeeping, and enqueues the query (with the open
    connection attached) for the worker to answer.
    """
    print_log("CALL client_thread")
    # NOTE(review): a request larger than 1024 bytes would be truncated
    # and fail JSON decoding — confirm requests always fit.
    data = conn.recv(1024)
    try:
        query = json.loads(data)
    except ValueError:
        print_error("Error decoding query into JSON!")
        conn.sendall("Invalid query data, must be JSON!\n")
        conn.close()
    else:
        # The worker thread answers the client directly on this socket.
        query['conn'] = conn
        cache_server = query['cache_server']
        if not cache_server_valid(cache_server):
            print_error("Invalid cache server (%s)!" % cache_server)
            conn.close()
            return
        # Start a thread for the current cache server if necessary
        validator_threads_lock.acquire()
        try:
            global validator_threads
            if cache_server not in validator_threads:
                validator_threads[cache_server] = dict()
                new_queue = Queue.Queue()
                validator_threads[cache_server]['queue'] = new_queue
                vt = Thread(target=validator_thread,
                            args=(validator_threads[cache_server]['queue'],
                                  cache_server))
                vt.start()
                #validator_threads[cache_server]['thread'] = \
                #    start_new_thread(validator_thread,
                #                     (validator_threads[cache_server]['queue'],
                #                      cache_server))
                validator_threads[cache_server]['thread'] = vt
                validator_threads[cache_server]['start'] = datetime.now()
                validator_threads[cache_server]['access'] = datetime.now()
                validator_threads[cache_server]['errors'] = list()
                validator_threads[cache_server]['count'] = 1
            else:
                validator_threads[cache_server]['access'] = datetime.now()
                tmp = validator_threads[cache_server]['count']
                validator_threads[cache_server]['count'] = tmp+1
        finally:
            validator_threads_lock.release()
        validator_threads[cache_server]['queue'].put(query)
    return True
"""
validator_thread
- handels cache server connections and queries by clients
"""
def validator_thread(queue, cache_server):
    """Worker loop for one RPKI cache server.

    Spawns the external validator subprocess connected to
    ``cache_server`` ("host:port") and serves queued queries: writes
    "<prefix> <masklen> <asn>" to its stdin, reads one result line,
    converts it with _get_validity(), and sends the JSON response back
    on the client's connection.  A "STOP" queue entry ends the loop and
    kills the subprocess; error codes >= 100 are recorded for the
    maintenance thread.
    """
    cache_host = cache_server.split(":")[0]
    cache_port = cache_server.split(":")[1]
    cache_cmd = [validator_path, cache_host, cache_port]
    validator_process = Popen(cache_cmd, stdin=PIPE, stdout=PIPE)
    print_log("CALL validator thread (%s)" % cache_server)
    run = True
    while run:
        # Blocking get; "STOP" is the shutdown sentinel.
        validation_entry = queue.get(True)
        if validation_entry == "STOP":
            run = False
            break
        conn = validation_entry['conn']
        network = validation_entry["network"]
        masklen = validation_entry["masklen"]
        asn = validation_entry["asn"]
        bgp_entry_str = str(network) + " " + str(masklen) + " " + str(asn)
        try:
            validator_process.stdin.write(bgp_entry_str + '\n')
        except Exception, e:
            print_error("Error writing validator process, failed with %s!" %
                        e.message)
            # I/O failure: hand this cache server a fresh thread and
            # finish this request as an error.
            _restart_validator_thread(cache_server)
            run = False
        try:
            validation_result = validator_process.stdout.readline().strip()
        except Exception, e:
            print_error("Error reading validator process, failed with %s!" %
                        e.message)
            _restart_validator_thread(cache_server)
            validation_result = ""
            run = False
        validity = _get_validity(validation_result)
        print_info(cache_server + " -> " + network+"/"+masklen +
                   "(AS"+asn+") -> " + validity['state'])
        resp = dict()
        resp['cache_server'] = cache_server
        resp['prefix'] = network+"/"+masklen
        resp['asn'] = asn
        resp['validity'] = validity
        try:
            conn.sendall(json.dumps(resp)+'\n')
            conn.close()
        except Exception, e:
            print_error("Error sending validation response, failed with: %s" %
                        e.message)
        # Record internal errors so maintenance can restart the thread.
        if (validity['code'] >= 100):
            validator_threads_lock.acquire()
            global validator_threads
            validator_threads[cache_server]['errors'].append(validity['code'])
            validator_threads_lock.release()
        # end while
    validator_process.kill()
    return True
## main ##
"""
validator_main
"""
def validator_main():
    """Run the validation service: bind the TCP socket, start the
    maintenance thread, and spawn one client_thread per connection."""
    print_log("CALL main")
    rbv_host = bgp_validator_server['host']
    rbv_port = int(bgp_validator_server['port'])
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    print_info("Socket created")
    #Bind socket to local host and port
    try:
        s.bind((rbv_host, rbv_port))
    except socket.error as msg:
        print_error("Bind failed. Error Code : " + str(msg[0]) +
                    " Message " + msg[1])
        sys.exit()
    print_info("Socket bind complete")
    #Start listening on socket
    s.listen(10)
    print_info("Socket now listening")
    #start_new_thread(maintenance_thread,())
    # NOTE(review): this rebinds the module-level name
    # ``maintenance_thread`` from the function to the Thread object, so
    # validator_main() cannot be called a second time — consider a
    # distinct variable name.
    global maintenance_thread
    maintenance_thread = Thread(target=maintenance_thread,
                                args=(maintenance_thread_queue,))
    maintenance_thread.start()
    while True:
        #wait to accept a connection - blocking call
        conn, addr = s.accept()
        print_info("Connected with " + addr[0] + ":" + str(addr[1]))
        ct = Thread(target=client_thread, args=(conn,))
        ct.start()
        #start_new_thread(client_thread, (conn,))
    # Unreachable: the accept loop above never exits normally.
    s.close()
if __name__ == "__main__":
    try:
        validator_main()
    except KeyboardInterrupt:
        print_error("Shutdown requested by the user. Exiting...")
    except Exception:
        print_error(traceback.format_exc())
        print_error("An error occurred. Exiting...")
    finally:
        # Signal and join every validator worker before exiting.
        for v in validator_threads:
            if validator_threads[v]['thread'].is_alive():
                print ("Waiting for validator thread to terminate ...")
                validator_threads[v]['queue'].put("STOP")
                validator_threads[v]['thread'].join()
        maintenance_thread_queue.put("STOP")
        # NOTE(review): if validator_main() failed before starting the
        # maintenance thread, ``maintenance_thread`` is still the
        # function object and this is_alive() call would raise.
        if (maintenance_thread != None) and (maintenance_thread.is_alive()):
            print ("Waiting for maintenance thread to terminate ...")
            maintenance_thread.join()
        sys.exit()
|
{
"content_hash": "d2731bc01b36d433efd4a1cec1616731",
"timestamp": "",
"source": "github",
"line_count": 367,
"max_line_length": 99,
"avg_line_length": 38.89645776566758,
"alnum_prop": 0.5634325744308231,
"repo_name": "rtrlib/rbv",
"id": "dab2d67340c2fc49e14406addccc7bc562c5a78e",
"size": "14275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/validator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "73"
},
{
"name": "HTML",
"bytes": "1924"
},
{
"name": "Python",
"bytes": "34636"
}
],
"symlink_target": ""
}
|
import asyncio
import logging
from . import configuration
event_handlers = dict()
function_handlers = dict()
def handler(fn):
    """
    Register a function to receive and handle events.

    Wraps ``fn`` as an asyncio coroutine and stores it in the
    module-level ``event_handlers`` registry under the key
    ``"<module basename>.<function name>"``.  Returns the wrapped
    function so this can be used as a decorator.
    """
    module = fn.__module__.split(".")[-1]
    key = module + "." + fn.__name__
    # Lazy %-style logging args: the message is only interpolated if a
    # configured handler actually emits the record.
    logging.info("Registering Event Handler: '%s' from %s", key, fn.__module__)
    fn = asyncio.coroutine(fn)
    event_handlers[key] = fn
    return fn
def function(fn):
    """
    Register a function to receive a request and return a result.

    Wraps ``fn`` as an asyncio coroutine and stores it in the
    module-level ``function_handlers`` registry under the key
    ``"<module basename>.<function name>"``.  Returns the wrapped
    function so this can be used as a decorator.
    """
    module = fn.__module__.split(".")[-1]
    key = module + "." + fn.__name__
    # Lazy %-style logging args: the message is only interpolated if a
    # configured handler actually emits the record.
    logging.info("Registering Function Handler: '%s' from %s", key, fn.__module__)
    fn = asyncio.coroutine(fn)
    function_handlers[key] = fn
    return fn
|
{
"content_hash": "fa07758f0524f75e56b665bf5fe3439f",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 85,
"avg_line_length": 25.933333333333334,
"alnum_prop": 0.6015424164524421,
"repo_name": "simonwittber/netwrok-server",
"id": "b01ae4bc621fd4abf6f0284b21d7967c6f128cc0",
"size": "778",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/netwrok/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PLpgSQL",
"bytes": "30477"
},
{
"name": "Python",
"bytes": "46509"
},
{
"name": "Shell",
"bytes": "97"
}
],
"symlink_target": ""
}
|
"""
This is the test module to test TestRunner function of blade.
"""
import os
import blade_test
class TestTestRunner(blade_test.TargetTest):
    """Test cc_library """

    def setUp(self):
        """setup method. """
        # Load the test_test_runner target; run a single test job with
        # detailed output and without the full-test mode.
        self.doSetUp('test_test_runner', ':string_test_main',
                     fulltest=False, args='', test_jobs=1, show_details=True)

    def testLoadBuildsNotNone(self):
        """Test direct targets and all command targets are not none. """
        self.assertTrue(self.direct_targets)
        self.assertTrue(self.all_command_targets)

    def testTestRunnerCorrectly(self):
        """Test query targets dependency relationship correctly. """
        self.assertTrue(self.all_targets)
        self.rules_buf = self.blade.generate_build_rules()
        test_env_dir = './build%s_%s/test_test_runner' % (
            self.options.m, self.options.profile)
        if not os.path.exists(test_env_dir):
            os.mkdir(test_env_dir)
        # All three libraries of the sample project must be loaded,
        # keyed by (path, name).
        cc_library_lower = (self.target_path, 'lowercase')
        cc_library_upper = (self.target_path, 'uppercase')
        cc_library_string = (self.target_path, 'string_test_main')
        self.assertIn(cc_library_lower, self.all_targets.keys())
        self.assertIn(cc_library_upper, self.all_targets.keys())
        self.assertIn(cc_library_string, self.all_targets.keys())
        self.assertTrue(self.dryRun())
        # Scan the dry-run scons output for the compile line of each
        # source and the link line of the test binary.
        com_lower_line = ''
        com_upper_line = ''
        com_string_line = ''
        string_main_depends_libs = ''
        for line in self.scons_output:
            if 'plowercase.cpp.o -c' in line:
                com_lower_line = line
            if 'puppercase.cpp.o -c' in line:
                com_upper_line = line
            if 'string_test.cpp.o -c' in line:
                com_string_line = line
            if 'string_test_main' in line:
                string_main_depends_libs = line
        self.assertCxxFlags(com_lower_line)
        self.assertCxxFlags(com_upper_line)
        self.assertCxxFlags(com_string_line)
        self.assertLinkFlags(string_main_depends_libs)
        # The test binary must link against the dependency library.
        self.assertIn('liblowercase.a', string_main_depends_libs)
        # NOTE(review): blade.test() is expected to return 1 here —
        # presumably the sample test run fails by design; confirm.
        ret_code = self.blade.test()
        self.assertEqual(ret_code, 1)
# Allow running this test module directly via blade's test runner.
if __name__ == '__main__':
    blade_test.run(TestTestRunner)
|
{
"content_hash": "8bd295b610509cab03132e7b04a52759",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 77,
"avg_line_length": 34.343283582089555,
"alnum_prop": 0.6058235549760973,
"repo_name": "project-zerus/blade",
"id": "71450a6abd217f3efc61d4a3c4311cf3a6d01895",
"size": "2437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/test_target_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "370"
},
{
"name": "C",
"bytes": "403"
},
{
"name": "C++",
"bytes": "2863"
},
{
"name": "Cuda",
"bytes": "3248"
},
{
"name": "Java",
"bytes": "5638"
},
{
"name": "LLVM",
"bytes": "379"
},
{
"name": "Objective-C",
"bytes": "83"
},
{
"name": "Protocol Buffer",
"bytes": "1569"
},
{
"name": "Python",
"bytes": "653341"
},
{
"name": "Shell",
"bytes": "17020"
},
{
"name": "Thrift",
"bytes": "6217"
},
{
"name": "Vim script",
"bytes": "17199"
},
{
"name": "Yacc",
"bytes": "397"
}
],
"symlink_target": ""
}
|
from sneakers.modules import Encoder
class Identity(Encoder):
    """Pass-through encoder: encode() and decode() return the data
    unchanged (useful for testing channels without any encoding)."""

    # Plugin metadata consumed by the framework.
    info = {
        "name": "Identity",
        "author": "davinerd",
        "description": "An encoder that does no encoding. Ironic, but also useful for testing channels. Or not encoding/decoding.",
        "comments": []
    }

    def encode(self, data):
        """Return *data* unchanged."""
        return data

    def decode(self, data):
        """Return *data* unchanged."""
        return data
|
{
"content_hash": "739e9384fef3d28ff1477eff8bb631d3",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 131,
"avg_line_length": 24.625,
"alnum_prop": 0.6091370558375635,
"repo_name": "davinerd/sneaky-creeper",
"id": "6e76a09cd54b195caf2a8511873c779587050f94",
"size": "394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sneakers/encoders/identity.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39338"
}
],
"symlink_target": ""
}
|
"""Tests for waymo_open_dataset.metrics.python.wod_detection_evaluator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from google.protobuf import text_format
from waymo_open_dataset.metrics.python import wod_detection_evaluator
from waymo_open_dataset.protos import metrics_pb2
class WODDetectionEvaluatorTest(tf.test.TestCase):
def _GenerateRandomBBoxes(self, num_frames, num_bboxes):
# TODO(chsin): Should WODDetectionEvaluator do the casting? If so, remove
# the astype calls here.
center_xyz = np.random.uniform(
low=-1.0, high=1.0, size=(num_bboxes, 3)).astype(np.float32)
dimension = np.random.uniform(
low=0.1, high=1.0, size=(num_bboxes, 3)).astype(np.float32)
rotation = np.random.uniform(
low=-np.pi, high=np.pi, size=(num_bboxes, 1)).astype(np.float32)
bboxes = np.concatenate([center_xyz, dimension, rotation], axis=-1)
types = np.random.randint(1, 5, size=[num_bboxes]).astype(np.uint8)
frame_ids = np.random.randint(0, num_frames, size=[num_bboxes])
scores = np.random.uniform(size=[num_bboxes]).astype(np.float32)
return bboxes, types, frame_ids, scores
  def _BuildConfig(self):
    """Build a minimal metrics config: OBJECT_TYPE breakdown only,
    Hungarian matching, 3D boxes, IoU threshold 0.5 (repeated once per
    type slot)."""
    config = metrics_pb2.Config()
    config_text = """
    num_desired_score_cutoffs: 11
    breakdown_generator_ids: OBJECT_TYPE
    difficulties {
    }
    matcher_type: TYPE_HUNGARIAN
    iou_thresholds: 0.5
    iou_thresholds: 0.5
    iou_thresholds: 0.5
    iou_thresholds: 0.5
    iou_thresholds: 0.5
    box_type: TYPE_3D
    """
    text_format.Merge(config_text, config)
    return config
def testBasic(self):
num_frames, gt_num_bboxes, pd_num_bboxes = 10, 10, 2000
pd_bbox, pd_type, pd_frameid, pd_score = self._GenerateRandomBBoxes(
num_frames, pd_num_bboxes)
pd_overlap_nlz = np.zeros_like(pd_frameid, dtype=np.bool)
gt_bbox, gt_type, gt_frameid, _ = self._GenerateRandomBBoxes(
num_frames, gt_num_bboxes)
config = self._BuildConfig()
evaluator = wod_detection_evaluator.WODDetectionEvaluator(config)
num_breakdowns = len(evaluator._breakdown_names)
self.assertEqual(num_breakdowns, 4)
predictions = {
'prediction_frame_id': pd_frameid,
'prediction_bbox': pd_bbox,
'prediction_type': pd_type,
'prediction_score': pd_score,
'prediction_overlap_nlz': pd_overlap_nlz,
}
# Expect ap and aph metrics is all zeros.
groundtruths = {
'ground_truth_frame_id': gt_frameid,
'ground_truth_bbox': gt_bbox,
'ground_truth_type': gt_type,
'ground_truth_difficulty': np.ones_like(gt_frameid, dtype=np.uint8),
'ground_truth_speed': np.zeros((gt_num_bboxes, 2), dtype=np.float32),
}
# Expect ap and aph metrics is all ones.
groundtruths_as_predictions = {
'ground_truth_frame_id': pd_frameid,
'ground_truth_bbox': pd_bbox,
'ground_truth_type': pd_type,
'ground_truth_difficulty': np.ones_like(pd_frameid, dtype=np.uint8),
'ground_truth_speed': np.zeros((pd_num_bboxes, 2), dtype=np.float32),
}
evaluator.update_state(groundtruths, predictions)
metric_dict = evaluator.result()
self.assertEqual(len(metric_dict.average_precision), num_breakdowns)
self.assertAllClose(metric_dict.average_precision, [0, 0, 0, 0])
self.assertAllClose(metric_dict.average_precision_ha_weighted, [0, 0, 0, 0])
evaluator.update_state(groundtruths_as_predictions, predictions)
metric_dict = evaluator.result()
self.assertAllClose(metric_dict.average_precision, [1, 1, 1, 1])
self.assertAllClose(metric_dict.average_precision_ha_weighted, [1, 1, 1, 1])
evaluator.update_state(groundtruths, predictions)
evaluator.update_state(groundtruths_as_predictions, predictions)
metric_dict = evaluator.result()
self.assertAllClose(
metric_dict.average_precision, [0.5, 0.5, 0.5, 0.5], atol=0.01)
self.assertAllClose(
metric_dict.average_precision_ha_weighted, [0.5, 0.5, 0.5, 0.5],
atol=0.01)
def testDefaultConfig(self):
  """AP/APH metrics behave correctly under the default (32-breakdown) config.

  Checks the first breakdown only (OBJECT_TYPE_TYPE_VEHICLE_LEVEL_1): AP/APH
  is 0 for random predictions, 1 when ground truths are replayed as
  predictions, and ~0.5 when both states are accumulated together.
  """
  num_frames, gt_num_bboxes, pd_num_bboxes = 10, 10, 2000
  pd_bbox, pd_type, pd_frameid, pd_score = self._GenerateRandomBBoxes(
      num_frames, pd_num_bboxes)
  # `np.bool` was deprecated in NumPy 1.20 and removed in 1.24; the builtin
  # `bool` is the supported spelling for a boolean dtype.
  pd_overlap_nlz = np.zeros_like(pd_frameid, dtype=bool)
  gt_bbox, gt_type, gt_frameid, _ = self._GenerateRandomBBoxes(
      num_frames, gt_num_bboxes)
  evaluator = wod_detection_evaluator.WODDetectionEvaluator()
  num_breakdowns = len(evaluator._breakdown_names)
  self.assertEqual(num_breakdowns, 32)
  predictions = {
      'prediction_frame_id': pd_frameid,
      'prediction_bbox': pd_bbox,
      'prediction_type': pd_type,
      'prediction_score': pd_score,
      'prediction_overlap_nlz': pd_overlap_nlz,
  }
  # Expect ap and aph metrics is all zeros.
  groundtruths = {
      'ground_truth_frame_id': gt_frameid,
      'ground_truth_bbox': gt_bbox,
      'ground_truth_type': gt_type,
      'ground_truth_difficulty': np.ones_like(gt_frameid, dtype=np.uint8),
      'ground_truth_speed': np.zeros((gt_num_bboxes, 2), dtype=np.float32),
  }
  # Expect ap and aph metrics is all ones.
  groundtruths_as_predictions = {
      'ground_truth_frame_id': pd_frameid,
      'ground_truth_bbox': pd_bbox,
      'ground_truth_type': pd_type,
      'ground_truth_difficulty': np.ones_like(pd_frameid, dtype=np.uint8),
      'ground_truth_speed': np.zeros((pd_num_bboxes, 2), dtype=np.float32),
  }
  # Only checking the first index, which is OBJECT_TYPE_TYPE_VEHICLE_LEVEL_1,
  # because the others are more complicated.
  evaluator.update_state(groundtruths, predictions)
  metric_dict = evaluator.result()
  self.assertEqual(len(metric_dict.average_precision), num_breakdowns)
  self.assertAllClose(metric_dict.average_precision[0], 0)
  self.assertAllClose(metric_dict.average_precision_ha_weighted[0], 0)
  evaluator.update_state(groundtruths_as_predictions, predictions)
  metric_dict = evaluator.result()
  self.assertAllClose(metric_dict.average_precision[0], 1)
  self.assertAllClose(metric_dict.average_precision_ha_weighted[0], 1)
  evaluator.update_state(groundtruths, predictions)
  evaluator.update_state(groundtruths_as_predictions, predictions)
  metric_dict = evaluator.result()
  self.assertAllClose(metric_dict.average_precision[0], 0.5, atol=0.01)
  self.assertAllClose(
      metric_dict.average_precision_ha_weighted[0], 0.5, atol=0.01)
# Standard TensorFlow test entry point: discover and run all test cases in
# this module when executed as a script.
if __name__ == '__main__':
  tf.test.main()
|
{
"content_hash": "354328a0def8b198136563f1e1f16af3",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 80,
"avg_line_length": 39.22485207100592,
"alnum_prop": 0.6765726353899533,
"repo_name": "waymo-research/waymo-open-dataset",
"id": "ad11a03ad334729068c6a1aa0feb04e21a4ba68c",
"size": "7326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "waymo_open_dataset/metrics/python/wod_detection_evaluator_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "680892"
},
{
"name": "Dockerfile",
"bytes": "6981"
},
{
"name": "Jupyter Notebook",
"bytes": "6236449"
},
{
"name": "Python",
"bytes": "480022"
},
{
"name": "Shell",
"bytes": "12226"
},
{
"name": "Smarty",
"bytes": "439"
},
{
"name": "Starlark",
"bytes": "61788"
}
],
"symlink_target": ""
}
|
from unittest import mock
from osc_lib import exceptions as exc
import yaml
from heatclient import exc as heat_exc
from heatclient.osc.v1 import software_config
from heatclient.tests.unit.osc.v1 import fakes as orchestration_fakes
from heatclient.v1 import software_configs
class TestConfig(orchestration_fakes.TestOrchestrationv1):
    """Base fixture for `software config` command tests.

    Exposes the mocked orchestration client shared by all subclasses.
    """

    def setUp(self):
        super(TestConfig, self).setUp()
        # Shortcut to the mocked orchestration API client provided by the
        # fake application fixture.
        self.mock_client = self.app.client_manager.orchestration
class TestDeleteConfig(TestConfig):
    """Tests for the `software config delete` command."""

    def setUp(self):
        super(TestDeleteConfig, self).setUp()
        self.cmd = software_config.DeleteConfig(self.app, None)
        self.mock_delete = self.mock_client.software_configs.delete

    def test_config_delete(self):
        """A single id results in a single delete call."""
        parsed_args = self.check_parser(self.cmd, ['id_123'], [])
        self.cmd.take_action(parsed_args)
        self.mock_delete.assert_called_with(config_id='id_123')

    def test_config_delete_multi(self):
        """Several ids are deleted one by one, in order."""
        config_ids = ['id_123', 'id_456']
        parsed_args = self.check_parser(self.cmd, config_ids, [])
        self.cmd.take_action(parsed_args)
        self.mock_delete.assert_has_calls(
            [mock.call(config_id=cid) for cid in config_ids])

    def test_config_delete_not_found(self):
        """A missing config does not stop the remaining deletes."""
        config_ids = ['id_123', 'id_456', 'id_789']
        # Second delete fails; the command should continue and then report
        # how many deletions failed.
        self.mock_client.software_configs.delete.side_effect = [
            None, heat_exc.HTTPNotFound, None]
        parsed_args = self.check_parser(self.cmd, config_ids, [])
        error = self.assertRaises(exc.CommandError, self.cmd.take_action,
                                  parsed_args)
        self.mock_delete.assert_has_calls(
            [mock.call(config_id=cid) for cid in config_ids])
        self.assertEqual('Unable to delete 1 of the 3 software configs.',
                         str(error))
class TestListConfig(TestConfig):
    """Tests for the `software config list` command."""

    def setUp(self):
        super(TestListConfig, self).setUp()
        self.cmd = software_config.ListConfig(self.app, None)
        self.mock_client.software_configs.list.return_value = [
            software_configs.SoftwareConfig(None, {})]

    def _run(self, arglist):
        # Parse `arglist` and execute the command, as the shell would.
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.cmd.take_action(parsed_args)

    def test_config_list(self):
        """With no options, list is called with no filters."""
        self._run([])
        self.mock_client.software_configs.list.assert_called_once_with()

    def test_config_list_limit(self):
        """--limit is forwarded verbatim to the API client."""
        self._run(['--limit', '3'])
        self.mock_client.software_configs.list.assert_called_with(limit='3')

    def test_config_list_marker(self):
        """--marker is forwarded verbatim to the API client."""
        self._run(['--marker', 'id123'])
        self.mock_client.software_configs.list.assert_called_with(
            marker='id123')
class TestCreateConfig(TestConfig):
    """Tests for the `software config create` command."""

    def setUp(self):
        super(TestCreateConfig, self).setUp()
        self.cmd = software_config.CreateConfig(self.app, None)
        # Return an empty SoftwareConfig so take_action can format output.
        self.mock_client.software_configs.create.return_value = \
            software_configs.SoftwareConfig(None, {})

    def test_config_create(self):
        """Creating with only a name uses empty/default properties."""
        properties = {
            'config': '',
            'group': 'Heat::Ungrouped',
            'name': 'test',
            'options': {},
            'inputs': [],
            'outputs': []
        }
        arglist = ['test']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, rows = self.cmd.take_action(parsed_args)
        # The command validates the config by wrapping it in a
        # single-resource HOT template before creating it.
        self.mock_client.stacks.validate.assert_called_with(**{
            'template': {
                'heat_template_version': '2013-05-23',
                'resources': {
                    'test': {
                        'type': 'OS::Heat::SoftwareConfig',
                        'properties': properties}}}})
        self.mock_client.software_configs.create.assert_called_with(
            **properties)

    def test_config_create_group(self):
        """--group overrides the default Heat::Ungrouped group."""
        properties = {
            'config': '',
            'group': 'group',
            'name': 'test',
            'options': {},
            'inputs': [],
            'outputs': []
        }
        arglist = ['test', '--group', 'group']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, rows = self.cmd.take_action(parsed_args)
        self.mock_client.stacks.validate.assert_called_with(**{
            'template': {
                'heat_template_version': '2013-05-23',
                'resources': {
                    'test': {
                        'type': 'OS::Heat::SoftwareConfig',
                        'properties': properties}}}})
        self.mock_client.software_configs.create.assert_called_with(
            **properties)

    @mock.patch('urllib.request.urlopen')
    def test_config_create_config_file(self, urlopen):
        """--config-file reads the config payload from a file/URL."""
        properties = {
            'config': 'config',
            'group': 'Heat::Ungrouped',
            'name': 'test',
            'options': {},
            'inputs': [],
            'outputs': []
        }
        # Simulate reading the config file contents through urlopen.
        data = mock.Mock()
        data.read.side_effect = ['config']
        urlopen.return_value = data
        arglist = ['test', '--config-file', 'config_file']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, rows = self.cmd.take_action(parsed_args)
        self.mock_client.stacks.validate.assert_called_with(**{
            'template': {
                'heat_template_version': '2013-05-23',
                'resources': {
                    'test': {
                        'type': 'OS::Heat::SoftwareConfig',
                        'properties': properties}}}})
        self.mock_client.software_configs.create.assert_called_with(
            **properties)

    @mock.patch('urllib.request.urlopen')
    def test_config_create_definition_file(self, urlopen):
        """--definition-file supplies inputs/outputs/options as YAML."""
        definition = {
            'inputs': [
                {'name': 'input'},
            ],
            'outputs': [
                {'name': 'output'}
            ],
            'options': {'option': 'value'}
        }
        properties = {
            'config': '',
            'group': 'Heat::Ungrouped',
            'name': 'test'
        }
        # The definition keys are merged into the create properties.
        properties.update(definition)
        # Serve the definition file contents as YAML through urlopen.
        data = mock.Mock()
        data.read.side_effect = [yaml.safe_dump(definition)]
        urlopen.return_value = data
        arglist = ['test', '--definition-file', 'definition-file']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, rows = self.cmd.take_action(parsed_args)
        self.mock_client.stacks.validate.assert_called_with(**{
            'template': {
                'heat_template_version': '2013-05-23',
                'resources': {
                    'test': {
                        'type': 'OS::Heat::SoftwareConfig',
                        'properties': properties}}}})
        self.mock_client.software_configs.create.assert_called_with(
            **properties)
class TestConfigShow(TestConfig):
    """Tests for the `software config show` command."""

    # Column names emitted by the show command, in display order.
    columns = (
        'id',
        'name',
        'group',
        'config',
        'inputs',
        'outputs',
        'options',
        'creation_time',
    )
    # Expected row values, positionally matching `columns`.
    data = (
        '96dfee3f-27b7-42ae-a03e-966226871ae6',
        'test',
        'Heat::Ungrouped',
        '',
        [],
        [],
        {},
        '2015-12-09T11:55:06',
    )
    # Fake API response assembled from the two tuples above.
    response = dict(zip(columns, data))

    def setUp(self):
        super(TestConfigShow, self).setUp()
        self.cmd = software_config.ShowConfig(self.app, None)
        self.mock_client.software_configs.get.return_value = \
            software_configs.SoftwareConfig(None, self.response)

    def test_config_show(self):
        """Showing a config fetches it by id and renders every column."""
        arglist = ['96dfee3f-27b7-42ae-a03e-966226871ae6']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.software_configs.get.assert_called_with(**{
            'config_id': '96dfee3f-27b7-42ae-a03e-966226871ae6',
        })
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, data)

    def test_config_show_config_only(self):
        """--config-only suppresses the tabular output entirely."""
        arglist = ['--config-only', '96dfee3f-27b7-42ae-a03e-966226871ae6']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        columns, data = self.cmd.take_action(parsed_args)
        self.mock_client.software_configs.get.assert_called_with(**{
            'config_id': '96dfee3f-27b7-42ae-a03e-966226871ae6',
        })
        self.assertIsNone(columns)
        self.assertIsNone(data)

    def test_config_show_not_found(self):
        """A missing config id surfaces as an osc CommandError."""
        arglist = ['96dfee3f-27b7-42ae-a03e-966226871ae6']
        parsed_args = self.check_parser(self.cmd, arglist, [])
        self.mock_client.software_configs.get.side_effect = \
            heat_exc.HTTPNotFound()
        self.assertRaises(
            exc.CommandError,
            self.cmd.take_action,
            parsed_args)
|
{
"content_hash": "f274296160aa86680b1c947be02f13ac",
"timestamp": "",
"source": "github",
"line_count": 258,
"max_line_length": 76,
"avg_line_length": 35.31395348837209,
"alnum_prop": 0.5573482603446384,
"repo_name": "openstack/python-heatclient",
"id": "f2afcb1c3003e5a719b1823248fa709e4aa29a33",
"size": "9677",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heatclient/tests/unit/osc/v1/test_software_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "892502"
}
],
"symlink_target": ""
}
|
"""EagleEye Platform settings module."""
import json
import os
__all__ = ['get_api_root_endpoint']
def get_api_root_endpoint():
    """Return the configured API root endpoint, or None when unset."""
    return _settings.get('api_root_endpoint')
def _read_config_file():
    """Internal helper to read settings JSON file and return settings.

    Returns:
        dict: parsed contents of ``eeplatform-settings.json`` located next
        to this module.
    """
    json_file_path = os.path.join(os.path.dirname(__file__),
                                  'eeplatform-settings.json')
    # JSON files are UTF-8 by specification; pass the encoding explicitly so
    # parsing does not depend on the platform default (e.g. cp1252 on
    # Windows), which can break on non-ASCII settings values.
    with open(json_file_path, encoding='utf-8') as settings:
        return json.load(settings)


# Module-level settings cache, loaded once at import time.
_settings = _read_config_file()
|
{
"content_hash": "baf98c939b7388914362dd94ac33deab",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 73,
"avg_line_length": 24.73913043478261,
"alnum_prop": 0.6344463971880492,
"repo_name": "CVBDL/ccollab2eeplatform-python",
"id": "0ae582f99cb6281053711ea6969d1e7d9dad63bb",
"size": "569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ccollab2eeplatform/settings/eeplatform_settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "113331"
}
],
"symlink_target": ""
}
|
import sys
import time
from django.apps import apps
from django.conf import settings
from django.core import serializers
from django.db import router
from django.utils.six import StringIO
from django.utils.six.moves import input
# The prefix to put on the default database name when creating
# the test database.
TEST_DATABASE_PREFIX = 'test_'
class BaseDatabaseCreation(object):
    """
    This class encapsulates all backend-specific differences that pertain to
    creation and destruction of the test database.
    """

    def __init__(self, connection):
        # The DatabaseWrapper this creation helper operates on.
        self.connection = connection

    @property
    def _nodb_connection(self):
        """
        Used to be defined here, now moved to DatabaseWrapper.
        """
        return self.connection._nodb_connection

    def create_test_db(self, verbosity=1, autoclobber=False, serialize=True, keepdb=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command
        test_database_name = self._get_test_db_name()
        if verbosity >= 1:
            action = 'Creating'
            if keepdb:
                action = "Using existing"
            print("%s test database for alias %s..." % (
                action,
                self._get_database_display_str(verbosity, test_database_name),
            ))
        # We could skip this call if keepdb is True, but we instead
        # give it the keepdb param. This is to handle the case
        # where the test DB doesn't exist, in which case we need to
        # create it, then just not destroy it. If we instead skip
        # this, we will get an exception.
        self._create_test_db(verbosity, autoclobber, keepdb)
        # Point both the global settings and this connection at the newly
        # created test database before migrating.
        self.connection.close()
        settings.DATABASES[self.connection.alias]["NAME"] = test_database_name
        self.connection.settings_dict["NAME"] = test_database_name
        # We report migrate messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded).
        call_command(
            'migrate',
            verbosity=max(verbosity - 1, 0),
            interactive=False,
            database=self.connection.alias,
            run_syncdb=True,
        )
        # We then serialize the current state of the database into a string
        # and store it on the connection. This slightly horrific process is so people
        # who are testing on databases without transactions or who are using
        # a TransactionTestCase still get a clean database on every test run.
        if serialize:
            self.connection._test_serialized_contents = self.serialize_db_to_string()
        call_command('createcachetable', database=self.connection.alias)
        # Ensure a connection for the side effect of initializing the test database.
        self.connection.ensure_connection()
        return test_database_name

    def set_as_test_mirror(self, primary_settings_dict):
        """
        Set this database up to be used in testing as a mirror of a primary database
        whose settings are given
        """
        self.connection.settings_dict['NAME'] = primary_settings_dict['NAME']

    def serialize_db_to_string(self):
        """
        Serializes all data in the database into a JSON string.
        Designed only for test runner usage; will not handle large
        amounts of data.
        """
        # Build list of all apps to serialize
        from django.db.migrations.loader import MigrationLoader
        loader = MigrationLoader(self.connection)
        app_list = []
        for app_config in apps.get_app_configs():
            if (
                app_config.models_module is not None and
                app_config.label in loader.migrated_apps and
                app_config.name not in settings.TEST_NON_SERIALIZED_APPS
            ):
                app_list.append((app_config, None))

        # Make a function to iteratively return every object
        def get_objects():
            for model in serializers.sort_dependencies(app_list):
                if (model._meta.can_migrate(self.connection) and
                        router.allow_migrate_model(self.connection.alias, model)):
                    queryset = model._default_manager.using(self.connection.alias).order_by(model._meta.pk.name)
                    for obj in queryset.iterator():
                        yield obj
        # Serialize to a string
        out = StringIO()
        serializers.serialize("json", get_objects(), indent=None, stream=out)
        return out.getvalue()

    def deserialize_db_from_string(self, data):
        """
        Reloads the database with data from a string generated by
        the serialize_db_to_string method.
        """
        data = StringIO(data)
        for obj in serializers.deserialize("json", data, using=self.connection.alias):
            obj.save()

    def _get_database_display_str(self, verbosity, database_name):
        """
        Return display string for a database for use in various actions.
        """
        # At verbosity >= 2 the real database name is shown alongside the alias.
        return "'%s'%s" % (
            self.connection.alias,
            (" ('%s')" % database_name) if verbosity >= 2 else '',
        )

    def _get_test_db_name(self):
        """
        Internal implementation - returns the name of the test DB that will be
        created. Only useful when called from create_test_db() and
        _create_test_db() and when no external munging is done with the 'NAME'
        settings.
        """
        if self.connection.settings_dict['TEST']['NAME']:
            return self.connection.settings_dict['TEST']['NAME']
        return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']

    def _create_test_db(self, verbosity, autoclobber, keepdb=False):
        """
        Internal implementation - creates the test db tables.
        """
        suffix = self.sql_table_creation_suffix()
        test_database_name = self._get_test_db_name()
        qn = self.connection.ops.quote_name
        # Create the test database and connect to it.
        with self._nodb_connection.cursor() as cursor:
            try:
                cursor.execute(
                    "CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
            except Exception as e:
                # if we want to keep the db, then no need to do any of the below,
                # just return and skip it all.
                if keepdb:
                    return test_database_name
                sys.stderr.write(
                    "Got an error creating the test database: %s\n" % e)
                if not autoclobber:
                    confirm = input(
                        "Type 'yes' if you would like to try deleting the test "
                        "database '%s', or 'no' to cancel: " % test_database_name)
                if autoclobber or confirm == 'yes':
                    try:
                        if verbosity >= 1:
                            print("Destroying old test database for alias %s..." % (
                                self._get_database_display_str(verbosity, test_database_name),
                            ))
                        cursor.execute(
                            "DROP DATABASE %s" % qn(test_database_name))
                        cursor.execute(
                            "CREATE DATABASE %s %s" % (qn(test_database_name),
                                                       suffix))
                    except Exception as e:
                        sys.stderr.write(
                            "Got an error recreating the test database: %s\n" % e)
                        sys.exit(2)
                else:
                    print("Tests cancelled.")
                    sys.exit(1)
        return test_database_name

    def clone_test_db(self, number, verbosity=1, autoclobber=False, keepdb=False):
        """
        Clone a test database.
        """
        source_database_name = self.connection.settings_dict['NAME']
        if verbosity >= 1:
            action = 'Cloning test database'
            if keepdb:
                action = 'Using existing clone'
            print("%s for alias %s..." % (
                action,
                self._get_database_display_str(verbosity, source_database_name),
            ))
        # We could skip this call if keepdb is True, but we instead
        # give it the keepdb param. See create_test_db for details.
        self._clone_test_db(number, verbosity, keepdb)

    def get_test_db_clone_settings(self, number):
        """
        Return a modified connection settings dict for the n-th clone of a DB.
        """
        # When this function is called, the test database has been created
        # already and its name has been copied to settings_dict['NAME'] so
        # we don't need to call _get_test_db_name.
        orig_settings_dict = self.connection.settings_dict
        new_settings_dict = orig_settings_dict.copy()
        new_settings_dict['NAME'] = '{}_{}'.format(orig_settings_dict['NAME'], number)
        return new_settings_dict

    def _clone_test_db(self, number, verbosity, keepdb=False):
        """
        Internal implementation - duplicate the test db tables.
        """
        # Backends that support cloning (e.g. for parallel test runs) must
        # override this hook.
        raise NotImplementedError(
            "The database backend doesn't support cloning databases. "
            "Disable the option to run tests in parallel processes.")

    def destroy_test_db(self, old_database_name=None, verbosity=1, keepdb=False, number=None):
        """
        Destroy a test database, prompting the user for confirmation if the
        database already exists.
        """
        self.connection.close()
        if number is None:
            test_database_name = self.connection.settings_dict['NAME']
        else:
            test_database_name = self.get_test_db_clone_settings(number)['NAME']
        if verbosity >= 1:
            action = 'Destroying'
            if keepdb:
                action = 'Preserving'
            print("%s test database for alias %s..." % (
                action,
                self._get_database_display_str(verbosity, test_database_name),
            ))
        # if we want to preserve the database
        # skip the actual destroying piece.
        if not keepdb:
            self._destroy_test_db(test_database_name, verbosity)
        # Restore the original database name
        if old_database_name is not None:
            settings.DATABASES[self.connection.alias]["NAME"] = old_database_name
            self.connection.settings_dict["NAME"] = old_database_name

    def _destroy_test_db(self, test_database_name, verbosity):
        """
        Internal implementation - remove the test db tables.
        """
        # Remove the test database to clean up after
        # ourselves. Connect to the previous database (not the test database)
        # to do so, because it's not allowed to delete a database while being
        # connected to it.
        with self.connection._nodb_connection.cursor() as cursor:
            # Wait to avoid "database is being accessed by other users" errors.
            time.sleep(1)
            cursor.execute("DROP DATABASE %s"
                           % self.connection.ops.quote_name(test_database_name))

    def sql_table_creation_suffix(self):
        """
        SQL to append to the end of the test table creation statements.
        """
        return ''

    def test_db_signature(self):
        """
        Returns a tuple with elements of self.connection.settings_dict (a
        DATABASES setting value) that uniquely identify a database
        accordingly to the RDBMS particularities.
        """
        settings_dict = self.connection.settings_dict
        return (
            settings_dict['HOST'],
            settings_dict['PORT'],
            settings_dict['ENGINE'],
            settings_dict['NAME']
        )
|
{
"content_hash": "d5b7e65c2b00107f5bf6ee8dc8c7ed17",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 112,
"avg_line_length": 40.08910891089109,
"alnum_prop": 0.5843418127932823,
"repo_name": "wfxiang08/django197",
"id": "bddc8ec1b003f9cb4ea6a5c7feb4292eb482ed97",
"size": "12170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/db/backends/base/creation.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "45763"
},
{
"name": "HTML",
"bytes": "54911"
},
{
"name": "JavaScript",
"bytes": "119249"
},
{
"name": "Python",
"bytes": "4215698"
}
],
"symlink_target": ""
}
|
"""
docker command
"""
import os
import re
import sys
import subprocess
from distutils.version import StrictVersion
from argparse import ArgumentParser, Action
from cirrus.logger import get_logger
from cirrus._2to3 import to_str
from cirrus.configuration import load_configuration
import dockerstache.dockerstache as ds
# Shared module logger for all cirrus docker helpers.
LOGGER = get_logger()

# Guidance shown when no docker daemon connection can be established.
DOCKER_CONNECTION_HELP = """
We could not connect to a docker daemon.
If you are using docker-machine, run 'docker-machine env <name>' and follow the
instructions to configure your shell.
If you are running docker natively, check that the docker service is running
and you have sufficient privileges to connect.
"""

# Guidance shown when "docker build" cannot apply multiple -t tags at once.
DOCKER_VERSION_BUILD_HELP = """
Your installed version of "docker build" does not support multiple tag (-t)
arguments. As a result, your image was not auto tagged as "latest".
"docker build -t repo:latest -t repo:1.2.3" is how we apply the "latest" tag to
a new image since "docker tag -f" was deprecated in Docker 1.10.0.
Please consider upgrading your Docker version.
"""

# Version 1.10.0 is needed for "docker build" with multiple -t arguments
DOCKER_REQUIRED_VERSION = '1.10.0'
class DockerVersionError(Exception):
    """Raised when the installed docker version cannot be verified."""
def parse_config_list(s):
    """Split a comma-separated config entry into a list of stripped values.

    Empty items (from trailing commas or doubled separators) are dropped,
    so ``'A, B,,C,'`` yields ``['A', 'B', 'C']``.
    """
    pieces = (piece.strip() for piece in s.split(','))
    return [piece for piece in pieces if piece]
class OptionHelper(dict):
    """Resolve docker options from cirrus.conf with CLI overrides.

    Behaves as a plain dict pre-populated from the [docker] section of
    cirrus.conf; selected command line options may override file values.
    """

    def __init__(self, cli_opts, config):
        super(OptionHelper, self).__init__()

        def conf(key):
            # Look up `key` in the [docker] section, defaulting to None.
            return config.get_param('docker', key, None)

        self['username'] = conf('docker_login_username')
        self['email'] = conf('docker_login_email')
        self['password'] = conf('docker_login_password')
        # Login is implied whenever a username is configured...
        self['login'] = self['username'] is not None
        self['docker_repo'] = conf('repo')
        addl_repos = conf('additional_repos')
        self['additional_repos'] = (
            parse_config_list(addl_repos) if addl_repos else []
        )
        # ...or when it is explicitly requested on the command line.
        if cli_opts.login:
            self['login'] = True
class BuildOptionHelper(OptionHelper):
    """Option resolution for `docker build`: cirrus.conf plus CLI flags."""

    def __init__(self, cli_opts, config):
        super(BuildOptionHelper, self).__init__(cli_opts, config)

        def conf(key):
            # Look up `key` in the [docker] section, defaulting to None.
            return config.get_param('docker', key, None)

        self['docker_repo'] = conf('repo')
        self['directory'] = conf('directory')
        self['template'] = conf('dockerstache_template')
        self['context'] = conf('dockerstache_context')
        self['defaults'] = conf('dockerstache_defaults')
        self['build_arg'] = {}
        self['no_cache'] = conf('no_cache')
        self['pull'] = conf('pull')

        # Command line flags take precedence over cirrus.conf values.
        for attr, key in (('docker_repo', 'docker_repo'),
                          ('directory', 'directory'),
                          ('dockerstache_template', 'template')):
            value = getattr(cli_opts, attr)
            if value:
                self[key] = value
        if cli_opts.build_arg:
            self['build_arg'].update(cli_opts.build_arg)
        if cli_opts.no_cache:
            self['no_cache'] = True
        if cli_opts.pull:
            self['pull'] = True
class StoreDictKeyPair(Action):
    """argparse action collecting repeated KEY=VALUE args into a dict.

    Example: ``--build-arg A=1 B=2`` yields ``{'A': '1', 'B': '2'}`` on the
    namespace attribute for the option's dest.
    """

    def __init__(self, option_strings, dest, nargs=None, **kwargs):
        self._nargs = nargs
        # Accumulate pairs per action instance. The previous implementation
        # used a class-level dict, so values leaked between independent
        # parsers/actions created in the same process.
        self._pairs = {}
        super(StoreDictKeyPair, self).__init__(
            option_strings, dest, nargs=nargs, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        for pair in values:
            # partition() splits on the first '=' only, so values may
            # themselves contain '=' (e.g. X=a=b); split('=') used to raise
            # ValueError for such inputs.
            key, _, value = pair.partition("=")
            self._pairs[key] = value
        setattr(namespace, self.dest, self._pairs)
def build_parser():
    """
    _build_parser_

    Set up command line parser for the docker-image command.

    :returns: parsed argparse Namespace (parses sys.argv as a side effect)
    """
    parser = ArgumentParser(
        description='git cirrus docker command'
    )
    parser.add_argument('command', nargs='?')
    subparsers = parser.add_subparsers(dest='command')

    # `build` subcommand: optionally render a dockerstache template, then
    # run `docker build`.
    build_command = subparsers.add_parser('build')
    build_command.add_argument(
        '--docker-repo', '-r',
        dest='docker_repo',
        help='docker repository name',
        default=None,
    )
    build_command.add_argument(
        '--login',
        action='store_true',
        dest='login',
        help='perform docker login before command using settings in cirrus.conf',
        default=False
    )
    build_command.add_argument(
        '--directory', '-d',
        dest='directory',
        help='path to directory containing dockerfile to run docker build in',
        default=None,
    )
    build_command.add_argument(
        '--dockerstache-template',
        dest='dockerstache_template',
        default=None,
        help='directory containing dockerstache template to render'
    )
    build_command.add_argument(
        '--dockerstache-context',
        dest='dockerstache_context',
        default=None,
        help='path to dockerstache context file'
    )
    build_command.add_argument(
        '--dockerstache-defaults',
        dest='dockerstache_defaults',
        default=None,
        help='path to dockerstache defaults file'
    )
    build_command.add_argument(
        '--build-arg',
        help='build arg key=value pairs to pass to docker build as build-arg options',
        nargs='+',
        action=StoreDictKeyPair
    )
    build_command.add_argument(
        '--no-cache',
        help='Dont use cached docker layers for build, build from scratch',
        default=False,
        action='store_true'
    )
    build_command.add_argument(
        '--pull',
        help='Dont use cached base docker image for build, re-pull base image from docker-registry',
        default=False,
        action='store_true'
    )
    build_command.add_argument(
        '--local-test',
        help=(
            'install latest dist tarball from ./dist into container '
            'instead of pip installing from remote pypi '
            'Run `git cirrus release build` prior to this to get your latest code '
            'installed for testing'),
        default=False,
        action='store_true'
    )

    # `push` subcommand: push previously built image tags to a registry.
    push_command = subparsers.add_parser('push')
    push_command.add_argument(
        '--login',
        action='store_true',
        dest='login',
        help='perform docker login before command using settings in cirrus.conf',
        default=False
    )
    push_command.add_argument(
        '--latest',
        action='store_true',
        dest='latest',
        help='include the image tagged "latest" in the docker push command',
        default=False
    )

    # `test` subcommand: sanity check the docker daemon connection.
    subparsers.add_parser('test', help='test docker connection')
    opts = parser.parse_args()
    return opts
def _docker_build(path, tags, base_tag, build_helper):
    """
    execute docker build <path> in a subprocess

    The build command uses the multiple tag (-t) option provided by
    `docker build` since release 1.10. Otherwise, only the last tag in the list
    is applied.

    :param path: filesystem path containing the build context (Dockerfile)
    :param tags: sequence of tag strings to apply to the image
    :param base_tag: full repository repo/tag string (repository/tag:0)
    :param build_helper: BuildOptionHelper-style mapping providing the
        'no_cache', 'pull' and 'build_arg' settings
    :returns: id of the image that was built
    :raises RuntimeError: if docker build exits non-zero
    """
    command = ['docker', 'build'] + _build_tag_opts(tags)
    if build_helper['no_cache']:
        command.append('--no-cache')
    if build_helper['pull']:
        command.append('--pull')
    if build_helper['build_arg']:
        # Forward each key=value pair as a separate --build-arg option.
        for k, v in build_helper['build_arg'].items():
            command.extend(["--build-arg", "{}={}".format(k, v)])
    command.append(path)
    LOGGER.info("Executing docker build command: {}".format(' '.join(command)))
    # Stream docker's own output straight through to the user's terminal.
    p = subprocess.Popen(
        command,
        stdout=sys.stdout,
        stderr=sys.stderr
    )
    status = p.wait()
    if status:
        msg = "docker build exited non-zero!"
        LOGGER.error(msg)
        raise RuntimeError(msg)
    image = find_image_id(base_tag)
    LOGGER.info("Image ID: {}".format(image))
    # Warn when the installed docker cannot apply multiple -t flags, in
    # which case the image was not also tagged "latest".
    if not is_docker_version_installed(DOCKER_REQUIRED_VERSION):
        LOGGER.warning(DOCKER_VERSION_BUILD_HELP)
    return image
def _build_tag_opts(tags):
"""
create a list of tag options suitable for consumption by
subprocess.check_output and similar functions.
>>> _build_tag_opts(['v1.2.3', 'latest'])
['-t', 'v1.2.3', '-t', 'latest']
:param tags: sequence of tag strings
:returns: list of tag arguments to be fed to docker build.
"""
tag_opts = []
for tag in tags:
tag_opts += ['-t'] + [tag]
return tag_opts
def _version_tuple(version):
    """Convert an 'x.y.z' version string into a comparable int tuple."""
    return tuple(int(part) for part in version.split('.'))


def is_docker_version_installed(required_version):
    """
    Check that the installed docker version is required_version or greater

    :param required_version: docker version string, such as 1.12.0
    :returns: True if the installed version satisfies the requirement
    """
    raw_version = get_docker_version()
    installed_version = match_docker_version(raw_version)
    # Compare numerically component by component. This previously used
    # distutils.version.StrictVersion, but distutils was deprecated by
    # PEP 632 and removed in Python 3.12; match_docker_version guarantees
    # a plain x.y.z string so an int-tuple comparison is equivalent here.
    return _version_tuple(installed_version) >= _version_tuple(required_version)
def get_docker_version():
    """
    Capture the locally installed docker version string.

    :returns: the raw, whitespace-stripped output of ``docker -v``
    :raises DockerVersionError: if the docker binary cannot be queried
    """
    try:
        output = subprocess.check_output(('docker', '-v'))
    except subprocess.CalledProcessError as ex:
        LOGGER.error(ex.output)
        raise DockerVersionError(
            "Installed Docker version cannot be determined")
    else:
        LOGGER.info(output)
        return output.strip()
def match_docker_version(raw_version_string):
    """
    Grab the docker version in xx.yy.zz format from an arbitrary string
    (works nicely when fed the output of get_docker_version).

    :param raw_version_string: a string containing a docker version,
        typically in the format returned by docker -v:
        "Docker version 1.12.0, build 8eab29e"
    :returns: the docker version string, cleaned up as xx.yy.zz
    :raises DockerVersionError: if no version pattern can be matched
    """
    # Raw string for the regex: '\.' is not a valid string escape and
    # raises a SyntaxWarning (eventually an error) on Python 3.12+.
    match = re.search(r'[0-9]+\.[0-9]+\.[0-9]+', to_str(raw_version_string))
    if match is None:
        raise DockerVersionError(
            "Installed Docker version cannot be determined. "
            "Could not match '{}'".format(raw_version_string))
    docker_version = match.group().strip()
    return docker_version
def find_image_id(base_tag):
    """
    grab the last created image id for the repo

    :param base_tag: repository/tag string used to grep the image list
    :returns: the image id string (empty string if no image matched)
    """
    # NOTE(review): shell=True with an interpolated tag. base_tag comes from
    # cirrus.conf rather than untrusted input, but shell-quoting it (or
    # parsing `docker images` output in Python) would be safer.
    command = (
        "echo $(docker images | grep '{tag}' | "
        "head -n 1 | awk '{{print $3}}')"
    ).format(tag=base_tag)
    process = subprocess.Popen([command], shell=True, stdout=subprocess.PIPE)
    outp, err = process.communicate()
    LOGGER.info("Latest Container: {}".format(outp))
    return to_str(outp.strip())
def _docker_login(helper):
    """
    perform a docker login call using email/user/pass from cirrus.conf
    if present.

    Returns true if login performed, false otherwise

    :param helper: OptionHelper-style mapping carrying the docker
        credentials and optional registry ('docker_repo')
    """
    if helper['username']:
        LOGGER.info("Running docker login as {}".format(helper['username']))
        # NOTE(review): the password is passed on the command line and is
        # therefore visible in the process list; `docker login
        # --password-stdin` would avoid that.
        command = [
            'docker', 'login',
            '-u', helper['username'],
            '-p', helper['password']
        ]
        if helper.get('docker_repo') is not None:
            # Log in to the configured registry instead of Docker Hub.
            command.append(helper['docker_repo'])
        stdout = subprocess.check_output(command)
        LOGGER.info(stdout)
        return True
    LOGGER.info('No docker login credentials provided in cirrus.conf')
    return False
def _docker_push(tag):
    """Push the image *tag* to its registry via ``docker push``."""
    push_cmd = ['docker', 'push', tag]
    LOGGER.info(
        "Executing docker push command: {}".format(' '.join(push_cmd)))
    LOGGER.info(subprocess.check_output(push_cmd))
def tag_base(config):
    """
    Build the repository part of the docker tag for this package.

    Uses the docker.repo setting from cirrus.conf, falling back to the
    organisation name, and inserts docker.package_prefix when present.
    """
    package = config.package_name()
    repo = config.get_param('docker', 'repo', None)
    if repo is None:
        repo = config.organisation_name()
    prefix = config.get_param('docker', 'package_prefix', None)
    # repo/package or repo/prefix/package when a prefix is configured
    parts = [repo, package] if prefix is None else [repo, prefix, package]
    return "/".join(parts)
def tag_name(config):
    """
    build the docker tag string

    Combines the repository base (see tag_base) with the package version.
    """
    return "{}:{}".format(tag_base(config), config.package_version())
def additional_repo_tags(config, repos, latest=False):
    """
    Build versioned (and optionally 'latest') tags for extra repositories.

    :param config: cirrus configuration object
    :param repos: iterable of extra repository names
    :param latest: when True also emit a repo/package:latest tag per repo
    :returns: list of tag strings, grouped by repository
    """
    package = config.package_name()
    version = config.package_version()
    # for each repo emit the versioned tag, followed by :latest if requested
    suffixes = [version, "latest"] if latest else [version]
    return [
        "{}/{}:{}".format(repo, package, suffix)
        for repo in repos
        for suffix in suffixes
    ]
def latest_tag_name(config):
    """Return the docker tag for the floating ``latest`` image of this package."""
    return "{}:latest".format(tag_base(config))
def docker_build(opts, config):
    """
    issue a docker build command in the directory
    specified.
    Optionally, if a dockerstache template is given, run
    dockerstache using that template and the build directory
    as output

    :param opts: parsed CLI options (argparse namespace)
    :param config: cirrus configuration object
    """
    tag = tag_name(config)
    latest = latest_tag_name(config)
    helper = BuildOptionHelper(opts, config)
    templ = helper['template']
    path = helper['directory']
    if not os.path.exists(path):
        os.makedirs(path)
    if opts.local_test:
        #
        # local test => override build args
        # assumes that the container-init stuff has been used
        local_tar = '/opt/{package}-latest.tar.gz'.format(
            package=config.package_name()
        )
        LOGGER.info("Local test build will install latest source tarball from dist...")
        helper['build_arg']['LOCAL_INSTALL'] = local_tar
    if helper['login']:
        check = _docker_login(helper)
        if not check:
            msg = "Unable to perform docker login due to missing cirrus conf entries"
            LOGGER.error(msg)
            sys.exit(1)
    if templ is not None:
        # render the dockerstache template into the build directory first
        ds.run(
            input=templ,
            output=path,
            context=helper['context'],
            defaults=helper['defaults'],
            extend_context=config.configuration_map()
        )
    # always build both the 'latest' and the versioned tag, plus tags for
    # any additional repositories configured
    tags = [latest, tag]
    if helper['additional_repos']:
        tags.extend(
            additional_repo_tags(
                config,
                helper['additional_repos'],
                latest=True
            )
        )
    _docker_build(path, tags, tag_base(config), helper)
def docker_push(opts, config):
    """
    run a docker push command to upload the
    tagged image to a registry

    :param opts: parsed CLI options (argparse namespace)
    :param config: cirrus configuration object
    """
    helper = OptionHelper(opts, config)
    if helper['login']:
        if not _docker_login(helper):
            msg = "Unable to perform docker login due to missing cirrus conf entries"
            LOGGER.error(msg)
            sys.exit(1)
    tag = tag_name(config)
    _docker_push(tag)
    if opts.latest:
        # also push the floating 'latest' tag
        _docker_push(latest_tag_name(config))
    if helper['additional_repos']:
        # push versioned (and optionally latest) tags for extra repositories
        tags = additional_repo_tags(config, helper['additional_repos'], opts.latest)
        for t in tags:
            _docker_push(t)
def is_docker_connected():
    """
    Tests whether the docker daemon is connected using the 'docker info'
    native command

    :returns: True if the daemon responded, False otherwise
    """
    try:
        # 'docker info' exits non-zero when the daemon is unreachable
        subprocess.check_output(['docker', 'info'], stderr=subprocess.STDOUT)
        LOGGER.info("Docker daemon connection successful")
    except subprocess.CalledProcessError as ex:
        LOGGER.error(ex)
        LOGGER.error(ex.output.strip())
        return False
    return True
def main():
    """
    _main_
    provide support for some basic docker operations so that
    building images can be standardised as part of a workflow
    """
    opts = build_parser()
    config = load_configuration()
    # a [docker] section in cirrus.conf is mandatory for every subcommand
    if not config.has_section('docker'):
        msg = (
            "Unable to find docker section in cirrus.conf"
            #TODO: Link to docs here
        )
        LOGGER.error(msg)
        sys.exit(1)
    # bail out early when the docker daemon is unreachable
    if not is_docker_connected():
        LOGGER.error(DOCKER_CONNECTION_HELP)
        sys.exit(1)
    if opts.command == 'build':
        docker_build(opts, config)
    if opts.command == 'push':
        docker_push(opts, config)
    if opts.command == 'test':
        # Already called above (the daemon check runs for every command)
        pass
if __name__ == '__main__':
    main()
|
{
"content_hash": "8e8f602ed6a2b70a0b67f74e00dd976f",
"timestamp": "",
"source": "github",
"line_count": 533,
"max_line_length": 100,
"avg_line_length": 31.00375234521576,
"alnum_prop": 0.61821482602118,
"repo_name": "evansde77/cirrus",
"id": "aa6e06e903acea140fe3c8076a811dcf158c689c",
"size": "16547",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/cirrus/docker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "5952"
},
{
"name": "HTML",
"bytes": "2855"
},
{
"name": "Python",
"bytes": "474730"
},
{
"name": "Shell",
"bytes": "27086"
}
],
"symlink_target": ""
}
|
"""
A good place for a public or proxy objects
"""
__author__ = 'dimd'
|
{
"content_hash": "dbca2cce201845ee1a544d5a349c727a",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 42,
"avg_line_length": 17.75,
"alnum_prop": 0.6197183098591549,
"repo_name": "dimddev/NetCatKS",
"id": "4ace24bee15fefe147ecaaace07dfc8a23b6d0a6",
"size": "71",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NetCatKS/Config/api/public/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "182697"
}
],
"symlink_target": ""
}
|
import re
import cStringIO
from crispy_forms.bootstrap import FormActions
from crispy_forms.layout import Reset, Submit, Layout, Div, HTML
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import Q
from django.forms import ModelMultipleChoiceField
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
import django_filters
from dj_kits.utils.forms import FormBaseMixin
from django import forms
import operator
class BaseForm(FormBaseMixin, forms.ModelForm):
    """
    Base ModelForm with crispy-forms styling, per-request language handling
    for translatable fields, and optional widget back-references to the form.
    """
    # names of translatable fields; the variant matching the request
    # language is removed in __init__ (see get_lang)
    i18n_fields = []
    # fields whose widget needs a back-reference to this form instance
    fields_widget_with_form = []
    form_class = 'form-horizontal'
    # save / reset buttons (labels are runtime strings, do not alter)
    form_inputs = [Submit('submit', _(u'保存')),
                   Reset('reset', _(u'重置'))]
    def __init__(self, *args, **kwargs):
        # request is injected by the view so the language code is available
        self.request = kwargs.pop('request', None)
        super(BaseForm, self).__init__(*args, **kwargs)
        lang = self.get_lang()
        if lang:
            # drop the language-specific duplicate of each i18n field,
            # e.g. 'title_zh_cn' when the request language is zh-cn
            for field in self.i18n_fields:
                self.fields.pop('%s_%s' % (field, lang), None)
        for field in self.fields_widget_with_form:
            self.fields[field].widget.form = self
    def is_edit(self):
        """Return True when the form edits an existing (saved) instance."""
        return self.instance.pk is not None
    def get_lang(self):
        """Return the request language code with '-' replaced by '_'."""
        return getattr(self.request, 'LANGUAGE_CODE', '').replace('-', '_')
class BaseFilterForm(FormBaseMixin, forms.Form):
    """
    Base form for list-page filtering: submitted via GET and rendered with
    crispy-forms as one inline group followed by a search button.
    """
    form_class = 'form-horizontal'
    form_method = 'get'
    def __init__(self, *args, **kwargs):
        super(BaseFilterForm, self).__init__(*args, **kwargs)
        # remember field names in declaration order for get_layout
        self.keys = self.fields.keys()
        for k, v in self.fields.items():
            # filter fields never display help text
            v.help_text = False
    def get_layout(self, helper):
        """Return the crispy-forms Layout used to render this filter form."""
        layout = Layout(
            Div(*self.keys, css_class="inline-group"),
            FormActions(Submit('submit', _(u'查询'))),
            Div(HTML("""<span class="search-slider-btn"></span>"""), css_class="search-slider")
        )
        return layout
class TemplateRenderFieldMixin(object):
    """
    Widget mixin that pipes the widget's normal output through a Django
    template (context: ``field`` = rendered html, ``widget`` = the widget).
    """
    # template path; may be set on the class, via the constructor kwarg,
    # or per-call in render()
    template = None
    def __init__(self, *args, **kwargs):
        template = kwargs.pop('template', None)
        if template:
            self.template = template
        if not self.template:
            raise Exception("template is required.")
        super(TemplateRenderFieldMixin, self).__init__(*args, **kwargs)
    def render(self, *args, **kwargs):
        # allow a one-off template override at render time
        template = kwargs.pop('template', None)
        if template:
            self.template = template
        field = super(TemplateRenderFieldMixin, self).render(*args, **kwargs)
        return render_to_string(self.template, {'field': field, 'widget': self})
class TemplateRenderSelect(TemplateRenderFieldMixin, forms.Select):
    """Select widget rendered through a custom template."""
class TemplateRenderSelectMultiple(TemplateRenderFieldMixin, forms.SelectMultiple):
    """Multi-select widget rendered through a custom template."""
class TemplateRenderClearableFileInput(TemplateRenderFieldMixin, forms.FileInput):
    """
    File input rendered through a template which also accepts a base64
    data-URI string posted in the form data (e.g. from a client-side image
    cropper) and converts it to an uploaded file.
    """
    def value_from_datadict(self, data, files, name):
        """
        Return the uploaded file for ``name``, decoding a base64 data-URI
        string from ``data`` into a SimpleUploadedFile when no real file
        was posted.
        """
        val = files.get(name, None) or data.get(name, None)
        # py2: 'unicode' is a subclass of 'basestring', so the original
        # (basestring, unicode) tuple was redundant
        if isinstance(val, basestring) and val:
            file_name = data.get('name', 'logo')
            match = re.search(r'base64,(.*)', val)
            if match is None:
                # not a data URI -- hand the raw value back instead of
                # crashing with AttributeError on match.group()
                return val
            img_str = match.group(1)
            temp_img = cStringIO.StringIO(img_str.decode('base64'))
            val = SimpleUploadedFile(file_name, temp_img.read())
        return val
|
{
"content_hash": "169fa312ce93c57238b52ab0db79ec64",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 95,
"avg_line_length": 32.19417475728155,
"alnum_prop": 0.6329915560916767,
"repo_name": "xuchao666/msz",
"id": "37d56ab5ffc7415e1d27e91b840aebb77e3d0de1",
"size": "3352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "msz/core/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "65421"
},
{
"name": "HTML",
"bytes": "35328"
},
{
"name": "JavaScript",
"bytes": "6929"
},
{
"name": "Python",
"bytes": "42485"
}
],
"symlink_target": ""
}
|
"""Module for constructing <img> tag."""
from __future__ import absolute_import
from ...lib.utils import validate_attribute_values, validate_url
from ...templates.html.tags import img
# Attribute metadata for the <img> tag: each supported attribute maps to a
# human-readable description and, where applicable, the closed set of
# permitted values ('values': None means free-form / not validated here).
ATTRIBUTES = {
    'align': {
        'description': 'Specifies the alignment of an image according '
                       'to surrounding elements',
        'values': ['left', 'right', 'middle', 'top', 'bottom']
    },
    'alt': {
        'description': 'Specifies an alternate text for an image',
        'values': None
    },
    'border': {
        'description': 'Specifies the width of the border around an '
                       'image',
        'values': None
    },
    'crossorigin': {
        'description': 'Allow images from third-party sites that allow'
                       ' cross-origin access to be used with canvas',
        'values': None
    },
    'height': {
        'description': 'Specifies the height of an image',
        'values': None
    },
    'hspace': {
        'description': 'Specifies the whitespace on left and right '
                       'side of an image',
        'values': None
    },
    'ismap': {
        'description': 'Specifies an image as a server-side image-map',
        'values': None
    },
    'longdesc': {
        'description': 'Specifies a URL to a detailed description of '
                       'an image',
        'values': None
    },
    'src': {
        'description': 'Specifies the URL of an image',
        'values': None
    },
    'usemap': {
        'description': 'Specifies an image as a client-side image-map',
        'values': None
    },
    'vspace': {
        'description': 'Specifies the whitespace on top and bottom of '
                       'an image',
        'values': None
    },
    'width': {
        'description': 'Specifies the width of an image',
        'values': None
    }
}
class Img(object):
    """Class for constructing <img> tag.
    Args:
        align (str): Specifies the alignment of an image according to
            surrounding elements.
        alt (str): Specifies an alternate text for an image.
        border (str): Specifies the width of the border around an image.
        crossorigin (str): Allow images from third-party sites that allow
            cross-origin access to be used with canvas.
        height (str): Specifies the height of an image.
        hspace (str): Specifies the whitespace on left and right side of an
            image.
        ismap (bool): Specifies an image as a server-side image-map.
        longdesc (str): Specifies a URL to a detailed description of an image.
        src (str): Specifies the URL of an image.
        usemap (str): Specifies an image as a client-side image-map.
        vspace (str): Specifies the whitespace on top and bottom of an image.
        width (str): Specifies the width of an image.
    .. versionadded:: 0.4.0
    """
    def __init__(self,
                 align=None,
                 alt=None,
                 border=None,
                 crossorigin=None,
                 height=None,
                 hspace=None,
                 ismap=False,
                 longdesc=None,
                 src=None,
                 usemap=None,
                 vspace=None,
                 width=None):
        self.tag = 'img'
        # only align, longdesc and src are validated; the remaining
        # attributes are stored verbatim
        validate_attribute_values(tag=self.tag,
                                  attribute_name='align',
                                  attribute_value=align,
                                  default_values=ATTRIBUTES['align']['values'])
        # TODO: Add validation for ismap attribute.
        validate_url(attribute_name='longdesc', url=longdesc)
        validate_url(attribute_name='src', url=src)
        # TODO: Add validation for usemap attribute.
        self.values = {'align': align,
                       'alt': alt,
                       'border': border,
                       'crossorigin': crossorigin,
                       'height': height,
                       'hspace': hspace,
                       'ismap': ismap,
                       'longdesc': longdesc,
                       'src': src,
                       'usemap': usemap,
                       'vspace': vspace,
                       'width': width}
    def construct(self):
        """Returns the constructed image tag <img>."""
        # delegate rendering to the shared img template
        return img.render(self.values)
|
{
"content_hash": "da31c896a05bc6c194bd76cd669eb159",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 79,
"avg_line_length": 34.72222222222222,
"alnum_prop": 0.5177142857142857,
"repo_name": "bharadwajyarlagadda/korona",
"id": "bcf321dc50babb61d4496899a1c4cdbc59a8f3f7",
"size": "4399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "korona/html/tags/image.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "150"
},
{
"name": "Python",
"bytes": "199694"
}
],
"symlink_target": ""
}
|
# Sphinx tutorial settings; the shared ../conf.py supplies everything else.
tutorial_title = u'Essential site scanning analysis'
tutorial_author = u'Burak Kaynak, Pemra Doruker'
tutorial_logo = u'' # default is ProDy logo
tutorial_version = u'' # default is latest ProDy version
# keep the following part as is
# execfile exists only on Python 2; fall back to exec+open on Python 3
try:
    execfile('../conf.py')
except NameError:
    exec(open('../conf.py').read())
|
{
"content_hash": "deaae8cb7ebe134cc04ebc5d576bdd20",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 63,
"avg_line_length": 33.8,
"alnum_prop": 0.6775147928994083,
"repo_name": "prody/ProDy-website",
"id": "569e3efad6e225c77cd1785f023104f585e33589",
"size": "338",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tutorials/essa_tutorial/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "152805"
},
{
"name": "HTML",
"bytes": "706920"
},
{
"name": "JavaScript",
"bytes": "31771"
},
{
"name": "Jupyter Notebook",
"bytes": "10406865"
},
{
"name": "Makefile",
"bytes": "19126"
},
{
"name": "PHP",
"bytes": "510"
},
{
"name": "Python",
"bytes": "41502"
},
{
"name": "Shell",
"bytes": "1060"
}
],
"symlink_target": ""
}
|
"""
Law example tasks to demonstrate HTCondor workflows at NAF.
The actual payload of the tasks is rather trivial.
"""
import six
import law
# import our "framework" tasks
from analysis.framework import Task, HTCondorWorkflow
class CreateChars(Task, HTCondorWorkflow, law.LocalWorkflow):
    """
    Simple task that has a trivial payload: converting integers into ascii characters. The task is
    designed to be a workflow with 26 branches. Each branch creates one character (a-z) and saves
    it to a json output file. While branches are numbered continuously from 0 to n-1, the actual
    data it processes is defined in the *branch_map*. A task can access this data via
    ``self.branch_map[self.branch]``, or via ``self.branch_data`` by convenience.
    By default, CreateChars is a HTCondorWorkflow (first workflow class in the inheritance order,
    MRO). If you want to execute it as a LocalWorkflow, add the ``"--workflow local"`` parameter on
    the command line. The code in this task should be completely independent of the actual *run
    location*, and law provides the means to do so.
    When a branch greater or equal to zero is set, e.g. via ``"--branch 1"``, you instantiate a
    single *branch task* rather than the workflow. Branch tasks are always executed locally.
    """
    def create_branch_map(self):
        # map branch indexes to ascii numbers from 97 to 122 ("a" to "z"),
        # i.e. {0: 97, 1: 98, ..., 25: 122} -- 26 branches total
        return {i: num for i, num in enumerate(range(97, 122 + 1))}
    def output(self):
        # it's best practice to encode the branch number into the output target
        return self.local_target("output_{}.json".format(self.branch))
    def run(self):
        # the branch data holds the integer number to convert
        num = self.branch_data
        # actual payload: convert to char
        char = chr(num)
        # use target formatters (implementing dump and load, based on the file extension)
        # to write the output target
        output = self.output()
        output.dump({"num": num, "char": char})
class CreateAlphabet(Task):
    """
    This task requires the CreateChars workflow and extracts the created characters to write the
    alphabet into a text file.
    """
    def requires(self):
        # req() is defined on all tasks and handles the passing of all parameter values that are
        # common between the required task and the instance (self)
        # note that the workflow is required (branch -1, the default), not the particular branch
        # tasks (branches [0, 26))
        return CreateChars.req(self)
    def output(self):
        # output a plain text file
        return self.local_target("alphabet.txt")
    def run(self):
        # since we require the workflow and not the branch tasks (see above), self.input() points
        # to the output of the workflow, which contains the output of its branches in a target
        # collection, stored - of course - in "collection"
        inputs = self.input()["collection"].targets
        # loop over all targets in the collection, load the json data, and append the character
        # to the alphabet
        alphabet = ""
        for inp in six.itervalues(inputs):
            alphabet += inp.load()["char"]
        # again, dump the alphabet string into the output file
        output = self.output()
        output.dump(alphabet + "\n")
        # some status message
        # publish_message not only prints the message to stdout, but sends it to the scheduler
        # where it will become visible in the browser visualization
        alphabet = "".join(law.util.colored(c, color="random") for c in alphabet)
        self.publish_message("\nbuilt alphabet: {}\n".format(alphabet))
|
{
"content_hash": "3a2c68c275cbfbeef45ea836c17ab1d1",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 99,
"avg_line_length": 40.89010989010989,
"alnum_prop": 0.6745498521902714,
"repo_name": "riga/law",
"id": "c695d6afa66525e561df90cd21b49e08d94467e8",
"size": "3738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/htcondor_at_naf/analysis/tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "851844"
},
{
"name": "Shell",
"bytes": "58608"
}
],
"symlink_target": ""
}
|
from datetime import datetime
import pytz
from django.contrib.auth.models import User
from django.urls import reverse
from django.test.utils import override_settings
from django.utils import timezone
from unittest.mock import call, patch
from casepro.test import BaseCasesTest
from .models import ROLE_ADMIN, ROLE_ANALYST, ROLE_MANAGER, Notification, Profile
from .tasks import send_notifications
class NotificationTest(BaseCasesTest):
    """Tests for Notification creation on message labelling / case events and
    for the send_notifications task that emails them out."""
    def setUp(self):
        super(NotificationTest, self).setUp()
        self.ann = self.create_contact(self.unicef, "C-001", "Ann")
    def test_new_message_labelling(self):
        # watching a label subscribes a user to labelling notifications
        self.aids.watch(self.admin)
        self.pregnancy.watch(self.user1)
        msg = self.create_message(self.unicef, 101, self.ann, "Hello")
        self.assertFalse(msg.notification_set.all())
        msg.label(self.aids)
        Notification.objects.get(user=self.admin, message=msg, type=Notification.TYPE_MESSAGE_LABELLING, is_sent=False)
        # adding more labels won't create new notification for this message and user
        msg.label(self.pregnancy, self.aids)
        self.assertEqual(Notification.objects.count(), 2)
        Notification.objects.get(user=self.admin, message=msg, type=Notification.TYPE_MESSAGE_LABELLING, is_sent=False)
        Notification.objects.get(user=self.user1, message=msg, type=Notification.TYPE_MESSAGE_LABELLING, is_sent=False)
    @patch("casepro.profiles.models.send_email")
    def test_send_all(self, mock_send_email):
        self.aids.watch(self.admin)
        self.pregnancy.watch(self.user1)
        msg1 = self.create_message(self.unicef, 101, self.ann, "Hello", [self.aids])
        msg2 = self.create_message(self.unicef, 102, self.ann, "Hello", [self.pregnancy])
        send_notifications()
        # all notifications now marked as sent
        self.assertEqual(Notification.objects.filter(is_sent=False).count(), 0)
        mock_send_email.assert_has_calls(
            [
                call(
                    [self.admin],
                    "New labelled message",
                    "profiles/email/message_labelling",
                    {"labels": {self.aids}, "inbox_url": "http://unicef.localhost:8000/"},
                ),
                call(
                    [self.user1],
                    "New labelled message",
                    "profiles/email/message_labelling",
                    {"labels": {self.pregnancy}, "inbox_url": "http://unicef.localhost:8000/"},
                ),
            ]
        )
        mock_send_email.reset_mock()
        # replies on a watched case notify the watcher
        case1 = self.create_case(self.unicef, self.ann, self.moh, msg1)
        case1.watch(self.admin)
        case1.add_reply(msg2)
        send_notifications()
        mock_send_email.assert_has_calls(
            [
                call(
                    [self.admin],
                    "New reply in case #%d" % case1.pk,
                    "profiles/email/case_reply",
                    {"case_url": "http://unicef.localhost:8000/case/read/%d/" % case1.pk},
                )
            ]
        )
        mock_send_email.reset_mock()
        # note / close / reopen / reassign each produce their own email
        case1.add_note(self.user1, "General note")
        case1.close(self.user1, "Close note")
        case1.reopen(self.user1)
        case1.reassign(self.admin, self.who)
        send_notifications()
        self.assertEqual(len(mock_send_email.mock_calls), 5)
        mock_send_email.assert_has_calls(
            [
                call(
                    [self.admin],
                    "New note in case #%d" % case1.pk,
                    "profiles/email/case_new_note",
                    {
                        "user": self.user1,
                        "note": "General note",
                        "assignee": None,
                        "case_url": "http://unicef.localhost:8000/case/read/%d/" % case1.pk,
                    },
                ),
                call(
                    [self.admin],
                    "Case #%d was closed" % case1.pk,
                    "profiles/email/case_closed",
                    {
                        "user": self.user1,
                        "note": "Close note",
                        "assignee": None,
                        "case_url": "http://unicef.localhost:8000/case/read/%d/" % case1.pk,
                    },
                ),
                call(
                    [self.admin],
                    "Case #%d was reopened" % case1.pk,
                    "profiles/email/case_reopened",
                    {
                        "user": self.user1,
                        "note": None,
                        "assignee": None,
                        "case_url": "http://unicef.localhost:8000/case/read/%d/" % case1.pk,
                    },
                ),
                call(
                    [self.user1],
                    "Case #%d was reassigned" % case1.pk,
                    "profiles/email/case_reassigned",
                    {
                        "user": self.admin,
                        "note": None,
                        "assignee": self.who,
                        "case_url": "http://unicef.localhost:8000/case/read/%d/" % case1.pk,
                    },
                ),
                call(
                    [self.user3],
                    "New case assignment #%d" % case1.pk,
                    "profiles/email/case_assignment",
                    {"user": self.admin, "case_url": "http://unicef.localhost:8000/case/read/%d/" % case1.pk},
                ),
            ]
        )
        mock_send_email.reset_mock()
        # if nothing has happened, no emails are sent
        send_notifications()
        self.assertNotCalled(mock_send_email)
class ProfileTest(BaseCasesTest):
    """Tests for the Profile.create_* user factory helpers."""
    def test_create_user(self):
        # create un-attached user
        user1 = Profile.create_user("Tom McTicket", "tom@unicef.org", "Qwerty123")
        self.assertEqual(user1.profile.full_name, "Tom McTicket")
        self.assertFalse(user1.profile.change_password)
        self.assertEqual(user1.first_name, "")
        self.assertEqual(user1.last_name, "")
        self.assertEqual(user1.email, "tom@unicef.org")
        self.assertEqual(user1.get_full_name(), "Tom McTicket")
        self.assertIsNotNone(user1.password)
        # create org-level user
        user2 = Profile.create_org_user(self.unicef, "Cary McCase", "cary@unicef.org", "Qwerty123")
        self.assertIn(user2, self.unicef.administrators.all())
        self.assertFalse(user2.partners.all())
        # create partner-level manager user
        user3 = Profile.create_partner_user(self.unicef, self.moh, ROLE_MANAGER, "Mo Cases", "mo@moh.com", "Qwerty123")
        self.assertIn(user3, self.unicef.editors.all())
        self.assertIn(user3, self.moh.users.all())
        # create partner-level analyst user
        user4 = Profile.create_partner_user(self.unicef, self.moh, ROLE_ANALYST, "Jo Cases", "jo@moh.com", "Qwerty123")
        self.assertIn(user4, self.unicef.viewers.all())
        self.assertIn(user4, self.moh.users.all())
        # test creating user with long email
        user5 = Profile.create_user("Lou", "lou123456789012345678901234567890@moh.com", "Qwerty123")
        self.assertEqual(user5.email, "lou123456789012345678901234567890@moh.com")
class UserTest(BaseCasesTest):
    """Tests for the User model monkey-patched helpers (roles, permissions,
    org membership)."""
    def test_has_profile(self):
        self.assertFalse(self.superuser.has_profile())
        self.assertTrue(self.admin.has_profile())
        self.assertTrue(self.user1.has_profile())
    def test_must_use_faq(self):
        self.assertFalse(User.must_use_faq(self.superuser))
        self.assertFalse(User.must_use_faq(self.user1))
        self.user1.profile.must_use_faq = True
        self.user1.save()
        self.assertTrue(User.must_use_faq(self.user1))
    def test_get_full_name(self):
        self.assertEqual(self.superuser.get_full_name(), "")
        self.assertEqual(self.admin.get_full_name(), "Kidus")
        self.assertEqual(self.user1.get_full_name(), "Evan")
    def test_get_role(self):
        self.assertEqual(self.admin.get_role(self.unicef), ROLE_ADMIN)
        self.assertEqual(self.user1.get_role(self.unicef), ROLE_MANAGER)
        self.assertEqual(self.user2.get_role(self.unicef), ROLE_ANALYST)
        self.assertEqual(self.user4.get_role(self.unicef), None)
        self.assertEqual(self.admin.get_role(self.nyaruka), None)
    def test_update_role(self):
        # change role from manager to analyst, partner from moh to who
        self.user1.update_role(self.unicef, ROLE_ANALYST, self.who)
        self.assertEqual(set(self.user1.partners.all()), {self.who})
        self.assertTrue(self.user1 not in self.unicef.administrators.all())
        self.assertTrue(self.user1 not in self.unicef.editors.all())
        self.assertTrue(self.user1 in self.unicef.viewers.all())
        # change role from analyst to manager, partner from who to moh
        self.user1.update_role(self.unicef, ROLE_MANAGER, self.moh)
        self.assertEqual(set(self.user1.partners.all()), {self.moh})
        self.assertTrue(self.user1 not in self.unicef.administrators.all())
        self.assertTrue(self.user1 in self.unicef.editors.all())
        self.assertTrue(self.user1 not in self.unicef.viewers.all())
        # change role from manager to administrator, remove from partners
        self.user1.update_role(self.unicef, ROLE_ADMIN, None)
        self.assertEqual(set(self.user1.partners.all()), set())
        self.assertIn(self.user1, self.unicef.administrators.all())
        self.assertNotIn(self.user1, self.unicef.editors.all())
        self.assertNotIn(self.user1, self.unicef.viewers.all())
        # can add them as partner user in another org without changing admin status in this org
        self.user1.update_role(self.nyaruka, ROLE_ANALYST, self.klab)
        self.assertEqual(set(self.user1.partners.all()), {self.klab})
        self.assertIn(self.user1, self.unicef.administrators.all())
        self.assertNotIn(self.user1, self.unicef.editors.all())
        self.assertNotIn(self.user1, self.unicef.viewers.all())
        self.assertNotIn(self.user1, self.nyaruka.administrators.all())
        self.assertNotIn(self.user1, self.nyaruka.editors.all())
        self.assertIn(self.user1, self.nyaruka.viewers.all())
        # error if partner provided for non-partner role
        self.assertRaises(ValueError, self.user1.update_role, self.unicef, ROLE_ADMIN, self.who)
        # error if no partner provided for partner role
        self.assertRaises(ValueError, self.user1.update_role, self.unicef, ROLE_MANAGER, None)
        # set user2 as the primary_contact for moh
        self.moh.primary_contact = self.user2
        self.moh.save()
        self.user2.refresh_from_db()
        # assure the primary_contact relationship exists
        self.assertEqual(self.moh.primary_contact, self.user2)
        self.assertEqual(self.moh, self.user2.partners_primary.get(org=self.unicef))
        # change user2 role from analyst to manager, keep partner the same
        self.user2.update_role(self.unicef, ROLE_MANAGER, self.moh)
        self.assertEqual(set(self.user2.partners.all()), {self.moh})
        self.assertTrue(self.user2 not in self.unicef.administrators.all())
        self.assertTrue(self.user2 in self.unicef.editors.all())
        self.assertTrue(self.user2 not in self.unicef.viewers.all())
        self.assertEqual(self.moh.primary_contact, self.user2)
        self.assertEqual(self.moh, self.user2.partners_primary.get(org=self.unicef))
        # change user2 partner (clears their primary_contact status on moh)
        self.user2.update_role(self.unicef, ROLE_MANAGER, self.who)
        self.moh.refresh_from_db()
        self.assertEqual(set(self.user2.partners.all()), {self.who})
        self.assertTrue(self.user2 not in self.unicef.administrators.all())
        self.assertTrue(self.user2 in self.unicef.editors.all())
        self.assertTrue(self.user2 not in self.unicef.viewers.all())
        self.assertEqual(self.moh.primary_contact, None)
        self.assertEqual(0, self.user2.partners_primary.filter(org=self.unicef).count())
        # set user2 as the primary_contact for who
        self.who.primary_contact = self.user2
        self.who.save()
        self.user2.refresh_from_db()
        # assure the primary_contact relationship exists
        self.assertEqual(self.who.primary_contact, self.user2)
        self.assertEqual(self.who, self.user2.partners_primary.get(org=self.unicef))
        # change user2 role from manager to admin (clears primary_contact too)
        self.user2.update_role(self.unicef, ROLE_ADMIN, None)
        self.who.refresh_from_db()
        self.assertEqual(set(self.user2.partners.all()), set())
        self.assertIn(self.user2, self.unicef.administrators.all())
        self.assertNotIn(self.user2, self.unicef.editors.all())
        self.assertNotIn(self.user2, self.unicef.viewers.all())
        self.assertEqual(self.who.primary_contact, None)
        self.assertEqual(0, self.user2.partners_primary.filter(org=self.unicef).count())
    def test_can_administer(self):
        # superusers can administer any org
        self.assertTrue(self.superuser.can_administer(self.unicef))
        self.assertTrue(self.superuser.can_administer(self.nyaruka))
        # admins can administer their org
        self.assertTrue(self.admin.can_administer(self.unicef))
        self.assertFalse(self.admin.can_administer(self.nyaruka))
        # managers and analysts can't administer any org
        self.assertFalse(self.user1.can_administer(self.unicef))
    def test_can_manage(self):
        # superusers can manage any partner
        self.assertTrue(self.superuser.can_manage(self.moh))
        self.assertTrue(self.superuser.can_manage(self.who))
        self.assertTrue(self.superuser.can_manage(self.klab))
        # admins can manage any partner in their org
        self.assertTrue(self.admin.can_manage(self.moh))
        self.assertTrue(self.admin.can_manage(self.who))
        self.assertFalse(self.admin.can_manage(self.klab))
        # managers can manage their partner
        self.assertTrue(self.user1.can_manage(self.moh))
        self.assertFalse(self.user1.can_manage(self.who))
        self.assertFalse(self.user1.can_manage(self.klab))
        # analysts can't manage anyone
        self.assertFalse(self.user2.can_manage(self.moh))
        self.assertFalse(self.user2.can_manage(self.who))
        self.assertFalse(self.user2.can_manage(self.klab))
    def test_can_edit(self):
        # superusers can edit anyone
        self.assertTrue(self.superuser.can_edit(self.unicef, self.admin))
        self.assertTrue(self.superuser.can_edit(self.unicef, self.user1))
        self.assertTrue(self.superuser.can_edit(self.nyaruka, self.user4))
        # admins can edit any user in their org
        self.assertTrue(self.admin.can_edit(self.unicef, self.admin))
        self.assertTrue(self.admin.can_edit(self.unicef, self.user1))
        self.assertTrue(self.admin.can_edit(self.unicef, self.user2))
        self.assertTrue(self.admin.can_edit(self.unicef, self.user3))
        self.assertFalse(self.admin.can_edit(self.unicef, self.user4))
        # managers can edit any user from same partner
        self.assertFalse(self.user1.can_edit(self.unicef, self.admin))
        self.assertTrue(self.user1.can_edit(self.unicef, self.user1))
        self.assertTrue(self.user1.can_edit(self.unicef, self.user2))
        self.assertFalse(self.user1.can_edit(self.unicef, self.user3))
        self.assertFalse(self.user1.can_edit(self.unicef, self.user4))
        # analysts can't edit anyone
        self.assertFalse(self.user2.can_edit(self.unicef, self.admin))
        self.assertFalse(self.user2.can_edit(self.unicef, self.user1))
        self.assertFalse(self.user2.can_edit(self.unicef, self.user2))
        self.assertFalse(self.user2.can_edit(self.unicef, self.user3))
        # NOTE(review): duplicate of the previous assertion; probably meant
        # self.user4 -- confirm before changing
        self.assertFalse(self.user2.can_edit(self.unicef, self.user3))
    def test_remove_from_org(self):
        # setup case which users are watching
        ann = self.create_contact(self.unicef, "C-001", "Ann")
        msg = self.create_message(self.unicef, 101, ann, "Hello", [self.aids])
        case = self.create_case(self.unicef, ann, self.moh, msg, [self.aids], summary="Summary")
        case.watchers.add(self.admin, self.user1)
        # add our admin as a partner user in a different org
        self.admin.update_role(self.nyaruka, ROLE_ANALYST, self.klab)
        # set our admin as the primary contact for this partner
        self.klab.primary_contact = self.admin
        self.klab.save()
        self.admin.refresh_from_db()
        # assure the primary_contact relationship exists
        self.assertEqual(self.klab.primary_contact, self.admin)
        self.assertEqual(self.klab, self.admin.partners_primary.get(org=self.nyaruka))
        # have users watch a label too
        self.pregnancy.watchers.add(self.admin, self.user1)
        # try with org admin
        self.admin.remove_from_org(self.unicef)
        self.admin.refresh_from_db()
        self.assertIsNone(self.unicef.get_user_org_group(self.admin))
        self.assertNotIn(self.admin, case.watchers.all())
        # their status in other org shouldn't be affected
        self.assertIn(self.admin, self.nyaruka.viewers.all())
        self.assertIn(self.admin, self.klab.users.all())
        # try with partner user
        self.user1.remove_from_org(self.unicef)
        self.user1.refresh_from_db()
        self.assertIsNone(self.unicef.get_user_org_group(self.user1))
        self.assertIsNone(self.user1.get_partner(self.unicef))
        self.assertNotIn(self.user1, case.watchers.all())
        self.assertNotIn(self.user1, self.pregnancy.watchers.all())
        # remove our admin from the partner org
        self.admin.remove_from_org(self.nyaruka)
        self.klab.refresh_from_db()
        # assure the primary_contact relationship has been removed
        self.assertEqual(self.klab.primary_contact, None)
        self.assertEqual(0, self.admin.partners_primary.filter(org=self.nyaruka).count())
    def test_str(self):
        self.assertEqual(str(self.superuser), "testroot")
        self.assertEqual(str(self.user1), "Evan (evan@unicef.org)")
        # falls back to the email when the profile has no full name
        self.user1.profile.full_name = None
        self.user1.profile.save()
        self.assertEqual(str(self.user1), "evan@unicef.org")
class UserCRUDLTest(BaseCasesTest):
    """Tests for the profiles app user views: create, create-in-partner, read, update, list, delete, self and login."""

    def test_create(self):
        """Org-level create view: superusers make unattached users; org admins choose role/partner; partner users are denied."""
        url = reverse("profiles.user_create")

        # log in as a superuser
        self.login(self.superuser)

        # submit with no subdomain (i.e. no org) and no fields entered
        response = self.url_post(None, url, {})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "name", "This field is required.")
        self.assertFormError(response, "form", "email", "This field is required.")
        self.assertFormError(response, "form", "password", "This field is required.")

        # submit again with all required fields to create an un-attached user
        response = self.url_post(
            None,
            url,
            {
                "name": "McAdmin",
                "email": "mcadmin@casely.com",
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        user = User.objects.get(email="mcadmin@casely.com")
        self.assertRedirects(response, "/user/read/%d/" % user.pk, fetch_redirect_response=False)
        self.assertEqual(user.get_full_name(), "McAdmin")
        self.assertEqual(user.username, "mcadmin@casely.com")
        self.assertIsNone(user.get_partner(self.unicef))
        self.assertFalse(user.can_administer(self.unicef))

        # log in as an org administrator
        self.login(self.admin)

        # should see both partner and role options
        response = self.url_get("unicef", url, {})
        self.assertTrue("partner" in response.context["form"].fields)
        self.assertTrue("role" in response.context["form"].fields)

        # submit with no fields entered
        response = self.url_post("unicef", url, {})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "name", "This field is required.")
        self.assertFormError(response, "form", "role", "This field is required.")
        self.assertFormError(response, "form", "email", "This field is required.")
        self.assertFormError(response, "form", "password", "This field is required.")

        # create another org admin user
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Adrian Admin",
                "email": "adrian@casely.com",
                "role": ROLE_ADMIN,
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        user = User.objects.get(email="adrian@casely.com")
        self.assertRedirects(response, "/user/read/%d/" % user.pk, fetch_redirect_response=False)
        self.assertEqual(user.get_full_name(), "Adrian Admin")
        self.assertEqual(user.username, "adrian@casely.com")
        self.assertIsNone(user.get_partner(self.unicef))
        self.assertTrue(user in self.unicef.administrators.all())

        # submit again without providing a partner for role that requires one
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Mo Cases",
                "email": "mo@casely.com",
                "partner": None,
                "role": ROLE_ANALYST,
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertFormError(response, "form", "partner", "Required for role.")

        # submit again with all required fields but invalid password
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Mo Cases",
                "email": "mo@casely.com",
                "partner": self.moh.pk,
                "role": ROLE_ANALYST,
                "password": "123",
                "confirm_password": "123",
            },
        )
        self.assertFormError(response, "form", "password", "Must be at least 10 characters long")

        # submit again with valid password but mismatched confirmation
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Mo Cases",
                "email": "mo@casely.com",
                "partner": self.moh.pk,
                "role": ROLE_ANALYST,
                "password": "Qwerty12345",
                "confirm_password": "Azerty23456",
            },
        )
        self.assertFormError(response, "form", "confirm_password", "Passwords don't match.")

        # submit again with valid password and confirmation
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Mo Cases",
                "email": "mo@casely.com",
                "partner": self.moh.pk,
                "role": ROLE_ANALYST,
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        user = User.objects.get(email="mo@casely.com")
        self.assertRedirects(response, "/user/read/%d/" % user.pk, fetch_redirect_response=False)

        # check new user and profile
        self.assertEqual(user.profile.full_name, "Mo Cases")
        self.assertEqual(user.email, "mo@casely.com")
        self.assertEqual(user.username, "mo@casely.com")
        self.assertTrue(user in self.unicef.viewers.all())
        self.assertEqual(user.get_partner(self.unicef), self.moh)

        # try again with same email address
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Mo Cases II",
                "email": "mo@casely.com",
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertFormError(response, "form", None, "Email address already taken.")

        # log in as a partner manager
        self.login(self.user1)

        # can't access this view without a specified partner
        response = self.url_get("unicef", url)
        self.assertLoginRedirect(response, url)

    def test_create_in(self):
        """Partner-scoped create view: partner is fixed by the URL and any submitted partner value is ignored."""
        url = reverse("profiles.user_create_in", args=[self.moh.pk])

        # log in as an org administrator
        self.login(self.admin)

        # submit with no fields entered
        response = self.url_post("unicef", url, {})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "name", "This field is required.")
        self.assertFormError(response, "form", "role", "This field is required.")
        self.assertFormError(response, "form", "email", "This field is required.")
        self.assertFormError(response, "form", "password", "This field is required.")

        # submit again with all required fields
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Mo Cases",
                "email": "mo@casely.com",
                "role": ROLE_ANALYST,
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        user = User.objects.get(email="mo@casely.com")
        self.assertRedirects(response, "/user/read/%d/" % user.pk, fetch_redirect_response=False)
        self.assertEqual(user.get_partner(self.unicef), self.moh)

        # log in as a partner manager
        self.login(self.user1)

        # submit with no fields entered
        response = self.url_post("unicef", url, {})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "name", "This field is required.")
        self.assertFormError(response, "form", "role", "This field is required.")
        self.assertFormError(response, "form", "email", "This field is required.")
        self.assertFormError(response, "form", "password", "This field is required.")

        # submit again with all required fields to create another manager
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "McManage",
                "email": "manager@moh.com",
                "role": ROLE_MANAGER,
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertEqual(response.status_code, 302)

        user = User.objects.get(email="manager@moh.com")
        self.assertEqual(user.get_full_name(), "McManage")
        self.assertEqual(user.username, "manager@moh.com")
        self.assertEqual(user.get_partner(self.unicef), self.moh)
        self.assertFalse(user.can_administer(self.unicef))
        self.assertTrue(user.can_manage(self.moh))

        # submit again with partner - not allowed and will be ignored
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Bob",
                "email": "bob@moh.com",
                "partner": self.who,
                "role": ROLE_MANAGER,
                "password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertEqual(response.status_code, 302)

        user = User.objects.get(email="bob@moh.com")
        self.assertEqual(user.get_partner(self.unicef), self.moh)  # WHO was ignored

        # partner managers can't access page for other partner orgs
        url = reverse("profiles.user_create_in", args=[self.who.pk])
        response = self.url_post("unicef", url)
        self.assertLoginRedirect(response, url)

        # partner analysts can't access page at all
        self.login(self.user2)
        url = reverse("profiles.user_create_in", args=[self.moh.pk])
        response = self.url_post("unicef", url)
        self.assertLoginRedirect(response, url)

    def test_update(self):
        """Update view: visible form fields and editable users depend on superuser / org admin / partner manager role."""
        url = reverse("profiles.user_update", args=[self.user2.pk])

        # log in as superuser
        self.login(self.superuser)

        response = self.url_get(None, url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            set(response.context["form"].fields.keys()),
            {"name", "email", "new_password", "confirm_password", "change_password", "must_use_faq", "loc"},
        )

        # submit with all required fields, updating name
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Richard",
                "email": "rick@unicef.org",
                "partner": self.moh.pk,
                "role": ROLE_ANALYST,
                "change_password": True,
            },
        )
        self.assertEqual(response.status_code, 302)

        self.user2.refresh_from_db()
        self.user2.profile.refresh_from_db()
        self.assertEqual(self.user2.profile.full_name, "Richard")
        self.assertEqual(self.user2.profile.change_password, True)
        self.assertEqual(self.user2.email, "rick@unicef.org")
        self.assertEqual(self.user2.username, "rick@unicef.org")
        self.assertIn(self.user2, self.unicef.viewers.all())
        self.assertEqual(self.user2.get_partner(self.unicef), self.moh)

        # log in as an org administrator
        self.login(self.admin)

        response = self.url_get("unicef", url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            set(response.context["form"].fields.keys()),
            {
                "name",
                "email",
                "role",
                "partner",
                "new_password",
                "confirm_password",
                "change_password",
                "must_use_faq",
                "loc",
            },
        )

        # submit with no fields entered
        response = self.url_post("unicef", url, {})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "name", "This field is required.")
        self.assertFormError(response, "form", "role", "This field is required.")
        self.assertFormError(response, "form", "email", "This field is required.")

        # submit with all required fields
        response = self.url_post(
            "unicef", url, {"name": "Bill", "email": "bill@unicef.org", "partner": self.who.pk, "role": ROLE_MANAGER}
        )
        self.assertEqual(response.status_code, 302)

        # check updated user and profile
        self.user2.refresh_from_db()
        self.user2.profile.refresh_from_db()
        self.assertEqual(self.user2.profile.full_name, "Bill")
        self.assertEqual(self.user2.email, "bill@unicef.org")
        self.assertEqual(self.user2.username, "bill@unicef.org")
        self.assertNotIn(self.user2, self.unicef.viewers.all())
        self.assertIn(self.user2, self.unicef.editors.all())
        self.assertEqual(self.user2.get_partner(self.unicef), self.who)

        # submit with too simple a password
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Bill",
                "email": "bill@unicef.org",
                "partner": self.moh.pk,
                "role": ROLE_MANAGER,
                "new_password": "123",
                "confirm_password": "123",
            },
        )
        self.assertFormError(response, "form", "new_password", "Must be at least 10 characters long")

        # submit with old email, valid password, and switch back to being analyst for MOH
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Bill",
                "email": "bill@unicef.org",
                "partner": self.moh.pk,
                "role": ROLE_ANALYST,
                "new_password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertEqual(response.status_code, 302)

        self.user2.refresh_from_db()
        self.user2.profile.refresh_from_db()
        self.assertEqual(self.user2.profile.full_name, "Bill")
        self.assertEqual(self.user2.email, "bill@unicef.org")
        self.assertEqual(self.user2.username, "bill@unicef.org")
        self.assertNotIn(self.user2, self.unicef.editors.all())
        self.assertIn(self.user2, self.unicef.viewers.all())
        self.assertEqual(self.user2.get_partner(self.unicef), self.moh)

        # try giving user someone else's email address
        response = self.url_post(
            "unicef", url, {"name": "Bill", "email": "evan@unicef.org", "partner": self.moh.pk, "role": ROLE_ANALYST}
        )
        self.assertFormError(response, "form", None, "Email address already taken.")

        # login in as a partner manager user
        self.login(self.user1)

        # shouldn't see partner as field on the form
        response = self.url_get("unicef", url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            set(response.context["form"].fields.keys()),
            {"name", "email", "role", "new_password", "confirm_password", "change_password", "must_use_faq", "loc"},
        )

        # update partner colleague
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Bob",
                "email": "bob@unicef.org",
                "role": ROLE_MANAGER,
                "new_password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertEqual(response.status_code, 302)

        self.user2.refresh_from_db()
        self.user2.profile.refresh_from_db()
        self.assertEqual(self.user2.profile.full_name, "Bob")
        self.assertEqual(self.user2.email, "bob@unicef.org")
        self.assertEqual(self.user2.username, "bob@unicef.org")
        self.assertNotIn(self.user2, self.unicef.viewers.all())
        self.assertIn(self.user2, self.unicef.editors.all())
        self.assertEqual(self.user2.get_partner(self.unicef), self.moh)

        # can't update user outside of their partner
        url = reverse("profiles.user_update", args=[self.user3.pk])
        self.assertEqual(self.url_get("unicef", url).status_code, 302)

        # partner analyst users can't access page
        self.client.login(username="bill@unicef.org", password="Qwerty12345")
        self.assertEqual(self.url_get("unicef", url).status_code, 302)

    def test_read(self):
        """Read view: edit_button_url and can_delete in the context vary with the viewing user's role and partner."""
        # log in as superuser
        self.login(self.superuser)

        # can view a user outside of org, tho can't delete because there is no org
        response = self.url_get(None, reverse("profiles.user_read", args=[self.admin.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["edit_button_url"], reverse("profiles.user_update", args=[self.admin.pk]))
        self.assertFalse(response.context["can_delete"])

        # log in as an org administrator
        self.login(self.admin)

        # view our own profile
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.admin.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["edit_button_url"], reverse("profiles.user_self"))
        self.assertFalse(response.context["can_delete"])

        # view other user's profile
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.user1.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["edit_button_url"], reverse("profiles.user_update", args=[self.user1.pk]))
        self.assertTrue(response.context["can_delete"])

        # try to view user from other org
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.user4.pk]))
        self.assertEqual(response.status_code, 404)

        # log in as a manager user
        self.login(self.user1)

        # view ourselves (can edit)
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.user1.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["edit_button_url"], reverse("profiles.user_self"))
        self.assertFalse(response.context["can_delete"])

        # view another user in same partner org (can edit)
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.user2.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["edit_button_url"], reverse("profiles.user_update", args=[self.user2.pk]))
        self.assertTrue(response.context["can_delete"])

        # view another user in different partner org (can't edit)
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.user3.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertIsNone(response.context["edit_button_url"])
        self.assertFalse(response.context["can_delete"])

        # log in as an analyst user
        self.login(self.user2)

        # view ourselves (can edit)
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.user2.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["edit_button_url"], reverse("profiles.user_self"))
        self.assertFalse(response.context["can_delete"])

        # view another user in same partner org (can't edit)
        response = self.url_get("unicef", reverse("profiles.user_read", args=[self.user1.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertIsNone(response.context["edit_button_url"])
        self.assertFalse(response.context["can_delete"])

    def test_list(self):
        """JSON list endpoint: full listing, partner / non-partner filters, and with_activity reply and case stats."""
        url = reverse("profiles.user_list")

        # can't access if not logged in
        response = self.url_get("unicef", url)
        self.assertLoginRedirect(response, url)

        # can access all users even as non-administrator
        self.login(self.user1)

        response = self.url_get("unicef", url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.json["results"],
            [
                {
                    "id": self.user3.pk,
                    "name": "Carol",
                    "email": "carol@unicef.org",
                    "role": "M",
                    "partner": {"id": self.who.pk, "name": "WHO"},
                },
                {
                    "id": self.user1.pk,
                    "name": "Evan",
                    "email": "evan@unicef.org",
                    "role": "M",
                    "partner": {"id": self.moh.pk, "name": "MOH"},
                },
                {"id": self.admin.pk, "name": "Kidus", "email": "kidus@unicef.org", "role": "A", "partner": None},
                {
                    "id": self.user2.pk,
                    "name": "Rick",
                    "email": "rick@unicef.org",
                    "role": "Y",
                    "partner": {"id": self.moh.pk, "name": "MOH"},
                },
            ],
        )

        # can filter by partner
        response = self.url_get("unicef", url + "?partner=%d" % self.moh.pk)
        self.assertEqual(
            response.json["results"],
            [
                {
                    "id": self.user1.pk,
                    "name": "Evan",
                    "email": "evan@unicef.org",
                    "role": "M",
                    "partner": {"id": self.moh.pk, "name": "MOH"},
                },
                {
                    "id": self.user2.pk,
                    "name": "Rick",
                    "email": "rick@unicef.org",
                    "role": "Y",
                    "partner": {"id": self.moh.pk, "name": "MOH"},
                },
            ],
        )

        # can filter by being a non-partner user
        response = self.url_get("unicef", url + "?non_partner=true")
        self.assertEqual(
            response.json["results"],
            [{"id": self.admin.pk, "name": "Kidus", "email": "kidus@unicef.org", "role": "A", "partner": None}],
        )

        # add some reply activity
        ann = self.create_contact(self.unicef, "C-001", "Ann")
        self.create_outgoing(
            self.unicef, self.user1, 202, "B", "Hello 2", ann, created_on=datetime(2016, 4, 20, 9, 0, tzinfo=pytz.UTC)
        )  # April 20th
        self.create_outgoing(
            self.unicef, self.user1, 203, "C", "Hello 3", ann, created_on=datetime(2016, 3, 20, 9, 0, tzinfo=pytz.UTC)
        )  # Mar 20th

        # simulate making request in May
        with patch.object(timezone, "now", return_value=datetime(2016, 5, 20, 9, 0, tzinfo=pytz.UTC)):
            response = self.url_get("unicef", url + "?partner=%d&with_activity=true" % self.moh.pk)

        self.assertEqual(
            response.json["results"],
            [
                {
                    "id": self.user1.pk,
                    "name": "Evan",
                    "email": "evan@unicef.org",
                    "role": "M",
                    "partner": {"id": self.moh.pk, "name": "MOH"},
                    "replies": {"last_month": 1, "this_month": 0, "total": 2},
                    "cases": {"opened_this_month": 0, "closed_this_month": 0, "total": 0},
                },
                {
                    "id": self.user2.pk,
                    "name": "Rick",
                    "email": "rick@unicef.org",
                    "role": "Y",
                    "partner": {"id": self.moh.pk, "name": "MOH"},
                    "replies": {"last_month": 0, "this_month": 0, "total": 0},
                    "cases": {"opened_this_month": 0, "closed_this_month": 0, "total": 0},
                },
            ],
        )

    def test_self(self):
        """Self-edit view, including the forced password-change flow driven by profile.change_password."""
        url = reverse("profiles.user_self")

        # try as unauthenticated
        response = self.url_get("unicef", url)
        self.assertLoginRedirect(response, url)

        # try as superuser (doesn't have a chat profile)
        self.login(self.superuser)
        response = self.url_get("unicef", url)
        self.assertEqual(response.status_code, 404)

        # log in as an org administrator
        self.login(self.admin)
        response = self.url_get("unicef", url)
        self.assertEqual(response.status_code, 200)

        # log in as a user
        self.login(self.user1)
        response = self.url_get("unicef", url)
        self.assertEqual(response.status_code, 200)

        # submit with no fields entered
        response = self.url_post("unicef", url, dict())
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "name", "This field is required.")
        self.assertFormError(response, "form", "email", "This field is required.")

        # submit with all required fields entered
        response = self.url_post("unicef", url, {"name": "Morris", "email": "mo2@trac.com"})
        self.assertEqual(response.status_code, 302)

        # check updated user and profile
        user = User.objects.get(pk=self.user1.pk)
        self.assertEqual(user.profile.full_name, "Morris")
        self.assertEqual(user.email, "mo2@trac.com")
        self.assertEqual(user.username, "mo2@trac.com")

        # submit with too simple a password
        response = self.url_post(
            "unicef",
            url,
            {"name": "Morris", "email": "mo2@trac.com", "new_password": "123", "confirm_password": "123"},
        )
        self.assertFormError(response, "form", "new_password", "Must be at least 10 characters long")

        # submit with all required fields entered and valid password fields
        old_password_hash = user.password
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Morris",
                "email": "mo2@trac.com",
                "new_password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertEqual(response.status_code, 302)

        # check password has been changed
        user = User.objects.get(pk=self.user1.pk)
        self.assertNotEqual(user.password, old_password_hash)

        # check when user is being forced to change their password
        old_password_hash = user.password
        self.user1.profile.change_password = True
        self.user1.profile.save()

        # submit without password
        response = self.url_post("unicef", url, {"name": "Morris", "email": "mo2@trac.com"})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "new_password", "This field is required.")

        # submit again with new password but no confirmation
        response = self.url_post(
            "unicef", url, {"name": "Morris", "email": "mo2@trac.com", "new_password": "Qwerty12345"}
        )
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, "form", "confirm_password", "Passwords don't match.")

        # submit again with new password and confirmation
        response = self.url_post(
            "unicef",
            url,
            {
                "name": "Morris",
                "email": "mo2@trac.com",
                "new_password": "Qwerty12345",
                "confirm_password": "Qwerty12345",
            },
        )
        self.assertEqual(response.status_code, 302)

        # check password has changed and no longer has to be changed
        self.user1.refresh_from_db()
        self.user1.profile.refresh_from_db()
        self.assertFalse(self.user1.profile.change_password)
        self.assertNotEqual(self.user1.password, old_password_hash)

    def test_delete(self):
        """Delete view: analysts can delete nobody; managers their partner's users; admins anyone in-org except themselves."""
        # partner data analyst can't delete anyone
        self.login(self.user2)
        response = self.url_post("unicef", reverse("profiles.user_delete", args=[self.admin.pk]))
        self.assertEqual(response.status_code, 302)
        response = self.url_post("unicef", reverse("profiles.user_delete", args=[self.user1.pk]))
        self.assertEqual(response.status_code, 302)

        # partner manager can delete fellow partner org users but not org admins
        self.login(self.user1)
        response = self.url_post("unicef", reverse("profiles.user_delete", args=[self.user2.pk]))
        self.assertEqual(response.status_code, 204)
        self.assertIsNone(self.unicef.get_user_org_group(self.user2))
        response = self.url_post("unicef", reverse("profiles.user_delete", args=[self.admin.pk]))
        self.assertEqual(response.status_code, 302)

        # admins can delete anyone in their org
        self.login(self.admin)
        response = self.url_post("unicef", reverse("profiles.user_delete", args=[self.user1.pk]))
        self.assertEqual(response.status_code, 204)
        self.assertIsNone(self.unicef.get_user_org_group(self.user1))

        # but not in a different org
        response = self.url_post("unicef", reverse("profiles.user_delete", args=[self.norbert.pk]))
        self.assertEqual(response.status_code, 404)

        # and not themselves
        response = self.url_post("unicef", reverse("profiles.user_delete", args=[self.admin.pk]))
        self.assertEqual(response.status_code, 302)

    def test_login(self):
        """Usernames are matched case-insensitively at login; passwords are not."""
        # test that logins are case-insensitive but passwords aren't
        self.assertTrue(self.client.login(username="KIDUS@UNICEF.org", password="kidus@unicef.org"))
        self.assertFalse(self.client.login(username="KIDUS@UNICEF.org", password="KIDUS@UNICEF.org"))
class ForcePasswordChangeMiddlewareTest(BaseCasesTest):
    """Tests that users flagged with change_password are forced onto the self-edit page."""

    @override_settings(CELERY_ALWAYS_EAGER=True, CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, BROKER_BACKEND="memory")
    def test_process_view(self):
        """With change_password set, every page except the self-edit view redirects there."""
        profile = self.user1.profile
        profile.change_password = True
        profile.save()

        self.login(self.user1)

        # any other page redirects to the self-edit view
        resp = self.url_get("unicef", reverse("cases.inbox"))
        self.assertRedirects(resp, "/profile/self/", fetch_redirect_response=False)

        # the self-edit view itself remains reachable
        resp = self.url_get("unicef", reverse("profiles.user_self"))
        self.assertEqual(resp.status_code, 200)

        # clearing the flag restores normal access
        profile.change_password = False
        profile.save()

        resp = self.url_get("unicef", reverse("cases.inbox"))
        self.assertEqual(resp.status_code, 200)
|
{
"content_hash": "84e7ca599d2066141037b1130ac25759",
"timestamp": "",
"source": "github",
"line_count": 1175,
"max_line_length": 119,
"avg_line_length": 41.41787234042553,
"alnum_prop": 0.5869189988903958,
"repo_name": "praekelt/casepro",
"id": "c952d0ab9251268e43d2049ae724a1fbe10ffdb7",
"size": "48666",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "casepro/profiles/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3475"
},
{
"name": "CoffeeScript",
"bytes": "220558"
},
{
"name": "Dockerfile",
"bytes": "1193"
},
{
"name": "HTML",
"bytes": "108105"
},
{
"name": "PLpgSQL",
"bytes": "10006"
},
{
"name": "Python",
"bytes": "964291"
},
{
"name": "Shell",
"bytes": "233"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from operator import attrgetter
from django import VERSION
from django.contrib.contenttypes.generic import GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models, router
from django.db.models.fields import Field
from django.db.models.fields.related import ManyToManyRel, RelatedField, add_lazy_relation
from django.db.models.related import RelatedObject
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from django.utils import six
try:
from django.db.models.related import PathInfo
except ImportError:
pass # PathInfo is not used on Django < 1.6
from taggit.forms import TagField
from taggit.models import TaggedItem, GenericTaggedItemBase
from taggit.utils import require_instance_manager
def _model_name(model):
    """Return the lowercased model name for *model*.

    ``Options.module_name`` was renamed to ``Options.model_name`` in
    Django 1.7, so pick the attribute based on the running version.
    """
    attr = "module_name" if VERSION < (1, 7) else "model_name"
    return getattr(model._meta, attr)
class TaggableRel(ManyToManyRel):
    """Relation object attached to a ``TaggableManager`` field.

    Sets the attributes Django expects on an M2M relation directly
    (``ManyToManyRel.__init__`` is not called) and delegates join
    construction back to the owning field.
    """

    def __init__(self, field, related_name):
        self.field = field
        self.related_name = related_name
        self.limit_choices_to = {}
        self.symmetrical = True
        self.multiple = True
        self.through = None

    def get_joining_columns(self):
        # the owning field knows how to join back to the tagged-item table
        return self.field.get_reverse_joining_columns()

    def get_extra_restriction(self, where_class, alias, related_alias):
        # note the alias arguments are swapped when delegating to the field
        return self.field.get_extra_restriction(where_class, related_alias, alias)
class ExtraJoinRestriction(object):
    """Extra SQL restriction limiting a join to particular content types.

    Renders as ``<alias>.<col> = %s`` for a single content type, or
    ``<alias>.<col> IN (%s, ...)`` for several, with the content type ids
    passed back as query parameters.
    """

    def __init__(self, alias, col, content_types):
        self.alias = alias
        self.col = col
        self.content_types = content_types

    def as_sql(self, qn, connection):
        """Return ``(sql_fragment, params)``, quoting identifiers with *qn*."""
        lhs = "%s.%s" % (qn(self.alias), qn(self.col))
        count = len(self.content_types)
        if count == 1:
            fragment = lhs + " = %s"
        else:
            fragment = "%s IN (%s)" % (lhs, ','.join(['%s'] * count))
        return fragment, self.content_types

    def relabel_aliases(self, change_map):
        """Follow a query alias relabelling; unknown aliases are left alone."""
        self.alias = change_map.get(self.alias, self.alias)

    def clone(self):
        """Return an independent copy (the content type list is shallow-copied)."""
        return self.__class__(self.alias, self.col, self.content_types[:])
class TaggableManager(RelatedField, Field):
    """Model field exposing tags through per-instance ``_TaggableManager`` objects.

    Acts both as a Django model field (participating in ``Meta`` field
    machinery) and as a descriptor: attribute access on a model instance
    returns a manager over that instance's tags via the ``through`` model
    (``TaggedItem`` by default).

    NOTE(review): targets pre-1.8 Django internals (``get_field_by_name``,
    ``RelatedObject``, ``add_lazy_relation``) — confirm against the Django
    version in use.
    """

    _related_name_counter = 0

    def __init__(self, verbose_name=_("Tags"), help_text=_("A comma-separated list of tags."),
                 through=None, blank=False, related_name=None):
        # null=True / serialize=False: the field has no column of its own (see db_type)
        Field.__init__(self, verbose_name=verbose_name, help_text=help_text, blank=blank, null=True, serialize=False)
        self.through = through or TaggedItem
        self.rel = TaggableRel(self, related_name)

    def __get__(self, instance, model):
        """Descriptor access: return a ``_TaggableManager`` bound to *instance*.

        Raises:
            ValueError: if the instance has no primary key yet (tags are
                looked up by PK, so unsaved instances cannot have tags).
        """
        if instance is not None and instance.pk is None:
            raise ValueError("%s objects need to have a primary key value "
                             "before you can access their tags." % model.__name__)
        manager = _TaggableManager(
            through=self.through,
            model=model,
            instance=instance,
            prefetch_cache_name=self.name
        )
        return manager

    def contribute_to_class(self, cls, name):
        """Register the field on *cls* and install this descriptor under *name*."""
        if VERSION < (1, 7):
            self.name = self.column = name
        else:
            self.set_attributes_from_name(name)
        self.model = cls
        cls._meta.add_field(self)
        setattr(cls, name, self)
        if not cls._meta.abstract:
            if isinstance(self.through, six.string_types):
                # through model given by name ("app.Model"): resolve it lazily
                def resolve_related_class(field, model, cls):
                    self.through = model
                    self.post_through_setup(cls)
                add_lazy_relation(
                    cls, self, self.through, resolve_related_class
                )
            else:
                self.post_through_setup(cls)

    def __lt__(self, other):
        """
        Required by contribute_to_class, as Django uses bisect for ordered
        class contribution and bisect requires an orderable type in py3.
        """
        return False

    def post_through_setup(self, cls):
        """Finish wiring once the ``through`` model class is known."""
        self.related = RelatedObject(cls, self.model, self)
        # generic-FK mode when the through model derives from GenericTaggedItemBase
        self.use_gfk = (
            self.through is None or issubclass(self.through, GenericTaggedItemBase)
        )
        self.rel.to = self.through._meta.get_field("tag").rel.to
        self.related = RelatedObject(self.through, cls, self)
        if self.use_gfk:
            tagged_items = GenericRelation(self.through)
            tagged_items.contribute_to_class(cls, 'tagged_items')

        # reject a second TaggableManager sharing the same through model
        for rel in cls._meta.local_many_to_many:
            if rel == self or not isinstance(rel, TaggableManager):
                continue
            if rel.through == self.through:
                raise ValueError('You can\'t have two TaggableManagers with the'
                                 ' same through model.')

    def save_form_data(self, instance, value):
        # replace the instance's tags with those submitted on the form
        getattr(instance, self.name).set(*value)

    def formfield(self, form_class=TagField, **kwargs):
        """Build the form field (a ``TagField`` unless overridden)."""
        defaults = {
            "label": capfirst(self.verbose_name),
            "help_text": self.help_text,
            "required": not self.blank
        }
        defaults.update(kwargs)
        return form_class(**defaults)

    def value_from_object(self, instance):
        # unsaved instances can have no tagged items
        if instance.pk:
            return self.through.objects.filter(**self.through.lookup_kwargs(instance))
        return self.through.objects.none()

    def related_query_name(self):
        return _model_name(self.model)

    def m2m_reverse_name(self):
        return self.through._meta.get_field_by_name("tag")[0].column

    def m2m_reverse_field_name(self):
        return self.through._meta.get_field_by_name("tag")[0].name

    def m2m_target_field_name(self):
        return self.model._meta.pk.name

    def m2m_reverse_target_field_name(self):
        return self.rel.to._meta.pk.name

    def m2m_column_name(self):
        if self.use_gfk:
            return self.through._meta.virtual_fields[0].fk_field
        return self.through._meta.get_field('content_object').column

    def db_type(self, connection=None):
        # no database column: the relation lives entirely in the through table
        return None

    def m2m_db_table(self):
        return self.through._meta.db_table

    def bulk_related_objects(self, new_objs, using):
        return []

    def extra_filters(self, pieces, pos, negate):
        """Extra content-type filters for GFK lookups; nothing for negated or non-GFK cases."""
        if negate or not self.use_gfk:
            return []
        prefix = "__".join(["tagged_items"] + pieces[:pos-2])
        get = ContentType.objects.get_for_model
        # _get_subclasses is defined elsewhere in this module
        cts = [get(obj) for obj in _get_subclasses(self.model)]
        if len(cts) == 1:
            return [("%s__content_type" % prefix, cts[0])]
        return [("%s__content_type__in" % prefix, cts)]

    def get_extra_join_sql(self, connection, qn, lhs_alias, rhs_alias):
        """Build an " AND <alias>.<content_type col> = ..." fragment restricting a join."""
        model_name = _model_name(self.through)
        # join the alias that refers to the through table
        if rhs_alias == '%s_%s' % (self.through._meta.app_label, model_name):
            alias_to_join = rhs_alias
        else:
            alias_to_join = lhs_alias
        extra_col = self.through._meta.get_field_by_name('content_type')[0].column
        content_type_ids = [ContentType.objects.get_for_model(subclass).pk for subclass in _get_subclasses(self.model)]
        if len(content_type_ids) == 1:
            content_type_id = content_type_ids[0]
            extra_where = " AND %s.%s = %%s" % (qn(alias_to_join), qn(extra_col))
            params = [content_type_id]
        else:
            extra_where = " AND %s.%s IN (%s)" % (qn(alias_to_join), qn(extra_col), ','.join(['%s']*len(content_type_ids)))
            params = content_type_ids
        return extra_where, params

    # This and all the methods till the end of class are only used in django >= 1.6
    def _get_mm_case_path_info(self, direct=False):
        # path through an explicit (non-generic) through model
        pathinfos = []
        linkfield1 = self.through._meta.get_field_by_name('content_object')[0]
        linkfield2 = self.through._meta.get_field_by_name(self.m2m_reverse_field_name())[0]
        if direct:
            join1infos = linkfield1.get_reverse_path_info()
            join2infos = linkfield2.get_path_info()
        else:
            join1infos = linkfield2.get_reverse_path_info()
            join2infos = linkfield1.get_path_info()
        pathinfos.extend(join1infos)
        pathinfos.extend(join2infos)
        return pathinfos

    def _get_gfk_case_path_info(self, direct=False):
        # path through a generic-FK (GenericTaggedItemBase) through model
        pathinfos = []
        from_field = self.model._meta.pk
        opts = self.through._meta
        object_id_field = opts.get_field_by_name('object_id')[0]
        linkfield = self.through._meta.get_field_by_name(self.m2m_reverse_field_name())[0]
        if direct:
            join1infos = [PathInfo(self.model._meta, opts, [from_field], self.rel, True, False)]
            join2infos = linkfield.get_path_info()
        else:
            join1infos = linkfield.get_reverse_path_info()
            join2infos = [PathInfo(opts, self.model._meta, [object_id_field], self, True, False)]
        pathinfos.extend(join1infos)
        pathinfos.extend(join2infos)
        return pathinfos

    def get_path_info(self):
        if self.use_gfk:
            return self._get_gfk_case_path_info(direct=True)
        else:
            return self._get_mm_case_path_info(direct=True)

    def get_reverse_path_info(self):
        if self.use_gfk:
            return self._get_gfk_case_path_info(direct=False)
        else:
            return self._get_mm_case_path_info(direct=False)

    def get_joining_columns(self, reverse_join=False):
        if reverse_join:
            return (("id", "object_id"),)
        else:
            return (("object_id", "id"),)

    def get_extra_restriction(self, where_class, alias, related_alias):
        # restrict the join to rows tagged against this model's content type(s)
        extra_col = self.through._meta.get_field_by_name('content_type')[0].column
        content_type_ids = [ContentType.objects.get_for_model(subclass).pk
                            for subclass in _get_subclasses(self.model)]
        return ExtraJoinRestriction(related_alias, extra_col, content_type_ids)

    def get_reverse_joining_columns(self):
        return self.get_joining_columns(reverse_join=True)

    @property
    def related_fields(self):
        return [(self.through._meta.get_field_by_name('object_id')[0],
                 self.model._meta.pk)]

    @property
    def foreign_related_fields(self):
        return [self.related_fields[0][1]]
class _TaggableManager(models.Manager):
    """Per-instance manager exposing ``obj.tags``-style access to tags.

    Wraps the ``through`` model for a single tagged ``instance`` (or the
    model class when accessed on the class) and cooperates with Django's
    ``prefetch_related`` machinery via ``prefetch_cache_name``.
    """
    def __init__(self, through, model, instance, prefetch_cache_name):
        # NOTE(review): models.Manager.__init__ is never invoked here --
        # confirm this is intentional.
        self.through = through
        self.model = model
        self.instance = instance
        self.prefetch_cache_name = prefetch_cache_name
        self._db = None
    def is_cached(self, instance):
        """True when this relation was already fetched by prefetch_related."""
        return self.prefetch_cache_name in instance._prefetched_objects_cache
    def get_query_set(self):
        # Prefer the prefetch cache when present; fall back to a fresh query.
        try:
            return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
        except (AttributeError, KeyError):
            return self.through.tags_for(self.model, self.instance)
    def get_prefetch_query_set(self, instances, queryset = None):
        """Build the single query prefetch_related uses for many instances.

        Returns the (queryset, rel_obj_attr, instance_attr, single,
        cache_name) tuple the prefetch machinery expects.
        """
        if queryset is not None:
            raise ValueError("Custom queryset can't be used for this lookup.")
        instance = instances[0]
        from django.db import connections
        db = self._db or router.db_for_read(instance.__class__, instance=instance)
        # Generic relations key on object_id; plain FKs key on the object itself.
        fieldname = ('object_id' if issubclass(self.through, GenericTaggedItemBase)
                     else 'content_object')
        fk = self.through._meta.get_field(fieldname)
        query = {
            '%s__%s__in' % (self.through.tag_relname(), fk.name) :
                set(obj._get_pk_val() for obj in instances)
        }
        join_table = self.through._meta.db_table
        source_col = fk.column
        connection = connections[db]
        qn = connection.ops.quote_name
        # Annotate each tag row with the pk of the instance it belongs to so
        # results can be fanned back out to the right objects.
        qs = self.get_query_set().using(db)._next_is_sticky().filter(**query).extra(
            select = {
                '_prefetch_related_val' : '%s.%s' % (qn(join_table), qn(source_col))
            }
        )
        return (qs,
                attrgetter('_prefetch_related_val'),
                attrgetter(instance._meta.pk.name),
                False,
                self.prefetch_cache_name)
    # Django 1.6 renamed this
    get_queryset = get_query_set
    def _lookup_kwargs(self):
        # Filter kwargs selecting through-rows for the bound instance.
        return self.through.lookup_kwargs(self.instance)
    @require_instance_manager
    def add(self, *tags):
        """Attach *tags* (tag objects or raw names) to the instance.

        Names that do not exist yet are created; existing through rows are
        left untouched via get_or_create.
        """
        str_tags = set([
            t
            for t in tags
            if not isinstance(t, self.through.tag_model())
        ])
        tag_objs = set(tags) - str_tags
        # If str_tags has 0 elements Django actually optimizes that to not do a
        # query. Malcolm is very smart.
        existing = self.through.tag_model().objects.filter(
            name__in=str_tags
        )
        tag_objs.update(existing)
        for new_tag in str_tags - set(t.name for t in existing):
            tag_objs.add(self.through.tag_model().objects.create(name=new_tag))
        for tag in tag_objs:
            self.through.objects.get_or_create(tag=tag, **self._lookup_kwargs())
    @require_instance_manager
    def names(self):
        """Flat list of the tag names on this instance."""
        return self.get_query_set().values_list('name', flat=True)
    @require_instance_manager
    def slugs(self):
        """Flat list of the tag slugs on this instance."""
        return self.get_query_set().values_list('slug', flat=True)
    @require_instance_manager
    def set(self, *tags):
        """Replace the instance's tags with exactly *tags*."""
        self.clear()
        self.add(*tags)
    @require_instance_manager
    def remove(self, *tags):
        """Detach the through rows whose tag names are in *tags*."""
        self.through.objects.filter(**self._lookup_kwargs()).filter(
            tag__name__in=tags).delete()
    @require_instance_manager
    def clear(self):
        """Detach every tag from the instance."""
        self.through.objects.filter(**self._lookup_kwargs()).delete()
    def most_common(self):
        """Tags ordered by how many objects carry them, most used first."""
        return self.get_query_set().annotate(
            num_times=models.Count(self.through.tag_relname())
        ).order_by('-num_times')
    @require_instance_manager
    def similar_objects(self):
        """Other tagged objects sharing tags with this one, by overlap count.

        Each returned object gets a ``similar_tags`` attribute with the
        number of shared tags.
        """
        lookup_kwargs = self._lookup_kwargs()
        lookup_keys = sorted(lookup_kwargs)
        qs = self.through.objects.values(*six.iterkeys(lookup_kwargs))
        qs = qs.annotate(n=models.Count('pk'))
        qs = qs.exclude(**lookup_kwargs)
        qs = qs.filter(tag__in=self.all())
        qs = qs.order_by('-n')
        # TODO: This all feels like a bit of a hack.
        items = {}
        if len(lookup_keys) == 1:
            # Can we do this without a second query by using a select_related()
            # somehow?
            f = self.through._meta.get_field_by_name(lookup_keys[0])[0]
            objs = f.rel.to._default_manager.filter(**{
                "%s__in" % f.rel.field_name: [r["content_object"] for r in qs]
            })
            for obj in objs:
                items[(getattr(obj, f.rel.field_name),)] = obj
        else:
            # Generic case: group object ids by content type, then fetch each
            # content type's objects in one query.
            preload = {}
            for result in qs:
                preload.setdefault(result['content_type'], set())
                preload[result["content_type"]].add(result["object_id"])
            for ct, obj_ids in preload.items():
                ct = ContentType.objects.get_for_id(ct)
                for obj in ct.model_class()._default_manager.filter(pk__in=obj_ids):
                    items[(ct.pk, obj.pk)] = obj
        results = []
        for result in qs:
            #obj = items[
            #tuple(result[k] for k in lookup_keys)
            #]
            #obj.similar_tags = result["n"]
            #results.append(obj)
            # fix key error. by guoqiao, 2013.06.06
            # Rows whose object has vanished are silently skipped.
            x = [result[k] for k in lookup_keys]
            key = tuple(x)
            obj = items.get(key,'')
            if obj:
                obj.similar_tags = result["n"]
                results.append(obj)
        return results
def _get_subclasses(model):
    """Collect *model* plus every model derived from it via a parent link."""
    found = [model]
    for field_name in model._meta.get_all_field_names():
        candidate = model._meta.get_field_by_name(field_name)[0]
        # Reverse relations whose FK is a parent link mark MTI children;
        # recurse so grandchildren are picked up too.
        if (isinstance(candidate, RelatedObject) and
                getattr(candidate.field.rel, "parent_link", None)):
            found.extend(_get_subclasses(candidate.model))
    return found
# `total_ordering` does not exist in Django 1.4, as such
# we special case this import to be py3k specific which
# is not supported by Django 1.4
if six.PY3:
    from django.utils.functional import total_ordering
    # Fills in the remaining rich-comparison methods on TaggableManager.
    TaggableManager = total_ordering(TaggableManager)
|
{
"content_hash": "ffb7b106dcea773a15469e3b18d9b785",
"timestamp": "",
"source": "github",
"line_count": 447,
"max_line_length": 123,
"avg_line_length": 37.00447427293065,
"alnum_prop": 0.5990568889426274,
"repo_name": "guoqiao/django-taggit",
"id": "f7a27196c19321e1e1699fb6868b0209538df2b7",
"size": "16541",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "taggit/managers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "67333"
}
],
"symlink_target": ""
}
|
import json
from unittest2 import skipIf
from django.test.utils import override_settings
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from myuw.test.api import missing_url, get_user, get_user_pass
# File-backed SWS DAO so tests run against canned data instead of live SWS.
FDAO_SWS = 'restclients.dao_implementation.sws.File'
# Short aliases for the dotted middleware / auth-backend paths used in the
# @override_settings decorator below, to keep it readable.
Session = 'django.contrib.sessions.middleware.SessionMiddleware'
Common = 'django.middleware.common.CommonMiddleware'
CsrfView = 'django.middleware.csrf.CsrfViewMiddleware'
Auth = 'django.contrib.auth.middleware.AuthenticationMiddleware'
RemoteUser = 'django.contrib.auth.middleware.RemoteUserMiddleware'
Message = 'django.contrib.messages.middleware.MessageMiddleware'
XFrame = 'django.middleware.clickjacking.XFrameOptionsMiddleware'
UserService = 'userservice.user.UserServiceMiddleware'
AUTH_BACKEND = 'django.contrib.auth.backends.ModelBackend'
@override_settings(RESTCLIENTS_SWS_DAO_CLASS=FDAO_SWS,
                   MIDDLEWARE_CLASSES=(Session,
                                       Common,
                                       CsrfView,
                                       Auth,
                                       RemoteUser,
                                       Message,
                                       XFrame,
                                       UserService,
                                       ),
                   AUTHENTICATION_BACKENDS=(AUTH_BACKEND,)
                   )
class TestOtherQuarters(TestCase):
    """Exercises the 'other quarters' JSON API with file-backed SWS data."""
    def setUp(self):
        self.client = Client()
    @skipIf(missing_url("myuw_home"), "myuw urls not configured")
    def test_javerage_oquarters(self):
        """Logged-in 'javerage' gets next-term and per-term data as expected."""
        url = reverse("myuw_other_quarters_api")
        get_user('javerage')
        self.client.login(username='javerage',
                          password=get_user_pass('javerage'))
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        data = json.loads(response.content)
        # Values below match the canned SWS fixtures for 'javerage'.
        self.assertEquals(data["next_term_data"]["has_registration"], True)
        self.assertEquals(data["next_term_data"]["quarter"], "Autumn")
        self.assertEquals(data["next_term_data"]["year"], 2013)
        self.assertEquals(len(data["terms"]), 3)
        self.assertEquals(data["terms"][0]['section_count'], 2)
        self.assertEquals(data["terms"][0]['url'], '/2013,summer,a-term')
        self.assertEquals(data["terms"][0]['summer_term'], 'a-term')
        self.assertEquals(data["terms"][0]['year'], 2013)
        self.assertEquals(data["terms"][0]['quarter'], 'Summer')
        self.assertEquals(data["terms"][0]['credits'], '2.0')
        self.assertEquals(data["terms"][0]['last_final_exam_date'],
                          '2013-08-23 23:59:59')
|
{
"content_hash": "90caef4745f1d58ec10a4058f482b665",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 75,
"avg_line_length": 44.721311475409834,
"alnum_prop": 0.6088709677419355,
"repo_name": "fanglinfang/myuw",
"id": "60007b5b5a9ea1ae025b3a0678fcb07776b225f3",
"size": "2728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myuw/test/api/other_quarters.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "54427"
},
{
"name": "HTML",
"bytes": "169387"
},
{
"name": "JavaScript",
"bytes": "226000"
},
{
"name": "Python",
"bytes": "403286"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from rlpy.Domains import HIVTreatment
from rlpy.Agents import SARSA, Q_LEARNING
from rlpy.Representations import *
from rlpy.Policies import eGreedy
from rlpy.Experiments import Experiment
import numpy as np
from hyperopt import hp
# Hyperopt search space for the tunable parameters of make_experiment
# (matching its keyword arguments of the same names).
param_space = {'resolution': hp.quniform("resolution", 3, 30, 1),
               'lambda_': hp.uniform("lambda_", 0., 1.),
               'boyan_N0': hp.loguniform("boyan_N0", np.log(1e1), np.log(1e5)),
               'initial_learn_rate': hp.loguniform("initial_learn_rate", np.log(5e-2), np.log(1))}
def make_experiment(
        exp_id=1, path="./Results/Temp/{domain}/{agent}/{representation}/",
        boyan_N0=136,
        lambda_=0.0985,
        initial_learn_rate=0.090564,
        resolution=13., num_rbfs=9019):
    """Assemble an rlpy Experiment: HIVTreatment + Q-learning.

    Uses a NonparametricLocalBases representation with an L-inf triangle
    kernel and an eps-greedy policy; learning-rate decay follows the
    Boyan schedule.

    NOTE(review): ``num_rbfs`` is accepted but never used in this body --
    confirm whether it should be forwarded to the representation.
    """
    domain = HIVTreatment()
    representation = NonparametricLocalBases(domain,
                                             kernel=linf_triangle_kernel,
                                             resolution=resolution,
                                             normalization=True)
    policy = eGreedy(representation, epsilon=0.1)
    agent = Q_LEARNING(
        policy, representation, discount_factor=domain.discount_factor,
        lambda_=lambda_, initial_learn_rate=initial_learn_rate,
        learn_rate_decay_mode="boyan", boyan_N0=boyan_N0)
    opt = {
        "path": path,
        "exp_id": exp_id,
        "max_steps": 150000,
        "num_policy_checks": 30,
        "checks_per_policy": 1,
        "domain": domain,
        "agent": agent,
    }
    return Experiment(**opt)
if __name__ == '__main__':
    from rlpy.Tools.run import run_profiled
    # run_profiled(make_experiment)
    # Run a single experiment with live learning visualization, then plot.
    experiment = make_experiment(1)
    experiment.run(visualize_learning=True)
    experiment.plot()
    # experiment.save()
|
{
"content_hash": "ccd400678c96172643a51f99222d3a72",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 98,
"avg_line_length": 37.370370370370374,
"alnum_prop": 0.6283448959365708,
"repo_name": "rlpy/rlpy",
"id": "9d87a89f4f72bde65d2bfcbc1dbaf22e78238831",
"size": "2018",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/hiv/nplb_triangle.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "117712"
},
{
"name": "C++",
"bytes": "1601"
},
{
"name": "PLSQL",
"bytes": "787682"
},
{
"name": "Python",
"bytes": "1215456"
}
],
"symlink_target": ""
}
|
import six
import mock
from neutronclient.common import exceptions as qe
from heat.common import exception
from heat.engine.clients.os import neutron
from heat.engine.clients.os.neutron import lbaas_constraints as lc
from heat.engine.clients.os.neutron import neutron_constraints as nc
from heat.tests import common
from heat.tests import utils
class NeutronClientPluginTestCase(common.HeatTestCase):
    """Base case wiring a mocked neutron client into the plugin under test."""
    def setUp(self):
        super(NeutronClientPluginTestCase, self).setUp()
        self.neutron_client = mock.MagicMock()
        con = utils.dummy_context()
        c = con.clients
        self.neutron_plugin = c.client_plugin('neutron')
        # Swap the real client for the mock so no HTTP calls are made.
        self.neutron_plugin._client = self.neutron_client
class NeutronClientPluginTests(NeutronClientPluginTestCase):
    """Tests for the plugin's name/id resolution helpers."""
    def setUp(self):
        super(NeutronClientPluginTests, self).setUp()
        # All name->id lookups funnel through find_resourceid_by_name_or_id;
        # stub it to return a fixed id.
        self.mock_find = self.patchobject(neutron.neutronV20,
                                          'find_resourceid_by_name_or_id')
        self.mock_find.return_value = 42
    def test_find_neutron_resource(self):
        """A resource name is resolved via the neutron v2.0 lookup helper."""
        props = {'net': 'test_network'}
        res = self.neutron_plugin.find_neutron_resource(props, 'net',
                                                        'network')
        self.assertEqual(42, res)
        self.mock_find.assert_called_once_with(self.neutron_client, 'network',
                                               'test_network',
                                               cmd_resource=None)
    def test_resolve_network(self):
        """Names are resolved once; ids pass through without a lookup."""
        props = {'net': 'test_network'}
        res = self.neutron_plugin.resolve_network(props, 'net', 'net_id')
        self.assertEqual(42, res)
        self.mock_find.assert_called_once_with(self.neutron_client, 'network',
                                               'test_network',
                                               cmd_resource=None)
        # check resolve if was send id instead of name
        props = {'net_id': 77}
        res = self.neutron_plugin.resolve_network(props, 'net', 'net_id')
        self.assertEqual(77, res)
        # in this case find_resourceid_by_name_or_id is not called
        self.mock_find.assert_called_once_with(self.neutron_client, 'network',
                                               'test_network',
                                               cmd_resource=None)
    def test_resolve_subnet(self):
        """Same name-vs-id behavior as networks, for subnets."""
        props = {'snet': 'test_subnet'}
        res = self.neutron_plugin.resolve_subnet(props, 'snet', 'snet_id')
        self.assertEqual(42, res)
        self.mock_find.assert_called_once_with(self.neutron_client, 'subnet',
                                               'test_subnet',
                                               cmd_resource=None)
        # check resolve if was send id instead of name
        props = {'snet_id': 77}
        res = self.neutron_plugin.resolve_subnet(props, 'snet', 'snet_id')
        self.assertEqual(77, res)
        # in this case find_resourceid_by_name_or_id is not called
        self.mock_find.assert_called_once_with(self.neutron_client, 'subnet',
                                               'test_subnet',
                                               cmd_resource=None)
    def test_get_secgroup_uuids(self):
        """UUIDs pass through; names resolve, with tenant and ambiguity rules."""
        # test get from uuids
        sgs_uuid = ['b62c3079-6946-44f5-a67b-6b9091884d4f',
                    '9887157c-d092-40f5-b547-6361915fce7d']
        sgs_list = self.neutron_plugin.get_secgroup_uuids(sgs_uuid)
        self.assertEqual(sgs_uuid, sgs_list)
        # test get from name, return only one
        sgs_non_uuid = ['security_group_1']
        expected_groups = ['0389f747-7785-4757-b7bb-2ab07e4b09c3']
        fake_list = {
            'security_groups': [
                {
                    'tenant_id': 'test_tenant_id',
                    'id': '0389f747-7785-4757-b7bb-2ab07e4b09c3',
                    'name': 'security_group_1',
                    'security_group_rules': [],
                    'description': 'no protocol'
                }
            ]
        }
        self.neutron_client.list_security_groups.return_value = fake_list
        self.assertEqual(expected_groups,
                         self.neutron_plugin.get_secgroup_uuids(sgs_non_uuid))
        # test only one belong to the tenant
        fake_list = {
            'security_groups': [
                {
                    'tenant_id': 'test_tenant_id',
                    'id': '0389f747-7785-4757-b7bb-2ab07e4b09c3',
                    'name': 'security_group_1',
                    'security_group_rules': [],
                    'description': 'no protocol'
                },
                {
                    'tenant_id': 'not_test_tenant_id',
                    'id': '384ccd91-447c-4d83-832c-06974a7d3d05',
                    'name': 'security_group_1',
                    'security_group_rules': [],
                    'description': 'no protocol'
                }
            ]
        }
        self.neutron_client.list_security_groups.return_value = fake_list
        self.assertEqual(expected_groups,
                         self.neutron_plugin.get_secgroup_uuids(sgs_non_uuid))
        # test there are two securityGroups with same name, and the two
        # all belong to the tenant
        fake_list = {
            'security_groups': [
                {
                    'tenant_id': 'test_tenant_id',
                    'id': '0389f747-7785-4757-b7bb-2ab07e4b09c3',
                    'name': 'security_group_1',
                    'security_group_rules': [],
                    'description': 'no protocol'
                },
                {
                    'tenant_id': 'test_tenant_id',
                    'id': '384ccd91-447c-4d83-832c-06974a7d3d05',
                    'name': 'security_group_1',
                    'security_group_rules': [],
                    'description': 'no protocol'
                }
            ]
        }
        self.neutron_client.list_security_groups.return_value = fake_list
        self.assertRaises(exception.PhysicalResourceNameAmbiguity,
                          self.neutron_plugin.get_secgroup_uuids,
                          sgs_non_uuid)
class NeutronConstraintsValidate(common.HeatTestCase):
    """Scenario-driven validation tests for the neutron custom constraints.

    Each scenario supplies a constraint class plus the resource type and
    optional cmd_resource the constraint is expected to look up.
    """
    scenarios = [
        ('validate_network',
         dict(constraint_class=nc.NetworkConstraint,
              resource_type='network',
              cmd_resource=None)),
        ('validate_port',
         dict(constraint_class=nc.PortConstraint,
              resource_type='port',
              cmd_resource=None)),
        ('validate_router',
         dict(constraint_class=nc.RouterConstraint,
              resource_type='router',
              cmd_resource=None)),
        ('validate_subnet',
         dict(constraint_class=nc.SubnetConstraint,
              resource_type='subnet',
              cmd_resource=None)),
        ('validate_subnetpool',
         dict(constraint_class=nc.SubnetPoolConstraint,
              resource_type='subnetpool',
              cmd_resource=None)),
        ('validate_address_scope',
         dict(constraint_class=nc.AddressScopeConstraint,
              resource_type='address_scope',
              cmd_resource=None)),
        ('validate_loadbalancer',
         dict(constraint_class=lc.LoadbalancerConstraint,
              resource_type='loadbalancer',
              cmd_resource='lbaas_loadbalancer')),
        ('validate_listener',
         dict(constraint_class=lc.ListenerConstraint,
              resource_type='listener',
              cmd_resource=None)),
        ('validate_pool',
         dict(constraint_class=lc.PoolConstraint,
              resource_type='pool',
              cmd_resource='lbaas_pool')),
        ('validate_qos_policy',
         dict(constraint_class=nc.QoSPolicyConstraint,
              resource_type='policy',
              cmd_resource='qos_policy'))
    ]
    def test_validate(self):
        """Found resources validate; a 404 from the lookup means invalid."""
        mock_extension = self.patchobject(
            neutron.NeutronClientPlugin, 'has_extension', return_value=True)
        # NOTE(review): this local ``nc`` shadows the ``nc`` module alias
        # imported at the top of the file.
        nc = mock.Mock()
        mock_create = self.patchobject(neutron.NeutronClientPlugin, '_create')
        mock_create.return_value = nc
        mock_find = self.patchobject(neutron.NeutronClientPlugin,
                                     'find_resourceid_by_name_or_id')
        # First lookup succeeds ('foo'); second raises 404 ('bar').
        mock_find.side_effect = [
            'foo',
            qe.NeutronClientException(status_code=404)
        ]
        constraint = self.constraint_class()
        ctx = utils.dummy_context()
        if hasattr(constraint, 'extension') and constraint.extension:
            # A missing required extension must surface as EntityNotFound.
            mock_extension.side_effect = [
                False,
                True,
                True,
            ]
            ex = self.assertRaises(
                exception.EntityNotFound,
                constraint.validate_with_client, ctx.clients, "foo"
            )
            expected = ("The neutron extension (%s) could not be found." %
                        constraint.extension)
            self.assertEqual(expected, six.text_type(ex))
        self.assertTrue(constraint.validate("foo", ctx))
        self.assertFalse(constraint.validate("bar", ctx))
        mock_find.assert_has_calls(
            [mock.call(self.resource_type, 'foo',
                       cmd_resource=self.cmd_resource),
             mock.call(self.resource_type, 'bar',
                       cmd_resource=self.cmd_resource)])
class NeutronProviderConstraintsValidate(common.HeatTestCase):
    """Validation tests for the LBaaS provider-name constraints."""
    scenarios = [
        ('validate_lbaasv1',
         dict(constraint_class=nc.LBaasV1ProviderConstraint,
              service_type='LOADBALANCER')),
        ('validate_lbaasv2',
         dict(constraint_class=lc.LBaasV2ProviderConstraint,
              service_type='LOADBALANCERV2'))
    ]
    def test_provider_validate(self):
        """A known provider name validates; an unknown one does not."""
        # NOTE(review): this local ``nc`` shadows the ``nc`` module alias
        # imported at the top of the file.
        nc = mock.Mock()
        mock_create = self.patchobject(neutron.NeutronClientPlugin, '_create')
        mock_create.return_value = nc
        # NOTE(review): 'LOADBANALCER*' looks like a typo for 'LOADBALANCER*'
        # -- confirm whether provider validation actually filters on
        # service_type, since the test passes regardless.
        providers = {
            'service_providers': [
                {'service_type': 'LOADBANALCERV2', 'name': 'haproxy'},
                {'service_type': 'LOADBANALCER', 'name': 'haproxy'}
            ]
        }
        nc.list_service_providers.return_value = providers
        constraint = self.constraint_class()
        ctx = utils.dummy_context()
        self.assertTrue(constraint.validate('haproxy', ctx))
        self.assertFalse(constraint.validate("bar", ctx))
class NeutronClientPluginExtensionsTests(NeutronClientPluginTestCase):
    """Tests for extensions in neutronclient."""
    def test_has_no_extension(self):
        """Empty extension list -> has_extension() is False."""
        mock_extensions = {'extensions': []}
        self.neutron_client.list_extensions.return_value = mock_extensions
        self.assertFalse(self.neutron_plugin.has_extension('lbaas'))
    def test_without_service_extension(self):
        """Extensions present but the requested alias is missing -> False."""
        mock_extensions = {'extensions': [{'alias': 'router'}]}
        self.neutron_client.list_extensions.return_value = mock_extensions
        self.assertFalse(self.neutron_plugin.has_extension('lbaas'))
    def test_has_service_extension(self):
        """Requested alias advertised by the server -> True."""
        mock_extensions = {'extensions': [{'alias': 'router'}]}
        self.neutron_client.list_extensions.return_value = mock_extensions
        self.assertTrue(self.neutron_plugin.has_extension('router'))
|
{
"content_hash": "e338cb6217d9ef6deb2417cbd6d4041b",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 78,
"avg_line_length": 41.57664233576642,
"alnum_prop": 0.5482794943820225,
"repo_name": "gonzolino/heat",
"id": "aafb64590d094e389b329e8ea0afe4828a965f9c",
"size": "11967",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/tests/clients/test_neutron_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7214144"
},
{
"name": "Shell",
"bytes": "32170"
}
],
"symlink_target": ""
}
|
from slamon.afm.tables import Agent, AgentCapability, Task
from slamon.afm.database import Base, engine, init_connection
import logging
import argparse
import sys
# Module-level logger shared by the schema helpers below.
logger = logging.getLogger('admin')
def create_tables():
    """Create all tables declared on the SQLAlchemy ``Base`` metadata."""
    logger.info('Creating tables')
    Base.metadata.create_all(engine)
def drop_tables():
    """Drop all tables declared on the SQLAlchemy ``Base`` metadata."""
    logger.info('Dropping tables')
    Base.metadata.drop_all(engine)
if __name__ == '__main__':
    # Command-line entry point: create or drop the AFM database schema.
    parser = argparse.ArgumentParser(description='Admin util for SLAMon Agent Fleet Manager')
    parser.add_argument('--create-tables', help='Create tables', action='store_true', default=False)
    parser.add_argument('--drop-tables', help='Drop all tables', action='store_true', default=False)
    init_connection(unittest=False)
    args = parser.parse_args()
    # store_true flags are plain booleans: test truthiness rather than
    # identity against True (PEP 8). --create-tables wins if both are given.
    if args.create_tables:
        create_tables()
    elif args.drop_tables:
        drop_tables()
    else:
        # No action requested: show usage and exit with a failure status.
        parser.print_help()
        sys.exit(1)
|
{
"content_hash": "98493ed1dc0ce90e6034ac315b8663c0",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 100,
"avg_line_length": 28.058823529411764,
"alnum_prop": 0.6928721174004193,
"repo_name": "StealthyLoner/SLAMon",
"id": "a0f3026e8215332a72e9fe1dc7572be8b84b499c",
"size": "977",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "slamon/afm/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5404"
},
{
"name": "Java",
"bytes": "124032"
},
{
"name": "Python",
"bytes": "74716"
},
{
"name": "Ruby",
"bytes": "2140"
},
{
"name": "Shell",
"bytes": "1443"
}
],
"symlink_target": ""
}
|
"""The Module implements utilities to build a composite decision-maker."""
# =============================================================================
# IMPORTS
# =============================================================================
from collections import Counter
from .core import SKCMethodABC
from .utils import Bunch
# =============================================================================
# CLASS
# =============================================================================
class SKCPipeline(SKCMethodABC):
    """Pipeline of transforms with a final decision-maker.

    Sequentially apply a list of transforms and a final decision-maker.
    Intermediate steps of the pipeline must be 'transforms', that is, they
    must implement the `transform` method.

    The final decision-maker only needs to implement `evaluate`.

    The purpose of the pipeline is to assemble several steps that can be
    applied together while setting different parameters. A step's
    estimator may be replaced entirely by setting the parameter with its name
    to another dmaker or a transformer removed by setting it to
    `'passthrough'` or `None`.

    Parameters
    ----------
    steps : list
        List of (name, transform) tuples (implementing evaluate/transform)
        that are chained, in the order in which they are chained, with the last
        object a decision-maker.

    See Also
    --------
    skcriteria.pipeline.mkpipe : Convenience function for simplified
        pipeline construction.
    """
    _skcriteria_dm_type = "pipeline"
    _skcriteria_parameters = ["steps"]
    def __init__(self, steps):
        # Materialize and validate once; the list is stored privately and
        # only exposed as copies via the ``steps`` property.
        steps = list(steps)
        self._validate_steps(steps)
        self._steps = steps
    @property
    def steps(self):
        """List of steps of the pipeline (a copy; mutating it has no effect)."""
        return list(self._steps)
    def __len__(self):
        """Return the number of steps in the Pipeline."""
        return len(self.steps)
    def __getitem__(self, ind):
        """Return a sub-pipeline or a single step in the pipeline.

        Indexing with an integer will return a step; using a slice
        returns another Pipeline instance which copies a slice of this
        Pipeline. This copy is shallow: modifying steps in the sub-pipeline
        will affect the larger pipeline and vice-versa.
        However, replacing a value in `step` will not affect a copy.
        """
        if isinstance(ind, slice):
            if ind.step not in (1, None):
                raise ValueError("Pipeline slicing only supports a step of 1")
            return self.__class__(self.steps[ind])
        elif isinstance(ind, int):
            # ``[-1]`` strips the name, yielding the step object itself.
            return self.steps[ind][-1]
        elif isinstance(ind, str):
            return self.named_steps[ind]
        raise KeyError(ind)
    def _validate_steps(self, steps):
        # Every step but the last must be a transformer; the last must be a
        # decision-maker.
        # NOTE(review): an empty ``steps`` list reaches ``steps[-1]`` below
        # and raises a bare IndexError -- consider a clearer error.
        for name, step in steps[:-1]:
            if not isinstance(name, str):
                raise TypeError("step names must be instance of str")
            if not (hasattr(step, "transform") and callable(step.transform)):
                raise TypeError(
                    f"step '{name}' must implement 'transform()' method"
                )
        name, dmaker = steps[-1]
        if not isinstance(name, str):
            raise TypeError("step names must be instance of str")
        if not (hasattr(dmaker, "evaluate") and callable(dmaker.evaluate)):
            raise TypeError(
                f"step '{name}' must implement 'evaluate()' method"
            )
    @property
    def named_steps(self):
        """Dictionary-like object, with the following attributes.

        Read-only attribute to access any step parameter by user given name.
        Keys are step names and values are steps parameters.
        """
        return Bunch("steps", dict(self.steps))
    def evaluate(self, dm):
        """Run all the transformers and then the decision-maker.

        Parameters
        ----------
        dm: :py:class:`skcriteria.data.DecisionMatrix`
            Decision matrix on which the result will be calculated.

        Returns
        -------
        r : Result
            Whatever the last step (decision maker) returns from their evaluate
            method.
        """
        dm = self.transform(dm)
        _, dmaker = self.steps[-1]
        result = dmaker.evaluate(dm)
        return result
    def transform(self, dm):
        """Run all the transformers (every step except the last).

        Parameters
        ----------
        dm: :py:class:`skcriteria.data.DecisionMatrix`
            Decision matrix on which the transformations will be applied.

        Returns
        -------
        dm: :py:class:`skcriteria.data.DecisionMatrix`
            Transformed decision matrix.
        """
        for _, step in self.steps[:-1]:
            dm = step.transform(dm)
        return dm
# =============================================================================
# FUNCTIONS
# =============================================================================
def _name_steps(steps):
"""Generate names for steps."""
# Based on sklearn.pipeline._name_estimators
steps = list(reversed(steps))
names = [type(step).__name__.lower() for step in steps]
name_count = {k: v for k, v in Counter(names).items() if v > 1}
named_steps = []
for name, step in zip(names, steps):
count = name_count.get(name, 0)
if count:
name_count[name] = count - 1
name = f"{name}_{count}"
named_steps.append((name, step))
named_steps.reverse()
return named_steps
def mkpipe(*steps):
    """Construct a Pipeline from the given transformers and decision-maker.

    Shorthand for the :class:`SKCPipeline` constructor that does not
    require, and does not permit, naming the estimators; their names are
    derived automatically from the lowercased class names.

    Parameters
    ----------
    *steps: list of transformers and decision-maker object
        The scikit-criteria transformers and decision-maker to chain
        together.

    Returns
    -------
    p : SKCPipeline
        The assembled :class:`SKCPipeline` object.
    """
    return SKCPipeline(_name_steps(steps))
|
{
"content_hash": "03b81917ca4e38699859b12169ba1f28",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 79,
"avg_line_length": 31.82741116751269,
"alnum_prop": 0.5688995215311005,
"repo_name": "leliel12/scikit-criteria",
"id": "9eb7a813d2b7dbfd69035aaa7e0ce4f08041bd7e",
"size": "6675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "skcriteria/pipeline.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "19593"
},
{
"name": "Python",
"bytes": "314308"
},
{
"name": "Shell",
"bytes": "30"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the ``Skill`` table.

    Auto-generated by Django; avoid hand-editing -- add a new migration for
    any schema change instead.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Skill',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                # 1..100 bound enforced by validators at model/form validation
                # time, not as a database constraint.
                ('skill_level', models.IntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)])),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_modified', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
|
{
"content_hash": "e05b86dfa64cfe969d0b6cc53340df4b",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 158,
"avg_line_length": 32.48,
"alnum_prop": 0.6133004926108374,
"repo_name": "danux/danjd",
"id": "3fd1429091b6c067025376f8e2bcc44d5acbb47d",
"size": "882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cv/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22676"
},
{
"name": "HTML",
"bytes": "8984"
},
{
"name": "Python",
"bytes": "11173"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django import forms
def make_form(edit_class, fields):
    """Dynamically build a ModelForm subclass editing *fields* of *edit_class*.

    The generated form carries a hidden ``form_fields`` input holding the
    comma-joined field names -- presumably so the submit handler can recover
    which fields the form covered; verify against the consuming view.
    """
    # type() needs native-str (bytes on Python 2) names, hence the b''
    # prefixes: this module imports unicode_literals.
    return type(b'EditFormFor{}'.format(edit_class.__name__),
                (forms.ModelForm,),
                dict(
                    form_fields=forms.CharField(initial=','.join(fields),
                                                widget=forms.HiddenInput()),
                    Meta=type(b'Meta', (object,),
                              dict(model=edit_class, fields=fields)
                              )
                    )
                )
|
{
"content_hash": "9cdfd9941e7cbdec4ff1a56dc3217fbe",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 65,
"avg_line_length": 31.571428571428573,
"alnum_prop": 0.5633484162895928,
"repo_name": "eduardolujan/trainning_site",
"id": "a4e7450ad489aa8cc1c262c57dc1577c5193a588",
"size": "442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trainning_site/trainning_site/libs/front_edit/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "58505"
},
{
"name": "Makefile",
"bytes": "2995"
},
{
"name": "Python",
"bytes": "44023"
}
],
"symlink_target": ""
}
|
import sys
import warnings
class PyStdIsDeprecatedWarning(DeprecationWarning):
    """Warning category emitted when a module is accessed via ``py.std``."""
    pass


class Std(object):
    """ makes top-level python modules available as an attribute,
        importing them on first access.
    """

    def __init__(self):
        # Alias the instance dict to sys.modules: every already-imported
        # module is reachable as an attribute without __getattr__ firing.
        self.__dict__ = sys.modules

    def __getattr__(self, name):
        """Import *name* on first access, warning that py.std is deprecated.

        Raises AttributeError (not ImportError) on failure so the object
        behaves like a normal attribute namespace.
        """
        # Fixed typo in the user-facing message: "plase" -> "please".
        warnings.warn("py.std is deprecated, please import %s directly" % name,
                      category=PyStdIsDeprecatedWarning)
        try:
            m = __import__(name)
        except ImportError:
            raise AttributeError("py.std: could not import %s" % name)
        return m


std = Std()
|
{
"content_hash": "ebe5c559a35cbab9a0adb2e3ed8cc20f",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 78,
"avg_line_length": 24.26923076923077,
"alnum_prop": 0.6038034865293186,
"repo_name": "lmregus/Portfolio",
"id": "74d43672654e7bca9aa2617d8f6f14ae0ad8d8a9",
"size": "631",
"binary": false,
"copies": "49",
"ref": "refs/heads/master",
"path": "python/design_patterns/env/lib/python3.7/site-packages/py/_std.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "27682"
},
{
"name": "C++",
"bytes": "25458"
},
{
"name": "CSS",
"bytes": "12842"
},
{
"name": "HTML",
"bytes": "49171"
},
{
"name": "Java",
"bytes": "99711"
},
{
"name": "JavaScript",
"bytes": "827"
},
{
"name": "Python",
"bytes": "42857"
},
{
"name": "Shell",
"bytes": "5710"
}
],
"symlink_target": ""
}
|
from resources import *
from cc_delegate import ComboBoxDelegate, DateDelegate, PhoneDelegate, PersonIdDelegate
class ApplyDlg(QDialog):
    """Dialog for reviewing approval records.

    Displays the ``approvalmodel`` table in a relational table view with
    name / review-result filtering, manual save / revert / delete
    actions, and per-cell edit locking driven by record status and the
    current user's unit group.
    """

    def __init__(self, parent=None, db="", curuser={}):
        # NOTE(review): the mutable default for ``curuser`` is kept for
        # interface compatibility; it is only read here, never mutated.
        super(ApplyDlg, self).__init__(parent)
        # Fall back to the global database handle when none is supplied.
        if db == "":
            self.db = globaldb()
        else:
            self.db = db
        self.curuser = curuser
        self.willApply = []  # records queued via setWillApply() for newApply()

        # --- model / view setup ---------------------------------------
        self.ApplyView = QTableView()
        self.ApplyModel = QSqlRelationalTableModel(self.ApplyView)
        self.ApplyModel.setTable("approvalmodel")
        # Column 2 is a foreign key into mentalmodel; display its "name".
        self.ApplyModel.setRelation(2, QSqlRelation("mentalmodel", "id", "name"))
        # Edits are buffered until an explicit submitAll() in saveApply().
        self.ApplyModel.setEditStrategy(QSqlTableModel.OnManualSubmit)
        self.ApplyModel.select()
        for indx, iheader in enumerate(LST_APPROVALHEADER):
            self.ApplyModel.setHeaderData(indx + 1, Qt.Horizontal, iheader)
        self.ApplyView.setModel(self.ApplyModel)
        self.ApplyView.setColumnHidden(0, True)  # hide the primary key
        # Hide detail columns 8..42 except the status column (15).
        hideColList = list(range(8, 43))
        hideColList.remove(15)
        for icol in hideColList:
            self.ApplyView.setColumnHidden(icol, True)
        self.ApplyView.setColumnWidth(1, 150)
        self.ApplyView.setColumnWidth(4, 120)
        # Combo-box / date delegates for the certificate and date columns.
        self.ApplyView.setItemDelegateForColumn(3, ComboBoxDelegate(self, CERT1_CHOICES))
        self.ApplyView.setItemDelegateForColumn(4, ComboBoxDelegate(self, CERT2_CHOICES))
        self.ApplyView.setItemDelegateForColumn(5, ComboBoxDelegate(self, CERT3_CHOICES))
        self.ApplyView.setItemDelegateForColumn(6, DateDelegate(self))
        self.ApplyView.setAlternatingRowColors(True)
        self.ApplyView.setStyleSheet("QTableView{background-color: rgb(250, 250, 115);"
                                     "alternate-background-color: rgb(141, 163, 215);}"
                                     "QTableView::item:hover {background-color: rgba(100,200,220,100);} ")
        self.ApplyView.verticalHeader().setStyleSheet("color: red;font-size:20px; ")
        # NOTE(review): this second setStyleSheet() call replaces the sheet
        # set above (Qt stylesheets are not additive) — confirm whether the
        # background/alternate colours are actually wanted.
        self.ApplyView.setStyleSheet("font-size:14px; ")
        self.ApplyView.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)

        # --- action buttons -------------------------------------------
        btnbox = QDialogButtonBox(Qt.Horizontal)
        savebtn = QPushButton("保存")
        revertbtn = QPushButton("撤销")
        removebtn = QPushButton("删除")
        btnbox.addButton(savebtn, QDialogButtonBox.ActionRole)
        btnbox.addButton(revertbtn, QDialogButtonBox.ActionRole)
        btnbox.addButton(removebtn, QDialogButtonBox.ActionRole)
        self.infoLabel = QLabel("", alignment=Qt.AlignLeft)
        self.infoLabel.setFrameStyle(QFrame.StyledPanel | QFrame.Sunken)

        # --- search bar -----------------------------------------------
        nameLabel = QLabel("姓名:")
        self.nameEdit = QLineEdit()
        # Disallow single quotes: findApply() builds its SQL filter by hand.
        regExp = QRegExp("[^']*")
        self.nameEdit.setValidator(QRegExpValidator(regExp, self))
        nameLabel.setBuddy(self.nameEdit)
        applyresultLabel = QLabel("审核结果:")
        self.applyresultCombo = QComboBox(self)
        self.applyresultCombo.addItems(ISAPPROVAL_CHOICES)
        # Leading empty entry means "any result" in findApply().
        self.applyresultCombo.insertItem(0, "")
        self.applyresultCombo.setCurrentIndex(0)
        applyresultLabel.setBuddy(self.applyresultCombo)
        findbutton = QPushButton("查询")
        findbox = QHBoxLayout()
        findbox.setMargin(10)
        findbox.setAlignment(Qt.AlignHCenter)
        findbox.addStretch(10)
        findbox.addWidget(nameLabel)
        findbox.addWidget(self.nameEdit)
        findbox.addStretch(10)
        findbox.addWidget(applyresultLabel)
        findbox.addWidget(self.applyresultCombo)
        findbox.addWidget(findbutton)
        findbox.addStretch(10)

        # --- layout ---------------------------------------------------
        vbox = QVBoxLayout()
        vbox.setMargin(5)
        vbox.addLayout(findbox)
        vbox.addWidget(self.ApplyView)
        vbox.addWidget(self.infoLabel)
        vbox.addWidget(btnbox)
        self.setLayout(vbox)

        # --- signals --------------------------------------------------
        savebtn.clicked.connect(self.saveApply)
        revertbtn.clicked.connect(self.revertApply)
        removebtn.clicked.connect(self.removeApply)
        findbutton.clicked.connect(self.findApply)
        self.ApplyView.doubleClicked.connect(self.dbclick)

    def setWillApply(self, lstwillapply):
        """Queue the records that newApply() should insert."""
        self.willApply = lstwillapply

    def closeEvent(self, event):
        # Release the database connection when the dialog closes.
        self.db.close()

    def dbclick(self, indx):
        """Adjust edit triggers for the double-clicked cell.

        Base rule: editable only while the record's status (column 15)
        is still "待审" and the column is not one of the read-only
        columns 1/2/7/15.  When a current user is known, the unit group
        overrides this: "市残联" users may edit any column except 1;
        all other groups are read-only.
        """
        if indx.sibling(indx.row(), 15).data() != "待审":
            self.ApplyView.setEditTriggers(QAbstractItemView.NoEditTriggers)
        else:
            if indx.column() in [1, 2, 7, 15]:
                self.ApplyView.setEditTriggers(QAbstractItemView.NoEditTriggers)
            else:
                self.ApplyView.setEditTriggers(QAbstractItemView.DoubleClicked)
        # Once the review is finalised the item is locked via the global
        # setEditTriggers; the user's unit group has the final say.
        if self.curuser != {}:
            if self.curuser["unitgroup"] == "市残联":
                if indx.column() == 1:
                    self.ApplyView.setEditTriggers(QAbstractItemView.NoEditTriggers)
                else:
                    self.ApplyView.setEditTriggers(QAbstractItemView.DoubleClicked)
            else:
                self.ApplyView.setEditTriggers(QAbstractItemView.NoEditTriggers)

    def findApply(self):
        """Filter the grid by name substring and review result."""
        name = self.nameEdit.text()
        applyresult = self.applyresultCombo.currentText()
        # NOTE(review): values are interpolated into the filter by hand;
        # the name validator above blocks quotes, which limits injection.
        strwhere = "relTblAl_2.name like '%%%s%%' and isapproval like '%%%s%%'" % (name, applyresult)
        self.ApplyModel.setFilter(strwhere)
        self.ApplyModel.select()
        # Fix: this previously read ``self.ApprovalModel``, an attribute
        # never defined on this class, raising AttributeError on search.
        self.infoLabel.setText("合计:当前查询人数 <font color='red'>%d</font>  " % int(self.ApplyModel.rowCount()))

    def removeApply(self):
        """Delete the currently selected record after confirmation."""
        index = self.ApplyView.currentIndex()
        row = index.row()
        if row != -1:
            ppname = self.ApplyModel.data(self.ApplyModel.index(row, 1))
            if QMessageBox.question(self, "删除确认", "是否要删除当前选中记录?\n\n姓名:%s\n\n" % ppname, "确定", "取消") == 0:
                self.ApplyModel.removeRows(row, 1)
                self.ApplyModel.submitAll()
                self.ApplyModel.database().commit()
                self.infoLabel.setText("")

    def revertApply(self):
        """Discard all unsubmitted edits."""
        self.ApplyModel.revertAll()
        self.ApplyModel.database().rollback()
        self.infoLabel.setText("")

    def newApply(self):
        """Append a pending ("待审") row for each queued record."""
        if self.curuser == {}:
            applyman = "某某"
        else:
            # NOTE(review): ``self.unitman`` is never assigned anywhere in
            # this class, so this branch raises AttributeError — confirm
            # the intended source (e.g. a field of ``curuser``).
            applyman = self.unitman
        for iwillapply in self.willApply:
            row = self.ApplyModel.rowCount()
            self.ApplyModel.insertRow(row)
            self.ApplyModel.setData(self.ApplyModel.index(row, 2), iwillapply)
            self.ApplyModel.setData(self.ApplyModel.index(row, 7), applyman)
            self.ApplyModel.setData(self.ApplyModel.index(row, 15), '待审')
        self.ApplyView.scrollToBottom()
        self.infoLabel.setText("")

    def saveApply(self):
        """Commit buffered edits inside a transaction; roll back on failure."""
        self.ApplyModel.database().transaction()
        if self.ApplyModel.submitAll():
            self.ApplyModel.database().commit()
        else:
            self.ApplyModel.revertAll()
            self.ApplyModel.database().rollback()
        self.ApplyModel.setFilter("1=1")
        self.infoLabel.setText("")
if __name__ == "__main__":
    # Manual smoke test: launch the dialog standalone.
    import sys
    application = QApplication(sys.argv)
    dlg = ApplyDlg()
    dlg.show()
    application.exec_()
|
{
"content_hash": "08ee6d633713a1373af903d1774f73be",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 109,
"avg_line_length": 39.446280991735534,
"alnum_prop": 0.6232977163209721,
"repo_name": "iefan/kfmental",
"id": "62135500535fbcd382ab3c6b5a0acfc83286e19a",
"size": "9738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frmApply.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "128198"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from django.db.models import AutoField
from django.forms import ValidationError
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.core.forms import DynamicInlineAdminForm
from mezzanine.core.models import CONTENT_STATUS_PUBLISHED, Orderable
from mezzanine.utils.urls import admin_url
class DisplayableAdmin(admin.ModelAdmin):
    """
    Admin class for subclasses of the abstract ``Displayable`` model.
    """

    # Change-list configuration: status is editable inline, filterable,
    # and rendered as horizontal radio buttons on the change form.
    list_display = ("title", "status", "admin_link")
    list_display_links = ("title",)
    list_editable = ("status",)
    list_filter = ("status",)
    search_fields = ("title", "content",)
    date_hierarchy = "publish_date"
    radio_fields = {"status": admin.HORIZONTAL}
    # Main fields up top; SEO/meta fields in a collapsed fieldset.
    fieldsets = (
        (None, {
            "fields": ["title", "status", ("publish_date", "expiry_date")],
        }),
        (_("Meta data"), {
            "fields": ["slug", ("description", "gen_description"), "keywords"],
            "classes": ("collapse-closed",)
        }),
    )

    def get_form(self, request, obj=None, **kwargs):
        """
        Add validation for the content field - it's required if status
        is set to published. We patch this method onto the form to avoid
        problems that come up with trying to use a form class. See:
        https://bitbucket.org/stephenmcd/mezzanine/pull-request/23/
        allow-content-field-on-richtextpage-to-be
        """
        form = super(DisplayableAdmin, self).get_form(request, obj, **kwargs)

        def clean_content(form):
            # Published content must be non-empty; drafts may be blank.
            status = form.cleaned_data.get("status")
            content = form.cleaned_data.get("content")
            if status == CONTENT_STATUS_PUBLISHED and not content:
                raise ValidationError(_("This field is required if status "
                                        "is set to published."))
            return content

        # Patched onto the form class rather than defined on a form.
        form.clean_content = clean_content
        return form
class BaseDynamicInlineAdmin(object):
    """
    Admin inline that uses JS to inject an "Add another" link which
    when clicked, dynamically reveals another fieldset. Also handles
    adding the ``_order`` field and its widget for models that
    subclass ``Orderable``.
    """

    form = DynamicInlineAdminForm
    extra = 20

    def __init__(self, *args, **kwargs):
        super(BaseDynamicInlineAdmin, self).__init__(*args, **kwargs)
        if issubclass(self.model, Orderable):
            if self.fields:
                # ``fields`` was declared explicitly as field *names*.
                # (Fix: the original ran names through the attribute-based
                # comprehension below, raising AttributeError since strings
                # have no ``editable``/``name`` attributes.)
                fields = list(self.fields)
            else:
                # Derive editable, non-excluded, non-auto field names
                # from the model itself.
                exclude = self.exclude or []
                fields = [f.name for f in self.model._meta.fields
                          if f.editable and f.name not in exclude and
                          not isinstance(f, AutoField)]
            # Move the ordering field to the end so it renders last.
            if "_order" in fields:
                fields.remove("_order")
                fields.append("_order")
            self.fields = fields
class TabularDynamicInlineAdmin(BaseDynamicInlineAdmin, admin.TabularInline):
    # Tabular rendering of the dynamic inline; the template injects the
    # JS hooks for the "Add another" behaviour.
    template = "admin/includes/dynamic_inline_tabular.html"
class StackedDynamicInlineAdmin(BaseDynamicInlineAdmin, admin.StackedInline):
    """Stacked flavour of the dynamic inline.

    Requires Grappelli: the JavaScript in dynamic_inline.js cannot
    target individual stacked inlines to maintain the order field
    without it, so construction fails fast when Grappelli is absent.
    """

    template = "admin/includes/dynamic_inline_stacked.html"

    def __init__(self, *args, **kwargs):
        grappelli = getattr(settings, "PACKAGE_NAME_GRAPPELLI")
        if grappelli in settings.INSTALLED_APPS:
            super(StackedDynamicInlineAdmin, self).__init__(*args, **kwargs)
        else:
            raise Exception(
                "StackedDynamicInlineAdmin requires Grappelli installed.")
class OwnableAdmin(admin.ModelAdmin):
    """Admin class for models subclassing the abstract ``Ownable`` model.

    Non-superusers only see their own objects in the change list, and
    newly created objects are stamped with the logged-in user.
    """

    def save_form(self, request, form, change):
        """Assign the logged-in user as owner when none is set yet."""
        instance = form.save(commit=False)
        if instance.user_id is None:
            instance.user = request.user
        return super(OwnableAdmin, self).save_form(request, form, change)

    def queryset(self, request):
        """Restrict the change list to the requesting user's records,
        unless the user is a superuser."""
        qs = super(OwnableAdmin, self).queryset(request)
        if not request.user.is_superuser:
            qs = qs.filter(user__id=request.user.id)
        return qs
class SingletonAdmin(admin.ModelAdmin):
    """
    Admin class for models that should only contain a single instance
    in the database. Redirect all views to the change view when the
    instance exists, and to the add view when it doesn't.
    """

    def add_view(self, *args, **kwargs):
        """
        Redirect to the change view if the singleton instance exists.
        """
        try:
            singleton = self.model.objects.get()
        except (self.model.DoesNotExist, self.model.MultipleObjectsReturned):
            # Zero (or unexpectedly many) records: allow normal add view.
            return super(SingletonAdmin, self).add_view(*args, **kwargs)
        else:
            change_url = admin_url(self.model, "change", singleton.id)
            return redirect(change_url)

    def changelist_view(self, *args, **kwargs):
        """
        Redirect to the add view if no records exist or the change
        view if the singleton instance exists.
        """
        try:
            singleton = self.model.objects.get()
        except self.model.MultipleObjectsReturned:
            # More than one record: fall back to the normal change list.
            return super(SingletonAdmin, self).changelist_view(*args, **kwargs)
        except self.model.DoesNotExist:
            add_url = admin_url(self.model, "add")
            return redirect(add_url)
        else:
            change_url = admin_url(self.model, "change", singleton.id)
            return redirect(change_url)

    def change_view(self, request, object_id, extra_context=None):
        """
        If only the singleton instance exists, pass ``True`` for
        ``singleton`` into the template which will use CSS to hide
        the "save and add another" button.
        """
        if extra_context is None:
            extra_context = {}
        try:
            self.model.objects.get()
        except (self.model.DoesNotExist, self.model.MultipleObjectsReturned):
            pass
        else:
            extra_context["singleton"] = True
        response = super(SingletonAdmin, self).change_view(request, object_id,
                                                           extra_context)
        # "Save" on a singleton returns to the admin index rather than
        # back to the (redirect-looping) change list.
        if request.POST.get("_save"):
            response = redirect("admin:index")
        return response
|
{
"content_hash": "e2e6572f2253f99a5217fe7b0a8120c0",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 79,
"avg_line_length": 37.475675675675674,
"alnum_prop": 0.617914322803981,
"repo_name": "sachingupta006/Mezzanine",
"id": "56e08beb9c15e6e2331a1c5159f1f0363cf7543b",
"size": "6934",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mezzanine/core/admin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "63548"
},
{
"name": "Python",
"bytes": "754016"
}
],
"symlink_target": ""
}
|
from pybuilder.core import use_plugin, init, Author
from pybuilder.vcs import VCSRevision
# PyBuilder plugins: packaging core, unit tests, dependency install,
# flake8 lint, coverage, distutils packaging, cram CLI tests and
# resource filtering (used to substitute values into sources).
use_plugin("python.core")
use_plugin("python.unittest")
use_plugin("python.install_dependencies")
use_plugin("python.flake8")
use_plugin("python.coverage")
use_plugin("python.distutils")
use_plugin("python.cram")
use_plugin("filter_resources")

# Project metadata; the version is derived from the git commit count.
name = "cbas"
default_task = "publish"
version = VCSRevision().get_git_revision_count()
summary = 'Command line interface to the c-bastion'
authors = [
    Author('Sebastian Spoerer', "sebastian.spoerer@immobilienscout24.de"),
    Author('Valentin Haenel', "valentin.haenel@immobilienscout24.de"),
]
url = 'https://github.com/ImmobilienScout24/cbas'
@init
def set_properties(project):
    """Declare runtime/build dependencies and resource filtering."""
    runtime_deps = ('click', 'keyring', 'secretstorage', 'yamlreader',
                    'requests', 'six')
    for dependency in runtime_deps:
        project.depends_on(dependency)
    build_deps = ('requests_mock', 'mock', 'bottle')
    for dependency in build_deps:
        project.build_depends_on(dependency)
    project.get_property('filter_resources_glob').extend(
        ['**/cbas/__init__.py'])
|
{
"content_hash": "f0d11150da9afed386e1c7b6ae550f3c",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 74,
"avg_line_length": 30.945945945945947,
"alnum_prop": 0.7222707423580786,
"repo_name": "ImmobilienScout24/cbas",
"id": "1eb6e03990443caaeb7a394a6297d5351621ace4",
"size": "1145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "12628"
},
{
"name": "Python",
"bytes": "24577"
}
],
"symlink_target": ""
}
|
"""
Created by Ben Scott on '20/11/2016'.
"""
import io
import math
import requests
from PIL import Image
class ImageOptimAPI():
    """Minimal client for the ImageOptim image-optimisation web API.

    ``username`` is the API username used as the first URL path segment
    after the endpoint.  Images can be optimised from a public URL or a
    local file, optionally resized to power-of-two dimensions.
    """

    endpoint = 'https://im2.io'

    def __init__(self, username):
        self.username = username

    def parse_options(self, options):
        """Serialise an options mapping into the API's URL segment.

        ``size`` (if present) comes first; boolean flags are emitted as
        bare keywords, everything else as ``key=value``.  Returns
        ``'full'`` when no options are given.  The caller's mapping is
        left untouched (the original popped from it in place).
        """
        if not options:
            return 'full'
        opts = dict(options)  # copy: never mutate the caller's mapping
        parts = []
        size = opts.pop('size', None)
        if size is not None:
            parts.append(size)
        for key, value in opts.items():
            if isinstance(value, bool):
                parts.append(key)
            else:
                parts.append('%s=%s' % (key, value))
        return ','.join(parts)

    def image_from_url(self, file_url, options=None):
        """Optimise the image at ``file_url``; return it as a PIL Image."""
        # Fix: default was a shared mutable ``{}``.
        url = self._url(options, file_url)
        return self._call(url)

    def image_from_file(self, file, options=None, resize_pow2=False):
        """Optimise an already-open binary file object.

        When ``resize_pow2`` is true the image is resized (upscaling
        allowed) so both dimensions are powers of two, which speeds up
        rendering when using THREE.WebGLRenderer.  Incompatible with an
        explicit ``size`` option.
        """
        # Check we have a file object to work with
        if not isinstance(file, io.IOBase):
            raise IOError('Image file is not a readable file object')
        # Work on a private copy so neither the caller's dict nor a
        # shared default is mutated (the original wrote into ``options``).
        options = dict(options) if options else {}
        if resize_pow2:
            # Cannot combine an explicit size with power-of-two resizing.
            if 'size' in options:
                raise IOError('Cannot specify dimensions with power of 2 resizing')
            # Open by name: reading through the file object would consume it.
            img = Image.open(file.name)
            dimensions = {
                'width': img.width,
                'height': img.height,
            }
            # Does the image require resizing?
            resize = False
            for dimension, value in dimensions.items():
                if not self.is_power2(value):
                    resize = True
                    dimensions[dimension] = self.resize_power2(value)
            if resize:
                options['size'] = '{width}x{height}'.format(
                    width=dimensions['width'],
                    height=dimensions['height']
                )
                # Allow upscaling so smaller images can reach a power of 2.
                options['fit'] = True
        url = self._url(options)
        return self._call(url, files={'file': file})

    def image_from_file_path(self, file_path, options=None, resize_pow2=False):
        """Open ``file_path`` in binary mode and optimise it."""
        # Fix: the file handle was previously never closed.
        with open(file_path, 'rb') as f:
            return self.image_from_file(f, options, resize_pow2)

    def _url(self, options, file_url=None):
        """Build the request URL: endpoint/username/options[/file_url]."""
        url_parts = [
            self.endpoint,
            self.username,
            self.parse_options(options)
        ]
        if file_url:
            url_parts.append(file_url)
        return '/'.join(url_parts)

    @staticmethod
    def _call(url, **kwargs):
        """POST to the API and decode the optimised image from the body."""
        r = requests.post(url, **kwargs)
        r.raise_for_status()
        return Image.open(io.BytesIO(r.content))

    @staticmethod
    def is_power2(x):
        """Return True if ``x`` is a positive power of two."""
        return ((x & (x - 1)) == 0) and x != 0

    @staticmethod
    def resize_power2(x):
        """Round ``x`` up to the nearest (greater or equal) power of two."""
        return int(pow(2, math.ceil(math.log(x, 2))))
|
{
"content_hash": "d5f2aaaed1b9de231fadf939461492d1",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 78,
"avg_line_length": 29.173076923076923,
"alnum_prop": 0.6114040870138431,
"repo_name": "benscott/imageoptim",
"id": "b3ef35cab7f4783e5e864a878841d1639f7b4eda",
"size": "3074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "imageoptim/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4066"
}
],
"symlink_target": ""
}
|
import numpy as np
from matplotlib import pyplot as plt
import cv2
def drawMatches(img1, kp1, img2, kp2, matches):
    """Visualise keypoint matches between two grayscale images.

    Builds a side-by-side montage of ``img1`` and ``img2``, draws a
    blue circle at every matched keypoint and a blue line connecting
    each pair, shows the result in a blocking OpenCV window, and
    returns the montage as a BGR uint8 array.

    img1/img2: grayscale arrays; kp1/kp2: cv2 keypoint lists;
    matches: cv2.DMatch list whose queryIdx/trainIdx index kp1/kp2.
    """
    rows1 = img1.shape[0]
    cols1 = img1.shape[1]
    rows2 = img2.shape[0]
    cols2 = img2.shape[1]

    # Montage canvas tall enough for both images, side by side.
    out = np.zeros((max([rows1, rows2]), cols1 + cols2, 3), dtype='uint8')
    # Grayscale stacked into 3 channels: left image, then right image.
    out[:rows1, :cols1, :] = np.dstack([img1, img1, img1])
    out[:rows2, cols1:cols1 + cols2, :] = np.dstack([img2, img2, img2])

    for mat in matches:
        # Matched keypoint coordinates in each image (x = col, y = row).
        (x1, y1) = kp1[mat.queryIdx].pt
        (x2, y2) = kp2[mat.trainIdx].pt
        # Blue circles (radius 4, thickness 1) at both endpoints; the
        # right-image point is shifted by the left image's width.
        cv2.circle(out, (int(x1), int(y1)), 4, (255, 0, 0), 1)
        cv2.circle(out, (int(x2) + cols1, int(y2)), 4, (255, 0, 0), 1)
        # Blue connecting line.
        cv2.line(out, (int(x1), int(y1)), (int(x2) + cols1, int(y2)), (255, 0, 0), 1)

    # Blocking preview window.
    cv2.imshow('Matched Features', out)
    cv2.waitKey(0)
    cv2.destroyAllWindows()

    # Fix: return the montage so callers (``img3 = drawMatches(...)``)
    # receive the rendered image instead of None.
    return out
img = cv2.imread('base.png', 0)  # reference image, loaded as grayscale
img_detect = cv2.imread('detect.png', 0)  # scene image to match against
## ORB feature matching (despite the original SIFT naming, cv2.ORB is used)
# Initiate ORB detector (OpenCV 2.x API; cv2.ORB_create() in 3.x+)
orb = cv2.ORB()
# find the keypoints and descriptors with ORB
kp1, des1 = orb.detectAndCompute(img,None)
kp2, des2 = orb.detectAndCompute(img_detect,None)
# create BFMatcher object; Hamming norm suits ORB's binary descriptors
bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
# Match descriptors.
matches = bf.match(des1,des2)
# Sort them in the order of their distance (best matches first).
matches = sorted(matches, key = lambda x:x.distance)
# Draw first 100 matches.
# NOTE(review): drawMatches as defined above returns None, so img3 is
# None unless the function is changed to return its montage — confirm.
img3 = drawMatches(img,kp1,img_detect,kp2,matches[:100])
|
{
"content_hash": "a1cd399d76de5a90836d75e811760771",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 81,
"avg_line_length": 28.243243243243242,
"alnum_prop": 0.6330143540669857,
"repo_name": "Lord-Nazdar/OpenCVFaceRecognition",
"id": "394d57ad558eb3fd677c19478917dc0df4bbad0c",
"size": "2090",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tp4_SIFT.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15711"
}
],
"symlink_target": ""
}
|
import os
from paste.urlparser import StaticURLParser
from pylons import request, response, session, tmpl_context as c
from pylons.controllers.util import abort, redirect
from pylons.templating import render_mako as render
from pylons import url
from webhelpers.paginate import Page
from sqlalchemy.orm import class_mapper, object_session
from formalchemy.fields import _pk
from formalchemy.fields import _stringify
from formalchemy import Grid, FieldSet
from formalchemy.i18n import get_translator
from formalchemy.fields import Field
from formalchemy import fatypes
try:
from formalchemy.ext.couchdb import Document
except ImportError:
Document = None
import simplejson as json
def model_url(*args, **kwargs):
    """Proxy ``pylons.url``, propagating ``model_name`` from the current
    route's ``pylons.routes_dict`` when the caller did not supply one."""
    routes_dict = request.environ['pylons.routes_dict']
    if 'model_name' not in kwargs and 'model_name' in routes_dict:
        kwargs['model_name'] = routes_dict['model_name']
    return url(*args, **kwargs)
class Session(object):
    """Abstract session interface so non-SQLAlchemy backends can plug in.

    Every operation is a no-op here; concrete backends override the
    methods they support.
    """

    def add(self, record):
        """Register a new ``record`` with the backend."""

    def update(self, record):
        """Persist changes made to an existing ``record``."""

    def delete(self, record):
        """Remove ``record`` from the backend."""

    def commit(self):
        """Flush pending changes."""
class _RESTController(object):
    """A RESTful Controller bound to a model"""

    # Mako template used to render both single-record and grid views.
    template = '/forms/restfieldset.mako'
    # ``engine`` (template engine) and ``prefix_name`` (optional URL
    # prefix) default to None; subclasses/wrappers override as needed.
    engine = prefix_name = None
    # FieldSet / Grid factories; rebindable per subclass.
    FieldSet = FieldSet
    Grid = Grid
    # jQuery-UI CSS classes passed to webhelpers' ``Page.pager``.
    pager_args = dict(link_attr={'class': 'ui-pager-link ui-state-default ui-corner-all'},
                      curpage_attr={'class': 'ui-pager-curpage ui-state-highlight ui-corner-all'})

    @property
    def model_name(self):
        """return ``model_name`` from ``pylons.routes_dict``"""
        return request.environ['pylons.routes_dict'].get('model_name', None)

    def Session(self):
        """return a Session object. You **must** override this."""
        return Session()

    def get_model(self):
        """return SA mapper class. You **must** override this."""
        raise NotImplementedError()

    def sync(self, fs, id=None):
        """sync a record. If ``id`` is None add a new record else save
        current one.

        Default is::

            S = self.Session()
            if id:
                S.merge(fs.model)
            else:
                S.add(fs.model)
            S.commit()
        """
        S = self.Session()
        if id:
            try:
                S.merge(fs.model)
            except AttributeError:
                # SA <= 0.5.6
                S.update(fs.model)
        else:
            S.add(fs.model)
        S.commit()

    def breadcrumb(self, action=None, fs=None, id=None, **kwargs):
        """return items to build the breadcrumb"""
        # Each item is a (url-or-None, label) tuple, root first.
        items = []
        if self.prefix_name:
            items.append((url(self.prefix_name), self.prefix_name))
        if self.model_name:
            items.append((model_url(self.collection_name), self.model_name))
        elif not self.prefix_name and 'is_grid' not in kwargs:
            items.append((model_url(self.collection_name), self.collection_name))
        if id and hasattr(fs.model, '__unicode__'):
            items.append((model_url(self.member_name, id=id), u'%s' % fs.model))
        elif id:
            items.append((model_url(self.member_name, id=id), id))
        if action in ('edit', 'new'):
            items.append((None, action))
        return items

    def render(self, format='html', **kwargs):
        """render the form as html or json"""
        # Non-HTML formats dispatch to render_<format>_format when defined.
        if format != 'html':
            meth = getattr(self, 'render_%s_format' % format, None)
            if meth is not None:
                return meth(**kwargs)
            else:
                abort(404)
        kwargs.update(model_name=self.model_name or self.member_name,
                      prefix_name=self.prefix_name,
                      collection_name=self.collection_name,
                      member_name=self.member_name,
                      breadcrumb=self.breadcrumb(**kwargs),
                      F_=get_translator())
        self.update_resources()
        if self.engine:
            return self.engine.render(self.template, **kwargs)
        else:
            return render(self.template, extra_vars=kwargs)

    def render_grid(self, format='html', **kwargs):
        """render the grid as html or json"""
        return self.render(format=format, is_grid=True, **kwargs)

    def render_json_format(self, fs=None, **kwargs):
        # Serialise the bound fieldset (plus extra kwargs) as JSON.
        response.content_type = 'text/javascript'
        if fs:
            try:
                fields = fs.jsonify()
            except AttributeError:
                # Fieldset without jsonify(): fall back to raw model values.
                fields = dict([(field.renderer.name, field.model_value) for field in fs.render_fields.values()])
            data = dict(fields=fields)
            pk = _pk(fs.model)
            if pk:
                data['item_url'] = model_url(self.member_name, id=pk)
        else:
            data = {}
        data.update(kwargs)
        return json.dumps(data)

    def render_xhr_format(self, fs=None, **kwargs):
        # Partial HTML for AJAX: a single field when ?field= is given,
        # otherwise the whole rendered fieldset.
        response.content_type = 'text/html'
        if fs is not None:
            if 'field' in request.GET:
                field_name = request.GET.get('field')
                fields = fs.render_fields
                if field_name in fields:
                    field = fields[field_name]
                    return field.render()
                else:
                    abort(404)
            return fs.render()
        return ''

    def get_page(self, **kwargs):
        """return a ``webhelpers.paginate.Page`` used to display ``Grid``.

        Default is::

            S = self.Session()
            query = S.query(self.get_model())
            kwargs = request.environ.get('pylons.routes_dict', {})
            return Page(query, page=int(request.GET.get('page', '1')), **kwargs)
        """
        S = self.Session()
        options = dict(collection=S.query(self.get_model()), page=int(request.GET.get('page', '1')))
        options.update(request.environ.get('pylons.routes_dict', {}))
        options.update(kwargs)
        collection = options.pop('collection')
        return Page(collection, **options)

    def get(self, id=None):
        """return correct record for ``id`` or a new instance.

        Default is::

            S = self.Session()
            model = self.get_model()
            if id:
                model = S.query(model).get(id)
            else:
                model = model()
            return model or abort(404)
        """
        S = self.Session()
        model = self.get_model()
        if id:
            model = S.query(model).get(id)
        return model or abort(404)

    def get_fieldset(self, id=None):
        """return a ``FieldSet`` object bound to the correct record for ``id``.

        Default is::

            fs = self.FieldSet(self.get(id))
            fs.engine = fs.engine or self.engine
            return fs
        """
        fs = self.FieldSet(self.get(id))
        fs.engine = fs.engine or self.engine
        return fs

    def get_add_fieldset(self):
        """return a ``FieldSet`` used for add form.

        Default is::

            fs = self.get_fieldset()
            for field in fs.render_fields.values():
                if field.is_readonly():
                    del fs[field.name]
            return fs
        """
        fs = self.get_fieldset()
        for field in fs.render_fields.values():
            if field.is_readonly():
                del fs[field.name]
        return fs

    def get_grid(self):
        """return a Grid object

        Default is::

            grid = self.Grid(self.get_model())
            grid.engine = self.engine
            self.update_grid(grid)
            return grid
        """
        grid = self.Grid(self.get_model())
        grid.engine = self.engine
        self.update_grid(grid)
        return grid

    def update_grid(self, grid):
        """Add edit and delete buttons to ``Grid``"""
        # Only inject the buttons when the grid has no ``edit`` field yet.
        try:
            grid.edit
        except AttributeError:
            def edit_link():
                return lambda item: '''
            <form action="%(url)s" method="GET" class="ui-grid-icon ui-widget-header ui-corner-all">
            <input type="submit" class="ui-grid-icon ui-icon ui-icon-pencil" title="%(label)s" value="%(label)s" />
            </form>
            ''' % dict(url=model_url('edit_%s' % self.member_name, id=_pk(item)),
                       label=get_translator()('edit'))
            def delete_link():
                return lambda item: '''
            <form action="%(url)s" method="POST" class="ui-grid-icon ui-state-error ui-corner-all">
            <input type="submit" class="ui-icon ui-icon-circle-close" title="%(label)s" value="%(label)s" />
            <input type="hidden" name="_method" value="DELETE" />
            </form>
            ''' % dict(url=model_url(self.member_name, id=_pk(item)),
                       label=get_translator()('delete'))
            grid.append(Field('edit', fatypes.String, edit_link()))
            grid.append(Field('delete', fatypes.String, delete_link()))
        grid.readonly = True

    def update_resources(self):
        """A hook to add some fanstatic resources"""
        pass

    def index(self, format='html', **kwargs):
        """REST api"""
        page = self.get_page()
        fs = self.get_grid()
        fs = fs.bind(instances=page)
        fs.readonly = True
        if format == 'json':
            values = []
            for item in page:
                pk = _pk(item)
                # Presumably points the grid fieldset at the current row
                # (internal FormAlchemy API).
                fs._set_active(item)
                value = dict(id=pk,
                             item_url=model_url(self.member_name, id=pk))
                if 'jqgrid' in request.GET:
                    # jqGrid wants a flat "cell" list headed by the pk.
                    fields = [_stringify(field.render_readonly()) for field in fs.render_fields.values()]
                    value['cell'] = [pk] + fields
                else:
                    value.update(dict([(field.key, field.model_value) for field in fs.render_fields.values()]))
                values.append(value)
            return self.render_json_format(rows=values,
                                           records=len(values),
                                           total=page.page_count,
                                           page=page.page)
        if 'pager' not in kwargs:
            pager = page.pager(**self.pager_args)
        else:
            pager = kwargs.pop('pager')
        return self.render_grid(format=format, fs=fs, id=None, pager=pager)

    def create(self, format='html', **kwargs):
        """REST api"""
        fs = self.get_add_fieldset()
        if format == 'json' and request.method == 'PUT':
            data = json.load(request.body_file)
        else:
            data = request.POST
        try:
            fs = fs.bind(data=data, session=self.Session())
        except:
            # non SA forms
            fs = fs.bind(self.get_model(), data=data, session=self.Session())
        if fs.validate():
            fs.sync()
            self.sync(fs)
            if format == 'html':
                if request.is_xhr:
                    response.content_type = 'text/plain'
                    return ''
                redirect(model_url(self.collection_name))
            else:
                # Rebind without data so the response reflects saved state.
                fs.rebind(fs.model, data=None)
                return self.render(format=format, fs=fs)
        return self.render(format=format, fs=fs, action='new', id=None)

    def delete(self, id, format='html', **kwargs):
        """REST api"""
        record = self.get(id)
        if record:
            S = self.Session()
            S.delete(record)
            S.commit()
        if format == 'html':
            if request.is_xhr:
                response.content_type = 'text/plain'
                return ''
            redirect(model_url(self.collection_name))
        return self.render(format=format, id=id)

    def show(self, id=None, format='html', **kwargs):
        """REST api"""
        fs = self.get_fieldset(id=id)
        fs.readonly = True
        return self.render(format=format, fs=fs, action='show', id=id)

    def new(self, format='html', **kwargs):
        """REST api"""
        fs = self.get_add_fieldset()
        fs = fs.bind(session=self.Session())
        return self.render(format=format, fs=fs, action='new', id=None)

    def edit(self, id=None, format='html', **kwargs):
        """REST api"""
        fs = self.get_fieldset(id)
        return self.render(format=format, fs=fs, action='edit', id=id)

    def update(self, id, format='html', **kwargs):
        """REST api"""
        fs = self.get_fieldset(id)
        # Raw JSON body only for genuine PUTs (not _method-tunnelled ones).
        if format == 'json' and request.method == 'PUT' and '_method' not in request.GET:
            data = json.load(request.body_file)
        else:
            data = request.POST
        fs = fs.bind(data=data)
        if fs.validate():
            fs.sync()
            self.sync(fs, id)
            if format == 'html':
                if request.is_xhr:
                    response.content_type = 'text/plain'
                    return ''
                redirect(model_url(self.member_name, id=id))
            else:
                # status=0 signals success to JSON clients.
                return self.render(format=format, fs=fs, status=0)
        if format == 'html':
            return self.render(format=format, fs=fs, action='edit', id=id)
        else:
            return self.render(format=format, fs=fs, status=1)
def RESTController(cls, member_name, collection_name):
    """Mix :class:`~formalchemy.ext.pylons.controller._RESTController`
    into ``cls``, binding ``member_name`` and ``collection_name`` on the
    resulting controller type."""
    attrs = dict(member_name=member_name, collection_name=collection_name)
    return type(cls.__name__, (cls, _RESTController), attrs)
class _ModelsController(_RESTController):
    """A RESTful Controller bound to more than one model. The ``model``
    and ``forms`` attributes can be a list of objects or a module"""

    engine = None
    model = forms = None
    # WSGI app serving the static resources shipped with formalchemy.
    _static_app = StaticURLParser(os.path.join(os.path.dirname(__file__), 'resources'))

    def Session(self):
        # NOTE(review): ``meta`` is not imported in this module — this
        # placeholder raises NameError unless subclasses override it or
        # a ``meta`` global is injected; confirm.
        return meta.Session

    def models(self, format='html', **kwargs):
        """Models index page"""
        models = self.get_models()
        return self.render(models=models, format=format)

    def static(self):
        """Serve static files from the formalchemy package"""
        return self._static_app(request.environ, self.start_response)

    def get_models(self):
        """return a dict containing all model names as key and url as value"""
        models = {}
        if isinstance(self.model, list):
            for model in self.model:
                key = model.__name__
                models[key] = model_url(self.collection_name, model_name=key)
        else:
            # ``model`` is a module: pick public attributes that are
            # either couchdb Documents or SQLAlchemy-mapped classes.
            for key, obj in self.model.__dict__.items():
                if not key.startswith('_'):
                    if Document is not None:
                        try:
                            if issubclass(obj, Document):
                                models[key] = model_url(self.collection_name, model_name=key)
                                continue
                        except:
                            pass
                    try:
                        class_mapper(obj)
                    except:
                        # Not mapped by SQLAlchemy: skip.
                        continue
                    if not isinstance(obj, type):
                        continue
                    models[key] = model_url(self.collection_name, model_name=key)
        return models

    def get_model(self):
        # Resolve the model matching the route's ``model_name``.
        if isinstance(self.model, list):
            for model in self.model:
                if model.__name__ == self.model_name:
                    return model
        elif hasattr(self.model, self.model_name):
            return getattr(self.model, self.model_name)
        abort(404)

    def get_fieldset(self, id):
        # Prefer a user-supplied fieldset named after the model.
        if self.forms and hasattr(self.forms, self.model_name):
            fs = getattr(self.forms, self.model_name)
            fs.engine = fs.engine or self.engine
            return id and fs.bind(self.get(id)) or fs
        return _RESTController.get_fieldset(self, id)

    def get_add_fieldset(self):
        # Prefer a user-supplied "<Model>Add" fieldset.
        if self.forms and hasattr(self.forms, '%sAdd' % self.model_name):
            fs = getattr(self.forms, '%sAdd' % self.model_name)
            fs.engine = fs.engine or self.engine
            return fs
        return self.get_fieldset(id=None)

    def get_grid(self):
        # Prefer a user-supplied "<Model>Grid" grid.
        model_name = self.model_name
        if self.forms and hasattr(self.forms, '%sGrid' % model_name):
            g = getattr(self.forms, '%sGrid' % model_name)
            g.engine = g.engine or self.engine
            g.readonly = True
            self.update_grid(g)
            return g
        return _RESTController.get_grid(self)
def ModelsController(cls, prefix_name, member_name, collection_name):
    """wrap a controller with :class:`~formalchemy.ext.pylons.controller._ModelsController`"""
    namespace = {
        'prefix_name': prefix_name,
        'member_name': member_name,
        'collection_name': collection_name,
    }
    return type(cls.__name__, (cls, _ModelsController), namespace)
|
{
"content_hash": "3d38ad2f583d47704c44b9aa875769c1",
"timestamp": "",
"source": "github",
"line_count": 468,
"max_line_length": 119,
"avg_line_length": 36.85897435897436,
"alnum_prop": 0.5403478260869565,
"repo_name": "FormAlchemy/formalchemy",
"id": "317210dfc525c153a7f9922c8e9efe15064f78b5",
"size": "17274",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "formalchemy/ext/pylons/controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6599"
},
{
"name": "HTML",
"bytes": "4787"
},
{
"name": "JavaScript",
"bytes": "155"
},
{
"name": "Mako",
"bytes": "8449"
},
{
"name": "Python",
"bytes": "428049"
},
{
"name": "Shell",
"bytes": "194"
}
],
"symlink_target": ""
}
|
from .client import *
from .log_stream import install as install_log_stream, uninstall as uninstall_log_stream

# See http://www.python.org/dev/peps/pep-0386/ for version numbering, especially NormalizedVersion
from distutils import version
# Rebind the name so ``pykeeper.version`` exposes the package version
# object rather than the distutils module itself.
version = version.LooseVersion('0.2.2-dev')
|
{
"content_hash": "e5370ff83839f74c59d69d8c1b15cdc3",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 98,
"avg_line_length": 40.714285714285715,
"alnum_prop": 0.7929824561403509,
"repo_name": "nkvoll/pykeeper",
"id": "5d06e9beb85a1b0fadc1cdc9432cafab8f17344e",
"size": "285",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pykeeper/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23315"
}
],
"symlink_target": ""
}
|
import inspect
import operator
from rest_framework import permissions
__all__ = ['ConditionalPermission', 'Condition', 'C', 'And', 'Or', 'Not']

# Sentinel meaning "no result reduced yet" — a unique object is used
# because ``None``/``False`` are legitimate permission results.
_NONE = object()
def _is_permission_factory(obj):
return inspect.isclass(obj) or inspect.isfunction(obj)
class ConditionalPermission(permissions.BasePermission):
    '''
    DRF permission that delegates to a boolean condition tree declared
    on the view.

    Example of usage:

    >>> class MyView(GinericView):
    >>>     permission_classes = (ConditionalPermission, )
    >>>     permission_condition = C(Perm1) | ~C(Perm2)

    Or you can just:

    >>> class MyView(GinericView):
    >>>     permission_classes = (C(Perm1) | ~C(Perm2), )

    '''
    permission_condition_field = 'permission_condition'

    def get_permission_condition(self, view):
        """Fetch the view's condition, wrapping a bare permission in a
        :class:`Condition` when needed."""
        raw = getattr(view, self.permission_condition_field, None)
        if raw and not isinstance(raw, Condition):
            return Condition(raw)
        return raw

    def has_object_permission(self, request, view, obj):
        """Evaluate the condition tree for object-level access."""
        condition = self.get_permission_condition(view)
        if condition:
            return condition.has_object_permission(request, view, obj)
        return False

    def has_permission(self, request, view):
        """Evaluate the condition tree for view-level access."""
        condition = self.get_permission_condition(view)
        if condition:
            return condition.has_permission(request, view)
        return False
class Condition(object):
    '''
    Provides a simple way to define complex and multi-depth
    (with logic operators) permissions tree.

    Example of usage:

    >>> cond = C(Perm1, Perm2) | C(Perm3, ~C(Perm4))

    It's same as:

    >>> cond = Or(And(Perm1, Perm2), And(Perm3, Not(Perm4)))

    Some advanced/exotic usage, it will reject access if 3 of 4 given
    permission will be evaluated to `True`:

    >>> cond1 = C(Perm1, Perm2, Perm3, Perm4,
    >>>           reduce_op=operator.add, lazy_until=3, negated=True)

    '''
    @classmethod
    def And(cls, *perms_or_conds):
        """Condition that is True only when all children are True."""
        return cls(reduce_op=operator.and_, lazy_until=False, *perms_or_conds)

    @classmethod
    def Or(cls, *perms_or_conds):
        """Condition that is True when any child is True."""
        return cls(reduce_op=operator.or_, lazy_until=True, *perms_or_conds)

    @classmethod
    def Not(cls, *perms_or_conds):
        """Condition whose reduced result is inverted."""
        return cls(negated=True, *perms_or_conds)

    def __init__(self, *perms_or_conds, **kwargs):
        # Children may be permission instances, permission classes or
        # functions (instantiated lazily), or nested Condition objects.
        self.perms_or_conds = perms_or_conds
        # Binary operator used to fold successive results together.
        self.reduce_op = kwargs.get('reduce_op', operator.and_)
        # Value that short-circuits the fold when reached (compared by
        # identity); None disables short-circuiting.
        self.lazy_until = kwargs.get('lazy_until', False)
        # When truthy, the final reduced result is inverted.
        self.negated = kwargs.get('negated')

    def evaluate_permissions(self, permission_name, *args, **kwargs):
        """Fold the named check (`has_permission` /
        `has_object_permission`) over all children with ``reduce_op``,
        short-circuiting at ``lazy_until`` and applying ``negated``."""
        reduced_result = _NONE
        for condition in self.perms_or_conds:
            if hasattr(condition, 'evaluate_permissions'):
                # Nested Condition: recurse.
                result = condition.evaluate_permissions(permission_name, *args, **kwargs)
            else:
                if _is_permission_factory(condition):
                    # Classes/functions are called to build the permission.
                    condition = condition()
                result = getattr(condition, permission_name)(*args, **kwargs)
            if reduced_result is _NONE:
                reduced_result = result
            else:
                reduced_result = self.reduce_op(reduced_result, result)
            # Identity (``is``) check: short-circuits only on exactly the
            # configured object (e.g. True/False), never on truthy/falsy
            # lookalikes, and never when lazy_until is None.
            if self.lazy_until is not None and self.lazy_until is reduced_result:
                break
        if reduced_result is not _NONE:
            return not reduced_result if self.negated else reduced_result
        # No children at all: deny access.
        return False

    def has_object_permission(self, request, view, obj):
        return self.evaluate_permissions('has_object_permission',
                                         request, view, obj)

    def has_permission(self, request, view):
        return self.evaluate_permissions('has_permission', request, view)

    def __or__(self, perm_or_cond):
        return self.Or(self, perm_or_cond)

    def __ior__(self, perm_or_cond):
        return self.Or(self, perm_or_cond)

    def __and__(self, perm_or_cond):
        return self.And(self, perm_or_cond)

    def __iand__(self, perm_or_cond):
        return self.And(self, perm_or_cond)

    def __invert__(self):
        return self.Not(self)

    def __call__(self):
        # Allows a Condition to appear directly in DRF's
        # ``permission_classes`` (which instantiates its entries).
        return self
# Define some shortcuts so trees read as C(P1) | And(P2, Not(P3)) etc.
(C, And, Or, Not) = (Condition, Condition.And, Condition.Or, Condition.Not)
|
{
"content_hash": "e8949e9cde80f168124129d5def9d8d3",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 89,
"avg_line_length": 32.83846153846154,
"alnum_prop": 0.6205200281096276,
"repo_name": "justanr/rest_condition",
"id": "df3f11c69ee5a5c470f92ea07a1ee41d50b3ad5d",
"size": "4316",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rest_condition/permissions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18760"
}
],
"symlink_target": ""
}
|
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import webhook_view
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
# Topic is simply the AppVeyor project name.
APPVEYOR_TOPIC_TEMPLATE = "{project_name}"

# Markdown body of the Zulip message describing a finished build.
APPVEYOR_MESSAGE_TEMPLATE = """
[Build {project_name} {build_version} {status}]({build_url}):
* **Commit**: [{commit_id}: {commit_message}]({commit_url}) by {committer_name}
* **Started**: {started}
* **Finished**: {finished}
""".strip()
@webhook_view("Appveyor")
@has_request_variables
def api_appveyor_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: Dict[str, Any] = REQ(argument_type="body"),
) -> HttpResponse:
    """Handle an incoming AppVeyor build notification and post it to Zulip."""
    topic = get_subject_for_http_request(payload)
    message = get_body_for_http_request(payload)
    check_send_webhook_message(request, user_profile, topic, message)
    return json_success()
def get_subject_for_http_request(payload: Dict[str, Any]) -> str:
    """Build the Zulip topic (the AppVeyor project name) from the payload."""
    project_name = payload["eventData"]["projectName"]
    return APPVEYOR_TOPIC_TEMPLATE.format(project_name=project_name)
def get_body_for_http_request(payload: Dict[str, Any]) -> str:
    """Render the message body from the webhook payload's ``eventData``."""
    event_data = payload["eventData"]
    # Map template keys to the corresponding eventData field names.
    field_names = {
        "project_name": "projectName",
        "build_version": "buildVersion",
        "status": "status",
        "build_url": "buildUrl",
        "commit_url": "commitUrl",
        "committer_name": "committerName",
        "commit_date": "commitDate",
        "commit_message": "commitMessage",
        "commit_id": "commitId",
        "started": "started",
        "finished": "finished",
    }
    data = {key: event_data[source] for key, source in field_names.items()}
    return APPVEYOR_MESSAGE_TEMPLATE.format(**data)
|
{
"content_hash": "98322ba9a478776f92b5747b1dc6d2ef",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 81,
"avg_line_length": 34.035714285714285,
"alnum_prop": 0.6836306400839455,
"repo_name": "hackerkid/zulip",
"id": "93574247d12e52c7311749a98e62b6bcb49e0ac2",
"size": "1906",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/webhooks/appveyor/view.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "397271"
},
{
"name": "Dockerfile",
"bytes": "2939"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "717106"
},
{
"name": "JavaScript",
"bytes": "3079595"
},
{
"name": "Perl",
"bytes": "398763"
},
{
"name": "Puppet",
"bytes": "71210"
},
{
"name": "Python",
"bytes": "6876664"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "119833"
},
{
"name": "TypeScript",
"bytes": "14645"
}
],
"symlink_target": ""
}
|
from setuptools import setup
import re
# Extract the version string from walls.py so it is defined in one place.
version = ''
with open('walls.py', 'r') as f:
    version = re.search(r'__version__\s*=\s*\'([\d.]+)\'', f.read()).group(1)

# Long description shown on PyPI: README followed by the changelog.
with open('README.rst') as f:
    readme = f.read()
with open('HISTORY.rst') as f:
    history = f.read()

setup(
    name='walls',
    version=version,
    author='Nick Frost',
    author_email='nickfrostatx@gmail.com',
    url='https://github.com/nickfrostatx/walls',
    description='Random Flickr wallpapers.',
    long_description=readme + '\n\n' + history,
    py_modules=['walls'],
    install_requires=[
        'flickrapi',
        'requests',
    ],
    extras_require={
        'testing': [
            'pytest',
            'pytest-cov',
            'pytest-pep8',
            'pytest-pep257',
        ],
    },
    entry_points={
        'console_scripts': 'walls=walls:main'
    },
    license='MIT',
    keywords='walls',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
)
|
{
"content_hash": "4599ab393f455a3598a26a20fcf11829",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 77,
"avg_line_length": 28.18867924528302,
"alnum_prop": 0.5568942436412316,
"repo_name": "nickfrostatx/walls",
"id": "9b95b274fbb22668b32666e6705f7358382ac3c7",
"size": "1519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "166"
},
{
"name": "Python",
"bytes": "14425"
}
],
"symlink_target": ""
}
|
"""Tensor Expressions for the NPU"""
from .convolution import *
from .depthwise import *
from .pooling import *
from .binary_elementwise import *
from .identity import *
from .unary_elementwise import *
from .inline import *
|
{
"content_hash": "ee02bd564eb0e409d3c8d561b52b87eb",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 36,
"avg_line_length": 25.11111111111111,
"alnum_prop": 0.7522123893805309,
"repo_name": "Laurawly/tvm-1",
"id": "2ede967a036caac417e749e8762a2d88e4675388",
"size": "1011",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/tvm/relay/backend/contrib/ethosu/te/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4093"
},
{
"name": "C",
"bytes": "351611"
},
{
"name": "C++",
"bytes": "11660999"
},
{
"name": "CMake",
"bytes": "228510"
},
{
"name": "Cuda",
"bytes": "16902"
},
{
"name": "Cython",
"bytes": "28979"
},
{
"name": "Go",
"bytes": "111527"
},
{
"name": "HTML",
"bytes": "2664"
},
{
"name": "Java",
"bytes": "199950"
},
{
"name": "JavaScript",
"bytes": "15305"
},
{
"name": "Makefile",
"bytes": "67149"
},
{
"name": "Objective-C",
"bytes": "24259"
},
{
"name": "Objective-C++",
"bytes": "87655"
},
{
"name": "Python",
"bytes": "16256580"
},
{
"name": "RenderScript",
"bytes": "1895"
},
{
"name": "Rust",
"bytes": "391076"
},
{
"name": "Shell",
"bytes": "228674"
},
{
"name": "TypeScript",
"bytes": "94385"
}
],
"symlink_target": ""
}
|
"""
In this module we implement "python -m bohrium --info"
"""
import argparse
from . import stack_info
from ._bh_api import sanity_check
# Command-line interface: ``python -m bohrium_api [--info] [--no-check]``.
parser = argparse.ArgumentParser(description='Check and retrieve info on the Bohrium API installation.')
parser.add_argument(
    '--info',
    action="store_true",
    default=False,
    help='Print Runtime Info'
)
parser.add_argument(
    '--no-check',
    action="store_true",
    default=False,
    help='Skip installation check'
)
args = parser.parse_args()

if args.info:
    # Pretty-print the runtime stack configuration.
    print(stack_info.pprint())

if not args.no_check:
    if sanity_check():
        print("Installation check succeeded")
    else:
        print("Installation check failed")
        # Non-zero exit status signals failure to callers/CI.
        exit(-1)
|
{
"content_hash": "a546451e9c4baae6f42369e48b01d33b",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 104,
"avg_line_length": 22.903225806451612,
"alnum_prop": 0.6704225352112676,
"repo_name": "madsbk/bohrium",
"id": "1fd79693c9b6382872bf018aff308c7af2f2eebb",
"size": "756",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bridge/py_api/bohrium_api/__main__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "142233"
},
{
"name": "C++",
"bytes": "1117864"
},
{
"name": "CMake",
"bytes": "89697"
},
{
"name": "Dockerfile",
"bytes": "5940"
},
{
"name": "Python",
"bytes": "565707"
},
{
"name": "Shell",
"bytes": "2173"
},
{
"name": "Smarty",
"bytes": "32819"
}
],
"symlink_target": ""
}
|
"""
[env]
# Conda Environment
conda create --name nft_insights python=3.9.7
conda info --envs
source activate nft_insights
conda deactivate
# if needed to remove
conda env remove -n [NAME_OF_THE_CONDA_ENVIRONMENT]
[path]
cd /Users/brunoflaven/Documents/03_git/BlogArticlesExamples/nft_insights/002_NFT_generator_streamlit/
[file]
streamlit run streamlit_nft_generator_2.py
[source]
https://github.com/yoyoismee/NFT-generator
[required]
# install
pip install pillow
pip install ipython
pip install streamlit
pip install -r requirements.txt
# show what the requirements
pip freeze > streamlit_nft_generator_1.txt
pip install -r streamlit_nft_generator_1.txt
# SOURCE
/Users/brunoflaven/Documents/01_work/blog_articles/nft_insights/002_NFT_generator_streamlit/source_3
/Users/brunoflaven/Documents/01_work/blog_articles/nft_insights/010_frida_kahlo_project/fkp
# GENERATED
/Users/brunoflaven/Documents/01_work/blog_articles/nft_insights/002_NFT_generator_streamlit/generated_5
"""
import streamlit as st
import os
from generator import NFTGenerator
from pathlib import Path
# personal configuration
import config_values.values_conf as conf

### 2. VALUES ###
# UI strings and source/destination paths pulled from the config module.
TEXT_TITLE_APP = conf.TEXT_TITLE_APP
TEXT_SUBHEADER_APP = conf.TEXT_SUBHEADER_APP
APP_TEXT_HELP_1 = conf.APP_TEXT_HELP_1
PATH_TO_SOURCE = conf.PATH_TO_SOURCE
PATH_TO_DESTINATION = conf.PATH_TO_DESTINATION
def detectVersion():
    """Render a separator plus the installed streamlit version in the sidebar."""
    sidebar = st.sidebar
    for fragment in ('* * *', '**VERSIONS**'):
        sidebar.markdown(fragment)
    sidebar.write("streamlit ::", st.__version__)
def main():
    """Streamlit app that generates NFT images from layered trait images.

    Renders a form asking for a source (traits) directory, an output
    directory and the number of NFTs to produce, then drives
    :class:`NFTGenerator` with a progress bar.
    """
    ### 2. INTRO ###
    st.title(f'{TEXT_TITLE_APP}')
    st.warning(f'{TEXT_SUBHEADER_APP}')
    st.info(f"{APP_TEXT_HELP_1}")

    with st.form("nft_form_generator"):
        input_dir = st.text_input(f'1. Define the SOURCE DIRECTORY for traits',
                                  placeholder=PATH_TO_SOURCE, help='Define the source directory for traits')
        st.caption(PATH_TO_SOURCE)

        output_dir = st.text_input(f'2. Define the OUTPUT DIRECTORY for NFTs (Create a new directory each time or remove) it before',
                                   placeholder=PATH_TO_DESTINATION, help='Define the output directory for NFTs')
        st.caption(PATH_TO_DESTINATION)

        amount = st.slider('3. Number of NFTs', 1, 100, 5, 1)
        submit_button = st.form_submit_button(label='Generate NFTs')
        unique = st.checkbox("Yes, I want my NFT to be unique from each other.",
                             help='Rarity involved')

    if submit_button:
        # Make sure the output directory exists before writing images into it.
        p = Path(output_dir)
        p.mkdir(parents=True, exist_ok=True)
        the_bar = st.progress(0)
        # launch the generator
        nft_generator = NFTGenerator(input_dir=input_dir, unique=unique)
        for i in range(amount):
            the_bar.progress((i + 1) / amount)
            nft_generator.generate(save_path=output_dir, file_name=i)
        # end
        st.success("DONE!")
        # BUG FIX: user-facing typo "have bee created" -> "have been created".
        st.markdown(f'**'+str(amount)+'** have been created. You can check out at the path below')
        st.markdown(f'*{p.absolute()}*')
if __name__ == "__main__":
    # Run the Streamlit app, then append version info to the sidebar.
    main()
    detectVersion()
|
{
"content_hash": "f42b0f2f55dbf8c91846957e7d679155",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 133,
"avg_line_length": 28.34426229508197,
"alnum_prop": 0.6471949103528051,
"repo_name": "bflaven/BlogArticlesExamples",
"id": "f6c5348083c4b3946604a0078c346fc2c76686b5",
"size": "3502",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nft_insights/002_NFT_generator_streamlit/streamlit_nft_generator_2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AppleScript",
"bytes": "2756"
},
{
"name": "CSS",
"bytes": "3497"
},
{
"name": "CoffeeScript",
"bytes": "1785"
},
{
"name": "Dockerfile",
"bytes": "993"
},
{
"name": "HTML",
"bytes": "23687927"
},
{
"name": "JavaScript",
"bytes": "12838"
},
{
"name": "Jupyter Notebook",
"bytes": "2918640"
},
{
"name": "Makefile",
"bytes": "4058"
},
{
"name": "PHP",
"bytes": "223161"
},
{
"name": "Python",
"bytes": "1461699"
},
{
"name": "Shell",
"bytes": "12291"
}
],
"symlink_target": ""
}
|
from common.dbsqlite import DBsqLite
from common.requestserver import RequestAgentManager
from common.configmanager import ConfigManager
from protocol.proto_clusterRequestSetNTPInfo_pb2 import clusterRequestSetNTPInfo
from protocol.proto_clusterRequestSNMP_pb2 import clusterRequestSNMP
from alarm import AlarmObject
from systemlog import SystemLogObject
from common.TCPConnection import TCPConnection
from common.configmanager import ParseIniFile
from protocol import proto_reponseProtoBufInfo_pb2
from protocol import proto_requestProtoBufInfo_pb2
class GeneralObject(object):
    """Cluster administration helpers (Python 2): query/set cluster time,
    NTP and SNMP settings on agent hosts listed in config.ini.

    Most methods follow the same request pattern: read the host list and
    auth key from config.ini, open a TCP connection to the agent, send an
    auth-key-prefixed protobuf request identified by ``sReqCode``, then
    parse the protobuf response.  ``sRetMessage == "1"`` means success.
    """

    def get_host(self):
        """Return [{'ip': ..., 'status': ''}] for every host in the
        [cluster] section of config.ini."""
        #init parm
        host_list = []
        #read config.ini [TFSMaster]ip port [Cluster]ip port
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        IPList = cParseConfig.all_options("cluster")
        for unit in IPList:
            temp_dict = {}
            temp_dict["ip"] = unit["opt"]
            temp_dict["status"] = ""
            host_list.append(temp_dict)
        #return
        return host_list

    def get_cluster_time(self):
        """Query every cluster agent (request code 601) for its local
        time; unreachable hosts are reported with time 'lost'."""
        #init parm
        cluster_time_list = []
        #read config.ini [TFSMaster]ip port [Cluster]ip port
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        IPList = cParseConfig.all_options("cluster")
        print "IPlist =", len(IPList)
        for unit in IPList:
            AgentIP, AgentPort = unit["opt"], unit["value"]
            #connect Agent
            print "conIP,conPort", AgentIP, ":", AgentPort
            RequestAgent = TCPConnection(AgentIP, int(AgentPort))
            if not RequestAgent.connect():
                # Host unreachable: mark it and move on.
                temp_dict = {}
                temp_dict["ip"] = AgentIP
                temp_dict["time"] = "lost"
                cluster_time_list.append(temp_dict)
                continue
            #pack
            pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
            pRequest.sReqCode = "601"
            print "sReqCode = 601"
            Data = pRequest.SerializeToString()
            print "sendlen", len(Data)
            #send
            SendData = auth_key + Data
            RequestAgent.send(SendData)
            #recv
            RecvData = RequestAgent.accept()
            print "RecvData len =", len(RecvData)
            #unpack
            pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
            pResponseFromAgent.ParseFromString(RecvData)
            print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
            #return
            for unit in pResponseFromAgent.respntpNTPInfo:
                temp_dict = {}
                temp_dict["ip"] = AgentIP
                temp_dict["time"] = unit.sTime
                cluster_time_list.append(temp_dict)
        return cluster_time_list

    def set_cluster_time(self, c_date, c_time):
        """Push the given date/time string (request code 602) to every
        cluster agent.  Returns "1" on completion, 0 if any host is
        unreachable (remaining hosts are then skipped)."""
        #init parm
        print "Into set_cluster_time"
        #read config.ini [TFSMaster]ip port [Cluster]ip port
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        IPList = cParseConfig.all_options("cluster")
        print "IPlist =", len(IPList)
        for unit in IPList:
            AgentIP, AgentPort = unit["opt"], unit["value"]
            #connect Agent
            print "conIP,conPort", AgentIP, ":", AgentPort
            RequestAgent = TCPConnection(AgentIP, int(AgentPort))
            if not RequestAgent.connect():
                # NOTE(review): returns int 0 here but string "1" on
                # success — callers must handle both types; verify.
                return 0
            #pack
            pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
            pRequest.sReqCode = "602"
            Param = pRequest.reqntppNTPParam.add()
            Param.sTime = "%s %s" % (c_date, c_time)
            print "sReqCode = 602"
            Data = pRequest.SerializeToString()
            print "sendlen", len(Data)
            #send
            SendData = auth_key + Data
            RequestAgent.send(SendData)
            #recv
            RecvData = RequestAgent.accept()
            print "RecvData len =", len(RecvData)
            #unpack
            pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
            pResponseFromAgent.ParseFromString(RecvData)
            print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
            #return
            print AgentIP, "timeset is ", pResponseFromAgent.sRetMessage
        return "1"

    def get_ntp_status(self, AgentIP):
        """Ask one agent (request code 605) whether its NTP service is
        running.  Returns True when the agent replies "1"."""
        # init param
        print "Into get_ntp_status"
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        AgentPort = cParseConfig.get_value_now("cluster", AgentIP)
        #connect Agent
        print "conIP,conPort", AgentIP, ":", AgentPort
        RequestAgent = TCPConnection(AgentIP, int(AgentPort))
        RequestAgent.connect()
        #pack
        pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
        pRequest.sReqCode = "605"
        print "sReqCode = 605"
        Data = pRequest.SerializeToString()
        print "sendlen", len(Data)
        #send
        SendData = auth_key + Data
        RequestAgent.send(SendData)
        #recv
        RecvData = RequestAgent.accept()
        print "RecvData len =", len(RecvData)
        #unpack
        pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
        pResponseFromAgent.ParseFromString(RecvData)
        print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
        # return
        # cmp(...) == 0 means the strings are equal (Python 2 builtin).
        if not cmp(pResponseFromAgent.sRetMessage, "1"):
            return True
        return False

    def set_ntp_status(self, AgentIP, CMD):
        """Start/stop one agent's NTP service (request code 606, action
        given by *CMD*).  Returns True when the agent replies "1"."""
        # init param
        print "Into get_ntp_status"
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        AgentPort = cParseConfig.get_value_now("cluster", AgentIP)
        #connect Agent
        print "conIP,conPort", AgentIP, ":", AgentPort
        RequestAgent = TCPConnection(AgentIP, int(AgentPort))
        RequestAgent.connect()
        #pack
        pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
        pRequest.sReqCode = "606"
        pRequest.sCMD = CMD
        print "sReqCode = 606"
        Data = pRequest.SerializeToString()
        print "sendlen", len(Data)
        #send
        SendData = auth_key + Data
        RequestAgent.send(SendData)
        #recv
        RecvData = RequestAgent.accept()
        print "RecvData len =", len(RecvData)
        #unpack
        pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
        pResponseFromAgent.ParseFromString(RecvData)
        print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
        # return
        if not cmp(pResponseFromAgent.sRetMessage, "1"):
            return True
        return False

    def get_ntp_info(self, AgentIP):
        """Fetch one agent's configured NTP servers (request code 603).
        Returns [{'ntp_ip': ...}, ...]."""
        #init parm
        print "Into get_ntp_info"
        List = []
        #read config.ini [TFSMaster]ip port [Cluster]ip port
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        AgentPort = cParseConfig.get_value_now("cluster", AgentIP)
        #connect Agent
        print "conIP,conPort", AgentIP, ":", AgentPort
        RequestAgent = TCPConnection(AgentIP, int(AgentPort))
        RequestAgent.connect()
        #pack
        pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
        pRequest.sReqCode = "603"
        print "sReqCode = 603"
        Data = pRequest.SerializeToString()
        print "sendlen", len(Data)
        #send
        SendData = auth_key + Data
        RequestAgent.send(SendData)
        #recv
        RecvData = RequestAgent.accept()
        print "RecvData len =", len(RecvData)
        #unpack
        pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
        pResponseFromAgent.ParseFromString(RecvData)
        print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
        #return
        for unit in pResponseFromAgent.respntpNTPInfo:
            ntp_dict = {}
            ntp_dict["ntp_ip"] = unit.sIP
            List.append(ntp_dict)
        return List

    def add_ntp_info(self, AgentIP, user_name, ntp_addr):
        """Add an NTP server address on one agent (request code 604) and
        record a system-log entry for *user_name*.  Returns True when
        the agent replies "1"."""
        #init parm
        print "Into set_ntp_info"
        #read config.ini [TFSMaster]ip port [Cluster]ip port
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        AgentPort = cParseConfig.get_value_now("cluster", AgentIP)
        #connect Agent
        print "conIP,conPort", AgentIP, ":", AgentPort
        RequestAgent = TCPConnection(AgentIP, int(AgentPort))
        RequestAgent.connect()
        # pack
        pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
        pRequest.sReqCode = "604"
        Param = pRequest.reqntppNTPParam.add()
        Param.sIP = ntp_addr
        print "sReqCode = 604"
        Data = pRequest.SerializeToString()
        print "sendlen", len(Data)
        # send
        SendData = auth_key + Data
        RequestAgent.send(SendData)
        #recv
        RecvData = RequestAgent.accept()
        print "RecvData len =", len(RecvData)
        #unpack
        pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
        pResponseFromAgent.ParseFromString(RecvData)
        print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
        # return
        # NOTE(review): the audit log entry is written even when the
        # agent reports failure — confirm this is intended.
        SystemLogObject().insert_log(user_name, 6, u"设置NTP Server")
        if not cmp(pResponseFromAgent.sRetMessage, "1"):
            return True
        return False

    def request_agent_ntp(self, ntp_addr):
        """Broadcast an NTP-server update (message id 1003) to all hosts
        via the RequestAgentManager channel."""
        ip_list = self.get_host()
        cluster_request = clusterRequestSetNTPInfo()
        for ip in ip_list:
            req_info = cluster_request.reqInfo.reqInfo.add()
            # NOTE(review): ``ip`` is a dict here, so ''.join(ip) joins
            # its KEYS ("ip"/"status"), not the address — looks like a
            # bug; probably should be ip["ip"].  Verify against callers.
            req_info.sIp = ''.join(ip)
        ntp_info = cluster_request.reqNtpInfo.reqInfo.add()
        ntp_info.sIp = ntp_addr
        data = cluster_request.SerializeToString()
        cluster_request.Clear()
        request_agent = RequestAgentManager()
        request_agent.send(int(ConfigManager.getvalue("local", "auth_key"), 16), 1003, len(data), data)
        return request_agent.accept()

    def get_snmp_status(self, AgentIP):
        """Ask one agent (request code 1201) whether its SNMP service is
        running.  Returns True when the agent replies "1"."""
        # init param
        print "Into get_snmp_status"
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        AgentPort = cParseConfig.get_value_now("cluster", AgentIP)
        #connect Agent
        print "conIP,conPort", AgentIP, ":", AgentPort
        RequestAgent = TCPConnection(AgentIP, int(AgentPort))
        RequestAgent.connect()
        #pack
        pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
        pRequest.sReqCode = "1201"
        print "sReqCode = 1201"
        Data = pRequest.SerializeToString()
        print "sendlen", len(Data)
        #send
        SendData = auth_key + Data
        RequestAgent.send(SendData)
        #recv
        RecvData = RequestAgent.accept()
        print "RecvData len =", len(RecvData)
        #unpack
        pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
        pResponseFromAgent.ParseFromString(RecvData)
        print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
        # return
        if not cmp(pResponseFromAgent.sRetMessage, "1"):
            return True
        return False

    def set_snmp_status(self, AgentIP, CMD):
        """Start/stop one agent's SNMP service (request code 1202, action
        given by *CMD*).  Returns True when the agent replies "1"."""
        # init param
        print "Into set_snmp_status"
        cParseConfig = ParseIniFile("config.ini")
        cParseConfig.init()
        auth_key = cParseConfig.get_value_now("local", "auth_key")
        AgentPort = cParseConfig.get_value_now("cluster", AgentIP)
        #connect Agent
        print "conIP,conPort", AgentIP, ":", AgentPort
        RequestAgent = TCPConnection(AgentIP, int(AgentPort))
        RequestAgent.connect()
        #pack
        pRequest = proto_requestProtoBufInfo_pb2.protoRequest()
        pRequest.sReqCode = "1202"
        pRequest.sCMD = CMD
        print "sReqCode = 1202"
        Data = pRequest.SerializeToString()
        print "sendlen", len(Data)
        #send
        SendData = auth_key + Data
        RequestAgent.send(SendData)
        #recv
        RecvData = RequestAgent.accept()
        print "RecvData len =", len(RecvData)
        #unpack
        pResponseFromAgent = proto_reponseProtoBufInfo_pb2.protoResponse()
        pResponseFromAgent.ParseFromString(RecvData)
        print "Code =", pResponseFromAgent.sReqCode, "Mess =", pResponseFromAgent.sRetMessage
        # return
        if not cmp(pResponseFromAgent.sRetMessage, "1"):
            return True
        return False

    def get_snmp_info(self):
        """Read the single stored SNMP configuration row from sqlite.
        Returns {} when no row exists."""
        db_instance = DBsqLite()
        sql = "SELECT addr, username, password FROM t_snmp"
        data = db_instance.queryone(sql)
        db_instance.close()
        return_data = {}
        if data:
            return_data["addr"] = data[0]
            return_data["username"] = data[1]
            return_data["password"] = data[2]
        return return_data

    def commit_snmp_info(self, login_name, addr, username, password):
        """Persist SNMP settings: push them (message id 1007) to all
        hosts, then insert/update the sqlite row and audit-log the
        change.  Returns {'status': 1} on success, {'status': 0} on
        failure (or {} when the agent rejects the broadcast)."""
        data = self.get_snmp_info()
        db_instance = DBsqLite()
        sql = ""
        # Only touch the DB when something actually changed.
        if data:
            if data["addr"] != addr or data["username"] != username or data["password"] != password:
                # NOTE(review): values are interpolated directly into the
                # SQL text — injection risk if inputs are untrusted;
                # consider parameterized queries.
                sql = "UPDATE t_snmp set addr='%s', username='%s', password='%s'" % (addr, username, password)
        else:
            sql = "INSERT INTO t_snmp(addr, username, password) VALUES('%s', '%s', '%s')" % (addr, username, password)
        result = {}
        if sql:
            ip_list = self.get_host()
            cluster_request = clusterRequestSNMP()
            for ip in ip_list:
                req_info = cluster_request.reqInfo.reqInfo.add()
                # NOTE(review): as in request_agent_ntp, ``ip`` is a dict
                # so ''.join(ip) joins its keys — verify.
                req_info.sIp = ''.join(ip)
            cluster_request.snmp_ip = addr
            cluster_request.snmp_user = username
            cluster_request.snmp_pass = password
            # Forward the currently configured alarm thresholds too.
            alarm_list = AlarmObject().get_set_alarm()
            for alarm in alarm_list:
                if alarm[0] == "cpu":
                    cluster_request.cpu = alarm[1]
                if alarm[0] == "mem":
                    cluster_request.mem = alarm[1]
                if alarm[0] == "disk":
                    cluster_request.disk = alarm[1]
            data = cluster_request.SerializeToString()
            cluster_request.Clear()
            request_agent = RequestAgentManager()
            request_agent.send(int(ConfigManager.getvalue("local", "auth_key"), 16), 1007, len(data), data)
            if request_agent.accept():
                if db_instance.execute(sql) >= 0:
                    SystemLogObject().insert_log(login_name, 6, u"设置SMTP Server")
                    result["status"] = 1
                else:
                    result["status"] = 0
            # NOTE(review): when accept() is falsy, ``result`` stays {}
            # (no "status" key) — callers should handle that case.
            db_instance.close()
        else:
            result["status"] = 0
        return result
|
{
"content_hash": "957d20d662ef7ac7137e8bd63f75bcf8",
"timestamp": "",
"source": "github",
"line_count": 441,
"max_line_length": 118,
"avg_line_length": 35.04761904761905,
"alnum_prop": 0.5965967908902692,
"repo_name": "liugangabc/ccs_web",
"id": "dfa48d55d99237b470960f3c39147c78bd039ceb",
"size": "15553",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "logic/general.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "36"
},
{
"name": "CSS",
"bytes": "918252"
},
{
"name": "CoffeeScript",
"bytes": "4704"
},
{
"name": "Go",
"bytes": "6808"
},
{
"name": "HTML",
"bytes": "3438276"
},
{
"name": "JavaScript",
"bytes": "5338020"
},
{
"name": "Makefile",
"bytes": "460"
},
{
"name": "PHP",
"bytes": "53578"
},
{
"name": "Protocol Buffer",
"bytes": "11073"
},
{
"name": "Python",
"bytes": "226125"
},
{
"name": "Shell",
"bytes": "2025"
}
],
"symlink_target": ""
}
|
import scrapy
from lfcs_scraping.items import TimeItem
class TimeTravel(scrapy.Spider):
    """Spider that walks Wayback Machine snapshots of the LFCS people page.

    Starting from one archived snapshot, it records each snapshot's URL and
    capture date, then follows the link to the next snapshot in the chain.
    """

    name = 'timetravel'
    #allowed_domains = ['http://web.archive.org/']
    start_urls = [
        'http://web.archive.org/web/20160615173215/http://wcms.inf.ed.ac.uk/lfcs/people'
    ]

    def parse(self, response):
        # Emit one TimeItem for this snapshot, then recurse into the next one.
        snapshot = TimeItem()
        snapshot['url'] = response.url
        snapshot['date'] = response.xpath('//tr[@class="y"]/td[@class="c"]/text()').extract()[0]
        yield snapshot
        next_links = response.xpath('//tr[@class="y"]/td[@class="b"]/a/@href').extract()
        if next_links:
            yield scrapy.Request(response.urljoin(next_links[0]), callback=self.parse)
|
{
"content_hash": "0011ede2e633340760be1bac664a3709",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 92,
"avg_line_length": 33.714285714285715,
"alnum_prop": 0.5861581920903954,
"repo_name": "AWilcke/LFCS-History",
"id": "e3361c68a865ded9095a17c58d9d9f7b67cf1fa3",
"size": "708",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scraping/lfcs_scraping/spiders/timetravel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "790"
},
{
"name": "HTML",
"bytes": "87928"
},
{
"name": "JavaScript",
"bytes": "9236"
},
{
"name": "Python",
"bytes": "99394"
}
],
"symlink_target": ""
}
|
import hashlib
from struct import unpack, pack
import sys
from shared import config, frozen
import shared
#import os
def _set_idle():
    """Drop this process to the lowest scheduling priority.

    Linux: renice to 20.  Windows: switch to the IDLE priority class via
    pywin32.  Any failure on the Windows path (e.g. 64-bit builds without
    pywin32) is deliberately swallowed — lowering priority is best-effort.
    """
    if 'linux' in sys.platform:
        import os
        os.nice(20)  # @UndefinedVariable
        return
    try:
        sys.getwindowsversion()
        import win32api,win32process,win32con  # @UnresolvedImport
        proc_handle = win32api.OpenProcess(
            win32con.PROCESS_ALL_ACCESS, True, win32api.GetCurrentProcessId())
        win32process.SetPriorityClass(
            proc_handle, win32process.IDLE_PRIORITY_CLASS)
    except:
        # Windows 64-bit / missing pywin32: keep default priority.
        pass
def _pool_worker(nonce, initialHash, target, pool_size):
    """Pool worker: scan every pool_size-th nonce starting from `nonce`.

    Each of the pool_size workers starts at a distinct offset, so together
    they cover disjoint residue classes of the nonce space.  Returns
    [trial_value, winning_nonce] once the double-SHA512 prefix, read as a
    big-endian uint64, drops to `target` or below.
    """
    _set_idle()
    candidate = nonce
    trial = float('inf')
    while trial > target:
        candidate += pool_size
        digest = hashlib.sha512(
            hashlib.sha512(pack('>Q', candidate) + initialHash).digest()).digest()
        trial, = unpack('>Q', digest[:8])
    return [trial, candidate]
def _doSafePoW(target, initialHash):
nonce = 0
trialValue = float('inf')
while trialValue > target:
nonce += 1
trialValue, = unpack('>Q',hashlib.sha512(hashlib.sha512(pack('>Q',nonce) + initialHash).digest()).digest()[0:8])
return [trialValue, nonce]
def _doFastPoW(target, initialHash):
    """Multi-process proof of work.

    Spreads the nonce search over a Pool of workers (one per CPU, capped by
    the 'maxcores' setting).  Polls the async results every 0.2s and returns
    (trial_value, nonce) from whichever worker finishes first.  If a global
    shutdown is signalled, the pool is terminated and this thread parks
    forever instead of returning (see inline comment).
    """
    import time
    from multiprocessing import Pool, cpu_count
    try:
        pool_size = cpu_count()
    except:
        # cpu_count() can raise NotImplementedError; fall back to 4 workers.
        pool_size = 4
    try:
        maxCores = config.getint('bitmessagesettings', 'maxcores')
    except:
        # Setting absent/unparsable: effectively no cap.
        maxCores = 99999
    if pool_size > maxCores:
        pool_size = maxCores
    pool = Pool(processes=pool_size)
    result = []
    # Each worker starts at a different offset i and steps by pool_size,
    # so the workers scan disjoint slices of the nonce space.
    for i in xrange(pool_size):
        result.append(pool.apply_async(_pool_worker, args = (i, initialHash, target, pool_size)))
    while True:
        if shared.shutdown >= 1:
            pool.terminate()
            while True:
                time.sleep(10) # Don't let this thread return here; it will return nothing and cause an exception in bitmessagemain.py
            return
        for i in xrange(pool_size):
            if result[i].ready():
                # First finished worker wins; rebind `result` to its
                # [trial_value, nonce] pair and stop the others.
                result = result[i].get()
                pool.terminate()
                pool.join() #Wait for the workers to exit...
                return result[0], result[1]
        time.sleep(0.2)
def run(target, initialHash):
    """Entry point: run proof of work and return (trial_value, nonce).

    Source checkouts and the macOS app bundle use the multi-process
    implementation; other frozen builds fall back to the single-process
    one (presumably because multiprocessing misbehaves when frozen —
    inferred from the branch, not documented here).
    """
    target = int(target)
    multiprocess_ok = (frozen == "macosx_app") or (not frozen)
    if multiprocess_ok:
        return _doFastPoW(target, initialHash)
    return _doSafePoW(target, initialHash)
|
{
"content_hash": "05e96c4cfc85336e39d4b5ca1c172a58",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 134,
"avg_line_length": 33.28,
"alnum_prop": 0.610176282051282,
"repo_name": "onejob6800/minibm",
"id": "977abac1e431c0b78628aa62826f5663be49730e",
"size": "2569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bitmessage/proofofwork.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "272698"
}
],
"symlink_target": ""
}
|
import requests
from requests import exceptions
from solum.openstack.common.gettextutils import _
from solum.openstack.common import log as logging
from solum.openstack.common.py3kcompat import urlutils
LOG = logging.getLogger(__name__)
def get(url, max_size, chunk_size=None, allowed_schemes=('http', 'https')):
    """Get the data at the specified URL.

    The URL must use the http: or https: schemes.
    The file: scheme is also supported if you override
    the allowed_schemes argument.
    The max_size represents the total max byte of your file.
    The chunk_size is by default set at max_size, it represents the size
    of your chunk.

    Raise an IOError if getting the data fails and if max_size is exceeded.
    """
    LOG.info(_('Fetching data from %s') % url)

    components = urlutils.urlparse(url)

    if components.scheme not in allowed_schemes:
        raise IOError(_('Invalid URL scheme %s') % components.scheme)

    if chunk_size is None:
        chunk_size = max_size

    if max_size < 1:
        raise IOError("max_size should be greater than 0")

    if chunk_size < 1:
        raise IOError("chunk_size should be greater than 0")

    if components.scheme == 'file':
        try:
            return urlutils.urlopen(url).read()
        except urlutils.URLError as uex:
            raise IOError(_('Failed to read file: %s') % str(uex))

    try:
        resp = requests.get(url, stream=True)
        resp.raise_for_status()

        # We cannot use resp.text here because it would download the
        # entire file, and a large enough file would bring down the
        # engine. The 'Content-Length' header could be faked, so it's
        # necessary to download the content in chunks until max_size
        # is reached. The chunk_size we use needs to balance
        # CPU-intensive string concatenation with accuracy (eg. it's
        # possible to fetch 1000 bytes greater than max_size with a
        # chunk_size of 1000).
        #
        # Chunks are collected in a list and joined once at the end:
        # repeated `result += chunk` is quadratic in the number of
        # chunks, while a running length counter gives the same
        # size-limit semantics as checking len(result) after each
        # concatenation.
        chunks = []
        total = 0
        for chunk in resp.iter_content(chunk_size=chunk_size):
            total += len(chunk)
            if total > max_size:
                raise IOError("File exceeds maximum allowed size (%s "
                              "bytes)" % max_size)
            chunks.append(chunk)
        return "".join(chunks)
    except exceptions.RequestException as ex:
        raise IOError(_('Failed to retrieve file: %s') % str(ex))
|
{
"content_hash": "aaebe8541e7c878cb1dd4b704df543ed",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 75,
"avg_line_length": 37.03076923076923,
"alnum_prop": 0.6439551308683008,
"repo_name": "jamesyli/solum",
"id": "fe492549c14f2892fd09c91b3250c397817f5ef0",
"size": "2954",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "solum/common/urlfetch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import logging
import os
import time
import subprocess
from brkt_cli.aws import aws_service, boto3_device
log = logging.getLogger(__name__)
def share(aws_svc=None, logs_svc=None, instance_id=None, region=None,
          snapshot_id=None, dest=None, subnet_id=None, key=None,
          bast_key=None, bast_user=None, bast_ip=None):
    """Copy log files off an instance's root volume to local path *dest*.

    Unless snapshot_id is given, the target instance's root volume is
    snapshotted first.  The snapshot is then attached to a temporary
    Amazon Linux instance whose user-data script tars up ./log and ./crash
    from the mounted volume; the archive is finally scp'd down to *dest*.
    Temporary resources (generated key pair, helper instance, new snapshot)
    are cleaned up in the ``finally`` block.

    :param aws_svc: AWS service wrapper used for EC2 calls
    :param logs_svc: ShareLogsService doing key creation / scp polling
    :param instance_id: instance whose root volume holds the logs
    :param region: AWS region (selects the helper AMI)
    :param snapshot_id: existing snapshot to read instead of snapshotting
    :param dest: local destination path for the downloaded archive
    :param subnet_id: subnet for the helper instance
    :param key: existing key-pair name; if None a temporary one is created
    :param bast_key/bast_user/bast_ip: optional bastion-host scp settings
    """
    log.info('Sharing logs')
    snapshot = None
    new_instance = None
    key_name = None
    new_snapshot = False
    try:
        if not snapshot_id:
            # Get instance from ID
            instance = aws_svc.get_instance(instance_id)
            # Find name of the root device
            root_name = instance.root_device_name
            # Get root volume ID
            root_dev = boto3_device.get_device(
                instance.block_device_mappings, root_name)
            # Create a snapshot of the root volume
            snapshot = aws_svc.create_snapshot(
                volume_id=root_dev['Ebs']['VolumeId'],
                name="temp-logs-snapshot"
            )
            # Wait for snapshot to post
            log.info('Waiting for snapshot...')
            aws_service.wait_for_snapshots(aws_svc, snapshot.id)
            new_snapshot = True
        else: # Taking logs from a snapshot
            snapshot = aws_svc.get_snapshot(snapshot_id)
        # Split destination path name into path and file
        path, logs_file = os.path.split(dest)
        # Specifies volume to be attached to instance
        mv_disk = boto3_device.make_device(
            device_name='/dev/sdg',
            volume_type='gp2',
            snapshot_id=snapshot.id,
            delete_on_termination=True,
            volume_size=snapshot.volume_size
        )
        bdm = [mv_disk]
        # Images taken on 4/3/2017 from:
        # https://aws.amazon.com/amazon-linux-ami/
        IMAGES_BY_REGION = {
            "us-east-1": "ami-0b33d91d",
            "us-east-2": "ami-c55673a0",
            "us-west-1": "ami-165a0876",
            "us-west-2": "ami-f173cc91",
            "ap-south-1": "ami-f9daac96",
            "ap-northeast-2": "ami-dac312b4",
            "ap-southeast-1": "ami-dc9339bf",
            "ap-southeast-2": "ami-1c47407f",
            "ap-northeast-1": "ami-56d4ad31",
            "eu-central-1": "ami-af0fc0c0",
            "eu-west-1": "ami-70edb016",
            "eu-west-2": "ami-f1949e95",
        }
        image_id = IMAGES_BY_REGION[region]
        if not key:
            # name key_pair with a timestamp so repeat runs don't collide
            key_name = 'ShareLogsKey' + time.strftime("%Y%m%d%H%M")
            # generate new random key to use for scp if not given
            try:
                logs_svc.create_key(aws_svc.ec2client, path, key_name)
            except Exception:
                raise Exception('Failed creating new key. Use --key option or'
                                ' change IAM permissions in account')
        else:
            key_name = key
        # start up script for new instance: mount the (UFS) root snapshot
        # read-only and tar the log directories into /tmp for pickup
        amzn = '#!/bin/bash\n' + \
               'sudo mount -t ufs -o ro,ufstype=ufs2 /dev/xvdg4 /mnt\n' + \
               'sudo tar czvf /tmp/temp_logs -C /mnt ./log ./crash\n' + \
               'mv /tmp/temp_logs /tmp/%s' % (logs_file)
        # Launch new instance, with volume and startup script
        new_instance = aws_svc.ec2client.run_instances(
            ImageId=image_id, MinCount=1, MaxCount=1, InstanceType='m4.large',
            BlockDeviceMappings=bdm, UserData=amzn, EbsOptimized=False,
            SubnetId=subnet_id, KeyName=key_name)
        instance_id = new_instance['Instances'][0]['InstanceId']
        # wait for instance to launch
        log.info('Waiting for instance to launch')
        aws_service.wait_for_instance(aws_svc, instance_id)
        instance_ip = logs_svc.get_instance_ip(aws_svc.ec2, instance_id)
        # wait for file to download
        log.info('Waiting for file to download')
        logs_svc.wait_file(instance_ip, logs_file, dest, key_name, path,
                           bast_key, bast_user, bast_ip)
        log.info('Success! Logs can be found here: %s' % dest)
    finally:
        # delete only new instances, snapshots, and keys
        if not key:
            aws_svc.ec2client.delete_key_pair(KeyName=key_name)
            os.remove("%s/%s.pem" % (path, key_name))
        if new_snapshot and new_instance:
            aws_service.clean_up(aws_svc, instance_ids=[instance_id],
                                 snapshot_ids=[snapshot.id])
        if not new_snapshot and new_instance:
            aws_service.clean_up(aws_svc, instance_ids=[instance_id])
            new_snapshot = False
        if new_snapshot and not new_instance:
            aws_service.clean_up(aws_svc, snapshot_ids=[snapshot.id])
class ShareLogsService():
    """Helpers for key creation and scp-based retrieval of the log archive."""

    def wait_file(self, ip, logs_file, dest, key, path,
                  bast_key, bast_user, bast_ip):
        """Poll via scp (every 15s, up to 60 attempts) until the archive lands.

        scp fails until the helper instance's user-data script has produced
        /tmp/<logs_file>; each failure just triggers a retry.
        """
        for i in range(60):
            try:
                self.scp(ip, "/tmp/%s" % logs_file, dest, key, path,
                         bast_key, bast_user, bast_ip)
                return
            except subprocess.CalledProcessError:
                # File not ready (or host not reachable yet): wait and retry.
                time.sleep(15)
                pass
        log.error("Timed out waiting for file to download")

    def scp(self, external_ip, src, dest, key, path,
            bast_key, bast_user, bast_ip):
        """Copy *src* from the helper instance to local *dest*.

        Uses <path>/<key>.pem as identity; when bast_key is given, the
        connection is proxied through the bastion host via ProxyCommand.
        Raises subprocess.CalledProcessError on a non-zero scp exit.
        """
        sshflags = " ".join([
            "-o ServerAliveInterval=10",
            "-o UserKnownHostsFile=/dev/null",
            "-o StrictHostKeyChecking=no",
            "-o ConnectTimeout=5",
            "-o LogLevel=quiet",
        ])
        if bast_key:
            bastion = "ssh -i %s -W %%h:%%p %s@%s" % (bast_key,
                      bast_user, bast_ip)
            sshflags += " -o ProxyCommand='%s'" % bastion
        command = 'scp %s -i %s/%s.pem ec2-user@%s:%s %s >& /dev/null' % (
            sshflags, path, key, external_ip, src, dest)
        return subprocess.check_output(command, shell=True)

    def get_instance_ip(self, ec2, instance_id):
        """Wait (up to 40 x 5s) for the instance's public DNS name."""
        ip = None
        for i in range(40):
            instance = ec2.Instance(instance_id)
            ip = instance.public_dns_name
            if ip:
                return ip
            # NOTE(review): boto3's Instance.state is a dict of
            # {'Code': ..., 'Name': ...}; comparing it to the string
            # 'terminated' looks like it can never be true — verify whether
            # instance.state['Name'] was intended.
            if instance.state == 'terminated':
                raise Exception('Instance died on launch')
            time.sleep(5)
        log.error('Failed finding IP address for instance %s' % instance_id)

    def create_key(self, ec2client, dest, key):
        """Create an EC2 key pair named *key*; write its PEM under *dest*."""
        # create new key and put file in destination dir
        outfile = open("%s/%s.pem" % (dest, key), 'w')
        key_pair = ec2client.create_key_pair(KeyName=key)
        key_pair_out = str(key_pair['KeyMaterial'])
        outfile.write(key_pair_out)
        # change permissions on key file so ssh/scp will accept it
        subprocess.check_output("chmod 400 %s/%s.pem" %
                                (dest, key), shell=True)
|
{
"content_hash": "f189dd00390e2b02fbfb26be3ba6a6f1",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 78,
"avg_line_length": 37.04371584699454,
"alnum_prop": 0.5555391650685942,
"repo_name": "brkt/brkt-cli",
"id": "815a0d628d7f175e6fd42ce466fa2783fdfe5967",
"size": "7351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "brkt_cli/aws/share_logs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "722247"
},
{
"name": "Shell",
"bytes": "395"
}
],
"symlink_target": ""
}
|
import time
import hashlib
import logging
import simplejson as json
from rq import Queue
from rq import use_connection
from operator import itemgetter
import tornado.web
import tornado.ioloop
import tornado.options
from tornado.web import HTTPError
# Parse tornado command-line flags (logging setup etc.) at import time.
tornado.options.parse_command_line()
from tasks import judge
from config import SALT
from config import DAEMON_PORT
# Keys every judge submission must carry.  (The misspelled name
# "REQURED_ARGS" is kept as-is: it is referenced elsewhere in this module.)
REQURED_ARGS = ["code", "lang", "filename", "shortname", "timelimit", "memlimit", "testpoint", "sign", "time"]
# Use the default (local) Redis connection for the RQ work queue.
use_connection()
class ReceiveQueryHandler(tornado.web.RequestHandler):
    """Accept signed judge submissions and enqueue them on the 'judge' queue."""

    def post(self):
        """Validate a signed JSON query and enqueue a judge task.

        Responds 404 when the query is missing, lacks a required key, is
        timestamped more than 5 minutes from now, or has a bad signature.
        """
        query = self.get_argument("query", default = None)
        if not query:
            logging.error("Invalid Post Query.")
            raise HTTPError(404)
        query = json.loads(query)
        for key in REQURED_ARGS:
            if key not in query.keys():
                logging.error("Require key %s is missing" % key)
                raise HTTPError(404)
        query_sign = query["sign"]
        query.pop("sign")
        now = time.time()
        # Replay protection: reject stale or future-dated requests.
        if abs(now - query["time"]) > 300:
            logging.error("Time is invalid!")
            raise HTTPError(404)
        # Canonicalize ordering (sorted by value) before re-serializing,
        # so both sides sign the same JSON string.
        query = dict(sorted(query.iteritems(), key=itemgetter(1)))
        sign = hashlib.sha1(json.dumps(query) + SALT).hexdigest()
        # NOTE(review): plain == is not a constant-time comparison; consider
        # hmac.compare_digest for the signature check.
        if not sign == query_sign:
            logging.error("Signature is invalid")
            raise HTTPError(404)
        q = Queue('judge')
        q.enqueue(judge, query)
# Single-route application: every POST to "/" is a judge submission.
application = tornado.web.Application([
    (r"/", ReceiveQueryHandler),
])

if __name__ == "__main__":
    # Listen on the configured daemon port and run the IOLoop forever.
    application.listen(int(DAEMON_PORT))
    tornado.ioloop.IOLoop.instance().start()
|
{
"content_hash": "e2cbcde3393e16015dd2cc1cb6ed4747",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 110,
"avg_line_length": 30.796296296296298,
"alnum_prop": 0.634996993385448,
"repo_name": "ptphp/PyLib",
"id": "c22be35611cc0d6ff70f9c65764901b4f62fa927",
"size": "1816",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/tornado/demos/Vulpix-master/daemons-rq/daemons.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1523"
},
{
"name": "C++",
"bytes": "7541"
},
{
"name": "CSS",
"bytes": "625731"
},
{
"name": "JavaScript",
"bytes": "4811257"
},
{
"name": "PHP",
"bytes": "34868"
},
{
"name": "Python",
"bytes": "3824172"
},
{
"name": "Ruby",
"bytes": "322"
},
{
"name": "SQL",
"bytes": "685656"
},
{
"name": "Shell",
"bytes": "4143"
}
],
"symlink_target": ""
}
|
def printSomething(myString):
    """Write *myString* (followed by a newline) to standard output."""
    print(myString)
|
{
"content_hash": "1b1f5afe6107e1261b9c9eb6c9729743",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 29,
"avg_line_length": 24.5,
"alnum_prop": 0.7755102040816326,
"repo_name": "digicatech/python_learn",
"id": "5b030ae5e4578561f2663d84229a8c216e7c3140",
"size": "106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/mymodule.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7993"
}
],
"symlink_target": ""
}
|
from corehq.apps.sms.forms import BackendForm
from dimagi.utils.django.fields import TrimmedCharField
from crispy_forms import layout as crispy
from django.utils.translation import ugettext_lazy as _
class GrapevineBackendForm(BackendForm):
    """Backend-settings form for the Grapevine SMS gateway.

    Adds the two Grapevine-specific credential fields on top of the common
    BackendForm fields.
    """
    affiliate_code = TrimmedCharField(
        label=_("Affiliate Code"),
    )
    authentication_code = TrimmedCharField(
        label=_("Authentication Code"),
    )

    @property
    def gateway_specific_fields(self):
        """Crispy-forms layout grouping the Grapevine-only fields."""
        return crispy.Fieldset(
            _("Grapevine Settings"),
            'affiliate_code',
            'authentication_code',
        )
|
{
"content_hash": "fb746f43558bfe01f2a6a99d3caa2407",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 55,
"avg_line_length": 24.6,
"alnum_prop": 0.6715447154471544,
"repo_name": "qedsoftware/commcare-hq",
"id": "539e0f7ca2e42e675bc8794c226840d71436a88c",
"size": "615",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "corehq/messaging/smsbackends/grapevine/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
}
|
"""NDArray configuration API."""
import ctypes
from ..base import _LIB
from ..base import c_str_array, c_handle_array
from ..base import NDArrayHandle, CachedOpHandle
from ..base import check_call
def _monitor_callback_wrapper(callback):
"""A wrapper for the user-defined handle."""
def callback_handle(name, opr_name, array, _):
""" ctypes function """
callback(name, opr_name, array)
return callback_handle
class NDArrayBase(object):
    """Base data structure for ndarray"""
    __slots__ = ["handle", "writable"]
    # pylint: disable= no-member

    def __init__(self, handle, writable=True):
        """initialize a new NDArray

        Parameters
        ----------
        handle : NDArrayHandle
            NDArray handle of C API
        writable : bool, optional
            Whether the underlying array may be written to (default True).
        """
        if handle is not None:
            assert isinstance(handle, NDArrayHandle)
        self.handle = handle
        self.writable = writable

    def __del__(self):
        # Free the C-side array when the Python wrapper is collected.
        check_call(_LIB.MXNDArrayFree(self.handle))

    def __reduce__(self):
        # Pickle support: recreate via _ndarray_cls and restore state.
        # NOTE(review): relies on __getstate__ being supplied by a subclass;
        # none is defined on this base class.
        return (_ndarray_cls, (None,), self.__getstate__())
# Concrete wrapper classes, registered later via _set_ndarray_class /
# _set_np_ndarray_class (deferred registration avoids a circular import).
_ndarray_cls = None
_np_ndarray_cls = None
def _set_ndarray_class(cls):
    """Register *cls* as the class used to wrap classic NDArray handles."""
    global _ndarray_cls
    _ndarray_cls = cls
def _set_np_ndarray_class(cls):
    """Register *cls* as the class used to wrap numpy-semantics ndarray handles."""
    global _np_ndarray_cls
    _np_ndarray_cls = cls
def _imperative_invoke(handle, ndargs, keys, vals, out, is_np_op, output_is_list):
    """ctypes implementation of imperative invoke wrapper

    Parameters
    ----------
    handle
        Operator handle, passed as a void pointer to MXImperativeInvokeEx.
    ndargs : list of NDArray
        Positional ndarray inputs.
    keys, vals
        Parallel lists of attribute names and values (values are stringified).
    out : NDArray, tuple of NDArray, or None
        Pre-allocated output(s); when given, it is returned unchanged.
    is_np_op : bool
        Wrap results with the numpy ndarray class instead of the classic one.
    output_is_list : bool
        Force a list return even when there is a single output.
    """
    if out is not None:
        original_output = out
        if isinstance(out, NDArrayBase):
            out = (out,)
        num_output = ctypes.c_int(len(out))
        output_vars = c_handle_array(out)
        output_vars = ctypes.cast(output_vars, ctypes.POINTER(NDArrayHandle))
    else:
        original_output = None
        output_vars = ctypes.POINTER(NDArrayHandle)()
        num_output = ctypes.c_int(0)

    # return output stypes to avoid the c_api call for checking
    # a handle's stype in _ndarray_cls
    out_stypes = ctypes.POINTER(ctypes.c_int)()

    check_call(_LIB.MXImperativeInvokeEx(
        ctypes.c_void_p(handle),
        ctypes.c_int(len(ndargs)),
        c_handle_array(ndargs),
        ctypes.byref(num_output),
        ctypes.byref(output_vars),
        ctypes.c_int(len(keys)),
        c_str_array(keys),
        c_str_array([str(s) for s in vals]),
        ctypes.byref(out_stypes)))
    create_ndarray_fn = _np_ndarray_cls if is_np_op else _ndarray_cls
    if original_output is not None:
        return original_output
    if num_output.value == 1 and not output_is_list:
        return create_ndarray_fn(ctypes.cast(output_vars[0], NDArrayHandle),
                                 stype=out_stypes[0])
    else:
        return [create_ndarray_fn(ctypes.cast(output_vars[i], NDArrayHandle),
                                  stype=out_stypes[i]) for i in range(num_output.value)]
class CachedOp(object):
    """Cached operator handle.

    Wraps MXCreateCachedOpEx / MXInvokeCachedOpEx so a symbol can be
    executed repeatedly without re-parsing it on every call.
    """
    __slots__ = ["handle", "is_np_sym", "_monitor_callback"]

    def __init__(self, sym, flags=()):
        """Create a cached op from *sym* with optional (key, value) flags."""
        self.handle = CachedOpHandle()
        # Kept as an attribute so ctypes does not garbage-collect the hook
        # while the C side still holds a pointer to it.
        self._monitor_callback = None

        from ..symbol.numpy._symbol import _Symbol
        self.is_np_sym = bool(isinstance(sym, _Symbol))

        check_call(_LIB.MXCreateCachedOpEx(
            sym.handle,
            len(flags),
            c_str_array([key for key, _ in flags]),
            c_str_array([str(val) for _, val in flags]),
            ctypes.byref(self.handle)))

    def __del__(self):
        # Release the C-side cached op with the wrapper.
        check_call(_LIB.MXFreeCachedOp(self.handle))

    def __call__(self, *args, **kwargs):
        """ctypes implementation of imperative invoke wrapper

        Positional *args* are the input NDArrays; the only recognized
        keyword is ``out`` (a pre-allocated NDArray or tuple of NDArrays,
        returned unchanged when supplied).
        """
        out = kwargs.pop('out', None)
        if out is not None:
            original_output = out
            if isinstance(out, NDArrayBase):
                out = (out,)
            num_output = ctypes.c_int(len(out))
            output_vars = c_handle_array(out)
            output_vars = ctypes.cast(output_vars, ctypes.POINTER(NDArrayHandle))
        else:
            original_output = None
            output_vars = ctypes.POINTER(NDArrayHandle)()
            num_output = ctypes.c_int(0)
        if kwargs:
            raise TypeError(
                "CachedOp.__call__ got unexpected keyword argument(s): " + \
                ', '.join(kwargs.keys()))

        # return output stypes to avoid the c_api call for checking
        # a handle's stype in _ndarray_cls
        out_stypes = ctypes.POINTER(ctypes.c_int)()

        check_call(_LIB.MXInvokeCachedOpEx(
            self.handle,
            ctypes.c_int(len(args)),
            c_handle_array(args),
            ctypes.byref(num_output),
            ctypes.byref(output_vars),
            ctypes.byref(out_stypes)))
        if original_output is not None:
            return original_output
        create_ndarray_fn = _np_ndarray_cls if self.is_np_sym else _ndarray_cls
        if num_output.value == 1:
            return create_ndarray_fn(ctypes.cast(output_vars[0], NDArrayHandle),
                                     stype=out_stypes[0])
        else:
            return [create_ndarray_fn(ctypes.cast(output_vars[i], NDArrayHandle),
                                      stype=out_stypes[i]) for i in range(num_output.value)]

    def _register_op_hook(self, callback, monitor_all=False):
        """Install callback for monitor.

        Parameters
        ----------
        callback : function
            Takes a string for node_name, string for op_name and a NDArrayHandle.
        monitor_all : bool, default False
            If true, monitor both inputs and outputs; otherwise monitor
            outputs only.
        """
        cb_type = ctypes.CFUNCTYPE(None, ctypes.c_char_p, ctypes.c_char_p, NDArrayHandle, ctypes.c_void_p)
        if callback:
            self._monitor_callback = cb_type(_monitor_callback_wrapper(callback))
        check_call(_LIB.MXCachedOpRegisterOpHook(
            self.handle,
            self._monitor_callback,
            ctypes.c_int(monitor_all)))
|
{
"content_hash": "693c8dd5d8cb1083e29ba49e76da5ba1",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 106,
"avg_line_length": 34.451977401129945,
"alnum_prop": 0.588553624139062,
"repo_name": "larroy/mxnet",
"id": "8dce5d8692549c039ac044c943b51d29e179acd9",
"size": "7019",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/mxnet/_ctypes/ndarray.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "227904"
},
{
"name": "C++",
"bytes": "9484781"
},
{
"name": "CMake",
"bytes": "157181"
},
{
"name": "Clojure",
"bytes": "622652"
},
{
"name": "Cuda",
"bytes": "1290387"
},
{
"name": "Dockerfile",
"bytes": "100732"
},
{
"name": "Groovy",
"bytes": "165549"
},
{
"name": "HTML",
"bytes": "40277"
},
{
"name": "Java",
"bytes": "205196"
},
{
"name": "Julia",
"bytes": "445413"
},
{
"name": "Jupyter Notebook",
"bytes": "3660357"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "148945"
},
{
"name": "Perl",
"bytes": "1558292"
},
{
"name": "PowerShell",
"bytes": "11743"
},
{
"name": "Python",
"bytes": "9656682"
},
{
"name": "R",
"bytes": "357994"
},
{
"name": "Raku",
"bytes": "9012"
},
{
"name": "SWIG",
"bytes": "161870"
},
{
"name": "Scala",
"bytes": "1304647"
},
{
"name": "Shell",
"bytes": "460507"
},
{
"name": "Smalltalk",
"bytes": "3497"
}
],
"symlink_target": ""
}
|
"""
===========================================
Upper Air Analysis using Declarative Syntax
===========================================
The MetPy declarative syntax allows for a simplified interface to creating common
meteorological analyses including upper air observation plots.
"""
########################################
from datetime import datetime
import pandas as pd
from metpy.cbook import get_test_data
import metpy.plots as mpplots
from metpy.units import units
########################################
# **Getting the data**
#
# In this example, data is originally from the Iowa State Upper-air archive
# (https://mesonet.agron.iastate.edu/archive/raob/) available through a Siphon method.
# The data are pre-processed to attach latitude/longitude locations for each RAOB site.
# Load the pre-processed RAOB observations shipped with the test data.
data = pd.read_csv(get_test_data('UPA_obs.csv', as_file_obj=False))

########################################
# **Plotting the data**
#
# Use the declarative plotting interface to create a CONUS upper-air map for 500 hPa

# Plotting the Observations
obs = mpplots.PlotObs()
obs.data = data
obs.time = datetime(1993, 3, 14, 0)
obs.level = 500 * units.hPa
# Station model: temperature (NW), dewpoint (SW), and height (NE, first
# three digits only, via the format lambda).
obs.fields = ['temperature', 'dewpoint', 'height']
obs.locations = ['NW', 'SW', 'NE']
obs.formats = [None, None, lambda v: format(v, '.0f')[:3]]
obs.vector_field = ('u_wind', 'v_wind')
obs.reduce_points = 0

# Add map features for the particular panel
panel = mpplots.MapPanel()
panel.layout = (1, 1, 1)
panel.area = (-124, -72, 20, 53)
panel.projection = 'lcc'
panel.layers = ['coastline', 'borders', 'states', 'land', 'ocean']
panel.plots = [obs]

# Collecting panels for complete figure
pc = mpplots.PanelContainer()
pc.size = (15, 10)
pc.panels = [panel]

# Showing the results
pc.show()
|
{
"content_hash": "2b888737568242c7efb727a68bd91c28",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 87,
"avg_line_length": 30.051724137931036,
"alnum_prop": 0.6368330464716007,
"repo_name": "metpy/MetPy",
"id": "86a8a18697f402b1a423b3639e01f66e6e605b12",
"size": "1881",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "v1.1/_downloads/a519b093777a2b63ced71d531cb53070/upperair_declarative.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "989941"
},
{
"name": "Python",
"bytes": "551868"
}
],
"symlink_target": ""
}
|
import contextlib
import copy
import mock
import netaddr
from oslo_config import cfg
from oslo_utils import importutils
from webob import exc
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.rpc.handlers import l3_rpc
from neutron.api.v2 import attributes
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron import context
from neutron.db import common_db_mixin
from neutron.db import db_base_plugin_v2
from neutron.db import external_net_db
from neutron.db import l3_agentschedulers_db
from neutron.db import l3_attrs_db
from neutron.db import l3_db
from neutron.db import l3_dvr_db
from neutron.extensions import external_net
from neutron.extensions import l3
from neutron.extensions import portbindings
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants as service_constants
from neutron.tests import base
from neutron.tests import fake_notifier
from neutron.tests.unit import test_agent_ext_plugin
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_api_v2_extension
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import testlib_plugin
# Module-level logger for these tests.
LOG = logging.getLogger(__name__)
# Short aliases for helpers used throughout this module.
_uuid = uuidutils.generate_uuid
_get_path = test_api_v2._get_path
class L3TestExtensionManager(object):
    """Minimal extension manager exposing only the L3 router resources."""

    def get_resources(self):
        # The test setup never initializes the main API router, which is
        # what normally extends the global attribute map — so extend it
        # here before building the resources.
        attributes.RESOURCE_ATTRIBUTE_MAP.update(l3.RESOURCE_ATTRIBUTE_MAP)
        return l3.L3.get_resources()

    def get_actions(self):
        # No extension actions.
        return []

    def get_request_extensions(self):
        # No request extensions.
        return []
class L3NatExtensionTestCase(test_api_v2_extension.ExtensionTestCase):
    """API-level tests for the router extension against a mocked plugin.

    Each test issues an HTTP request through the extension framework and
    verifies both the response and the call delegated to the mocked
    RouterPluginBase implementation.
    """
    fmt = 'json'

    def setUp(self):
        super(L3NatExtensionTestCase, self).setUp()
        self._setUpExtension(
            'neutron.extensions.l3.RouterPluginBase', None,
            l3.RESOURCE_ATTRIBUTE_MAP, l3.L3, '',
            allow_pagination=True, allow_sorting=True,
            supported_extension_aliases=['router'],
            use_quota=True)

    def test_router_create(self):
        """POST /routers delegates to create_router and returns 201."""
        router_id = _uuid()
        data = {'router': {'name': 'router1', 'admin_state_up': True,
                           'tenant_id': _uuid(),
                           'external_gateway_info': None}}
        return_value = copy.deepcopy(data['router'])
        return_value.update({'status': "ACTIVE", 'id': router_id})

        instance = self.plugin.return_value
        instance.create_router.return_value = return_value
        instance.get_routers_count.return_value = 0
        res = self.api.post(_get_path('routers', fmt=self.fmt),
                            self.serialize(data),
                            content_type='application/%s' % self.fmt)
        instance.create_router.assert_called_with(mock.ANY,
                                                  router=data)
        self.assertEqual(res.status_int, exc.HTTPCreated.code)
        res = self.deserialize(res)
        self.assertIn('router', res)
        router = res['router']
        self.assertEqual(router['id'], router_id)
        self.assertEqual(router['status'], "ACTIVE")
        self.assertEqual(router['admin_state_up'], True)

    def test_router_list(self):
        """GET /routers delegates to get_routers with paging/sorting args."""
        router_id = _uuid()
        return_value = [{'name': 'router1', 'admin_state_up': True,
                         'tenant_id': _uuid(), 'id': router_id}]

        instance = self.plugin.return_value
        instance.get_routers.return_value = return_value

        res = self.api.get(_get_path('routers', fmt=self.fmt))

        instance.get_routers.assert_called_with(mock.ANY, fields=mock.ANY,
                                                filters=mock.ANY,
                                                sorts=mock.ANY,
                                                limit=mock.ANY,
                                                marker=mock.ANY,
                                                page_reverse=mock.ANY)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertIn('routers', res)
        self.assertEqual(1, len(res['routers']))
        self.assertEqual(router_id, res['routers'][0]['id'])

    def test_router_update(self):
        """PUT /routers/<id> delegates to update_router with the new attrs."""
        router_id = _uuid()
        update_data = {'router': {'admin_state_up': False}}
        return_value = {'name': 'router1', 'admin_state_up': False,
                        'tenant_id': _uuid(),
                        'status': "ACTIVE", 'id': router_id}

        instance = self.plugin.return_value
        instance.update_router.return_value = return_value

        res = self.api.put(_get_path('routers', id=router_id,
                                     fmt=self.fmt),
                           self.serialize(update_data))

        instance.update_router.assert_called_with(mock.ANY, router_id,
                                                  router=update_data)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertIn('router', res)
        router = res['router']
        self.assertEqual(router['id'], router_id)
        self.assertEqual(router['status'], "ACTIVE")
        self.assertEqual(router['admin_state_up'], False)

    def test_router_get(self):
        """GET /routers/<id> delegates to get_router."""
        router_id = _uuid()
        return_value = {'name': 'router1', 'admin_state_up': False,
                        'tenant_id': _uuid(),
                        'status': "ACTIVE", 'id': router_id}

        instance = self.plugin.return_value
        instance.get_router.return_value = return_value

        res = self.api.get(_get_path('routers', id=router_id,
                                     fmt=self.fmt))

        instance.get_router.assert_called_with(mock.ANY, router_id,
                                               fields=mock.ANY)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertIn('router', res)
        router = res['router']
        self.assertEqual(router['id'], router_id)
        self.assertEqual(router['status'], "ACTIVE")
        self.assertEqual(router['admin_state_up'], False)

    def test_router_delete(self):
        """DELETE /routers/<id> delegates to delete_router, returns 204."""
        router_id = _uuid()

        res = self.api.delete(_get_path('routers', id=router_id))

        instance = self.plugin.return_value
        instance.delete_router.assert_called_with(mock.ANY, router_id)
        self.assertEqual(res.status_int, exc.HTTPNoContent.code)

    def test_router_add_interface(self):
        """PUT .../add_router_interface forwards the interface body."""
        router_id = _uuid()
        subnet_id = _uuid()
        port_id = _uuid()

        interface_data = {'subnet_id': subnet_id}
        return_value = copy.deepcopy(interface_data)
        return_value['port_id'] = port_id

        instance = self.plugin.return_value
        instance.add_router_interface.return_value = return_value

        path = _get_path('routers', id=router_id,
                         action="add_router_interface",
                         fmt=self.fmt)
        res = self.api.put(path, self.serialize(interface_data))

        instance.add_router_interface.assert_called_with(mock.ANY, router_id,
                                                         interface_data)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        res = self.deserialize(res)
        self.assertIn('port_id', res)
        self.assertEqual(res['port_id'], port_id)
        self.assertEqual(res['subnet_id'], subnet_id)
# This base plugin class is for tests.
class TestL3NatBasePlugin(db_base_plugin_v2.NeutronDbPluginV2,
                          external_net_db.External_net_db_mixin):
    """Core plugin stub that wires external-network handling into CRUD."""

    __native_pagination_support = True
    __native_sorting_support = True

    def create_network(self, context, network):
        # Create the network and its external-net data in one transaction.
        session = context.session
        with session.begin(subtransactions=True):
            net = super(TestL3NatBasePlugin, self).create_network(context,
                                                                  network)
            self._process_l3_create(context, net, network['network'])
        return net

    def update_network(self, context, id, network):
        # Update the network and its external-net data in one transaction.
        session = context.session
        with session.begin(subtransactions=True):
            net = super(TestL3NatBasePlugin, self).update_network(context, id,
                                                                  network)
            self._process_l3_update(context, net, network['network'])
        return net

    def delete_network(self, context, id):
        # Remove external-net data, then the network, atomically.
        with context.session.begin(subtransactions=True):
            self._process_l3_delete(context, id)
            super(TestL3NatBasePlugin, self).delete_network(context, id)

    def delete_port(self, context, id, l3_port_check=True):
        # Let the L3 service plugin veto router-port deletion and detach
        # floating IPs before the port actually goes away.
        plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        if plugin:
            if l3_port_check:
                plugin.prevent_l3_port_deletion(context, id)
            plugin.disassociate_floatingips(context, id)
        return super(TestL3NatBasePlugin, self).delete_port(context, id)
# This plugin class is for tests with plugin that integrates L3.
class TestL3NatIntPlugin(TestL3NatBasePlugin,
                         l3_db.L3_NAT_db_mixin):
    """Monolithic test plugin: core plugin with built-in L3/NAT support."""
    supported_extension_aliases = ["external-net", "router"]
# This plugin class is for tests with plugin that integrates L3 and L3 agent
# scheduling.
class TestL3NatIntAgentSchedulingPlugin(TestL3NatIntPlugin,
                                        l3_agentschedulers_db.
                                        L3AgentSchedulerDbMixin):
    """Monolithic test plugin that also schedules routers to L3 agents."""
    supported_extension_aliases = ["external-net", "router",
                                   "l3_agent_scheduler"]
    # The scheduler driver is resolved once, at class definition time,
    # from the router_scheduler_driver config option.
    router_scheduler = importutils.import_object(
        cfg.CONF.router_scheduler_driver)
# This plugin class is for tests with plugin not supporting L3.
class TestNoL3NatPlugin(TestL3NatBasePlugin):
    """Core-only test plugin: external nets supported, no router extension."""
    # NOTE(review): redefined here because the double underscore name-mangles
    # these attributes per class — each plugin carries its own flags.
    __native_pagination_support = True
    __native_sorting_support = True
    supported_extension_aliases = ["external-net"]
# A L3 routing service plugin class for tests with plugins that
# delegate away L3 routing functionality
class TestL3NatServicePlugin(common_db_mixin.CommonDbMixin,
                             l3_dvr_db.L3_NAT_with_dvr_db_mixin,
                             l3_db.L3_NAT_db_mixin):
    """Standalone L3 service plugin used with core plugins lacking routing."""
    supported_extension_aliases = ["router"]
    def get_plugin_type(self):
        """Identify this plugin as the L3 router service plugin."""
        return service_constants.L3_ROUTER_NAT
    def get_plugin_description(self):
        """Return a human-readable description for this test plugin."""
        return "L3 Routing Service Plugin for testing"
# A L3 routing with L3 agent scheduling service plugin class for tests with
# plugins that delegate away L3 routing functionality
class TestL3NatAgentSchedulingServicePlugin(TestL3NatServicePlugin,
                                            l3_agentschedulers_db.
                                            L3AgentSchedulerDbMixin):
    """L3 service plugin variant that also schedules routers to agents."""
    supported_extension_aliases = ["router", "l3_agent_scheduler"]
    def __init__(self):
        """Load the configured scheduler and register the L3 RPC notifier."""
        super(TestL3NatAgentSchedulingServicePlugin, self).__init__()
        self.router_scheduler = importutils.import_object(
            cfg.CONF.router_scheduler_driver)
        self.agent_notifiers.update(
            {l3_constants.AGENT_TYPE_L3: l3_rpc_agent_api.L3AgentNotifyAPI()})
class L3NATdbonlyMixinTestCase(base.BaseTestCase):
    """Unit tests for l3_db.L3_NAT_dbonly_mixin helper methods."""
    def setUp(self):
        super(L3NATdbonlyMixinTestCase, self).setUp()
        self.mixin = l3_db.L3_NAT_dbonly_mixin()
    def test_build_routers_list_with_gw_port_mismatch(self):
        """A router whose gw_port_id has no matching port gets no gw_port."""
        routers = [{'gw_port_id': 'foo_gw_port_id', 'id': 'foo_router_id'}]
        gw_ports = {}
        routers = self.mixin._build_routers_list(mock.ANY, routers, gw_ports)
        self.assertIsNone(routers[0].get('gw_port'))
class L3NatTestCaseMixin(object):
    """Helper mixin to drive the L3 extension API (routers, floating IPs).

    All helpers go through the WSGI test app (self.ext_api) so they
    exercise the same request path as real API calls.
    """

    def _create_router(self, fmt, tenant_id, name=None,
                       admin_state_up=None, set_context=False,
                       arg_list=None, **kwargs):
        """POST a router create request and return the raw response."""
        data = {'router': {'tenant_id': tenant_id}}
        if name:
            data['router']['name'] = name
        if admin_state_up:
            data['router']['admin_state_up'] = admin_state_up
        for arg in (('admin_state_up', 'tenant_id') + (arg_list or ())):
            # Arg must be present and not empty
            if kwargs.get(arg):
                data['router'][arg] = kwargs[arg]
        router_req = self.new_create_request('routers', data, fmt)
        if set_context and tenant_id:
            # create a specific auth context for this request
            router_req.environ['neutron.context'] = context.Context(
                '', tenant_id)
        return router_req.get_response(self.ext_api)

    def _make_router(self, fmt, tenant_id, name=None, admin_state_up=None,
                     external_gateway_info=None, set_context=False,
                     arg_list=None, **kwargs):
        """Create a router and return the deserialized response body."""
        if external_gateway_info:
            arg_list = ('external_gateway_info', ) + (arg_list or ())
        res = self._create_router(fmt, tenant_id, name,
                                  admin_state_up, set_context,
                                  arg_list=arg_list,
                                  external_gateway_info=external_gateway_info,
                                  **kwargs)
        return self.deserialize(fmt, res)

    def _add_external_gateway_to_router(self, router_id, network_id,
                                        expected_code=exc.HTTPOk.code,
                                        neutron_context=None, ext_ips=None):
        """Set the router's gateway, optionally pinning external fixed IPs.

        ``ext_ips`` defaults to None instead of the previous mutable ``[]``
        default (shared between calls); behavior is unchanged because an
        empty list and None are both falsy here.
        """
        body = {'router':
                {'external_gateway_info': {'network_id': network_id}}}
        if ext_ips:
            body['router']['external_gateway_info'][
                'external_fixed_ips'] = ext_ips
        return self._update('routers', router_id, body,
                            expected_code=expected_code,
                            neutron_context=neutron_context)

    def _remove_external_gateway_from_router(self, router_id, network_id,
                                             expected_code=exc.HTTPOk.code,
                                             external_gw_info=None):
        """Clear (or replace) the router's external gateway info."""
        return self._update('routers', router_id,
                            {'router': {'external_gateway_info':
                                        external_gw_info}},
                            expected_code=expected_code)

    def _router_interface_action(self, action, router_id, subnet_id, port_id,
                                 expected_code=exc.HTTPOk.code,
                                 expected_body=None,
                                 tenant_id=None,
                                 msg=None):
        """Invoke {add,remove}_router_interface and verify the response."""
        interface_data = {}
        if subnet_id:
            interface_data.update({'subnet_id': subnet_id})
        # On 'add', subnet_id and port_id are mutually exclusive.
        if port_id and (action != 'add' or not subnet_id):
            interface_data.update({'port_id': port_id})
        req = self.new_action_request('routers', interface_data, router_id,
                                      "%s_router_interface" % action)
        # if tenant_id was specified, create a tenant context for this request
        if tenant_id:
            req.environ['neutron.context'] = context.Context(
                '', tenant_id)
        res = req.get_response(self.ext_api)
        self.assertEqual(res.status_int, expected_code, msg)
        response = self.deserialize(self.fmt, res)
        if expected_body:
            self.assertEqual(response, expected_body, msg)
        return response

    @contextlib.contextmanager
    def router(self, name='router1', admin_state_up=True,
               fmt=None, tenant_id=_uuid(),
               external_gateway_info=None, set_context=False,
               **kwargs):
        """Context manager yielding a router, deleted on normal exit.

        NOTE(review): the tenant_id default is evaluated once at class
        definition time, so all callers that omit it share one tenant.
        Kept as-is because tests may rely on that shared tenant.
        """
        router = self._make_router(fmt or self.fmt, tenant_id, name,
                                   admin_state_up, external_gateway_info,
                                   set_context, **kwargs)
        yield router
        self._delete('routers', router['router']['id'])

    def _set_net_external(self, net_id):
        """Mark the given network as external (router:external=True)."""
        self._update('networks', net_id,
                     {'network': {external_net.EXTERNAL: True}})

    def _create_floatingip(self, fmt, network_id, port_id=None,
                           fixed_ip=None, set_context=False,
                           floating_ip=None):
        """POST a floating IP create request and return the raw response."""
        data = {'floatingip': {'floating_network_id': network_id,
                               'tenant_id': self._tenant_id}}
        if port_id:
            data['floatingip']['port_id'] = port_id
        if fixed_ip:
            data['floatingip']['fixed_ip_address'] = fixed_ip
        if floating_ip:
            data['floatingip']['floating_ip_address'] = floating_ip
        floatingip_req = self.new_create_request('floatingips', data, fmt)
        if set_context and self._tenant_id:
            # create a specific auth context for this request
            floatingip_req.environ['neutron.context'] = context.Context(
                '', self._tenant_id)
        return floatingip_req.get_response(self.ext_api)

    def _make_floatingip(self, fmt, network_id, port_id=None,
                         fixed_ip=None, set_context=False, floating_ip=None,
                         http_status=exc.HTTPCreated.code):
        """Create a floating IP, assert the status, return the body."""
        res = self._create_floatingip(fmt, network_id, port_id,
                                      fixed_ip, set_context, floating_ip)
        self.assertEqual(res.status_int, http_status)
        return self.deserialize(fmt, res)

    def _validate_floating_ip(self, fip):
        """Check the floating IP is the only one listed and is showable."""
        body = self._list('floatingips')
        self.assertEqual(len(body['floatingips']), 1)
        self.assertEqual(body['floatingips'][0]['id'],
                         fip['floatingip']['id'])
        body = self._show('floatingips', fip['floatingip']['id'])
        self.assertEqual(body['floatingip']['id'],
                         fip['floatingip']['id'])

    @contextlib.contextmanager
    def floatingip_with_assoc(self, port_id=None, fmt=None, fixed_ip=None,
                              set_context=False):
        """Yield a floating IP associated with a port behind a router.

        Builds the full topology (external net, router, gateway, internal
        interface) and tears it down in reverse order on normal exit.
        """
        with self.subnet(cidr='11.0.0.0/24') as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            private_port = None
            if port_id:
                private_port = self._show('ports', port_id)
            with test_db_plugin.optional_ctx(private_port,
                                             self.port) as private_port:
                with self.router() as r:
                    sid = private_port['port']['fixed_ips'][0]['subnet_id']
                    private_sub = {'subnet': {'id': sid}}
                    floatingip = None
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])
                    self._router_interface_action(
                        'add', r['router']['id'],
                        private_sub['subnet']['id'], None)
                    floatingip = self._make_floatingip(
                        fmt or self.fmt,
                        public_sub['subnet']['network_id'],
                        port_id=private_port['port']['id'],
                        fixed_ip=fixed_ip,
                        set_context=set_context)
                    yield floatingip
                    # Teardown runs only on normal exit (no try/finally),
                    # matching the original contract of this helper.
                    if floatingip:
                        self._delete('floatingips',
                                     floatingip['floatingip']['id'])
                    self._router_interface_action(
                        'remove', r['router']['id'],
                        private_sub['subnet']['id'], None)
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])

    @contextlib.contextmanager
    def floatingip_no_assoc_with_public_sub(
        self, private_sub, fmt=None, set_context=False, public_sub=None):
        """Yield (floatingip, router) with no port association."""
        self._set_net_external(public_sub['subnet']['network_id'])
        with self.router() as r:
            floatingip = None
            self._add_external_gateway_to_router(
                r['router']['id'],
                public_sub['subnet']['network_id'])
            self._router_interface_action('add', r['router']['id'],
                                          private_sub['subnet']['id'],
                                          None)
            floatingip = self._make_floatingip(
                fmt or self.fmt,
                public_sub['subnet']['network_id'],
                set_context=set_context)
            yield floatingip, r
            if floatingip:
                self._delete('floatingips',
                             floatingip['floatingip']['id'])
            self._router_interface_action('remove', r['router']['id'],
                                          private_sub['subnet']['id'],
                                          None)
            self._remove_external_gateway_from_router(
                r['router']['id'],
                public_sub['subnet']['network_id'])

    @contextlib.contextmanager
    def floatingip_no_assoc(self, private_sub, fmt=None, set_context=False):
        """Yield an unassociated floating IP on a fresh public subnet."""
        with self.subnet(cidr='12.0.0.0/24') as public_sub:
            with self.floatingip_no_assoc_with_public_sub(
                private_sub, fmt, set_context, public_sub) as (f, r):
                # Yield only the floating ip object
                yield f
class ExtraAttributesMixinTestCase(base.BaseTestCase):
    """Unit tests for l3_attrs_db.ExtraAttributesMixin default handling."""
    def setUp(self):
        super(ExtraAttributesMixinTestCase, self).setUp()
        self.mixin = l3_attrs_db.ExtraAttributesMixin()
    def _test__extend_extra_router_dict(
        self, extra_attributes, attributes, expected_attributes):
        """Extend ``attributes`` from ``extra_attributes`` and compare."""
        self.mixin._extend_extra_router_dict(
            attributes, {'extra_attributes': extra_attributes})
        self.assertEqual(expected_attributes, attributes)
    def test__extend_extra_router_dict_string_default(self):
        """A stored string value overrides the declared string default."""
        self.mixin.extra_attributes = [{
            'name': "foo_key",
            'default': 'foo_default'
        }]
        extension_attributes = {'foo_key': 'my_fancy_value'}
        self._test__extend_extra_router_dict(
            extension_attributes, {}, extension_attributes)
    def test__extend_extra_router_dict_booleans_false_default(self):
        """A stored True overrides a declared False default."""
        self.mixin.extra_attributes = [{
            'name': "foo_key",
            'default': False
        }]
        extension_attributes = {'foo_key': True}
        self._test__extend_extra_router_dict(
            extension_attributes, {}, extension_attributes)
    def test__extend_extra_router_dict_booleans_true_default(self):
        """A stored False overrides a declared True default."""
        self.mixin.extra_attributes = [{
            'name': "foo_key",
            'default': True
        }]
        # Test that the default is overridden
        extension_attributes = {'foo_key': False}
        self._test__extend_extra_router_dict(
            extension_attributes, {}, extension_attributes)
    def test__extend_extra_router_dict_no_extension_attributes(self):
        """With no stored attributes the declared default is applied."""
        self.mixin.extra_attributes = [{
            'name': "foo_key",
            'default': 'foo_value'
        }]
        self._test__extend_extra_router_dict({}, {}, {'foo_key': 'foo_value'})
    def test__extend_extra_router_dict_none_extension_attributes(self):
        """A None extra_attributes payload leaves the dict untouched."""
        self._test__extend_extra_router_dict(None, {}, {})
class L3NatTestCaseBase(L3NatTestCaseMixin):
def test_router_create(self):
name = 'router1'
tenant_id = _uuid()
expected_value = [('name', name), ('tenant_id', tenant_id),
('admin_state_up', True), ('status', 'ACTIVE'),
('external_gateway_info', None)]
with self.router(name='router1', admin_state_up=True,
tenant_id=tenant_id) as router:
for k, v in expected_value:
self.assertEqual(router['router'][k], v)
    def test_router_create_call_extensions(self):
        """Registered dict-extend callbacks run when a router is created."""
        self.extension_called = False
        def _extend_router_dict_test_attr(*args, **kwargs):
            # Only records that the callback fired; args are irrelevant.
            self.extension_called = True
        db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
            l3.ROUTERS, [_extend_router_dict_test_attr])
        self.assertFalse(self.extension_called)
        with self.router():
            self.assertTrue(self.extension_called)
    def test_router_create_with_gwinfo(self):
        """A router can be created with external_gateway_info in the body."""
        with self.subnet() as s:
            self._set_net_external(s['subnet']['network_id'])
            data = {'router': {'tenant_id': _uuid()}}
            data['router']['name'] = 'router1'
            data['router']['external_gateway_info'] = {
                'network_id': s['subnet']['network_id']}
            router_req = self.new_create_request('routers', data, self.fmt)
            res = router_req.get_response(self.ext_api)
            router = self.deserialize(self.fmt, res)
            self.assertEqual(
                s['subnet']['network_id'],
                router['router']['external_gateway_info']['network_id'])
            # Created outside the self.router() helper, so delete explicitly.
            self._delete('routers', router['router']['id'])
    def test_router_create_with_gwinfo_ext_ip(self):
        """A specific external fixed IP can be requested at creation."""
        with self.subnet() as s:
            self._set_net_external(s['subnet']['network_id'])
            ext_info = {
                'network_id': s['subnet']['network_id'],
                'external_fixed_ips': [{'ip_address': '10.0.0.99'}]
            }
            res = self._create_router(
                self.fmt, _uuid(), arg_list=('external_gateway_info',),
                external_gateway_info=ext_info
            )
            router = self.deserialize(self.fmt, res)
            # Delete first so a failed assertion does not leak the router.
            self._delete('routers', router['router']['id'])
            self.assertEqual(
                [{'ip_address': '10.0.0.99', 'subnet_id': s['subnet']['id']}],
                router['router']['external_gateway_info'][
                    'external_fixed_ips'])
    def test_router_create_with_gwinfo_ext_ip_subnet(self):
        """The gateway port can be pinned to each subnet of the ext net."""
        with self.network() as n:
            with contextlib.nested(
                self.subnet(network=n),
                self.subnet(network=n, cidr='1.0.0.0/24'),
                self.subnet(network=n, cidr='2.0.0.0/24'),
            ) as subnets:
                self._set_net_external(n['network']['id'])
                for s in subnets:
                    ext_info = {
                        'network_id': n['network']['id'],
                        'external_fixed_ips': [
                            {'subnet_id': s['subnet']['id']}]
                    }
                    res = self._create_router(
                        self.fmt, _uuid(), arg_list=('external_gateway_info',),
                        external_gateway_info=ext_info
                    )
                    router = self.deserialize(self.fmt, res)
                    ext_ips = router['router']['external_gateway_info'][
                        'external_fixed_ips']
                    # Delete before asserting so a failure does not leak the
                    # router into the next loop iteration.
                    self._delete('routers', router['router']['id'])
                    self.assertEqual(
                        [{'subnet_id': s['subnet']['id'],
                          'ip_address': mock.ANY}], ext_ips)
    def test_router_create_with_gwinfo_ext_ip_non_admin(self):
        """Non-admin tenants may not choose the external gateway IP (403)."""
        with self.subnet() as s:
            self._set_net_external(s['subnet']['network_id'])
            ext_info = {
                'network_id': s['subnet']['network_id'],
                'external_fixed_ips': [{'ip_address': '10.0.0.99'}]
            }
            res = self._create_router(
                self.fmt, _uuid(), arg_list=('external_gateway_info',),
                set_context=True, external_gateway_info=ext_info
            )
            self.assertEqual(res.status_int, exc.HTTPForbidden.code)
def test_router_list(self):
with contextlib.nested(self.router(),
self.router(),
self.router()
) as routers:
self._test_list_resources('router', routers)
def test_router_list_with_parameters(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
) as (router1, router2):
query_params = 'name=router1'
self._test_list_resources('router', [router1],
query_params=query_params)
query_params = 'name=router2'
self._test_list_resources('router', [router2],
query_params=query_params)
query_params = 'name=router3'
self._test_list_resources('router', [],
query_params=query_params)
    def test_router_list_with_sort(self):
        """Routers can be listed sorted by name in descending order."""
        with contextlib.nested(self.router(name='router1'),
                               self.router(name='router2'),
                               self.router(name='router3')
                               ) as (router1, router2, router3):
            self._test_list_with_sort('router', (router3, router2, router1),
                                      [('name', 'desc')])
    def test_router_list_with_pagination(self):
        """Router lists honor pagination (page size 2 over 3 routers)."""
        with contextlib.nested(self.router(name='router1'),
                               self.router(name='router2'),
                               self.router(name='router3')
                               ) as (router1, router2, router3):
            self._test_list_with_pagination('router',
                                            (router1, router2, router3),
                                            ('name', 'asc'), 2, 2)
    def test_router_list_with_pagination_reverse(self):
        """Router lists honor reverse pagination over three routers."""
        with contextlib.nested(self.router(name='router1'),
                               self.router(name='router2'),
                               self.router(name='router3')
                               ) as (router1, router2, router3):
            self._test_list_with_pagination_reverse('router',
                                                    (router1, router2,
                                                     router3),
                                                    ('name', 'asc'), 2, 2)
def test_router_update(self):
rname1 = "yourrouter"
rname2 = "nachorouter"
with self.router(name=rname1) as r:
body = self._show('routers', r['router']['id'])
self.assertEqual(body['router']['name'], rname1)
body = self._update('routers', r['router']['id'],
{'router': {'name': rname2}})
body = self._show('routers', r['router']['id'])
self.assertEqual(body['router']['name'], rname2)
    def test_router_update_gateway(self):
        """The router gateway can be moved between external networks."""
        with self.router() as r:
            with self.subnet() as s1:
                with self.subnet() as s2:
                    self._set_net_external(s1['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s1['subnet']['network_id'])
                    body = self._show('routers', r['router']['id'])
                    net_id = (body['router']
                              ['external_gateway_info']['network_id'])
                    self.assertEqual(net_id, s1['subnet']['network_id'])
                    self._set_net_external(s2['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s2['subnet']['network_id'])
                    body = self._show('routers', r['router']['id'])
                    net_id = (body['router']
                              ['external_gateway_info']['network_id'])
                    self.assertEqual(net_id, s2['subnet']['network_id'])
                    # Validate that we can clear the gateway with
                    # an empty dict, in any other case, we fall back
                    # on None as default value
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        s2['subnet']['network_id'],
                        external_gw_info={})
    def test_router_update_gateway_with_external_ip_used_by_gw(self):
        """Requesting the subnet's own gateway IP as the gw IP is a 400."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    ext_ips=[{'ip_address': s['subnet']['gateway_ip']}],
                    expected_code=exc.HTTPBadRequest.code)
    def test_router_update_gateway_with_invalid_external_ip(self):
        """A gateway IP outside the external subnet's CIDR is a 400."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    ext_ips=[{'ip_address': '99.99.99.99'}],
                    expected_code=exc.HTTPBadRequest.code)
    def test_router_update_gateway_with_invalid_external_subnet(self):
        """A fixed-IP subnet not on the gateway network is rejected (400)."""
        with contextlib.nested(
            self.subnet(),
            self.subnet(cidr='1.0.0.0/24'),
            self.router()
        ) as (s1, s2, r):
            self._set_net_external(s1['subnet']['network_id'])
            self._add_external_gateway_to_router(
                r['router']['id'],
                s1['subnet']['network_id'],
                # this subnet is not on the same network so this should fail
                ext_ips=[{'subnet_id': s2['subnet']['id']}],
                expected_code=exc.HTTPBadRequest.code)
def test_router_update_gateway_with_different_external_subnet(self):
with self.network() as n:
with contextlib.nested(
self.subnet(network=n),
self.subnet(network=n, cidr='1.0.0.0/24'),
self.router()
) as (s1, s2, r):
self._set_net_external(n['network']['id'])
res1 = self._add_external_gateway_to_router(
r['router']['id'],
n['network']['id'],
ext_ips=[{'subnet_id': s1['subnet']['id']}])
res2 = self._add_external_gateway_to_router(
r['router']['id'],
n['network']['id'],
ext_ips=[{'subnet_id': s2['subnet']['id']}])
fip1 = res1['router']['external_gateway_info']['external_fixed_ips'][0]
fip2 = res2['router']['external_gateway_info']['external_fixed_ips'][0]
self.assertEqual(s1['subnet']['id'], fip1['subnet_id'])
self.assertEqual(s2['subnet']['id'], fip2['subnet_id'])
self.assertNotEqual(fip1['subnet_id'], fip2['subnet_id'])
self.assertNotEqual(fip1['ip_address'], fip2['ip_address'])
    def test_router_update_gateway_with_existed_floatingip(self):
        """Changing the gateway while a floating IP is in use is a 409."""
        with self.subnet() as subnet:
            self._set_net_external(subnet['subnet']['network_id'])
            with self.floatingip_with_assoc() as fip:
                self._add_external_gateway_to_router(
                    fip['floatingip']['router_id'],
                    subnet['subnet']['network_id'],
                    expected_code=exc.HTTPConflict.code)
    def test_router_update_gateway_to_empty_with_existed_floatingip(self):
        """Clearing the gateway while a floating IP is in use is a 409."""
        with self.floatingip_with_assoc() as fip:
            self._remove_external_gateway_from_router(
                fip['floatingip']['router_id'], None,
                expected_code=exc.HTTPConflict.code)
    def _test_router_add_interface_subnet(self, router, subnet, msg=None):
        """Attach and detach a subnet as a router interface, with checks.

        Verifies the created interface port's device_id, that the port is
        gone after removal, and that the expected notification event types
        (and their router_interface payloads) were emitted.
        """
        exp_notifications = ['router.create.start',
                             'router.create.end',
                             'network.create.start',
                             'network.create.end',
                             'subnet.create.start',
                             'subnet.create.end',
                             'router.interface.create',
                             'router.interface.delete']
        body = self._router_interface_action('add',
                                             router['router']['id'],
                                             subnet['subnet']['id'],
                                             None)
        self.assertIn('port_id', body, msg)
        # fetch port and confirm device_id
        r_port_id = body['port_id']
        port = self._show('ports', r_port_id)
        self.assertEqual(port['port']['device_id'],
                         router['router']['id'], msg)
        self._router_interface_action('remove',
                                      router['router']['id'],
                                      subnet['subnet']['id'],
                                      None)
        # The interface port must be gone once the interface is removed.
        self._show('ports', r_port_id,
                   expected_code=exc.HTTPNotFound.code)
        self.assertEqual(
            set(exp_notifications),
            set(n['event_type'] for n in fake_notifier.NOTIFICATIONS), msg)
        for n in fake_notifier.NOTIFICATIONS:
            if n['event_type'].startswith('router.interface.'):
                payload = n['payload']['router_interface']
                self.assertIn('id', payload)
                self.assertEqual(payload['id'], router['router']['id'])
                self.assertIn('tenant_id', payload)
                stid = subnet['subnet']['tenant_id']
                # tolerate subnet tenant deliberately set to '' in the
                # nsx metadata access case
                self.assertIn(payload['tenant_id'], [stid, ''], msg)
def test_router_add_interface_subnet(self):
fake_notifier.reset()
with self.router() as r:
with self.network() as n:
with self.subnet(network=n) as s:
self._test_router_add_interface_subnet(r, s)
    def test_router_add_interface_ipv6_subnet(self):
        """Test router-interface-add for valid ipv6 subnets.
        Verify the valid use-cases of an IPv6 subnet where we
        are allowed to associate to the Neutron Router are successful.
        """
        slaac = l3_constants.IPV6_SLAAC
        stateful = l3_constants.DHCPV6_STATEFUL
        stateless = l3_constants.DHCPV6_STATELESS
        use_cases = [{'msg': 'IPv6 Subnet Modes (slaac, none)',
                      'ra_mode': slaac, 'address_mode': None},
                     {'msg': 'IPv6 Subnet Modes (none, none)',
                      'ra_mode': None, 'address_mode': None},
                     {'msg': 'IPv6 Subnet Modes (dhcpv6-stateful, none)',
                      'ra_mode': stateful, 'address_mode': None},
                     {'msg': 'IPv6 Subnet Modes (dhcpv6-stateless, none)',
                      'ra_mode': stateless, 'address_mode': None},
                     {'msg': 'IPv6 Subnet Modes (slaac, slaac)',
                      'ra_mode': slaac, 'address_mode': slaac},
                     {'msg': 'IPv6 Subnet Modes (dhcpv6-stateful,'
                      'dhcpv6-stateful)', 'ra_mode': stateful,
                      'address_mode': stateful},
                     {'msg': 'IPv6 Subnet Modes (dhcpv6-stateless,'
                      'dhcpv6-stateless)', 'ra_mode': stateless,
                      'address_mode': stateless}]
        for uc in use_cases:
            # Notification assertions are per use case, so reset first.
            fake_notifier.reset()
            with contextlib.nested(self.router(), self.network()) as (r, n):
                with self.subnet(network=n, cidr='fd00::1/64',
                                 gateway_ip='fd00::1', ip_version=6,
                                 ipv6_ra_mode=uc['ra_mode'],
                                 ipv6_address_mode=uc['address_mode']) as s:
                    self._test_router_add_interface_subnet(r, s, uc['msg'])
                    # Delete the subnet explicitly so the enclosing network
                    # fixture can exit cleanly.
                    self._delete('subnets', s['subnet']['id'])
    def test_router_add_iface_ipv6_ext_ra_subnet_returns_400(self):
        """Test router-interface-add for in-valid ipv6 subnets.
        Verify that an appropriate error message is displayed when
        an IPv6 subnet configured to use an external_router for Router
        Advertisements (i.e., ipv6_ra_mode is None and ipv6_address_mode
        is not None) is attempted to associate with a Neutron Router.
        """
        use_cases = [{'msg': 'IPv6 Subnet Modes (none, slaac)',
                      'ra_mode': None,
                      'address_mode': l3_constants.IPV6_SLAAC},
                     {'msg': 'IPv6 Subnet Modes (none, dhcpv6-stateful)',
                      'ra_mode': None,
                      'address_mode': l3_constants.DHCPV6_STATEFUL},
                     {'msg': 'IPv6 Subnet Modes (none, dhcpv6-stateless)',
                      'ra_mode': None,
                      'address_mode': l3_constants.DHCPV6_STATELESS}]
        for uc in use_cases:
            with contextlib.nested(self.router(), self.network()) as (r, n):
                with self.subnet(network=n, cidr='fd00::1/64',
                                 gateway_ip='fd00::1', ip_version=6,
                                 ipv6_ra_mode=uc['ra_mode'],
                                 ipv6_address_mode=uc['address_mode']) as s:
                    exp_code = exc.HTTPBadRequest.code
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  s['subnet']['id'],
                                                  None,
                                                  expected_code=exp_code,
                                                  msg=uc['msg'])
                    # Delete the subnet explicitly so the enclosing network
                    # fixture can exit cleanly.
                    self._delete('subnets', s['subnet']['id'])
    def test_router_add_interface_ipv6_subnet_without_gateway_ip(self):
        """An IPv6 subnet with no gateway IP cannot be a router interface."""
        with self.router() as r:
            with self.subnet(ip_version=6, cidr='fe80::/64',
                             gateway_ip=None) as s:
                error_code = exc.HTTPBadRequest.code
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None,
                                              expected_code=error_code)
def test_router_add_interface_subnet_with_bad_tenant_returns_404(self):
with mock.patch('neutron.context.Context.to_dict') as tdict:
tenant_id = _uuid()
admin_context = {'roles': ['admin']}
tenant_context = {'tenant_id': 'bad_tenant',
'roles': []}
tdict.return_value = admin_context
with self.router(tenant_id=tenant_id) as r:
with self.network(tenant_id=tenant_id) as n:
with self.subnet(network=n) as s:
tdict.return_value = tenant_context
err_code = exc.HTTPNotFound.code
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None,
err_code)
tdict.return_value = admin_context
body = self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self.assertIn('port_id', body)
tdict.return_value = tenant_context
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None,
err_code)
tdict.return_value = admin_context
body = self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_add_interface_subnet_with_port_from_other_tenant(self):
tenant_id = _uuid()
other_tenant_id = _uuid()
with contextlib.nested(
self.router(tenant_id=tenant_id),
self.network(tenant_id=tenant_id),
self.network(tenant_id=other_tenant_id)) as (r, n1, n2):
with contextlib.nested(
self.subnet(network=n1, cidr='10.0.0.0/24'),
self.subnet(network=n2, cidr='10.1.0.0/24')) as (s1, s2):
body = self._router_interface_action(
'add',
r['router']['id'],
s2['subnet']['id'],
None)
self.assertIn('port_id', body)
self._router_interface_action(
'add',
r['router']['id'],
s1['subnet']['id'],
None,
tenant_id=tenant_id)
self.assertIn('port_id', body)
self._router_interface_action(
'remove',
r['router']['id'],
s1['subnet']['id'],
None,
tenant_id=tenant_id)
body = self._router_interface_action(
'remove',
r['router']['id'],
s2['subnet']['id'],
None)
    def test_router_add_interface_port(self):
        """An existing port can be attached as a router interface."""
        with self.router() as r:
            with self.port() as p:
                body = self._router_interface_action('add',
                                                     r['router']['id'],
                                                     None,
                                                     p['port']['id'])
                self.assertIn('port_id', body)
                self.assertEqual(body['port_id'], p['port']['id'])
                # fetch port and confirm device_id
                body = self._show('ports', p['port']['id'])
                self.assertEqual(body['port']['device_id'], r['router']['id'])
                # clean-up
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'])
def test_router_add_interface_empty_port_and_subnet_ids(self):
with self.router() as r:
self._router_interface_action('add', r['router']['id'],
None, None,
expected_code=exc.
HTTPBadRequest.code)
    def test_router_add_interface_port_bad_tenant_returns_404(self):
        """A foreign tenant gets 404 when adding/removing a port interface."""
        with mock.patch('neutron.context.Context.to_dict') as tdict:
            admin_context = {'roles': ['admin']}
            tenant_context = {'tenant_id': 'bad_tenant',
                              'roles': []}
            tdict.return_value = admin_context
            with self.router() as r:
                with self.port() as p:
                    # Foreign tenant: add is rejected with 404.
                    tdict.return_value = tenant_context
                    err_code = exc.HTTPNotFound.code
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'],
                                                  err_code)
                    # Admin: add succeeds.
                    tdict.return_value = admin_context
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # Foreign tenant: remove is rejected with 404.
                    tdict.return_value = tenant_context
                    # clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'],
                                                  err_code)
                    # Admin: remove succeeds.
                    tdict.return_value = admin_context
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
    def test_router_add_interface_dup_subnet1_returns_400(self):
        """Adding the same subnet to a router twice is rejected (400)."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None,
                                              expected_code=exc.
                                              HTTPBadRequest.code)
                # clean-up so the fixtures can tear down
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
    def test_router_add_interface_dup_subnet2_returns_400(self):
        """Two ports on the same subnet cannot both be router interfaces."""
        with self.router() as r:
            with self.subnet() as s:
                with self.port(subnet=s) as p1:
                    with self.port(subnet=s) as p2:
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      None,
                                                      p1['port']['id'])
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      None,
                                                      p2['port']['id'],
                                                      expected_code=exc.
                                                      HTTPBadRequest.code)
                        # clean-up
                        self._router_interface_action('remove',
                                                      r['router']['id'],
                                                      None,
                                                      p1['port']['id'])
    def test_router_add_interface_overlapped_cidr_returns_400(self):
        """Subnets whose CIDRs overlap an attached subnet are rejected."""
        with self.router() as r:
            with self.subnet(cidr='10.0.1.0/24') as s1:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s1['subnet']['id'],
                                              None)
                def try_overlapped_cidr(cidr):
                    # Helper: attaching a subnet with this cidr must 400.
                    with self.subnet(cidr=cidr) as s2:
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      s2['subnet']['id'],
                                                      None,
                                                      expected_code=exc.
                                                      HTTPBadRequest.code)
                # another subnet with same cidr
                try_overlapped_cidr('10.0.1.0/24')
                # another subnet with overlapped cidr including s1
                try_overlapped_cidr('10.0.0.0/16')
                # clean-up
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s1['subnet']['id'],
                                              None)
def test_router_add_interface_no_data_returns_400(self):
with self.router() as r:
self._router_interface_action('add',
r['router']['id'],
None,
None,
expected_code=exc.
HTTPBadRequest.code)
    def test_router_add_gateway_dup_subnet1_returns_400(self):
        """A network already attached as an interface cannot be the gw."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    expected_code=exc.HTTPBadRequest.code)
                # clean-up so the fixtures can tear down
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
    def test_router_add_gateway_dup_subnet2_returns_400(self):
        """The gateway network cannot also be added as an interface."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None,
                                              expected_code=exc.
                                              HTTPBadRequest.code)
                # clean-up so the fixtures can tear down
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
    def test_router_add_gateway(self):
        """The gateway can be set on an external net and cleared again."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                body = self._show('routers', r['router']['id'])
                net_id = body['router']['external_gateway_info']['network_id']
                self.assertEqual(net_id, s['subnet']['network_id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                # After removal the gateway info is gone entirely.
                body = self._show('routers', r['router']['id'])
                gw_info = body['router']['external_gateway_info']
                self.assertIsNone(gw_info)
    def test_router_add_gateway_tenant_ctx(self):
        """A non-admin tenant can set and clear its own router's gateway."""
        with self.router(tenant_id='noadmin',
                         set_context=True) as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                ctx = context.Context('', 'noadmin')
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    neutron_context=ctx)
                body = self._show('routers', r['router']['id'])
                net_id = body['router']['external_gateway_info']['network_id']
                self.assertEqual(net_id, s['subnet']['network_id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                body = self._show('routers', r['router']['id'])
                gw_info = body['router']['external_gateway_info']
                self.assertIsNone(gw_info)
    def test_create_router_port_with_device_id_of_other_teants_router(self):
        """Creating router-owned ports against another tenant's router: 409.

        (Method name typo "teants" kept so the test id stays stable.)
        """
        with self.router() as admin_router:
            with self.network(tenant_id='tenant_a',
                              set_context=True) as n:
                with self.subnet(network=n):
                    for device_owner in l3_constants.ROUTER_INTERFACE_OWNERS:
                        self._create_port(
                            self.fmt, n['network']['id'],
                            tenant_id='tenant_a',
                            device_id=admin_router['router']['id'],
                            device_owner=device_owner,
                            set_context=True,
                            expected_res_status=exc.HTTPConflict.code)
    def test_create_non_router_port_device_id_of_other_teants_router_update(
        self):
        """Retargeting device_owner onto another tenant's router is a 409."""
        # This tests that HTTPConflict is raised if we create a non-router
        # port that matches the device_id of another tenants router and then
        # we change the device_owner to be network:router_interface.
        with self.router() as admin_router:
            with self.network(tenant_id='tenant_a',
                              set_context=True) as n:
                with self.subnet(network=n):
                    for device_owner in l3_constants.ROUTER_INTERFACE_OWNERS:
                        port_res = self._create_port(
                            self.fmt, n['network']['id'],
                            tenant_id='tenant_a',
                            device_id=admin_router['router']['id'],
                            set_context=True)
                        port = self.deserialize(self.fmt, port_res)
                        neutron_context = context.Context('', 'tenant_a')
                        data = {'port': {'device_owner': device_owner}}
                        self._update('ports', port['port']['id'], data,
                                     neutron_context=neutron_context,
                                     expected_code=exc.HTTPConflict.code)
                        # clean up so each iteration starts fresh
                        self._delete('ports', port['port']['id'])
    def test_update_port_device_id_to_different_tenants_router(self):
        """A tenant cannot repoint its router-interface port's device_id at
        a router owned by a different tenant: the update must 409."""
        with self.router() as admin_router:
            with self.router(tenant_id='tenant_a',
                             set_context=True) as tenant_router:
                with self.network(tenant_id='tenant_a',
                                  set_context=True) as n:
                    with self.subnet(network=n) as s:
                        port = self._router_interface_action(
                            'add', tenant_router['router']['id'],
                            s['subnet']['id'], None, tenant_id='tenant_a')
                        neutron_context = context.Context('', 'tenant_a')
                        data = {'port':
                                {'device_id': admin_router['router']['id']}}
                        self._update('ports', port['port_id'], data,
                                     neutron_context=neutron_context,
                                     expected_code=exc.HTTPConflict.code)
                        # clean-up: detach the interface so the routers and
                        # subnet can be deleted on context-manager exit
                        self._router_interface_action(
                            'remove', tenant_router['router']['id'],
                            s['subnet']['id'], None, tenant_id='tenant_a')
def test_router_add_gateway_invalid_network_returns_400(self):
with self.router() as r:
self._add_external_gateway_to_router(
r['router']['id'],
"foobar", expected_code=exc.HTTPBadRequest.code)
def test_router_add_gateway_non_existent_network_returns_404(self):
with self.router() as r:
self._add_external_gateway_to_router(
r['router']['id'],
_uuid(), expected_code=exc.HTTPNotFound.code)
    def test_router_add_gateway_net_not_external_returns_400(self):
        """A network not flagged router:external cannot be a gateway (400)."""
        with self.router() as r:
            with self.subnet() as s:
                # intentionally do not set net as external
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'],
                    expected_code=exc.HTTPBadRequest.code)
def test_router_add_gateway_no_subnet_returns_400(self):
with self.router() as r:
with self.network() as n:
self._set_net_external(n['network']['id'])
self._add_external_gateway_to_router(
r['router']['id'],
n['network']['id'], expected_code=exc.HTTPBadRequest.code)
    def test_router_remove_interface_inuse_returns_409(self):
        """A router with an attached interface cannot be deleted (409)."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                self._delete('routers', r['router']['id'],
                             expected_code=exc.HTTPConflict.code)
                # remove interface so test can exit without errors
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
    def test_router_remove_interface_wrong_subnet_returns_400(self):
        """Removing an interface with a mismatched subnet/port pair is 400.

        The interface was added by port id; asking to remove it while
        naming an unrelated subnet together with that port must fail.
        """
        with self.router() as r:
            with self.subnet() as s:
                with self.port() as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  s['subnet']['id'],
                                                  p['port']['id'],
                                                  exc.HTTPBadRequest.code)
                    #remove properly to clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
    def test_router_remove_interface_returns_200(self):
        """Removing an interface by port id succeeds and echoes the same
        body that the corresponding add-interface call returned."""
        with self.router() as r:
            with self.port() as p:
                body = self._router_interface_action('add',
                                                     r['router']['id'],
                                                     None,
                                                     p['port']['id'])
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'],
                                              expected_body=body)
    def test_router_remove_interface_wrong_port_returns_404(self):
        """Removing an interface naming a port not attached to the router
        must return 404 Not Found."""
        with self.router() as r:
            with self.subnet():
                with self.port() as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # create another port for testing failure case
                    res = self._create_port(self.fmt, p['port']['network_id'])
                    p2 = self.deserialize(self.fmt, res)
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p2['port']['id'],
                                                  exc.HTTPNotFound.code)
                    # remove correct interface to cleanup
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # remove extra port created
                    self._delete('ports', p2['port']['id'])
def test_router_delete(self):
with self.router() as router:
router_id = router['router']['id']
req = self.new_show_request('router', router_id)
res = req.get_response(self._api_for_resource('router'))
self.assertEqual(res.status_int, 404)
    def test_router_delete_with_port_existed_returns_409(self):
        """Deleting a router fails with 409 while it has an interface;
        after removing the interface the deletion succeeds."""
        with self.subnet() as subnet:
            res = self._create_router(self.fmt, _uuid())
            router = self.deserialize(self.fmt, res)
            self._router_interface_action('add',
                                          router['router']['id'],
                                          subnet['subnet']['id'],
                                          None)
            self._delete('routers', router['router']['id'],
                         exc.HTTPConflict.code)
            self._router_interface_action('remove',
                                          router['router']['id'],
                                          subnet['subnet']['id'],
                                          None)
            self._delete('routers', router['router']['id'])
    def test_router_delete_with_floatingip_existed_returns_409(self):
        """A router routing an associated floating IP cannot be deleted.

        Builds the full topology (external gateway, internal interface,
        floating IP bound to a port), verifies deletion returns 409, then
        tears everything down in reverse order.
        """
        with self.port() as p:
            private_sub = {'subnet': {'id':
                                      p['port']['fixed_ips'][0]['subnet_id']}}
            with self.subnet(cidr='12.0.0.0/24') as public_sub:
                self._set_net_external(public_sub['subnet']['network_id'])
                res = self._create_router(self.fmt, _uuid())
                r = self.deserialize(self.fmt, res)
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    public_sub['subnet']['network_id'])
                self._router_interface_action('add', r['router']['id'],
                                              private_sub['subnet']['id'],
                                              None)
                res = self._create_floatingip(
                    self.fmt, public_sub['subnet']['network_id'],
                    port_id=p['port']['id'])
                self.assertEqual(res.status_int, exc.HTTPCreated.code)
                floatingip = self.deserialize(self.fmt, res)
                self._delete('routers', r['router']['id'],
                             expected_code=exc.HTTPConflict.code)
                # Cleanup
                self._delete('floatingips', floatingip['floatingip']['id'])
                self._router_interface_action('remove', r['router']['id'],
                                              private_sub['subnet']['id'],
                                              None)
                self._delete('routers', r['router']['id'])
    def test_router_show(self):
        """Showing a router returns the attributes it was created with."""
        name = 'router1'
        tenant_id = _uuid()
        # (field, expected value) pairs checked against the show response
        expected_value = [('name', name), ('tenant_id', tenant_id),
                          ('admin_state_up', True), ('status', 'ACTIVE'),
                          ('external_gateway_info', None)]
        with self.router(name='router1', admin_state_up=True,
                         tenant_id=tenant_id) as router:
            res = self._show('routers', router['router']['id'])
            for k, v in expected_value:
                self.assertEqual(res['router'][k], v)
    def test_network_update_external_failure(self):
        """A network serving as a router gateway cannot be made
        non-external: the update must 409."""
        with self.router() as r:
            with self.subnet() as s1:
                self._set_net_external(s1['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s1['subnet']['network_id'])
                self._update('networks', s1['subnet']['network_id'],
                             {'network': {external_net.EXTERNAL: False}},
                             expected_code=exc.HTTPConflict.code)
                # clean-up so the router/subnet fixtures can be deleted
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s1['subnet']['network_id'])
    def test_network_update_external(self):
        """An external network NOT used as any router's gateway may be
        switched back to non-external."""
        with self.router() as r:
            with self.network('test_net') as testnet:
                self._set_net_external(testnet['network']['id'])
                with self.subnet() as s1:
                    self._set_net_external(s1['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s1['subnet']['network_id'])
                    # testnet is unused by the router, so this must succeed
                    self._update('networks', testnet['network']['id'],
                                 {'network': {external_net.EXTERNAL: False}})
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        s1['subnet']['network_id'])
    def test_floatingip_crd_ops(self):
        """Create/read/delete round-trip for an associated floating IP."""
        with self.floatingip_with_assoc() as fip:
            self._validate_floating_ip(fip)
        # post-delete, check that it is really gone
        body = self._list('floatingips')
        self.assertEqual(len(body['floatingips']), 0)
        self._show('floatingips', fip['floatingip']['id'],
                   expected_code=exc.HTTPNotFound.code)
    def _test_floatingip_with_assoc_fails(self, plugin_method):
        """Helper: force ``plugin_method`` to raise during fip association.

        :param plugin_method: dotted path of the plugin method to patch.

        The floating IP creation must fail with 400 and must not leave a
        dangling floating-IP port behind (rollback check).
        """
        with self.subnet(cidr='200.0.0.0/24') as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            with self.port() as private_port:
                with self.router() as r:
                    sid = private_port['port']['fixed_ips'][0]['subnet_id']
                    private_sub = {'subnet': {'id': sid}}
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])
                    self._router_interface_action('add', r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
                    with mock.patch(plugin_method) as pl:
                        pl.side_effect = n_exc.BadRequest(
                            resource='floatingip',
                            msg='fake_error')
                        res = self._create_floatingip(
                            self.fmt,
                            public_sub['subnet']['network_id'],
                            port_id=private_port['port']['id'])
                        self.assertEqual(res.status_int, 400)
                        # no floating-IP port may survive the failed create
                        for p in self._list('ports')['ports']:
                            if (p['device_owner'] ==
                                l3_constants.DEVICE_OWNER_FLOATINGIP):
                                self.fail('garbage port is not deleted')
                    # clean-up
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        public_sub['subnet']['network_id'])
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
def test_floatingip_with_assoc_fails(self):
self._test_floatingip_with_assoc_fails(
'neutron.db.l3_db.L3_NAT_db_mixin._check_and_get_fip_assoc')
    def test_create_floatingip_with_assoc(
        self, expected_status=l3_constants.FLOATINGIP_STATUS_ACTIVE):
        """A floating IP created with a port association shows that port,
        a fixed address, a router, and the expected status.

        :param expected_status: status subclasses may override.
        """
        with self.floatingip_with_assoc() as fip:
            body = self._show('floatingips', fip['floatingip']['id'])
            self.assertEqual(body['floatingip']['id'],
                             fip['floatingip']['id'])
            self.assertEqual(body['floatingip']['port_id'],
                             fip['floatingip']['port_id'])
            self.assertEqual(expected_status, body['floatingip']['status'])
            self.assertIsNotNone(body['floatingip']['fixed_ip_address'])
            self.assertIsNotNone(body['floatingip']['router_id'])
    def test_floatingip_update(
        self, expected_status=l3_constants.FLOATINGIP_STATUS_ACTIVE):
        """Associating a port via floating-IP update fills in port_id and
        fixed_ip_address.

        :param expected_status: status subclasses may override.
        """
        with self.port() as p:
            private_sub = {'subnet': {'id':
                                      p['port']['fixed_ips'][0]['subnet_id']}}
            with self.floatingip_no_assoc(private_sub) as fip:
                # unassociated fip starts with no port / fixed address
                body = self._show('floatingips', fip['floatingip']['id'])
                self.assertIsNone(body['floatingip']['port_id'])
                self.assertIsNone(body['floatingip']['fixed_ip_address'])
                self.assertEqual(body['floatingip']['status'], expected_status)
                port_id = p['port']['id']
                ip_address = p['port']['fixed_ips'][0]['ip_address']
                body = self._update('floatingips', fip['floatingip']['id'],
                                    {'floatingip': {'port_id': port_id}})
                self.assertEqual(body['floatingip']['port_id'], port_id)
                self.assertEqual(body['floatingip']['fixed_ip_address'],
                                 ip_address)
    def test_floatingip_create_different_fixed_ip_same_port(self):
        '''This tests that it is possible to delete a port that has
        multiple floating ip addresses associated with it (each floating
        address associated with a unique fixed address).
        '''
        with self.router() as r:
            with self.subnet(cidr='11.0.0.0/24') as public_sub:
                self._set_net_external(public_sub['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    public_sub['subnet']['network_id'])
                with self.subnet() as private_sub:
                    # give the port two fixed IPs from the top of the CIDR
                    ip_range = list(netaddr.IPNetwork(
                        private_sub['subnet']['cidr']))
                    fixed_ips = [{'ip_address': str(ip_range[-3])},
                                 {'ip_address': str(ip_range[-2])}]
                    self._router_interface_action(
                        'add', r['router']['id'],
                        private_sub['subnet']['id'], None)
                    with self.port(subnet=private_sub,
                                   fixed_ips=fixed_ips) as p:
                        # one floating IP per fixed address on the port
                        fip1 = self._make_floatingip(
                            self.fmt,
                            public_sub['subnet']['network_id'],
                            p['port']['id'],
                            fixed_ip=str(ip_range[-2]))
                        fip2 = self._make_floatingip(
                            self.fmt,
                            public_sub['subnet']['network_id'],
                            p['port']['id'],
                            fixed_ip=str(ip_range[-3]))
                    # Test that floating ips are assigned successfully.
                    body = self._show('floatingips',
                                      fip1['floatingip']['id'])
                    self.assertEqual(
                        body['floatingip']['port_id'],
                        fip1['floatingip']['port_id'])
                    body = self._show('floatingips',
                                      fip2['floatingip']['id'])
                    self.assertEqual(
                        body['floatingip']['port_id'],
                        fip2['floatingip']['port_id'])
                    self._delete('ports', p['port']['id'])
                    # Test that port has been successfully deleted.
                    body = self._show('ports', p['port']['id'],
                                      expected_code=exc.HTTPNotFound.code)
                    # clean-up: floating IPs, interface, then gateway
                    for fip in [fip1, fip2]:
                        self._delete('floatingips',
                                     fip['floatingip']['id'])
                    self._router_interface_action(
                        'remove', r['router']['id'],
                        private_sub['subnet']['id'], None)
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    public_sub['subnet']['network_id'])
    def test_floatingip_update_different_fixed_ip_same_port(self):
        """A floating IP can be re-targeted to a different fixed address on
        the same port via update."""
        with self.subnet() as s:
            ip_range = list(netaddr.IPNetwork(s['subnet']['cidr']))
            fixed_ips = [{'ip_address': str(ip_range[-3])},
                         {'ip_address': str(ip_range[-2])}]
            with self.port(subnet=s, fixed_ips=fixed_ips) as p:
                with self.floatingip_with_assoc(
                    port_id=p['port']['id'],
                    fixed_ip=str(ip_range[-3])) as fip:
                    body = self._show('floatingips', fip['floatingip']['id'])
                    self.assertEqual(fip['floatingip']['id'],
                                     body['floatingip']['id'])
                    self.assertEqual(fip['floatingip']['port_id'],
                                     body['floatingip']['port_id'])
                    self.assertEqual(str(ip_range[-3]),
                                     body['floatingip']['fixed_ip_address'])
                    self.assertIsNotNone(body['floatingip']['router_id'])
                    # switch to the other fixed IP on the same port
                    body_2 = self._update(
                        'floatingips', fip['floatingip']['id'],
                        {'floatingip': {'port_id': p['port']['id'],
                                        'fixed_ip_address': str(ip_range[-2])}
                         })
                    self.assertEqual(fip['floatingip']['port_id'],
                                     body_2['floatingip']['port_id'])
                    self.assertEqual(str(ip_range[-2]),
                                     body_2['floatingip']['fixed_ip_address'])
    def test_floatingip_update_different_router(self):
        """Re-associating a floating IP across ports updates its router_id,
        including moving it between two distinct routers."""
        # Create subnet with different CIDRs to account for plugins which
        # do not support overlapping IPs
        with contextlib.nested(self.subnet(cidr='10.0.0.0/24'),
                               self.subnet(cidr='10.0.1.0/24')) as (
                s1, s2):
            with contextlib.nested(self.port(subnet=s1),
                                   self.port(subnet=s2)) as (p1, p2):
                private_sub1 = {'subnet':
                                {'id':
                                 p1['port']['fixed_ips'][0]['subnet_id']}}
                private_sub2 = {'subnet':
                                {'id':
                                 p2['port']['fixed_ips'][0]['subnet_id']}}
                with self.subnet(cidr='12.0.0.0/24') as public_sub:
                    with contextlib.nested(
                            self.floatingip_no_assoc_with_public_sub(
                                private_sub1, public_sub=public_sub),
                            self.floatingip_no_assoc_with_public_sub(
                                private_sub2, public_sub=public_sub)) as (
                                    (fip1, r1), (fip2, r2)):

                        # check a fip has neither a port nor a fixed address
                        def assert_no_assoc(fip):
                            body = self._show('floatingips',
                                              fip['floatingip']['id'])
                            self.assertIsNone(body['floatingip']['port_id'])
                            self.assertIsNone(
                                body['floatingip']['fixed_ip_address'])

                        assert_no_assoc(fip1)
                        assert_no_assoc(fip2)

                        # associate fip with a port and return the router
                        # the association resolved to
                        def associate_and_assert(fip, port):
                            port_id = port['port']['id']
                            ip_address = (port['port']['fixed_ips']
                                          [0]['ip_address'])
                            body = self._update(
                                'floatingips', fip['floatingip']['id'],
                                {'floatingip': {'port_id': port_id}})
                            self.assertEqual(body['floatingip']['port_id'],
                                             port_id)
                            self.assertEqual(
                                body['floatingip']['fixed_ip_address'],
                                ip_address)
                            return body['floatingip']['router_id']

                        fip1_r1_res = associate_and_assert(fip1, p1)
                        self.assertEqual(fip1_r1_res, r1['router']['id'])
                        # The following operation will associate the floating
                        # ip to a different router
                        fip1_r2_res = associate_and_assert(fip1, p2)
                        self.assertEqual(fip1_r2_res, r2['router']['id'])
                        fip2_r1_res = associate_and_assert(fip2, p1)
                        self.assertEqual(fip2_r1_res, r1['router']['id'])
                        # disassociate fip1
                        self._update(
                            'floatingips', fip1['floatingip']['id'],
                            {'floatingip': {'port_id': None}})
                        fip2_r2_res = associate_and_assert(fip2, p2)
                        self.assertEqual(fip2_r2_res, r2['router']['id'])
    def test_floatingip_port_delete(self):
        """Deleting an associated port resets the floating IP's port_id,
        fixed_ip_address and router_id back to None."""
        with self.subnet() as private_sub:
            with self.floatingip_no_assoc(private_sub) as fip:
                with self.port(subnet=private_sub) as p:
                    body = self._update('floatingips', fip['floatingip']['id'],
                                        {'floatingip':
                                         {'port_id': p['port']['id']}})
                # note: once this port goes out of scope, the port will be
                # deleted, which is what we want to test. We want to confirm
                # that the fields are set back to None
                self._delete('ports', p['port']['id'])
                body = self._show('floatingips', fip['floatingip']['id'])
                self.assertEqual(body['floatingip']['id'],
                                 fip['floatingip']['id'])
                self.assertIsNone(body['floatingip']['port_id'])
                self.assertIsNone(body['floatingip']['fixed_ip_address'])
                self.assertIsNone(body['floatingip']['router_id'])
def test_two_fips_one_port_invalid_return_409(self):
with self.floatingip_with_assoc() as fip1:
res = self._create_floatingip(
self.fmt,
fip1['floatingip']['floating_network_id'],
fip1['floatingip']['port_id'])
self.assertEqual(res.status_int, exc.HTTPConflict.code)
    def test_floating_ip_direct_port_delete_returns_409(self):
        """Directly deleting the internal floating-IP port is refused (409)."""
        found = False
        with self.floatingip_with_assoc():
            for p in self._list('ports')['ports']:
                if p['device_owner'] == l3_constants.DEVICE_OWNER_FLOATINGIP:
                    self._delete('ports', p['id'],
                                 expected_code=exc.HTTPConflict.code)
                    found = True
        # guard against the loop never finding a floating-IP port
        self.assertTrue(found)
    def _test_floatingip_with_invalid_create_port(self, plugin_class):
        """Helper: make ``plugin_class``.create_port return a port with no
        fixed IPs; floating-IP creation must then fail with 400.

        :param plugin_class: dotted path of the core plugin class to patch.
        """
        with self.port() as p:
            private_sub = {'subnet': {'id':
                                      p['port']['fixed_ips'][0]['subnet_id']}}
            with self.subnet(cidr='12.0.0.0/24') as public_sub:
                self._set_net_external(public_sub['subnet']['network_id'])
                res = self._create_router(self.fmt, _uuid())
                r = self.deserialize(self.fmt, res)
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    public_sub['subnet']['network_id'])
                self._router_interface_action(
                    'add', r['router']['id'],
                    private_sub['subnet']['id'],
                    None)
                with mock.patch(plugin_class + '.create_port') as createport:
                    # a port without fixed_ips is invalid for a floating IP
                    createport.return_value = {'fixed_ips': []}
                    res = self._create_floatingip(
                        self.fmt, public_sub['subnet']['network_id'],
                        port_id=p['port']['id'])
                    self.assertEqual(res.status_int,
                                     exc.HTTPBadRequest.code)
                    # clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  private_sub
                                                  ['subnet']['id'],
                                                  None)
                    self._delete('routers', r['router']['id'])
def test_floatingip_with_invalid_create_port(self):
self._test_floatingip_with_invalid_create_port(
'neutron.db.db_base_plugin_v2.NeutronDbPluginV2')
    def test_create_floatingip_no_ext_gateway_return_404(self):
        """Without a router gateway to the external net, associating a
        floating IP to a port must fail with 404."""
        with self.subnet() as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            with self.port() as private_port:
                with self.router():
                    res = self._create_floatingip(
                        self.fmt,
                        public_sub['subnet']['network_id'],
                        port_id=private_port['port']['id'])
                    # this should be some kind of error
                    self.assertEqual(res.status_int, exc.HTTPNotFound.code)
    def test_create_floating_non_ext_network_returns_400(self):
        """Creating a floating IP on a non-external network must 400."""
        with self.subnet() as public_sub:
            # normally we would set the network of public_sub to be
            # external, but the point of this test is to handle when
            # that is not the case
            with self.router():
                res = self._create_floatingip(
                    self.fmt,
                    public_sub['subnet']['network_id'])
                self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
    def test_create_floatingip_no_public_subnet_returns_400(self):
        """A floating network without any subnet yields 400 on fip create."""
        with self.network() as public_network:
            with self.port() as private_port:
                with self.router() as r:
                    sid = private_port['port']['fixed_ips'][0]['subnet_id']
                    private_sub = {'subnet': {'id': sid}}
                    self._router_interface_action('add', r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
                    res = self._create_floatingip(
                        self.fmt,
                        public_network['network']['id'],
                        port_id=private_port['port']['id'])
                    self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
                    # cleanup
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  private_sub['subnet']['id'],
                                                  None)
def test_create_floatingip_invalid_floating_network_id_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, 'iamnotanuuid',
uuidutils.generate_uuid(), '192.168.0.1')
self.assertEqual(res.status_int, 400)
def test_create_floatingip_invalid_floating_port_id_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
'iamnotanuuid', '192.168.0.1')
self.assertEqual(res.status_int, 400)
def test_create_floatingip_invalid_fixed_ip_address_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
uuidutils.generate_uuid(), 'iamnotnanip')
self.assertEqual(res.status_int, 400)
    def test_floatingip_list_with_sort(self):
        """Listing floating IPs honours descending sort on address."""
        with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
                               self.subnet(cidr="11.0.0.0/24"),
                               self.subnet(cidr="12.0.0.0/24")
                               ) as (s1, s2, s3):
            network_id1 = s1['subnet']['network_id']
            network_id2 = s2['subnet']['network_id']
            network_id3 = s3['subnet']['network_id']
            self._set_net_external(network_id1)
            self._set_net_external(network_id2)
            self._set_net_external(network_id3)
            fp1 = self._make_floatingip(self.fmt, network_id1)
            fp2 = self._make_floatingip(self.fmt, network_id2)
            fp3 = self._make_floatingip(self.fmt, network_id3)
            try:
                self._test_list_with_sort('floatingip', (fp3, fp2, fp1),
                                          [('floating_ip_address', 'desc')])
            finally:
                # always remove the fips so the subnets can be deleted
                self._delete('floatingips', fp1['floatingip']['id'])
                self._delete('floatingips', fp2['floatingip']['id'])
                self._delete('floatingips', fp3['floatingip']['id'])
    def test_floatingip_list_with_port_id(self):
        """Floating-IP list can be filtered by port_id; unknown ids match
        nothing."""
        with self.floatingip_with_assoc() as fip:
            port_id = fip['floatingip']['port_id']
            res = self._list('floatingips',
                             query_params="port_id=%s" % port_id)
            self.assertEqual(len(res['floatingips']), 1)
            res = self._list('floatingips', query_params="port_id=aaa")
            self.assertEqual(len(res['floatingips']), 0)
    def test_floatingip_list_with_pagination(self):
        """Floating-IP list pages forward with limit 2 over 3 items."""
        with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
                               self.subnet(cidr="11.0.0.0/24"),
                               self.subnet(cidr="12.0.0.0/24")
                               ) as (s1, s2, s3):
            network_id1 = s1['subnet']['network_id']
            network_id2 = s2['subnet']['network_id']
            network_id3 = s3['subnet']['network_id']
            self._set_net_external(network_id1)
            self._set_net_external(network_id2)
            self._set_net_external(network_id3)
            fp1 = self._make_floatingip(self.fmt, network_id1)
            fp2 = self._make_floatingip(self.fmt, network_id2)
            fp3 = self._make_floatingip(self.fmt, network_id3)
            try:
                self._test_list_with_pagination(
                    'floatingip', (fp1, fp2, fp3),
                    ('floating_ip_address', 'asc'), 2, 2)
            finally:
                # always remove the fips so the subnets can be deleted
                self._delete('floatingips', fp1['floatingip']['id'])
                self._delete('floatingips', fp2['floatingip']['id'])
                self._delete('floatingips', fp3['floatingip']['id'])
    def test_floatingip_list_with_pagination_reverse(self):
        """Floating-IP list pages backward with limit 2 over 3 items."""
        with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
                               self.subnet(cidr="11.0.0.0/24"),
                               self.subnet(cidr="12.0.0.0/24")
                               ) as (s1, s2, s3):
            network_id1 = s1['subnet']['network_id']
            network_id2 = s2['subnet']['network_id']
            network_id3 = s3['subnet']['network_id']
            self._set_net_external(network_id1)
            self._set_net_external(network_id2)
            self._set_net_external(network_id3)
            fp1 = self._make_floatingip(self.fmt, network_id1)
            fp2 = self._make_floatingip(self.fmt, network_id2)
            fp3 = self._make_floatingip(self.fmt, network_id3)
            try:
                self._test_list_with_pagination_reverse(
                    'floatingip', (fp1, fp2, fp3),
                    ('floating_ip_address', 'asc'), 2, 2)
            finally:
                # always remove the fips so the subnets can be deleted
                self._delete('floatingips', fp1['floatingip']['id'])
                self._delete('floatingips', fp2['floatingip']['id'])
                self._delete('floatingips', fp3['floatingip']['id'])
    def test_floatingip_delete_router_intf_with_subnet_id_returns_409(self):
        """Removing (by subnet id) the router interface that routes an
        associated floating IP must fail with 409."""
        found = False
        with self.floatingip_with_assoc():
            for p in self._list('ports')['ports']:
                if p['device_owner'] == l3_constants.DEVICE_OWNER_ROUTER_INTF:
                    subnet_id = p['fixed_ips'][0]['subnet_id']
                    router_id = p['device_id']
                    self._router_interface_action(
                        'remove', router_id, subnet_id, None,
                        expected_code=exc.HTTPConflict.code)
                    found = True
                    break
        # guard against the loop never finding a router-interface port
        self.assertTrue(found)
    def test_floatingip_delete_router_intf_with_port_id_returns_409(self):
        """Removing (by port id) the router interface that routes an
        associated floating IP must fail with 409."""
        found = False
        with self.floatingip_with_assoc():
            for p in self._list('ports')['ports']:
                if p['device_owner'] == l3_constants.DEVICE_OWNER_ROUTER_INTF:
                    router_id = p['device_id']
                    self._router_interface_action(
                        'remove', router_id, None, p['id'],
                        expected_code=exc.HTTPConflict.code)
                    found = True
                    break
        # guard against the loop never finding a router-interface port
        self.assertTrue(found)
    def test_router_delete_subnet_inuse_returns_409(self):
        """A subnet attached to a router cannot be deleted (409)."""
        with self.router() as r:
            with self.subnet() as s:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
                # subnet cannot be delete as it's attached to a router
                self._delete('subnets', s['subnet']['id'],
                             expected_code=exc.HTTPConflict.code)
                # remove interface so test can exit without errors
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              s['subnet']['id'],
                                              None)
    def test_delete_ext_net_with_disassociated_floating_ips(self):
        """Exercise external-net teardown while a disassociated floating IP
        exists on it.

        NOTE(review): the visible body only creates the floating IP; the
        network/subnet deletion presumably happens in the context-manager
        teardown — confirm against the fixture helpers.
        """
        with self.network() as net:
            net_id = net['network']['id']
            self._set_net_external(net_id)
            with self.subnet(network=net):
                self._make_floatingip(self.fmt, net_id)
    def test_create_floatingip_with_specific_ip(self):
        """An explicitly requested floating address inside the subnet is
        honoured."""
        with self.subnet(cidr='10.0.0.0/24') as s:
            network_id = s['subnet']['network_id']
            self._set_net_external(network_id)
            fp = self._make_floatingip(self.fmt, network_id,
                                       floating_ip='10.0.0.10')
            try:
                self.assertEqual(fp['floatingip']['floating_ip_address'],
                                 '10.0.0.10')
            finally:
                # always remove the fip so the subnet can be deleted
                self._delete('floatingips', fp['floatingip']['id'])
    def test_create_floatingip_with_specific_ip_out_of_allocation(self):
        """A requested floating address outside the allocation pool (but
        inside the subnet) is still honoured."""
        with self.subnet(cidr='10.0.0.0/24',
                         allocation_pools=[
                             {'start': '10.0.0.10', 'end': '10.0.0.20'}]
                         ) as s:
            network_id = s['subnet']['network_id']
            self._set_net_external(network_id)
            fp = self._make_floatingip(self.fmt, network_id,
                                       floating_ip='10.0.0.30')
            try:
                self.assertEqual(fp['floatingip']['floating_ip_address'],
                                 '10.0.0.30')
            finally:
                # always remove the fip so the subnet can be deleted
                self._delete('floatingips', fp['floatingip']['id'])
    def test_create_floatingip_with_specific_ip_non_admin(self):
        """Non-admin users may not request a specific floating address
        (403 Forbidden)."""
        ctx = context.Context('user_id', 'tenant_id')
        with self.subnet(cidr='10.0.0.0/24') as s:
            network_id = s['subnet']['network_id']
            self._set_net_external(network_id)
            self._make_floatingip(self.fmt, network_id,
                                  set_context=ctx,
                                  floating_ip='10.0.0.10',
                                  http_status=exc.HTTPForbidden.code)
def test_create_floatingip_with_specific_ip_out_of_subnet(self):
with self.subnet(cidr='10.0.0.0/24') as s:
network_id = s['subnet']['network_id']
self._set_net_external(network_id)
self._make_floatingip(self.fmt, network_id,
floating_ip='10.0.1.10',
http_status=exc.HTTPBadRequest.code)
    def test_create_floatingip_with_duplicated_specific_ip(self):
        """Requesting a floating address already in use must 409."""
        with self.subnet(cidr='10.0.0.0/24') as s:
            network_id = s['subnet']['network_id']
            self._set_net_external(network_id)
            fp1 = self._make_floatingip(self.fmt, network_id,
                                        floating_ip='10.0.0.10')
            try:
                self._make_floatingip(self.fmt, network_id,
                                      floating_ip='10.0.0.10',
                                      http_status=exc.HTTPConflict.code)
            finally:
                # always remove the first fip so the subnet can be deleted
                self._delete('floatingips', fp1['floatingip']['id'])
class L3AgentDbTestCaseBase(L3NatTestCaseMixin):
    """Unit tests for methods called by the L3 agent."""

    def test_l3_agent_routers_query_interfaces(self):
        """get_sync_data reports a router's interface with its subnet."""
        with self.router() as r:
            with self.port() as p:
                self._router_interface_action('add',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'])
                routers = self.plugin.get_sync_data(
                    context.get_admin_context(), None)
                self.assertEqual(1, len(routers))
                interfaces = routers[0][l3_constants.INTERFACE_KEY]
                self.assertEqual(1, len(interfaces))
                subnet_id = interfaces[0]['subnet']['id']
                wanted_subnetid = p['port']['fixed_ips'][0]['subnet_id']
                self.assertEqual(wanted_subnetid, subnet_id)
                # clean-up
                self._router_interface_action('remove',
                                              r['router']['id'],
                                              None,
                                              p['port']['id'])

    def test_l3_agent_routers_query_ignore_interfaces_with_moreThanOneIp(self):
        """Interfaces carrying more than one fixed IP are excluded from
        get_sync_data output."""
        with self.router() as r:
            with self.subnet(cidr='9.0.1.0/24') as subnet:
                with self.port(subnet=subnet,
                               fixed_ips=[{'ip_address': '9.0.1.3'}]) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # give the interface port a second fixed IP
                    port = {'port': {'fixed_ips':
                                     [{'ip_address': '9.0.1.4',
                                       'subnet_id': subnet['subnet']['id']},
                                      {'ip_address': '9.0.1.5',
                                       'subnet_id': subnet['subnet']['id']}]}}
                    ctx = context.get_admin_context()
                    self.core_plugin.update_port(ctx, p['port']['id'], port)
                    routers = self.plugin.get_sync_data(ctx, None)
                    self.assertEqual(1, len(routers))
                    interfaces = routers[0].get(l3_constants.INTERFACE_KEY, [])
                    self.assertEqual(1, len(interfaces))
                    # clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])

    def test_l3_agent_routers_query_gateway(self):
        """get_sync_data exposes the router's gateway port and its subnet."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                routers = self.plugin.get_sync_data(
                    context.get_admin_context(), [r['router']['id']])
                self.assertEqual(1, len(routers))
                gw_port = routers[0]['gw_port']
                self.assertEqual(s['subnet']['id'], gw_port['subnet']['id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])

    def test_l3_agent_routers_query_floatingips(self):
        """get_sync_data exposes the router's associated floating IPs."""
        with self.floatingip_with_assoc() as fip:
            routers = self.plugin.get_sync_data(
                context.get_admin_context(), [fip['floatingip']['router_id']])
            self.assertEqual(1, len(routers))
            floatingips = routers[0][l3_constants.FLOATINGIP_KEY]
            self.assertEqual(1, len(floatingips))
            self.assertEqual(floatingips[0]['id'],
                             fip['floatingip']['id'])
            self.assertEqual(floatingips[0]['port_id'],
                             fip['floatingip']['port_id'])
            self.assertIsNotNone(floatingips[0]['fixed_ip_address'])
            self.assertIsNotNone(floatingips[0]['router_id'])

    def _test_notify_op_agent(self, target_func, *args):
        """Run ``target_func(*args, notifyApi)`` with the L3 agent RPC
        notifier mocked, so the callee can assert on notification calls."""
        l3_rpc_agent_api_str = (
            'neutron.api.rpc.agentnotifiers.l3_rpc_agent_api.L3AgentNotifyAPI')
        with mock.patch(l3_rpc_agent_api_str):
            plugin = manager.NeutronManager.get_service_plugins()[
                service_constants.L3_ROUTER_NAT]
            notifyApi = plugin.l3_rpc_notifier
            kargs = [item for item in args]
            kargs.append(notifyApi)
            target_func(*kargs)

    def _test_router_gateway_op_agent(self, notifyApi):
        """Gateway add + remove must notify the agent twice."""
        with self.router() as r:
            with self.subnet() as s:
                self._set_net_external(s['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s['subnet']['network_id'])
                self.assertEqual(
                    2, notifyApi.routers_updated.call_count)

    def test_router_gateway_op_agent(self):
        self._test_notify_op_agent(self._test_router_gateway_op_agent)

    def _test_interfaces_op_agent(self, r, notifyApi):
        """Interface add + remove must notify the agent twice."""
        with self.port() as p:
            self._router_interface_action('add',
                                          r['router']['id'],
                                          None,
                                          p['port']['id'])
            # clean-up
            self._router_interface_action('remove',
                                          r['router']['id'],
                                          None,
                                          p['port']['id'])
        self.assertEqual(2, notifyApi.routers_updated.call_count)

    def test_interfaces_op_agent(self):
        with self.router() as r:
            self._test_notify_op_agent(
                self._test_interfaces_op_agent, r)

    def _test_floatingips_op_agent(self, notifyApi):
        """The full floating-IP lifecycle must notify the agent six times."""
        with self.floatingip_with_assoc():
            pass
        # add gateway, add interface, associate, deletion of floatingip,
        # delete gateway, delete interface
        self.assertEqual(6, notifyApi.routers_updated.call_count)

    def test_floatingips_op_agent(self):
        self._test_notify_op_agent(self._test_floatingips_op_agent)
class L3BaseForIntTests(test_db_plugin.NeutronDbPluginV2TestCase,
                        testlib_plugin.NotificationSetupHelper):
    """Base for tests against a core plugin with integrated L3 support."""

    # when True, router rescheduling checks are patched out in setUp
    mock_rescheduling = True

    def setUp(self, plugin=None, ext_mgr=None, service_plugins=None):
        """Configure the integrated L3 plugin and notification driver.

        :param plugin: dotted path of the core plugin (defaults to the
            test TestL3NatIntPlugin).
        :param ext_mgr: extension manager (defaults to the L3 one).
        :param service_plugins: passed through to the base setUp.
        """
        if not plugin:
            plugin = 'neutron.tests.unit.test_l3_plugin.TestL3NatIntPlugin'
        # for these tests we need to enable overlapping ips
        cfg.CONF.set_default('allow_overlapping_ips', True)
        ext_mgr = ext_mgr or L3TestExtensionManager()
        if self.mock_rescheduling:
            mock.patch('%s._check_router_needs_rescheduling' % plugin,
                       new=lambda *a: False).start()
        super(L3BaseForIntTests, self).setUp(plugin=plugin, ext_mgr=ext_mgr,
                                             service_plugins=service_plugins)
        self.setup_notification_driver()
class L3BaseForSepTests(test_db_plugin.NeutronDbPluginV2TestCase,
                        testlib_plugin.NotificationSetupHelper):
    """Base for tests where L3 runs as a separate service plugin."""

    def setUp(self, plugin=None, ext_mgr=None):
        """Configure a core plugin without L3 plus the L3 service plugin."""
        # the plugin without L3 support
        if not plugin:
            plugin = 'neutron.tests.unit.test_l3_plugin.TestNoL3NatPlugin'
        # the L3 service plugin
        l3_plugin = ('neutron.tests.unit.test_l3_plugin.'
                     'TestL3NatServicePlugin')
        service_plugins = {'l3_plugin_name': l3_plugin}
        # for these tests we need to enable overlapping ips
        cfg.CONF.set_default('allow_overlapping_ips', True)
        if not ext_mgr:
            ext_mgr = L3TestExtensionManager()
        super(L3BaseForSepTests, self).setUp(plugin=plugin, ext_mgr=ext_mgr,
                                             service_plugins=service_plugins)
        self.setup_notification_driver()
class L3NatDBIntAgentSchedulingTestCase(L3BaseForIntTests,
L3NatTestCaseMixin,
test_agent_ext_plugin.
AgentDBTestMixIn):
"""Unit tests for core plugin with L3 routing and scheduling integrated."""
    def setUp(self, plugin='neutron.tests.unit.test_l3_plugin.'
                           'TestL3NatIntAgentSchedulingPlugin',
              ext_mgr=None, service_plugins=None):
        """Use the scheduling-aware plugin with real rescheduling enabled."""
        # scheduling behaviour is under test, so do not mock it out
        self.mock_rescheduling = False
        super(L3NatDBIntAgentSchedulingTestCase, self).setUp(
            plugin, ext_mgr, service_plugins)
        self.adminContext = context.get_admin_context()
def _assert_router_on_agent(self, router_id, agent_host):
plugin = manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT)
agents = plugin.list_l3_agents_hosting_router(
self.adminContext, router_id)['agents']
self.assertEqual(len(agents), 1)
self.assertEqual(agents[0]['host'], agent_host)
def test_update_gateway_agent_exists_supporting_network(self):
with contextlib.nested(self.router(),
self.subnet(),
self.subnet()) as (r, s1, s2):
self._set_net_external(s1['subnet']['network_id'])
l3_rpc_cb = l3_rpc.L3RpcCallback()
self._register_one_l3_agent(
host='host1',
ext_net_id=s1['subnet']['network_id'])
self._register_one_l3_agent(
host='host2', internal_only=False,
ext_net_id=s2['subnet']['network_id'])
l3_rpc_cb.sync_routers(self.adminContext,
host='host1')
self._assert_router_on_agent(r['router']['id'], 'host1')
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host1')
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s2['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host2')
self._remove_external_gateway_from_router(
r['router']['id'],
s2['subnet']['network_id'])
def test_update_gateway_agent_exists_supporting_multiple_network(self):
with contextlib.nested(self.router(),
self.subnet(),
self.subnet()) as (r, s1, s2):
self._set_net_external(s1['subnet']['network_id'])
l3_rpc_cb = l3_rpc.L3RpcCallback()
self._register_one_l3_agent(
host='host1',
ext_net_id=s1['subnet']['network_id'])
self._register_one_l3_agent(
host='host2', internal_only=False,
ext_net_id='', ext_bridge='')
l3_rpc_cb.sync_routers(self.adminContext,
host='host1')
self._assert_router_on_agent(r['router']['id'], 'host1')
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host1')
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s2['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host2')
self._remove_external_gateway_from_router(
r['router']['id'],
s2['subnet']['network_id'])
def test_router_update_gateway_no_eligible_l3_agent(self):
with self.router() as r:
with self.subnet() as s1:
with self.subnet() as s2:
self._set_net_external(s1['subnet']['network_id'])
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'],
expected_code=exc.HTTPBadRequest.code)
class L3RpcCallbackTestCase(base.BaseTestCase):
    """Tests for L3RpcCallback with its plugin properties mocked out."""

    def setUp(self):
        super(L3RpcCallbackTestCase, self).setUp()
        # Patch the (core) 'plugin' and 'l3plugin' properties so the
        # callback never reaches a real NeutronManager.
        self.mock_plugin = mock.patch.object(
            l3_rpc.L3RpcCallback,
            'plugin', new_callable=mock.PropertyMock).start()
        self.mock_l3plugin = mock.patch.object(
            l3_rpc.L3RpcCallback,
            'l3plugin', new_callable=mock.PropertyMock).start()
        self.l3_rpc_cb = l3_rpc.L3RpcCallback()

    def test__ensure_host_set_on_port_update_on_concurrent_delete(self):
        # A PortNotFound raised by update_port (port deleted concurrently)
        # must be swallowed and logged at debug level, not propagated.
        port_id = 'foo_port_id'
        port = {
            'id': port_id,
            'device_owner': 'compute:None',
            portbindings.HOST_ID: '',
            portbindings.VIF_TYPE: portbindings.VIF_TYPE_BINDING_FAILED
        }
        router_id = 'foo_router_id'
        self.l3_rpc_cb.plugin.update_port.side_effect = n_exc.PortNotFound(
            port_id=port_id)
        with mock.patch.object(l3_rpc.LOG, 'debug') as mock_log:
            self.l3_rpc_cb._ensure_host_set_on_port(
                mock.ANY, mock.ANY, port, router_id)
        self.l3_rpc_cb.plugin.update_port.assert_called_once_with(
            mock.ANY, port_id, {'port': {'binding:host_id': mock.ANY}})
        self.assertTrue(mock_log.call_count)
        expected_message = ('Port foo_port_id not found while updating '
                            'agent binding for router foo_router_id.')
        # Render the lazily-formatted log call (format %-args) and compare.
        actual_message = mock_log.call_args[0][0] % mock_log.call_args[0][1]
        self.assertEqual(expected_message, actual_message)
class L3AgentDbIntTestCase(L3BaseForIntTests, L3AgentDbTestCaseBase):

    """Tests for L3-agent-facing methods when the core plugin itself
    implements L3 routing.
    """

    def setUp(self):
        super(L3AgentDbIntTestCase, self).setUp()
        # Core plugin and L3 plugin are the very same object here.
        plugin = TestL3NatIntPlugin()
        self.core_plugin = plugin
        self.plugin = plugin
class L3AgentDbSepTestCase(L3BaseForSepTests, L3AgentDbTestCaseBase):

    """Tests for L3-agent-facing methods when a separate service plugin
    implements L3 routing.
    """

    def setUp(self):
        super(L3AgentDbSepTestCase, self).setUp()
        # Distinct objects: an L3-less core plugin plus the L3 service plugin.
        self.core_plugin = TestNoL3NatPlugin()
        self.plugin = TestL3NatServicePlugin()
class L3NatDBIntTestCase(L3BaseForIntTests, L3NatTestCaseBase):

    """Unit tests for core plugin with L3 routing integrated."""
class L3NatDBSepTestCase(L3BaseForSepTests, L3NatTestCaseBase):

    """Unit tests for a separate L3 routing service plugin."""

    def test_port_deletion_prevention_handles_missing_port(self):
        # An unknown port id must be treated as "nothing to protect"
        # rather than raising.
        l3_plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        result = l3_plugin.prevent_l3_port_deletion(
            context.get_admin_context(), 'fakeid')
        self.assertIsNone(result)
|
{
"content_hash": "edc23facaecb889bda5fdeea0c986f1f",
"timestamp": "",
"source": "github",
"line_count": 2396,
"max_line_length": 79,
"avg_line_length": 47.77170283806344,
"alnum_prop": 0.47553315102960836,
"repo_name": "cloudbase/neutron-virtualbox",
"id": "e44251a1d7b57d00efc1bde114efe765df55f823",
"size": "115091",
"binary": false,
"copies": "1",
"ref": "refs/heads/virtualbox_agent",
"path": "neutron/tests/unit/test_l3_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "Python",
"bytes": "8448838"
},
{
"name": "Shell",
"bytes": "12510"
}
],
"symlink_target": ""
}
|
import warnings
from datetime import date
from unittest import TestCase
from workalendar.core import Calendar
class GenericCalendarTest(TestCase):
    """Base test case for calendar implementations."""
    # Calendar implementation under test; subclasses override this.
    cal_class = Calendar

    def setUp(self):
        # Silence warnings emitted by calendar classes during the tests.
        warnings.simplefilter("ignore")
        self.year = date.today().year
        self.cal = self.cal_class()
|
{
"content_hash": "303ca1d3931f947b3451086c4e6cd033",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 39,
"avg_line_length": 22.214285714285715,
"alnum_prop": 0.7106109324758842,
"repo_name": "sayoun/workalendar",
"id": "3107c1a4e182803da9116c3273375e16cd337735",
"size": "311",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "workalendar/tests/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1164"
},
{
"name": "Python",
"bytes": "383844"
}
],
"symlink_target": ""
}
|
import argparse, sys
from base.xmlserializer import Serializable
from base.applog import *
class Configuration(Serializable):
    """XML-backed application configuration with command-line handling.

    Subclasses override setup() to define the application name, version
    and default settings; the constructor then parses command-line
    arguments and loads or writes the configuration file accordingly.
    """

    # Override this function!
    def setup(self, *args, **kwargs):
        """Define defaults. Subclasses are expected to override this."""
        # Private:
        self.__version = '0.0.0'
        self.__appname = 'NO_APPNAME'
        # Defaults (Settings):
        # self.ipfix_port = 4739

    def __init__(self):
        Serializable.__init__(self, 'config.xml')
        self.setup()
        # Config File:
        self.__parseArguments()
        self.__handleArguments()

    def __parseArguments(self):
        """Build the argparse parser and parse sys.argv into self.__args."""
        ap = argparse.ArgumentParser(
            description="Dump IPFIX-Messages collected over UDP")
        ap.add_argument('-v', '--version', help='Print Version',
                        action='store_true')
        ap.add_argument('-c', '--config', metavar='Configfile',
                        help='Which config-file to use (default: config.xml)')
        ap.add_argument('-d', '--defaultconfig', metavar='Configfile',
                        help='Write sample Config with default values')
        self.__args = ap.parse_args()

    def __handleArguments(self):
        """Act on the parsed arguments.

        --version and --defaultconfig print and exit the process;
        --config switches the config file and tries to load it,
        falling back to defaults when the file is missing.
        """
        if self.__args.version:
            print("%s. Version: %s" % (self.__appname, self.__version))
            print("(c) 2014 by Alexander Bredo, EDAG AG")
            sys.exit(0)
        if self.__args.defaultconfig:
            self._filename = self.__args.defaultconfig
            self.save()
            print("Default config has been generated. See %s" % self._filename)
            sys.exit(0)
        if self.__args.config:
            self._filename = self.__args.config
            try:
                self.load()
                log.info("Using %s as configuration." % self._filename)
            except Exception:
                # Deliberate best-effort: a missing/unreadable file just
                # means defaults from setup() stay in effect.
                log.info("Configuration file '%s' was not found. Using default values." % self._filename)
|
{
"content_hash": "f42fce3c6c0bb906facb401840ef1406",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 112,
"avg_line_length": 35.69565217391305,
"alnum_prop": 0.6784409257003654,
"repo_name": "alexbredo/site-packages3",
"id": "c01ec73461c4df4e2f3c41a1faa230c6d2633f0b",
"size": "3185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "base/appconfig.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "42447"
}
],
"symlink_target": ""
}
|
# Site generator settings. Paths are relative to the directory the
# generator is run from.
root = "./"
# Public base URL of the generated site (no trailing slash).
webRoot = u"http://YOURDOMAIN"
contentDir = "content/"
templateDir = root + "templates/default/"
outputDir = root + "output/"
# Site title rendered by the templates.
sitename = u'YOUR TITLE'
# From the typekit "kit" get the 7 characters before the ".js"
# If you're not using Adobe TypeKit, leave this blank and it will not process.
typekitId = ''
# Number of posts rendered on each paginated index page.
postsPerPage = 10
|
{
"content_hash": "be78fc4ad69ddef4d3119052633edd0a",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 78,
"avg_line_length": 33.7,
"alnum_prop": 0.7091988130563798,
"repo_name": "JoeIsHere/yakbarber",
"id": "4ac0d8d50319f10cbc0380869879090937fabda4",
"size": "386",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6938"
},
{
"name": "HTML",
"bytes": "6008"
},
{
"name": "Python",
"bytes": "7690"
}
],
"symlink_target": ""
}
|
"""Test gettxoutproof and verifytxoutproof RPCs."""
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.messages import (
CMerkleBlock,
from_hex,
)
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet import MiniWallet
class MerkleBlockTest(SyscoinTestFramework):
    """Functional tests for gettxoutproof/verifytxoutproof RPCs."""

    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        # Node 1 runs with -txindex so proofs can be produced even for
        # fully-spent transactions.
        self.extra_args = [
            [],
            ["-txindex"],
        ]

    def run_test(self):
        """Exercise proof generation/verification across spend states."""
        miniwallet = MiniWallet(self.nodes[0])
        # Add enough mature utxos to the wallet, so that all txs spend confirmed coins
        self.generate(miniwallet, 5)
        self.generate(self.nodes[0], COINBASE_MATURITY)
        chain_height = self.nodes[1].getblockcount()
        assert_equal(chain_height, 105)
        txid1 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
        txid2 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
        # This will raise an exception because the transaction is not yet in a block
        assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
        self.generate(self.nodes[0], 1, sync_fun=self.no_op)
        blockhash = self.nodes[0].getblockhash(chain_height + 1)
        # Both txids are in this block: index 0 is the coinbase.
        txlist = []
        blocktxn = self.nodes[0].getblock(blockhash, True)["tx"]
        txlist.append(blocktxn[1])
        txlist.append(blocktxn[2])
        assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1])), [txid1])
        assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2])), txlist)
        assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2], blockhash)), txlist)
        txin_spent = miniwallet.get_utxo(txid=txid2)  # Get the change from txid2
        tx3 = miniwallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=txin_spent)
        txid3 = tx3['txid']
        self.generate(self.nodes[0], 1)
        txid_spent = txin_spent["txid"]
        txid_unspent = txid1  # Input was change from txid2, so txid1 should be unspent
        # Invalid txids
        assert_raises_rpc_error(-8, "txid must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["00000000000000000000000000000000"], blockhash)
        assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["ZZZ0000000000000000000000000000000000000000000000000000000000000"], blockhash)
        # Invalid blockhashes
        assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
        assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "ZZZ0000000000000000000000000000000000000000000000000000000000000")
        # We can't find the block from a fully-spent tx
        assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid_spent])
        # We can get the proof if we specify the block
        assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_spent], blockhash)), [txid_spent])
        # We can't get the proof if we specify a non-existent block
        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].gettxoutproof, [txid_spent], "0000000000000000000000000000000000000000000000000000000000000000")
        # We can get the proof if the transaction is unspent
        assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_unspent])), [txid_unspent])
        # We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter.
        assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2]))), sorted(txlist))
        assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid2, txid1]))), sorted(txlist))
        # We can always get a proof if we have a -txindex
        assert_equal(self.nodes[0].verifytxoutproof(self.nodes[1].gettxoutproof([txid_spent])), [txid_spent])
        # We can't get a proof if we specify transactions from different blocks
        assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[0].gettxoutproof, [txid1, txid3])
        # Test empty list
        assert_raises_rpc_error(-8, "Parameter 'txids' cannot be empty", self.nodes[0].gettxoutproof, [])
        # Test duplicate txid
        assert_raises_rpc_error(-8, 'Invalid parameter, duplicated txid', self.nodes[0].gettxoutproof, [txid1, txid1])
        # Now we'll try tweaking a proof.
        proof = self.nodes[1].gettxoutproof([txid1, txid2])
        assert txid1 in self.nodes[0].verifytxoutproof(proof)
        assert txid2 in self.nodes[1].verifytxoutproof(proof)
        tweaked_proof = from_hex(CMerkleBlock(), proof)
        # Make sure that our serialization/deserialization is working
        assert txid1 in self.nodes[0].verifytxoutproof(tweaked_proof.serialize().hex())
        # Check to see if we can go up the merkle tree and pass this off as a
        # single-transaction block
        tweaked_proof.txn.nTransactions = 1
        tweaked_proof.txn.vHash = [tweaked_proof.header.hashMerkleRoot]
        tweaked_proof.txn.vBits = [True] + [False]*7
        # Every node must reject the forged single-transaction proof.
        for n in self.nodes:
            assert not n.verifytxoutproof(tweaked_proof.serialize().hex())
        # TODO: try more variants, eg transactions at different depths, and
        # verify that the proofs are invalid
if __name__ == '__main__':
    MerkleBlockTest().main()
|
{
"content_hash": "9845096da0d6bf096045d6c5eb939d9f",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 259,
"avg_line_length": 56.01851851851852,
"alnum_prop": 0.6940495867768595,
"repo_name": "syscoin/syscoin",
"id": "dca7268c5baef64ea474d9e9db5015590096a223",
"size": "6264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/rpc_txoutproof.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28178"
},
{
"name": "C",
"bytes": "1285088"
},
{
"name": "C++",
"bytes": "12653307"
},
{
"name": "CMake",
"bytes": "50978"
},
{
"name": "Cap'n Proto",
"bytes": "1256"
},
{
"name": "Dockerfile",
"bytes": "1721"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "30986"
},
{
"name": "JavaScript",
"bytes": "31802"
},
{
"name": "M4",
"bytes": "260893"
},
{
"name": "Makefile",
"bytes": "146223"
},
{
"name": "Objective-C++",
"bytes": "5497"
},
{
"name": "Python",
"bytes": "2965506"
},
{
"name": "QMake",
"bytes": "438"
},
{
"name": "Sage",
"bytes": "56850"
},
{
"name": "Scheme",
"bytes": "25953"
},
{
"name": "Shell",
"bytes": "212830"
},
{
"name": "TypeScript",
"bytes": "10706"
}
],
"symlink_target": ""
}
|
import os
import shutil
import glob
import time
import sys
import stat
import random
import json
import logging
import zipfile
import signal
import fnmatch
import subprocess
import re
from optparse import OptionParser
# NOTE: Python 2 only -- reload() re-exposes sys.setdefaultencoding, which
# is hidden from the module namespace after interpreter start-up.
reload(sys)
sys.setdefaultencoding('utf8')
# Tool identity and defaults.
TOOL_VERSION = "v0.1"
VERSION_FILE = "VERSION"
DEFAULT_CMD_TIMEOUT = 600
# Supported package formats.
PKG_TYPES = ["deb"]
# Globals populated later (argument parsing / prepareBuildRoot()).
PKG_NAME = None
BUILD_PARAMETERS = None
BUILD_ROOT = None
BUILD_ROOT_SRC = None
BUILD_ROOT_SRC_PKG = None
BUILD_ROOT_SRC_PKG_APP = None
BUILD_ROOT_SRC_SUB_APP = None
BUILD_ROOT_PKG = None
BUILD_ROOT_PKG_APP = None
LOG = None
LOG_LEVEL = logging.DEBUG
# Build date stamp, e.g. "20240131".
BUILD_TIME = time.strftime('%Y%m%d', time.localtime(time.time()))
class ColorFormatter(logging.Formatter):
    """Formatter that adds ANSI colour/emphasis codes to log messages.

    Messages starting with "+" are rendered bold (prefix stripped);
    messages starting with "=" are rendered inverse.  The record level
    selects the colour (INFO green, DEBUG blue, WARNING yellow,
    ERROR red).

    NOTE: format() mutates record.msg in place, so a record handled by
    several handlers accumulates escape codes.
    """

    def __init__(self, msg):
        logging.Formatter.__init__(self, msg)

    def format(self, record):
        red, green, yellow, blue = range(4)
        colors = {'INFO': green, 'DEBUG': blue,
                  'WARNING': yellow, 'ERROR': red}
        msg = record.msg
        # Bug fix: guard against an empty message -- the original code
        # indexed msg[0] unconditionally and raised IndexError on "".
        if msg:
            if msg[0] == "+":
                msg = "\33[01m" + msg[1:] + "\033[0m"
            elif msg[0] == "=":
                msg = "\33[07m" + msg + "\033[0m"
        levelname = record.levelname
        if levelname in colors:
            msg_color = "\033[0;%dm" % (
                31 + colors[levelname]) + msg + "\033[0m"
            record.msg = msg_color
        return logging.Formatter.format(self, record)
def iterfindfiles(path, fnexp):
    """Yield paths of all files under *path* whose basename matches the
    fnmatch pattern *fnexp*."""
    for dirpath, _dirnames, filenames in os.walk(path):
        for name in fnmatch.filter(filenames, fnexp):
            yield os.path.join(dirpath, name)


def replaceUserString(path, fnexp, old_s, new_s):
    """Replace every literal occurrence of *old_s* with *new_s* in all
    files under *path* whose name matches the fnmatch pattern *fnexp*.

    Files that cannot be read or written are logged and skipped.

    Bug fixed: the original guarded with a literal str.find() but then
    substituted via re.sub(), which mis-handled replacement targets
    containing regex metacharacters; str.replace() keeps it literal.
    """
    for sub_file in iterfindfiles(path, fnexp):
        try:
            with open(sub_file, 'r') as sub_read_obj:
                read_string = sub_read_obj.read()
        except IOError as err:
            LOG.error("Read %s Error : " % sub_file + str(err))
            continue
        if read_string.find(old_s) >= 0:
            try:
                with open(sub_file, 'w') as sub_write_obj:
                    sub_write_obj.write(read_string.replace(old_s, new_s))
            except IOError as err:
                LOG.error("Modify %s Error : " % sub_file + str(err))
                continue
def isWindows():
    """Return True when running on native Windows or under Cygwin."""
    platform = sys.platform
    return platform == "cygwin" or platform.startswith("win")
def killProcesses(ppid=None):
    """Forcefully terminate *ppid* and all of its descendant processes.

    On Windows the whole tree is handled by TASKKILL; on POSIX the tree
    is discovered via ps and each pid gets SIGKILL, falling back to a
    shell ``kill -9`` when os.kill is not permitted.

    Bug fixed: the original returned after handling only the FIRST pid
    popped from the group, leaving the rest of the process tree running.
    """
    if isWindows():
        subprocess.check_call("TASKKILL /F /PID %s /T" % ppid)
    else:
        ppid = str(ppid)
        pidgrp = []

        def GetChildPids(ppid):
            # ps -ef: column 3 is the parent pid, column 2 the pid itself.
            command = "ps -ef | awk '{if ($3 ==%s) print $2;}'" % str(ppid)
            pids = os.popen(command).read()
            return pids.split()

        # Breadth-first expansion of the descendant set.
        pidgrp.extend(GetChildPids(ppid))
        for pid in pidgrp:
            pidgrp.extend(GetChildPids(pid))
        pidgrp.insert(0, ppid)
        success = True
        # Kill leaves first (pop from the end) so parents cannot respawn
        # reaped children mid-way.
        while len(pidgrp) > 0:
            pid = pidgrp.pop()
            try:
                os.kill(int(pid), signal.SIGKILL)
            except OSError:
                try:
                    os.popen("kill -9 %d" % int(pid))
                except Exception:
                    success = False
        return success
def safelyGetValue(origin_json=None, key=None):
    """Return origin_json[key], or None when either argument is falsy or
    the key is absent."""
    if not origin_json or not key:
        return None
    return origin_json.get(key)
def checkContains(origin_str=None, key_str=None):
    """Case-insensitive substring test: is *key_str* inside *origin_str*?"""
    return key_str.upper() in origin_str.upper()
def getRandomStr():
    """Return a random 15-character string of lowercase letters and digits.

    Not cryptographically secure -- only used for temp directory names.
    """
    str_pool = "abcdefghijklmnopqrstuvwxyz1234567890"
    return "".join(random.choice(str_pool) for _ in range(15))
def zipDir(dir_path, zip_file):
    """Zip the contents of *dir_path* (recursively) into *zip_file*.

    Archive member names are made relative to dir_path by chdir-ing into
    it while writing.  Returns True on success, False otherwise.

    NOTE(review): if an exception fires after the chdir, the process is
    left inside dir_path -- confirm callers do not depend on the cwd.
    """
    try:
        # Recreate the target: drop a stale archive, ensure the parent dir.
        if os.path.exists(zip_file):
            if not doRemove([zip_file]):
                return False
        if not os.path.exists(os.path.dirname(zip_file)):
            os.makedirs(os.path.dirname(zip_file))
        z_file = zipfile.ZipFile(zip_file, "w")
        orig_dir = os.getcwd()
        os.chdir(dir_path)
        for root, dirs, files in os.walk("."):
            for i_file in files:
                LOG.info("zip %s" % os.path.join(root, i_file))
                z_file.write(os.path.join(root, i_file))
        z_file.close()
        os.chdir(orig_dir)
    except Exception as e:
        LOG.error("Fail to pack %s to %s: %s" % (dir_path, zip_file, e))
        return False
    LOG.info("Done to zip %s to %s" % (dir_path, zip_file))
    return True
def overwriteCopy(src, dest, symlinks=False, ignore=None):
    """Recursively copy *src* into *dest*, merging into existing dirs.

    Unlike shutil.copytree, *dest* may already exist; files are
    overwritten in place.  When *symlinks* is true, links are recreated
    rather than followed.  *ignore* follows the shutil.ignore_patterns
    callable convention (called with (dir, names), returns names to skip).
    """
    if not os.path.exists(dest):
        os.makedirs(dest)
        shutil.copystat(src, dest)
    sub_list = os.listdir(src)
    if ignore:
        excl = ignore(src, sub_list)
        sub_list = [x for x in sub_list if x not in excl]
    for i_sub in sub_list:
        s_path = os.path.join(src, i_sub)
        d_path = os.path.join(dest, i_sub)
        if symlinks and os.path.islink(s_path):
            # Re-create the symlink itself, replacing any stale one.
            if os.path.lexists(d_path):
                os.remove(d_path)
            os.symlink(os.readlink(s_path), d_path)
            try:
                s_path_s = os.lstat(s_path)
                s_path_mode = stat.S_IMODE(s_path_s.st_mode)
                os.lchmod(d_path, s_path_mode)
            except Exception:
                # os.lchmod only exists on some platforms (e.g. BSD/macOS);
                # permission cloning is silently skipped elsewhere.
                pass
        elif os.path.isdir(s_path):
            overwriteCopy(s_path, d_path, symlinks, ignore)
        else:
            shutil.copy2(s_path, d_path)
def doCopy(src_item=None, dest_item=None):
    """Copy a file or directory tree, creating missing parent dirs.

    Returns True on success, False (after logging) on any failure.
    """
    LOG.info("Copying %s to %s" % (src_item, dest_item))
    try:
        if os.path.isdir(src_item):
            overwriteCopy(src_item, dest_item, symlinks=True)
            return True
        dest_parent = os.path.dirname(dest_item)
        if not os.path.exists(dest_parent):
            LOG.info("Create non-existent dir: %s" %
                     dest_parent)
            os.makedirs(dest_parent)
        shutil.copy2(src_item, dest_item)
        return True
    except Exception as e:
        LOG.error("Fail to copy file %s: %s" % (src_item, e))
        return False
def doRemove(target_file_list=None):
    """Delete each path in *target_file_list* (directories recursively).

    Stops and returns False at the first failure; True otherwise.
    """
    for i_file in target_file_list:
        LOG.info("Removing %s" % i_file)
        remover = shutil.rmtree if os.path.isdir(i_file) else os.remove
        try:
            remover(i_file)
        except Exception as e:
            LOG.error("Fail to remove file %s: %s" % (i_file, e))
            return False
    return True
def updateCopylistPrefix(src_default, dest_default, src_sub, dest_sub):
    """Resolve a copylist entry's source/destination paths.

    A leading "PACK-TOOL-ROOT" placeholder maps the source into the pack
    tools directory and the destination into the build root; otherwise
    the entry is joined onto the given defaults.
    Returns the (src, dest) pair.
    """
    PACK_TOOL_TAG = "PACK-TOOL-ROOT"
    if src_sub.startswith(PACK_TOOL_TAG):
        src_new = src_sub.replace(PACK_TOOL_TAG, BUILD_PARAMETERS.pkgpacktools)
    else:
        src_new = os.path.join(src_default, src_sub)
    if dest_sub.startswith(PACK_TOOL_TAG):
        dest_new = dest_sub.replace(PACK_TOOL_TAG, BUILD_ROOT)
    else:
        dest_new = os.path.join(dest_default, dest_sub)
    return (src_new, dest_new)
def buildSRC(src=None, dest=None, build_json=None):
    """Copy *src* to *dest*, then apply the blacklist and copylist from
    *build_json*.

    Returns True on success (including when *src* does not exist, which
    is treated as "nothing to build").

    NOTE(review): this mutates build_json["blacklist"] in place and only
    removes the FIRST empty-string entry -- confirm callers never pass
    multiple "" entries (an empty pattern would glob the whole dest dir).
    """
    if not os.path.exists(src):
        LOG.info("+Src dir does not exist, skip build src process ...")
        return True
    if not doCopy(src, dest):
        return False
    if "blacklist" in build_json:
        if build_json["blacklist"].count("") > 0:
            build_json["blacklist"].remove("")
        # Expand each blacklist glob relative to dest and delete matches.
        black_file_list = []
        for i_black in build_json["blacklist"]:
            black_file_list = black_file_list + \
                glob.glob(os.path.join(dest, i_black))
        black_file_list = list(set(black_file_list))
        if not doRemove(black_file_list):
            return False
    if "copylist" in build_json:
        # Extra files to copy, possibly anchored at PACK-TOOL-ROOT.
        for i_s_key in build_json["copylist"].keys():
            if i_s_key and build_json["copylist"][i_s_key]:
                (src_updated, dest_updated) = updateCopylistPrefix(
                    src, dest, i_s_key, build_json["copylist"][i_s_key])
                if not doCopy(src_updated, dest_updated):
                    return False
    return True
def exitHandler(return_code=1):
    """Clean up the temporary build root and terminate the process.

    Never returns: always ends in sys.exit().  Cleaning is skipped when
    --notclean was given.
    """
    LOG.info("+Cleaning build root folder ...")
    if not BUILD_PARAMETERS.bnotclean and os.path.exists(BUILD_ROOT):
        if not doRemove([BUILD_ROOT]):
            LOG.error("Fail to clean build root, exit ...")
            sys.exit(1)
    if return_code == 0:
        LOG.info("================ DONE ================")
    else:
        LOG.error(
            "================ Found Something Wrong !!! ================")
    sys.exit(return_code)
def prepareBuildRoot():
    """Create a unique /tmp build root, populate the path globals, and
    copy the source tree into it.

    Returns True on success, False when a copy/remove step fails.
    """
    LOG.info("+Preparing build root folder ...")
    global BUILD_ROOT  # The build root directory, like as "/tmp/randomName"
    # The source code in the tmp directory, like as
    # "/tmp/randomName/crosswalk-test"
    global BUILD_ROOT_SRC
    # The source of the zip operate for all package, like as
    # "/tmp/randomName/pkg"
    global BUILD_ROOT_SRC_PKG
    # The source of the app_package operate for all package, like as
    # "/tmp/randomName/pkg-app"
    global BUILD_ROOT_SRC_PKG_APP
    # The source of the sub app_package operate for all package, like as
    # "/tmp/randomName/sub-app"
    global BUILD_ROOT_SRC_SUB_APP
    global BUILD_ROOT_PKG  # BUILD_ROOT_SRC_PKG + "opt" + PKG_NAME
    global BUILD_ROOT_PKG_APP  # BUILD_ROOT_SRC_PKG_APP + "opt" + PKG_NAME
    # Retry random names until an unused /tmp path is found.
    while True:
        BUILD_ROOT = os.path.join("/tmp", getRandomStr())
        if os.path.exists(BUILD_ROOT):
            continue
        else:
            break
    BUILD_ROOT_SRC = os.path.join(BUILD_ROOT, PKG_NAME)
    BUILD_ROOT_SRC_PKG = os.path.join(BUILD_ROOT, "pkg")
    BUILD_ROOT_SRC_PKG_APP = os.path.join(BUILD_ROOT, "pkg-app")
    BUILD_ROOT_SRC_SUB_APP = os.path.join(BUILD_ROOT, "sub-app")
    BUILD_ROOT_PKG = os.path.join(BUILD_ROOT, "pkg", "opt", PKG_NAME)
    BUILD_ROOT_PKG_APP = os.path.join(BUILD_ROOT, "pkg-app", "opt", PKG_NAME)
    if not doCopy(BUILD_PARAMETERS.srcdir, BUILD_ROOT_SRC):
        return False
    else:
        # Substitute the placeholder home dir with the real test user's.
        replaceUserString(
            BUILD_ROOT_SRC,
            '*',
            'TESTER-HOME-DIR',
            "/home/%s" %
            BUILD_PARAMETERS.user)
    # Drop stale package archives left over from previous runs.
    if not doRemove(
            glob.glob(os.path.join(BUILD_ROOT_SRC, "%s*.zip" % PKG_NAME))):
        return False
    return True
def doCMD(cmd, time_out=DEFAULT_CMD_TIMEOUT, no_check_return=False):
    """Run *cmd* through the shell, polling until it exits or *time_out*
    seconds elapse.

    On timeout the whole process tree is killed and False is returned.
    A non-zero exit status also returns False unless *no_check_return*
    is set.  Returns True otherwise.
    """
    LOG.info("Doing CMD: [ %s ]" % cmd)
    pre_time = time.time()
    cmd_proc = subprocess.Popen(args=cmd, shell=True)
    while True:
        cmd_exit_code = cmd_proc.poll()
        elapsed_time = time.time() - pre_time
        if cmd_exit_code is None:
            # Still running: enforce the timeout.
            if elapsed_time >= time_out:
                killProcesses(ppid=cmd_proc.pid)
                LOG.error("Timeout to exe CMD")
                return False
        else:
            if not no_check_return and cmd_exit_code != 0:
                LOG.error("Fail to exe CMD")
                return False
            break
        # Poll every two seconds rather than busy-waiting.
        time.sleep(2)
    return True
def packDEB(build_json=None, app_src=None, app_dest=None, app_name=None):
    """Build a .deb from *app_src* with crosswalk-app and copy the result
    into *app_dest*.

    The produced package file is renamed to "<pkg_name>.deb", where
    pkg_name is app_name prefixed with "org.test." (dashes stripped)
    unless app_name already contains "org".

    NOTE(review): the cwd is only restored on the failure path -- on
    success the process stays in app_src/pkg; confirm callers tolerate
    this.  Also, if pkg/ held several files they would all be renamed to
    the same .deb name -- presumably a single-package dir; verify.
    """
    pack_cmd = "crosswalk-app build"
    LOG.info("Packing cmd : %s" % pack_cmd)
    orig_dir = os.getcwd()
    os.chdir(app_src)
    LOG.info("Change dir to : %s" % app_src)
    if app_name.find("org") == -1:
        pkg_name = "org.test." + app_name.replace("-", "")
    else:
        pkg_name = app_name
    if doCMD(pack_cmd, DEFAULT_CMD_TIMEOUT):
        for parent, dirnames, filenames in os.walk(
                os.path.join(app_src, "pkg")):
            LOG.info("ReName source file is : %s" % filenames)
            if app_name:
                os.chdir(os.path.join(app_src, "pkg"))
                for filename in filenames:
                    rename_cmd = "mv %s %s" % (filename, pkg_name + ".deb")
                    if not doCMD(rename_cmd, DEFAULT_CMD_TIMEOUT):
                        os.chdir(orig_dir)
                        return False
    else:
        return False
    # After build successfully, copy the .deb from app_src+"pkg" to app_dest
    if not doCopy(
            os.path.join(app_src, "pkg"),
            app_dest):
        return False
    return True
def packAPP(build_json=None, app_src=None, app_dest=None, app_name=None):
    """Package a single app from *app_src* into *app_dest*.

    Only the DEB package type is supported; any other configured type is
    rejected.  Returns True on success, False otherwise.
    """
    LOG.info("Packing %s(%s)" % (app_name, app_src))
    if not os.path.exists(app_dest):
        try:
            os.makedirs(app_dest)
        except Exception as e:
            LOG.error("Fail to init package install dest dir: %s" % e)
            return False
    if not checkContains(BUILD_PARAMETERS.pkgtype, "DEB"):
        LOG.error("Got wrong pkg type: %s" % BUILD_PARAMETERS.pkgtype)
        return False
    if not packDEB(build_json, app_src, app_dest, app_name):
        return False
    LOG.info("Success to pack APP: %s" % app_name)
    return True
def createIndexFile(index_file_path=None, hosted_app=None):
    """Write a redirecting index.html pointing at the package's webrunner.

    *hosted_app* is accepted for interface compatibility but unused here.
    Returns True on success, False (after logging) on failure.
    """
    try:
        index_url = "opt/%s/webrunner/index.html?testsuite=../tests.xml" \
                    "&testprefix=../../../.." % PKG_NAME
        html_content = "<!doctype html><head><meta http-equiv='Refresh' " \
                       "content='1; url=%s'></head>" % index_url
        # 'with' guarantees the handle is closed even when write() fails;
        # the original leaked the handle on error.
        with open(index_file_path, "w") as index_file:
            index_file.write(html_content)
    except Exception as e:
        LOG.error("Fail to create index.html for top-app: %s" % e)
        return False
    LOG.info("Success to create index file %s" % index_file_path)
    return True
def buildSubAPP(app_dir=None, build_json=None, app_dest_default=None):
    """Create and pack one sub-app (or every app in a directory when
    "all-apps" is set) under BUILD_ROOT_SRC_SUB_APP.

    Returns True on success, False otherwise.
    NOTE(review): chdir into BUILD_ROOT_SRC_SUB_APP is never undone on
    the success path -- confirm callers tolerate the cwd change.
    """
    # An "app-dir" key in the json overrides the positional directory.
    app_dir_inside = safelyGetValue(build_json, "app-dir")
    if app_dir_inside:
        app_dir = app_dir_inside
    LOG.info("+Building sub APP(s) from %s ..." % app_dir)
    app_dir = os.path.join(BUILD_ROOT_SRC, app_dir)
    app_name = safelyGetValue(build_json, "app-name")
    if not app_name:
        app_name = os.path.basename(app_dir)
    app_src = os.path.join(BUILD_ROOT_SRC_SUB_APP, app_name)
    pkg_name = app_name
    LOG.info("+Change dir to %s: " % BUILD_ROOT_SRC_SUB_APP)
    if not os.path.exists(BUILD_ROOT_SRC_SUB_APP):
        LOG.info("Create BUILD_ROOT_SRC_SUB_APP dir: %s" %
                 BUILD_ROOT_SRC_SUB_APP)
        os.makedirs(BUILD_ROOT_SRC_SUB_APP)
    os.chdir(BUILD_ROOT_SRC_SUB_APP)
    # Scaffold the crosswalk app project for this sub-app.
    pack_cmd = "crosswalk-app create " + pkg_name
    orig_dir = os.getcwd()
    if not doCMD(pack_cmd, DEFAULT_CMD_TIMEOUT):
        os.chdir(orig_dir)
        return False
    # copy source to BUILD_ROOT_SRC_SUB_APP/pkg_name/app
    if buildSRC(app_dir, os.path.join(app_src, "app"), build_json):
        # "install-path" relocates the packed output under the default dest.
        app_dest = safelyGetValue(build_json, "install-path")
        if app_dest:
            app_dest = os.path.join(app_dest_default, app_dest)
        else:
            app_dest = app_dest_default
        if safelyGetValue(build_json, "all-apps") == "true":
            # Pack every sub-directory of app_src as its own app.
            app_dirs = os.listdir(app_src)
            apps_num = 0
            for i_app_dir in app_dirs:
                if os.path.isdir(os.path.join(app_src, i_app_dir)):
                    i_app_name = os.path.basename(i_app_dir)
                    if not packAPP(
                            build_json, os.path.join(app_src, i_app_name),
                            app_dest, i_app_name):
                        return False
                    else:
                        apps_num = apps_num + 1
            if apps_num > 0:
                LOG.info("Totally packed %d apps in %s" % (apps_num, app_dir))
                return True
        else:
            return packAPP(build_json, app_src, app_dest, app_name)
    return False
def buildPKGAPP(build_json=None):
    """Build the top-level "pkg-app" wrapper application.

    Scaffolds a crosswalk-app project, copies icon/manifest and the
    whole source tree into it, builds any sub-apps, and finally packs
    everything into BUILD_ROOT_PKG.  Returns True on success; exits the
    process on unexpected errors.
    """
    try:
        LOG.info("+Building package APP ...")
        if not os.path.exists(
                os.path.join(BUILD_ROOT_SRC, "crosswalk-app-tools-deb")):
            try:
                os.makedirs(
                    os.path.join(
                        BUILD_ROOT_SRC,
                        "crosswalk-app-tools-deb"))
            except Exception as e:
                LOG.error(
                    "Fail to make the crosswalk-app-tools-deb dir: %s" %
                    e)
                return False
        pkg_name = "org.test." + PKG_NAME.replace("-", "")
        pack_cmd = "crosswalk-app create " + pkg_name
        orig_dir = os.getcwd()
        LOG.info(
            "+Change dir to %s: " %
            os.path.join(
                BUILD_ROOT_SRC,
                "crosswalk-app-tools-deb"))
        os.chdir(os.path.join(BUILD_ROOT_SRC, "crosswalk-app-tools-deb"))
        if not doCMD(pack_cmd, DEFAULT_CMD_TIMEOUT):
            os.chdir(orig_dir)
            return False
        if not doCopy(os.path.join(BUILD_ROOT_SRC, "crosswalk-app-tools-deb", pkg_name),
                      os.path.join(BUILD_ROOT_SRC_PKG_APP, pkg_name)):
            return False
        if not doCopy(os.path.join(BUILD_ROOT_SRC, "icon.png"),
                      os.path.join(BUILD_ROOT_SRC_PKG_APP, pkg_name, "app", "icon.png")):
            return False
        if not doCopy(os.path.join(BUILD_ROOT_SRC, "manifest.json"),
                      os.path.join(BUILD_ROOT_SRC_PKG_APP, pkg_name, "app", "manifest.json")):
            return False
        if not createIndexFile(
                os.path.join(BUILD_ROOT_SRC_PKG_APP, pkg_name, "app", "index.html")):
            return False
        if "blacklist" not in build_json:
            build_json.update({"blacklist": []})
        build_json["blacklist"].extend(PKG_BLACK_LIST)
        # NOTE(review): this assignment creates a LOCAL that shadows the
        # module-level BUILD_ROOT_PKG_APP (no `global` here) -- the global
        # stays None; looks unused elsewhere, but confirm.
        BUILD_ROOT_PKG_APP = os.path.join(
            BUILD_ROOT_SRC_PKG_APP,
            pkg_name,
            "app",
            "opt",
            PKG_NAME)
        if not buildSRC(BUILD_ROOT_SRC, BUILD_ROOT_PKG_APP, build_json):
            return False
        # Prefer the Linux-specific test manifest when one is present.
        comXML = os.path.join(BUILD_ROOT_PKG_APP, "tests.xml")
        linuxXML = os.path.join(BUILD_ROOT_PKG_APP, "tests.linux.xml")
        if os.path.exists(linuxXML):
            if not doCMD("rm -rf %s" % comXML):
                return False
            if not doCMD("mv %s %s" % (linuxXML, comXML)):
                return False
        if "subapp-list" in build_json:
            for i_sub_app in build_json["subapp-list"].keys():
                if not buildSubAPP(
                        i_sub_app, build_json["subapp-list"][i_sub_app],
                        BUILD_ROOT_PKG_APP):
                    return False
        if not packAPP(
                build_json, os.path.join(BUILD_ROOT_SRC_PKG_APP, pkg_name), BUILD_ROOT_PKG, PKG_NAME):
            return False
        return True
    except Exception as e:
        LOG.error("Got wrong options: %s, exit ..." % e)
        sys.exit(1)
def buildPKG(build_json=None):
    """Build the whole package tree described by *build_json*.

    Applies the global blacklist, copies the sources, builds sub-apps and
    the optional top-level "pkg-app".  Returns True on success.
    """
    if "blacklist" not in build_json:
        build_json.update({"blacklist": []})
    build_json["blacklist"].extend(PKG_BLACK_LIST)
    if not buildSRC(BUILD_ROOT_SRC, BUILD_ROOT_PKG, build_json):
        return False
    for sub_app in build_json.get("subapp-list", {}).keys():
        if not buildSubAPP(sub_app, build_json["subapp-list"][sub_app],
                           BUILD_ROOT_PKG):
            return False
    if "pkg-app" in build_json:
        return buildPKGAPP(build_json["pkg-app"])
    return True
def replaceCopy(readfile, writefile, content, newContent):
    """Copy *readfile* to *writefile* line by line, applying the regex
    substitution content -> newContent only on lines that mention the
    'org.xwalk.embedding.test' package; all other lines are copied
    verbatim.

    Bug fixed: the original never closed the input file handle (and
    leaked the output handle too if a write raised); 'with' closes both
    deterministically.
    """
    with open(readfile, "r") as ffrom, open(writefile, "w") as fto:
        for line in ffrom:
            if 'org.xwalk.embedding.test' in line:
                fto.write(re.sub(content, newContent, line))
            else:
                fto.write(line)
def findVersionFile(pathFile=None):
    """Walk upwards from srcdir/../../.. looking for the VERSION file.

    One "../" component is stripped per recursion step; the search stops
    as soon as the VERSION file exists at the probed level, or when the
    probe bottoms out at "..".  Returns the resolved VERSION file path
    (which may not exist in the bottom-out case).

    Bug fixed: the original discarded the result of its recursive call
    and also fell through without a return when the file existed at the
    current level, so callers received None in both situations.
    """
    if not pathFile:
        pathFile = os.path.join("..", "..", "..")
    if not os.path.exists(
            os.path.join(BUILD_PARAMETERS.srcdir, pathFile, VERSION_FILE)):
        pathFile = pathFile[:-3]
        if pathFile != "..":
            # Propagate the recursive result instead of dropping it.
            return findVersionFile(pathFile)
    pkg_version_file_path = os.path.join(
        BUILD_PARAMETERS.srcdir,
        pathFile,
        VERSION_FILE)
    return (pkg_version_file_path)
def main():
    """Command-line entry point for the pack tool.

    Parses CLI options into the global BUILD_PARAMETERS, resolves the pkg
    version and config json, prepares the build root, builds the package
    and finally zips it into the destination directory.  Exits the process
    (sys.exit / exitHandler) on any unrecoverable error.
    """
    # Set up a colored stderr/stdout logger shared via the LOG global.
    global LOG
    LOG = logging.getLogger("pack-tool")
    LOG.setLevel(LOG_LEVEL)
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(LOG_LEVEL)
    stream_formatter = ColorFormatter("[%(asctime)s] %(message)s")
    stream_handler.setFormatter(stream_formatter)
    LOG.addHandler(stream_handler)
    # ---- option parsing -------------------------------------------------
    try:
        usage = "Usage: ./%prog -t deb"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-c",
            "--cfg",
            dest="pkgcfg",
            help="specify the path of config json file")
        opts_parser.add_option(
            "-t",
            "--type",
            dest="pkgtype",
            help="specify the pkg type, e.g. deb ...")
        opts_parser.add_option(
            "-d",
            "--dest",
            dest="destdir",
            help="specify the installation folder for packed package")
        opts_parser.add_option(
            "-s",
            "--src",
            dest="srcdir",
            help="specify the path of pkg resource for packing")
        opts_parser.add_option(
            "--tools",
            dest="pkgpacktools",
            help="specify the parent folder of pack tools")
        opts_parser.add_option(
            "--notclean",
            dest="bnotclean",
            action="store_true",
            help="disable the build root clean after the packing")
        opts_parser.add_option(
            "-v",
            "--version",
            dest="bversion",
            action="store_true",
            help="show this pack tool's version")
        opts_parser.add_option(
            "-u",
            "--user",
            dest="user",
            help="specify the user in inst.py")
        opts_parser.add_option(
            "--pkg-version",
            dest="pkgversion",
            help="specify the pkg version, e.g. 0.0.0.1")
        # No arguments at all: show the help screen instead of failing.
        if len(sys.argv) == 1:
            sys.argv.append("-h")
        global BUILD_PARAMETERS
        (BUILD_PARAMETERS, args) = opts_parser.parse_args()
    except Exception as e:
        LOG.error("Got wrong options: %s, exit ..." % e)
        sys.exit(1)
    if BUILD_PARAMETERS.bversion:
        print "Version: %s" % TOOL_VERSION
        sys.exit(0)
    # ---- defaults for src dir and user ----------------------------------
    if not BUILD_PARAMETERS.srcdir:
        BUILD_PARAMETERS.srcdir = os.getcwd()
    BUILD_PARAMETERS.srcdir = os.path.expanduser(BUILD_PARAMETERS.srcdir)
    if not BUILD_PARAMETERS.user:
        BUILD_PARAMETERS.user = "app"
    pkg_version_file_path = findVersionFile()
    # ---- resolve pkg version: CLI option wins over the version file -----
    try:
        pkg_main_version = 0
        pkg_release_version = 1
        if BUILD_PARAMETERS.pkgversion:
            LOG.info("Using %s as pkg version " % BUILD_PARAMETERS.pkgversion)
            pkg_main_version = BUILD_PARAMETERS.pkgversion
        else:
            if pkg_version_file_path is not None:
                LOG.info(
                    "Using pkg version by file: %s" %
                    pkg_version_file_path)
                with open(pkg_version_file_path, "rt") as pkg_version_file:
                    pkg_version_raw = pkg_version_file.read()
                    pkg_version_file.close()
                pkg_version_json = json.loads(pkg_version_raw)
                pkg_main_version = pkg_version_json["main-version"]
                pkg_release_version = pkg_version_json["release-version"]
    except Exception as e:
        LOG.error("Fail to read pkg version file: %s, exit ..." % e)
        sys.exit(1)
    # ---- validate pkg type and destination ------------------------------
    if not BUILD_PARAMETERS.pkgtype:
        LOG.error("No pkg type provided, exit ...")
        sys.exit(1)
    elif not BUILD_PARAMETERS.pkgtype in PKG_TYPES:
        LOG.error("Wrong pkg type, only support: %s, exit ..." %
                  PKG_TYPES)
        sys.exit(1)
    elif not BUILD_PARAMETERS.destdir:
        BUILD_PARAMETERS.destdir = BUILD_PARAMETERS.srcdir
    BUILD_PARAMETERS.destdir = os.path.expanduser(BUILD_PARAMETERS.destdir)
    if not BUILD_PARAMETERS.pkgpacktools:
        BUILD_PARAMETERS.pkgpacktools = os.path.join(
            BUILD_PARAMETERS.srcdir, "..", "..", "tools")
    BUILD_PARAMETERS.pkgpacktools = os.path.expanduser(
        BUILD_PARAMETERS.pkgpacktools)
    # ---- load the suite config json -------------------------------------
    config_json = None
    if BUILD_PARAMETERS.pkgcfg:
        config_json_file_path = BUILD_PARAMETERS.pkgcfg
    else:
        config_json_file_path = os.path.join(
            BUILD_PARAMETERS.srcdir, "suite.json")
    try:
        LOG.info("Using config json file: %s" % config_json_file_path)
        with open(config_json_file_path, "rt") as config_json_file:
            config_raw = config_json_file.read()
            config_json_file.close()
        config_json = json.loads(config_raw)
    except Exception as e:
        LOG.error("Fail to read config json file: %s, exit ..." % e)
        sys.exit(1)
    global PKG_NAME
    PKG_NAME = safelyGetValue(config_json, "pkg-name")
    if not PKG_NAME:
        PKG_NAME = os.path.basename(BUILD_PARAMETERS.srcdir)
        LOG.warning(
            "Due to fail to read pkg name from json that "
            "using src dir name as pkg name ...")
    LOG.info("================= %s (%s-%s) ================" %
             (PKG_NAME, pkg_main_version, pkg_release_version))
    if not safelyGetValue(config_json, "pkg-list"):
        LOG.error("Fail to read pkg-list, exit ...")
        sys.exit(1)
    # Each pkg-list key may be a comma separated list of pkg types; pick
    # the entry matching the requested type (last match wins).
    pkg_json = None
    for i_pkg in config_json["pkg-list"].keys():
        i_pkg_list = i_pkg.replace(" ", "").split(",")
        if BUILD_PARAMETERS.pkgtype in i_pkg_list:
            pkg_json = config_json["pkg-list"][i_pkg]
    if not pkg_json:
        LOG.error("Fail to read pkg json, exit ...")
        sys.exit(1)
    if not prepareBuildRoot():
        exitHandler(1)
    # Global blacklist consumed later by buildPKG().
    global PKG_BLACK_LIST
    PKG_BLACK_LIST = []
    if "pkg-blacklist" in config_json:
        PKG_BLACK_LIST.extend(config_json["pkg-blacklist"])
    if not buildPKG(pkg_json):
        exitHandler(1)
    # ---- zip the build root into the final artifact ---------------------
    LOG.info("+Building package ...")
    pkg_file = os.path.join(
        BUILD_PARAMETERS.destdir,
        "%s-%s-%s.%s.zip" %
        (PKG_NAME,
         pkg_main_version,
         pkg_release_version,
         BUILD_PARAMETERS.pkgtype))
    if not zipDir(BUILD_ROOT_SRC_PKG, pkg_file):
        exitHandler(1)
if __name__ == "__main__":
    # Run the packer; exitHandler performs cleanup (build root removal)
    # before terminating the process.
    main()
    exitHandler(0)
|
{
"content_hash": "6454080ba1eb4a9f53a598d56409e862",
"timestamp": "",
"source": "github",
"line_count": 797,
"max_line_length": 102,
"avg_line_length": 33.375156838143035,
"alnum_prop": 0.5543233082706767,
"repo_name": "XiaosongWei/crosswalk-test-suite",
"id": "5b6dbc4bad5a8e2de6c4c280fcefa9c32c432dab",
"size": "28156",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/build/pack_deb.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1693"
},
{
"name": "C",
"bytes": "28136"
},
{
"name": "CSS",
"bytes": "403677"
},
{
"name": "CoffeeScript",
"bytes": "18978"
},
{
"name": "Cucumber",
"bytes": "76562"
},
{
"name": "GLSL",
"bytes": "6990"
},
{
"name": "Groff",
"bytes": "12"
},
{
"name": "HTML",
"bytes": "41078525"
},
{
"name": "Java",
"bytes": "786204"
},
{
"name": "JavaScript",
"bytes": "4639929"
},
{
"name": "Logos",
"bytes": "12"
},
{
"name": "Makefile",
"bytes": "1044"
},
{
"name": "PHP",
"bytes": "45668"
},
{
"name": "Python",
"bytes": "4057992"
},
{
"name": "Shell",
"bytes": "850195"
}
],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before management commands
    # import anything that needs them.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lucem.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django is importable, so the original ImportError had a different
        # cause — surface it unchanged.
        raise
    execute_from_command_line(sys.argv)
|
{
"content_hash": "e338bc3695d71b070cc5cb8e5fa3d00f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 77,
"avg_line_length": 37.19047619047619,
"alnum_prop": 0.6197183098591549,
"repo_name": "cydrobolt/lucem",
"id": "7c3fb4af79a3793a99cc5833718d072c22f678f4",
"size": "803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "746"
},
{
"name": "HTML",
"bytes": "6001"
},
{
"name": "JavaScript",
"bytes": "2112"
},
{
"name": "Python",
"bytes": "14660"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
# Basque (Euskara) translations of the theme's UI strings.
# Keys are the canonical English messages; an empty value means the
# default (English/untranslated) text is used for that message.
MESSAGES = {
    "%d min remaining to read": "%d minutu gelditzen dira irakurtzeko",
    "(active)": "(aktibo)",
    "Also available in:": "Eskuragarria hizkuntza hauetan ere:",
    "Archive": "Artxiboa",
    "Authors": "Egileak",
    "Categories": "Kategoriak",
    "Comments": "Iruzkinak",
    "LANGUAGE": "Euskara",
    "Languages:": "Hizkuntzak:",
    "More posts about %s": "%s-ri buruzko argitalpen gehiago",
    "Newer posts": "Argitalpen berriagoak",
    "Next post": "Hurrengo argitalpena",
    "Next": "",
    "No posts found.": "Ez da argitalpenik aurkitu",
    "Nothing found.": "Ez da ezer aurkitu",
    "Older posts": "Post zaharragoak",
    "Original site": "Jatorrizko orria",
    "Posted:": "Argitaratuta:",
    "Posts about %s": "%s-ri buruzko argitalpenak",
    "Posts by %s": "%s-ek idatzitako argitalpenak",
    "Posts for year %s": "%s. urteko argitalpenak",
    "Posts for {month} {day}, {year}": "{year}ko {month}aren {day}ko argitalpenak",
    "Posts for {month} {year}": "{year}ko {month}ren argitalpenak",
    "Previous post": "Aurreko argitalpena",
    "Previous": "",
    "Publication date": "Argitaratze-data",
    "RSS feed": "RSS jarioa",
    "Read in English": "Euskaraz irakurri",
    "Read more": "Irakurri gehiago",
    "Skip to main content": "Joan eduki nagusira",
    "Source": "Iturria",
    "Subcategories:": "Azpikategoriak:",
    "Tags and Categories": "Etiketak eta Kategoriak",
    "Tags": "Etiketak",
    "Toggle navigation": "",
    "Uncategorized": "Kategorizatu-gabeak",
    "Up": "",
    "Updates": "Eguneraketak",
    "Write your page here.": "Idatzi zure orria hemen",
    "Write your post here.": "Idatzi zure argitalpena hemen",
    "old posts, page %d": "Argitalpen zaharragoak,%d. orria",
    "page %d": "%d. orria",
    "{month} {day}, {year}": "",
    "{month} {year}": "",
}
|
{
"content_hash": "52916c2e4eb9edf70692b32c25a1eed4",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 84,
"avg_line_length": 39.083333333333336,
"alnum_prop": 0.6156716417910447,
"repo_name": "knowsuchagency/nikola",
"id": "41fa9f18408088550337a3f158ce3d555c7b7509",
"size": "1901",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nikola/data/themes/base/messages/messages_eu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18794"
},
{
"name": "JavaScript",
"bytes": "24667"
},
{
"name": "Python",
"bytes": "1169446"
},
{
"name": "Shell",
"bytes": "11217"
},
{
"name": "XSLT",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
from bottle import route, run, request, response, default_app
import methods
from utils import *
from datetime import datetime
def normalize(answer):
    """Recursively convert datetime values (in place) to integer unix
    timestamps, mark the bottle response as JSON, and return the string
    form of *answer*.

    NOTE(review): ``str()`` of a dict/list yields Python repr (single
    quotes), which is not strictly valid JSON despite the declared content
    type — kept as-is to preserve the existing wire format; consider
    ``json.dumps`` if clients require valid JSON.
    """
    if isinstance(answer, dict):
        for key in answer:
            value = answer[key]
            # isinstance (not type ==) so datetime subclasses convert too.
            if isinstance(value, datetime):
                answer[key] = int(value.timestamp())
            # Fix: also recurse into nested containers — lists were already
            # walked below, but dicts nested inside dicts were skipped.
            elif isinstance(value, (dict, list)):
                normalize(value)
    elif isinstance(answer, list):
        for item in answer:
            normalize(item)
    response.content_type = 'application/json'
    return str(answer)
@route('/user/nick/<nick>')
def get_user(nick):
    """Return the user's info (normalized string) or an error payload.

    Fix: the error dict used to carry the raw exception object, which
    bottle cannot JSON-serialize — it is stringified now.  The stray
    debug ``print(nick)`` was also removed.
    """
    try:
        user = methods.get_user_info(nick)
        return normalize(user)
    except Exception as err:
        return {'Error': str(err)}
@route('/post/<post_id:int>')
def get_post(post_id):
    """Fetch a single post by id and return it normalized."""
    return normalize(methods.get_post(post_id))
@route('/post/<post_id:int>/reply', method='POST')
def reply_post(post_id):
    """Post a reply (the 'body' form field) to a post; echo the status."""
    body = request.forms.get('body')
    # Flatten bottle's multi-value cookie dict to the first value of each.
    cookie_jar = {}
    for name in request.cookies.dict:
        cookie_jar[name] = request.cookies.dict[name][0]
    status = methods.post_reply(post_id, body, cookie_jar)
    return normalize({'Status': status})
@route('/post/<post_id:int>/vote/<rating:int>', method='POST')
def rating_post(post_id, rating):
    """Apply a vote to the post and echo the resulting rating."""
    # Flatten bottle's multi-value cookie dict to the first value of each.
    cookie_jar = {name: values[0]
                  for name, values in request.cookies.dict.items()}
    new_rating = methods.vote_post(post_id, rating, cookie_jar)
    return normalize({'Rating': new_rating})
@route('/threadslist/<board>/<page:int>')
def get_threads(board, page=0):
    """List the threads of *board*, paginated (first page by default)."""
    return normalize(methods.get_threads_list(board, page))
@route('/thread/<main:int>/<page:re:(all|[0-9]+)>')
def get_thread_page(main, page):
    """Return one page (or 'all') of posts from the thread rooted at *main*."""
    return normalize(methods.get_thread(main, page))
if __name__ == '__main__':
    # Standalone development server; under a WSGI host the 'application'
    # object exported below is used instead.
    run(host='localhost', port=10001, debug=True)
application = default_app()
|
{
"content_hash": "d0d9984c3decd3f2191a6c9e38620bb5",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 75,
"avg_line_length": 26.606060606060606,
"alnum_prop": 0.643507972665148,
"repo_name": "mr-tron/forumlocal-json-api",
"id": "9a1660484fbac90fdaef898bccd220abf7cf5fe1",
"size": "1805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22076"
}
],
"symlink_target": ""
}
|
import Tkinter as tk
import tkSimpleDialog
from PIL import Image, ImageTk
import atx
class CropIDE(object):
    """Tk window showing a device screenshot; the user drags a rectangle
    to select a region and can save the crop to an image file.

    Canvas coordinates are the screenshot scaled by ``self._ratio``;
    ``_fix_bounds`` converts a canvas rectangle back to image pixels.
    """
    def __init__(self, title='AirtestX Basic GUI', screenshot=None):
        # screenshot: zero-arg callable returning a PIL Image
        # (e.g. a device's screenshot method) — used by the Refresh button.
        self._root = tk.Tk()
        self._init_items()
        self._root.title(title)
        self._lastx = 0
        self._lasty = 0
        self._bounds = None # crop area, canvas coords (x0, y0, x1, y1)
        self._center = (0, 0) # center point, used for offset
        self._size = (90, 90) # real screenshot size (w, h) in pixels
        self._moved = False # click or click and move
        self._color = 'red' # draw color
        self._tkimage = None # keep reference so Tk doesn't GC the image
        self._image = None
        self._screenshot = screenshot
        self._ratio = 0.5 # canvas pixels per screenshot pixel
    def _init_items(self):
        # Build the widget tree: a control bar on top, canvas below.
        root = self._root
        frm_control = tk.Frame(root, bg='blue')
        frm_control.grid(column=0, row=0)
        frm_screen = tk.Frame(root, bg='#aaa')
        frm_screen.grid(column=0, row=1)
        tk.Button(frm_control, text="Refresh", command=self._redraw).grid(column=0, row=0, sticky=tk.W)
        tk.Button(frm_control, text="Save crop", command=self._save_crop).grid(column=1, row=0, sticky=tk.W)
        self.canvas = tk.Canvas(frm_screen, bg="blue", bd=0, highlightthickness=0, relief='ridge')
        self.canvas.grid(column=0, row=0, padx=10, pady=10)
        # Mouse drag defines the crop rectangle.
        self.canvas.bind("<Button-1>", self._stroke_start)
        self.canvas.bind("<B1-Motion>", self._stroke_move)
        self.canvas.bind("<B1-ButtonRelease>", self._stroke_done)
    def _fix_bounds(self, bounds):
        """Convert canvas-space bounds to ordered, clamped image pixels."""
        bounds = [x/self._ratio for x in bounds]
        (x0, y0, x1, y1) = bounds
        if x0 > x1:
            x0, y0, x1, y1 = x1, y1, x0, y0
        # in case of out of bounds
        w, h = self._size
        x0 = max(0, x0)
        y0 = max(0, y0)
        x1 = min(w, x1)
        y1 = min(h, y1)
        return map(int, [x0, y0, x1, y1])
    def _save_crop(self):
        """Ask for a filename and save the selected region of the image."""
        print self._bounds
        if self._bounds is None:
            return
        bounds = self._fix_bounds(self._bounds)
        print bounds
        save_to = tkSimpleDialog.askstring("Save cropped image", "Enter filename")
        if save_to:
            # Default to PNG when no extension was given.
            if save_to.find('.') == -1:
                save_to += '.png'
            print('Save to:', save_to)
            self._image.crop(bounds).save(save_to)
            # cv2.imwrite(save_to, image)
    def _redraw(self):
        """Refresh button: grab a fresh screenshot and clear the selection."""
        image = self._screenshot()
        self.draw_image(image)
        self._bounds = None
        self._reset()
    def _reset(self):
        # Remove the selection rectangle and the crosshair marker.
        self.canvas.delete('boundsLine')
        self.canvas.delete('clickPosition')
    def _stroke_start(self, event):
        """Button press: remember the drag origin."""
        self._moved = False
        c = self.canvas
        self._lastx, self._lasty = c.canvasx(event.x), c.canvasy(event.y)
        print 'click:', self._lastx, self._lasty
    def _stroke_move(self, event):
        """Drag: redraw the rubber-band rectangle and its center marker."""
        self._moved = True
        self._reset()
        c = self.canvas
        x, y = c.canvasx(event.x), c.canvasy(event.y)
        self._bounds = (self._lastx, self._lasty, x, y)
        self._draw_bounds(self._bounds)
        x, y = (self._lastx+x)/2, (self._lasty+y)/2
        self.tag_point(x, y)
    def _stroke_done(self, event):
        """Button release: finalize center point and thin the rectangle."""
        c = self.canvas
        x, y = c.canvasx(event.x), c.canvasy(event.y)
        if self._moved:
            x, y = (self._lastx+x)/2, (self._lasty+y)/2
        self._center = (x, y) # remember position
        self.tag_point(x, y)
        self.canvas.itemconfigure('boundsLine', width=2)
    def _draw_bounds(self, bounds):
        # Draw the selection rectangle (note: reads self._bounds, the
        # *bounds* parameter is effectively unused).
        c = self.canvas
        (x0, y0, x1, y1) = self._bounds
        c.create_rectangle(x0, y0, x1, y1, outline=self._color, tags='boundsLine', width=5)#, fill="blue")
        # c.create_line((x0, y0, x1, y1), fill=self._color, width=2, tags='boundsLine', dash=(4, 4))
        # c.create_line((x0, y1, x1, y0), fill=self._color, width=2, tags='boundsLine', dash=(4, 4))
    def tag_point(self, x, y):
        """Draw a crosshair marker at canvas position (x, y)."""
        # coord = 10, 50, 110, 150
        self.canvas.delete('clickPosition')
        r = max(min(self._size)/30*self._ratio, 5)
        self.canvas.create_line(x-r, y, x+r, y, width=2, fill=self._color, tags='clickPosition')
        self.canvas.create_line(x, y-r, x, y+r, width=2, fill=self._color, tags='clickPosition')
        # coord = x-r, y-r, x+r, y+r
        # self.canvas.create_oval(coord, fill='gray', stipple="gray50", tags='clickPosition')
    def draw_image(self, image):
        """Show a PIL *image*, scaled by self._ratio, on the canvas."""
        self._image = image
        self._size = (width, height) = image.size
        w, h = int(width*self._ratio), int(height*self._ratio)
        # print w, h
        image = image.copy()
        image.thumbnail((w, h), Image.ANTIALIAS)
        tkimage = ImageTk.PhotoImage(image)
        self._tkimage = tkimage # keep a reference
        self.canvas.config(width=w, height=h)
        self.canvas.create_image(0, 0, anchor=tk.NW, image=tkimage)
    def mainloop(self):
        """Block running the Tk event loop until the window closes."""
        self._root.mainloop()
def atx_ide(serial):
    """Attach the crop IDE to the device identified by *serial* and run it."""
    device = atx.connect(serial)
    ide = CropIDE('AirtestX IDE SN: %s' % serial, screenshot=device.screenshot)
    ide.draw_image(device.screenshot())
    ide.mainloop()
def test():
    """Manual smoke test: show a saved screenshot and a crosshair marker."""
    ide = CropIDE('AirtestX IDE')
    screen = Image.open('screen.png')
    ide.draw_image(screen)
    ide.tag_point(100, 100)
    ide.mainloop()
if __name__ == '__main__':
    # main()
    # atx.connect().screenshot().save('screen.png')
    # NOTE(review): serial=None presumably selects the default/first
    # connected device — confirm atx.connect(None) semantics.
    atx_ide(None)
    # test()
# canvas.create_line(0, 0, 200, 100)
# canvas.create_line(0, 100, 200, 0, fill="red", dash=(4, 4))
# canvas.create_rectangle(100, 25, 150, 75)#, fill="blue")
# print (width, height)
# lastx, lasty = 0, 0
# color = 'red'
# def xy(event):
# global lastx, lasty
# lastx, lasty = canvas.canvasx(event.x), canvas.canvasy(event.y)
# def add_line(event):
# global lastx, lasty
# x, y = canvas.canvasx(event.x), canvas.canvasy(event.y)
# canvas.create_line((lastx, lasty, x, y), fill=color, width=5, tags='boundsLine')
# lastx, lasty = x, y
# def doneStroke(event):
# canvas.itemconfigure('boundsLine', width=2)
# canvas.bind("<Button-1>", xy)
# canvas.bind("<B1-Motion>", add_line)
# canvas.bind("<B1-ButtonRelease>", doneStroke)
# # mainframe = ttk.Frame(root, padding="3 3 12 12")
# root.mainloop()
|
{
"content_hash": "107ff404b0354a10a2ba8baafffe19c9",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 108,
"avg_line_length": 34.67213114754098,
"alnum_prop": 0.5754137115839244,
"repo_name": "Andy-hpliu/AirtestX",
"id": "96d302446ad70d44fb6619ab5057e7aec22097b4",
"size": "6779",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "scripts/tkgui.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "230"
},
{
"name": "CSS",
"bytes": "34684"
},
{
"name": "Go",
"bytes": "13043"
},
{
"name": "HTML",
"bytes": "28019"
},
{
"name": "JavaScript",
"bytes": "300119"
},
{
"name": "Makefile",
"bytes": "348"
},
{
"name": "Protocol Buffer",
"bytes": "5495"
},
{
"name": "Python",
"bytes": "394333"
},
{
"name": "Shell",
"bytes": "4162"
}
],
"symlink_target": ""
}
|
import codecs
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
    """Return the UTF-8 decoded contents of *fname*, resolved relative to
    this file's directory.

    Fix: the original left the file handle open (``codecs.open(...).read()``);
    a ``with`` block guarantees it is closed.
    """
    file_path = os.path.join(os.path.dirname(__file__), fname)
    with codecs.open(file_path, encoding='utf-8') as file_obj:
        return file_obj.read()
# Package metadata for pytest-django.  The version number is derived from
# the git tag at build time via setuptools_scm (use_scm_version) rather
# than hard-coded here.
setup(
    name='pytest-django',
    use_scm_version=True,
    description='A Django plugin for pytest.',
    author='Andreas Pelme',
    author_email='andreas@pelme.se',
    maintainer="Andreas Pelme",
    maintainer_email="andreas@pelme.se",
    url='https://pytest-django.readthedocs.io/',
    license='BSD-3-Clause',
    packages=['pytest_django'],
    long_description=read('README.rst'),
    # Python 2.7 or 3.4+ (excludes 3.0-3.3).
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
    setup_requires=['setuptools_scm>=1.11.1'],
    install_requires=[
        'pytest>=3.6',
        # pathlib backport only needed on Python < 3.4.
        'pathlib2;python_version<"3.4"',
    ],
    # Optional dependency groups: pip install pytest-django[docs|testing]
    extras_require={
        'docs': [
            'sphinx',
            'sphinx_rtd_theme',
        ],
        'testing': [
            'Django',
            'django-configurations>=2.0',
            'six',
        ],
    },
    classifiers=['Development Status :: 5 - Production/Stable',
                 'Framework :: Django',
                 'Framework :: Django :: 1.8',
                 'Framework :: Django :: 1.9',
                 'Framework :: Django :: 1.10',
                 'Framework :: Django :: 1.11',
                 'Framework :: Django :: 2.0',
                 'Framework :: Django :: 2.1',
                 'Framework :: Django :: 2.2',
                 'Framework :: Django :: 3.0',
                 'Framework :: Django :: 3.1',
                 'Intended Audience :: Developers',
                 'License :: OSI Approved :: BSD License',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python',
                 'Programming Language :: Python :: 2.7',
                 'Programming Language :: Python :: 3.4',
                 'Programming Language :: Python :: 3.5',
                 'Programming Language :: Python :: 3.6',
                 'Programming Language :: Python :: 3.7',
                 'Programming Language :: Python :: 3.8',
                 'Programming Language :: Python :: Implementation :: CPython',
                 'Programming Language :: Python :: Implementation :: PyPy',
                 'Topic :: Software Development :: Testing',
                 ],
    project_urls={
        'Source': 'https://github.com/pytest-dev/pytest-django',
        'Changelog': 'https://pytest-django.readthedocs.io/en/latest/changelog.html',
    },
    # the following makes a plugin available to pytest
    entry_points={'pytest11': ['django = pytest_django.plugin']})
|
{
"content_hash": "f3fdc780831c827fd8e9d4778efd7632",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 85,
"avg_line_length": 38.36,
"alnum_prop": 0.5342370524852277,
"repo_name": "cloudera/hue",
"id": "8513c299a504f0ba3dab34cd64165bf3c5d34110",
"size": "2924",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/pytest-django-3.10.0/setup.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
}
|
# Re-export the generated operation classes for this API version.
from ._configuration_stores_operations import ConfigurationStoresOperations
from ._operations import Operations
from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
from ._private_link_resources_operations import PrivateLinkResourcesOperations
from ._key_values_operations import KeyValuesOperations

# Hand-written customizations: any name exported by _patch overrides the
# generated names imported above (import order matters here).
from ._patch import __all__ as _patch_all
from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk

__all__ = [
    'ConfigurationStoresOperations',
    'Operations',
    'PrivateEndpointConnectionsOperations',
    'PrivateLinkResourcesOperations',
    'KeyValuesOperations',
]
# Expose any additional public names contributed by the patch module,
# then give it a chance to run package-level customizations.
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
|
{
"content_hash": "bedce5e9dc6b5f5e760b2990037a9c88",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 90,
"avg_line_length": 41.888888888888886,
"alnum_prop": 0.7785145888594165,
"repo_name": "Azure/azure-sdk-for-python",
"id": "36f96183612a1441b8e0d73074e0064c1f29494f",
"size": "1222",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "sdk/appconfiguration/azure-mgmt-appconfiguration/azure/mgmt/appconfiguration/v2022_05_01/operations/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
import orange
import Orange
import math
from itertools import chain
def discrete_columns(table, cols=None):
    """Return the names from *cols* whose domain attributes are discrete.

    When *cols* is None, every attribute in the table's domain is
    considered.
    """
    if cols is None:
        cols = [attr.name for attr in table.domain]
    domain = table.domain
    return [col for col in cols
            if domain[col].varType == orange.VarTypes.Discrete]
def continuous_columns(table, cols=None):
    """Return the names from *cols* whose domain attributes are NOT
    discrete (i.e. continuous or other non-discrete types).

    When *cols* is None, every attribute in the table's domain is
    considered.
    """
    if cols is None:
        cols = [attr.name for attr in table.domain]
    domain = table.domain
    return [col for col in cols
            if domain[col].varType != orange.VarTypes.Discrete]
def column_positions(table, cols):
    """Map each column name/descriptor in *cols* to its index in the
    table's domain."""
    domain = table.domain
    return [domain.index(domain[col]) for col in cols]
def valid_table_cols(table, cols, kwargs={}):
    """Return the attribute names of *table* that are worth analyzing.

    Drops hard-coded uninteresting attributes, anything already in *cols*,
    anything in kwargs['ignore_attrs'], id-like names, and discrete
    attributes with too many distinct values.

    NOTE(review): the mutable default ``kwargs={}`` is only read here, so
    it is harmless, but a ``kwargs=None`` guard would be safer.
    """
    # Domain-specific attributes that are never useful as analysis columns.
    bad_attrs = [
        'id', 'err', 'pickup_id', 'pickup_address', 'epoch', 'userid',
        'mid', 'imdb', 'tstamp', "unknown", "action", "adventure",
        "animation", "children", "comedy", "crime", "documentary",
        "drama", "fantasy", "noir", "horro", "musical", "mystery",
        "romance", "scifi", "thriller", "war", "western", 'lin_ima', 'com_nam',
        'total_charges', 'total_cost', "totalcosts_ia", "totalcharges_ia",
        "estimatednetrevenue_ia", "estimated_net_revenue", "lengthofstay"
    ]
    ignore_attrs = kwargs.get('ignore_attrs', [])
    print "ignoring attrs: %s" % str(ignore_attrs)
    attrs = table.domain
    ret = []
    for attr in attrs:
        if attr.name in bad_attrs:
            continue
        if attr.name in cols:
            continue
        if attr.name in ignore_attrs:
            continue
        # Skip id-like columns (but keep the sensor 'moteid' column).
        if attr.name.endswith('id') and attr.name != 'moteid':
            continue
        # Number of distinct values over the whole table (O(rows) per attr).
        nunique = len(set([row[attr].value for row in table]))
        print "nunique %s\t%s" % (nunique, attr.name)
        if attr.varType != orange.VarTypes.Continuous:
            # Precedence: (nunique > 100 AND > 70% of rows) OR > 7000 —
            # high-cardinality discrete columns are skipped.
            if nunique > 100 and nunique > 0.7 * len(table) or nunique > 7000:
                print "%s skipped" % attr.name
                continue
        ret.append(attr.name)
    return ret
def rm_attr_from_domain(table, attrs):
    """
    Remove attribute(s)/cols(s) from table
    Creates a copy of the original table :(

    *attrs* may be a list of attribute names/descriptors or a single one;
    a lookup failure while treating it as a list makes us fall back to
    treating it as one attribute.
    """
    domain = table.domain
    try:
        erridxs = [domain.index(domain[attr]) for attr in attrs]
    except Exception:
        # Fix: was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit; only ordinary lookup/iteration
        # failures should trigger the single-attribute fallback.
        erridxs = [domain.index(domain[attrs])]
    # Pop from the end so earlier indexes stay valid.
    erridxs.sort(reverse=True)
    attrs = list(table.domain)
    # NOTE: relies on Python 2's eager map() for the side effect.
    map(attrs.pop, erridxs)
    newd = Orange.data.Domain(attrs)
    newd.add_metas(table.domain.get_metas())
    return Orange.data.Table(newd, table)
    # alternative way to create new table
    # return table.select_ref(newd)
def ids_filter(table, ids, attrname='id', negate=False):
    """Build a callable that filters a table down to the rows whose
    *attrname* value is in *ids* (or NOT in *ids* when negate=True).

    NOTE(review): ``ValueFilterDiscrete``, ``ValueFilterContinuous`` and
    ``ValueFilter`` are not imported in this module's visible imports —
    unless they are injected elsewhere this raises NameError; they
    presumably come from Orange's filter module. Verify the imports.
    """
    attr = table.domain[attrname]
    id_pos = table.domain.index(attr)
    # Wrap raw ids in Orange Values unless they already are Values.
    vals = ids
    if not isinstance(ids[0], Orange.data.Value):
        vals = [Orange.data.Value(attr, id) for id in ids]
    if attr.var_type == Orange.feature.Type.Discrete:
        f = [ValueFilterDiscrete(position=id_pos, values=vals)]
    else:
        # Continuous attributes need one equality condition per value.
        f = [ValueFilterContinuous(position=id_pos, ref=val, oper=ValueFilter.Equal) for val in vals]
    # conjunction=0 -> OR the conditions together.
    c = Orange.data.filter.Values(domain=table.domain,
                                  negate=negate,
                                  conjunction=0,
                                  conditions=f)
    def tmp(t, negate=negate):
        # Re-apply the captured negate flag on every call (callers may
        # override it per invocation).
        c.negate = negate
        return c(t)
    return tmp
def reconcile_tables(*all_tables):
    """
    Ensure bad and good tables use the same domain
    @return list of reconciled tables, union of all tables
    """
    # First pass: build a shared domain, merging discrete values from all
    # tables and upgrading attribute descriptors where types disagree.
    domain = None
    full_table = None
    for table in chain(*all_tables):
        if domain is None:
            domain = list(table.domain.clone())
        else:
            for idx, (newattr, oldattr) in enumerate(zip(domain, table.domain)):
                if newattr.varType == orange.VarTypes.Discrete:
                    if oldattr.varType == orange.VarTypes.Discrete:
                        map(newattr.add_value, oldattr.values)
                else:
                    domain[idx] = table.domain.clone()[idx]
    domain = Orange.data.Domain(domain)
    # Second pass: translate each input table into the shared domain,
    # accumulating everything into full_table.
    translated_tables = []
    for tables in all_tables:
        group = []
        translated_tables.append(group)
        for table in tables:
            #if not len(table):
            #continue
            # NOTE(review): this branch looks inverted — a NON-empty input
            # is replaced by an EMPTY table, while the row-copying path
            # only runs for empty inputs. Verify against callers before
            # changing.
            if len(table):
                table = Orange.data.Table(domain)
            else:
                rows = [[v.value for v in row] for row in table]
                table = Orange.data.Table(domain, rows)
            group.append(table)
            if full_table is None:
                full_table = Orange.data.Table(domain, table)
            else:
                full_table.extend(table)
    if not full_table:
        return (), None
    return tuple(translated_tables), full_table
    # NOTE(review): everything below is unreachable dead code (the
    # function has already returned on both paths above).
    translate = lambda t: Orange.data.Table(domain, t.to_numpyMA('ac')[0].data)
    translated_tables = []
    for tables in all_tables:#shared_domain_tables:
        translated_tables.append(map(translate, tables))
    return tuple(translated_tables), full_table
def union_tables(*tables):
    """Concatenate every table from the given iterables into one table.

    The result's domain is a clone of the first table's domain; returns
    None when no tables are supplied.
    """
    merged = None
    for table in chain(*tables):
        if merged is None:
            merged = Orange.data.Table(table.domain.clone())
        merged.extend(table)
    return merged
def add_meta_column(tables, name, vals=None, default=None):
    """
    @param name name of new attribute
    @param vals None if meta should be continuous, otherwise vals is a list of string values
           set to values of discrete attribute
    @param default default value.
           if None, set to -inf for continuous attribute, or first element of vals for discrete

    Returns the meta id shared by all tables.
    """
    meta_id = Orange.feature.Descriptor.new_meta_id()
    for table in tables:
        if not vals:
            table.domain.addmeta(meta_id, Orange.feature.Continuous(name))
            # Fix: ``default or fallback`` wrongly treated falsy defaults
            # (0, 0.0, "") as missing; only None means "use the fallback",
            # matching the documented contract above.
            fallback = default if default is not None else float("-inf")
            table.add_meta_attribute(meta_id, fallback)
        else:
            table.domain.addmeta(meta_id, Orange.feature.Discrete(name, values=vals))
            fallback = default if default is not None else vals[0]
            table.add_meta_attribute(meta_id, fallback)
    return meta_id
|
{
"content_hash": "e254edc76614e750f445bb503014a877",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 101,
"avg_line_length": 29.35820895522388,
"alnum_prop": 0.6217590238942552,
"repo_name": "sirrice/scorpion",
"id": "75b61c36a45e3d99bbe3bffab372680ddf036cf4",
"size": "5901",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scorpion/util/table.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "389180"
},
{
"name": "R",
"bytes": "3041"
},
{
"name": "Shell",
"bytes": "1322"
}
],
"symlink_target": ""
}
|
# URL routes for the SQL explorer app.
# Class-based views are wired with .as_view(); function views are referenced
# by dotted string path (pre-Django-1.8 style, as used by `patterns('', ...)`).
from django.conf.urls import patterns, url
from explorer.views import QueryView, CreateQueryView, PlayQueryView, DeleteQueryView, ListQueryView, ListQueryLogView

urlpatterns = patterns('',
    # Detail page and row-level actions for a saved query, keyed by numeric id.
    url(r'(?P<query_id>\d+)/$', QueryView.as_view(), name='query_detail'),
    url(r'(?P<query_id>\d+)/download$', 'explorer.views.download_query', name='query_download'),
    url(r'(?P<query_id>\d+)/csv$', 'explorer.views.view_csv_query', name='query_csv'),
    url(r'(?P<query_id>\d+)/email_csv$', 'explorer.views.email_csv_query', name='email_csv_query'),
    url(r'(?P<pk>\d+)/delete$', DeleteQueryView.as_view(), name='query_delete'),
    # Creation / ad-hoc playground endpoints.
    url(r'new/$', CreateQueryView.as_view(), name='query_create'),
    url(r'play/$', PlayQueryView.as_view(), name='explorer_playground'),
    url(r'csv$', 'explorer.views.download_csv_from_sql', name='generate_csv'),
    # Introspection / utility endpoints.
    url(r'schema/$', 'explorer.views.schema', name='explorer_schema'),
    url(r'logs/$', ListQueryLogView.as_view(), name='explorer_logs'),
    url(r'format/$', 'explorer.views.format_sql', name='format_sql'),
    # Catch-all index; kept last so the more specific patterns win.
    url(r'^$', ListQueryView.as_view(), name='explorer_index'),
)
|
{
"content_hash": "262c5f21c77366b5b0ef247a41893582",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 118,
"avg_line_length": 66.23529411764706,
"alnum_prop": 0.6705150976909414,
"repo_name": "pombredanne/django-sql-explorer",
"id": "8ad14074cc25345a64deb04e9933c338f9f49f5d",
"size": "1126",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "explorer/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27155"
},
{
"name": "HTML",
"bytes": "27347"
},
{
"name": "JavaScript",
"bytes": "345502"
},
{
"name": "Python",
"bytes": "94405"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals

import django.contrib.postgres.fields.jsonb
from django.db import migrations


class Migration(migrations.Migration):
    """Add a PostgreSQL JSONB column named ``json`` to the ``layer`` model."""

    dependencies = [
        ('layers', '0002_auto_20161009_1735'),
    ]

    operations = [
        migrations.AddField(
            model_name='layer',
            name='json',
            # Empty string is a one-off default used to populate existing
            # rows; preserve_default=False keeps it out of the model state.
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=''),
            preserve_default=False,
        ),
    ]
|
{
"content_hash": "b86e20c254c8ccf02653f9121f0f08da",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 89,
"avg_line_length": 24.15,
"alnum_prop": 0.6211180124223602,
"repo_name": "RubenSchmidt/giscademy",
"id": "deecbb0a808b2bc51c7080b14645c4ab742bc132",
"size": "556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "layers/migrations/0003_layer_json.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23390"
},
{
"name": "HTML",
"bytes": "43665"
},
{
"name": "JavaScript",
"bytes": "332861"
},
{
"name": "Python",
"bytes": "63323"
}
],
"symlink_target": ""
}
|
import warnings
import numpy as np
import pytest
import pandas.util._test_decorators as td
import pandas as pd
from pandas import (
DataFrame,
Series,
isna,
)
import pandas._testing as tm
class TestDataFrameCov:
    """Tests for DataFrame.cov (pairwise covariance of columns)."""

    def test_cov(self, float_frame, float_string_frame):
        # min_periods no NAs (corner case)
        expected = float_frame.cov()
        result = float_frame.cov(min_periods=len(float_frame))

        tm.assert_frame_equal(expected, result)

        # min_periods larger than the frame length -> every pair is all-NaN.
        result = float_frame.cov(min_periods=len(float_frame) + 1)
        assert isna(result.values).all()

        # with NAs
        frame = float_frame.copy()
        frame.iloc[:5, frame.columns.get_loc("A")] = np.nan
        frame.iloc[5:10, frame.columns.get_loc("B")] = np.nan
        result = frame.cov(min_periods=len(frame) - 8)
        expected = frame.cov()
        # A/B share too few non-NaN observations to satisfy min_periods.
        expected.loc["A", "B"] = np.nan
        expected.loc["B", "A"] = np.nan
        tm.assert_frame_equal(result, expected)

        # regular
        result = frame.cov()
        expected = frame["A"].cov(frame["C"])
        tm.assert_almost_equal(result["A"]["C"], expected)

        # exclude non-numeric types
        with tm.assert_produces_warning(
            FutureWarning, match="The default value of numeric_only"
        ):
            result = float_string_frame.cov()
        expected = float_string_frame.loc[:, ["A", "B", "C", "D"]].cov()
        tm.assert_frame_equal(result, expected)

        # Single column frame
        df = DataFrame(np.linspace(0.0, 1.0, 10))
        result = df.cov()
        expected = DataFrame(
            np.cov(df.values.T).reshape((1, 1)), index=df.columns, columns=df.columns
        )
        tm.assert_frame_equal(result, expected)
        # A NaN row is excluded pairwise, matching np.cov on the clean rows.
        df.loc[0] = np.nan
        result = df.cov()
        expected = DataFrame(
            np.cov(df.values[1:].T).reshape((1, 1)),
            index=df.columns,
            columns=df.columns,
        )
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("test_ddof", [None, 0, 1, 2, 3])
    def test_cov_ddof(self, test_ddof):
        # GH#34611: ddof must be forwarded and agree with np.cov's ddof.
        np_array1 = np.random.rand(10)
        np_array2 = np.random.rand(10)
        df = DataFrame({0: np_array1, 1: np_array2})
        result = df.cov(ddof=test_ddof)
        expected_np = np.cov(np_array1, np_array2, ddof=test_ddof)
        expected = DataFrame(expected_np)
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize(
        "other_column", [pd.array([1, 2, 3]), np.array([1.0, 2.0, 3.0])]
    )
    def test_cov_nullable_integer(self, other_column):
        # https://github.com/pandas-dev/pandas/issues/33803
        data = DataFrame({"a": pd.array([1, 2, None]), "b": other_column})
        result = data.cov()
        arr = np.array([[0.5, 0.5], [0.5, 1.0]])
        expected = DataFrame(arr, columns=["a", "b"], index=["a", "b"])
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("numeric_only", [True, False])
    def test_cov_numeric_only(self, numeric_only):
        # when dtypes of pandas series are different
        # then ndarray will have dtype=object,
        # so it need to be properly handled
        df = DataFrame({"a": [1, 0], "c": ["x", "y"]})
        expected = DataFrame(0.5, index=["a"], columns=["a"])
        if numeric_only:
            result = df.cov(numeric_only=numeric_only)
            tm.assert_frame_equal(result, expected)
        else:
            # numeric_only=False must fail loudly on the object column.
            with pytest.raises(ValueError, match="could not convert string to float"):
                df.cov(numeric_only=numeric_only)
class TestDataFrameCorr:
    """Tests for DataFrame.corr (pairwise correlation of columns)."""

    # DataFrame.corr(), as opposed to DataFrame.corrwith

    @pytest.mark.parametrize("method", ["pearson", "kendall", "spearman"])
    @td.skip_if_no_scipy
    def test_corr_scipy_method(self, float_frame, method):
        # Inject NaNs and duplicated values so each method has real work to do.
        float_frame["A"][:5] = np.nan
        float_frame["B"][5:10] = np.nan
        float_frame["A"][:10] = float_frame["A"][10:20]

        correls = float_frame.corr(method=method)
        expected = float_frame["A"].corr(float_frame["C"], method=method)
        tm.assert_almost_equal(correls["A"]["C"], expected)

    # ---------------------------------------------------------------------

    def test_corr_non_numeric(self, float_string_frame):
        # exclude non-numeric types
        with tm.assert_produces_warning(
            FutureWarning, match="The default value of numeric_only"
        ):
            result = float_string_frame.corr()
        expected = float_string_frame.loc[:, ["A", "B", "C", "D"]].corr()
        tm.assert_frame_equal(result, expected)

    @td.skip_if_no_scipy
    @pytest.mark.parametrize("meth", ["pearson", "kendall", "spearman"])
    def test_corr_nooverlap(self, meth):
        # nothing in common
        df = DataFrame(
            {
                "A": [1, 1.5, 1, np.nan, np.nan, np.nan],
                "B": [np.nan, np.nan, np.nan, 1, 1.5, 1],
                "C": [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
            }
        )
        rs = df.corr(meth)
        # A and B never overlap, so their cross-correlation is NaN;
        # each column still correlates perfectly with itself (unless all-NaN).
        assert isna(rs.loc["A", "B"])
        assert isna(rs.loc["B", "A"])
        assert rs.loc["A", "A"] == 1
        assert rs.loc["B", "B"] == 1
        assert isna(rs.loc["C", "C"])

    @pytest.mark.parametrize("meth", ["pearson", "spearman"])
    def test_corr_constant(self, meth):
        # constant --> all NA
        df = DataFrame(
            {
                "A": [1, 1, 1, np.nan, np.nan, np.nan],
                "B": [np.nan, np.nan, np.nan, 1, 1, 1],
            }
        )
        rs = df.corr(meth)
        assert isna(rs.values).all()

    @td.skip_if_no_scipy
    @pytest.mark.parametrize("meth", ["pearson", "kendall", "spearman"])
    def test_corr_int_and_boolean(self, meth):
        # when dtypes of pandas series are different
        # then ndarray will have dtype=object,
        # so it need to be properly handled
        df = DataFrame({"a": [True, False], "b": [1, 0]})

        expected = DataFrame(np.ones((2, 2)), index=["a", "b"], columns=["a", "b"])
        with warnings.catch_warnings(record=True):
            # scipy may warn about degenerate inputs; irrelevant here.
            warnings.simplefilter("ignore", RuntimeWarning)
            result = df.corr(meth)
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("method", ["cov", "corr"])
    def test_corr_cov_independent_index_column(self, method):
        # GH#14617: result index/columns must be equal but not the same object.
        df = DataFrame(np.random.randn(4 * 10).reshape(10, 4), columns=list("abcd"))
        result = getattr(df, method)()
        assert result.index is not result.columns
        assert result.index.equals(result.columns)

    def test_corr_invalid_method(self):
        # GH#22298
        df = DataFrame(np.random.normal(size=(10, 2)))
        msg = "method must be either 'pearson', 'spearman', 'kendall', or a callable, "
        with pytest.raises(ValueError, match=msg):
            df.corr(method="____")

    def test_corr_int(self):
        # dtypes other than float64 GH#1761
        df = DataFrame({"a": [1, 2, 3, 4], "b": [1, 2, 3, 4]})

        # Smoke test: must not raise on int columns.
        df.cov()
        df.corr()

    @td.skip_if_no_scipy
    @pytest.mark.parametrize(
        "nullable_column", [pd.array([1, 2, 3]), pd.array([1, 2, None])]
    )
    @pytest.mark.parametrize(
        "other_column",
        [pd.array([1, 2, 3]), np.array([1.0, 2.0, 3.0]), np.array([1.0, 2.0, np.nan])],
    )
    @pytest.mark.parametrize("method", ["pearson", "spearman", "kendall"])
    def test_corr_nullable_integer(self, nullable_column, other_column, method):
        # https://github.com/pandas-dev/pandas/issues/33803
        data = DataFrame({"a": nullable_column, "b": other_column})
        result = data.corr(method=method)
        expected = DataFrame(np.ones((2, 2)), columns=["a", "b"], index=["a", "b"])
        tm.assert_frame_equal(result, expected)

    def test_corr_item_cache(self):
        # Check that corr does not lead to incorrect entries in item_cache

        df = DataFrame({"A": range(10)})
        df["B"] = range(10)[::-1]

        ser = df["A"]  # populate item_cache
        assert len(df._mgr.arrays) == 2  # i.e. 2 blocks

        _ = df.corr()

        # Check that the corr didn't break link between ser and df
        ser.values[0] = 99
        assert df.loc[0, "A"] == 99
        assert df["A"] is ser
        assert df.values[0, 0] == 99

    @pytest.mark.parametrize("length", [2, 20, 200, 2000])
    def test_corr_for_constant_columns(self, length):
        # GH: 37448 - constant columns correlate as NaN at any length.
        df = DataFrame(length * [[0.4, 0.1]], columns=["A", "B"])
        result = df.corr()
        expected = DataFrame(
            {"A": [np.nan, np.nan], "B": [np.nan, np.nan]}, index=["A", "B"]
        )
        tm.assert_frame_equal(result, expected)

    def test_calc_corr_small_numbers(self):
        # GH: 37452 - tiny magnitudes must not underflow to garbage.
        df = DataFrame(
            {"A": [1.0e-20, 2.0e-20, 3.0e-20], "B": [1.0e-20, 2.0e-20, 3.0e-20]}
        )
        result = df.corr()
        expected = DataFrame({"A": [1.0, 1.0], "B": [1.0, 1.0]}, index=["A", "B"])
        tm.assert_frame_equal(result, expected)

    @td.skip_if_no_scipy
    @pytest.mark.parametrize("method", ["pearson", "spearman", "kendall"])
    def test_corr_min_periods_greater_than_length(self, method):
        # min_periods above the row count -> every entry is NaN.
        df = DataFrame({"A": [1, 2], "B": [1, 2]})
        result = df.corr(method=method, min_periods=3)
        expected = DataFrame(
            {"A": [np.nan, np.nan], "B": [np.nan, np.nan]}, index=["A", "B"]
        )
        tm.assert_frame_equal(result, expected)

    @td.skip_if_no_scipy
    @pytest.mark.parametrize("meth", ["pearson", "kendall", "spearman"])
    @pytest.mark.parametrize("numeric_only", [True, False])
    def test_corr_numeric_only(self, meth, numeric_only):
        # when dtypes of pandas series are different
        # then ndarray will have dtype=object,
        # so it need to be properly handled
        df = DataFrame({"a": [1, 0], "b": [1, 0], "c": ["x", "y"]})
        expected = DataFrame(np.ones((2, 2)), index=["a", "b"], columns=["a", "b"])
        if numeric_only:
            result = df.corr(meth, numeric_only=numeric_only)
            tm.assert_frame_equal(result, expected)
        else:
            with pytest.raises(ValueError, match="could not convert string to float"):
                df.corr(meth, numeric_only=numeric_only)
class TestDataFrameCorrWith:
    """Tests for DataFrame.corrwith (row/column-wise correlation with
    another DataFrame or Series)."""

    def test_corrwith(self, datetime_frame):
        a = datetime_frame
        noise = Series(np.random.randn(len(a)), index=a.index)
        b = datetime_frame.add(noise, axis=0)

        # make sure order does not matter
        b = b.reindex(columns=b.columns[::-1], index=b.index[::-1][10:])
        del b["B"]

        colcorr = a.corrwith(b, axis=0)
        tm.assert_almost_equal(colcorr["A"], a["A"].corr(b["A"]))

        rowcorr = a.corrwith(b, axis=1)
        tm.assert_series_equal(rowcorr, a.T.corrwith(b.T, axis=0))

        # drop=True removes labels not present in both operands.
        dropped = a.corrwith(b, axis=0, drop=True)
        tm.assert_almost_equal(dropped["A"], a["A"].corr(b["A"]))
        assert "B" not in dropped

        dropped = a.corrwith(b, axis=1, drop=True)
        assert a.index[-1] not in dropped.index

        # non time-series data
        index = ["a", "b", "c", "d", "e"]
        columns = ["one", "two", "three", "four"]
        df1 = DataFrame(np.random.randn(5, 4), index=index, columns=columns)
        df2 = DataFrame(np.random.randn(4, 4), index=index[:4], columns=columns)
        correls = df1.corrwith(df2, axis=1)
        for row in index[:4]:
            tm.assert_almost_equal(correls[row], df1.loc[row].corr(df2.loc[row]))

    def test_corrwith_with_objects(self):
        df1 = tm.makeTimeDataFrame()
        df2 = tm.makeTimeDataFrame()
        cols = ["A", "B", "C", "D"]

        df1["obj"] = "foo"
        df2["obj"] = "bar"

        # Object columns are silently excluded (deprecated default).
        with tm.assert_produces_warning(
            FutureWarning, match="The default value of numeric_only"
        ):
            result = df1.corrwith(df2)
        expected = df1.loc[:, cols].corrwith(df2.loc[:, cols])
        tm.assert_series_equal(result, expected)

        with tm.assert_produces_warning(
            FutureWarning, match="The default value of numeric_only"
        ):
            result = df1.corrwith(df2, axis=1)
        expected = df1.loc[:, cols].corrwith(df2.loc[:, cols], axis=1)
        tm.assert_series_equal(result, expected)

    def test_corrwith_series(self, datetime_frame):
        # Series operand: each column is correlated with the Series.
        result = datetime_frame.corrwith(datetime_frame["A"])
        expected = datetime_frame.apply(datetime_frame["A"].corr)

        tm.assert_series_equal(result, expected)

    def test_corrwith_matches_corrcoef(self):
        df1 = DataFrame(np.arange(10000), columns=["a"])
        df2 = DataFrame(np.arange(10000) ** 2, columns=["a"])
        c1 = df1.corrwith(df2)["a"]
        c2 = np.corrcoef(df1["a"], df2["a"])[0][1]

        tm.assert_almost_equal(c1, c2)
        assert c1 < 1

    @pytest.mark.parametrize("numeric_only", [True, False])
    def test_corrwith_mixed_dtypes(self, numeric_only):
        # GH#18570
        df = DataFrame(
            {"a": [1, 4, 3, 2], "b": [4, 6, 7, 3], "c": ["a", "b", "c", "d"]}
        )
        s = Series([0, 6, 7, 3])
        if numeric_only:
            result = df.corrwith(s, numeric_only=numeric_only)
            corrs = [df["a"].corr(s), df["b"].corr(s)]
            expected = Series(data=corrs, index=["a", "b"])
            tm.assert_series_equal(result, expected)
        else:
            with pytest.raises(TypeError, match="not supported for the input types"):
                df.corrwith(s, numeric_only=numeric_only)

    def test_corrwith_index_intersection(self):
        df1 = DataFrame(np.random.random(size=(10, 2)), columns=["a", "b"])
        df2 = DataFrame(np.random.random(size=(10, 3)), columns=["a", "b", "c"])

        # drop=True keeps only columns common to both frames.
        result = df1.corrwith(df2, drop=True).index.sort_values()
        expected = df1.columns.intersection(df2.columns).sort_values()
        tm.assert_index_equal(result, expected)

    def test_corrwith_index_union(self):
        df1 = DataFrame(np.random.random(size=(10, 2)), columns=["a", "b"])
        df2 = DataFrame(np.random.random(size=(10, 3)), columns=["a", "b", "c"])

        # drop=False keeps the union of columns (non-shared ones become NaN).
        result = df1.corrwith(df2, drop=False).index.sort_values()
        expected = df1.columns.union(df2.columns).sort_values()
        tm.assert_index_equal(result, expected)

    def test_corrwith_dup_cols(self):
        # GH#21925
        df1 = DataFrame(np.vstack([np.arange(10)] * 3).T)
        df2 = df1.copy()
        df2 = pd.concat((df2, df2[0]), axis=1)

        result = df1.corrwith(df2)
        expected = Series(np.ones(4), index=[0, 0, 1, 2])
        tm.assert_series_equal(result, expected)

    def test_corr_numerical_instabilities(self):
        # GH#45640
        df = DataFrame([[0.2, 0.4], [0.4, 0.2]])
        result = df.corr()
        expected = DataFrame({0: [1.0, -1.0], 1: [-1.0, 1.0]})
        # Subtract 1 so the comparison happens near zero with a tight atol.
        tm.assert_frame_equal(result - 1, expected - 1, atol=1e-17)

    @td.skip_if_no_scipy
    def test_corrwith_spearman(self):
        # GH#21925: rank-based, so x vs x**2 is a perfect correlation.
        df = DataFrame(np.random.random(size=(100, 3)))
        result = df.corrwith(df**2, method="spearman")
        expected = Series(np.ones(len(result)))
        tm.assert_series_equal(result, expected)

    @td.skip_if_no_scipy
    def test_corrwith_kendall(self):
        # GH#21925: rank-based, so x vs x**2 is a perfect correlation.
        df = DataFrame(np.random.random(size=(100, 3)))
        result = df.corrwith(df**2, method="kendall")
        expected = Series(np.ones(len(result)))
        tm.assert_series_equal(result, expected)
|
{
"content_hash": "579c60c52e7af371301cff5c7b4fa93c",
"timestamp": "",
"source": "github",
"line_count": 408,
"max_line_length": 87,
"avg_line_length": 38.05882352941177,
"alnum_prop": 0.5622102009273571,
"repo_name": "datapythonista/pandas",
"id": "ee9af3f436943e417949df7817cb9cb36a5fb2c4",
"size": "15528",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pandas/tests/frame/methods/test_cov_corr.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C",
"bytes": "355524"
},
{
"name": "CSS",
"bytes": "1662"
},
{
"name": "Cython",
"bytes": "1178139"
},
{
"name": "Dockerfile",
"bytes": "1933"
},
{
"name": "HTML",
"bytes": "456449"
},
{
"name": "Makefile",
"bytes": "505"
},
{
"name": "Python",
"bytes": "19048364"
},
{
"name": "Shell",
"bytes": "10511"
},
{
"name": "Smarty",
"bytes": "8486"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.