prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
from .formSubmission import FormSubmission
from django.contrib.auth.models import User
from django.db import models
from django.template.defaultfilters import slugify
class Log(models.Model):
    """
    Form Submission Log Database Model

    Attributes:
        * owner - user submitting the message
        * submission - form submission associated
        * timestamp - time of submission entry
        * private - display to non-owners?
        * message - log entry
        * mtype - type of log entry
            * 1 - user message (default)
            * 2 - system action
            * 3 - form status change
            * 4 - attached file
        * file - attached file entry
    """

    # Known extensions mapped to MIME types; anything else is served as a
    # forced download (see content_type below).
    CONTENT_TYPES = {
        "pdf": "application/pdf",
        "txt": "text/plain",
        "png": "image/png",
        "jpeg": "image/jpeg",
        "jpg": "image/jpeg",
        "gif": "image/gif",
    }

    # on_delete made explicit: CASCADE was the implicit default before
    # Django 2.0 made the argument mandatory, so behavior is unchanged.
    owner = models.ForeignKey(User, blank=True, null=True,
                              on_delete=models.CASCADE)
    submission = models.ForeignKey(FormSubmission, on_delete=models.CASCADE)
    timestamp = models.DateTimeField(auto_now_add=True)
    private = models.BooleanField(default=False)
    message = models.TextField(blank=True)
    mtype = models.IntegerField(default=1)
    file = models.FileField(upload_to='private/constellation_forms/log_files/')

    class Meta:
        db_table = "form_log"
        ordering = ("timestamp",)

    @property
    def extension(self):
        """File extension (text after the last '.') of the attached file."""
        return self.file.name.split(".")[-1]

    @property
    def content_type(self):
        """MIME type for the attached file, derived from its extension."""
        return self.CONTENT_TYPES.get(self.extension,
                                      "application/force-download")

    @property
    def file_name(self):
        """Slugified download name: <form name>_<pk>_<owner username>.<ext>."""
        return slugify("{0}_{1}_{2}".format(self.submission.form.name, self.pk,
                                            self.owner.username)) + "." + \
            self.extension
|
"""Utilities for working with data structures.
Version Added:
2.1
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django_evolution.compat import six
def filter_dup_list_items(items):
    """Return list items with duplicates filtered out.

    The order of items will be preserved, but only the first occurrence of
    any given item will remain in the list.

    Version Added:
        2.1

    Args:
        items (list):
            The list of items.

    Returns:
        list:
            The resulting de-duplicated list of items.
    """
    # OrderedDict.fromkeys() keeps only the first occurrence of each key
    # while preserving insertion order; no need for the six compatibility
    # shim just to list the keys (works identically on Python 2 and 3).
    return list(OrderedDict.fromkeys(items))
def merge_dicts(dest, source):
    """Merge two dictionaries together.

    This will recursively merge a source dictionary into a destination
    dictionary with the following rules:

    * Keys in the source that aren't in the destination are placed directly
      into the destination (same value instance, not a copy).
    * Lists present in both are combined by appending the source list to
      the destination list (without recursing into the lists).
    * Dictionaries present in both are merged using this function.
    * Any other key collision results in a :py:exc:`TypeError`.

    Version Added:
        2.1

    Args:
        dest (dict):
            The destination dictionary to merge into.

        source (dict):
            The source dictionary to merge into the destination.

    Raises:
        TypeError:
            A key was present in both dictionaries with a type that could
            not be merged.
    """
    for key, value in source.items():
        if key not in dest:
            # No collision; store the same instance directly.
            dest[key] = value
            continue

        existing = dest[key]

        if isinstance(value, list):
            if not isinstance(existing, list):
                raise TypeError(
                    'Cannot merge a list into a %r for key "%s".'
                    % (type(existing), key))

            dest[key] += value
        elif isinstance(value, dict):
            if not isinstance(existing, dict):
                raise TypeError(
                    'Cannot merge a dictionary into a %r for key "%s".'
                    % (type(existing), key))

            merge_dicts(existing, value)
        else:
            raise TypeError(
                'Key "%s" was not an expected type (found %r) '
                'when merging dictionaries.'
                % (key, type(value)))
|
# -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Heidelberg University Library
Distributed under the GNU GPL v3. For full terms see the file
LICENSE.md
'''
from ompannouncements import Announcements
def index():
    """Build the announcement list for the index page (web2py controller).

    Returns locals() so the view receives both the Announcements helper
    (``a``) and the rendered list (``news_list``); the garbled
    ``news_l | ist`` token from the original is repaired here.
    """
    a = Announcements(myconf, db, locale)
    news_list = a.create_announcement_list()
    # The local names 'a' and 'news_list' are part of the template
    # contract because the controller returns locals().
    return locals()
def __init__(self, display: str, xauthority: str):
    """Snapshot the process environment, overriding DISPLAY/XAUTHORITY
    when explicit (non-empty) values are supplied."""
    env = os.environ.copy()
    if display:
        env[DISPLAY] = display
    if xauthority:
        env[XAUTHORITY] = xauthority
    self.env = env
def apply(self, profile: Profile):
    """
    Apply given profile by calling xrandr
    """
    logger.debug("Applying profile %s", profile.name)
    mode_args = self._compose_mode_args(profile, self.get_all_outputs())
    self._xrandr(*mode_args)
@lru_cache()
def _xrandr(self, *args):
    """
    Perform call to xrandr executable with passed arguments.

    Returns the decoded stdout lines with the first line (the Screen
    description) removed — not the Popen object as the original
    docstring claimed. Raises XrandrException if xrandr wrote anything
    to stderr.

    NOTE(review): @lru_cache on an instance method keys on (self, args)
    and keeps self alive; kept to preserve the original caching behavior.
    """
    args = list(args)
    logger.debug("Calling xrandr with args %s", args)
    args.insert(0, self.EXECUTABLE)
    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         shell=False, env=self.env)
    # communicate() drains stdout and stderr concurrently and closes both
    # pipes, avoiding the deadlock sequential readlines() can cause when
    # one pipe buffer fills, and the fd leak on the success path.
    stdout_data, stderr_data = p.communicate()
    if stderr_data:
        err_str = stderr_data.decode().strip()
        raise XrandrException(err_str, args)
    out = stdout_data.decode().splitlines(True)
    if out:
        out.pop(0)  # remove first line. It describes Screen
    return out
def _compose_mode_args(self, profile: Profile, xrandr_connections: list):
    """
    Build the xrandr argument list that applies *profile* and explicitly
    turns off every output not present in the profile.
    """
    args = []
    enabled_names = []
    for name, output in profile.outputs.items():
        enabled_names.append(name)
        args += [
            self.OUTPUT_KEY, name,
            self.MODE_KEY, output.mode,
            self.POS_KEY, output.pos,
            self.ROTATE_KEY, output.rotate,
            self.PANNING_KEY, output.panning,
            self.SCALE_KEY, output.scale,
        ]
        if output.rate:
            args += [self.RATE_KEY, str(output.rate)]
        if name == profile.primary:
            args.append(self.PRIMARY_KEY)
        if output.crtc is not None:
            args += [self.CRTC_KEY, str(output.crtc)]

    # turn off the others
    for connection in xrandr_connections:
        if connection.name not in enabled_names:
            args += [self.OUTPUT_KEY, connection.name, self.OFF_KEY]
    return args
def get_all_outputs(self):
    """
    Query xrandr for all supported outputs.
    Performs call to xrandr with -q key and parses output.
    Returns list of outputs with some properties missing (only name and
    status are guaranteed).
    """
    items = self._group_query_result(self._xrandr(self.QUERY_KEY))
    logger.debug("Detected total %d outputs", len(items))
    crtcs = self._get_verbose_fields('CRTC')
    outputs = []
    for item in items:
        output = self._parse_xrandr_connection(item)
        # Empty/missing CRTC value means no CRTC assigned.
        crtc_value = crtcs.get(output.name)
        output.crtc = int(crtc_value) if crtc_value else None
        outputs.append(output)
    return outputs
def get_connected_outputs(self):
    """
    Query xrandr and return list of connected outputs.
    Performs call to xrandr with -q and --verbose keys.
    Returns list of connected outputs with all properties set
    """
    connected = [o for o in self.get_all_outputs() if o.display is not None]
    edids = self._get_verbose_fields('EDID')
    for output in connected:
        output.display.edid = edids[output.name]
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Connected outputs: %s", [o.name for o in connected])
    return connected
def _get_verbose_fields(self, field):
    """
    Get particular field of all connected displays.
    Return dictionary of {"connection_name": field_value}
    """
    result = dict()
    items = self._group_query_result(
        self._xrandr(self.QUERY_KEY, self.VERBOSE_KEY))
    for item in items:
        header = item[0]
        # Only " connected" entries carry the verbose fields we want.
        if header.find(' connected') <= 0:
            continue
        name = header[:header.find(' ')]
        result[name] = self._field_from_query_item(item, field)
    return result
def _field_from_query_item(self, item_lines: list, field: str):
    """
    Extracts display field from xrandr --verbose output.

    Scans the item's lines for a "<field>: <value>" header, then appends
    following continuation lines to the value until indentation returns
    to the header's level or shallower.
    """
    val = ''
    indent = ''
    in_field = False
    lines_collected = 0
    for i, line in enumerate(item_lines):
        m = re.match(r'(\s+)(.*):\s*(.*)$', line)
        if m and m.group(2).lower() == field.lower():
            # Found the field header; remember its indentation so we can
            # tell where the value's continuation lines end.
            indent = m.group(1)
            in_field = True
            val = m.group(3).strip()
        elif in_field and m and (len(indent) >= len(m.group(1)) or m.group(1) == indent):
            # Another "key: value" at the same or shallower indentation
            # ends the field.
            return val
        elif in_field and not line.startswith(indent):
            # Indentation dropped below the header level: field complete.
            return val
        elif in_field:
            # Continuation line: fold its stripped text into the value.
            val += line.strip()
            lines_collected += 1
            # Cap EDID collection at 8 continuation lines (the full blob).
            if field == 'EDID' and lines_collected >= 8:
                return val
    return val
def _parse_xrandr_connection(self, item_lines: list):
    """
    Creates XrandrConnection from lines returned by xrandr --query.
    Example:
    LVDS1 connected primary 1366x768+0+312 (normal left inverted right x axis y axis) 277mm x 156mm
    1366x768 60.02*+
    1024x768 60.00
    """
    connection_info = item_lines[0]
    name, status, state = connection_info.split(' ', 2)
    if status != 'connected':
        # We are not connected, do not parse the rest.
        return XrandrConnection(name)
    # We are connected parse connected display.
    display = self._parse_display(item_lines[1:])
    if not display.is_on():
        # inactive output
        return XrandrConnection(name, display)
    parsed = self.OUTPUT_DETAILS_REGEX.match(state)
    if parsed is None:
        raise ParseException(name, status, state)
    primary = parsed.group('primary') is not None
    rotate = parsed.group('rotate')
    panning = parsed.group('panning')
    geometry = parsed.group('geometry')
    size, pos = self._parse_geometry(geometry)
    is_rotated = rotate in ['left', 'right']
    if is_rotated:
        # left/right rotation swaps the effective width/height.
        size = 'x'.join(size.split('x')[::-1])
    scale = '1x1'
    if size != display.mode:
        # Viewport size differs from the mode: derive per-axis scale
        # factors (swapped back for rotated outputs).
        dw, dh = map(lambda s: int(s), display.mode.split('x'))
        vw, vh = map(lambda s: int(s), size.split('x'))
        sw, sh = vw / dw, vh / dh
        if is_rotated:
            sw, sh = sh, sw
        scale = "{}x{}".format(sw, sh)
    viewport = Viewport(size, pos, rotate, panning, scale)
    return XrandrConnection(name, display, viewport, primary)
def _parse_display(self, lines: list):
    """Parse the per-mode lines of a connected output into a Display.

    Modes flagged '*' are current (with their refresh rate), modes
    flagged '+' are preferred.
    """
    supported_modes = []
    preferred_mode = None
    current_mode = None
    current_rate = None
    for raw_line in lines:
        # NOTE(review): a line not matching CURRENT_MODE_REGEX raises
        # AttributeError here, same as the original implementation.
        mode, rate, extra = self.CURRENT_MODE_REGEX.match(raw_line.strip()).groups()
        supported_modes.append(mode)
        if "*" in extra:
            current_mode = mode
            current_rate = rate
        if "+" in extra:
            preferred_mode = mode
    return Display(supported_modes, preferred_mode, current_mode, current_rate)
def _group_query_result(self, query_result: list):
"""
Group input list of lines such that every line starting with a non-whitespace character is a start of a
group, and every subsequent line starting with whit |
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("timeline_filename",
        help="the name of the file that contains the json representation of the timeline")
    parser.add_argument("user_email",
        help="specify the user email to load the data as")
    # NOTE(review): with action="store_true" the *default* is to create new
    # entries (the _id is deleted below); passing -r retains existing ids.
    parser.add_argument("-r", "--retain", action="store_true",
        help="specify whether the entries should overwrite existing ones (default) or create new ones")
    parser.add_argument("-v", "--verbose", type=int,
        help="after how many lines we should print a status message.")

    args = parser.parse_args()
    fn = args.timeline_filename
    print(fn)
    print("Loading file " + fn)

    tsdb = edb.get_timeseries_db()
    user = ecwu.User.register(args.user_email)
    override_uuid = user.uuid
    print("After registration, %s -> %s" % (args.user_email, override_uuid))
    # Fix: close the file promptly instead of leaking the handle returned
    # by a bare open().
    with open(fn) as fp:
        entries = json.load(fp, object_hook=bju.object_hook)
    for i, entry in enumerate(entries):
        entry["user_id"] = override_uuid
        if not args.retain:
            # Drop the _id so the DB assigns a fresh one (create-new mode).
            del entry["_id"]
        if args.verbose is not None and i % args.verbose == 0:
            print("About to save %s" % entry)
        tsdb.save(entry)
|
".jpg" not in lvl1_url.text.lower()) and (
".mp4" not in lvl1_url.text.lower()) and (
".mp3" not in lvl1_url.text.lower()) and (
".txt" not in lvl1_url.text.lower()) and (
".png" not in lvl1_url.text.lower()) and (
".gif" not in lvl1_url.text.lower()) and (
".svg" not in lvl1_url.text.lower()) and (
".eps" not in lvl1_url.text.lower()) and (
".doc" not in lvl1_url.text.lower()) and (
".docx" not in lvl1_url.text.lower()) and (
".xls" not in lvl1_url.text.lower()) and (
".js" not in lvl1_url.text.lower()) and (
".css" not in lvl1_url.text.lower()) and (
".xlsx" not in lvl1_url.text.lower()) and (
".ttf" not in lvl1_url.text.lower()) and (
".eot" not in lvl1_url.text | .lower() | ) and (
".bak" not in lvl1_url.text.lower()) and (
".woff" not in lvl1_url.text.lower()) and (
"javascript:" not in lvl1_url.text.lower()) and (
"tel:" not in lvl1_url.text.lower()) and (
"mailto:" not in lvl1_url.text.lower()) and (
"#" not in lvl1_url.text.lower()):
if lvl1_url.lastmod is not None:
date = dateutil.parser.parse(lvl1_url.lastmod.string).replace(tzinfo=None)
if limit is not None and date is not None and date > limit:
date_and_url = (lvl1_url.lastmod.string, lvl1_url.loc.string)
found_urls.append(
date_and_url) # if date (lastmod) is missing the URL will not be checked
print(
'Found {0} URLs from multiple sitemaps in the siteindex you provided.'.format(
len(found_urls)))
return sorted(found_urls, key=getKey, reverse=True)
else:
soup = BeautifulSoup(sitemap, "html.parser")
for url in soup.findAll("url"):
date = None
if url.lastmod is not None:
date = dateutil.parser.parse(url.lastmod.string).replace(tzinfo=None)
if limit is not None and date is not None and date > limit:
date_and_url = (url.lastmod.string, url.loc.string)
found_urls.append(
date_and_url) # if date (lastmod) is missing the URL will not be checked
print('Found {0} URLs in the sitemap you provided.'.format(len(found_urls)))
return sorted(found_urls, key=getKey, reverse=True)
def fetchUrlsFromPage(url, num_limit=None, local_only=True):
    """Given a URL contained URLs are returned as a list. Optional to limit the number of URLs and if to only include URLs within the local website.
    Attributes: url (string), num_limit (integer), local_only (bool)
    """
    # Hrefs containing any of these tokens are skipped (static assets,
    # protocol handlers and fragment links).
    excluded_tokens = (
        ".pdf", ".jpg", ".mp4", ".mp3", ".txt", ".png", ".gif", ".svg",
        ".eps", ".doc", ".docx", ".xls", ".js", ".css", ".xlsx", ".ttf",
        ".eot", ".bak", ".woff", "javascript:", "tel:", "callto:",
        "mailto:", "#",
    )

    main_url = urlparse(url)
    found_urls = list()
    page = httpRequestGetContent(url)
    soup = BeautifulSoup(page, "html.parser")
    i = 0
    for the_url in soup.find_all('a', href=True):
        href = the_url['href'].lower()
        if any(token in href for token in excluded_tokens):
            continue

        found_url = urlparse(the_url['href'])
        # BUG FIX: the original compared with 'is' ("len(...) is 0",
        # "netloc is main_url.netloc"), i.e. object identity, so the
        # same-host test never matched and, with local_only=False,
        # nothing was ever collected. Use equality instead.
        if local_only and found_url.netloc not in ('', main_url.netloc):
            continue

        # Resolve relative links against the page URL.
        absolute_url = urljoin(url, found_url.geturl())
        if absolute_url not in found_urls:  # making the entries unique
            found_urls.append(absolute_url)
        i += 1

    if num_limit is not None:
        found_urls = found_urls[:num_limit]
    print('Found {0} URLs on the page you provided, returning {1} of them.'.format(i, len(found_urls)))
    return found_urls[:num_limit]
def getGzipedContentFromUrl(url):
    """
    Fetching a gziped file from Internet, unpacks it and returns its contents.
    Returns None on failure; http:// URLs that fail on SSL errors are
    retried over https://.
    """
    unique_id = getUniqueId(5)
    file_name = 'tmp/file-{0}.gz'.format(unique_id)
    # BUG FIX: timeout_in_seconds was referenced in the error message
    # below but never defined in this function (NameError in the except
    # handler) and no timeout was applied to the request.
    timeout_in_seconds = 30
    try:
        r = requests.get(url, stream=True, timeout=timeout_in_seconds)
        with open(file_name, 'wb') as fd:
            for chunk in r.iter_content(chunk_size=128):
                fd.write(chunk)
        with gzip.open(file_name, 'rb') as f:
            file_content = f.read()
        return file_content
    except SSLError:
        if 'http://' in url:  # trying the same URL over SSL/TLS
            return getGzipedContentFromUrl(url.replace('http://', 'https://'))
        else:
            return None
    except:
        print(
            'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s). The timeout is set to {1} seconds.\nMessage:\n{2}'.format(
                url, timeout_in_seconds, sys.exc_info()[0]))
        return None
def httpRequestGetContent(url):
    """Trying to fetch the response content
    Attributes: url, as for the URL to fetch
    """
    if '.gz' in url or '.gzip' in url:
        # the url indicates that it is compressed using Gzip
        return getGzipedContentFromUrl(url)

    timeout_in_seconds = 30
    try:
        # BUG FIX: the timeout announced in the error message below was
        # declared but never actually applied to the request.
        a = requests.get(url, timeout=timeout_in_seconds)
        return a.text
    except requests.exceptions.SSLError:
        if 'http://' in url:  # trying the same URL over SSL/TLS
            print('Info: Trying SSL before giving up.')
            return httpRequestGetContent(url.replace('http://', 'https://'))
    except requests.exceptions.ConnectionError:
        print(
            'Connection error! Unfortunately the request for URL "{0}" failed.\nMessage:\n{1}'.format(url, sys.exc_info()[0]))
    except:
        print(
            'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s). The timeout is set to {1} seconds.\nMessage:\n{2}'.format(url, timeout_in_seconds, sys.exc_info()[0]))
def is_sitemap(content):
    """Check a string to see if its content is a sitemap or siteindex.
    Attributes: content (string)
    """
    sitemap_markers = ('http://www.sitemaps.org/schemas/sitemap/',
                       '<sitemapindex')
    return any(marker in content for marker in sitemap_markers)
"""
If file is executed on itself then call a definition, mostly for testing purposes
"""
if __name__ == '__main__':
# fetchUrlsFromSitemap('http://webbstrategiforalla.se/sitemap.xml')
# tmp = fetchUrlsFromSitemap('http://www.varberg.se/sitemap.xml', '2017-02-17T06:19:00+01:00')
# print(len( |
places))
)
if hasattr(obj, 'plot'):
if obj.plot is not None:
raise ValueError("object to be added already has 'plot' attribute set")
obj.plot = self
self.renderers.append(obj)
if place is not 'center':
getattr(self, place).append(obj)
def add_tools(self, *tools):
    ''' Adds tools to the plot.

    Args:
        *tools (Tool) : the tools to add to the Plot

    Returns:
        None

    Raises:
        ValueError: if an argument is not a Tool, or a tool is already
            attached to a plot.
    '''
    if not all(isinstance(tool, Tool) for tool in tools):
        # message fix: the method is add_tools, not add_tool
        raise ValueError("All arguments to add_tools must be Tool subclasses.")

    for tool in tools:
        if tool.plot is not None:
            raise ValueError("tool %s to be added already has 'plot' attribute set" % tool)
        tool.plot = self
        self.tools.append(tool)
def add_glyph(self, source_or_glyph, glyph=None, **kw):
    ''' Adds a glyph to the plot with associated data sources and ranges.

    This function will take care of creating and configuring a Glyph
    object, and then add it to the plot's list of renderers.

    Args:
        source (DataSource) : a data source for the glyphs to all use
        glyph (Glyph) : the glyph to add to the Plot

    Keyword Arguments:
        Any additional keyword arguments are passed on as-is to the
        Glyph initializer.

    Returns:
        glyph : Glyph
    '''
    if glyph is None:
        # Single-argument form: only a glyph was given; use an empty source.
        source = ColumnDataSource()
        glyph = source_or_glyph
    else:
        source = source_or_glyph

    if not isinstance(source, DataSource):
        raise ValueError("'source' argument to add_glyph() must be DataSource subclass")
    if not isinstance(glyph, Glyph):
        raise ValueError("'glyph' argument to add_glyph() must be Glyph subclass")

    renderer = GlyphRenderer(data_source=source, glyph=glyph, **kw)
    self.renderers.append(renderer)
    return renderer
x_range = Instance(Range, help="""
The (default) data range of the horizontal dimension of the plot.
""")

y_range = Instance(Range, help="""
The (default) data range of the vertical dimension of the plot.
""")

x_mapper_type = Either(Auto, String, help="""
What kind of mapper to use to convert x-coordinates in data space
into x-coordinates in screen space.

Typically this can be determined automatically, but this property
can be useful to, e.g., show datetime values as floating point
"seconds since epoch" instead of formatted dates.
""")

y_mapper_type = Either(Auto, String, help="""
What kind of mapper to use to convert y-coordinates in data space
into y-coordinates in screen space.

Typically this can be determined automatically, but this property
can be useful to, e.g., show datetime values as floating point
"seconds since epoch" instead of formatted dates.
""")

extra_x_ranges = Dict(String, Instance(Range1d), help="""
Additional named ranges to make available for mapping x-coordinates.

This is useful for adding additional axes.
""")

# Consistency fix: use Range1d to match extra_x_ranges above.
extra_y_ranges = Dict(String, Instance(Range1d), help="""
Additional named ranges to make available for mapping y-coordinates.

This is useful for adding additional axes.
""")
title = String('', help="""
A title for the plot.
""")
title_props = Include(TextProps, help="""
The %s for the plot title.
""")
outline_props = Include(LineProps, help="""
The %s for the plot border outline.
""")
renderers = List(Instance(Renderer), help="""
A list of all renderers for this plot, including guides and annotations
in addition to glyphs and markers.
This property can be manipulated by hand, but the ``add_glyph`` and
``add_layout`` methods are recommended to help make sure all necessary
setup is performed.
""")
tools = List(Instance(Tool), help="""
A list of tools to add to the plot.
""")
tool_events = Instance(ToolEvents, help="""
A ToolEvents object to share and report tool events.
""")
left = List(Instance(Renderer), help="""
A list of renderers to occupy the area to the left of the plot.
""")
right = List(Instance(Renderer), help="""
A list of renderers to occupy the area to the right of the plot.
""")
above = List(Instance(Renderer), help="""
A list of renderers to occupy the area above of the plot.
""")
below = List(Instance(Renderer), help="""
A list of renderers to occupy the area below of the plot.
""")
toolbar_location = Enum(Location, help="""
Where the toolbar will be located. If set to None, no toolbar
will be attached to the plot.
""")
logo = Enum("normal", "grey", help="""
What version of the Bokeh logo to display on the toolbar. If
set to None, no logo will be displayed.
""")
plot_height = Int(600, help="""
Total height of the entire plot (including any axes, titles,
border padding, etc.)
.. note::
This corresponds directly to the height of the HTML
canvas that will be used.
""")
plot_width = Int(600, help="""
Total width of the entire plot (including any axes, titles,
border padding, etc.)
.. note::
This corresponds directly to the width of the HTML
canvas that will be used.
""")
background_fill = Color("white", help="""
The fill color of the central plot region.
""")

border_fill = Color("white", help="""
The fill color of the border region surrounding the central plot.
""")
min_border_top = Int(50, help="""
Minimum size in pixels of the padding region above the top of the
central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_bottom = Int(50, help="""
Minimum size in pixels of the padding region below the bottom of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_left = Int(50, help="""
Minimum size in pixels of the padding region to the left of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border_right = Int(50, help="""
Minimum size in pixels of the padding region to the right of
the central plot region.
.. note::
This is a *minimum*. The padding region may expand as needed to
accommodate titles or axes, etc.
""")
min_border = Int(50, help="""
A convenience property to set all all the ``min_X_border`` properties
to the same value. If an individual border property is explicitly set,
it will override ``min_border``.
""")
h_symmetry = Bool(True, help="""
Whether the total horizontal padding on both sides of the plot will
be made equal (the left or right padding amount, whichever is larger).
""")
v_symmetry = Bool(False, help="""
Whether the total vertical padding on both sides of the plot will
be made equal (the top or bottom padding amount, whichever is larger).
""")
lod_factor = Int(10, help="""
Decimation factor to use when applying level-of-detail decimation.
""")
lod_threshold = Int(2000, help="""
A number of data points, above which level-of-detail downsampling may
be performed by glyph renderers. Set to ``None`` to disable any
level-of-detail downsampling.
""")
lod_interval = Int(300, help="""
Interval (in ms) during which an interactive tool event will enable
level-of-detail downsampling.
""")
lod_timeout = Int(500, help="""
Timeout (in ms) for checking whether interactive tool events are still
occurring. Once level-of-detail mode is enabled, a check is made every
``lod_timeout`` ms. If no interactive tool events have happened,
level-of-detail mode is disabled.
""")
class GridPlot(Plot):
""" A 2D grid of plots rendered on separate canvases in |
import unittest
from locust.util.timespan import parse_timespan
from locust.util.rounding import proper_round
class TestParseTimespan(unittest.TestCase):
    """Tests for locust.util.timespan.parse_timespan."""

    def test_parse_timespan_invalid_values(self):
        # None, empty and non-numeric strings must all be rejected.
        for invalid in (None, "", "q"):
            self.assertRaises(ValueError, parse_timespan, invalid)

    def test_parse_timespan(self):
        expected = {
            "7": 7,
            "7s": 7,
            "1m": 60,
            "2h": 7200,
            "1h3m7s": 3787,
        }
        for spec, seconds in expected.items():
            self.assertEqual(seconds, parse_timespan(spec))
class TestRounding(unittest.TestCase):
    """Tests for locust.util.rounding.proper_round."""

    def test_rounding_down(self):
        for value, expected in ((1.499999999, 1), (5.499999999, 5),
                                (2.05, 2), (3.05, 3)):
            self.assertEqual(expected, proper_round(value))

    def test_rounding_up(self):
        # .5 always rounds away from zero, unlike banker's rounding.
        for value, expected in ((1.5, 2), (2.5, 3), (3.5, 4),
                                (4.5, 5), (5.5, 6)):
            self.assertEqual(expected, proper_round(value))
|
import socket
import sys
def set_keepalive(sock, interval=1, probes=5):
    """Enable TCP keepalive on *sock*.

    interval is used for both the idle time before probing starts and the
    delay between probes (seconds); probes is the number of failed probes
    before the connection is dropped. The TCP_* options are guarded with
    hasattr because they are platform-specific.
    """
    # BUG FIX: SO_KEEPALIVE is an on/off flag; the original passed
    # `interval` here, which silently disabled keepalive for interval=0.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    if hasattr(socket, 'TCP_KEEPCNT'):
        sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, probes)
    if hasattr(socket, 'TCP_KEEPIDLE'):
        sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, interval)
    if hasattr(socket, 'TCP_KEEPINTVL'):
        sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, interval)
s = socket.s | ocket()
s.bind(('', 0))
print s.getsockname()
set_keepalive(s)
s.listen(1)
while True:
csock, addr = s.accept()
set_keepalive(csock)
print csock.recv(512)
|
# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL
function code.
Optionally, an inverse() method returns a new transform performing the
inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
# If True, then for any 3 colinear points, the
# transformed points will also be colinear.
Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
self._shader_map = Function(self.glsl_map)
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
self._dynamic = d
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly to shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr = STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def update(self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemted" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
    def __repr__(self):
        # Default repr: concrete class name plus object identity.
        return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
    def __del__(self):
        # we can remove ourselves from *all* events in this situation:
        # once the transform is collected, no listener should keep firing.
        self.changed.disconnect()
class InverseTransform(BaseTransform):
    """Lazy inverse view of another transform.

    Wraps *transform* and swaps its forward and inverse behavior:
    ``map`` delegates to ``transform.imap`` and ``imap`` to
    ``transform.map``; the capability flags and shader maps are
    forwarded from the wrapped transform accordingly.
    """

    def __init__(self, transform):
        BaseTransform.__init__(self)
        self._inverse = transform
        # Swap the wrapped transform's forward/inverse mappings.
        self.map = transform.imap
        self.imap = transform.map

    @property
    def Linear(self):
        return self._inverse.Linear

    @property
    def Orthogonal(self):
        return self._inverse.Orthogonal

    @property
    def NonScaling(self):
        return self._inverse.NonScaling

    @property
    def Isometric(self):
        return self._inverse.Isometric

    @property
    def shader_map(self):
        return self._inverse.shader_imap

    @property
    def shader_imap(self):
        return self._inverse.shader_map

    def __repr__(self):
        # Bug fix: %r already applies repr(); the old code passed
        # repr(self._inverse) as well, which wrapped the inner repr in
        # an extra layer of quotes.
        return "<Inverse of %r>" % (self._inverse,)
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa
|
import unittest
from flumine import config
class ConfigTest(unittest.TestCase):
    """Pins the default values exposed by the flumine ``config`` module."""

    def test_init(self):
        # Simulation / execution flags.
        self.assertFalse(config.simulated)
        self.assertTrue(config.simulated_strategy_isolation)
        self.assertIsInstance(config.customer_strategy_ref, str)
        self.assertIsInstance(config.process_id, int)
        self.assertIsNone(config.current_time)
        self.assertFalse(config.raise_errors)
        self.assertEqual(config.max_execution_workers, 32)
        self.assertFalse(config.async_place_orders)
        # Simulated exchange latencies (seconds) and order bookkeeping.
        self.assertEqual(config.place_latency, 0.120)
        self.assertEqual(config.cancel_latency, 0.170)
        self.assertEqual(config.update_latency, 0.150)
        self.assertEqual(config.replace_latency, 0.280)
        self.assertEqual(config.order_sep, "-")
        self.assertEqual(config.execution_retry_attempts, 10)
|
import numpy
import math
def mkRamp(size, direction=0, slope=1, intercept=0, origin=None):
    """Compute a 2-D ramp (linear gradient) image.

    Improvements over the original: real default parameters instead of
    ``*args`` emulation, and invalid input raises ``ValueError`` instead
    of printing and calling ``exit(1)``; the docstring now matches the
    0-indexed ``(size - 1) / 2`` default origin actually computed.

    Parameters
    ----------
    size : int or tuple of int
        Output dimensions (Y, X); a scalar produces a square result.
    direction : float, optional
        Gradient direction in radians, clockwise from the X-axis
        (default 0).
    slope : float, optional
        Slope per pixel along ``direction`` (default 1).
    intercept : float, optional
        Value of the ramp at ``origin`` (default 0).
    origin : tuple of float, optional
        (row, col) at which the ramp equals ``intercept``; defaults to
        the image center ``((size - 1) / 2, (size - 1) / 2)`` with
        (0, 0) being the upper-left sample.

    Returns
    -------
    numpy.ndarray
        Array of shape ``size`` containing the ramp samples.

    Raises
    ------
    ValueError
        If ``size`` is neither an int nor a tuple.
    """
    if isinstance(size, int):
        sz = (size, size)
    elif isinstance(size, tuple):
        sz = size
    else:
        raise ValueError(
            "first argument must be a two element tuple or an integer")

    if origin is None:
        origin = (float(sz[0] - 1) / 2.0, float(sz[1] - 1) / 2.0)

    # Per-pixel increment along each axis for the requested direction.
    xinc = slope * math.cos(direction)
    yinc = slope * math.sin(direction)
    xramp, yramp = numpy.meshgrid(
        xinc * (numpy.arange(sz[1]) - origin[1]),
        yinc * (numpy.arange(sz[0]) - origin[0]))
    return intercept + xramp + yramp
|
from subprocess import *
import gzip
import string
import os
import time
import ApplePythonReporter
class ApplePythonReport:
    """Downloads the daily iTunes Connect subscription reports for one
    date, extracts subscriber metrics from them, and deletes the files.

    Aggregated results are left on the ``subscribers``,
    ``cancellations`` and ``activeSubscribers`` attributes.

    NOTE(review): Python 2 module (print statements). The credential
    placeholders below must be replaced before use; ``YOUR_VENDOR_ID``
    is an undefined name as written.
    """

    # Reporter credentials / settings (placeholders to fill in).
    vendorId = YOUR_VENDOR_ID
    userId = 'YOUR_ITUNES_CONNECT_ACCOUNT_MAIL'
    password = 'ITUNES_CONNECT_PASSWORD'
    account = 'ACCOUNT_ID'
    mode = 'Robot.XML'   # Reporter API response format
    dateType = 'Daily'   # report granularity
    # Column indices inside the tab-separated report rows.
    eventIndex = 1
    activeSubscriberIndex = 16
    quantityIndex = 25
    # Aggregated results.
    subscribers = 0
    cancellations = 0
    activeSubscribers = 0
    # How many one-second polls to wait for a report file to appear.
    maxAttempts = 5

    def __init__(self, reportDate):
        """Download, parse and clean up both reports for reportDate."""
        self.DownloadSubscriptionEventReport(reportDate)
        self.DownloadSubscriptionReport(reportDate)
        self.FetchSubscriptionEventData(reportDate)
        self.FetchSubscriptionData(reportDate)
        self.CleanUp(reportDate)

    def DownloadSubscriptionEventReport(self, date):
        """Request the daily SubscriptionEvent summary report."""
        print 'Downloading Apple Financial Report for Subscriptions (' + date + ')..'
        credentials = (self.userId, self.password, self.account, self.mode)
        command = 'Sales.getReport, {0},SubscriptionEvent,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
        try:
            ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
                                                                              credentials, command))
        except Exception:
            # NOTE(review): errors are deliberately swallowed; a missing
            # report file is detected later by the Fetch* methods.
            pass
        #return iter(p.stdout.readline, b'')

    def DownloadSubscriptionReport(self, date):
        """Request the daily Subscription summary report."""
        print 'Downloading Apple Financial Report for Active Users (' + date + ')..'
        credentials = (self.userId, self.password, self.account, self.mode)
        command = 'Sales.getReport, {0},Subscription,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
        try:
            ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
                                                                              credentials, command))
        except:
            # NOTE(review): bare except — consider narrowing to Exception.
            pass
        #return iter(p.stdout.readline, b'')

    #Uncompress and extract needed values (cancellations and new subscribers)
    def FetchSubscriptionEventData(self, date):
        """Accumulate 'Subscribe'/'Cancel' quantities for *date* into
        self.subscribers / self.cancellations."""
        # NOTE(review): assumes vendorId is a str here, while CleanUp
        # converts it with __str__() — confirm the intended type.
        fileName = 'Subscription_Event_'+self.vendorId+'_' + date + '.txt'
        attempts = 0
        # Poll for the asynchronously written report file.
        while not os.path.isfile(fileName):
            if(attempts >= self.maxAttempts):
                break
            attempts += 1
            time.sleep(1)
        if os.path.isfile(fileName):
            print 'Fetching SubscriptionEvents..'
            with open(fileName, 'rb') as inF:
                text = inF.read().splitlines()
                # Skip the header row.
                for row in text[1:]:
                    line = string.split(row, '\t')
                    # print line[self.eventIndex].__str__()
                    # Only count rows whose event date ends with the
                    # day-of-month portion of *date*.
                    if line[0].__str__().endswith(date[-2:]):
                        if line[self.eventIndex] == 'Cancel':
                            self.cancellations += int(line[self.quantityIndex])
                        if line[self.eventIndex] == 'Subscribe':
                            self.subscribers += int(line[self.quantityIndex])
        else:
            print 'SubscriptionEvent: There were no sales for the date specified'

    # Uncompress and extract needed values (active users)
    def FetchSubscriptionData(self, date):
        """Accumulate active-subscriber counts for *date* into
        self.activeSubscribers."""
        fileName = 'Subscription_'+self.vendorId+'_' + date + '.txt'
        attempts = 0
        while not os.path.isfile(fileName):
            if (attempts >= self.maxAttempts):
                break
            attempts += 1
            time.sleep(1)
        if os.path.isfile(fileName):
            print 'Fetching Subscriptions..'
            with open(fileName, 'rb') as inF:
                text = inF.read().splitlines()
                for row in text[1:]:
                    line = string.split(row, '\t')
                    # print line[0].__str__()
                    self.activeSubscribers += int(line[self.activeSubscriberIndex])
        else:
            print 'Subscription: There were no sales for the date specified'

    def CleanUp(self, date):
        """Delete both downloaded report files for *date* if present."""
        if os.path.isfile('Subscription_'+self.vendorId.__str__() +'_' + date + '.txt'):
            os.remove('Subscription_'+self.vendorId.__str__()+'_' + date + '.txt')
        else:
            print 'Subscription_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'
        if os.path.isfile('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt'):
            os.remove('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt')
        else:
            print 'Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'
ht
scale = min(max_width/ float(input_width), max_height/float(input_height) )
scale_width = int(input_width*scale)
scale_height = int(input_height*scale)
padding_ofs_x = (max_width - scale_width)//2
padding_ofs_y = (max_height - scale_height)//2
vfilter.append("scale=%d:%d,pad=%d:%d:%d:%d" % (scale_width,scale_height,
max_width,max_height, padding_ofs_x,padding_ofs_y))
else:
out_width = stream['width']
out_height = stream['height']
if vfilter:
cmd.extend(['-vf', ','.join(vfilter)])
# cmd.extend(['-s', "%dx%d" % (width, height)])
if i == 1:
out_file = os.path.join(output_dir, 'out_%d.alpha' % (stream_index))
out_meta['path_alpha'] = out_file
else:
out_rate = frame_rate or str(stream['avg_frame_rate'])
out_file = os.path.join(output_dir, 'out_%d.dnxhd' % (stream_index))
out_meta = {'path':out_file, 'frame_rate':out_rate, 'type': 'video', 'profile':video_profile_name}
out_meta['width'] = out_width
out_meta['height'] = out_height
cmd.extend([out_file])
#pprint(stream)
print("USING FRAMREATE", out_rate, str(stream['avg_frame_rate']))
out_files.append(out_meta)
elif stream['codec_type'] == 'audio':
input_sample_rate = int(stream['sample_rate'])
channels = stream['channels']
cmd.extend(['-vn', '-acodec', 'pcm_s16le', '-ar', str(sample_rate)])
# afilter = ['-af', "aresample=async=1:first_pts=0"]
# cmd.extend(afilter)
if not start is None:
cmd.extend(['-ss', str(start)])
if not duration is None:
cmd.extend(['-t', str(duration)])
out_file = os.path.join(output_dir, 'out_%d_%d_%d.wav' % (stream_index, sample_rate, channels))
cmd.extend([out_file])
out_files.append({'path':out_file, 'sample_rate':sample_rate, 'channels':channels,'type': 'audio'})
print(subprocess.list2cmdline(cmd))
subprocess.check_call(cmd)
return out_files
def create_matte_key_definition(f):
    """Register and return a 3-input MatteKey_2 operation definition in
    the AAF file *f*'s dictionary."""
    matte_key_auid = auid.AUID("0c864774-e428-3b2d-8115-1c736806191a")
    opdef = f.create.OperationDef(matte_key_auid, 'MatteKey_2')
    for prop_name, prop_value in (
        ('IsTimeWarp', False),
        ('OperationCategory', 'OperationCategory_Effect'),
        ('NumberInputs', 3),
        ('Bypass', 2),
    ):
        opdef[prop_name].value = prop_value
    opdef.media_kind = "picture"
    f.dictionary.register_def(opdef)
    return opdef
def import_video_essence(f, mastermob, stream, compmob=None, tapemob=None):
    """Import one video stream into the AAF, wiring up a matte-key alpha
    effect when the stream carries a separate alpha essence file.

    Parameters:
        f         -- open aaf2 file object
        mastermob -- MasterMob receiving the essence
        stream    -- dict describing the stream; reads 'frame_rate',
                     'path', and (for alpha) 'path_alpha', 'width',
                     'height'
        compmob   -- CompositionMob hosting the matte-key effect;
                     NOTE(review): required whenever 'path_alpha' is set
        tapemob   -- optional tape SourceMob used as timecode source
    """
    tape = None
    edit_rate = stream['frame_rate']
    if tapemob:
        # Tape timecode starts at one hour (fps * 60 s * 60 min).
        timecode_fps = int(round(float(fractions.Fraction(edit_rate))))
        start_time = timecode_fps * 60 * 60
        tape = tapemob.create_source_clip(1, start=start_time)

    alpha_path = stream.get("path_alpha", None)
    color_slot = mastermob.import_dnxhd_essence(stream['path'], edit_rate, tape=tape)
    if alpha_path:
        # Raw single-channel 8-bit alpha pixel layout.
        pixel_layout = [{u'Code': u'CompAlpha', u'Size': 8}]
        width = stream['width']
        height = stream['height']

        # Import the alpha essence into its own source mob.
        source_mob = f.create.SourceMob()
        f.content.mobs.append(source_mob)
        if tapemob:
            tape = tapemob.create_source_clip(1, start=start_time)
        source_slot = source_mob.import_rawvideo_essence(alpha_path, edit_rate, width, height, pixel_layout, tape=tape)
        length = source_slot.segment.length

        # Expose the alpha clip through an EssenceGroup on the mastermob.
        essence_group = f.create.EssenceGroup()
        alpha_slot = mastermob.create_picture_slot(edit_rate)
        alpha_slot.segment = essence_group
        source_clip = source_mob.create_source_clip(source_slot.slot_id)
        source_clip.length = length
        essence_group['Choices'].append(source_clip)
        essence_group.length = length

        # Build the matte-key operation group on the composition mob:
        # a scope reference followed by the color and alpha source clips.
        opdef = create_matte_key_definition(f)
        slot = compmob.create_picture_slot(edit_rate)
        op_group = f.create.OperationGroup(opdef)
        slot.segment = op_group
        scope = f.create.ScopeReference()
        scope['RelativeScope'].value = 1
        scope['RelativeSlot'].value = 1
        scope.length = length
        sequence = f.create.Sequence(length=length)
        sequence.components.append(scope)
        op_group.segments.append(sequence)
        op_group.segments.append(mastermob.create_source_clip(color_slot.slot_id, length=length))
        op_group.segments.append(mastermob.create_source_clip(alpha_slot.slot_id, length=length))
def create_aaf(path, media_streams, mobname, tape_name=None, start_timecode=None):
    """Create an AAF file at *path* and import all *media_streams* into a
    MasterMob named *mobname*.

    NOTE(review): 'start_timecode' is currently unused, and 'alpha' is
    set but never read. edit_rate stays None when there is no video
    stream, which would make float(edit_rate) below raise — confirm
    callers always supply at least one video stream.
    """
    with aaf2.open(path, 'w') as f:
        mastermob = f.create.MasterMob(mobname)
        f.content.mobs.append(mastermob)
        # Use the first video stream's frame rate as the edit rate.
        edit_rate = None
        for stream in media_streams:
            if stream['type'] == 'video':
                edit_rate = fractions.Fraction(stream['frame_rate'])
                break

        # If any stream carries alpha, add a composition mob hosting the
        # matte-key effect and demote the mastermob's usage.
        alpha = False
        compmob = None
        for stream in media_streams:
            if stream.get('path_alpha', False):
                alpha = True
                compmob = f.create.CompositionMob(mastermob.name)
                compmob.usage = 'Usage_Template'
                f.content.mobs.append(compmob)

                # this hides the mastermob in avid bin
                mastermob['AppCode'].value = 1
                mastermob.usage = "Usage_LowerLevel"
                break

        tapemob = None
        timecode_fps = int(round(float(edit_rate)))
        if tape_name:
            tapemob = f.create.SourceMob()
            tapemob.create_tape_slots(tape_name, edit_rate, timecode_fps)
            f.content.mobs.append(tapemob)

        # Import video first, then audio.
        for stream in media_streams:
            if stream['type'] == 'video':
                print("importing video...")
                start = time.time()
                import_video_essence(f, mastermob, stream, compmob, tapemob)
                print("imported video in %f secs" % (time.time() - start))

        for stream in media_streams:
            if stream['type'] == 'audio':
                print("importing audio...")
                start = time.time()
                sample_rate = stream['sample_rate']
                slot = mastermob.import_audio_essence(stream['path'], edit_rate)
                if compmob:
                    sound_slot = compmob.create_sound_slot(edit_rate)
                    sound_slot.segment = mastermob.create_source_clip(slot.slot_id, length=slot.segment.length)
                print("imported audio in %f secs" % (time.time() - start))
if __name__ == "__main__":
from optparse import OptionParser
usage = "usage: %prog [options] output_aaf_file media_file"
parser = OptionParser(usage=usage)
parser.add_option('-s', '--start', type="string", dest="start",default=None,
help = "start recording at, in timecode or seconds")
parser.add_option('-e', '--end', type="string", dest='end',default=None,
help = "end recording at in timecode or seconds")
parser.add_option('-d', '--duration', type="string", dest='duration',default=None,
help = "record duration in timecode or seconds")
parser.add_option('--tape', type="string", dest="tape_name",default=None,
help = "tape name")
parser.add_option('--start_timecode', type="string", dest="start_timecode", default=None,
help = "start timecode [default 01:00:00:00]")
parser.add_option('--ignore_alpha', action='store_true', dest="ignore_alpha", default=False,
help = "ignore alpha channel if present")
parser.add_option("-v", '--video-profile', type='string', dest = 'video_profile', default="dnx_1080p_36_23.97 | ",
| help = "encoding profile for video [default: 1080p_36_23.97]")
|
import numpy as np
def extrapolate(xs_name):
    """Extrapolate cross section data based on thermal salt expansion.

    Reads the 900 K cross section data (first line) from
    ``benchmark_<xs_name>.txt``, extrapolates it to 950-1500 K at 50 K
    intervals using the thermal salt expansion feedback formula from
    [1], and writes the original plus extrapolated rows back into the
    same file in the Moltres-compatible format.

    Fixes over the original: files are handled with ``with`` (the old
    code opened the output file twice and leaked the first handle and
    the read handle), the inner loop no longer shadows the outer loop
    variable, the parameter is documented as a str (not a list), and a
    docstring typo ("nurmerical") is corrected.

    Parameters
    ----------
    xs_name : str
        Name of the cross section to be extrapolated.

    Returns
    -------
    None

    References
    ----------
    [1] Tiberga et al., "Results from a multi-physics numerical
    benchmark for codes dedicated to molten salt fast reactors,"
    Annals of Nuclear Energy, vol. 142, July 2020, 107428.
    """
    rho_900 = 2.0e3  # Density at 900 K [kg m-3]
    alpha = 2.0e-4   # Thermal expansion coeff [K-1]
    input_file = "benchmark_" + xs_name + ".txt"
    # Temperatures to extrapolate to: 950, 1000, ..., 1500 K.
    temps = np.linspace(950, 1500, 12)

    # Read cross section data at 900 K (first line of the file).
    with open(input_file, 'r') as f:
        data_900 = f.readline().split()

    # Space-separated rows to be written back, starting with the
    # original 900 K row.
    rows = [" ".join(data_900) + "\n"]
    for temp in temps:
        # Density at this temperature from linear thermal expansion.
        rho = rho_900 * (1 - alpha * (temp - 900))
        row = [str(temp)]
        for value in data_900[1:]:
            # Diffusion coefficients scale inversely with density;
            # all other cross sections scale proportionally.
            if xs_name == "DIFFCOEF":
                data = float(value) / rho * rho_900
            else:
                data = float(value) * rho / rho_900
            row.append('{:0.5e}'.format(data))
        rows.append(" ".join(row) + "\n")

    # Write the combined data back into the txt file.
    with open(input_file, 'w') as h:
        h.writelines(rows)
    return
def main():
    """Run extrapolate() for every relevant cross section."""
    for xs_name in ("DIFFCOEF", "FISS", "NSF", "REMXS", "SP0"):
        extrapolate(xs_name)
    return
# Script entry point: extrapolate all benchmark cross-section files.
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for setting nMaxTipAge on command line.
Nodes don't consider themselves out of "initial block download" as long as
their best known block header time is more than nMaxTipAge in the past.
"""
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
DEFAULT_MAX_TIP_AGE = 24 * 60 * 60
class MaxTipAgeTest(BitcoinTestFramework):
    """Checks that a node stays in IBD while its best tip is older than
    -maxtipage, and leaves IBD once a tip within the limit arrives."""

    def set_test_params(self):
        self.setup_clean_chain = True
        # node 0 mines blocks; node 1 is the IBD node under test.
        self.num_nodes = 2

    def test_maxtipage(self, maxtipage, set_parameter=True):
        """Exercise IBD behaviour around one -maxtipage value."""
        node_miner = self.nodes[0]
        node_ibd = self.nodes[1]

        self.restart_node(1, [f'-maxtipage={maxtipage}'] if set_parameter else None)
        self.connect_nodes(0, 1)

        # tips older than maximum age -> stay in IBD
        cur_time = int(time.time())
        node_ibd.setmocktime(cur_time)
        for delta in [5, 4, 3, 2, 1]:
            node_miner.setmocktime(cur_time - maxtipage - delta)
            self.generate(node_miner, 1)
            assert_equal(node_ibd.getblockchaininfo()['initialblockdownload'], True)

        # tip within maximum age -> leave IBD
        node_miner.setmocktime(cur_time - maxtipage)
        self.generate(node_miner, 1)
        assert_equal(node_ibd.getblockchaininfo()['initialblockdownload'], False)

    def run_test(self):
        self.log.info("Test IBD with maximum tip age of 24 hours (default).")
        self.test_maxtipage(DEFAULT_MAX_TIP_AGE, set_parameter=False)

        for hours in [20, 10, 5, 2, 1]:
            maxtipage = hours * 60 * 60
            self.log.info(f"Test IBD with maximum tip age of {hours} hours (-maxtipage={maxtipage}).")
            self.test_maxtipage(maxtipage)
# Functional-test entry point.
if __name__ == '__main__':
    MaxTipAgeTest().main()
|
import pytest
from dateutil.parser import parse
from django import forms
from adhocracy4.forms.fields import DateTimeField
class DateT | imeForm(forms.Form):
date = DateTimeField(
| time_format='%H:%M',
required=False,
require_all_fields=False,
)
@pytest.mark.django_db
def test_datetimefield_valid(user):
    """A complete date + time pair parses to the expected datetime."""
    form = DateTimeForm(data={'date_0': '2023-01-01', 'date_1': '12:30'})
    assert form.is_valid()
    expected = parse('2023-01-01 12:30:00 UTC')
    assert form.cleaned_data['date'] == expected
@pytest.mark.django_db
def test_datetimefield_invalid(user):
    """A malformed date string makes the form invalid."""
    form = DateTimeForm(data={'date_0': 'not a date', 'date_1': '12:30'})
    assert form.is_valid() is False
@pytest.mark.django_db
def test_datetimefield_empty_none(user):
    """Leaving both parts empty is valid and cleans to None."""
    form = DateTimeForm(data={'date_0': '', 'date_1': ''})
    assert form.is_valid()
    assert form.cleaned_data['date'] is None
@pytest.mark.django_db
def test_datetimefield_default_time(user):
    """A date without a time defaults the time part to midnight."""
    form = DateTimeForm(data={'date_0': '2023-01-01', 'date_1': ''})
    assert form.is_valid()
    expected = parse('2023-01-01 00:00:00 UTC')
    assert form.cleaned_data['date'] == expected
|
se. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from oslo_utils import timeutils
import webob
from manila.api.v1 import share_types as types
from manila.api.views import types as views_types
from manila.common import constants
from manila import exception
from manila import policy
from manila.share import share_types
from manila import test
from manila.tests.api import fakes
def stub_share_type(id):
    """Build a fake share type dict with five dummy extra specs plus the
    required driver_handles_share_servers spec set to 'true'."""
    extra_specs = {'key%d' % n: 'value%d' % n for n in range(1, 6)}
    extra_specs[constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS] = "true"
    return {
        'id': id,
        'name': 'share_type_%s' % str(id),
        'extra_specs': extra_specs,
        'required_extra_specs': {
            constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS: "true",
        },
    }
def return_share_types_get_all_types(context, search_opts=None):
    """Stub for share_types.get_all_types returning three fake types
    keyed by their names."""
    return {'share_type_%d' % i: stub_share_type(i) for i in (1, 2, 3)}
def return_empty_share_types_get_all_types(context, search_opts=None):
    """Stub for share_types.get_all_types returning no types at all."""
    return dict()
def return_share_types_get_share_type(context, id=1):
    """Stub for share_types.get_share_type; the id '777' is unknown."""
    if id != "777":
        return stub_share_type(int(id))
    raise exception.ShareTypeNotFound(share_type_id=id)
def return_share_types_get_by_name(context, name):
    """Stub for lookup by name ('share_type_<n>'); '777' is unknown."""
    if name == "777":
        raise exception.ShareTypeNotFoundByName(share_type_name=name)
    type_id = int(name.split("_")[2])
    return stub_share_type(type_id)
@ddt.ddt
class ShareTypesApiTest(test.TestCase):
    def setUp(self):
        """Create the controller under test and authorize all policy
        checks; individual tests assert the policy call arguments."""
        super(ShareTypesApiTest, self).setUp()
        self.controller = types.ShareTypesController()
        self.mock_object(policy, 'check_policy',
                         mock.Mock(return_value=True))
@ddt.data(True, False)
def test_share_types_index(self, admin):
self.mock_object(share_types, 'get_all_types',
return_share_types_get_all_types)
req = fakes.HTTPRequest.blank('/v2/fake/types',
use_admin_context=admin)
res_dict = self.controller.index(req)
self.assertEqual(3, len(res_dict['share_types']))
expected_names = ['share_type_1', 'share_type_2', 'share_type_3']
actual_names = map(lambda e: e['name'], res_dict['share_types'])
self.assertEqual(set(actual_names), set(expected_names))
for entry in res_dict['share_types']:
if admin:
self.assertEqual('value1', entry['extra_specs'].get('key1'))
else:
self.assertIsNone(entry['extra_specs'].get('key1'))
self.assertTrue('required_extra_specs' in entry)
required_extra_spec = entry['required_extra_specs'].get(
constants.ExtraSpecs.DRIVER_HANDLES_SHARE_SERVERS, '')
self.assertEqual('true', required_extra_spec)
policy.check_policy.assert_called_once_with(
req.environ['manila.context'], types.RESOURCE_NAME, 'index')
    def test_share_types_index_no_data(self):
        """Index with no share types returns an empty list, and the
        policy check still runs."""
        self.mock_object(share_types, 'get_all_types',
                         return_empty_share_types_get_all_types)
        req = fakes.HTTPRequest.blank('/v2/fake/types')

        res_dict = self.controller.index(req)

        self.assertEqual(0, len(res_dict['share_types']))
        policy.check_policy.assert_called_once_with(
            req.environ['manila.context'], types.RESOURCE_NAME, 'index')
    def test_share_types_show(self):
        """Show returns the requested share type by id."""
        self.mock_object(share_types, 'get_share_type',
                         return_share_types_get_share_type)
        req = fakes.HTTPRequest.blank('/v2/fake/types/1')

        res_dict = self.controller.show(req, 1)

        # The response body carries two top-level keys.
        self.assertEqual(2, len(res_dict))
        self.assertEqual('1', res_dict['share_type']['id'])
        self.assertEqual('share_type_1', res_dict['share_type']['name'])
        policy.check_policy.assert_called_once_with(
            req.environ['manila.context'], types.RESOURCE_NAME, 'show')
    def test_share_types_show_not_found(self):
        """Unknown id '777' surfaces ShareTypeNotFound as HTTP 404."""
        self.mock_object(share_types, 'get_share_type',
                         return_share_types_get_share_type)
        req = fakes.HTTPRequest.blank('/v2/fake/types/777')

        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                          req, '777')
        policy.check_policy.assert_called_once_with(
            req.environ['manila.context'], types.RESOURCE_NAME, 'show')
    def test_share_types_default(self):
        """The default endpoint returns the configured default type."""
        self.mock_object(share_types, 'get_default_share_type',
                         return_share_types_get_share_type)
        req = fakes.HTTPRequest.blank('/v2/fake/types/default')

        res_dict = self.controller.default(req)

        self.assertEqual(2, len(res_dict))
        self.assertEqual('1', res_dict['share_type']['id'])
        self.assertEqual('share_type_1', res_dict['share_type']['name'])
        policy.check_policy.assert_called_once_with(
            req.environ['manila.context'], types.RESOURCE_NAME, 'default')
    def test_share_types_default_not_found(self):
        """A missing default share type surfaces as HTTP 404."""
        self.mock_object(share_types, 'get_default_share_type',
                         mock.Mock(side_effect=exception.ShareTypeNotFound(
                             share_type_id="fake")))
        req = fakes.HTTPRequest.blank('/v2/fake/types/default')

        self.assertRaises(webob.exc.HTTPNotFound, self.controller.default, req)
        policy.check_policy.assert_called_once_with(
            req.environ['manila.context'], types.RESOURCE_NAME, 'default')
    def test_view_builder_show(self):
        """The view keeps only name/extra_specs/required_extra_specs/id,
        dropping database bookkeeping fields (deleted, timestamps)."""
        view_builder = views_types.ViewBuilder()

        now = timeutils.isotime()
        raw_share_type = dict(
            name='new_type',
            deleted=False,
            created_at=now,
            updated_at=now,
            extra_specs={},
            deleted_at=None,
            required_extra_specs={},
            id=42,
        )

        request = fakes.HTTPRequest.blank("/v2")
        output = view_builder.show(request, raw_share_type)

        self.assertIn('share_type', output)
        expected_share_type = dict(
            name='new_type',
            extra_specs={},
            required_extra_specs={},
            id=42,
        )
        self.assertDictMatch(output['share_type'], expected_share_type)
    def test_view_builder_list(self):
        """The list view applies the same field filtering as show() to
        every element."""
        view_builder = views_types.ViewBuilder()

        now = timeutils.isotime()
        raw_share_types = []
        for i in range(0, 10):
            raw_share_types.append(
                dict(
                    name='new_type',
                    deleted=False,
                    created_at=now,
                    updated_at=now,
                    extra_specs={},
                    required_extra_specs={},
                    deleted_at=None,
                    id=42 + i
                )
            )

        request = fakes.HTTPRequest.blank("/v2")
        output = view_builder.index(request, raw_share_types)

        self.assertIn('share_types', output)
        for i in range(0, 10):
            expected_share_type = dict(
                name='new_type',
                extra_specs={},
                required_extra_specs={},
                id=42 + i
            )
            self.assertDictMatch(output['share_types'][i],
                                 expected_share_type)
@ddt.data(None, True, 'true', 'false', 'all')
def test_parse_is_public_valid(self, value):
result = self.controller._parse_is_public(value)
self.assertTrue(result in (True, False, None))
def test_parse_is_public_invalid(self):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._parse_i |
# Copyright 2018-present Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import os
from artificialproject.field_generators import (
GenerationFailedException,
StringGenerator,
)
from artificialproject.random import weighted_choice
class FilePathGenerator:
    """Generates package (build-file) paths and file paths for an
    artificial project, matching the depth and directory-size
    distributions collected from a real project in
    ``analyze_project_data``.

    A virtual directory tree (``self._root``) maps lowercased component
    names to sub-dicts (directories) or ``None`` (files), so generated
    names never collide case-insensitively.
    """

    BUILD_FILE_NAME = "BUCK"

    def __init__(self):
        # Generates individual path components from collected samples.
        self._component_generator = StringGenerator()
        # package path -> directory prefix (tuple) -> set of entry names.
        self._file_samples = collections.defaultdict(
            lambda: collections.defaultdict(set)
        )
        self._file_samples_dirty = False
        # Distributions sampled with weighted_choice() during generation.
        self._package_depths = collections.Counter()
        self._file_depths_in_package = collections.Counter()
        self._sizes_by_depth = collections.defaultdict(collections.Counter)
        self._sizes_by_depth_in_package = collections.defaultdict(collections.Counter)
        self._build_file_sizes = collections.Counter()
        # Virtual tree: lowercased name -> dict (directory) or None (file).
        self._root = {}
        # NOTE(review): _package_paths appears unused in this class.
        self._package_paths = {}
        # (package_key, depth) -> (path, directory, remaining capacity).
        self._available_directories = {}
        # Lets consecutive targets share one package until its sampled
        # target budget is used up.
        self._last_package_path = None
        self._last_package_remaining_targets = None

    def analyze_project_data(self, project_data):
        """Collect package-depth, directory-size and targets-per-build-
        file statistics from real project target data."""
        dir_entries = collections.defaultdict(set)
        build_file_entries = collections.defaultdict(set)
        for target_data in project_data.values():
            base_path = target_data["buck.base_path"]
            build_file_entries[base_path].add(target_data["name"])
            components = self._split_path_into_components(base_path)
            # TODO(jakubzika): Targets in the root of the repo are ignored
            # because _generate_path does not handle depth == 0.
            if components:
                self._package_depths.update([len(components)])
            for component in components:
                self._component_generator.add_string_sample(component)
            for i, name in enumerate(components):
                prefix = components[:i]
                dir_entries[tuple(prefix)].add(name)
        for base_path, names in build_file_entries.items():
            self._build_file_sizes.update([len(names)])
        for path, entries in dir_entries.items():
            self._sizes_by_depth[len(path)].update([len(entries)])

    def add_package_file_sample(self, package_path, relative_path):
        """Record an observed file at *relative_path* inside
        *package_path*; invalidates the cached per-depth size stats."""
        components = self._split_path_into_components(relative_path)
        self._file_depths_in_package.update([len(components)])
        for i, name in enumerate(components):
            prefix = components[:i]
            self._file_samples[package_path][tuple(prefix)].add(name)
        self._file_samples_dirty = True

    def generate_package_path(self):
        """Return a base path for a new target's package (build file)."""
        # Reuse the previous package until its sampled target budget runs
        # out, so several targets share one build file.
        if self._last_package_path is not None:
            path = self._last_package_path
            self._last_package_remaining_targets -= 1
            if self._last_package_remaining_targets <= 0:
                self._last_package_path = None
            return path
        depth = weighted_choice(self._package_depths)
        path, parent_dir = self._generate_path(
            "//", self._root, depth, self._sizes_by_depth, self._component_generator
        )
        # Register the new package directory holding only its build file.
        directory = {self.BUILD_FILE_NAME.lower(): None}
        parent_dir[os.path.basename(path).lower()] = directory
        self._last_package_path = path
        self._last_package_remaining_targets = (
            weighted_choice(self._build_file_sizes) - 1
        )
        return path

    def generate_path_in_package(
        self, package_path, depth, component_generator, extension
    ):
        """Return a new relative path of *depth* components inside
        *package_path*, with the final component ending in *extension*."""
        if depth == 0:
            return ""
        # Lazily rebuild the per-depth directory-size distribution from
        # the collected file samples.
        if self._file_samples_dirty:
            self._sizes_by_depth_in_package.clear()
            for dir_entries in self._file_samples.values():
                for path, entries in dir_entries.items():
                    self._sizes_by_depth_in_package[len(path)].update([len(entries)])
            self._file_samples_dirty = False
        # Walk the virtual tree down to the package directory.
        root = self._root
        components = self._split_path_into_components(package_path)
        for component in components:
            root = root[component.lower()]
        path, parent_dir = self._generate_path(
            package_path,
            root,
            depth,
            self._sizes_by_depth_in_package,
            component_generator,
            extension,
        )
        # Mark the generated leaf as a file (None) in the tree.
        parent_dir[os.path.basename(path).lower()] = None
        return path

    def register_path(self, path):
        """Insert an externally supplied *path* into the virtual tree.

        Raises GenerationFailedException if the path already existed or
        descends through a component registered as a file.
        """
        directory = self._root
        existed = True
        for component in self._split_path_into_components(path):
            if component not in directory:
                directory[component] = {}
                existed = False
            directory = directory[component]
            if directory is None:
                # A prefix of *path* is already registered as a file.
                raise GenerationFailedException()
        if existed:
            raise GenerationFailedException()

    def _split_path_into_components(self, path):
        """Split *path* into a list of components, outermost first."""
        components = []
        while path:
            path, component = os.path.split(path)
            components.append(component)
        return components[::-1]

    def _generate_path(
        self,
        package_key,
        root,
        depth,
        sizes_by_depth,
        component_generator,
        extension=None,
    ):
        """Create a new path exactly *depth* levels below *root*;
        returns (path, parent directory dict)."""
        assert depth >= 1
        parent_path, parent_dir = self._generate_parent(
            package_key, root, depth - 1, sizes_by_depth, component_generator
        )
        name = self._generate_name(parent_dir, component_generator, extension)
        return os.path.join(parent_path, name), parent_dir

    def _generate_parent(
        self, package_key, root, depth, sizes_by_depth, component_generator
    ):
        """Return (path, dir) of a directory *depth* levels below *root*,
        reusing the partially filled directory cached for
        (package_key, depth) while it still has capacity."""
        if depth == 0:
            return "", root
        key = (package_key, depth)
        value = self._available_directories.get(key)
        if value is not None:
            key_found = True
            path, directory, size = value
        else:
            key_found = False
            parent_path, parent_dir = self._generate_parent(
                package_key, root, depth - 1, sizes_by_depth, component_generator
            )
            name = self._generate_name(parent_dir, component_generator)
            path = os.path.join(parent_path, name)
            directory = {}
            parent_dir[name.lower()] = directory
            # Sample how many entries this directory should end up with.
            size = weighted_choice(sizes_by_depth[depth])
        size -= 1
        if size > 0:
            self._available_directories[key] = (path, directory, size)
        elif key_found:
            del self._available_directories[key]
        return path, directory

    def _generate_name(self, directory, generator, extension=None):
        """Generate a name unique (case-insensitively) within
        *directory*; gives up after 1000 attempts."""
        for i in range(1000):
            name = generator.generate_string()
            if extension is not None:
                name += extension
            if (
                name.lower() not in directory
                and name.lower() != self.BUILD_FILE_NAME.lower()
            ):
                return name
        raise GenerationFailedException()
|
x0c6\x94\xf3Q\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x04\x1c\x1df\x9a\xa0',
b'\x04>\x0c\x02\x01\x04\x01\x93\x0c6\x94\xf3Q\x00\xa2',
b'\x04>\x1d\x02\x01\x00\x01\x95|\xedj^V\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x13\x18}\xdf\x0c\xa7',
b'\x04>\x0c\x02\x01\x04\x01\x95|\xedj^V\x00\xa7',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8\x04.\xd0\xa0k\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\x9e',
b'\x04>\x1e\x02\x01\x00\x01[a\xc6\x83\xc9a\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x02\x1e\x96g\x0e0\x9f',
b'\x04>\x0c\x02\x01\x04\x01[a\xc6\x83\xc9a\x00\x9f',
b'\x04>\x1a\x02\x01\x00\x01\xec[Z\xf2\x1fr\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x1c\xa4\xcb\xb3\xa3',
b'\x04>\x0c\x02\x01\x04\x01\xec[Z\xf2\x1fr\x00\xa4',
b'\x04>\x1d\x02\x01\x00\x01\xa7\x1d\xfed\xd5l\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05G\x1cz\xc0\xb2\xa1',
b'\x04>\x0c\x02\x01\x04\x01\xa7\x1d\xfed\xd5l\x00\xa2',
b'\x04>\x1a\x02\x01\x00\x01\xb1C\x93\xb2\xfad\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x13\x18\xe8\xdc\xbf\x9e',
b'\x04>+\x02\x01\x03\x01\xbb5z\xa0\xabY\x1f\x1e\xffL\x00\x07\x19\x01\x02 \x0bV\x8f\x00\x00\x00\xf8\xe40\x97\xc3\x8c\xad\xb21\xbf\x85\x0b\x14\x03\n\xc8\xa2',
b'\x04>\x1e\x02\x01\x00\x01\xfcQ\xbf\xf8\xbei\x12\x02\x01\x1a\x02\n\x08\x0b\xffL\x00\x10\x06!\x1a\x03\x86:\xeb\xa0',
b'\x04>\x0c\x02\x01\x04\x01\xb1C\x93\xb2\xfad\x00\xae',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04\x0f\x04\x00\x01\x01\x04',
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>(\x02\x01\x02\x01p\x95\xcc,\x1fN\x1c\x1b\xff\xff\xff\xbe\xacH%>Yr$Dc\xb9\xb8\x03?\xfa\xb5\x81\x04\x00{\x01A\xbc\x00\xb8',
b'\x04>\x0c\x02\x01\x04\x01p\x95\xcc,\x1fN\x00\xb8',
b'\x04>&\x02\x01\x02\x01t\x85\xcb\xb2H\\\x1a\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xbf\xfb5\xfd\r\x17i\x1dd\xaa\x90\xab\xcd\xef\t\x87e\xba',
b'\x04>\x0c\x02\x01\x04\x01t\x85\xcb\xb2H\\\x00\xba',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x01\x10\xfd\xf3\xc6\xac',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xad',
b'\x04>\x1e\x02\x01\x00\x01\x95\xfdu\xa2>N\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\x86\xdc\xb98\xae',
b'\x04>\x0c\x02\x01\x04\x01\x95\xfdu\xa2>N\x00\xae',
b'\x04>+\x02\x01\x00\x01\xe0b\xe1\xf7v\xdc\x1f\x02\x01\x06\x11\xff3\x01\x1bd\x0e\x10\x00`\x00\xe8\x02\x0f(]\x01\x00\t\tDC76F7E1\xb0',
b'\x04>)\x02\x01\x04\x01\xe0b\xe1\xf7v\xdc\x1d\x1c\xff3\x01(e(L(*\x00\x00\x00\xf0\x02(\x00\xcf\x02\x04\x00\xe4\x02\x10\x00\x00\x00\x00\x00\xaf',
b'\x04>\x1f\x02\x01\x02\x01^\x89\xf3\x12\xfa_\x13\x03\x03\xaa\xfe\x0e\x16\xaa\xfe\x10\xbd\x01firstuk\x01\xb8',
b'\x04>\x0c\x02\x01\x04\x01^\x89\xf3\x12\xfa_\x00\xb8',
b"\x04>'\x02\x01\x02\x01\x8eml\xe2\x83b\x1b\x1a\xffL\x00\x02\x15j\xb1|\x17\xf4{MA\x806Rj\xee\xd2/s\x01\x16\x03h\xbf\xb9",
b'\x04>\x0c\x02\x01\x04\x01\x8eml\xe2\x83b\x00\xb9',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xf6\x00\x00\x00\x00resaec\x00\x00\x00\x00\x00\x0f\xae',
b'\x04>(\x02\x01\x02\x01%\xae\x8a\x05S~\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa9',
b'\x04>\x16\x02\x01\x04\x01%\xae\x8a\x05S~\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xaa',
b'\x04>\x1e\x02\x01\x00\x01W\xc32c!K\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06G\x1d\x96[\x97\x80\xaa',
b'\x04>\x0c\x02\x01\x04\x01W\xc32c!K\x00\xab',
b'\x04>\x1a\x02\x01\x00\x01\xb1C\x93\xb2\xfad\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x13\x18\xe8\xdc\xbf\xae',
b'\x04>\x0c\x02\x01\x04\x01\xb1C\x93\xb2\xfad\x00\xae',
b'\x04>\x1d\x02\x01\x00\x01\xc2\x0e&N\xd7C\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x01\x18N\xa5\xcb\xa1',
b'\x04>\x0c\x02\x01\x04\x01\xc2\x0e&N\xd7C\x00\x9f',
b'\x04>*\x02\x01\x00\x01|\xd7\xf7w\x0c\xf1\x1e\x02\x01\x06\x11\x06\xbaV\x89\xa6\xfa\xbf\xa2\xbd\x01F}n\x00\xfb\xab\xad\x08\x16\n\x18\x16\x04s\x83\x03\xa8',
b'\x04>\x19\x02\x01\x04\x01|\xd7\xf7w\x0c\xf1\r\t\tCharge 2\x02\n\x05\xa8',
b'\x04>\x1d\x02\x01\x00\x01\xa7\x1d\xfed\xd5l\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05G\x1cz\xc0\xb2\xa1',
b'\x04>\x1a\x02\x01\x00\x01\xec[Z\xf2\x1fr\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x1c\xa4\xcb\xb3\xa4',
b'\x04>\x0c\x02\x01\x04\x01\xec[Z\xf2\x1fr\x00\xa5',
b'\x04>\x1a\x02\x01\x00\x01\xc8\xd4\xf9\xf46w\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x18mQ\xeb\xa9',
b'\x04>\x0c\x02\x01\x04\x01\xc8\xd4\xf9\xf46w\x00\xa9',
b'\x04>\x1d\x02\x01\x00\x01\x95|\xedj^V\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x13\x18}\xdf\x0c\xa7',
b'\x04>\x0c\x02\x01\x04\x01\x95|\xedj^V\x00\xa7',
b'\x04>\x1d\x02\x01\x00\x01\x93\x0c6\x94\xf3Q\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x04\x1c\x1df\x9a\xa4',
b'\x04>\x0c\x02\x01\x04\x01\x93\x0c6\x94\xf3Q\x00\xa3',
b'\x04>(\x02\x01\x03\x00iX\xf1\xf4\xc3\x00\x1c\x1b\xffu\x00B\x04\x01\x80`\x00\xc3\xf4\xf1Xi\x02\xc3\xf4\xf1Xh\x01\x00\x00\x00\x00\x00\x00\xa7',
b'\x04>\x1d\x02\x01\x00\x01\x15\xc1\xedPH\x7f\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x03\x1c\xeeQ<\xa1',
b'\x04>\x0c\x02\x01\x04\x01\x15\xc1\xedPH\x7f\x00\xa3',
b'\x04>+\x02\x01\x03\x01\xbb5z\xa0\xabY\x1f\x1e\xffL\x00\x07\x19\x01\x02 \x0bV\x8f\x00\x00\x00\xf8\xe40\x97\xc3\x8c\xad\xb21\xbf\x85\x0b\x14\x03\n\xc8\xa2',
b'\x04>\x1e\x02\x01\x00\x01[a\xc6\x83\xc9a\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x02\x1e\x96g\x0e0\x9d',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8k\x00\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\xa0\x04>\x1e',
b'\x04>\x0c\x02\x01\x04\x01[a\xc6\x83\xc9a\x00\xa5',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04\x0f\x04\x00\x01\x01\x04',
b'\x04\x0e\x04\x01\x05 \x00',
b'\x04\x0e\x04\x01\x0b \x00',
b'\x04\x0e\x04\x01\x0c \x00',
b'\x04>\x1a\x02\x01\x00\x01\xec[Z\xf2\x1fr\x0e\x02\x01\x1a\n\xffL\x00\x10\x05\x03\x1c\xa4\xcb\xb3\xa7',
b'\x04>\x0c\x02\x01\x04\x01\xec[Z\xf2\x1fr\x00\xa5',
b'\x04>\x1d\x02\x01\x00\x01\x95|\xedj^V\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x1 | 3\x18}\xdf\x0c\xa4',
b'\x04>\x0c\x02\x01\x04\x01\x95|\xedj^V\x00\xa4',
b'\x04>+\x02\x01\x03\x01\xbb5z\xa0\xabY\x1f\x1e\xffL\x00\x07\x19\x01\x02 \x0bU\x8f\x00\x00\x00\xe7p\xdc\xa2\x0fO\x8c\xc6.\xf3\xac\x07\xab\xc1\xf1\x06\xac',
b'\x04>\x1e\x02\x01\x00\x01W\xc32c!K\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06K\x1d\x96[\x97\x80\xb6',
b'\x04>\x0c\x02\x01\x04\x01W\xc32c!K\x00\xb6',
b'\x04>\x1d\x02\x01\x00\x01\xc2\x0e&N\xd7C\x11\ | x02\x01\x1a\x02\n\x0c\n\xffL\x00\x10\x05\x01\x18N\xa5\xcb\xaa',
b'\x04>(\x02\x01\x03\x00k\xa0\xd0.\x04\xf8\x1c\x1b\xffu\x00B\x04\x01\x80\xac\xf8\xd0\x00\xa0k\xfa\x04.\xd0\xa0j\x01\x17@\x00\x00\x00\x00\xa0\x04',
b'\x04>\x1a\x02\x01\x00\x01\x07\xbb\xd8!p\\\x0e\x02\x01\x06\n\xffL\x00\x10\x05\x01\x10\xfd\xf3\xc6\xae',
b'\x04>\x0c\x02\x01\x04\x01\x07\xbb\xd8!p\\\x00\xae',
b'\x04>\x1d\x02\x01\x00\x01\x15\xc1\xedPH\x7f\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x1c\x00\xeeQ<\x9f\x04>',
b'\x04>)\x02\x01\x03\x01\xbeC\xe75\x82\xde\x1d\x02\x01\x06\x03\x03\xaa\xfe\x15\x16\xaa\xfe\x00\xf6\x00\x00\x00\x00resaec\x00\x00\x00\x00\x00\x0f\xb7',
b'\x04>\x1e\x02\x01\x00\x01[a\xc6\x83\xc9a\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x02\x1e\x96g\x0e0\xa1',
b'\x04>\x0c\x02\x01\x04\x01[a\xc6\x83\xc9a\x00\xa3',
b'\x04>\x1e\x02\x01\x00\x01\x95\xfdu\xa2>N\x12\x02\x01\x1a\x02\n\x0c\x0b\xffL\x00\x10\x06\x03\x1e\x86\xdc\xb98\xac',
b'\x04>\x0c\x02\x01\x04\x01\x95\xfdu\xa2>N\x00\xad',
b'\x04>\x1d\x02\x01\x00\x01\x93\x0c6\x94\xf3Q\x11\x02\x01\x1a\x02\n\x0c\n\xffL\x00\x04\x00>\x0c\x02\x01\x04\x01',
b'\x04>(\x02\x01\x02\x01%\xae\x8a\x05S~\x1c\x03\x03\x9f\xfe\x17\x16\x9f\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaf',
b'\x04>\x16\x02\x01\x04\x01%\xae\x8a\x05S~\n\t\xff\xe0\x00\x01z\xca\x86\xa1\xca\xb0',
b'\x04>\x1e\x02\x01\x00\x01\xfcQ\xbf\xf8\xbei\x12\x02\x01\x1a\x02\n\x08\x0b\xffL\x00:\x00\xeb\x9f\x04>\x1f\x02\x01',
b'\x04>\x0c\x02\x01\x04\x01^\x89\xf3\x12\xfa_\x00\xb5'
]
more_beacons |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
try:
import torch
except ImportError:
pass # soft dep
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.utils.annotations import override
class TorchDistributionWrapper(ActionDistribution):
    """Adapter exposing a ``torch.distributions`` object through the RLlib
    ``ActionDistribution`` interface.

    Subclasses must set ``self.dist`` to a concrete
    ``torch.distributions.Distribution`` instance in ``__init__``.
    """
    @override(ActionDistribution)
    def logp(self, actions):
        """Log-probability of *actions* under the wrapped distribution."""
        return self.dist.log_prob(actions)
    @override(ActionDistribution)
    def entropy(self):
        """Entropy of the wrapped distribution."""
        return self.dist.entropy()
    @override(ActionDistribution)
    def kl(self, other):
        """KL divergence KL(self.dist || other.dist).

        Bug fix: *other* is another TorchDistributionWrapper, so the inner
        torch distribution must be unwrapped — passing the wrapper itself
        makes torch's kl_divergence dispatch fail (no registered pair).
        """
        return torch.distributions.kl.kl_divergence(self.dist, other.dist)
    @override(ActionDistribution)
    def sample(self):
        """Draw one sample from the wrapped distribution."""
        return self.dist.sample()
class | TorchCategorical(TorchDistributionWrapper):
"""Wrapper class for PyTorch Categorical distribution."""
@override(ActionDistribution)
def __init__(self, inputs):
self.dist = torch.distributions.categorical.Categorical(logits=inputs)
class TorchDiagGaussian(TorchDistributionWrapper):
    """Diagonal Gaussian distribution backed by torch.distributions.Normal.

    The input tensor is split in two halves along dim 1: the first half is
    the per-dimension means, the second half the log standard deviations.
    """
    @override(ActionDistribution)
    def __init__(self, inputs):
        mean, log_std = torch.chunk(inputs, 2, dim=1)
        self.dist = torch.distributions.normal.Normal(mean, log_std.exp())
    @override(TorchDistributionWrapper)
    def logp(self, actions):
        # Dimensions are independent, so total log-prob is the sum of the
        # per-dimension log-probs over the last axis.
        return super(TorchDiagGaussian, self).logp(actions).sum(-1)
|
# Copyright (c) | 2015, Matt Layman
"""Tests for tappy"""
from tap.tests.testcase import Te | stCase # NOQA
|
dule: lemur.auth.views
:platform: Unix
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
import jwt
import base64
import requests
from flask import g, Blueprint, current_app
from flask.ext.restful import reqparse, Resource, Api
from flask.ext.principal import Identity, identity_changed
from lemur.common.utils import get_psuedo_random_string
from lemur.users import service as user_service
from lemur.roles import service as role_service
from lemur.auth.service import create_token, fetch_token_header, get_rsa_public_key
mod = Blueprint('auth', __name__)
api = Api(mod)
class Login(Resource):
    """Basic username/password authentication endpoint.

    Exchanges a credential pair for a JWT that must accompany every later
    API request::

        Authorization:Bearer <token>

    Tokens carry an expiration timestamp (inspect it by base64-decoding the
    payload).  Keep the lifetime short (hours, not days): there is currently
    no built-in way to revoke an issued token and force re-authentication.
    """
    def __init__(self):
        self.reqparse = reqparse.RequestParser()
        super(Login, self).__init__()
    def post(self):
        """
        .. http:post:: /auth/login

           Authenticate with a username (or email address) and password.

           **Example request**:

           .. sourcecode:: http

              POST /auth/login HTTP/1.1
              Host: example.com
              Accept: application/json, text/javascript

              {
                "username": "test",
                "password": "test"
              }

           **Example response**:

           .. sourcecode:: http

              HTTP/1.1 200 OK
              Vary: Accept
              Content-Type: text/javascript

              {
                "token": "12343243243"
              }

           :arg username: username
           :arg password: password
           :statuscode 200: no error
           :statuscode 401: invalid credentials
        """
        self.reqparse.add_argument('username', type=str, required=True, location='json')
        self.reqparse.add_argument('password', type=str, required=True, location='json')
        args = self.reqparse.parse_args()
        # An '@' means the caller supplied an email address instead of a
        # plain username; look the account up accordingly.
        username = args['username']
        if '@' in username:
            user = user_service.get_by_email(username)
        else:
            user = user_service.get_by_username(username)
        if not (user and user.check_password(args['password'])):
            return dict(message='The supplied credentials are invalid'), 401
        # Tell Flask-Principal the identity changed
        identity_changed.send(current_app._get_current_object(),
                              identity=Identity(user.id))
        return dict(token=create_token(user))
    def get(self):
        """Return the authenticated user's username and role names."""
        current = g.current_user
        return {'username': current.username,
                'roles': [r.name for r in current.roles]}
class Ping(Resource):
"""
This class serves as an example of how one might implement an SSO provider for use with Lemur. In
this example we use a OpenIDConnect authentication flow, that is essentially OAuth2 underneath. If you have an
OAuth2 provider you want to use Lemur there would be two steps:
1. Define your own class that inherits from :class:`flask.ext.restful.Resource` and create the HTTP methods the \
provider uses for it's callbacks.
2. Add or change the Lemur AngularJS Configuration to point to your new provider
"""
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Ping, self).__init__()
def post(self):
self.reqparse.add_argument('clientId', type=str, required=True, location='json')
self.reqparse.add_argument('redirectUri', type=str, required=True, location='json')
self.reqparse.add_argument('code', type=str, required=True, location='json')
args = self.reqparse.parse_args()
# take the information we have received from the provider to create a new request
params = {
'client_id': args['clientId'],
'grant_type': 'authorization_code',
'scope': 'openid email profile address',
'redirect_uri': args['redirectUri'],
'code': args['code']
}
# you can either discover these dynamically or simply configure them
access_token_url = current_app.config.get('PING_ACCESS_TOKEN_URL')
user_api_url = current_app.config.get('PING_USER_API_URL')
# the secret and cliendId will be given to you when you signup for the provider
basic = base64.b64encode('{0}:{1}'.format(args['clientId'], current_app.config.get("PING_SECRET")))
headers = {'Authorization': 'Basic {0}'.format(basic)}
# exchange authorization code for access token.
r = requests.post(access_token_url, headers=headers, params=params)
id_token = r.json()['id_token']
access_token = r.json()['access_token']
# fetch token public key
header_data = fetch_token_header(id_token)
jwks_url = current_app.config.get('PING_JWKS_URL')
# retrieve the key material as specified by the token header
r = requests.get(jwks_url)
for key in r.json()['keys']:
if key['kid'] == header_data['kid']:
secret = get_rsa_public_key(key['n'], key['e'])
algo = header_data['alg']
break
else:
return dict(message='Key not found'), 403
# validate your token based on the key it was signed with
try:
jwt.decode(id_token, secret, algorithms=[algo], audience=args['clientId'])
except jwt.DecodeError:
return dict(message='Token is invalid'), 403
except jwt.ExpiredSignatureError:
return dict(message='Token has expired'), 403
except jwt.InvalidTokenError:
return dict(message='Token is invalid'), 403
user_params = dict(access_token=access_token, schema='profile')
# retrieve information about the current user.
r = requests.get(user_api_url, params=user_params)
profile = r.json()
user = user_service.get_by_email(profile['email'])
# update their google 'roles'
roles = []
for group in profile['googleGroups']:
role = role_service.get_by_name(group)
if not role:
role = role_service.create(group, description='This is a google group based role created by Lemur')
roles.append(role)
# if we get an sso user create them an account
# we still pick a random password in case sso is down
if not user:
# every user is an operator (tied to a default role)
if current_app.config.get('LEMUR_DEFAULT_ROLE'):
v = role_service.get_by_name(current_app.config.get('LEMUR_DEFAULT_ROLE'))
if v:
roles.append(v)
user = user_service.create(
profile['email'],
get_psuedo_random_string(),
profile['email'],
True,
profile.get('thumbnailPhotoUrl'),
roles
)
else:
# we add 'lemur' specific roles, so they do not get marked as removed
for ur in user.roles:
if ur.authority_id:
roles.append(ur)
# update any changes to the user
user_service.update(
user.id,
profile['email'],
profile['email'],
True,
profile.get('thumbnailPhotoUrl'), # incase profile isn't google+ enabled
roles
)
# Tell Flask-Principal the identity changed
|
"""
pluginconf.d configuration file - Files
=======================================
Shared mappers for parsing and extracting data from
``/etc/yum/pluginconf.d/*.conf`` files. Parsers contained
in this module are:
PluginConfD - files ``/etc/yum/pluginconf.d/*.conf``
---------------------------------------------------
PluginConfDIni - files ``/etc/yum/pluginconf.d/*.conf``
-------------------------------------------------------
"""
from insights.core import IniConfigFile, LegacyItemAccess, Parser
from insights.core.plugins import parser
from insights.parsers import get_active_lines
from insights.specs import Specs
from insights.util import deprecated
@parser(Specs.pluginconf_d)
class PluginConfD(LegacyItemAccess, Parser):
    """
    .. warning::
        This parser is deprecated, please use
        :py:class:`insights.parsers.pluginconf_d.PluginConfDIni` instead
    Class to parse configuration file under ``pluginconf.d``
    Sample configuration::
        [main]
        enabled = 0
        gpgcheck = 1
        timeout = 120
        # You can specify options per channel, e.g.:
        #
        #[rhel-i386-server-5]
        #enabled = 1
        #
        #[some-unsigned-custom-channel]
        #gpgcheck = 0
    """
    def parse_content(self, content):
        # Emit the deprecation warning on every parse.
        deprecated(PluginConfD, "Deprecated. Use 'PluginConfDIni' instead.")
        plugin_dict = {}
        section_dict = {}
        key = None
        for line in get_active_lines(content):
            if line.startswith('['):
                # New "[section]" header: start a fresh dict for its options.
                section_dict = {}
                plugin_dict[line[1:-1]] = section_dict
            elif '=' in line:
                # "key = value" option line within the current section.
                key, _, value = line.partition("=")
                key = key.strip()
                section_dict[key] = value.strip()
            else:
                # Continuation line: append it to the most recent key's
                # value, comma-separated (see the multiline sample in
                # PluginConfDIni's docstring).
                if key:
                    section_dict[key] = ','.join([section_dict[key], line])
        # {section_name: {option: value}} mapping exposed via LegacyItemAccess.
        self.data = plugin_dict
    def __iter__(self):
        # Iterate over section names.
        for sec in self.data:
            yield sec
@parser(Specs.pluginconf_d)
class PluginConfDIni(IniConfigFile):
    """
    Read yum plugin config files, in INI format, using the standard INI file
    parser class.
    Sample configuration::
        [main]
        enabled = 0
        gpgcheck = 1
        timeout = 120
        # You can specify options per channel, e.g.:
        #
        #[rhel-i386-server-5]
        #enabled = 1
        #
        #[some-unsigned-custom-channel]
        #gpgcheck = 0
        [test]
        test_multiline_config = http://example.com/repos/test/
            http://mirror_example.com/repos/test/
    Examples:
        >>> type(conf)
        <class 'insights.parsers.pluginconf_d.PluginConfDIni'>
        >>> conf.sections()
        ['main', 'test']
        >>> conf.has_option('main', 'gpgcheck')
        True
        >>> conf.get("main", "enabled")
        '0'
        >>> conf.getint("main", "timeout")
        120
        >>> conf.getboolean("main", "enabled")
        False
        >>> conf.get("test", "test_multiline_config")
        'http://example.com/repos/test/ http://mirror_example.com/repos/test/'
    """
    # All parsing behavior is inherited from IniConfigFile; this subclass
    # only binds the spec via the @parser decorator.
    pass
|
import collections
class Solution:
    def numSimilarGroups(self, A):
        """Count groups of mutually-reachable words.

        Two words are "similar" when one becomes the other by swapping at
        most one pair of characters; all words are assumed to be anagrams
        of each other (LeetCode 839).  Words are grouped with a
        path-compressing union-find.

        Two strategies, chosen by input shape:
        * few long words (N < W*W): compare every pair, O(N^2 * W);
        * many short words: index every one-swap variant of every word and
          union words sharing a variant, O(N * W^2) variants.

        Fixes over the original: the union-find was initialized twice
        (redundant second loop removed), and an empty input now returns 0
        instead of raising IndexError on A[0].
        """
        if not A:
            return 0
        UF = {}
        for i in range(len(A)):
            UF[i] = i
        def find(x):
            # Path-compressing find.
            if x != UF[x]:
                UF[x] = find(UF[x])
            return UF[x]
        def union(x, y):
            UF.setdefault(x, x)
            UF.setdefault(y, y)
            UF[find(x)] = find(y)
        def match(s1, s2):
            # True when s1 and s2 differ in at most two positions and agree
            # after the second difference.  Because the inputs are anagrams,
            # that is exactly the one-swap condition.
            i = 0
            j = -1
            while i < len(s1):
                if s1[i] != s2[i]:
                    if j == -1:
                        j = i
                    else:
                        break
                i += 1
            return s1[i+1:] == s2[i+1:]
        N, W = len(A), len(A[0])
        if N < W*W:
            # Pairwise comparison wins when words are long relative to N.
            for i in range(N):
                for j in range(i+1, N):
                    if match(A[i], A[j]):
                        union(i, j)
        else:
            # Bucket word indices by every single-swap variant, then union
            # each word with all indices whose variant equals it.
            d = collections.defaultdict(set)
            for idx, w in enumerate(A):
                lw = list(w)
                for i in range(W):
                    for j in range(i+1, W):
                        lw[i], lw[j] = lw[j], lw[i]
                        d["".join(lw)].add(idx)
                        lw[i], lw[j] = lw[j], lw[i]
            for i, w in enumerate(A):
                for j in d[w]:
                    union(i, j)
        return len({find(x) for x in UF})
print(Solution().numSimilarGroups(["tars","rats","arts","star"]))
|
# -*- coding: utf-8 -*-
from queue.producer import Producer
from queue.consumer import Consumer
from queue.bloom_fil | ter import Bl | oomFilter
class Dytt:
    """Driver that starts the queue consumer workers."""
    @staticmethod
    def main():
        # Launch 15 consumer workers; the producer side is currently
        # disabled (kept here for reference).
        for _ in range(15):
            # Producer().start()
            Consumer().start()
if __name__ == '__main__':
    # Bug fix: `main` is defined inside the class, so the original
    # module-level call `main()` raised NameError.  It is now a
    # staticmethod invoked through the class.
    Dytt.main()
|
"""
Helper Methods
"""
import six
de | f _get_key(key_or_id, key_cls):
"""
Helper method to get a course/usage key either from a string or a key_cls,
where the key_cls (CourseKey or UsageKey) will simply be returned.
"""
return (
key_cls.from_string(key_or_id)
if isinstanc | e(key_or_id, six.string_types)
else key_or_id
)
|
# -*- coding:utf-8 -*-
import logging
import warnings
from flypwd.config import config
wi | th warnings.catch_warnings():
warnings.simplefilter("ignore")
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5
log = logging.getLogger(__name__)
def | check_key(keyfile):
"""
checks the RSA key file
raises ValueError if not valid
"""
with open(keyfile, 'r') as f:
return RSA.importKey(f.read(), passphrase="")
def gen_key():
    """Generate a fresh RSA key sized per the ``keys.dimension`` setting."""
    key_bits = config.getint('keys', 'dimension')
    return RSA.generate(key_bits)
def encrypt_with_pub(pwd, pub):
    """Encrypt the UTF-8 encoding of *pwd* with public key *pub* using
    PKCS#1 v1.5 padding; returns the ciphertext bytes."""
    return PKCS1_v1_5.new(pub).encrypt(pwd.encode('utf-8'))
|
_rd_en,
mcb_rd_data,
mcb_rd_empty,
mcb_rd_full,
mcb_rd_overflow,
mcb_rd_count,
mcb_rd_error):
if os.system(build_cmd):
raise Exception("Error running build command")
return Cosimulation("vvp -m myhdl test_%s.vvp -lxt2" % module,
clk=clk,
rst=rst,
current_test=current_test,
wb_adr_i=wb_adr_i,
wb_dat_i=wb_dat_i,
wb_dat_o=wb_dat_o,
wb_we_i=wb_we_i,
wb_sel_i=wb_sel_i,
wb_stb_i=wb_stb_i,
wb_ack_o=wb_ack_o,
wb_cyc_i=wb_cyc_i,
mcb_cmd_clk=mcb_cmd_clk,
mcb_cmd_en=mcb_cmd_en,
mcb_cmd_instr=mcb_cmd_instr,
mcb_cmd_bl=mcb_cmd_bl,
mcb_cmd_byte_addr=mcb_cmd_byte_addr,
mcb_cmd_empty=mcb_cmd_empty,
mcb_cmd_full=mcb_cmd_full,
mcb_wr_clk=mcb_wr_clk,
mcb_wr_en=mcb_wr_en,
mcb_wr_mask=mcb_wr_mask,
mcb_wr_data=mcb_wr_data,
mcb_wr_empty=mcb_wr_empty,
mcb_wr_full=mcb_wr_full,
mcb_wr_underrun=mcb_wr_underrun,
mcb_wr_count=mcb_wr_count,
mcb_wr_error=mcb_wr_error,
mcb_rd_clk=mcb_rd_clk,
mcb_rd_en=mcb_rd_en,
mcb_rd_data=mcb_rd_data,
mcb_rd_empty=mcb_rd_empty,
mcb_rd_full=mcb_rd_full,
mcb_rd_overflow=mcb_rd_overflow,
mcb_rd_count=mcb_rd_count,
mcb_rd_error=mcb_rd_error)
def bench():
# Parameters
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
wb_adr_i = Signal(intbv(0)[32:])
wb_dat_i = Signal(intbv(0)[32:])
wb_we_i = Signal(bool(0))
wb_sel_i = Signal(intbv(0)[4:])
wb_stb_i = Signal(bool(0))
wb_cyc_i = Signal(bool(0))
mcb_cmd_empty = Signal(bool(0))
mcb_cmd_full = Signal(bool(0))
mcb_wr_empty = Signal(bool(0))
mcb_wr_full = Signal(bool(0))
mcb_wr_underrun = Signal(bool(0))
mcb_wr_count = Signal(intbv(0)[7:])
mcb_wr_error = Signal(bool(0))
mcb_rd_data = Signal(intbv(0)[32:])
mcb_rd_empty = Signal(bool(0))
mcb_rd_full = Signal(bool(0))
mcb_rd_overflow = Signal(bool(0))
mcb_rd_count = Signal(intbv(0)[7:])
mcb_rd_error = Signal(bool(0))
# Outputs
wb_dat_o = Signal(intbv(0)[32:])
wb_ack_o = Signal(bool(0))
mcb_cmd_clk = Signal(bool(0))
mcb_cmd_en = Signal(bool(0))
mcb_cmd_instr = Signal(intbv(0)[3:])
mcb_cmd_bl = Signal(intbv(0)[6:])
mcb_cmd_byte_addr = Signal(intbv(0)[32:])
mcb_wr_clk = Signal(bool(0))
mcb_wr_en = Signal(bool(0))
mcb_wr_mask = Signal(intbv(0)[4:])
mcb_wr_data = Signal(intbv(0)[32:])
mcb_rd_clk = Signal(bool(0))
mcb_rd_en = Signal(bool(1))
# WB master
wbm_inst = wb.WBMaster()
wbm_logic = wbm_inst.create_logic(clk,
adr_o=wb_adr_i,
dat_i=wb_dat_o,
dat_o=wb_dat_i,
we_o=wb_we_i,
sel_o=wb_sel_i,
stb_o=wb_stb_i,
ack_i=wb_ack_o,
cyc_o=wb_cyc_i,
name='master')
# MCB model
mcb_inst = mcb.MCB(2**16)
mcb_controller = mcb_inst.create_controller(clk, rst)
mcb_port0 = mcb_inst.create_readwrite_port(cmd_clk=mcb_cmd_clk,
cmd_en=mcb_cmd_en,
cmd_instr=mcb_cmd_instr,
cmd_bl=mcb_cmd_bl,
cmd_byte_addr=mcb_cmd_byte_addr,
cmd_empty=mcb_cmd_empty,
cmd_full=mcb_cmd_full,
wr_clk=mcb_wr_clk,
wr_en=mcb_wr_en,
wr_mask=mcb_wr_mask,
wr_data=mcb_wr_data,
wr_empty=mcb_wr_empty,
wr_full=mcb_wr_full,
wr_underrun=mcb_wr_underrun,
wr_count=mcb_wr_count,
wr_error=mcb_wr_error,
rd_clk=mcb_rd_clk,
rd_en=mcb_rd_en,
rd_data=mcb_rd_data,
rd_empty=mcb_rd_empty,
rd_full=mcb_rd_full,
| rd_overflow=mcb_rd_overflow,
rd_count=mcb_rd_count,
rd_error=mcb_rd_error,
name='port0')
# DUT
dut = dut_wb_mcb_32(clk,
rst,
current_test,
wb_adr_i,
wb_dat_i,
wb_dat_o,
wb_we_i,
wb_sel_i,
wb_stb_i | ,
wb_ack_o,
wb_cyc_i,
mcb_cmd_clk,
mcb_cmd_en,
mcb_cmd_instr,
mcb_cmd_bl,
mcb_cmd_byte_addr,
mcb_cmd_empty,
mcb_cmd_full,
mcb_wr_clk,
mcb_wr_en,
mcb_wr_mask,
mcb_wr_data,
mcb_wr_empty,
mcb_wr_full,
mcb_wr_underrun,
mcb_wr_count,
mcb_wr_error,
mcb_rd_clk,
mcb_rd_en,
mcb_rd_data,
mcb_rd_empty,
mcb_rd_full,
mcb_rd_overflow,
mcb_rd_count,
mcb_rd_error)
@always(delay(4))
def clkgen():
clk.next = not clk
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
yield clk.posedge
print("test 1: write")
current_test.next = 1
wbm_inst.init_write(4, '\x11\x22\x33\x44')
yield wbm_inst.wait()
yield clk.posedge
data = mcb_inst.read_mem(0, 32)
for i in range(0, len(data), 16):
print(" ".join("{:02x}".format(ord(c)) for c in data[i:i+16]))
assert mcb_inst.read_mem(4,4) == '\x11\x22\x33\x44'
yield delay(100)
yield clk.posedge
print("test 2: read")
current_test.next = 2
wbm_inst.init_read(4, 4)
yield wbm_inst.wait()
yield clk.posedge
data = wbm_inst.get_read_data()
assert data[0] == 4
assert data[1] == '\x11\x22\x33\x44'
yield delay(100)
yield clk.posedge
print("test 3: various writes")
current_test.next = 3
for length in range(1,8):
for offset in range(4):
wbm_inst.init_write(256*(16*offset+length)+offset, '\x11\x22\x33\x44\x55\x66\x77\x88'[0:length])
yield wbm_inst.wait()
yield clk.posedge
data = mcb_inst.read_mem(256*(16*offset+length), 32)
for i in range(0, len(data), 16):
print(" ".join("{:02x}".format(ord(c)) for c in data[i:i+16]))
assert mcb_inst.read_mem(256*(16*offset+length)+offset,length) == '\x11\x22\x33\x44\x55\x66\x77\x88'[0:length]
yield delay(100)
yield clk.posedge
print("test 4: various reads |
s()
def updateRadiOutBodePlots(ZarcFitWindow, value):
    """Radio-button handler for the Bode plot selection; redraw the figures."""
    ZarcFitWindow.updateFigs()
def updateRadiOutComplexPlots(ZarcFitWindow, value):
    """Radio-button handler for the complex-plane selection; redraw figures."""
    ZarcFitWindow.updateFigs()
# # # Help # # #
def ZarcFitHelp(ZarcFitWindow):
    """Help menu action (placeholder: just logs to stdout)."""
    print("ZarcFitHelp")
def AboutZarcFit(ZarcFitWindow):
    """About menu action (placeholder: just logs to stdout)."""
    print("AboutZarcFit")
#### Update Slide | rs ####
# Each model parameter X comes with a pair of callbacks:
#   updateSldOutX(window, value): the slider moved — decode the integer
#       slider position into the physical value (10**(value/1000) for the
#       log-scaled L/R/F/Q parameters, value/1000 for the P exponents),
#       echo it in the read-out widget, and store it on window.zarc.  The
#       figure refresh is commented out in these handlers — presumably
#       redrawing on every tick was too slow; TODO confirm.
#   updateSliderX(window, value): a numeric value was entered — move the
#       slider to the matching position, store the value, and redraw.
def updateSldOutLinf(ZarcFitWindow, value):
    Linf = 10**(value/1000.)
    ZarcFitWindow.SldOutLinf.setText("{:.2E}".format(Linf))
    ZarcFitWindow.zarc.Linf = Linf
    # ZarcFitWindow.updateFigs()
def updateSliderLinf(ZarcFitWindow, value):
    Linf = float(value)
    ZarcFitWindow.SliderLinf.setValue(int(np.log10(Linf)*1000.))
    ZarcFitWindow.zarc.Linf = Linf
    ZarcFitWindow.updateFigs()
def updateSldOutRinf(ZarcFitWindow, value):
    Rinf = 10**(value/1000.)
    ZarcFitWindow.SldOutRinf.setText("{:.2E}".format(Rinf))
    ZarcFitWindow.zarc.Rinf = Rinf
    # ZarcFitWindow.updateFigs()
def updateSliderRinf(ZarcFitWindow, value):
    Rinf = float(value)
    ZarcFitWindow.SliderRinf.setValue(int(np.log10(Rinf)*1000.))
    ZarcFitWindow.zarc.Rinf = Rinf
    ZarcFitWindow.updateFigs()
def updateSldOutRh(ZarcFitWindow, value):
    Rh = 10**(value/1000.)
    ZarcFitWindow.SldOutRh.setText("{:.2E}".format(Rh))
    ZarcFitWindow.zarc.Rh = Rh
    # ZarcFitWindow.updateFigs()
def updateSliderRh(ZarcFitWindow, value):
    Rh = float(value)
    ZarcFitWindow.SliderRh.setValue(int(np.log10(Rh)*1000.))
    ZarcFitWindow.zarc.Rh = Rh
    ZarcFitWindow.updateFigs()
def updateSldOutFh(ZarcFitWindow, value):
    Fh = 10**(value/1000.)
    ZarcFitWindow.SldOutFh.setText("{:.2E}".format(Fh))
    ZarcFitWindow.zarc.Fh = Fh
    # ZarcFitWindow.updateFigs()
def updateSliderFh(ZarcFitWindow, value):
    Fh = float(value)
    ZarcFitWindow.SliderFh.setValue(int(np.log10(Fh)*1000.))
    ZarcFitWindow.zarc.Fh = Fh
    ZarcFitWindow.updateFigs()
def updateSldOutPh(ZarcFitWindow, value):
    # P exponents are linear in the slider position (0..1000 -> 0..1).
    Ph = value/1000.
    ZarcFitWindow.SldOutPh.setText("{:.3f}".format(Ph))
    ZarcFitWindow.zarc.Ph = Ph
    # ZarcFitWindow.updateFigs()
def updateSliderPh(ZarcFitWindow, value):
    Ph = float(value)
    # NOTE(review): setValue receives a float here (Ph*1000); the log-scaled
    # handlers wrap int() — confirm the Qt binding accepts floats.
    ZarcFitWindow.SliderPh.setValue(Ph*1000)
    ZarcFitWindow.zarc.Ph = Ph
    ZarcFitWindow.updateFigs()
def updateSldOutRm(ZarcFitWindow, value):
    Rm = 10**(value/1000.)
    ZarcFitWindow.SldOutRm.setText("{:.2E}".format(Rm))
    ZarcFitWindow.zarc.Rm = Rm
    # ZarcFitWindow.updateFigs()
def updateSliderRm(ZarcFitWindow, value):
    Rm = float(value)
    ZarcFitWindow.SliderRm.setValue(int(np.log10(Rm)*1000.))
    ZarcFitWindow.zarc.Rm = Rm
    ZarcFitWindow.updateFigs()
def updateSldOutFm(ZarcFitWindow, value):
    Fm = 10**(value/1000.)
    ZarcFitWindow.SldOutFm.setText("{:.2E}".format(Fm))
    ZarcFitWindow.zarc.Fm = Fm
    # ZarcFitWindow.updateFigs()
def updateSliderFm(ZarcFitWindow, value):
    Fm = float(value)
    ZarcFitWindow.SliderFm.setValue(int(np.log10(Fm)*1000.))
    ZarcFitWindow.zarc.Fm = Fm
    ZarcFitWindow.updateFigs()
def updateSldOutPm(ZarcFitWindow, value):
    Pm = value/1000.
    ZarcFitWindow.SldOutPm.setText("{:.3f}".format(Pm))
    ZarcFitWindow.zarc.Pm = Pm
    # ZarcFitWindow.updateFigs()
def updateSliderPm(ZarcFitWindow, value):
    Pm = float(value)
    ZarcFitWindow.SliderPm.setValue(Pm*1000)
    ZarcFitWindow.zarc.Pm = Pm
    ZarcFitWindow.updateFigs()
def updateSldOutRl(ZarcFitWindow, value):
    Rl = 10**(value/1000.)
    ZarcFitWindow.SldOutRl.setText("{:.2E}".format(Rl))
    ZarcFitWindow.zarc.Rl = Rl
    # ZarcFitWindow.updateFigs()
def updateSliderRl(ZarcFitWindow, value):
    Rl = float(value)
    ZarcFitWindow.SliderRl.setValue(int(np.log10(Rl)*1000.))
    ZarcFitWindow.zarc.Rl = Rl
    ZarcFitWindow.updateFigs()
def updateSldOutFl(ZarcFitWindow, value):
    Fl = 10**(value/1000.)
    ZarcFitWindow.SldOutFl.setText("{:.2E}".format(Fl))
    ZarcFitWindow.zarc.Fl = Fl
    # ZarcFitWindow.updateFigs()
def updateSliderFl(ZarcFitWindow, value):
    Fl = float(value)
    ZarcFitWindow.SliderFl.setValue(int(np.log10(Fl)*1000.))
    ZarcFitWindow.zarc.Fl = Fl
    ZarcFitWindow.updateFigs()
def updateSldOutPl(ZarcFitWindow, value):
    Pl = value/1000.
    ZarcFitWindow.SldOutPl.setText("{:.3f}".format(Pl))
    ZarcFitWindow.zarc.Pl = Pl
    # ZarcFitWindow.updateFigs()
def updateSliderPl(ZarcFitWindow, value):
    Pl = float(value)
    ZarcFitWindow.SliderPl.setValue(Pl*1000)
    ZarcFitWindow.zarc.Pl = Pl
    ZarcFitWindow.updateFigs()
def updateSldOutRe(ZarcFitWindow, value):
    Re = 10**(value/1000.)
    ZarcFitWindow.SldOutRe.setText("{:.2E}".format(Re))
    ZarcFitWindow.zarc.Re = Re
    # ZarcFitWindow.updateFigs()
def updateSliderRe(ZarcFitWindow, value):
    Re = float(value)
    ZarcFitWindow.SliderRe.setValue(int(np.log10(Re)*1000.))
    ZarcFitWindow.zarc.Re = Re
    # NOTE(review): unlike the other updateSlider* handlers, the redraw
    # below is commented out — confirm this is intentional.
    # ZarcFitWindow.updateFigs()
def updateSldOutQe(ZarcFitWindow, value):
    Qe = 10**(value/1000.)
    ZarcFitWindow.SldOutQe.setText("{:.2E}".format(Qe))
    ZarcFitWindow.zarc.Qe = Qe
    # ZarcFitWindow.updateFigs()
def updateSliderQe(ZarcFitWindow, value):
    Qe = float(value)
    ZarcFitWindow.SliderQe.setValue(int(np.log10(Qe)*1000.))
    ZarcFitWindow.zarc.Qe = Qe
    ZarcFitWindow.updateFigs()
def updateSldOutPef(ZarcFitWindow, value):
    Pef = value/1000.
    ZarcFitWindow.SldOutPef.setText("{:.3f}".format(Pef))
    ZarcFitWindow.zarc.Pef = Pef
    # ZarcFitWindow.updateFigs()
def updateSliderPef(ZarcFitWindow, value):
    Pef = float(value)
    ZarcFitWindow.SliderPef.setValue(Pef*1000)
    ZarcFitWindow.zarc.Pef = Pef
    ZarcFitWindow.updateFigs()
def updateSldOutPei(ZarcFitWindow, value):
    Pei = value/1000.
    ZarcFitWindow.SldOutPei.setText("{:.3f}".format(Pei))
    ZarcFitWindow.zarc.Pei = Pei
    # ZarcFitWindow.updateFigs()
def updateSliderPei(ZarcFitWindow, value):
    Pei = float(value)
    ZarcFitWindow.SliderPei.setValue(Pei*1000)
    ZarcFitWindow.zarc.Pei = Pei
    ZarcFitWindow.updateFigs()
def SetDefaultParameters():
    """Return the default ZarcFit model parameters as a flat tuple.

    The order matters and matches the callers' unpacking:
    (Linf, Rinf, Rh, Fh, Ph, Rl, Fl, Pl, Rm, Fm, Pm, Re, Qe, Pef, Pei) —
    note that the Rl/Fl/Pl triple precedes the Rm/Fm/Pm triple.
    """
    return (
        1.E-4,   # Linf
        1.E4,    # Rinf
        1.E5,    # Rh
        1e5,     # Fh
        0.8,     # Ph
        1.E4,    # Rl
        1.e1,    # Fl
        0.5,     # Pl
        1e-1,    # Rm
        1e-1,    # Fm
        0.5,     # Pm
        1.E10,   # Re
        1.E-4,   # Qe
        0.5,     # Pef
        0.05,    # Pei
    )
###############################################################################
###############################################################################
if __name__ == '__main__':
#Read initial data
mysys = whichsystem()
mysys.run()
scriptPath = os.getcwd()
print(scriptPath+mysys.filesep+"ZarcFit.ini")
with open(scriptPath+mysys.filesep+"ZarcFit.ini", "r") as ini_file:
pathNameStr = ini_file.read()
pathNameStr = pathNameStr.rstrip('\n')
################ The following lines are unecessary, as Main now starts by reading the pathNameStr directory.
path = "../data/HVC2014_10Grenon/"
fnameobs = "BC13867-A 2014-10-23.z"
pathobs = path+fnameobs
temp = np.loadtxt(pathobs, skiprows=11, delimiter=",")
obs = temp[:,4]+1j*temp[:,5]
frequency = temp[:,0].copy()
frequencyN = len (frequency)
zarc = ZarcfitCalculations(obs, frequency)
Linf, Rinf, Rh, Fh, Ph, Rl, Fl, Pl, Rm, Fm, Pm, Re, Qe, Pef, Pei = SetDefaultParameters()
zar |
pend_page(self.__drives_page(), gtk.Label(_("Drives")))
if 'posix' == os.name:
notebook.append_page(
self.__languages_page(), gtk.Label(_("Languages")))
notebook.append_page(self.__locations_page(
LOCATIONS_WHITELIST), gtk.Label(_("Whitelist")))
self.dialog.vbox.pack_start(notebook, True)
self.dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
def __toggle_callback(self, cell, path):
"""Callback function to toggle option"""
options.toggle(path)
if online_update_notification_enabled:
self.cb_beta.set_sensitive(options.get('check_online_updates'))
if 'nt' == os.name:
self.cb_winapp2.set_sensitive(
options.get('check_online_updates'))
if 'auto_hide' == path:
self.cb_refresh_operations()
if 'auto_start' == path:
if 'nt' == os.name:
swc = Windows.start_with_computer
if 'posix' == os.name:
swc = Unix.start_with_computer
try:
swc(options.get(path))
except:
traceback.print_exc()
dlg = gtk.MessageDialog(self.parent,
type=gtk.MESSAGE_ERROR,
buttons=gtk.BUTTONS_OK,
message_format=str(sys.exc_info()[1]))
dlg.run()
dlg.destroy()
    def __general_page(self):
        """Return a widget containing the general page"""
        # Sync the stored 'auto_start' option with the OS's actual state
        # before showing the checkbox that reflects it.
        if 'nt' == os.name:
            swcc = Windows.start_with_computer_check
        if 'posix' == os.name:
            swcc = Unix.start_with_computer_check
        options.set('auto_start', swcc())
        vbox = gtk.VBox()
        if online_update_notification_enabled:
            cb_updates = gtk.CheckButton(
                _("Check periodically for software updates via the Internet"))
            cb_updates.set_active(options.get('check_online_updates'))
            cb_updates.connect(
                'toggled', self.__toggle_callback, 'check_online_updates')
            cb_updates.set_tooltip_text(
                _("If an update is found, you will be given the option to view information about it. Then, you may manually download and install the update."))
            vbox.pack_start(cb_updates, False)
            # Sub-options indented under the master update checkbox; their
            # sensitivity tracks 'check_online_updates' (see __toggle_callback).
            updates_box = gtk.VBox()
            updates_box.set_border_width(10)
            self.cb_beta = gtk.CheckButton(_("Check for new beta releases"))
            self.cb_beta.set_active(options.get('check_beta'))
            self.cb_beta.set_sensitive(options.get('check_online_updates'))
            self.cb_beta.connect(
                'toggled', self.__toggle_callback, 'check_beta')
            updates_box.pack_start(self.cb_beta, False)
            if 'nt' == os.name:
                self.cb_winapp2 = gtk.CheckButton(
                    _("Download and update cleaners from community (winapp2.ini)"))
                self.cb_winapp2.set_active(options.get('update_winapp2'))
                self.cb_winapp2.set_sensitive(
                    options.get('check_online_updates'))
                self.cb_winapp2.connect(
                    'toggled', self.__toggle_callback, 'update_winapp2')
                updates_box.pack_start(self.cb_winapp2, False)
            vbox.pack_start(updates_box, False)
        # TRANSLATORS: This means to hide cleaners which would do
        # nothing. For example, if Firefox were never used on
        # this system, this option would hide Firefox to simplify
        # the list of cleaners.
        cb_auto_hide = gtk.CheckButton(_("Hide irrelevant cleaners"))
        cb_auto_hide.set_active(options.get('auto_hide'))
        cb_auto_hide.connect('toggled', self.__toggle_callback, 'auto_hide')
        vbox.pack_start(cb_auto_hide, False)
        # TRANSLATORS: Overwriting is the same as shredding. It is a way
        # to prevent recovery of the data. You could also translate
        # 'Shred files to prevent recovery.'
        cb_shred = gtk.CheckButton(_("Overwrite files to hide contents"))
        cb_shred.set_active(options.get('shred'))
        cb_shred.connect('toggled', self.__toggle_callback, 'shred')
        cb_shred.set_tooltip_text(
            _("Overwriting is ineffective on some file systems and with certain BleachBit operations. Overwriting is significantly slower."))
        vbox.pack_start(cb_shred, False)
        cb_start = gtk.CheckButton(_("Start BleachBit with computer"))
        cb_start.set_active(options.get('auto_start'))
        cb_start.connect('toggled', self.__toggle_callback, 'auto_start')
        vbox.pack_start(cb_start, False)
        # Close the application after cleaning is complete.
        cb_exit = gtk.CheckButton(_("Exit after cleaning"))
        cb_exit.set_active(options.get('exit_done'))
        cb_exit.connect('toggled', self.__toggle_callback, 'exit_done')
        vbox.pack_start(cb_exit, False)
        # Disable delete confirmation message.
        cb_popup = gtk.CheckButton(_("Confirm before delete"))
        cb_popup.set_active(options.get('delete_confirmation'))
        cb_popup.connect(
            'toggled', self.__toggle_callback, 'delete_confirmation')
        vbox.pack_start(cb_popup, False)
        return vbox
def __drives_page(self):
"""Return widget containing the drives page"""
def add_drive_cb(button):
"""Callback for adding a drive"""
title = _("Choose a folder")
pathname = GuiBasic.browse_folder(self.parent, title,
multiple=False, stock_button=gtk.STOCK_ADD)
if pathname:
liststore.append([pathname])
pathnames.append(pathname)
options.set_list('shred_drives', pathnames)
def remove_drive_cb(button):
"""Callback for removing a drive"""
treeselection = treeview.get_selection()
(model, _iter) = treeselection.get_selected()
if None == _iter:
# nothing selected
return
pathname = model[_iter][0]
liststore.remove(_iter)
pathnames.remove(pathname)
options.set_list('shred_drives', pathnames)
vbox = gtk.VBox()
# TRANSLATORS: 'free' means 'unallocated'
notice = gtk.Label(
_("Choose a writable folder for each drive for which to overwrite free space."))
notice.set_line_wrap(True)
vbox.pack_start(notice, False)
liststore = gtk.ListStore(str)
pathnames = options.get_list('shred_drives')
if pathnames:
pathnames = sorted(pathnames)
if not pathnames:
pathnames = []
for pathname in pathnames:
liststore.append([pathname])
treeview = gtk.TreeView(model=liststore)
crt = gtk.CellRendererText()
tvc = gtk.TreeViewColumn(None, crt, text=0)
treeview.append_column(tvc)
vbox.pack_start(treeview)
# TRANSLATORS: In the preferences dialog, this button adds a path to
# the list of paths
button_add = gtk.Button(_p('button', 'Add'))
button_add.connect("clicked", add_drive_cb)
# TRANSLATORS: In the preferences dialog, this button removes a path
# from the list of paths
button_remove = gtk.Button(_p('button', 'Remove'))
button_remove.connect("clicked", remove_drive_cb)
button_box = gtk.HButtonBox()
button_box.set_layout(gtk.BUTTONBOX_START)
button_box.pack_start(button_add)
button_box.pack_start(button_remove)
vbox.pack_start(button_box, False)
return vbox
def __languages_page(self):
"""Return widget containing the languages page"""
def preserve_toggled_cb(cell, path, liststore):
"""Callback for toggling the 'preserve' column"""
__iter = liststore.get_iter_from_string(path)
value = not liststore.get_value(__iter, 0)
liststore.set(__ |
'sort_order': self._sort_order,
})
base_qs.update(self._filters)
return update_qs(self.url, base_qs)
def sort(self, key, order='asc'):
if not order in ('asc', 'desc'):
raise ValueError("Order must be one of 'asc', 'desc'")
self._sort_key = key
self._sort_order = order
self._invalidate()
return self
def filter(self, **kwargs):
self._filters = kwargs
self._invalidate()
return self
@property
def pages(self):
if self._num_pages is None:
self._load_pagination_info()
return self._num_pages
@property
def count(self):
if self._num_items is None:
self._load_pagination_info()
return self._num_items
def page(self, index):
if not index in self._pages:
data = self.client._get(self._url_for_page(index))
self._pages[index] = [
self._transform(item) for item in data[self._list_key]
]
return self._pages[index]
def _transform(self, item):
return item
def __getitem__(self, index):
page_index = index / self.per_page + 1
offset = index % self.per_page
try:
page = self.page(page_index)
except HTTPError, e:
if e.status_code == 404:
raise IndexError(e.msg)
else:
raise
return page[offset]
    def __len__(self):
        # Delegates to `count`, which triggers a pagination-info fetch on
        # first use.
        return self.count
    def __iter__(self):
        # Pages are 1-indexed; xrange keeps this lazy (Python 2 module —
        # see the `except HTTPError, e` syntax above).
        for i in xrange(1, self.pages + 1):
            page = self.page(i)
            for item in page:
                yield item
class PaginatedList(PaginatedList.__mro__[1] if False else BasePaginatedResponse):
    """A paginated list whose items all map onto one model class."""
    def __init__(self, client, url, key, class_):
        super(PaginatedList, self).__init__(client, url)
        self.class_ = class_
        self._list_key = key
    def _transform(self, item):
        """Wrap each raw API dict in the configured model class."""
        return self.class_(self.client, item)
class Wantlist(PaginatedList):
    """A user's wantlist; supports adding and removing releases."""
    def add(self, release, notes=None, notes_public=None, rating=None):
        """PUT a release (model object or plain id) onto the wantlist."""
        release_id = release.id if isinstance(release, Release) else release
        payload = omit_none({
            'release_id': release_id,
            'notes': notes,
            'notes_public': notes_public,
            'rating': rating,
        })
        self.client._put(self.url + '/' + str(release_id), payload)
        self._invalidate()
    def remove(self, release):
        """DELETE a release (model object or plain id) from the wantlist."""
        release_id = release.id if isinstance(release, Release) else release
        self.client._delete(self.url + '/' + str(release_id))
        self._invalidate()
class OrderMessagesList(PaginatedList):
    """Messages attached to a marketplace order; supports posting new ones."""
    def add(self, message=None, status=None, email_buyer=True, email_seller=False):
        """POST a new message (and/or status change) to the order thread."""
        payload = omit_none({
            'message': message,
            'status': status,
            'email_buyer': email_buyer,
            'email_seller': email_seller,
        })
        self.client._post(self.url, payload)
        self._invalidate()
class MixedPaginatedList(BasePaginatedResponse):
    """A paginated list of objects identified by their type parameter."""
    def __init__(self, client, url, key):
        super(MixedPaginatedList, self).__init__(client, url)
        # Key in the response payload that holds the raw item list.
        self._list_key = key
    def _transform(self, item):
        # In some cases, we want to map the 'title' key we get back in search
        # results to 'name'. This way, you can repr() a page of search results
        # without making 50 requests.
        if item['type'] in ('label', 'artist'):
            item['name'] = item['title']
        # Dispatch on the item's declared type to build the right model.
        return CLASS_MAP[item['type']](self.client, item)
class Artist(PrimaryAPIObject):
    """An artist record; field descriptors map attributes to raw-dict keys."""
    id = SimpleField()
    name = SimpleField()
    real_name = SimpleField(key='realname')
    profile = SimpleField()
    data_quality = SimpleField()
    name_variations = SimpleField(key='namevariations')
    url = SimpleField('uri')
    urls = SimpleField()
    aliases = ListField('Artist')
    members = ListField('Artist')
    groups = ListField('Artist')
    def __init__(self, client, dict_):
        """Wrap a raw artist dict; requires dict_['id']."""
        super(Artist, self).__init__(client, dict_)
        # Recompute the canonical resource URL so sparse dicts can re-fetch.
        self.data['resource_url'] = client._base_url + '/artists/%d' % dict_['id']
    @property
    def releases(self):
        """Paginated releases for this artist (mixed item types)."""
        return MixedPaginatedList(self.client, self.fetch('releases_url'), 'releases')
    def __repr__(self):
        return '<Artist %r %r>' % (self.id, self.name)
class Release(PrimaryAPIObject):
    """A release record; field descriptors map attributes to raw-dict keys."""
    id = SimpleField()
    title = SimpleField()
    year = SimpleField()
    thumb = SimpleField()
    data_quality = SimpleField()
    status = SimpleField()
    genres = SimpleField()
    country = SimpleField()
    notes = SimpleField()
    formats = SimpleField()
    url = SimpleField('uri')
    videos = ListField('Video')
    tracklist = ListField('Track')
    artists = ListField('Artist')
    credits = ListField('Artist', key='extraartists')
    labels = ListField('Label')
    companies = ListField('Label')
    def __init__(self, client, dict_):
        """Wrap a raw release dict; requires dict_['id']."""
        super(Release, self).__init__(client, dict_)
        # Recompute the canonical resource URL so sparse dicts can re-fetch.
        self.data['resource_url'] = client._base_url + '/releases/%d' % dict_['id']
    @property
    def master(self):
        """The Master this release belongs to, or None if standalone."""
        master_id = self.fetch('master_id')
        if master_id:
            return Master(self.client, {'id': master_id})
        else:
            return None
    def __repr__(self):
        return '<Release %r %r>' % (self.id, self.title)
class Master(PrimaryAPIObject):
    """A master release; field descriptors map attributes to raw-dict keys."""
    id = SimpleField()
    title = SimpleField()
    data_quality = SimpleField()
    styles = SimpleField()
    genres = SimpleField()
    images = SimpleField()
    url = SimpleField('uri')
    videos = ListField('Video')
    tracklist = ListField('Track')
    main_release = ObjectField('Release', as_id=True)
    versions = ObjectCollection('Release')
    def __init__(self, client, dict_):
        """Wrap a raw master dict; requires dict_['id']."""
        super(Master, self).__init__(client, dict_)
        # Recompute the canonical resource URL so sparse dicts can re-fetch.
        self.data['resource_url'] = client._base_url + '/masters/%d' % dict_['id']
    def __repr__(self):
        return '<Master %r %r>' % (self.id, self.title)
class Label(PrimaryAPIObject):
    """A label record; field descriptors map attributes to raw-dict keys."""
    id = SimpleField()
    name = SimpleField()
    profile = SimpleField()
    urls = SimpleField()
    images = SimpleField()
    contact_info = SimpleField()
    data_quality = SimpleField()
    url = SimpleField('uri')
    sublabels = ListField('Label')
    parent_label = ObjectField('Label', optional=True)
    releases = ObjectCollection('Release')
    def __init__(self, client, dict_):
        """Wrap a raw label dict; requires dict_['id']."""
        super(Label, self).__init__(client, dict_)
        # Recompute the canonical resource URL so sparse dicts can re-fetch.
        self.data['resource_url'] = client._base_url + '/labels/%d' % dict_['id']
    def __repr__(self):
        return '<Label %r %r>' % (self.id, self.name)
class User(PrimaryAPIObject):
    """A user account; field descriptors map attributes to raw-dict keys."""
    id = SimpleField()
    username = SimpleField()
    releases_contributed = SimpleField()
    num_collection = SimpleField()
    num_wantlist = SimpleField()
    num_lists = SimpleField()
    rank = SimpleField()
    rating_avg = SimpleField()
    url = SimpleField('uri')
    # NOTE(review): writable=True presumably marks fields that may be
    # assigned and persisted — confirm against SimpleField's definition.
    name = SimpleField(writable=True)
    profile = SimpleField(writable=True)
    location = SimpleField(writable=True)
    home_page = SimpleField(writable=True)
    registered = SimpleField(transform=parse_timestamp)
    inventory = ObjectCollection('Listing', key='listings', url_key='inventory_url')
    wantlist = ObjectCollection('WantlistItem', key='wants', url_key='wantlist_url', list_class=Wantlist)
    def __init__(self, client, dict_):
        """Wrap a raw user dict; requires dict_['username']."""
        super(User, self).__init__(client, dict_)
        # Users are addressed by username, not numeric id.
        self.data['resource_url'] = client._base_url + '/users/%s' % dict_['username']
    @property
    def orders(self):
        """Paginated marketplace orders for the authenticated user."""
        return PaginatedList(self.client, self.client._base_url + '/marketplace/orders', 'orders', Order)
    @property
    def collection_folders(self):
        """Collection folders, fetched eagerly as a plain list."""
        resp = self.client._get(self.fetch('collection_folders_url'))
        return [CollectionFolder(self.client, d) for d in resp['folders']]
    def __repr__(self):
        return '<User %r %r>' % (self.id, self.username)
class WantlistItem(PrimaryAPIObject):
id = SimpleField()
rating = SimpleFie |
#!/usr/bin/env python
"""
The LibVMI Library is an introspection library that simplifies access to
memory in a target virtual machine or in a file containing a dump of
a system's physical memory. LibVMI is based on the XenAccess Library.
Copyright 2011 Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government
retains certain rights in this software.
Author: Bryan D. Payne (bdpayne@acm.org)
This file is part of LibVMI.
LibVMI is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
LibVMI is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with LibVMI. If not, see <http://www.gnu.org/licenses/>.
"""
import pyvmi
import sys
def get_processes(vmi):
    # Yield (pid, name) pairs by walking the kernel's process list via
    # LibVMI, starting from PsInitialSystemProcess.
    tasks_offset = vmi.get_offset("win_tasks")
    # Make the name/pid offsets relative to the list-entry pointer we hold.
    name_offset = vmi.get_offset("win_pname") - tasks_offset
    pid_offset = vmi.get_offset("win_pid") - tasks_offset
    list_head = vmi.read_addr_ksym("PsInitialSystemProcess")
    next_process = vmi.read_addr_va(list_head + tasks_offset, 0)
    # Remember the first entry so we stop after one full lap of the ring.
    list_head = next_process
    while True:
        procname = vmi.read_str_va(next_process + name_offset, 0)
        pid = vmi.read_32_va(next_process + pid_offset, 0)
        next_process = vmi.read_addr_va(next_process, 0)
        # Skip entries whose pid field reads implausibly large.
        if (pid < 1<<16):
            yield pid, procname
        if (list_head == next_process):
            break
def main(argv):
    # Connect to the VM named in argv[1] and print its process list.
    # (Python 2 module — note the print statement below.)
    vmi = pyvmi.init(argv[1], "complete")
    for pid, procname in get_processes(vmi):
        print "[%5d] %s" % (pid, procname)
|
# Function to st | ack raster bands.
import numpy as np
from osgeo import gdal
def stack_bands(filenames):
"""Returns a 3D array containing all band data from all files."""
bands = []
for fn in filenames:
ds = gdal.Open(fn)
for i in range(1, ds.RasterCount + 1):
bands.append(ds.GetRasterBand(i).ReadAsArray())
return np.dst | ack(bands)
|
n[(idx*batch):(idx*batch+batch)])
if args.cuda:
feature, target = feature.cuda(), target.cuda()
logit = model(feature)
loss = F.cross_entropy(logit, target)
model.zero_grad()
loss.backward()
for layer_no, param in enumerate(model.parameters()):
if args.static and layer_no == 0: # fixed embedding layer cannot update
continue
# by default I assume you train the models using GPU
noise = torch.cuda.FloatTensor(param.data.size()).normal_() * np.sqrt(epsilon / args.t)
#noise = torch.cuda.FloatTensor(param.data.size()).normal_() * set_scale[layer_no]
parameters[layer_no].data += (- epsilon / 2 * param.grad + noise)
corrects += (torch.max(logit, 1)[1].view(target.size()).data == target.data).sum().item()
accuracy = 100.0 * corrects / batch / (idx + 1)
sys.stdout.write('\rEpoch[{}] Batch[{}] - loss: {:.4f} acc: {:.2f}%({}/{}) tempreture: {}'.format(
epoch, idx, loss.item(), accuracy, corrects, batch * (idx + 1), int(args.t)))
args.t = args.t + 1 # annealing
if epoch % 5 != 0:
continue
'''
try:
set_scale = [parameter.grad.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
except:
set_scale = [parameter.data.std().item() for parameter in model.parameters()]
set_scale = [scale / max(set_scale) for scale in set_scale] # normalize
'''
save(model, args.save_dir, epoch)
print()
eval(X_valid, y_valid, model, 'Validation', args)
eval(X_test, y_test, model, 'Testing ', args)
def eval(X, y, model, term, args):
    """Evaluate `model` on (X, y) and print loss/accuracy statistics.

    NOTE: shadows the builtin `eval` — kept for caller compatibility.
    Also reports accuracy restricted to confident predictions
    (probability further than 0.2 / 0.4 from 0.5).
    """
    model.eval()
    corrects, TP, avg_loss = 0, 0, 0
    # total_part starts at 1e-16 to avoid division by zero when no
    # prediction clears the confidence threshold.
    correct_part, total_part = {0.2:0, 0.4:0}, {0.2:1e-16, 0.4:1e-16}
    batch = args.batch_size
    for idx in range(int(X.shape[0]/batch) + 1):
        feature = torch.LongTensor(X[(idx*batch):(idx*batch+batch),])
        target = torch.LongTensor(y[(idx*batch):(idx*batch+batch)])
        if args.cuda:
            feature, target = feature.cuda(), target.cuda()
        logit = model(feature)
        loss = F.cross_entropy(logit, target, size_average=False)
        avg_loss += loss.data.item()
        # Softmax probability of the positive class (2-class logits).
        predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
        for xnum in range(1, 3):
            thres = round(0.2 * xnum, 1)
            idx_thres = (predictor > 0.5 + thres) + (predictor < 0.5 - thres)
            correct_part[thres] += (torch.max(logit, 1)[1][idx_thres] == target.data[idx_thres]).sum().item()
            total_part[thres] += idx_thres.sum().item()
        corrects += (torch.max(logit, 1)[1] == target.data).sum().item()
        # TP: predicted 1 AND correct (both indicator terms equal 1).
        TP += (((torch.max(logit, 1)[1] == target.data).int() + (torch.max(logit, 1)[1]).int()) == 2).sum().item()
    size = y.shape[0]
    avg_loss /= size
    accuracy = 100.0 * corrects / size
    # TP, TN: True Positive/True Negative
    print(' {} - loss: {:.4f} acc: {:.2f}%({}/{}) {:.2f}%({}/{}) {:.2f}%({}/{}) TP/TN: ({}/{}) \n'.format(term,
        avg_loss, accuracy, corrects, size, 100.0 * correct_part[0.2] / total_part[0.2], correct_part[0.2], int(total_part[0.2]),
        100.0 * correct_part[0.4] / total_part[0.4], correct_part[0.4], int(total_part[0.4]), TP, corrects - TP))
    return accuracy
def bma_eval(X, y, mymodels, term, args):
    """Evaluate an ensemble of models on (X, y), averaging the metrics.

    Bayesian model averaging: per-model counts are divided by the number
    of models so the printed figures are ensemble means.
    """
    corrects, TP, avg_loss = 0, 0, 0
    # 1e-16 avoids division by zero when no confident prediction occurs.
    correct_part, total_part = {0.2:0, 0.4:0}, {0.2:1e-16,0.4:1e-16}
    batch = args.batch_size
    for model in mymodels:
        model.eval()
        for idx in range(int(X.shape[0]/batch) + 1):
            feature = torch.LongTensor(X[(idx*batch):(idx*batch+batch),])
            target = torch.LongTensor(y[(idx*batch):(idx*batch+batch)])
            if args.cuda:
                feature, target = feature.cuda(), target.cuda()
            logit = model(feature)
            loss = F.cross_entropy(logit, target, size_average=False)
            avg_loss += loss.data.item() / (len(mymodels) * 1.0)
            # Softmax probability of the positive class (2-class logits).
            predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
            for xnum in range(1, 3):
                thres = round(0.2 * xnum, 1)
                idx_thres = (predictor > 0.5 + thres) + (predictor < 0.5 - thres)
                correct_part[thres] += (torch.max(logit, 1)[1][idx_thres] == target.data[idx_thres]).sum().item() / (len(mymodels) * 1.0)
                total_part[thres] += idx_thres.sum().item() / (len(mymodels) * 1.0)
            corrects += (torch.max(logit, 1)[1] == target.data).sum().item() / (len(mymodels) * 1.0)
            # TP: predicted 1 AND correct (both indicator terms equal 1).
            TP += (((torch.max(logit, 1)[1] == target.data).int() + (torch.max(logit, 1)[1]).int()) == 2).sum().item()
    size = y.shape[0]
    avg_loss /= size
    accuracy = 100.0 * corrects / size
    TP = TP * 1.0 / (len(mymodels) * 1.0)
    print(' {} - loss: {:.4f} acc: {:.2f}%({}/{}) {:.2f}%({}/{}) {:.2f}%({}/{}) TP/TN: ({}/{}) \n'.format(term,
        avg_loss, accuracy, corrects, size, 100.0 * correct_part[0.2] / total_part[0.2], correct_part[0.2], int(total_part[0.2]),
        100.0 * correct_part[0.4] / total_part[0.4], correct_part[0.4], int(total_part[0.4]), TP, corrects - TP))
    return accuracy
def predictor_preprocess(cnn, args):
    """Load saved thinning samples (Bayesian CNN weights), the vocabulary,
    and the stop-word list used by the predictor.

    Returns (mymodels, word2idx, stopWords).
    """
    mymodels = []
    for num, each_model in enumerate(os.listdir(args.save_dir)):
        # os.path.join works whether or not save_dir ends with a slash;
        # plain string concatenation silently built bad paths without one.
        model_path = os.path.join(args.save_dir, each_model)
        print(model_path)
        if args.cuda:
            cnn.load_state_dict(torch.load(model_path))
        else:
            cnn.load_state_dict(torch.load(model_path, map_location=lambda storage, loc: storage))
        mymodels.append(copy.deepcopy(cnn))
        if num > 30: # in case memory overloads
            break
    with open('./input/word2idx', 'r') as file:
        word2idx = json.load(file)
    stopWords = set()
    with open('./input/stopWords') as file:
        for word in file:
            stopWords.add(word.strip())
    return(mymodels, word2idx, stopWords)
def predict(sentence, mymodels, word2idx, stopWords, args):
    """Return a trading signal for one news sentence.

    Tokenizes, maps tokens to vocabulary ids, averages the positive-class
    probability over the model ensemble, and converts it via signals().
    """
    tokens = tokenize_news(sentence, stopWords)
    tokens = [word2idx[t] if t in word2idx else word2idx['UNKNOWN'] for t in tokens]
    if len(tokens) < 5 or tokens == [word2idx['UNKNOWN']] * len(tokens): # tokens cannot be too short or unknown
        signal = 'Unknown'
    else:
        feature = torch.LongTensor([tokens])
        logits = []
        for model in mymodels:
            model.eval()
            if args.cuda:
                feature = feature.cuda()
            logit = model(feature)
            # Softmax probability of the positive class (2-class logits).
            predictor = torch.exp(logit[:, 1]) / (torch.exp(logit[:, 0]) + torch.exp(logit[:, 1]))
            logits.append(predictor.item())
        signal = signals(np.mean(logits))
    return(signal)
def daily_predict(cnn, args):
    """Annotate one day's news CSV with a per-row trading signal.

    Reads ./input/news/<yyyy>/news_<date>.csv, recomputes the signal
    column for each well-formed row, writes to a '_bak' sibling, then
    atomically replaces the original file.
    """
    mymodels, word2idx, stopWords = predictor_preprocess(cnn, args)
    output = './input/news/' + args.date[:4] + '/news_' + args.date + '.csv'
    # `with` guarantees both handles close even if predict() raises.
    with open(output + '_bak', 'w') as fout, open(output) as f:
        for num, line in enumerate(f):
            line = line.strip().split(',')
            if len(line) == 6:
                ticker, name, day, headline, body, newsType = line
            elif len(line) == 7:
                ticker, name, day, headline, body, newsType, signal = line
            else:
                continue
            #if newsType != 'topStory': # newsType: [topStory, normal]
            # signal = 'Unknown'
            content = headline + ' ' + body
            signal = predict(content, mymodels, word2idx, stopWords, args)
            fout.write(','.join([ticker, name, day, headline, body, newsType, signal]) + '\n')
    print('change file name')
    print('mv ' + output + '_bak ' + output)
    # os.replace is atomic and portable, unlike shelling out to `mv`.
    os.replace(output + '_bak', output)
def save(model, save_dir, steps):
if not os.path.isdi |
the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This source code is based ./auth_token.py and ./ec2_token.py.
# See them for their copyright.
"""
S3 TOKEN MIDDLEWARE
This WSGI component:
* Get a request from the swift3 middleware with an S3 Authorization
access key.
* Validate s3 token in Keystone.
* Transform the account name to AUTH_%(tenant_name).
"""
import logging
from oslo.serialization import jsonutils
import requests
import six
from six.moves import urllib
import webob
PROTOCOL_NAME = 'S3 Token Authentication'
# TODO(kun): remove it after oslo merge this.
def split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
    """Validate and split the given HTTP request path.

    **Examples**::

        ['a'] = split_path('/a')
        ['a', None] = split_path('/a', 1, 2)
        ['a', 'c'] = split_path('/a/c', 1, 2)
        ['a', 'c', 'o/r'] = split_path('/a/c/o/r', 1, 3, True)

    :param path: HTTP Request path to be split
    :param minsegs: Minimum number of segments to be extracted
    :param maxsegs: Maximum number of segments to be extracted
    :param rest_with_last: If True, trailing data will be returned as part
                           of last segment. If False, and there is
                           trailing data, raises ValueError.
    :returns: list of segments with a length of maxsegs (non-existent
              segments will return as None)
    :raises: ValueError if given an invalid path
    """
    if not maxsegs:
        maxsegs = minsegs
    if minsegs > maxsegs:
        raise ValueError('minsegs > maxsegs: %d > %d' % (minsegs, maxsegs))
    if rest_with_last:
        parts = path.split('/', maxsegs)
        minsegs += 1
        maxsegs += 1
        count = len(parts)
        # Path must be absolute, yield a segment count in range, and have
        # no empty segment among the required ones.
        valid = (not parts[0] and minsegs <= count <= maxsegs
                 and '' not in parts[1:minsegs])
        if not valid:
            raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
    else:
        minsegs += 1
        maxsegs += 1
        parts = path.split('/', maxsegs)
        count = len(parts)
        # Same checks, plus: any extra trailing piece must be empty.
        valid = (not parts[0] and minsegs <= count <= maxsegs + 1
                 and '' not in parts[1:minsegs]
                 and not (count == maxsegs + 1 and parts[maxsegs]))
        if not valid:
            raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
    parts = parts[1:maxsegs]
    parts.extend([None] * (maxsegs - 1 - len(parts)))
    return parts
class ServiceError(Exception):
    """Raised by _json_request when Keystone cannot be reached or denies
    the credentials; carries a ready-to-return error response."""
    pass
class S3Token(object):
"""Auth Middleware that handles S3 authenticating client calls."""
    def __init__(self, app, conf):
        """Common initialization code.

        :param app: downstream WSGI application to wrap
        :param conf: dict of middleware options (auth_host, auth_port,
            auth_protocol, reseller_prefix, SSL settings, ...)
        """
        self.app = app
        self.logger = logging.getLogger(conf.get('log_name', __name__))
        self.logger.debug('Starting the %s component', PROTOCOL_NAME)
        self.logger.warning(
            'This middleware module is deprecated as of v0.11.0 in favor of '
            'keystonemiddleware.s3_token - please update your WSGI pipeline '
            'to reference the new middleware package.')
        self.reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
        # where to find the auth service (we use this to validate tokens)
        auth_host = conf.get('auth_host')
        auth_port = int(conf.get('auth_port', 35357))
        auth_protocol = conf.get('auth_protocol', 'https')
        self.request_uri = '%s://%s:%s' % (auth_protocol, auth_host, auth_port)
        # SSL
        # self.verify feeds requests' `verify` parameter: False disables
        # verification, a (cert, key) tuple or cert path enables client
        # certs, None means library default.
        insecure = conf.get('insecure', False)
        cert_file = conf.get('certfile')
        key_file = conf.get('keyfile')
        if insecure:
            self.verify = False
        elif cert_file and key_file:
            self.verify = (cert_file, key_file)
        elif cert_file:
            self.verify = cert_file
        else:
            self.verify = None
def deny_request(self, code):
error_table = {
'AccessDenied': (401, 'Access denied'),
'InvalidURI': (400, 'Could not parse the specified URI'),
}
resp = webob.Response(content_type='text/xml')
resp.status = error_table[code][0]
error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
'<Error>\r\n <Code>%s</Code>\r\n '
'<Message>%s</Message>\r\n</Error>\r\n' %
(code, error_table[code][1]))
if six.PY3:
error_msg = error_msg.encode()
resp.body = error_msg
return resp
    def _json_request(self, creds_json):
        """POST the S3 credentials to Keystone's s3tokens endpoint.

        :param creds_json: JSON-encoded credentials payload
        :returns: the successful requests.Response
        :raises ServiceError: wrapping a deny_request() response on
            connection failure or a non-2xx Keystone reply
        """
        headers = {'Content-Type': 'application/json'}
        try:
            response = requests.post('%s/v2.0/s3tokens' % self.request_uri,
                                     headers=headers, data=creds_json,
                                     verify=self.verify)
        except requests.exceptions.RequestException as e:
            self.logger.info('HTTP connection exception: %s', e)
            resp = self.deny_request('InvalidURI')
            raise ServiceError(resp)
        if response.status_code < 200 or response.status_code >= 300:
            self.logger.debug('Keystone reply error: status=%s reason=%s',
                              response.status_code, response.reason)
            resp = self.deny_request('AccessDenied')
            raise ServiceError(resp)
        return response
def __call__(self, environ, start_response):
"""Handle incoming request. authenticate and send downstream."""
req = webob.Request(environ)
self.logger.debug('Calling S3Token middleware.')
try:
parts = split_path(req.path, 1, 4, True)
version, account, container, obj = parts
except ValueError:
msg = 'Not a path query, skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
# Read request signature and access id.
if 'Authorization' not in req.headers:
msg = 'No Authorization header. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
token = req.headers.get('X-Auth-Token',
req.headers.get('X-Storage-Token'))
if not token:
msg = 'You did not specify an auth or a storage token. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
auth_header = req.headers['Authorization']
try:
access, signature = auth_header.split(' ')[-1].rsplit(':', 1)
except ValueError:
msg = 'You have an invalid Authorization header: %s'
self.logger.debug(msg, auth_header)
return self.deny_request('InvalidURI')(environ, start_response)
# NOTE(chmou): This is to handle the special case with nova
# when we have the option s3_affix_tenant. We will force it to
# connect to another account than the one
# authenticated. Before people start getting worried about
# security, I should point that we are connecting with
# username/token specified by the user but instead of
# connecting to its own account we will force it to go to an
# another account. In a normal scenario if that user don't
# have the reseller right it will just fail but since the
# reseller account can connect to every account it is allowed
# by the swift_auth middleware.
force_tenant = None
if ':' in access:
access, force_tenant = access.split(':')
# Authenticate request.
creds = {'credentials': {'access': access,
'token': token,
'signature': signature}}
creds_json = jsonutils.dumps(creds)
self.logger.debug('Connecting to Keystone sending this JSON: %s',
creds_json |
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_oam_meg_ref import TapiOamMegRef # noqa: F401,E501
from tapi_server import util
class TapiOamMipRef(Model):
    """Reference to a MIP: a (meg-uuid, mip-local-id) pair.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech).  Do not edit the class manually.
    """
    def __init__(self, meg_uuid=None, mip_local_id=None):  # noqa: E501
        """TapiOamMipRef - a model defined in OpenAPI

        :param meg_uuid: The meg_uuid of this TapiOamMipRef.  # noqa: E501
        :type meg_uuid: str
        :param mip_local_id: The mip_local_id of this TapiOamMipRef.  # noqa: E501
        :type mip_local_id: str
        """
        self.openapi_types = {'meg_uuid': str, 'mip_local_id': str}
        self.attribute_map = {'meg_uuid': 'meg-uuid',
                              'mip_local_id': 'mip-local-id'}
        self._meg_uuid = meg_uuid
        self._mip_local_id = mip_local_id
    @classmethod
    def from_dict(cls, dikt) -> 'TapiOamMipRef':
        """Deserialize a plain dict into a TapiOamMipRef.

        :param dikt: A dict.
        :type: dict
        :rtype: TapiOamMipRef
        """
        return util.deserialize_model(dikt, cls)
    @property
    def meg_uuid(self):
        """uuid of the referenced MEG.

        :rtype: str
        """
        return self._meg_uuid
    @meg_uuid.setter
    def meg_uuid(self, meg_uuid):
        """Set the uuid of the referenced MEG."""
        self._meg_uuid = meg_uuid
    @property
    def mip_local_id(self):
        """id of the MIP, local to its MEG.

        :rtype: str
        """
        return self._mip_local_id
    @mip_local_id.setter
    def mip_local_id(self, mip_local_id):
        """Set the MIP's MEG-local id."""
        self._mip_local_id = mip_local_id
#!/usr/bin/python
import yaml
import pprint
import os
import pdb
import re
import cgi
import codecs
import sys
import cgitb
cgitb.enable()
# Abort early: without an explicit IO encoding, writing UTF-8 to a piped
# stdout would fail mid-run (Python 2 script — note the print statement).
if (sys.stdout.encoding is None):
    print >> sys.stderr, "please set python env PYTHONIOENCODING=UTF-8, example: export PYTHONIOENCODING=UTF-8, when write to stdout."
    exit(1)
# Load the grammar specification (one mapping entry per production).
specsFile = open('../source/syntax.yml')
specs = yaml.safe_load(specsFile)
specsFile.close()
# Heuristic regex: does a string already contain an HTML tag?
htmlTest = re.compile("(?i)<\/?\w+((\s+\w+(\s*=\s*(?:\".*?\"|'.*?'|[^'\">\s]+))?)+\s*|\s*)\/?>")
def paragraphy(text):
    """Wrap plain text in an indented <p> element; pass HTML through as-is."""
    if htmlTest.match(text):
        return text
    return "\t\t<p>" + cgi.escape(text).strip() + "\n\t\t</p>\n"
def loadExample(example):
    """Render one example entry as an HTML code box.

    `example` is either a bare (unicode) string, or a dict with an
    "example" key and an optional "annotation" key rendered above the box.
    Raises ValueError for any other shape.
    """
    # "annotation" in <str> is a substring test in Python 2, so string
    # examples containing that word would also take this branch.
    if ("annotation" in example):
        result = paragraphy(example["annotation"])
    else:
        result = ""
    result = result + "\t\t<div class=\"code2\">\n\t\t\t<p>Example</p>\n\t\t\t<pre>\n"
    # Python 2: examples parsed from YAML may be str or unicode.
    if type(example) is str or type(example) is unicode:
        result = result + cgi.escape(example).strip()
    elif type(example) is dict:
        if ("example" in example):
            result += cgi.escape(example["example"]).strip()
        else:
            raise ValueError("every entry must contain an example element")
    else:
        raise ValueError("unrecognized type for example data")
    result = result + "\n</pre>\n\t\t</div>"
    return result
# Static page shell for the syntax listing page.
# NOTE(review): indexPageContents appears unused in this file — confirm
# whether another page includes it or whether it is dead.
indexPageContents = "<meta charset='utf-8'/>\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=0.6\">\n<html>\n\t<head>\n\t\t<title>Syntax Listing - Plange</title>\n\t\t<link rel=StyleSheet href='../css/general.css' type='text/css' />\n\t</head>\n\t<body>\n\t\t<?php require('../header.php') ?>\n\n\n\t\t<p>This page is generated from the <a href='/source/syntax.yml'>syntax specification</a>. Tags, identities, and all {IC} (optional whitespace and comment regions) are filtered from the syntax display to improve readability. See the specification for the full grammar specification. The root production of the grammar is \"STATEMENT_SCOPE\".</p>\n\t\t<h2>Subpage Listing</h2>\n\t\t<table>\n"
# All production names, sorted for stable page output (Python 2: keys()
# returns a list, so in-place sort works).
names = specs.keys()
names.sort()
# One word-boundary regex per production name, used for cross-linking.
regexs = {name: re.compile("\\b" + name + "\\b") for name in names}
# Matches tags ($), identities (%ident) and {IC} regions stripped from display.
stripRegex = re.compile("(\\$)|(%[_\w0-9]+)|(\\{IC\\})")
# Tighten the space just inside each kind of bracket.
openParenSpaceRegex = re.compile("\\( ")
openBraceSpaceRegex = re.compile("\\{ ")
openBracketSpaceREgex = re.compile("\\[ ")
spaceCloseParenRegex = re.compile(" \\)")
spaceCloseBraceRegex = re.compile(" \\}")
spaceCloseBracketRegex = re.compile(" \\]")
def simplifySyntaxString(syntax):
    """HTML-escape a grammar production and strip display noise.

    Removes tags/identities/{IC} markers and tightens the whitespace just
    inside parentheses, braces and square brackets so productions read
    naturally on the generated pages.
    """
    syntax = cgi.escape(syntax).strip()
    syntax = stripRegex.sub("", syntax)
    syntax = openParenSpaceRegex.sub("(", syntax)
    syntax = openBraceSpaceRegex.sub("{", syntax)
    syntax = openBracketSpaceREgex.sub("[", syntax)
    syntax = spaceCloseParenRegex.sub(")", syntax)
    syntax = spaceCloseBraceRegex.sub("}", syntax)
    # Fixed: the original applied this substitution twice in a row; the
    # second pass was redundant (the substitution is idempotent).
    syntax = spaceCloseBracketRegex.sub("]", syntax)
    return syntax
def simplifySyntaxStringAddAnchors(syntax):
    """Simplify a production and hyperlink every referenced production name
    to its anchor on the one-page syntax listing.

    NOTE(review): relies on the module-global `name` (the production being
    rendered) to avoid linking a production to itself; only valid when
    called from the rendering loops at the bottom of this script.
    """
    syntax = simplifySyntaxString(syntax)
    for refName in names:
        if refName == name:
            continue
        syntax = regexs[refName].sub("<a href=\"/documentation/syntax.php#" + refName + "\">" + refName + "</a>", syntax)
    return syntax
def simplifySyntaxStringAddLinks(syntax):
    """Simplify a production and hyperlink every referenced production name
    to that production's own detail page (?name=...).

    NOTE(review): like simplifySyntaxStringAddAnchors, this reads the
    module-global `name` to skip self-links; see the loops below.
    """
    syntax = simplifySyntaxString(syntax)
    for refName in names:
        if refName == name:
            continue
        syntax = regexs[refName].sub("<a href=\"/documentation/syntax.php?name=" + refName + "\">" + refName + "</a>", syntax)
    return syntax
# Entry point: with no CLI argument, emit the full syntax listing table;
# with a production name argument, emit that production's detail page.
if len(sys.argv) == 1: # output the table for the syntax listing page
    print "\n\t\t<table>\n"
    for name in names:
        details = specs[name]
        # simplifySyntaxStringAddAnchors reads the loop variable `name`.
        syntaxString = simplifySyntaxStringAddAnchors(details["syntax"])
        print "\t\t\t<tr>\n"
        print "\t\t\t\t<td><a id=\"" + name + "\" href=\"/documentation/syntax.php?name=" + name + "\">" + name + "</a></td>\n"
        if "doc" in details:
            print "\t\t\t\t<td>" + details["doc"].strip() + "</td>\n"
        else:
            print "\t\t\t\t<td>no doc string</td>\n"
        print "\t\t\t\t<td>" + syntaxString + "</td>\n"
        print "\t\t\t</tr>\n"
    print "\t\t</table>\n"
else:
    # Detail page for a single production named on the command line.
    name = sys.argv[1]
    details = specs[name]
    if "doc" in details:
        print "\t\t<p>" + details["doc"].strip() + "</p>\n\n"
    if "syntax" in details:
        syntaxString = simplifySyntaxStringAddLinks(details["syntax"])
        title = "syntax"
        if "assoc" in details:
            title = title + " (associativity: " + details["assoc"] + ")"
        print "\t\t<div class=\"syntax\">\n\t\t\t<p>" + title + "</p>\n\t\t\t<div>" + syntaxString + "</div>\n\t\t</div>\n"
    else:
        raise ValueError("every entry must contain a syntax element")
    # A production may carry a single example and/or a list of examples.
    if "example" in details:
        print loadExample(details["example"])
    if "examples" in details:
        for example in details["examples"]:
            print loadExample(example)
    if "notes" in details:
        print "\t\t<h2>Notes</h2>\n\t\t" + paragraphy(details["notes"])
    if "see" in details:
        print "\t\t<p>See:"
        for i in details["see"]:
            print " <a href=\"syntax.php?name=" + i + "\">" + i + "</a>"
        print "\n\t\t</p>\n"
|
en attribute on the object, so it
cannot be used on objects which do not allow new attributes to be added. So
this decorator must go *below* `@property`, `@classmethod`,
or `@staticmethod`:
```
class Example(object):
@property
@do_not_generate_docs
def x(self):
return self._x
```
Args:
obj: The object to hide from the generated docs.
Returns:
obj
"""
setattr(obj, _DO_NOT_DOC, None)
return obj
_DO_NOT_DOC_INHERITABLE = "_tf_docs_do_not_doc_inheritable"


def do_not_doc_inheritable(obj: T) -> T:
  """A decorator: Do not generate docs for this method.

  This version of the decorator is "inherited" by subclasses. No docs will be
  generated for the decorated method in any subclass. Even if the sub-class
  overrides the method.

  For example, to ensure that `method1` is **never documented** use this
  decorator on the base-class:

  ```
  class Parent(object):
    @do_not_doc_inheritable
    def method1(self):
      pass

    def method2(self):
      pass

  class Child(Parent):
    def method1(self):
      pass

    def method2(self):
      pass
  ```

  This will produce the following docs:

  ```
  /Parent.md
    # method2
  /Child.md
    # method2
  ```

  When generating docs for a class's attributes, the `__mro__` is searched and
  the attribute will be skipped if this decorator is detected on the attribute
  on any class in the `__mro__`.

  Note: This is implemented by adding a hidden attribute on the object, so it
  cannot be used on objects which do not allow new attributes to be added. So
  this decorator must go *below* `@property`, `@classmethod`,
  or `@staticmethod`:

  ```
  class Example(object):
    @property
    @do_not_doc_inheritable
    def x(self):
      return self._x
  ```

  Args:
    obj: The class-attribute to hide from the generated docs.

  Returns:
    obj
  """
  setattr(obj, _DO_NOT_DOC_INHERITABLE, None)
  return obj
_FOR_SUBCLASS_IMPLEMENTERS = "_tf_docs_tools_for_subclass_implementers"


def for_subclass_implementers(obj: T) -> T:
  """A decorator: Only generate docs for this method in the defining class.

  Also group this method's docs with and `@abstractmethod` in the class's docs.

  No docs will generated for this class attribute in sub-classes.

  The canonical use case for this is `tf.keras.layers.Layer.call`: It's a
  public method, essential for anyone implementing a subclass, but it should
  never be called directly.

  Works on method, or other class-attributes.

  When generating docs for a class's attributes, the `__mro__` is searched and
  the attribute will be skipped if this decorator is detected on the attribute
  on any **parent** class in the `__mro__`.

  For example:

  ```
  class Parent(object):
    @for_subclass_implementers
    def method1(self):
      pass

    def method2(self):
      pass

  class Child1(Parent):
    def method1(self):
      pass

    def method2(self):
      pass

  class Child2(Parent):
    def method1(self):
      pass

    def method2(self):
      pass
  ```

  This will produce the following docs:

  ```
  /Parent.md
    # method1
    # method2
  /Child1.md
    # method2
  /Child2.md
    # method2
  ```

  Note: This is implemented by adding a hidden attribute on the object, so it
  cannot be used on objects which do not allow new attributes to be added. So
  this decorator must go *below* `@property`, `@classmethod`,
  or `@staticmethod`:

  ```
  class Example(object):
    @property
    @for_subclass_implementers
    def x(self):
      return self._x
  ```

  Args:
    obj: The class-attribute to hide from the generated docs.

  Returns:
    obj
  """
  setattr(obj, _FOR_SUBCLASS_IMPLEMENTERS, None)
  return obj


# Public alias: the two names describe the same behavior.
do_not_doc_in_subclasses = for_subclass_implementers
_DOC_PRIVATE = "_tf_docs_doc_private"


def doc_private(obj: T) -> T:
  """A decorator: Generates docs for private methods/functions.

  For example:

  ```
  class Try:
    @doc_controls.doc_private
    def _private(self):
      ...
  ```

  As a rule of thumb, private (beginning with `_`) methods/functions are
  not documented. This decorator forces documentation to be generated for a
  private method/function anyway.

  Args:
    obj: The class-attribute to document in the generated docs.

  Returns:
    obj
  """
  setattr(obj, _DOC_PRIVATE, None)
  return obj
def should_doc_private(obj) -> bool:
  """Returns True if `obj` was marked with the `doc_private` decorator."""
  return hasattr(obj, _DOC_PRIVATE)
_DOC_IN_CURRENT_AND_SUBCLASSES = "_tf_docs_doc_in_current_and_subclasses"


def doc_in_current_and_subclasses(obj: T) -> T:
  """Overrides `do_not_doc_in_subclasses` decorator.

  If this decorator is set on a child class's method whose parent's method
  contains `do_not_doc_in_subclasses`, then that will be overridden and the
  child method will get documented. All classes inheriting from the child will
  also document that method.

  For example:

  ```
  class Parent:
    @do_not_doc_in_subclasses
    def method1(self):
      pass

    def method2(self):
      pass

  class Child1(Parent):
    @doc_in_current_and_subclasses
    def method1(self):
      pass

    def method2(self):
      pass

  class Child2(Parent):
    def method1(self):
      pass

    def method2(self):
      pass

  class Child11(Child1):
    pass
  ```

  This will produce the following docs:

  ```
  /Parent.md
    # method1
    # method2
  /Child1.md
    # method1
    # method2
  /Child2.md
    # method2
  /Child11.md
    # method1
    # method2
  ```

  Args:
    obj: The class-attribute to document in the generated docs.

  Returns:
    obj
  """
  setattr(obj, _DOC_IN_CURRENT_AND_SUBCLASSES, None)
  return obj
def should_skip(obj) -> bool:
  """Returns true if docs generation should be skipped for this object.

  Checks for the `do_not_generate_docs` or `do_not_doc_inheritable`
  decorators.

  Args:
    obj: The object to document, or skip.

  Returns:
    True if the object should be skipped
  """
  if isinstance(obj, type):
    # Classes: only an explicit mark set on _this_ class (not inherited
    # through the MRO) counts.
    return _DO_NOT_DOC in obj.__dict__

  if isinstance(obj, property):
    # Inspect the getter, not the property wrapper itself.
    obj = obj.fget

  return hasattr(obj, _DO_NOT_DOC) or hasattr(obj, _DO_NOT_DOC_INHERITABLE)
def _unwrap_func(obj):
# Unwrap fget if the object is a property or static method or classmethod.
if isinstance(obj, property):
return obj.fget
if isinstance(obj, (classmethod, staticmethod)):
return obj.__func__
return obj
def _cls_attr_has_tag(cls, attr, tag):
  """Check if a class attribute `attr` is decorated with `dec`."""
  # __dict__ lookup deliberately skips the __mro__, unlike getattr: only
  # attributes defined on `cls` itself are considered.
  candidate = cls.__dict__.get(attr, None)
  if candidate is None:
    return False

  candidate = _unwrap_func(candidate)

  if isinstance(candidate, type):
    # The attribute is itself a class: the tag must be set directly on that
    # class, not inherited from its parents.
    return tag in candidate.__dict__

  return hasattr(candidate, tag)
def should_skip_class_attr(cls, name):
"""Returns true if docs should be skipped for this class attribute.
Args:
cls: The class the attribute belongs to.
name: The name of the attribute.
Returns:
True if the attribute should be skipped.
"""
# Get the object with standard lookup, from the nearest
# defining parent.
try:
obj = getattr(cls, name)
except AttributeError:
# This can fail for a variety of reasons. Always skip if `getattr` fails.
return True
# Unwrap fget if the object is a property
obj = _unwrap_func(obj)
# Skip if the object is decorated with `do_not_generate_docs` or
# `do_not_doc_inheritable`
if should_skip(obj):
return True
classes = getattr(cls, "__mro__", [cls])
# Find where all the decorators turn docs on and off.
# All these lists contain `(level, skip)` pairs.
for_subclass_levels = [
# The [1:] is because `for_subclass_implementers` turns off docs
# one level down (and you don't want to consider level -1).
(i, True)
for (i, mro_cls) in enumerate(classes[1:])
if _cls_attr_has_tag(mro_cls, name, _FOR_SUBCLASS_IMPLEMENTERS)
]
not_below_leve |
#!/usr/bin/env pyth | on
import os
from setuptools import setup, find_packages
from structure import __version__
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to typ | e in the README file than to put a raw
# string in below ...
def read(fname):
    """Return the text contents of *fname*, resolved relative to this file.

    Used to feed README into long_description without hard-coding a path.

    Args:
        fname: file name relative to this setup.py's directory.

    Returns:
        The file's contents as a string.
    """
    # Use a context manager so the handle is closed deterministically
    # (the original leaked the open file object).
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# the setup
setup(
    name='structure',
    version=__version__,
    description='An demonstration of PyPi.',
    # long_description=read('README'),
    url='https://github.com/kengz/structure',
    author='kengz',
    author_email='kengzwl@gmail.com',
    license='MIT',
    keywords='example pypi tutorial',
    # Ship every package except docs/tests/env scaffolding.
    packages=find_packages(exclude=('docs', 'tests', 'env', 'index.py')),
    include_package_data=True,
    # No runtime dependencies; extras are placeholders for future use.
    install_requires=[
    ],
    extras_require={
        'dev': [],
        'docs': [],
        'testing': [],
    },
    classifiers=[],
)
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Local File system implementation for accessing files on disk."""
from __future__ import absolute_import
import glob
import os
import shutil
from apache_beam.io.filesystem import BeamIOError
from apache_beam.io.filesystem import CompressedFile
from apache_beam.io.filesystem import CompressionTypes
from apache_beam.io.filesystem import FileMetadata
from apache_beam.io.filesystem import FileSystem
from apache_beam.io.filesystem import MatchResult
class LocalFileSystem(FileSystem):
  """A Local ``FileSystem`` implementation for accessing files on disk.
  """

  def mkdirs(self, path):
    """Recursively create directories for the provided path.

    Args:
      path: string path of the directory structure that should be created

    Raises:
      IOError if leaf directory already exists.
    """
    try:
      os.makedirs(path)
    except OSError as err:
      # Normalize to IOError so callers deal with a single exception type.
      raise IOError(err)

  def match(self, patterns, limits=None):
    """Find all matching paths to the pattern provided.

    Args:
      patterns: list of string for the file path pattern to match against
      limits: list of maximum number of responses that need to be fetched

    Returns: list of ``MatchResult`` objects.

    Raises:
      ``BeamIOError`` if any of the pattern match operations fail
    """
    if limits is None:
      # No limits supplied: one "unlimited" entry per pattern.
      limits = [None] * len(patterns)
    else:
      err_msg = "Patterns and limits should be equal in length"
      assert len(patterns) == len(limits), err_msg

    def _match(pattern, limit):
      """Find all matching paths to the pattern provided.
      """
      files = glob.glob(pattern)
      # files[:None] is the whole list, so a None limit means "no cap".
      metadata = [FileMetadata(f, os.path.getsize(f)) for f in files[:limit]]
      return MatchResult(pattern, metadata)

    # Collect per-pattern failures and report them all in a single error.
    exceptions = {}
    result = []
    for pattern, limit in zip(patterns, limits):
      try:
        result.append(_match(pattern, limit))
      except Exception as e:  # pylint: disable=broad-except
        exceptions[pattern] = e

    if exceptions:
      raise BeamIOError("Match operation failed", exceptions)
    return result

  def _path_open(self, path, mode, mime_type='application/octet-stream',
                 compression_type=CompressionTypes.AUTO):
    """Helper functions to open a file in the provided mode.
    """
    # AUTO resolves the compression type from the path's extension.
    compression_type = FileSystem._get_compression_type(path, compression_type)
    raw_file = open(path, mode)
    if compression_type == CompressionTypes.UNCOMPRESSED:
      return raw_file
    else:
      return CompressedFile(raw_file, compression_type=compression_type)

  def create(self, path, mime_type='application/octet-stream',
             compression_type=CompressionTypes.AUTO):
    """Returns a write channel for the given file path.

    Args:
      path: string path of the file object to be written to the system
      mime_type: MIME type to specify the type of content in the file object
      compression_type: Type of compression to be used for this object

    Returns: file handle with a close function for the user to use
    """
    return self._path_open(path, 'wb', mime_type, compression_type)

  def open(self, path, mime_type='application/octet-stream',
           compression_type=CompressionTypes.AUTO):
    """Returns a read channel for the given file path.

    Args:
      path: string path of the file object to be written to the system
      mime_type: MIME type to specify the type of content in the file object
      compression_type: Type of compression to be used for this object

    Returns: file handle with a close function for the user to use
    """
    return self._path_open(path, 'rb', mime_type, compression_type)

  def copy(self, source_file_names, destination_file_names):
    """Recursively copy the file tree from the source to the destination

    Args:
      source_file_names: list of source file objects that needs to be copied
      destination_file_names: list of destination of the new object

    Raises:
      ``BeamIOError`` if any of the copy operations fail
    """
    err_msg = ("source_file_names and destination_file_names should "
               "be equal in length")
    assert len(source_file_names) == len(destination_file_names), err_msg

    def _copy_path(source, destination):
      """Recursively copy the file tree from the source to the destination
      """
      try:
        # Replace the destination outright: copytree requires the target
        # directory to not exist.
        if os.path.exists(destination):
          if os.path.isdir(destination):
            shutil.rmtree(destination)
          else:
            os.remove(destination)
        if os.path.isdir(source):
          shutil.copytree(source, destination)
        else:
          shutil.copy2(source, destination)
      except OSError as err:
        raise IOError(err)

    # Collect per-pair failures and report them all in a single error.
    exceptions = {}
    for source, destination in zip(source_file_names, destination_file_names):
      try:
        _copy_path(source, destination)
      except Exception as e:  # pylint: disable=broad-except
        exceptions[(source, destination)] = e

    if exceptions:
      raise BeamIOError("Copy operation failed", exceptions)

  def rename(self, source_file_names, destination_file_names):
    """Rename the files at the source list to the destination list.
    Source and destination lists should be of the same size.

    Args:
      source_file_names: List of file paths that need to be moved
      destination_file_names: List of destination_file_names for the files

    Raises:
      ``BeamIOError`` if any of the rename operations fail
    """
    err_msg = ("source_file_names and destination_file_names should "
               "be equal in length")
    assert len(source_file_names) == len(destination_file_names), err_msg

    def _rename_file(source, destination):
      """Rename a single file object"""
      try:
        os.rename(source, destination)
      except OSError as err:
        raise IOError(err)

    # Collect per-pair failures and report them all in a single error.
    exceptions = {}
    for source, destination in zip(source_file_names, destination_file_names):
      try:
        _rename_file(source, destination)
      except Exception as e:  # pylint: disable=broad-except
        exceptions[(source, destination)] = e

    if exceptions:
      raise BeamIOError("Rename operation failed", exceptions)

  def exists(self, path):
    """Check if the provided path exists on the FileSystem.

    Args:
      path: string path that needs to be checked.

    Returns: boolean flag indicating if path exists
    """
    return os.path.exists(path)

  def delete(self, paths):
    """Deletes files or directories at the provided paths.
    Directories will be deleted recursively.

    Args:
      paths: list of paths that give the file objects to be deleted

    Raises:
      ``BeamIOError`` if any of the delete operations fail
    """
    def _delete_path(path):
      """Recursively delete the file or directory at the provided path.
      """
      try:
        if os.path.isdir(path):
          shutil.rmtree(path)
        else:
          os.remove(path)
      except OSError as err:
        raise IOError(err)

    # Collect per-path failures and report them all in a single error.
    exceptions = {}
    for path in paths:
      try:
        _delete_path(path)
      except Exception as e:  # pylint: disable=broad-except
        exceptions[path] = e

    if exceptions:
      raise BeamIOError("Delete operation failed", exceptions)
|
####..#.##......##......##.....#..##...##.##.....#.##......##.....#.##......
# .##.....#.########.######..##.##.#..######.##......########.##.....#.########.######..########..######.
# .##.....#.##.......##......##..###.......#.##......##...##..########.##.......##......##...##........##
# .##.....#.##.......##......##...##.##....#.##....#.##....##.##.....#.##.......##......##....##.##....##
# ..#######.##.......#######.##....#..######..######.##.....#.##.....#.##.......#######.##.....#..######.
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @Daddy_Blamo wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import base64
import json
import re
import urllib
import urlparse
from | openscrapers.modules import cleantitle
from openscrapers.modules import client
from openscrapers.modules import directstream
from openscrapers.modules import dom_parser
from openscrapers.modules import source_utils
class source:
def __init__(self):
    # Static scraper metadata consumed by the resolver framework.
    self.priority = 1                               # scheduling priority
    self.language = ['de']                          # German-language site
    self.domains = ['tata.to']
    self.base_link = 'http://tata.to'
    self.search_link = '/filme?suche=%s&type=alle'  # %s = search term / imdb id
    self.ajax_link = '/ajax/stream/%s'              # %s = stream token
def movie(self, imdb, title, localtitle, aliases, year):
    """Resolve a movie to a site-relative URL by searching the IMDB id.

    Returns None on any failure; the bare except intentionally swallows
    all scraping errors, matching this add-on's convention.
    """
    try:
        url = self.__search_movie(imdb, year)
        return url if url else None
    except:
        return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
    """Pack show metadata into a query string; episode() parses it later.

    Returns None on failure (bare except per add-on convention).
    """
    try:
        url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'localtvshowtitle': localtvshowtitle,
               'aliases': aliases, 'year': year}
        url = urllib.urlencode(url)
        return url
    except:
        return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
    """Resolve an episode URL from the query string built by tvshow().

    Tries the localized title first, then falls back to the original
    title. Returns None on failure (bare except per add-on convention).
    """
    try:
        if not url:
            return

        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

        tvshowtitle = data['tvshowtitle']
        localtvshowtitle = data['localtvshowtitle']
        # SECURITY(review): eval() executes arbitrary text from the url
        # payload; safe only because the payload is produced by tvshow()
        # above. Consider serializing aliases as JSON instead.
        aliases = source_utils.aliases_to_array(eval(data['aliases']))

        # Prefer the air year from `premiered`; fall back to the show year.
        year = re.findall('(\d{4})', premiered)
        year = year[0] if year else data['year']

        url = self.__search([localtvshowtitle] + aliases, year, season, episode)
        if not url and tvshowtitle != localtvshowtitle:
            url = self.__search([tvshowtitle] + aliases, year, season, episode)
        return url
    except:
        return
def sources(self, url, hostDict, hostprDict):
    """Fetch stream sources for a previously-resolved page URL.

    POSTs to the site's ajax endpoint (payload is base64-encoded JSON).
    A string 'playinfo' is an HLS master playlist; a list holds direct
    mp4 links. Returns the (possibly empty) list of source dicts.
    """
    sources = []
    try:
        if not url:
            return sources

        ref = urlparse.urljoin(self.base_link, url)
        # The ajax endpoint is keyed by the trailing token of the page URL.
        url = urlparse.urljoin(self.base_link, self.ajax_link % re.findall('-(\w+)$', ref)[0])

        headers = {'Referer': ref, 'User-Agent': client.randomagent()}

        result = client.request(url, headers=headers, post='')
        result = base64.decodestring(result)
        result = json.loads(result).get('playinfo', [])

        if isinstance(result, basestring):
            # HLS case: swap the embed page for the master playlist and
            # extract one source per RESOLUTION variant.
            result = result.replace('embed.html', 'index.m3u8')

            base_url = re.sub('index\.m3u8\?token=[\w\-]+[^/$]*', '', result)

            r = client.request(result, headers=headers)
            r = [(i[0], i[1]) for i in
                 re.findall('#EXT-X-STREAM-INF:.*?RESOLUTION=\d+x(\d+)[^\n]+\n([^\n]+)', r, re.DOTALL) if i]
            r = [(source_utils.label_to_quality(i[0]), i[1] + source_utils.append_headers(headers)) for i in r]
            r = [{'quality': i[0], 'url': base_url + i[1]} for i in r]
            for i in r: sources.append(
                {'source': 'CDN', 'quality': i['quality'], 'language': 'de', 'url': i['url'], 'direct': True,
                 'debridonly': False})
        elif result:
            # Direct-link case: one gvideo source per mp4 link.
            result = [i.get('link_mp4') for i in result]
            result = [i for i in result if i]
            for i in result:
                try:
                    sources.append(
                        {'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'de',
                         'url': i, 'direct': True, 'debridonly': False})
                except:
                    pass

        return sources
    except:
        return
def resolve(self, url):
    # URLs produced by sources() are already final; nothing to resolve.
    return url
def __search_movie(self, imdb, year):
    """Search the site by IMDB id; return the best match's relative URL.

    Accepts results dated year +/- 1 (or undated, '0'), preferring dated
    ones. Returns None on failure (bare except per add-on convention).
    """
    try:
        query = urlparse.urljoin(self.base_link, self.search_link % imdb)

        # Acceptable years: exact, one off either way, or unknown ('0').
        y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']

        r = client.request(query)

        r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
        r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})
        # Pair each result link with the year scraped from its parameters.
        r = [(dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href'),
              dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})) for i in r]
        r = [(i[0][0].attrs['href'], re.findall('calendar.+?>.+?(\d{4})', ''.join([x.content for x in i[1]]))) for i
             in r if i[0] and i[1]]
        r = [(i[0], i[1][0] if len(i[1]) > 0 else '0') for i in r]
        r = sorted(r, key=lambda i: int(i[1]), reverse=True)  # with year > no year
        r = [i[0] for i in r if i[1] in y][0]

        return source_utils.strip_domain(r)
    except:
        return
def __search(self, titles, year, season=0, episode=False):
try:
query = self.search_link % (urllib.quote_plus(cleantitle.query(titles[0])))
query = urlparse.urljoin(self.base_link, query)
t = [cleantitle.get(i) for i in set(titles) if i]
y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
r = client.request(query)
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'container'})
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'ml-item-content'})
f = []
for i in r:
_url = dom_parser.parse_dom(i, 'a', attrs={'class': 'ml-image'}, req='href')[0].attrs['href']
_title = re.sub('<.+?>|</.+?>', '', dom_parser.parse_dom(i, 'h6')[0].content).strip()
try:
_title = re.search('(.*?)\s(?:staf+el|s)\s*(\d+)', _title, re.I).group(1)
except:
pass
_season = '0'
_year = re.findall('calendar.+?>.+?(\d{4})', ''.join(
[x.content for x in dom_parser.parse_dom(i, 'ul', attrs={'class': 'item-params'})]))
_year = _year[0] if len(_year) > 0 else '0'
if season > 0:
s = dom_parser.parse_dom(i, 'span', attrs={'class': 'season-label'})
s = dom_parser.parse_dom(s, 'span', attrs={'class': 'el-num'})
if s: _season = s[0].content.strip()
if cleantitle.get(_title) in t and _year in y and int(_season) == int(season):
f.append((_url, _year))
r = f
r = sorted(r, key=lambda i: int(i[1]), reverse=True) # with year > no year
r = [i[0] for i in r if r[0]][0]
url = source_utils.strip_domain(r)
if episode:
r = client.request(urlparse.urljoin(self.base_link, url))
r = dom_parser.parse_dom(r, 'div', attrs={'class': 'season-list'})
|
"""Support for the Hive devices."""
import logging
from pyhiveapi import Pyhiveapi
import voluptuous as vol
from homeassistant.const import (
CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
_LOGGER = logging.getLogger(__name__)

# Component domain and the hass.data key the shared session is stored under.
DOMAIN = 'hive'
DATA_HIVE = 'data_hive'

# Maps each Home Assistant platform name to the key Pyhiveapi uses for the
# corresponding device list returned by initialise_api().
DEVICETYPES = {
    'binary_sensor': 'device_list_binary_sensor',
    'climate': 'device_list_climate',
    'water_heater': 'device_list_water_heater',
    'light': 'device_list_light',
    'switch': 'device_list_plug',
    'sensor': 'device_list_sensor',
}

# configuration.yaml schema: credentials are required; scan interval
# defaults to 2 (units defined by Pyhiveapi — confirm).
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Required(CONF_USERNAME): cv.string,
        vol.Optional(CONF_SCAN_INTERVAL, default=2): cv.positive_int,
    })
}, extra=vol.ALLOW_EXTRA)
class HiveSession:
    """Initiate Hive Session Class."""

    # NOTE(review): these are class-level attributes, shared by all
    # instances until shadowed; `entities` in particular is a mutable list
    # that every HiveSession shares — confirm the sharing is intentional.
    entities = []
    core = None        # Pyhiveapi client, set in setup()
    heating = None
    hotwater = None
    light = None
    sensor = None
    switch = None
    weather = None
    attributes = None
def setup(hass, config):
    """Set up the Hive Component.

    Logs into the Hive API, builds the shared session stored under
    ``hass.data[DATA_HIVE]``, and loads one platform entry per discovered
    device. Returns False when the API cannot be initialised so Home
    Assistant marks the component setup as failed.
    """
    session = HiveSession()
    session.core = Pyhiveapi()

    username = config[DOMAIN][CONF_USERNAME]
    password = config[DOMAIN][CONF_PASSWORD]
    update_interval = config[DOMAIN][CONF_SCAN_INTERVAL]

    devicelist = session.core.initialise_api(
        username, password, update_interval)

    if devicelist is None:
        _LOGGER.error("Hive API initialization failed")
        return False

    session.sensor = Pyhiveapi.Sensor()
    session.heating = Pyhiveapi.Heating()
    session.hotwater = Pyhiveapi.Hotwater()
    session.light = Pyhiveapi.Light()
    session.switch = Pyhiveapi.Switch()
    session.weather = Pyhiveapi.Weather()
    session.attributes = Pyhiveapi.Attributes()
    hass.data[DATA_HIVE] = session

    # Look each platform's device list up directly instead of scanning every
    # devicelist key per platform (the original inner loop was O(n*m)).
    for ha_type, hive_type in DEVICETYPES.items():
        for hivedevice in devicelist.get(hive_type, []):
            load_platform(hass, ha_type, DOMAIN, hivedevice, config)
    return True
|
import pickle

from deap import tools
from stats import record

logbook = tools.Logbook()
logbook.record(gen=0, evals=30, **record)

print(logbook)

gen, avg = logbook.select("gen", "avg")

# Fixed: pickle writes bytes, so the file must be opened in binary mode
# ("wb", not "w"); the context manager also closes the handle instead of
# leaking it.
with open("logbook.pkl", "wb") as pkl_file:
    pickle.dump(logbook, pkl_file)

# Cleaning the pickle file ...
import os
os.remove("logbook.pkl")

# Selecting which columns appear (and in what order) in the printed output.
logbook.header = "gen", "avg", "spam"

print(logbook)
print(logbook.stream)
logbook.record(gen=1, evals=15, **record)
print(logbook.stream)

# Multi-statistics logbooks expose per-statistic "chapters".
from multistats import record
logbook = tools.Logbook()
logbook.record(gen=0, evals=30, **record)

logbook.header = "gen", "evals", "fitness", "size"
logbook.chapters["fitness"].header = "min", "avg", "max"
logbook.chapters["size"].header = "min", "avg", "max"

print(logbook)

gen = logbook.select("gen")
fit_mins = logbook.chapters["fitness"].select("min")
size_avgs = logbook.chapters["size"].select("avg")

# Plot fitness and size on twin y-axes sharing the generation x-axis.
import matplotlib.pyplot as plt

fig, ax1 = plt.subplots()
line1 = ax1.plot(gen, fit_mins, "b-", label="Minimum Fitness")
ax1.set_xlabel("Generation")
ax1.set_ylabel("Fitness", color="b")
for tl in ax1.get_yticklabels():
    tl.set_color("b")

ax2 = ax1.twinx()
line2 = ax2.plot(gen, size_avgs, "r-", label="Average Size")
ax2.set_ylabel("Size", color="r")
for tl in ax2.get_yticklabels():
    tl.set_color("r")

lns = line1 + line2
labs = [l.get_label() for l in lns]
ax1.legend(lns, labs, loc="center right")

plt.show()
def _is_edited_new(self):
    # True when an edit is in progress on a row that was newly added.
    return self.edited is not None and self.edited.is_new
def _fill(self):
    # Populate the table with `rowcount` sequentially-indexed test rows.
    for i in range(self.rowcount):
        self.append(TestRow(self, i))
def _update_selection(self):
    # Snapshot the current selection so tests can inspect what was selected.
    self.updated_rows = self.selected_rows[:]
def table_with_footer():
    """Build a table holding one normal row plus a footer row.

    Returns a (table, footer) pair for the footer-behavior tests below.
    """
    table = Table()
    footer = TestRow(table, 1)
    table.append(TestRow(table, 0))
    table.footer = footer
    return table, footer
def table_with_header():
    """Build a table holding one normal row plus a header row.

    Returns a (table, header) pair for the header-behavior tests below.
    """
    table = Table()
    header = TestRow(table, 0)
    table.append(TestRow(table, 1))
    table.header = header
    return table, header
#--- Tests
def test_allow_edit_when_attr_is_property_with_fset():
    # When a row has a property that has a fset, by default, make that cell editable.
    class TestRow(Row):
        @property
        def foo(self):
            pass

        @property
        def bar(self):
            pass

        @bar.setter
        def bar(self, value):
            pass

    row = TestRow(Table())
    assert row.can_edit_cell('bar')
    assert not row.can_edit_cell('foo')
    assert not row.can_edit_cell('baz')  # doesn't exist, can't edit

def test_can_edit_prop_has_priority_over_fset_checks():
    # When a row has a can_edit_* property, it's the result of that property that is used, not the
    # result of a fset check.
    class TestRow(Row):
        @property
        def bar(self):
            pass

        @bar.setter
        def bar(self, value):
            pass

        can_edit_bar = False

    row = TestRow(Table())
    assert not row.can_edit_cell('bar')

def test_in():
    # When a table is in a list, doing "in list" with another instance returns false, even if
    # they're the same as lists.
    table = Table()
    some_list = [table]
    assert Table() not in some_list
def test_footer_del_all():
    # Removing all rows doesn't crash when doing the footer check.
    table, footer = table_with_footer()
    del table[:]
    assert table.footer is None

def test_footer_del_row():
    # Removing the footer row sets it to None
    table, footer = table_with_footer()
    del table[-1]
    assert table.footer is None
    eq_(len(table), 1)

def test_footer_is_appened_to_table():
    # A footer is appended at the table's bottom.
    # (NOTE: "appened" in the test name is a typo for "appended"; kept so
    # test discovery/reporting stays stable.)
    table, footer = table_with_footer()
    eq_(len(table), 2)
    assert table[1] is footer

def test_footer_remove():
    # remove() on footer sets it to None
    table, footer = table_with_footer()
    table.remove(footer)
    assert table.footer is None

def test_footer_replaces_old_footer():
    # Assigning a new footer swaps it in place of the old one.
    table, footer = table_with_footer()
    other = Row(table)
    table.footer = other
    assert table.footer is other
    eq_(len(table), 2)
    assert table[1] is other

def test_footer_rows_and_row_count():
    # rows() and row_count() ignore footer.
    table, footer = table_with_footer()
    eq_(table.row_count, 1)
    eq_(table.rows, table[:-1])

def test_footer_setting_to_none_removes_old_one():
    table, footer = table_with_footer()
    table.footer = None
    assert table.footer is None
    eq_(len(table), 1)

def test_footer_stays_there_on_append():
    # Appending another row puts it above the footer
    table, footer = table_with_footer()
    table.append(Row(table))
    eq_(len(table), 3)
    assert table[2] is footer

def test_footer_stays_there_on_insert():
    # Inserting another row puts it above the footer
    # (index 3 is past the end; insertion is expected to clamp).
    table, footer = table_with_footer()
    table.insert(3, Row(table))
    eq_(len(table), 3)
    assert table[2] is footer
def test_header_del_all():
    # Removing all rows doesn't crash when doing the header check.
    table, header = table_with_header()
    del table[:]
    assert table.header is None
def test_header_del_row():
    # Removing the header row sets it to None
    table, header = table_with_header()
    del table[0]
    assert table.header is None
    eq_(len(table), 1)
def test_header_is_inserted_in_table():
    # A header is inserted at the table's top
    table, header = table_with_header()
    eq_(len(table), 2)
    assert table[0] is header
def test_header_remove():
    # remove() on header sets it to None
    table, header = table_with_header()
    table.remove(header)
    assert table.header is None
def test_header_replaces_old_header():
    # Assigning a new header replaces the old one in place (row count unchanged).
    table, header = table_with_header()
    other = Row(table)
    table.header = other
    assert table.header is other
    eq_(len(table), 2)
    assert table[0] is other
def test_header_rows_and_row_count():
    # rows() and row_count() ignore header.
    table, header = table_with_header()
    eq_(table.row_count, 1)
    eq_(table.rows, table[1:])
def test_header_setting_to_none_removes_old_one():
    # Setting header to None removes the old header row from the table.
    table, header = table_with_header()
    table.header = None
    assert table.header is None
    eq_(len(table), 1)
def test_header_stays_there_on_insert():
    # Inserting another row at the top puts it below the header
    table, header = table_with_header()
    table.insert(0, Row(table))
    eq_(len(table), 3)
    assert table[0] is header
def test_refresh_view_on_refresh():
    # If refresh_view is not False, we refresh the table's view on refresh()
    table = TestGUITable(1)
    table.refresh()
    table.view.check_gui_calls(['refresh'])
    table.view.clear_calls()
    table.refresh(refresh_view=False)
    table.view.check_gui_calls([])
def test_restore_selection():
    # By default, after a refresh, selection goes on the last row
    table = TestGUITable(10)
    table.refresh()
    eq_(table.selected_indexes, [9])
def test_restore_selection_after_cancel_edits():
    # _restore_selection() is called after cancel_edits(). Previously, only _update_selection would
    # be called.
    class MyTable(TestGUITable):
        def _restore_selection(self, previous_selection):
            self.selected_indexes = [6]
    table = MyTable(10)
    table.refresh()
    table.add()
    table.cancel_edits()
    eq_(table.selected_indexes, [6])
def test_restore_selection_with_previous_selection():
    # By default, we try to restore the selection that was there before a refresh
    table = TestGUITable(10)
    table.refresh()
    table.selected_indexes = [2, 4]
    table.refresh()
    eq_(table.selected_indexes, [2, 4])
def test_restore_selection_custom():
    # After a _fill() called, the virtual _restore_selection() is called so that it's possible for a
    # GUITable subclass to customize its post-refresh selection behavior.
    class MyTable(TestGUITable):
        def _restore_selection(self, previous_selection):
            self.selected_indexes = [6]
    table = MyTable(10)
    table.refresh()
    eq_(table.selected_indexes, [6])
def test_row_cell_value():
    # *_cell_value() correctly mangles attrnames that are Python reserved words.
    row = Row(Table())
    row.from_ = 'foo'
    eq_(row.get_cell_value('from'), 'foo')
    row.set_cell_value('from', 'bar')
    eq_(row.get_cell_value('from'), 'bar')
def test_sort_table_also_tries_attributes_without_underscores():
    # When determining a sort key, after having unsuccessfully tried the attribute with the
    # underscore, try the one without one.
    table = Table()
    row1 = Row(table)
    row1._foo = 'a' # underscored attr must be checked first
    row1.foo = 'b'
    row1.bar = 'c'
    row2 = Row(table)
    row2._foo = 'b'
    row2.foo = 'a'
    row2.bar = 'b'
    table.append(row1)
    table.append(row2)
    table.sort_by('foo')
    assert table[0] is row1
    assert table[1] is row2
    table.sort_by('bar')
    assert table[0] is row2
    assert table[1] is row1
def test_sort_table_updates_selection():
    # Sorting keeps the selection on the same rows, so the GUI gets update
    # notifications for the rows at their new positions.
    table = TestGUITable(10)
    table.refresh()
    table.select([2, 4])
    table.sort_by('index', desc=True)
    # Now, the updated rows should be 7 and 5
    eq_(len(table.updated_rows), 2)
    r1, r2 = table.updated_rows
    eq_(r1.index, 7)
    eq_(r2.index, 5)
def test_sort_table_with_footer():
    # Sorting a table with a footer keeps it at the bottom
    table, footer = table_with_footer()
    table.sort_by('index', desc=True)
    assert table[-1] is footer
def test_sort_table_with_header():
# Sorting a table with a header keeps |
# Legacy Django URLconf: ``patterns()`` and string view paths predate
# Django 1.8 and were removed later; this module targets old Django.
# NOTE(review): handler500/handler404 are imported but unused here —
# possibly re-exported for Django's handler lookup; confirm before removing.
from django.conf import settings
from django.conf.urls.defaults import handler500, handler404, patterns, include, \
    url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^jsi18n/(?P<packages>\S+?)/$', 'django.views.i18n.javascript_catalog'),
    # Static serving of media directly by Django (development setup only).
    url(r'^media/cms/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': settings.CMS_MEDIA_ROOT, 'show_indexes': True}),
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
    # Catch-all: delegate everything else to the test-project CMS URLs.
    url(r'^', include('cms.test_utils.project.second_cms_urls_for_apphook_tests')),
)
|
fr | om nanoplay import PayloadProtocol, ControlProtocol, Player, CustomServer
| |
#!/usr/bin/ | env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "eksi.settings")
from django.core.management import execu | te_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import email
import mimetypes
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEImage import MIMEImage
import smtplib
from time import sleep
def sendEmail(authInfo, fromAdd, toAdd, subject, plainText, htmlText):
    """Send a multipart (plain-text + HTML alternative) mail via SMTP.

    authInfo  -- dict with 'server', 'user' and 'password' entries
    fromAdd   -- sender address
    toAdd     -- list of recipient addresses
    subject   -- subject line
    plainText -- plain-text body
    htmlText  -- HTML body
    Prints a notice and returns early when the login info is incomplete.
    """
    strFrom = fromAdd
    strTo = ', '.join(toAdd)
    server = authInfo.get('server')
    user = authInfo.get('user')
    passwd = authInfo.get('password')
    if not (server and user and passwd) :
        print 'incomplete login info, exit now'
        return
    # Root message container; 'related' permits inline resources (images).
    msgRoot = MIMEMultipart('related')
    msgRoot['Subject'] = subject
    msgRoot['From'] = strFrom
    msgRoot['To'] = strTo
    msgRoot.preamble = 'This is a multi-part message in MIME format.'
    # Encapsulate the plain and HTML versions of the message body in an
    # 'alternative' part, so message agents can decide which they want to display.
    msgAlternative = MIMEMultipart('alternative')
    msgRoot.attach(msgAlternative)
    # Plain-text part.
    #msgText = MIMEText(plainText, 'plain', 'GB18030')
    msgText = MIMEText(plainText, 'plain', 'utf-8')
    msgAlternative.attach(msgText)
    # HTML part.
    #msgText = MIMEText(htmlText, 'html', 'GB18030')
    msgText = MIMEText(htmlText, 'html', 'utf-8')
    msgAlternative.attach(msgText)
    # Inline-image part (disabled example).
    #fp = open('test.jpg', 'rb')
    #msgImage = MIMEImage(fp.read())
    #fp.close()
    #msgImage.add_header('Content-ID', '<image1>')
    #msgRoot.attach(msgImage)
    # Deliver the message over SMTP.
    smtp = smtplib.SMTP()
    # Debug level: 1 enables protocol logging, 0 disables it.
    # 1-open log-
    #smtp.set_debuglevel(1)
    # 0-close log
    smtp.set_debuglevel(0)
    smtp.connect(server)
    smtp.login(user, passwd)
    smtp.sendmail(strFrom, strTo, msgRoot.as_string())
    smtp.quit()
    return
def sendmail(_title,_content,_toUserList,_html=None):
    """Send *_content* to each address in *_toUserList*, one mail per recipient.

    NOTE(review): SMTP credentials are hard-coded below; move them into
    configuration/secret storage before real use.
    """
    #(authInfo, fromAdd, toAdd, subject, plainText, htmlText):
    print "start to send mail start"
    authInfo = {}
    authInfo['server'] = 'smtp.exmail.qq.com'
    authInfo['user'] = 'm@localhost.com'
    authInfo['password'] = '123'
    fromAdd = 'm@localhost.com'
    #toAdd = ["root@localhost.com"]
    subject = 'search exception category'
    if(_title):
        subject =_title
    plainText = _content
    #plainText = '服务器异常状态报警'
    htmlText = _html
    for t in _toUserList:
        #print t
        tarr=[]
        tarr.append(t)
        sendEmail(authInfo, fromAdd, tarr, subject, plainText, htmlText)
        # Pause between recipients — presumably to avoid the provider's
        # rate limit; confirm before removing.
        sleep(2)
    print 'send mail success.'
"""
by zcl at 2016.6.15
"""
def rendar_table(title,notice,rhead_list,rdata_list):
html ="""
<p class="section">{0}</p>
<p class="section">{1}</p>
<table cellpadding="5" cellspacing="0" border="1" bordercolor="#04B4AE" style="text-align: center; font-family: Arial; border-collapse: collapse; width: auto;">
<tbody>
<tr>
<td colspan="{2}"><div>{0}</div></td>
</tr>
<tr>
""".format(title,notice,str(len(rhead_list)))
for rhead in rhead_list:
rhead = rhead.encode('utf8')
tmp = """<th style="background-color: #04B4AE; color: #ffffff">{0}</th>
""".format(str(rhead))
html+=tmp
html+="</tr>"
for o in rdata_list:
line_html=''
line_html+="<tr>"
for key in rhead_list:
val = o[key]
key = key.encode('utf8')
line_html+="<td>"+str(val)+"</td>"
line_html+="</tr>"
html+=line_html
html+="""
</tbody>
</table>
<hr>
"""
return html
if __name__ == '__main__' :
    # Manual smoke test: sends a throwaway message to the local account.
    toUserList = ['root@localhost.com']
    sendmail('test','sorry to disturb, this mail is just for test',toUserList)
    #sendmail('[热门行业统计]'+title,'',toUserList,html.encode('utf8'))
    #sendEmail(authInfo, fromAdd, toAdd, subject, plainText, htmlText)
|
bintype, template = get_params
gal = Galaxy(request.param)
gal.set_params(bintype=bintype, template=template)
gal.set_filepaths()
yield gal
@pytest.fixture()
def get_cube(newgalaxy, rsync):
    # Download the galaxy's data cube via rsync when it is not cached locally,
    # then hand the galaxy object to the test.
    if not os.path.isfile(newgalaxy.cubepath):
        rsync.add('mangacube', **newgalaxy.access_kwargs)
        rsync.set_stream()
        rsync.commit()
    yield newgalaxy
@pytest.fixture(params=rmodes)
def asurl(request):
    # Map the parametrized return mode onto the boolean ``as_url`` flag
    # consumed by the image helpers ('full' -> False, 'url' -> True).
    return {'full': False, 'url': True}.get(request.param)
@pytest.fixture()
def make_paths(request, rsync, mode, asurl, release):
    """Build the expected image paths/URLs for a list of plate-ifus.

    Indirect parametrization may supply the plateifu list via
    ``request.param``; otherwise the module-level ``imagelist`` is used.
    """
    inputs = request.param if hasattr(request, 'param') else None
    rmode = 'url' if asurl else 'full'
    fullpaths = []
    inputs = inputs if inputs else imagelist
    for plateifu in inputs:
        gal = Galaxy(plateifu)
        gal.set_params(release=release)
        gal.set_filepaths()
        if mode == 'local':
            # Presumably rsync exposes .full()/.url() path builders; pick by name.
            path = rsync.__getattribute__(rmode)('mangaimage', **gal.access_kwargs)
            fullpaths.append(path)
        else:
            rsync.add('mangaimage', **gal.access_kwargs)
            rsync.set_stream()
            path = rsync.get_urls() if asurl else rsync.get_paths()
            fullpaths.extend(path)
    return fullpaths
class TestImagesGetDir3d(object):
    """getDir3d should resolve to 'stack' for both plate-ifu and plate input."""
    @pytest.mark.parametrize('expval', [('stack')])
    def test_getdir3d(self, galaxy, expval, mode, db):
        dir3d = getDir3d(galaxy.plateifu, mode=mode, release=galaxy.release)
        assert expval == dir3d
    @pytest.mark.parametrize('expval', [('stack')])
    def test_getdir3d_plate(self, galaxy, expval, mode, db):
        dir3d = getDir3d(galaxy.plate, mode=mode, release=galaxy.release)
        assert expval == dir3d
@pytest.mark.xfail()
@pytest.mark.timeout(40)
class TestImagesByList(object):
    """Tests for getImagesByList (class currently marked expected-to-fail)."""
    @pytest.mark.parametrize('imglist, mode, errmsg',
                             [('7495-1901', 'local', 'Input must be of type list or Numpy array'),
                              (['nogoodid'], 'local', 'Input must be of type plate-ifu or mangaid'),
                              (imagelist, 'notvalidmode', 'Mode must be either auto, local, or remote')],
                             ids=['notlist', 'badid', 'badmode'])
    def test_failures(self, imglist, mode, errmsg, release):
        # Invalid input types/ids/modes should raise AssertionError with a clear message.
        with pytest.raises(AssertionError) as cm:
            image = getImagesByList(imglist, mode=mode, release=release)
        assert cm.type == AssertionError
        assert errmsg in str(cm.value)
    def test_get_imagelist(self, make_paths, mode, asurl, release):
        # The returned image set must match the fixture-built expected paths.
        images = getImagesByList(imagelist, mode=mode, as_url=asurl, release=release)
        assert set(make_paths) == set(images)
    # @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
    # def test_download(self, monkeymanga, temp_scratch, get_cube):
    #     imgpath = tempafile(get_cube.imgpath, temp_scratch)
    #     #assert os.path.isfile(get_cube.imgpath) is False
    #     assert imgpath.check(file=0) is True
    #     image = getImagesByList([get_cube.plateifu], mode='remote', as_url=True, download=True, release=get_cube.release)
    #     #assert os.path.isfile(get_cube.imgpath) is True
    #     assert imgpath.check(file=1) is True
    #     assert image is None
    # @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
    # def test_download_fails(self, monkeymanga, temp_scratch, get_cube):
    #     imgpath = tempafile(get_cube.imgpath, temp_scratch)
    #     assert imgpath.check(file=0) is True
    #     errmsg = 'Download not available when in local mode'
    #     with warnings.catch_warnings(record=True) as cm:
    #         warnings.simplefilter('always')
    #         image = getImagesByList([get_cube.plateifu], mode='local', as_url=True, download=True)
    #     assert cm[-1].category is MarvinUserWarning
    #     assert errmsg in str(cm[-1].message)
class TestImagesByPlate(object):
    """Tests for getImagesByPlate."""
    @pytest.mark.parametrize('plateid, mode, errmsg',
                             [('8485abcd', 'local', 'Plateid must be a numeric integer value'),
                              (None, 'notvalidmode', 'Mode must be either auto, local, or remote')],
                             ids=['badid', 'badmode'])
    def test_failures(self, galaxy, plateid, mode, errmsg):
        # None means "use the fixture galaxy's plate" (only the mode is bad then).
        plateid = plateid if plateid else galaxy.plate
        with pytest.raises(AssertionError) as cm:
            image = getImagesByPlate(plateid, mode=mode, release=galaxy.release)
        assert cm.type == AssertionError
        assert errmsg in str(cm.value)
    @pytest.mark.parametrize('make_paths, plate', [(['8485-1901'], '8485')], indirect=['make_paths'], ids=['plateifu'])
    def test_get_imageplate(self, make_paths, plate, mode, asurl, release):
        images = getImagesByPlate(plate, mode=mode, as_url=asurl, release=release)
        assert make_paths[0] in images
    # @pytest.mark.parametrize('make_paths', [(['7495-1901'])], indirect=True, ids=['newplateifu'])
    # def test_download(self, monkeymanga, temp_scratch, get_cube):
    #     imgpath = tempafile(get_cube.imgpath, temp_scratch)
    #     assert imgpath.check(file=0) is True
    #     image = getImagesByPlate(get_cube.plate, mode='remote', as_url=True, download=True)
    #     assert imgpath.check(file=1) is True
    #     assert image is None
    # def test_get_images_download_local_fail(self, monkeymanga, temp_scratch, get_cube):
    #     imgpath = tempafile(get_cube.imgpath, temp_scratch)
    #     assert imgpath.check(file=0) is True
    #     errmsg = 'Download not available when in local mode'
    #     with warnings.catch_warnings(record=True) as cm:
    #         warnings.simplefilter("always")
    #         image = getImagesByPlate(self.new_plate, mode='local', as_url=True, download=True)
    #     self.assertIs(cm[-1].category, MarvinUserWarning)
    #     self.assertIn(errmsg, str(cm[-1].message))
class TestRandomImages(object):
    """Tests for getRandomImages."""
    @pytest.mark.parametrize('mode, errmsg',
                             [('notvalidmode', 'Mode must be either auto, local, or remote')],
                             ids=['badmode'])
    def test_failures(self, mode, errmsg, release):
        with pytest.raises(AssertionError) as cm:
            image = getRandomImages(mode=mode, release=release)
        assert cm.type == AssertionError
        assert errmsg in str(cm.value)
    @pytest.mark.parametrize('num', [(10), (5)], ids=['num10', 'num5'])
    def test_get_image_random(self, base, num, mode, asurl, release):
        # Requesting N random images returns exactly N entries rooted at ``base``.
        images = getRandomImages(num=num, mode=mode, as_url=asurl, release=release)
        assert images is not None
        assert num == len(images)
        assert isinstance(images, list) is True
        assert base in images[0]
class TestShowImage(object):
def _assert_image(self, galaxy, image):
assert image is not None
assert image.size == (562, 562)
assert image.format == 'PNG'
assert str(galaxy.plate) in image.filename
assert galaxy.ifu in image.filename
@pytest.mark.parametrize('return_image', [(True), (False)], ids=['returnyes', 'returnno'])
def test_show_image(self, galaxy, mode, return_image):
image = showImage(plateifu=galaxy.plateifu, mode=mode, release=galaxy.release, return_image=return_image, show_image=False)
if return_image:
self._assert_image(galaxy, image)
else:
assert image is None
return image
@pytest.mark.parametrize('param, error, errmsg',
[({'mode': 'notvalidmode'}, AssertionError, 'Mode must be either auto, local, or remote'),
({}, AssertionError, 'A filepath or plateifu must be specified!'),
({'plateifu': '8485-1905'}, MarvinError, 'Error: remote filepath'),
({'path': '/tmp/image.png'}, MarvinError, 'Error: local filepath /tmp/image.png does not exist.'),
({'path': ['/tmp/image.png', '/tmp/image1.png']}, MarvinError, 'showImage currently only works on a sing |
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose | Tools | Templates
# and open the template in the editor.
| |
),
backend_port=dict(
type='int'
),
idle_timeout=dict(
type='int',
default=4
),
natpool_frontend_port_start=dict(
type='int'
),
natpool_frontend_port_end=dict(
type='int'
),
natpool_backend_port=dict(
type='int'
),
natpool_protocol=dict(
type='str'
)
)
self.resource_group = None
self.name = None
self.location = None
self.sku = None
self.frontend_ip_configurations = None
self.backend_address_pools = None
self.probes = None
self.inbound_nat_pools = None
self.load_balancing_rules = None
self.public_ip_address_name = None
self.state = None
self.probe_port = None
self.probe_protocol = None
self.probe_interval = None
self.probe_fail_count = None
self.probe_request_path = None
self.protocol = None
self.load_distribution = None
self.frontend_port = None
self.backend_port = None
self.idle_timeout = None
self.natpool_frontend_port_start = None
self.natpool_frontend_port_end = None
self.natpool_backend_port = None
self.natpool_protocol = None
self.tags = None
self.results = dict(changed=False, state=dict())
super(AzureRMLoadBalancer, self).__init__(
derived_arg_spec=self.module_args,
supports_check_mode=True
)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_args.keys()) + ['tags']:
setattr(self, key, kwargs[key])
changed = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
self.location = resource_group.location
load_balancer = self.get_load_balancer()
if self.state == 'present':
# compatible parameters
if not self.frontend_ip_configurations and not self.backend_address_pools and not self.probes and not self.inbound_nat_pools:
self.deprecate('Discrete load balancer config settings are deprecated and will be removed.'
' Use frontend_ip_configurations, backend_address_pools, probes, inbound_nat_pools lists instead.', version='2.9')
frontend_ip_name = 'frontendip0'
backend_address_pool_name = 'backendaddrp0'
prob_name = 'prob0'
inbound_nat_pool_name = 'inboundnatp0'
lb_rule_name = 'lbr'
self.frontend_ip_configurations = [dict(
name=frontend_ip_name,
public_ip_address=self.public_ip_address_name
)]
self.backend_address_pools = [dict(
name=backend_address_pool_name
)]
self.probes = [dict(
name=prob_name,
port=self.probe_port,
protocol=self.probe_protocol,
interval=self.probe_interval,
fail_count=self.probe_fail_count,
request_path=self.probe_request_path
)] if self.probe_protocol else None
self.inbound_nat_pools = [dict(
name=inbound_nat_pool_name,
frontend_ip_configuration_name=frontend_ip_name,
protocol=self.natpool_protocol,
frontend_port_range_start=self.natpool_frontend_port_start,
frontend_port_range_end=self.natpool_frontend_port_end,
| backend_port=self.natpool_backend_port
)] if self.natpool_protocol else None
self.load_balancing_rules = [dict(
name=lb_rule_name,
frontend_ip_configuration=frontend_ip_name,
backend_address_pool=backend_address_pool_name,
probe=prob_name,
protocol=self.protocol,
load_distribution=self.load_distribution,
| frontend_port=self.frontend_port,
backend_port=self.backend_port,
idle_timeout=self.idle_timeout,
enable_floating_ip=False
)] if self.protocol else None
if load_balancer:
# check update, NIE
changed = False
else:
changed = True
elif self.state == 'absent' and load_balancer:
changed = True
self.results['state'] = load_balancer_to_dict(load_balancer)
if 'tags' in self.results['state']:
update_tags, self.results['state']['tags'] = self.update_tags(self.results['state']['tags'])
if update_tags:
changed = True
else:
if self.tags:
changed = True
self.results['changed'] = changed
if self.state == 'present' and changed:
# create or update
frontend_ip_configurations_param = [self.network_models.FrontendIPConfiguration(
name=item.get('name'),
public_ip_address=self.get_public_ip_address_instance(item.get('public_ip_address')) if item.get('public_ip_address') else None,
private_ip_address=item.get('private_ip_address'),
private_ip_allocation_method=item.get('private_ip_allocation_method'),
subnet=self.network_models.Subnet(id=item.get('subnet')) if item.get('subnet') else None
) for item in self.frontend_ip_configurations] if self.frontend_ip_configurations else None
backend_address_pools_param = [self.network_models.BackendAddressPool(
name=item.get('name')
) for item in self.backend_address_pools] if self.backend_address_pools else None
probes_param = [self.network_models.Probe(
name=item.get('name'),
port=item.get('port'),
protocol=item.get('protocol'),
interval_in_seconds=item.get('interval'),
request_path=item.get('request_path'),
number_of_probes=item.get('fail_count')
) for item in self.probes] if self.probes else None
inbound_nat_pools_param = [self.network_models.InboundNatPool(
name=item.get('name'),
frontend_ip_configuration=self.network_models.SubResource(
frontend_ip_configuration_id(
self.subscription_id,
self.resource_group,
self.name,
item.get('frontend_ip_configuration_name'))),
protocol=item.get('protocol'),
frontend_port_range_start=item.get('frontend_port_range_start'),
frontend_port_range_end=item.get('frontend_port_range_end'),
backend_port=item.get('backend_port')
) for item in self.inbound_nat_pools] if self.inbound_nat_pools else None
load_balancing_rules_param = [self.network_models.LoadBalancingRule(
name=item.get('name'),
frontend_ip_configuration=self.network_models.SubResource(
frontend_ip_configuration_id(
self.subscription_id,
self.resource_group,
self.name,
item.get('frontend_ip_configuration')
)
),
backend_address_pool=self.network_models.SubResource(
backend_address_pool_id(
self.subscription_id,
self.resource_group,
self.name,
item.get('backend_address_pool')
)
),
probe=self.network_models.SubResource(
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Helper class for creating an InstanceGroup object.
"""
from vm_network_migration.modules.instance_group_modules.regional_managed_instance_group import RegionalManagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.unmanaged_instance_group import UnmanagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.zonal_managed_instance_group import ZonalManagedInstanceGroup
from vm_network_migration.modules.instance_group_modules.instance_group import InstanceGroup
from vm_network_migration.utils import initializer
class InstanceGroupHelper:
    @initializer
    def __init__(self, compute, project, instance_group_name,
                 region, zone, network, subnetwork, preserve_instance_ip=False):
        """ Initialize an instance group helper object

        Args:
            compute: google compute engine
            project: project ID
            instance_group_name: name of the instance group
            region: region of the instance group
            zone: zone of the instance group
            network: target network
            subnetwork: target subnetwork
            preserve_instance_ip: only valid for an unmanaged instance group
        """
        # @initializer binds every argument onto self, so the body is empty.
    def build_instance_group(self) -> InstanceGroup:
        """ Build an object which is an instance of the InstanceGroup's subclass

        Tries a zonal lookup first; on success the 'description' field decides
        managed vs. unmanaged. Otherwise falls back to a regional lookup and
        re-raises that lookup's error on failure.
        """
        # try to build a zonal instance group
        try:
            instance_group_configs = self.get_instance_group_in_zone()
        except Exception:
            # It is not a single zone instance group
            pass
        else:
            # NOTE(review): assumes the API response always carries a
            # 'description' key — a KeyError here would propagate; confirm.
            if 'Instance Group Manager' not in instance_group_configs[
                'description']:
                return UnmanagedInstanceGroup(self.compute, self.project,
                                              self.instance_group_name,
                                              self.network,
                                              self.subnetwork,
                                              self.preserve_instance_ip,
                                              self.zone)
            else:
                return ZonalManagedInstanceGroup(self.compute,
                                                 self.project,
                                                 self.instance_group_name,
                                                 self.network,
                                                 self.subnetwork,
                                                 self.preserve_instance_ip,
                                                 self.zone)
        # try to build a regional instance group
        try:
            self.get_instance_group_in_region()
        except Exception as e:
            raise e
        else:
            return RegionalManagedInstanceGroup(self.compute, self.project,
                                                self.instance_group_name,
                                                self.network,
                                                self.subnetwork,
                                                self.preserve_instance_ip,
                                                self.region)
    def get_instance_group_in_zone(self) -> dict:
        """ Get a zonal instance group's configurations

        Returns: instance group's configurations
        """
        return self.compute.instanceGroups().get(
            project=self.project,
            zone=self.zone,
            instanceGroup=self.instance_group_name).execute()
    def get_instance_group_in_region(self) -> dict:
        """ Get a regional instance group's configurations

        Returns: instance group's configurations
        """
        return self.compute.regionInstanceGroups().get(
            project=self.project,
            region=self.region,
            instanceGroup=self.instance_group_name).execute()
|
# The Nexus software is licensed under the BSD 2-Clause license.
#
# You should have recieved a copy of this license with the software.
# If you did not, you can find one at the following link.
#
# http://opensource.org/licenses/bsd-license.php
|
from core.plugins import ProtocolPlugin
from core.decorators import *
from cor | e.constants import *
class FetchPlugin(ProtocolPlugin):
    """Protocol plugin providing the mod-only /respawn command."""

    commands = {
        "respawn": "commandRespawn",
    }

    @player_list
    @mod_only
    @only_username_command
    def commandRespawn(self, username, fromloc, rankoverride):
        "/respawn username - Mod\nRespawns the user."
        online = self.client.factory.usernames
        # Guard clause: nothing to do when the target is offline.
        if username not in online:
            self.client.sendServerMessage("%s is not on the server." % username)
            return
        target = online[username]
        target.respawn()
        # Notify both the respawned player and the command issuer.
        target.sendServerMessage("You have been respawned by %s." % self.client.username)
        self.client.sendServerMessage("%s respawned." % username)
|
from .default import default
import os
import re
class image_png(default):
    """Property handler for image values: rasterizes SVG icons to PNG at
    compile time and collects image pixel sizes as statistics."""

    def __init__(self, key, stat):
        default.__init__(self, key, stat)
        # Maps image path -> (width, height); filled lazily by stat_value().
        self.data = {}

    def compile(self, prop):
        """Return repr() of the image path to use for *prop*.

        SVG sources are converted to a PNG inside ``stat['icons_dir']`` and
        the PNG path is returned instead; missing files are reported and the
        original value is passed through unchanged.
        """
        if not os.path.exists(prop['value']):
            print("Image '{}' not found.".format(prop['value']))
        else:
            # Raw string: "\." in a plain literal is an invalid escape and a
            # SyntaxWarning on modern Python.
            if re.search(r"\.svg$", prop['value']):
                # wand is imported lazily — only needed for SVG input.
                from wand.image import Image
                from wand.color import Color
                from wand.api import library
                dest = self.stat['icons_dir'] + "/" + prop['value'].replace('/', '_') + ".png"
                print("svg icon detected. converting '{0}' to '{1}'".format(prop['value'], dest))
                with Image() as img:
                    with Color('transparent') as bg_color:
                        library.MagickSetBackgroundColor(img.wand, bg_color.resource)
                    # Context manager closes the source file (the original
                    # bare open(...).read() leaked the handle).
                    with open(prop['value'], 'rb') as src:
                        img.read(blob=src.read())
                    dest_img = img.make_blob('png32')
                with open(dest, 'wb') as out:
                    out.write(dest_img)
                return repr(dest)
        return repr(prop['value'])

    def stat_value(self, prop):
        """Record the pixel size of the image referenced by *prop* on this
        instance and in ``stat['global_data']``; returns the value unchanged."""
        if prop['value'] is None:
            return prop['value']
        if os.path.exists(prop['value']):
            from wand.image import Image
            # Close the ImageMagick resources deterministically instead of
            # leaking the Image object.
            with Image(filename=prop['value']) as img:
                size = img.size
            self.data[prop['value']] = size
            if prop['key'] not in self.stat['global_data']:
                self.stat['global_data'][prop['key']] = {}
            self.stat['global_data'][prop['key']][prop['value']] = size
        return prop['value']

    def get_global_data(self):
        """Force evaluation of all values for this key, then return the
        collected path -> size mapping."""
        self.stat.property_values(self.key)
        return self.data
|
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 10 10:43:53 20 | 19
@author: Heathro
Description: Reduces a vcf file to meta section and
one line for each chromosome number for testing and
debugging purposes.
"""
# Open files to read from and write to.
# NOTE: the original used mode "rU"; the 'U' (universal newlines) flag was
# deprecated since Python 3.4 and removed in 3.11 — plain "r" already
# performs newline translation in text mode.
vcfpath = open("D:/MG_GAP/Ali_w_767.vcf", "r")
testvcf = open("REDUCED_ali.vcf", "w")
try:
    # Chromosome of the previous data line, and how many of its lines we kept.
    temp_chrom = 0
    counter = 0
    for line in vcfpath:
        # Found a chromosome (data) line: "sNNffold_<chrom>_..." in column 1.
        if line[0:8] == "sNNffold":
            column = line.split('\t')
            first_col = column[0].split('_')
            current_chrom = first_col[1]
            if current_chrom == temp_chrom:
                # Same chromosome: keep writing until 1000 lines were emitted.
                counter = counter + 1
                if counter < 1000:
                    testvcf.write(line)
            else:
                # New chromosome: always write its first line, reset counter.
                counter = 0
                temp_chrom = current_chrom
                testvcf.write(line)
        else:
            # Meta lines and the header line are copied through unconditionally.
            testvcf.write(line)
finally:
    # Close both files even if an I/O error interrupts the loop.
    testvcf.close()
    vcfpath.close()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""@package docstring
Yowsup connector for wxpyWha (a simple wxWidgets GUI wrapper atop yowsup).
Uses WhaLayer to build the Yowsup stack.
This is based on code from the yowsup echo example, the yowsup cli and pywhatsapp.
"""
SECONDS_RECONNECT_DELAY = 10
import sys
# from echo stack
from yowsup.stacks import YowStackBuilder
from yowsup.layers.auth import AuthError
from yowsup.layers.network import YowNetworkLayer
# from cli stack
try:
from yowsup.layers.axolotl.props import PROP_IDENTITY_AUTOTRUST #tgalal
except ImportError as ie:
sys.stderr.write("WARNING: PROP_IDENTITY_AUTOTRUST could not be imported from yowsup.layers.axolotl.props. Using hardcoded value instead.\n")
PROP_IDENTITY_AUTOTRUST = "org.openwhatsapp.yowsup.prop.axolotl.INDENTITY_AUTOTRUST" #as done by jlguardi
# from cli layer
from yowsup.layers import YowLayerEvent
# from http://stackoverflow.com/questions/3702675/how-to-print-the-full-traceback-without-halting-the-program
import traceback
# from https://github.com/tgalal/yowsup/issues/1069
import logging
try:
import queue
except ImportError: # python2 compatibility
import Queue as queue
from whalayer import WhaLayer
class WhaClient(object):
    """Wraps a yowsup stack (with WhaLayer on top) and adds auto-reconnect."""
    def __init__(self, credentials, encryptionEnabled = True):
        stackBuilder = YowStackBuilder()
        self.stack = stackBuilder\
            .pushDefaultLayers(encryptionEnabled)\
            .push(WhaLayer)\
            .build()
        self.stack.setCredentials(credentials)
        self.stack.setProp(PROP_IDENTITY_AUTOTRUST, True) #not in jlguardi
        # While True, start() keeps reconnecting after the stack loop exits.
        self.wantReconnect = True
        # Signal queue: putting any item aborts the reconnect delay early.
        self.abortReconnectWait = queue.Queue()
    def setYowsupEventHandler(self, handler):
        # NOTE(review): 'enventHandler' looks like a typo of 'eventHandler',
        # but it must match the attribute name WhaLayer reads — confirm there
        # before renaming either side.
        interface = self.stack.getLayerInterface(WhaLayer)
        interface.enventHandler = handler
    def sendMessage(self, outgoingMessage):
        # Forward an outgoing message to the WhaLayer interface.
        interface = self.stack.getLayerInterface(WhaLayer)
        interface.sendMessage(outgoingMessage)
    def disconnect(self):
        interface = self.stack.getLayerInterface(WhaLayer)
        interface.disconnect()
    def start(self):
        """Run the yowsup event loop, reconnecting until disabled.

        Blocks the calling thread. KeyboardInterrupt exits the process; any
        other exception is reported and leads into the reconnect wait.
        """
        logging.basicConfig(level=logging.WARNING)
        while (self.wantReconnect):
            self.stack.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_CONNECT))
            try:
                self.stack.loop()
            except AuthError as e:
                sys.stderr.write("Authentication Error\n")
            except KeyboardInterrupt:
                # This is only relevant if this is the main module
                # TODO: disconnect cleanly
                print("\nExit")
                sys.exit(0)
            except: # catch *all* exceptions
                # NOTE(review): the bare except keeps the reconnect loop alive
                # at the cost of hiding programming errors.
                sys.stderr.write("Unhandled exception.\n")
                traceback.print_exc()
            # TODO: regard connection state in the GUI
            sys.stderr.write("Yowsup WhaClient exited.\nYOU ARE NOW DISCONNECTED.\n")
            if (self.wantReconnect):
                sys.stderr.write("Auto-reconnect enabled. Waiting up to %d seconds before reconnecting...\n"%(SECONDS_RECONNECT_DELAY))
                try:
                    # Blocks until setEnableReconnect() posts or the delay elapses.
                    self.abortReconnectWait.get(timeout=SECONDS_RECONNECT_DELAY)
                except queue.Empty:
                    pass
    def setEnableReconnect(self, b = True):
        """Enable/disable auto-reconnect and abort any in-progress wait."""
        self.wantReconnect = b
        self.abortReconnectWait.put(b)
if __name__ == "__main__":
client = WhaClient(("login","base64passwd"))
client.start()
|
from sqlalchemy.orm import joinedload
from datetime import datetime
from changes.api.base import APIView
from changes.api.build_index import execute_build
from changes.config import db
from changes.constants import Result, Status
from changes.models import Build, Job, JobStep, ItemStat
class BuildRestartAPIView(APIView):
    def post(self, build_id):
        """Restart a finished build: wipe its jobs and stats, then re-queue it.

        Returns 404 when the build does not exist and 400 when it has not
        finished yet; otherwise responds with the re-queued build.
        """
        build = Build.query.options(
            joinedload('project', innerjoin=True),
            joinedload('author'),
            joinedload('source').joinedload('revision'),
        ).get(build_id)
        if build is None:
            return '', 404
        if build.status != Status.finished:
            return '', 400
        # ItemStat doesnt cascade by itself, so collect the ids of the build,
        # its jobs and its job steps and delete their stats explicitly.
        stat_ids = [build.id]
        job_ids = [
            j[0] for j in
            db.session.query(Job.id).filter(Job.build_id == build.id)
        ]
        if job_ids:
            step_ids = [
                s[0] for s in
                db.session.query(JobStep.id).filter(JobStep.job_id.in_(job_ids))
            ]
            stat_ids.extend(job_ids)
            stat_ids.extend(step_ids)
        if stat_ids:
            ItemStat.query.filter(
                ItemStat.item_id.in_(stat_ids),
            ).delete(synchronize_session=False)
        # remove any existing job data
        # TODO(dcramer): this is potentially fairly slow with cascades
        Job.query.filter(
            Job.build_id == build.id
        ).delete(synchronize_session=False)
        # Reset timing/status fields so the build re-enters the queue fresh.
        build.date_started = datetime.utcnow()
        build.date_modified = build.date_started
        build.date_finished = None
        build.duration = None
        build.status = Status.queued
        build.result = Result.unknown
        db.session.add(build)
        execute_build(build=build)
        return self.respond(build)
|
"""Print all records in the pickle for the specified test"""
import sy | s
import argparse
from autocms.core import (load_configuration, load_records)
def main():
"""Print all records correspondin | g to test given as an argument"""
parser = argparse.ArgumentParser(description='Submit one or more jobs.')
parser.add_argument('testname', help='test directory')
parser.add_argument('-c', '--configfile', type=str,
default='autocms.cfg',
help='AutoCMS configuration file name')
args = parser.parse_args()
config = load_configuration(args.configfile)
records = load_records(args.testname,config)
for job in records:
print str(job)+'\n'
return 0
if __name__ == '__main__':
    # Exit with main()'s return code.
    sys.exit(main())
|
# /usr/bin/env python
'''
Written by Kong Xiaolu and CBIG under MIT license:
https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
'''
import os
import numpy as | np
import torch
import CBIG_pMFM_basic_functions as fc
def CBIG_mfm_test_desikan_main(gpu_index=0):
    '''
    This function is to implement the testing processes of mean field model.
    The objective function is the summation of FC correlation cost and FCD
    KS statistics cost.

    Args:
        gpu_index: index of gpu used for optimization
    Returns:
        None

    Note: the input directory (validation results) and output directory
    (test results) are hard-coded below rather than taken as arguments.
    '''
    input_path = '../output/rsfcpc2_rsfc/validation/'
    output_path = '../output/rsfcpc2_rsfc/test/'
    if not os.path.isdir(output_path):
        os.makedirs(output_path)
    torch.cuda.set_device(gpu_index)
    # Fixed seed for reproducible simulations.
    torch.cuda.manual_seed(1)
    n_set = 100
    n_dup = 10
    n_node = 68
    # Column layout per candidate: rows 0-1 are (seed index, column index)
    # added below, rows 2-7 carry costs, row 8 onward holds the 3*n_node+1
    # model parameters.  The initial zero column is dropped after the loop.
    vali_raw_all = np.zeros((3 * n_node + 1 + 8, 1))
    print('Get data')
    for i in range(1, 11):
        load_file = 'random_seed_' + str(i) + '.csv'
        load_path = os.path.join(input_path, load_file)
        xmin = fc.csv_matrix_read(load_path)
        index_mat = np.zeros((2, xmin.shape[1]))
        index_mat[0, :] = i
        index_mat[1, :] = np.arange(xmin.shape[1])
        xmin = np.concatenate((index_mat, xmin), axis=0)
        vali_raw_all = np.concatenate((vali_raw_all, xmin), axis=1)
    vali_raw_all = vali_raw_all[:, 1:]
    # Sort candidates by validation total cost (row 7), ascending.
    vali_index = np.argsort(vali_raw_all[7, :])
    vali_sort_all = vali_raw_all[:, vali_index]
    vali_sel_num = 10
    i = 0
    vali_sel = np.zeros((vali_raw_all.shape[0], vali_sel_num))
    p = 0
    p_set = np.zeros(vali_sel_num)
    print('select data')
    # Greedily pick the best candidates whose parameter vectors are not
    # near-duplicates (pairwise correlation > 0.98 on all three parameter
    # groups) of an already-selected candidate.
    while i < vali_sel_num and p < vali_raw_all.shape[1]:
        corr_t = np.zeros(vali_sel_num, dtype=bool)
        corr_tr = np.zeros((vali_sel_num, 3))
        for j in range(vali_sel_num):
            w_corr = np.corrcoef(vali_sel[8:8 + n_node, j:j + 1].T,
                                 vali_sort_all[8:8 + n_node, p:p + 1].T)
            i_corr = np.corrcoef(
                vali_sel[8 + n_node:8 + 2 * n_node, j:j + 1].T,
                vali_sort_all[8 + n_node:8 + 2 * n_node, p:p + 1].T)
            s_corr = np.corrcoef(vali_sel[9 + 2 * n_node:, j:j + 1].T,
                                 vali_sort_all[9 + 2 * n_node:, p:p + 1].T)
            corr_tr[j, 0] = w_corr[0, 1]
            corr_tr[j, 1] = i_corr[0, 1]
            corr_tr[j, 2] = s_corr[0, 1]
        for k in range(vali_sel_num):
            corr_t[k] = (corr_tr[k, :] > 0.98).all()
        if not corr_t.any():
            vali_sel[:, i] = vali_sort_all[:, p]
            p_set[i] = p
            i += 1
        p += 1
    # Rows 8-10 will hold the test-set mean FC / FCD / total costs.
    result_save = np.zeros((3 * n_node + 1 + 11, vali_sel_num))
    result_save[0:8, :] = vali_sel[0:8, :]
    result_save[11:, :] = vali_sel[8:, :]
    print('Start testing')
    for j in range(vali_sel_num):
        test_cost = np.zeros((3, n_set))
        # NOTE(review): single-iteration loop -- presumably kept so more
        # repetitions can be enabled by raising the range; confirm.
        for k in range(1):
            arx = np.tile(vali_sel[8:, j:j + 1], [1, n_set])
            total_cost, fc_cost, fcd_cost = fc.CBIG_combined_cost_test(
                arx, n_dup)
            test_cost[0, n_set * k:n_set * (k + 1)] = fc_cost
            test_cost[1, n_set * k:n_set * (k + 1)] = fcd_cost
            test_cost[2, n_set * k:n_set * (k + 1)] = total_cost
        test_file = os.path.join(output_path,
                                 'test_num_' + str(j + 1) + '.csv')
        np.savetxt(test_file, test_cost, delimiter=',')
        result_save[8, j] = np.nanmean(test_cost[0, :])
        result_save[9, j] = np.nanmean(test_cost[1, :])
        result_save[10, j] = np.nanmean(test_cost[2, :])
        print('**************** finish top ' + str(j + 1) +
              ' test ****************')
    test_file_all = os.path.join(output_path, 'test_all.csv')
    np.savetxt(test_file_all, result_save, delimiter=',')
if __name__ == '__main__':
    # Run the test stage on GPU 0 when invoked as a script.
    CBIG_mfm_test_desikan_main(0)
|
#!/ | usr/bin/env python3
# Uses the wikipedia module to define words on the command line
import wikipedia
import sys
sys.argv.pop(0)  # drop the script name; the remaining argv entries are words/flags

# Print a Wikipedia summary for every non-flag argument.  If "-full"
# appears anywhere on the command line, print the whole summary instead
# of a single sentence.
for word in sys.argv:
    try:
        if word[0] != '-':
            if '-full' in sys.argv:
                print(wikipedia.summary(word))
            else:
                print(wikipedia.summary(word, sentences=1))
    # fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # catch Exception so Ctrl-C still works.  wikipedia raises
    # DisambiguationError/PageError (and network errors) for bad words.
    except Exception:
        print("* Unknown word: " + word)
|
import os
import logging
# standardize use of logging module in fs-drift
def start_log(prefix, verbosity=0):
    """Create and return a logger writing to both stderr and a file.

    The logger level is DEBUG when the LOGLEVEL_DEBUG environment variable
    is set or verbosity is non-zero, INFO otherwise.  Output goes to a
    StreamHandler capped at INFO and to /var/tmp/fsd.<prefix>.log (which
    receives DEBUG records when the logger allows them).

    :param prefix: logger name; also embedded in the format and file name
    :param verbosity: any non-zero value enables DEBUG-level logging
    :return: the configured logging.Logger

    NOTE(review): calling this twice with the same prefix attaches
    duplicate handlers to the same logger -- confirm callers invoke it
    only once per prefix.
    """
    log = logging.getLogger(prefix)
    # idiom fix: identity comparison with None ("is not", not "!=")
    if os.getenv('LOGLEVEL_DEBUG') is not None or verbosity != 0:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)
    log_format = prefix + ' %(asctime)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(log_format)
    h = logging.StreamHandler()
    h.setFormatter(formatter)
    # console output is deliberately capped at INFO even in DEBUG mode
    h.setLevel(logging.INFO)
    log.addHandler(h)
    h2 = logging.FileHandler('/var/tmp/fsd.%s.log' % prefix)
    h2.setFormatter(formatter)
    log.addHandler(h2)
    log.info('starting log')
    return log
# Assumptions: the logger has at most one FileHandler attached, and the
# StreamHandler's level must never be altered here.
def change_loglevel(logger, loglevel):
    """Set the level of the logger's FileHandler(s) to ``loglevel``.

    Console (StreamHandler) verbosity is deliberately left untouched;
    only file output changes.
    """
    for handler in logger.handlers:
        if not isinstance(handler, logging.FileHandler):
            continue
        logger.info('changing log level of FileHandler to %s' % loglevel)
        handler.setLevel(loglevel)
if __name__ == '__main__':
    # Smoke test: exercise every level and the FileHandler level switch.
    log = start_log('fsd_log_test')
    log.error('level %s', 'error')
    # fix: Logger.warn is a deprecated alias of Logger.warning
    log.warning('level %s', 'warn')
    log.info('level %s', 'info')
    log.debug('level %s', 'debug')
    change_loglevel(log, logging.DEBUG)
    log.debug('level %s', 'debug - should see this one in the log file /var/tmp/fsd.fsd_log_test.log')
    change_loglevel(log, logging.INFO)
    log.debug('level %s', 'debug - should NOT see this one there')
|
# DeepSpeed handles backward internally
*([dict(name="backward", args=(ANY, None, None))] if not using_deepspeed else []),
dict(name="Callback.on_after_backward", args=(trainer, model)),
dict(name="on_after_backward"),
# `manual_backward` calls the previous 3
dict(name="manual_backward", args=(ANY,)),
dict(name="Callback.on_before_optimizer_step", args=(trainer, model, ANY, 0)),
dict(name="on_before_optimizer_step", args=(ANY, 0)),
dict(name="training_step", args=(ANY, i)),
dict(name="training_step_end", args=(dict(loss=ANY),)),
dict(name="Callback.on_train_batch_end", args=(trainer, model, dict(loss=ANY), ANY, i, 0)),
dict(name="on_train_batch_end", args=(dict(loss=ANY), ANY, i, 0)),
dict(name="Callback.on_batch_end", args=(trainer, model)),
]
)
return out
@staticmethod
def _eval_epoch(fn, trainer, model, batches, key, device=torch.device("cpu")):
outputs = {key: ANY}
return [
dict(name="Callback.on_epoch_start", args=(trainer, model)),
dict(name="on_epoch_start"),
dict(name=f"Callback.on_{fn}_epoch_start", args=(trainer, model)),
dict(name=f"on_{fn}_epoch_start"),
*HookedModel._eval_batch(fn, trainer, model, batches, key, device=device),
dict(name=f"{fn}_epoch_end", args=([outputs] * batches,)),
dict(name=f"Callback.on_{fn}_epoch_end", args=(trainer, model)),
dict(name=f"on_{fn}_epoch_end"),
dict(name="Callback.on_epoch_end", args=(trainer, model)),
dict(name="on_epoch_end"),
]
@staticmethod
def _eval_batch(fn, trainer, model, batches, key, device=torch.device("cpu")):
out = []
outputs = {key: ANY}
for i in rang | e(batches):
out.extend(
[
| dict(name="on_before_batch_transfer", args=(ANY, 0)),
dict(name="transfer_batch_to_device", args=(ANY, device, 0)),
dict(name="on_after_batch_transfer", args=(ANY, 0)),
# TODO: `{,Callback}.on_batch_{start,end}`
dict(name=f"Callback.on_{fn}_batch_start", args=(trainer, model, ANY, i, 0)),
dict(name=f"on_{fn}_batch_start", args=(ANY, i, 0)),
dict(name="forward", args=(ANY,)),
dict(name=f"{fn}_step", args=(ANY, i)),
dict(name=f"{fn}_step_end", args=(outputs,)),
dict(name=f"Callback.on_{fn}_batch_end", args=(trainer, model, outputs, ANY, i, 0)),
dict(name=f"on_{fn}_batch_end", args=(outputs, ANY, i, 0)),
]
)
return out
@staticmethod
def _predict_batch(trainer, model, batches):
out = []
for i in range(batches):
out.extend(
[
dict(name="on_before_batch_transfer", args=(ANY, 0)),
dict(name="transfer_batch_to_device", args=(ANY, torch.device("cpu"), 0)),
dict(name="on_after_batch_transfer", args=(ANY, 0)),
# TODO: `{,Callback}.on_batch_{start,end}`
dict(name="Callback.on_predict_batch_start", args=(trainer, model, ANY, i, 0)),
dict(name="on_predict_batch_start", args=(ANY, i, 0)),
dict(name="forward", args=(ANY,)),
dict(name="predict_step", args=(ANY, i)),
# TODO: `predict_step_end`
dict(name="Callback.on_predict_batch_end", args=(trainer, model, ANY, ANY, i, 0)),
dict(name="on_predict_batch_end", args=(ANY, ANY, i, 0)),
]
)
return out
@pytest.mark.parametrize(
"kwargs",
[
{},
# these precision plugins modify the optimization flow, so testing them explicitly
pytest.param(dict(gpus=1, precision=16, plugins="deepspeed"), marks=RunIf(deepspeed=True, min_gpus=1)),
pytest.param(dict(gpus=1, precision=16, amp_backend="native"), marks=RunIf(amp_native=True, min_gpus=1)),
pytest.param(dict(gpus=1, precision=16, amp_backend="apex"), marks=RunIf(amp_apex=True, min_gpus=1)),
],
)
@pytest.mark.parametrize("automatic_optimization", (True, False))
def test_trainer_model_hook_system_fit(tmpdir, kwargs, automatic_optimization):
called = []
class TestModel(HookedModel):
def __init__(self, *args):
super().__init__(*args)
self.automatic_optimization = automatic_optimization
def training_step(self, batch, batch_idx):
if self.automatic_optimization:
return super().training_step(batch, batch_idx)
loss = self.step(batch[0])
opt = self.optimizers()
opt.zero_grad()
self.manual_backward(loss)
opt.step()
return {"loss": loss}
model = TestModel(called)
callback = HookedCallback(called)
train_batches = 2
val_batches = 2
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=1,
limit_train_batches=train_batches,
limit_val_batches=val_batches,
progress_bar_refresh_rate=0,
weights_summary=None,
callbacks=[callback],
**kwargs,
)
assert called == [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
]
trainer.fit(model)
saved_ckpt = {
"callbacks": ANY,
"epoch": 1,
"global_step": train_batches,
"lr_schedulers": ANY,
"optimizer_states": ANY,
"pytorch-lightning_version": __version__,
"state_dict": ANY,
}
if kwargs.get("amp_backend") == "native":
saved_ckpt["native_amp_scaling_state"] = ANY
elif kwargs.get("amp_backend") == "apex":
saved_ckpt["amp_scaling_state"] = ANY
device = torch.device("cuda:0" if "gpus" in kwargs else "cpu")
expected = [
dict(name="Callback.on_init_start", args=(trainer,)),
dict(name="Callback.on_init_end", args=(trainer,)),
dict(name="prepare_data"),
dict(name="configure_callbacks"),
dict(name="Callback.on_before_accelerator_backend_setup", args=(trainer, model)),
# DeepSpeed needs the batch size to figure out throughput logging
*([dict(name="train_dataloader")] if kwargs.get("plugins") == "deepspeed" else []),
dict(name="Callback.setup", args=(trainer, model), kwargs=dict(stage="fit")),
dict(name="setup", kwargs=dict(stage="fit")),
dict(name="configure_sharded_model"),
dict(name="Callback.on_configure_sharded_model", args=(trainer, model)),
# DeepSpeed skips initializing optimizers here as they are handled via config
*([dict(name="configure_optimizers")] if kwargs.get("plugins") != "deepspeed" else []),
dict(name="Callback.on_fit_start", args=(trainer, model)),
dict(name="on_fit_start"),
# TODO: explore whether DeepSpeed can have the same flow for optimizers
# DeepSpeed did not find any optimizer in the config so they are loaded here
*([dict(name="configure_optimizers")] if kwargs.get("plugins") == "deepspeed" else []),
dict(name="Callback.on_pretrain_routine_start", args=(trainer, model)),
dict(name="on_pretrain_routine_start"),
dict(name="Callback.on_pretrain_routine_end", args=(trainer, model)),
dict(name="on_pretrain_routine_end"),
dict(name="Callback.on_sanity_check_start", args=(trainer, model)),
dict(name="on_val_dataloader"),
dict(name="val_dataloader"),
dict(name="train", args=(False,)),
dict(name="on_validation_model_eval"),
dict(name="zero_grad"),
dict(name="Callback.on_validation_start", args=(trainer, model)),
dict(name="on_validation_start"),
|
callable(func, 'deprecated')
@functools.wraps(func)
def new_func(*args, **kwargs):
logging.warning(
'From %s: %s (from %s) is deprecated and will be removed '
'after %s.\n'
'Instructions for updating:\n%s',
_call_location(), decorator_utils.get_qualified_name(func),
func.__module__, date, instructions)
return func(*args, **kwargs)
new_func.__doc__ = _add_deprecated_function_notice_to_docstring(
func.__doc__, date, instructions)
return new_func
return deprecated_wrapper
# Describes one deprecated argument: its zero-based position in the
# signature, whether a non-warning "ok" value exists, and that value.
DeprecatedArgSpec = collections.namedtuple(
    'DeprecatedArgSpec',
    ['position', 'has_ok_value', 'ok_value'])
def deprecated_args(date, instructions, *deprecated_arg_names_or_tuples):
  """Decorator for marking specific function arguments as deprecated.

  This decorator logs a deprecation warning whenever the decorated function is
  called with the deprecated argument. It has the following format:

    Calling <function> (from <module>) with <arg> is deprecated and will be
    removed after <date>. Instructions for updating:
      <instructions>

  <function> will include the class name if it is a method.
  It also edits the docstring of the function: ' (deprecated arguments)' is
  appended to the first line of the docstring and a deprecation notice is
  prepended to the rest of the docstring.

  Args:
    date: String. The date the function is scheduled to be removed. Must be
      ISO 8601 (YYYY-MM-DD).
    instructions: String. Instructions on how to update code using the
      deprecated function.
    *deprecated_arg_names_or_tuples: String. or 2-Tuple(String,
      [ok_vals]).  The string is the deprecated argument name.
      Optionally, an ok-value may be provided.  If the user provided
      argument equals this value, the warning is suppressed.

  Returns:
    Decorated function or method.

  Raises:
    ValueError: If date is not in ISO 8601 format, instructions are
      empty, the deprecated arguments are not present in the function
      signature, or the second element of a deprecated_tuple is not a
      list.
  """
  _validate_deprecation_args(date, instructions)
  if not deprecated_arg_names_or_tuples:
    raise ValueError('Specify which argument is deprecated.')
  def _get_arg_names_to_ok_vals():
    """Returns a dict mapping arg_name to DeprecatedArgSpec w/o position."""
    d = {}
    for name_or_tuple in deprecated_arg_names_or_tuples:
      if isinstance(name_or_tuple, tuple):
        d[name_or_tuple[0]] = DeprecatedArgSpec(-1, True, name_or_tuple[1])
      else:
        d[name_or_tuple] = DeprecatedArgSpec(-1, False, None)
    return d
  def _get_deprecated_positional_arguments(names_to_ok_vals, arg_spec):
    """Builds a dictionary from deprecated arguments to their spec.

    Returned dict is keyed by argument name.
    Each value is a DeprecatedArgSpec with the following fields:
      position: The zero-based argument position of the argument
        within the signature.  None if the argument isn't found in
        the signature.
      ok_values: Values of this argument for which warning will be
        suppressed.

    Args:
      names_to_ok_vals: dict from string arg_name to a list of values,
        possibly empty, which should not elicit a warning.
      arg_spec: Output from inspect.getargspec on the called function.

    Returns:
      Dictionary from arg_name to DeprecatedArgSpec.
    """
    arg_name_to_pos = dict(
        (name, pos) for (pos, name) in enumerate(arg_spec.args))
    deprecated_positional_args = {}
    for arg_name, spec in iter(names_to_ok_vals.items()):
      if arg_name in arg_name_to_pos:
        pos = arg_name_to_pos[arg_name]
        deprecated_positional_args[arg_name] = DeprecatedArgSpec(
            pos, spec.has_ok_value, spec.ok_value)
    return deprecated_positional_args
  def deprecated_wrapper(func):
    """Deprecation decorator."""
    decorator_utils.validate_callable(func, 'deprecated_args')
    deprecated_arg_names = _get_arg_names_to_ok_vals()
    # NOTE(review): inspect.getargspec was removed in Python 3.11; this code
    # presumably targets Python 2 / early 3 -- confirm before porting.
    arg_spec = inspect.getargspec(func)
    deprecated_positions = _get_deprecated_positional_arguments(
        deprecated_arg_names, arg_spec)
    is_varargs_deprecated = arg_spec.varargs in deprecated_arg_names
    is_kwargs_deprecated = arg_spec.keywords in deprecated_arg_names
    # Every requested name must be a positional arg, *varargs or **kwargs.
    if (len(deprecated_positions) + is_varargs_deprecated + is_kwargs_deprecated
        != len(deprecated_arg_names_or_tuples)):
      known_args = arg_spec.args + [arg_spec.varargs, arg_spec.keywords]
      missing_args = [arg_name for arg_name in deprecated_arg_names
                      if arg_name not in known_args]
      raise ValueError('The following deprecated arguments are not present '
                       'in the function signature: %s. '
                       'Found next arguments: %s.' % (missing_args, known_args))
    @functools.wraps(func)
    def new_func(*args, **kwargs):
      """Deprecation wrapper."""
      invalid_args = []
      named_args = inspect.getcallargs(func, *args, **kwargs)
      # Deprecated positional arguments actually supplied positionally.
      for arg_name, spec in iter(deprecated_positions.items()):
        if (spec.position < len(args) and
            not (spec.has_ok_value and
                 named_args[arg_name] == spec.ok_value)):
          invalid_args.append(arg_name)
      if is_varargs_deprecated and len(args) > len(arg_spec.args):
        invalid_args.append(arg_spec.varargs)
      if is_kwargs_deprecated and kwargs:
        invalid_args.append(arg_spec.keywords)
      # Deprecated arguments supplied by keyword.
      for arg_name in deprecated_arg_names:
        if (arg_name in kwargs and
            not (deprecated_positions[arg_name].has_ok_value and
                 (named_args[arg_name] ==
                  deprecated_positions[arg_name].ok_value))):
          invalid_args.append(arg_name)
      for arg_name in invalid_args:
        logging.warning(
            'From %s: calling %s (from %s) with %s is deprecated and will '
            'be removed after %s.\nInstructions for updating:\n%s',
            _call_location(), decorator_utils.get_qualified_name(func),
            func.__module__, arg_name, date, instructions)
      return func(*args, **kwargs)
    new_func.__doc__ = _add_deprecated_arg_notice_to_docstring(
        func.__doc__, date, instructions)
    return new_func
  return deprecated_wrapper
def deprecated_arg_values(date, instructions, **deprecated_kwargs):
"""Decorator for marking specific function argument values as deprecated.
This decorator logs a deprecation warning whenever the decorated function is
called with the deprecated argument values. It has the following format:
Calling <function> (from <module>) with <arg>=<value> is deprecated and
will be removed after <date>. Instructions for updating:
<instructions>
<function> will include the class name if it is a method.
It also edits the do | cstring of the function: ' (deprecated arguments)' is
appended to the first line of the docstring and a deprecation notice is
prepended to the rest of the docstring.
Args:
date: String. The date the function is scheduled to be removed. Must be
ISO 8601 (YYYY-MM-DD).
ins | tructions: String. Instructions on how to update code using the
deprecated function.
**deprecated_kwargs: The deprecated argument values.
Returns:
Decorated function or method.
Raises:
ValueError: If date is not in ISO 8601 format, or instructions are empty.
"""
_validate_deprecation_args(date, instructions)
if not deprecated_kwargs:
raise ValueError('Specify which argument values are deprecated.')
def deprecated_wrapper(func):
"""Deprecation decorator."""
decorator_utils.validate_callable(func, 'deprecated_arg_values')
@functools.wraps(func)
def new_func(*args, **kwargs):
"""Deprecation wrapper."""
named_args = inspect.getcallargs(func, *args, **kwargs)
for arg_name, arg_value in deprecated_kwargs.items():
if arg_name in named_args and named_args[arg_name] == arg_value:
logging.warning(
'From %s: calling %s (from %s) with %s=%s is deprecated and will '
'be removed after %s.\nInstructions for u |
__all__ = ["Transition"]
class Transition(object):
    """A transition of a finite-state machine between two states over a
    word/suffix pair."""

    def __init__(self, startState, nextState, word, suffix, marked):
        """Initialise the transition.

        Bug fix: the ``marked`` argument was previously ignored and the
        attribute was always set to False; it now honors the caller's value.
        """
        self.startState = startState
        self.nextState = nextState
        self.word = word
        self.suffix = suffix
        self.marked = marked

    def similarTransitions(self, transitions):
        """Yield every transition in ``transitions`` sharing this one's
        start and next states."""
        for transition in transitions:
            if (self.startState == transition.startState and
                    self.nextState == transition.nextState):
                yield transition
from unittest import TestCase
# Location of the example scripts, relative to this test module.
EXAMPLES_PATH = '../examples'
# Example numbers excluded from the generated test suite.
SKIPPED_EXAMPLES = {472, 473, 477}
|
def _set_test_class():
    # Dynamically build a TestCase class ("TestByExamples") with one test
    # method per example script under EXAMPLES_PATH; each generated test
    # simply imports (and thereby executes) its script.
    # NOTE(review): the `imp` module is deprecated and removed in
    # Python 3.12 -- migrating to importlib would be required there.
    import re
    from imp import load_module, find_module, PY_SOURCE
    from pathlib import Path
    def _load_module(name, file, pathname, description):
        # Execute the module, always closing the file handle afterwards.
        try:
            load_module(name, file, pathname, description)
        finally:
            if file:
                file.close()
    def make_method(module_name, module_tuple):
        # Binds module_name/module_tuple per call, avoiding the
        # late-binding-closure pitfall in the loop below.
        def _m(self):
            print('Running: {}'.format(module_name))
            _load_module(module_name, *module_tuple)
        return _m
    # Load the shared "solutions" helper module first.
    sols_module_name = 'solutions'
    _load_module(sols_module_name, *find_module(sols_module_name, [EXAMPLES_PATH]))
    # Example files are named like "123. some name.py".
    pat_example = re.compile(r'\d+\. .+\.py')
    attrs = {}
    for i, example_path in enumerate(Path(EXAMPLES_PATH).iterdir()):
        if not re.match(pat_example, example_path.name):
            continue
        module_name = example_path.stem
        if int(module_name.split('. ')[0]) in SKIPPED_EXAMPLES:
            continue
        module_tuple = open(str(example_path), 'rb'), example_path.stem, ('.py', 'rb', PY_SOURCE)
        # Sanitize the file name into a valid test_* identifier.
        func_name = module_name.replace(' ', '_').replace('.', '').lower()
        func_name = 'test_' + ''.join(c for c in func_name if c.isalnum() or c == '_')
        attrs[func_name] = make_method(module_name, module_tuple)
    class_name = 'TestByExamples'
    # Publish the generated class at module level so unittest discovers it.
    globals()[class_name] = type(class_name, (TestCase,), attrs)
_set_test_class()
# Remove the helper so test discovery only sees the generated class.
del _set_test_class
|
# coding=utf- | 8
import requests
def download(url):
    """GET *url* and return a ``(content, headers)`` tuple."""
    response = requests.get(url)  # TODO add retries
    return response.content, response.headers
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 201 | 7-03 | -19 02:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the boolean "exclusive" flag
    # (default False) to the pos app's ItemIngredient model.
    dependencies = [
        ('pos', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='itemingredient',
            name='exclusive',
            field=models.BooleanField(default=False),
        ),
    ]
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http:// | www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
######## | ########################################################################
from pyflink.table import EnvironmentSettings
from pyflink.testing.test_case_utils import PythonAPICompletenessTestCase, PyFlinkTestCase
class EnvironmentSettingsCompletenessTests(PythonAPICompletenessTestCase, PyFlinkTestCase):
    """
    Checks that the Python :class:`EnvironmentSettings` API stays in sync
    with its Java counterpart `org.apache.flink.table.api.EnvironmentSettings`.
    """

    @classmethod
    def python_class(cls):
        return EnvironmentSettings

    @classmethod
    def java_class(cls):
        return "org.apache.flink.table.api.EnvironmentSettings"

    @classmethod
    def excluded_methods(cls):
        # getPlanner/getExecutor are internal and intentionally not exposed.
        return {'getPlanner', 'getExecutor'}
class EnvironmentSettingsBuilderCompletenessTests(PythonAPICompletenessTestCase, PyFlinkTestCase):
    """
    Checks that the Python :class:`EnvironmentSettings.Builder` API stays in
    sync with Java `org.apache.flink.table.api.EnvironmentSettings$Builder`.
    """

    @classmethod
    def python_class(cls):
        return EnvironmentSettings.Builder

    @classmethod
    def java_class(cls):
        return "org.apache.flink.table.api.EnvironmentSettings$Builder"
if __name__ == '__main__':
    import unittest
    try:
        # Prefer the XML test runner (JUnit-style reports for CI) when available.
        import xmlrunner
        testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
    except ImportError:
        # xmlrunner not installed: fall back to the default text runner.
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
|
import logging
logger = logging.getLogger(__name__)
class Singleton(type):
    """Metaclass caching one instance per class.

    Calling the class returns the cached instance when ``keep`` is true
    (creating and caching it on first use); ``keep=False`` always builds a
    fresh, uncached instance.

    NOTE(review): ``keep`` is the first positional parameter of ``__call__``,
    so ``MyClass(x)`` binds ``x`` to ``keep`` instead of forwarding it to
    ``__init__`` -- confirm callers pass constructor arguments by keyword.
    """
    def __init__(cls, name, bases, dict):
        super(Singleton, cls).__init__(name, bases, dict)
        cls.instance = None
    def __call__(cls, keep=True, *args, **kwargs):
        # Lazy %-style logging args: the message is only rendered at DEBUG.
        logger.debug("Handle singleton instance for %s with args (keep=%s): %s, %s", cls, keep, args, kwargs)
        if not keep:
            logger.debug("Return new singleton instance for %s with args (keep=%s): %s, %s", cls, keep, args, kwargs)
            return super(Singleton, cls).__call__(*args, **kwargs)
        if cls.instance is None:
            logger.debug("Return and keep singleton instance for %s with args (keep=%s): %s, %s", cls, keep, args, kwargs)
            cls.instance = super(Singleton, cls).__call__(*args, **kwargs)
        else:
            logger.debug("Return cached singleton instance for %s with args (keep=%s): %s, %s", cls, keep, args, kwargs)
        # fix: removed an unreachable trailing `return None` -- every path
        # above already returns.
        return cls.instance
|
from unittest import TestCase
import validictory
class Tes | tItems(TestCase):
def test_property(self):
schema = {
"type": "object",
"properties": {
"foo": {
"default": "bar"
},
"baz": {
"type": "integer"
}
}
}
data = | {'baz': 2}
result = validictory.validate(data, schema, required_by_default=False)
self.assertEqual(result, {"foo": "bar", "baz": 2})
def test_item(self):
schema = {
'type': 'object',
'type': 'array',
'items': [
{
'type': 'any'
},
{
'type': 'string'
},
{
'default': 'baz'
},
]
}
data = ['foo', 'bar']
result = validictory.validate(data, schema, required_by_default=False)
self.assertEqual(result, ["foo", "bar", "baz"])
|
import os
from airtng_flask.config import config_env_files
from flask import Flask
from flask_bcrypt import Bcrypt
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
db = SQL | Alchemy()
bcrypt = Bcrypt()
login_manager = LoginManager()
def create_app(config_name='development', p_db=db, p_bcrypt=bcrypt, p_login_manager=login_manager):
    """Build and configure a Flask application instance.

    The extension objects default to the module-level singletons but can be
    injected for testing.
    """
    new_app = Flask(__name__)
    config_app(config_name, new_app)
    new_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    # Bind each shared extension to this application.
    for extension in (p_db, p_bcrypt, p_login_manager):
        extension.init_app(new_app)
    p_login_manager.login_view = 'register'
    return new_app
def config_app(config_name, new_app):
    """Load the configuration object registered under ``config_name``."""
    config_object = config_env_files[config_name]
    new_app.config.from_object(config_object)
app = create_app()
# Imported at the bottom so the views module can import `app`.
# NOTE(review): presumably avoids a circular import -- confirm.
import airtng_flask.views
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 2 10:56:34 2016
@author: | jmjj (Jari Juopperi, jmjj@juopperi.org) |
"""
from .main import *
|
_editor'))
context.update(trans)
kwargs.update(trans)
record = None
if model and kwargs.get('res_id'):
record = request.registry[model].browse(cr, uid, kwargs.get('res_id'), context)
kwargs.update(content=record and getattr(record, field) or "")
return request.render(kwargs.get("template") or "web_editor.FieldTextHtml", kwargs, uid=request.uid)
#------------------------------------------------------
# Backend html field in inline mode
#------------------------------------------------------
@http.route('/web_editor/field/html/inline', type='http', auth="user")
def FieldTextHtmlInline(self, model=None, res_id=None, field=None, callback=None, **kwargs):
kwargs['inline_mode'] = True
kwargs['dont_load_assets'] = not kwargs.get('enable_editor') and not kwargs.get('edit_translations')
return self.FieldTextHtml(model, res_id, field, callback, **kwargs)
#------------------------------------------------------
# convert font into picture
#------------------------------------------------------
    @http.route([
        '/web_editor/font_to_img/<icon>',
        '/web_editor/font_to_img/<icon>/<color>',
        '/web_editor/font_to_img/<icon>/<color>/<int:size>',
        '/web_editor/font_to_img/<icon>/<color>/<int:size>/<int:alpha>',
        ], type='http', auth="none")
    def export_icon_to_png(self, icon, color='#000', size=100, alpha=255, font='/web/static/lib/fontawesome/fonts/fontawesome-webfont.ttf'):
        """ This method converts an unicode character to an image (using Font
        Awesome font by default) and is used only for mass mailing because
        custom fonts are not supported in mail.
        :param icon : decimal encoding of unicode character
        :param color : RGB code of the color
        :param size : Pixels in integer
        :param alpha : transparency of the image from 0 to 255
        :param font : font path
        :returns PNG image converted from given font
        """
        # Make sure we have at least size=1
        size = max(1, size)
        # Initialize font
        addons_path = http.addons_manifest['web']['addons_path']
        font_obj = ImageFont.truetype(addons_path + font, size)
        # if received character is not a number, keep old behaviour (icon is character)
        # NOTE: unichr is Python 2 only.
        icon = unichr(int(icon)) if icon.isdigit() else icon
        # Determine the dimensions of the icon by drawing it on a scratch
        # image and reading back the bounding box.
        image = Image.new("RGBA", (size, size), color=(0, 0, 0, 0))
        draw = ImageDraw.Draw(image)
        boxw, boxh = draw.textsize(icon, font=font_obj)
        draw.text((0, 0), icon, font=font_obj)
        left, top, right, bottom = image.getbbox()
        # Create an alpha mask sized to the glyph, shifted so the glyph
        # starts at the origin.
        imagemask = Image.new("L", (boxw, boxh), 0)
        drawmask = ImageDraw.Draw(imagemask)
        drawmask.text((-left, -top), icon, font=font_obj, fill=alpha)
        # Create a solid color image and apply the mask
        if color.startswith('rgba'):
            # Drop the alpha channel: 'rgba(r,g,b,a)' -> 'rgb(r,g,b)'.
            color = color.replace('rgba', 'rgb')
            color = ','.join(color.split(',')[:-1])+')'
        iconimage = Image.new("RGBA", (boxw, boxh), color)
        iconimage.putalpha(imagemask)
        # Create output image, preserving the glyph's vertical offset.
        outimage = Image.new("RGBA", (boxw, size), (0, 0, 0, 0))
        outimage.paste(iconimage, (left, top))
        # output image as PNG with a one-week browser cache.
        output = io.BytesIO()
        outimage.save(output, format="PNG")
        response = werkzeug.wrappers.Response()
        response.mimetype = 'image/png'
        response.data = output.getvalue()
        response.headers['Cache-Control'] = 'public, max-age=604800'
        response.headers['Access-Control-Allow-Origin'] = '*'
        response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
        response.headers['Connection'] = 'close'
        response.headers['Date'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime())
        response.headers['Expires'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime(time.time()+604800*60))
        return response
#------------------------------------------------------
# add attachment (images or link)
#------------------------------------------------------
    @http.route('/web_editor/attachment/add', type='http', auth='user', methods=['POST'])
    def attach(self, func, upload=None, url=None, disable_optimization=None, **kwargs):
        """Create ir.attachment records from uploaded images or from a URL,
        then report the results to the parent frame by invoking the
        JavaScript callback named ``func``.

        NOTE: Python 2 code (``except IOError, e``, ``unicode``,
        ``encode('base64')``).
        """
        # the upload argument doesn't allow us to access the files if more than
        # one file is uploaded, as upload references the first file
        # therefore we have to recover the files from the request object
        Attachments = request.registry['ir.attachment']  # registry for the attachment table
        uploads = []
        message = None
        if not upload: # no image provided, storing the link and the image name
            name = url.split("/").pop()                  # recover filename
            attachment_id = Attachments.create(request.cr, request.uid, {
                'name': name,
                'type': 'url',
                'url': url,
                'public': True,
                'res_model': 'ir.ui.view',
            }, request.context)
            uploads += Attachments.read(request.cr, request.uid, [attachment_id], ['name', 'mimetype', 'checksum', 'url'], request.context)
        else:  # images provided
            try:
                attachment_ids = []
                for c_file in request.httprequest.files.getlist('upload'):
                    data = c_file.read()
                    try:
                        image = Image.open(cStringIO.StringIO(data))
                        w, h = image.size
                        if w*h > 42e6: # Nokia Lumia 1020 photo resolution
                            raise ValueError(
                                u"Image size excessive, uploaded images must be smaller "
                                u"than 42 million pixel")
                        if not disable_optimization and image.format in ('PNG', 'JPEG'):
                            data = tools.image_save_for_web(image)
                    # Non-image files fail to parse; they are stored as-is.
                    except IOError, e:
                        pass
                    attachment_id = Attachments.create(request.cr, request.uid, {
                        'name': c_file.filename,
                        'datas': data.encode('base64'),
                        'datas_fname': c_file.filename,
                        'public': True,
                        'res_model': 'ir.ui.view',
                    }, request.context)
                    attachment_ids.append(attachment_id)
                uploads += Attachments.read(request.cr, request.uid, attachment_ids, ['name', 'mimetype', 'checksum', 'url'], request.context)
            except Exception, e:
                logger.exception("Failed to upload image to attachment")
                message = unicode(e)
        # Hand the created attachments (and any error) back to the opener.
        return """<script type='text/javascript'>
            window.parent['%s'](%s, %s);
        </script>""" % (func, json.dumps(uploads), json.dumps(message))
#------------------------------------------------------
# remove attachment (images or link)
#------------------------------------------------------
@http.route('/web_editor/attachment/remove', type='json', auth='user')
def remove(self, ids, **kwargs):
""" Removes a web-based image attachment if it is used by no view (template)
Returns a dict mapping attachments which would not be removed (if any)
mapped to the views preventing their removal
"""
cr, uid, context = request.cr, request.uid, request.context
Attachment = request.registry['ir.attachment']
Views = request.registry['ir.ui.view']
attachments_to_remove = []
# views blocking removal of the attachment
removal_blocked_by = {}
for attachment in Attachment.browse(cr, uid, ids, context=context):
# in-document URLs are html-escaped, a straight search will not
# find them
url = tools.html_escape(attachment.local_url)
ids = V |
a= None, sigma= None, recovery_rate= None, market_name= None):
self.time_horizon=0
self.recovery_rate = recovery_rate
# initiate
self.market_spread = None
self.eigenval_hist_gen= None
self.eigenvect_hist_gen= None
self.historical_transition_matrix = None
self.RN_migration_matrix=[]
self.spreads=[]
self.mu=mu
self.alpha=alpha
self.sigma=sigma
self.corr_matrix= None
self.fixed_seed = None
self.num_instrument = 0
self.pi_0=pi_0
self.market_name=market_name
        # Takes IR_model as input (or not) -- a method is also defined to fetch
        # the zero-coupon rates from an IR model.
        # Here everything only needs to be initialised empty: the point of the
        # constructor is that the object is immediately ready to use, and the
        # methods then set or read the attributes from outside.
def getMatrixJLT(self,t,T):
out = None
d = self.eigenval_hist_gen
if self.sigma !=0:
v = np.sqrt(self.alpha**2 - 2*d*self.sigma**2)
denominator = (v+self.alpha)*(np.exp(v*(T-t))-1)+2*v
A = (2*self.alpha*self.mu)/(self.sigma**2)*np.log((2*v*np.exp(0.5*(self.alpha+v)*(T-t)))/denominator)
B = - (2*d*(np.exp(v*(T-t))-1))/denominator
value = np.exp(A - B*self.risk_premium[t])
out = np.diag(value)
else:
temp = (self.risk_premium[t]+np.exp(-self.alpha*t))*(T-t) + 1/(self.alpha)*(np.exp(-self.alpha*T)-np.exp(-self.alpha*t))
value = np.exp(d*temp)
out = np.diag(value)
return out
def add_time_horizon(self,time_horizon):
"""
Method : add_time_horizon
Function : add the time horizon
Parameter :
1. time_horizon
Type : int
Function : correspond to the time horizon
"""
self.time_horizon = time_horizon
def get_spread(self,asset_data):
"""
Method : get_spread
Function : retrieve the spread from the pickle file
Parameter : None
"""
# read the market spread data ''of time 0''
market = asset_data.get_list_market(self.market_name)
spread_list = market.spread_list
col_index = market.col_index
row_index = market.row_index
self.market_spread = spread_list, col_index, row_index
def get_hist_transition_matrix(self, asset_data):
"""
Method : get_hist_transition_matrix
Function : retrieve the historical transition matrix from the pickle file and then deduce the generator matrix, its eigenvectors and its eigenvalues.
Parameter : None
"""
market = asset_data.get_list_market(self.market_name)
historical_transition_matrix = market.historical_transition_matrix
self.historical_transition_matrix = historical_transition_matrix
self.historical_generator_matrix = generator_matrix(self.historical_transition_matrix)
w, v = la.eig(self.historical_generator_matrix)
eigenval_hist_gen = w.real
eigenvect_hist_gen = (v.T).real
for l in range(len(eigenvect_hist_gen)):
eigenvect_hist_gen[l] = eigenvect_hist_gen[l]/norm(eigenvect_hist_gen[l])
eigenvect_hist_gen = eigenvect_hist_ge | n.T
self.eigenval_hist_gen= eigenval_hist_gen
self.eigenvect_hist_gen= eige | nvect_hist_gen
def calibrate_spread(self, asset_data, AAA_AA):
"""
Method : calibrate_spread
Function : calibrate the model on the market data of spread
Parameter :
1. asset_data
Type : instance of Asset_data class
Function : see class Asset_data for more details.
2. AAA_AA
Type : boolean
Function : if it is true, then only spreads of AAA and AA ratings are used for the calibration
"""
market = asset_data.get_list_market(self.market_name)
if self.mu is None:
self.mu = market.JLT_mu
if self.sigma is None:
self.sigma = market.JLT_sigma
if self.alpha is None:
self.alpha = market.JLT_alpha
if self.pi_0 is None:
self.pi_0 = market.JLT_pi
if self.recovery_rate is None:
self.recovery_rate = market.recovery_rate
spread_list, col_index, row_index = self.market_spread
def f(pi_0):
return function_optim(pi_0, self.alpha, self.mu, self.sigma, self.recovery_rate,
self.eigenvect_hist_gen, self.eigenval_hist_gen,
row_index, col_index, spread_list,AAA_AA)
bds = [(0.001,None)]
res = minimize(f,x0=2, bounds=bds )
self.pi_0 = res.x[0]
return self.pi_0
def calibrate_price(self, asset_data):
"""
Method : calibrate_price
Function : calibrate the model on the market data of bonds' price
Parameter :
1. asset_data
Type : instance of Asset_data class
Function : see class Asset_data for more details.
"""
market = asset_data.get_list_market(self.market_name)
if self.mu is None:
self.mu = market.JLT_mu
if self.sigma is None:
self.sigma = market.JLT_sigma
if self.alpha is None:
self.alpha = market.JLT_alpha
if self.pi_0 is None:
self.pi_0 = market.JLT_pi
if self.recovery_rate is None:
self.recovery_rate = market.recovery_rate
spread_list, col_index, row_index = self.market_spread
def f(pi_0):
return function_optim(pi_0, self.alpha, self.mu, self.sigma,
self.recovery_rate, self.eigenvect_hist_gen, self.eigenval_hist_gen,
row_index, col_index, spread_list)
res = minimize(f,x0=2)
self.pi_0 = res.x[0]
return self.pi_0
def generate_spreads_and_matrix(self):
"""
Method : generate_spreads_and_matrix
Function : generate the spreads and risk-neutral transition matrix with parameters in the model
Parameter : None
"""
self.spreads=[]
self.RN_migration_matrix=[]
dw = generator_correlated_variables(corr_matrix = self.corr_matrix, time_horizon = self.time_horizon, fixed_seed = self.fixed_seed)
# ===================================
# Generate CIR process
# ===================================
self.risk_premium=[self.pi_0]
for time_step in range(1,self.time_horizon+1):
dpi = self.alpha*(self.mu-self.risk_premium[-1]) + self.sigma*np.sqrt(self.risk_premium[-1])*dw[2,time_step-1]
self.risk_premium.append(max(0,self.risk_premium[-1] + dpi))
for t in range(self.time_horizon+1):
#une boucle de bas de temps
RN_generator_matrix_t = np.dot(np.dot(self.eigenvect_hist_gen, np.diag(self.risk_premium[t]*self.eigenval_hist_gen)), inv(self.eigenvect_hist_gen))
RN_migration_matrix_t = exp_matrix(RN_generator_matrix_t).astype('Float64')
self.RN_migration_matrix.append(RN_migration_matrix_t)
for t in range(self.time_horizon+1):
spread_T = []
for T in range(t+1,t+21):
spread_t_T = []
JLTm |
ntime, the shared library must be installed
# NOTE(review): for every GRPC_PYTHON_* flag read below, os.environ.get(...,
# False) returns the raw *string* when the variable is set, so any non-empty
# value -- including "0" or "false" -- enables the flag.
BUILD_WITH_SYSTEM_ZLIB = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_ZLIB',
                                        False)
# Export this variable to use the system installation of cares. You need to
# have the header files installed (in /usr/include/) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_CARES = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_CARES',
                                         False)
# For local development use only: This skips building gRPC Core and its
# dependencies, including protobuf and boringssl. This allows "incremental"
# compilation by first building gRPC Core using make, then building only the
# Python/Cython layers here.
#
# Note that this requires libboringssl.a in the libs/{dbg,opt}/ directory, which
# may require configuring make to not use the system openssl implementation:
#
#   make HAS_SYSTEM_OPENSSL_ALPN=0
#
# TODO(ericgribkoff) Respect the BUILD_WITH_SYSTEM_* flags alongside this option
USE_PREBUILT_GRPC_CORE = os.environ.get(
    'GRPC_PYTHON_USE_PREBUILT_GRPC_CORE', False)
# If this environmental variable is set, GRPC will not try to be compatible with
# libc versions old than the one it was compiled against.
DISABLE_LIBC_COMPATIBILITY = os.environ.get('GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY', False)
# Environment variable to determine whether or not to enable coverage analysis
# in Cython modules.
ENABLE_CYTHON_TRACING = os.environ.get(
    'GRPC_PYTHON_ENABLE_CYTHON_TRACING', False)
# Environment variable specifying whether or not there's interest in setting up
# documentation building.
ENABLE_DOCUMENTATION_BUILD = os.environ.get(
    'GRPC_PYTHON_ENABLE_DOCUMENTATION_BUILD', False)
def check_linker_need_libatomic():
    """Test if linker on system needs libatomic.

    Compiles a tiny C++11 probe using ``std::atomic<int64_t>`` with the
    system ``cc``; a non-zero exit status means ``-latomic`` must be linked
    explicitly.

    Returns:
        bool: True when ``-latomic`` is needed, False otherwise.
    """
    code_test = (b'#include <atomic>\n' +
                 b'int main() { return std::atomic<int64_t>{}; }')
    try:
        cc_test = subprocess.Popen(['cc', '-x', 'c++', '-std=c++11', '-'],
                                   stdin=PIPE,
                                   stdout=PIPE,
                                   stderr=PIPE)
    except OSError:
        # BUGFIX: a missing `cc` used to crash setup.py at import time with
        # FileNotFoundError. Skip the probe; the real compile step will
        # surface a missing-compiler error with a clear message.
        return False
    # communicate() feeds the probe on stdin and waits for termination, so
    # returncode is valid immediately afterwards.
    cc_test.communicate(input=code_test)
    return cc_test.returncode != 0
# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
# Only synthesize default flags when the user did not supply their own.
if EXTRA_ENV_COMPILE_ARGS is None:
    EXTRA_ENV_COMPILE_ARGS = ' -std=c++11'
    if 'win32' in sys.platform:
        if sys.version_info < (3, 5):
            EXTRA_ENV_COMPILE_ARGS += ' -D_hypot=hypot'
            # We use define flags here and don't directly add to DEFINE_MACROS below to
            # ensure that the expert user/builder has a way of turning it off (via the
            # envvars) without adding yet more GRPC-specific envvars.
            # See https://sourceforge.net/p/mingw-w64/bugs/363/
            if '32' in platform.architecture()[0]:
                EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
            else:
                EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
        else:
            # We need to statically link the C++ Runtime, only the C runtime is
            # available dynamically
            EXTRA_ENV_COMPILE_ARGS += ' /MT'
    elif "linux" in sys.platform:
        EXTRA_ENV_COMPILE_ARGS += ' -std=gnu99 -fvisibility=hidden -fno-wrapv -fno-exceptions'
    elif "darwin" in sys.platform:
        EXTRA_ENV_COMPILE_ARGS += ' -stdlib=libc++ -fvisibility=hidden -fno-wrapv -fno-exceptions'
if EXTRA_ENV_LINK_ARGS is None:
    EXTRA_ENV_LINK_ARGS = ''
    if "linux" in sys.platform or "darwin" in sys.platform:
        EXTRA_ENV_LINK_ARGS += ' -lpthread'
        if check_linker_need_libatomic():
            EXTRA_ENV_LINK_ARGS += ' -latomic'
    elif "win32" in sys.platform and sys.version_info < (3, 5):
        msvcr = cygwinccompiler.get_msvcr()[0]
        EXTRA_ENV_LINK_ARGS += (
            ' -static-libgcc -static-libstdc++ -mcrtdll={msvcr}'
            ' -static -lshlwapi'.format(msvcr=msvcr))
    if "linux" in sys.platform:
        EXTRA_ENV_LINK_ARGS += ' -Wl,-wrap,memcpy -static-libgcc'
# Final flag lists handed to distutils (shlex keeps quoted args intact).
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
CYTHON_EXTENSION_PACKAGE_NAMES = ()
CYTHON_EXTENSION_MODULE_NAMES = ('grpc._cython.cygrpc',)
CYTHON_HELPER_C_FILES = ()
# BUGFIX: every filtering step below is materialized into a tuple. Under
# Python 3 the old bare filter() calls returned one-shot iterators, so any
# second iteration of CORE_C_FILES (or chained filtering) silently produced
# an empty source list.
CORE_C_FILES = tuple(grpc_core_dependencies.CORE_SOURCE_FILES)
if "win32" in sys.platform:
    # c-ares is not built on Windows (see GRPC_ARES=0 below).
    CORE_C_FILES = tuple(f for f in CORE_C_FILES if 'third_party/cares' not in f)
if BUILD_WITH_SYSTEM_OPENSSL:
    CORE_C_FILES = tuple(f for f in CORE_C_FILES if 'third_party/boringssl' not in f)
    CORE_C_FILES = tuple(f for f in CORE_C_FILES if 'src/boringssl' not in f)
    SSL_INCLUDE = (os.path.join('/usr', 'include', 'openssl'),)
if BUILD_WITH_SYSTEM_ZLIB:
    CORE_C_FILES = tuple(f for f in CORE_C_FILES if 'third_party/zlib' not in f)
    ZLIB_INCLUDE = (os.path.join('/usr', 'include'),)
if BUILD_WITH_SYSTEM_CARES:
    CORE_C_FILES = tuple(f for f in CORE_C_FILES if 'third_party/cares' not in f)
    CARES_INCLUDE = (os.path.join('/usr', 'include'),)
# All header search paths handed to the Cython/C extension build.
EXTENSION_INCLUDE_DIRECTORIES = (
    (PYTHON_STEM,) +
    CORE_INCLUDE +
    ABSL_INCLUDE +
    ADDRESS_SORTING_INCLUDE +
    CARES_INCLUDE +
    SSL_INCLUDE +
    UPB_INCLUDE +
    UPB_GRPC_GENERATED_INCLUDE +
    ZLIB_INCLUDE)
EXTENSION_LIBRARIES = ()
if "linux" in sys.platform:
    EXTENSION_LIBRARIES += ('rt',)
# Idiom fix: was `if not "win32" in sys.platform` (same behavior).
if "win32" not in sys.platform:
    EXTENSION_LIBRARIES += ('m',)
if "win32" in sys.platform:
    EXTENSION_LIBRARIES += ('advapi32', 'ws2_32', 'dbghelp',)
if BUILD_WITH_SYSTEM_OPENSSL:
    EXTENSION_LIBRARIES += ('ssl', 'crypto',)
if BUILD_WITH_SYSTEM_ZLIB:
    EXTENSION_LIBRARIES += ('z',)
if BUILD_WITH_SYSTEM_CARES:
    EXTENSION_LIBRARIES += ('cares',)
DEFINE_MACROS = (('OPENSSL_NO_ASM', 1), ('_WIN32_WINNT', 0x600))
if not DISABLE_LIBC_COMPATIBILITY:
    DEFINE_MACROS += (('GPR_BACKWARDS_COMPATIBILITY_MODE', 1),)
if "win32" in sys.platform:
    # TODO(zyc): Re-enable c-ares on x64 and x86 windows after fixing the
    # ares_library_init compilation issue
    DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1), ('CARES_STATICLIB', 1),
                      ('GRPC_ARES', 0), ('NTDDI_VERSION', 0x06000000),
                      ('NOMINMAX', 1),)
    if '64bit' in platform.architecture()[0]:
        DEFINE_MACROS += (('MS_WIN64', 1),)
    elif sys.version_info >= (3, 5):
        # For some reason, this is needed to get access to inet_pton/inet_ntop
        # on msvc, but only for 32 bits
        DEFINE_MACROS += (('NTDDI_VERSION', 0x06000000),)
else:
    DEFINE_MACROS += (('HAVE_CONFIG_H', 1), ('GRPC_ENABLE_FORK_SUPPORT', 1),)
LDFLAGS = tuple(EXTRA_LINK_ARGS)
CFLAGS = tuple(EXTRA_COMPILE_ARGS)
if "linux" in sys.platform or "darwin" in sys.platform:
    pymodinit_type = 'PyObject*' if PY3 else 'void'
    pymodinit = 'extern "C" __attribute__((visibility ("default"))) {}'.format(pymodinit_type)
    DEFINE_MACROS += (('PyMODINIT_FUNC', pymodinit),)
    DEFINE_MACROS += (('GRPC_POSIX_FORK_ALLOW_PTHREAD_ATFORK', 1),)
# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python3 was built with.
# For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread)
if 'darwin' in sys.platform and PY3:
    mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
    if mac_target and (pkg_resources.parse_version(mac_target) <
                       pkg_resources.parse_version('10.7.0')):
        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.7'
        os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
            r'macosx-[0-9]+\.[0-9]+-(.+)',
            r'macosx-10.7-\1',
            util.get_platform())
def cython_extensions_and_necessity():
cython_module_files = [os.path.join(PYTHON_STEM,
name.replace('.', '/') + '.pyx')
for name in CYTH |
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
import peak
import datetime
from nagare import presentation, security, ajax, i18n
from nagare.i18n import _, format_date
from .comp import DueDa | te
@peak.rules.when(ajax.py2js, ( | datetime.date,))
def py2js(value, h):
"""Generic method to transcode a Datetime
In:
- ``value`` -- the datetime object
- ``h`` -- the current renderer
Return:
- transcoded javascript
"""
dt = i18n.to_timezone(value)
return 'new Date("%s", "%s", "%s")' % (
dt.year, dt.month - 1, dt.day)
@peak.rules.when(ajax.py2js, (DueDate,))
def py2js(value, h):
    """Transcode a DueDate component: delegate to its date, or ``None``."""
    if not value.due_date:
        return None
    return ajax.py2js(value.due_date, h)
@presentation.render_for(DueDate)
def render_DueDate(self, h, comp, model):
    """Default DueDate view: renders nothing beyond the current root."""
    return h.root
@presentation.render_for(DueDate, model='badge')
def render_DueDate_badge(self, h, *args):
    """Gallery badge for the card"""
    # Only rendered when a due date is set: shows the remaining-days count
    # with a severity CSS class and the full date as the tooltip.
    if self.due_date:
        with h.span(class_='badge'):
            h << h.span(h.i(class_='icon-alarm'), ' ', self.get_days_count(), class_='label due-date ' + self.get_class(), title=format_date(self.due_date, 'full'))
    return h.root
@presentation.render_for(DueDate, model='action')
def render_DueDate_button(self, h, comp, *args):
    """Card action button that toggles the due-date calendar.

    Only rendered when the current user holds the 'due_date' permission on
    the card; the label shows the current date (with a severity class) or a
    generic "Due date" caption.
    """
    if security.has_permissions('due_date', self.card):
        self._init_calendar()
        id_ = h.generate_id()
        if self.due_date:
            classes = ['btn', 'btn-due-date', self.get_class()]
            with h.a(class_=u' '.join(classes), id_=id_).action(self.calendar().toggle):
                h << h.i(class_='icon-alarm duedate-icon')
                h << format_date(self.due_date, 'short')
        else:
            with h.a(class_='btn', id_=id_).action(self.calendar().toggle):
                h << h.i(class_='icon-alarm')
                h << _('Due date')
        # The calendar answers with the picked date; persist it via set_value.
        h << self.calendar.on_answer(self.set_value)
    return h.root
|
port:
description:
- The port that the virtual listens for connections on.
- When creating a new application, if this parameter is not specified, the
default value of C(53) will be used.
type: str
default: 53
service_environment:
description:
- Specifies the name of service environment that the application will be
deployed to.
- When creating a new application, this parameter is required.
- The service environment type will be discovered by this module automatically.
Therefore, it is crucial that you maintain unique names for items in the
different service environment types.
- SSGs are not supported for this type of application.
type: str
add_analytics:
description:
- Collects statistics of the BIG-IP that the application is deployed to.
- This parameter is only relevant when specifying a C(service_environment) which
is a BIG-IP; not an SSG.
type: bool
default: no
state:
description:
- The state of the resource on the system.
- When C(present), guarantees that the resource exists with the provided attributes.
- When C(absent), removes the resource from the system.
type: str
choices:
- absent
- present
default: present
wait:
description:
- If the module should wait for the application to be created, deleted or updated.
type: bool
default: yes
extends_documentation_fragment: f5
notes:
- This module does not support updating of your application (whether deployed or not).
If you need to update the application, the recommended practice is to remove and
re-create.
- This module will not work on BIGIQ version 6.1.x or greater.
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Load balance a UDP-based application with a FastL4 profile
bigiq_application_fastl4_udp:
name: my-app
description: My description
service_environment: my-bigip-device
servers:
- address: 1.2.3.4
port: 8080
- address: 5.6.7.8
port: 8080
inbound_virtual:
name: foo
address: 2.2.2.2
netmask: 255.255.255.255
port: 53
provider:
password: secret
server: lb.mydomain.com
user: admin
state: present
delegate_to: localhost
'''
RETURN = r'''
description:
description: The new description of the application of the resource.
returned: changed
type: str
sample: My application
service_environment:
description: The environment which the service was deployed to.
returned: changed
type: str
sample: my-ssg1
inbound_ | virtual_destination:
description: The destination of the virtual that was created.
returned: ch | anged
type: str
sample: 6.7.8.9
inbound_virtual_netmask:
description: The network mask of the provided inbound destination.
returned: changed
type: str
sample: 255.255.255.0
inbound_virtual_port:
description: The port the inbound virtual address listens on.
returned: changed
type: int
sample: 80
servers:
description: List of servers, and their ports, that make up the application.
type: complex
returned: changed
contains:
address:
description: The IP address of the server.
returned: changed
type: str
sample: 2.3.4.5
port:
description: The port that the server listens on.
returned: changed
type: int
sample: 8080
sample: hash/dictionary of values
'''
import time
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigiq import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.icontrol import bigiq_version
except ImportError:
from ansible.module_utils.network.f5.bigiq import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.icontrol import bigiq_version
class Parameters(AnsibleF5Parameters):
    """Shared attribute mappings between the BIG-IQ REST API and this module."""
    # REST (camelCase) -> module (snake_case) renames applied on ingest.
    api_map = {
        'templateReference': 'template_reference',
        'subPath': 'sub_path',
        'configSetName': 'config_set_name',
        'defaultDeviceReference': 'default_device_reference',
        'addAnalytics': 'add_analytics'
    }
    # Attributes sent to the API on create/update.
    api_attributes = [
        'resources', 'description', 'configSetName', 'subPath', 'templateReference',
        'defaultDeviceReference', 'addAnalytics'
    ]
    # Attributes reported back to the user in the module result.
    returnables = [
        'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
        'default_device_reference', 'servers', 'inbound_virtual', 'add_analytics'
    ]
    # Attributes compared to decide whether an update is needed.
    updatables = [
        'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
        'default_device_reference', 'servers', 'add_analytics'
    ]
class ApiParameters(Parameters):
    # Parameters in the shape returned by the BIG-IQ API; currently no extra
    # translation beyond the shared Parameters mappings is required.
    pass
class ModuleParameters(Parameters):
    """Parameters supplied by the user, plus references resolved via the API."""

    @property
    def http_profile(self):
        # Name of the HTTP profile attached to created virtuals.
        return "profile_http"

    @property
    def config_set_name(self):
        # BIG-IQ config sets are named after the application.
        return self.name

    @property
    def sub_path(self):
        return self.name

    @property
    def template_reference(self):
        """Resolve the selfLink of the default FastL4-UDP LB template.

        Raises:
            F5ModuleError: when the template is missing or the API errors.
        """
        # Renamed from `filter`, which shadowed the builtin.
        query_filter = "name+eq+'Default-f5-FastL4-UDP-lb-template'"
        uri = "https://{0}:{1}/mgmt/cm/global/templates/?$filter={2}&$top=1&$select=selfLink".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            query_filter
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 200 and response['totalItems'] == 0:
            # BUGFIX: the message claimed a missing "HTTP" template, but the
            # FastL4-UDP template is what is queried above.
            raise F5ModuleError(
                "No default FastL4-UDP LB template was found."
            )
        elif 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp._content)
        result = dict(
            link=response['items'][0]['selfLink']
        )
        return result

    @property
    def default_device_reference(self):
        """Resolve the selfLink of the target BIG-IP device.

        Accepts either an IP address or a hostname in service_environment.

        Raises:
            F5ModuleError: when no matching device exists or the API errors.
        """
        if is_valid_ip(self.service_environment):
            # An IP address was specified
            query_filter = "address+eq+'{0}'".format(self.service_environment)
        else:
            # Assume a hostname was specified
            query_filter = "hostname+eq+'{0}'".format(self.service_environment)
        uri = "https://{0}:{1}/mgmt/shared/resolver/device-groups/cm-adccore-allbigipDevices/devices/?$filter={2}&$top=1&$select=selfLink".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            query_filter
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 200 and response['totalItems'] == 0:
            # BUGFIX: the original message read "was found", inverting the
            # meaning of this error.
            raise F5ModuleError(
                "The specified service_environment '{0}' was not found.".format(self.service_environment)
            )
        elif 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp._content)
        result = dict(
            link=response['items'][0]['selfLink']
        )
        return result
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
|
#!/usr/bin/env python3
import os
from i3_lemonbar_conf import *
# Directory holding this script and its feeder/config modules.
cwd = os.path.dirname(os.path.abspath(__file__))
# Build the lemonbar command line; font, iconfont, geometry, color_back and
# color_fore come from the star-import of i3_lemonbar_conf above.
lemon = "lemonbar -p -f '%s' -f '%s' -g '%s' -B '%s' -F '%s'" % (font, iconfont, geometry, color_back, color_fore)
# Feeder process that prints status lines for lemonbar to render.
feed = "python3 -c 'import i3_lemonbar_feeder; i3_lemonbar_feeder.run()'"
# Pipe the feeder into lemonbar from this directory; blocks until the bar
# exits. NOTE(review): check_output is not imported here explicitly --
# presumably re-exported by i3_lemonbar_conf; verify.
check_output('cd %s; %s | %s' % (cwd, feed, lemon), shell=True)
| |
from __future__ import unicode_literals
from django.db import transaction
from django.db import models
from djang | o.contrib.auth.models import User
# Create your models here.
class UserProfile(models.Model):
    """Extra per-user data attached one-to-one to Django's auth User."""
    user = models.OneToOneField(User, unique=True, verbose_name=('user'))
    # Contact phone number.
    phone = models.CharField(max_length=20)
    # Where the account originated from (local signup or a social login).
    USER_SOURCE = (
        ('LO', 'Local'),
        ('WB', 'Weibo'),
        ('QQ', 'QQ'),
    )
    source = models.CharField(max_length=2, choices=USER_SOURCE, default='LO')
    created_date = models.DateTimeField(auto_now_add=True)
    last_updated_date = models.DateTimeField(auto_now=True)
    @transaction.atomic
    def createUser(self):
        """Persist the wrapped User and this profile in one transaction."""
        self.user.save()
        self.save()
import os, unicodedata
from django.utils.translation import ugettext_lazy as _
from django.core.files.storage import FileSystemStorage
from django.db.models.fields.files import FileField
from django.core.files.storage import default_storage
from django.conf import settings
from django.utils.safestring import mark_safe
class AdminThumbnailMixin(object):
    """Admin mixin rendering a small thumbnail column for a model.

    Subclasses may override ``thumbnail_options`` (easy-thumbnails options),
    ``thumbnail_image_field_name`` (the image field to preview) and
    ``thumbnail_alt_field_name`` (optional field used as the alt text).
    """
    thumbnail_options = {'size': (60, 60)}
    thumbnail_image_field_name = 'image'
    thumbnail_alt_field_name = None

    def _thumb(self, image, options=None, alt=None):
        """Return a safe ``<img>`` tag for *image*, or an empty-src tag on failure."""
        from easy_thumbnails.files import get_thumbnailer
        # BUGFIX: the default used to be a shared mutable dict literal.
        if options is None:
            options = {'size': (60, 60)}
        media = getattr(settings, 'THUMBNAIL_MEDIA_URL', settings.MEDIA_URL)
        attrs = []
        try:
            src = "%s%s" % (media, get_thumbnailer(image).get_thumbnail(options))
        except Exception:
            # Thumbnail generation is best-effort; fall back to an empty src.
            # (Was a bare ``except:``, which also swallowed SystemExit.)
            src = ""
        if alt is not None:
            attrs.append('alt="%s"' % alt)
        return mark_safe('<img src="%s" %s />' % (src, " ".join(attrs)))

    def thumbnail(self, obj):
        """Admin column callable: render *obj*'s thumbnail tag."""
        kwargs = {'options': self.thumbnail_options}
        if self.thumbnail_alt_field_name:
            kwargs['alt'] = getattr(obj, self.thumbnail_alt_field_name)
        return self._thumb(getattr(obj, self.thumbnail_image_field_name), **kwargs)
    thumbnail.allow_tags = True
    thumbnail.short_description = _('Thumbnail')
def file_cleanup(sender, **kwargs):
    """
    File cleanup callback used to emulate the old delete
    behavior using signals. Initially django deleted linked
    files when an object containing a File/ImageField was deleted.

    The file is only deleted when no other row of the model still
    references the same path.

    Usage:
    >>> from django.db.models.signals import post_delete
    >>> post_delete.connect(file_cleanup, sender=MyModel, dispatch_uid="mymodel.file_cleanup")
    """
    for fieldname in sender._meta.get_all_field_names():
        try:
            field = sender._meta.get_field(fieldname)
        except Exception:
            # BUGFIX: narrowed from a bare ``except:``. Reverse/virtual names
            # make get_field raise; treat them as "not a file field".
            field = None
        if field and isinstance(field, FileField):
            inst = kwargs['instance']
            f = getattr(inst, fieldname)
            m = inst.__class__._default_manager
            if hasattr(f, 'path') and os.path.exists(f.path) \
                and not m.filter(**{'%s__exact' % fieldname: getattr(inst, fieldname)})\
                .exclude(pk=inst._get_pk_val()):
                try:
                    # os.remove(f.path) -- deletion goes through the storage API
                    default_storage.delete(f.path)
                except Exception:
                    # BUGFIX: narrowed from a bare ``except:``; deletion stays
                    # best-effort but no longer swallows KeyboardInterrupt.
                    pass
class ASCIISafeFileSystemStorage(FileSystemStorage):
    """
    Same as FileSystemStorage, but converts unicode characters
    in file name to ASCII characters before saving the file. This
    is mostly useful for the non-English world.

    Usage (settings.py):
    >>> DEFAULT_FILE_STORAGE = 'webcore.utils.storage.ASCIISafeFileSystemStorage'
    """
    def get_valid_name(self, name):
        # Replace spaces, decompose accented characters (NFKD), then drop
        # anything that does not survive the ASCII round-trip.
        underscored = unicode(name.replace(' ', '_'))
        ascii_name = unicodedata.normalize('NFKD', underscored).encode('ascii', 'ignore')
        return super(ASCIISafeFileSystemStorage, self).get_valid_name(ascii_name)
|
from d | jango.contrib.sitemaps import Sitemap
from .models import BlogEntry
class BlogEntrySitemap(Sitemap):
    """Sitemap of visible blog entries on the current site, served over HTTPS."""
    changefreq = "yearly"
    priority = 0.6
    protocol = 'https'

    def items(self):
        """Only entries flagged visible, scoped to the current site."""
        visible_entries = BlogEntry.on_site.filter(is_visible=True)
        return visible_entries

    def lastmod(self, item):
        """Last-modification timestamp of a single entry."""
        return item.modification
|
pecial_properties = ('service_notification_commands', 'host_notification_commands',
'service_notification_period', 'host_notification_period',
'service_notification_options', 'host_notification_options',
'host_notification_commands', 'contact_name')
# Single-value properties a notification way can inherit directly from its
# contact (the "simple" parameters, as opposed to the special ones above).
_simple_way_parameters = ('service_notification_period', 'host_notification_period',
                          'service_notification_options', 'host_notification_options',
                          'service_notification_commands', 'host_notification_commands',
                          'min_business_impact')
class Contact(Item):
    id = 1 # zero is always special in database, so we do not take risk here
    my_type = 'contact'

    # Configuration properties parsed from the contact definitions.
    properties = Item.properties.copy()
    properties.update({
        'contact_name': StringProp(fill_brok=['full_status']),
        'alias': StringProp(default='none', fill_brok=['full_status']),
        'contactgroups': StringProp(default='', fill_brok=['full_status']),
        'host_notifications_enabled': BoolProp(default='1', fill_brok=['full_status']),
        'service_notifications_enabled': BoolProp(default='1', fill_brok=['full_status']),
        'host_notification_period': StringProp(fill_brok=['full_status']),
        'service_notification_period': StringProp(fill_brok=['full_status']),
        'host_notification_options': StringProp(fill_brok=['full_status']),
        'service_notification_options': StringProp(fill_brok=['full_status']),
        'host_notification_commands': StringProp(fill_brok=['full_status']),
        'service_notification_commands': StringProp(fill_brok=['full_status']),
        'min_business_impact': IntegerProp(default='0', fill_brok=['full_status']),
        'email': StringProp(default='none', fill_brok=['full_status']),
        'pager': StringProp(default='none', fill_brok=['full_status']),
        'address1': StringProp(default='none', fill_brok=['full_status']),
        'address2': StringProp(default='none', fill_brok=['full_status']),
        'address3': StringProp(default='none', fill_brok=['full_status']),
        'address4': StringProp(default='none', fill_brok=['full_status']),
        'address5': StringProp(default='none', fill_brok=['full_status']),
        'address6': StringProp(default='none', fill_brok=['full_status']),
        'can_submit_commands': BoolProp(default='0', fill_brok=['full_status']),
        'is_admin': BoolProp(default='0', fill_brok=['full_status']),
        'retain_status_information': BoolProp(default='1', fill_brok=['full_status']),
        'notificationways': StringProp(default='', fill_brok=['full_status']),
        'password': StringProp(default='NOPASSWORDSET', fill_brok=['full_status']),
    })

    # Runtime-only state (not read from configuration files).
    running_properties = Item.running_properties.copy()
    running_properties.update({
        'modified_attributes': IntegerProp(default=0L, fill_brok=['full_status'], retention=True),
        'downtimes': StringProp(default=[], fill_brok=['full_status'], retention=True),
    })

    # This tab is used to transform old parameters name into new ones
    # so from Nagios2 format, to Nagios3 ones.
    # Or Shinken deprecated names like criticity
    old_properties = {
        'min_criticity': 'min_business_impact',
    }

    # Macro names expanded in notification commands -> backing attribute.
    macros = {
        'CONTACTNAME': 'contact_name',
        'CONTACTALIAS': 'alias',
        'CONTACTEMAIL': 'email',
        'CONTACTPAGER': 'pager',
        'CONTACTADDRESS1': 'address1',
        'CONTACTADDRESS2': 'address2',
        'CONTACTADDRESS3': 'address3',
        'CONTACTADDRESS4': 'address4',
        'CONTACTADDRESS5': 'address5',
        'CONTACTADDRESS6': 'address6',
        'CONTACTGROUPNAME': 'get_groupname',
        'CONTACTGROUPNAMES': 'get_groupnames'
    }
# For debugging purpose only (nice name)
def get_name(self):
try:
return self.contact_name
except AttributeError:
return 'UnnamedContact'
# Search for notification_options with state and if t is
# in service_notification_period
def want_service_notification(self, t, state, type, business_impact, cmd=None):
if not self.service_notifications_enabled:
return False
# If we are in downtime, we do nto want notification
for dt in self.downtimes:
if dt.is_in_effect:
return False
# Now the rest is for sub notificationways. If one is OK, we are ok
# We will filter in another phase
for nw in self.notificationways:
nw_b = nw.want_service_notification(t, state, type, business_impact, cmd)
if nw_b:
return True
# Oh... no one is ok for it? so no, sorry
return False
# Search for notification_options with state and if t is in
# host_notification_period
def want_host_notification(self, t, state, type, business_impact, cmd=None):
if not self.host_notifications_enabled:
return False
# If we are in downtime, we do nto want notification
for dt in self.downtimes:
if dt.is_in_effect:
return False
# Now it's all for sub notificationways. If one is OK, we are OK
# We will filter in another phase
for nw in self.notificationways:
nw_b = nw.want_host_notification(t, state, type, business_impact, cmd)
if nw_b:
return True
# Oh, nobody..so NO :)
return False
# Call to get our commands to launch a Notification
def get_notification_commands(self, type):
r = []
# service_notification_commands for service
notif_commands_prop = type + '_notification_commands'
for nw in self.notificationways:
r.extend(getattr(nw, notif_commands_prop))
return r
    # Check is required prop are set:
    # contacts OR contactgroups is need
    def is_correct(self):
        """Validate this contact's configuration.

        Checks that every required (non-special) property is set, that
        special properties are present when no notification way exists,
        and that contact_name contains no illegal character. Falls back
        to ``alias`` when ``contact_name`` is missing.

        Returns True when the configuration is valid, False otherwise.
        """
        state = True
        cls = self.__class__
        # All of the above are checks in the notificationways part
        for prop, entry in cls.properties.items():
            if prop not in _special_properties:
                if not hasattr(self, prop) and entry.required:
                    logger.error("[contact::%s] %s property not set", self.get_name(), prop)
                    state = False  # Bad boy...
        # There is a case where there is no nw: when there is not special_prop defined
        # at all!!
        if self.notificationways == []:
            for p in _special_properties:
                if not hasattr(self, p):
                    logger.error("[contact::%s] %s property is missing", self.get_name(), p)
                    state = False
        if hasattr(self, 'contact_name'):
            for c in cls.illegal_object_name_chars:
                if c in self.contact_name:
                    logger.error("[contact::%s] %s character not allowed in contact_name", self.get_name(), c)
                    state = False
        else:
            if hasattr(self, 'alias'):  # take the alias if we miss the contact_name
                self.contact_name = self.alias
        return state
    # Raise a log entry when a downtime begins
    # CONTACT DOWNTIME ALERT: test_contact;STARTED; Contact has entered a period of scheduled downtime
    def raise_enter_downtime_log_entry(self):
        """Emit the Nagios-format log line announcing that this contact
        entered a period of scheduled downtime."""
        naglog_result('info', "CONTACT DOWNTIME ALERT: %s;STARTED; Contact has "
                      "entered a period of scheduled downtime"
                      % self.get_name())
    # Raise a log entry when a downtime has finished
    # CONTACT DOWNTIME ALERT: test_contact;STOPPED; Contact has exited from a period of scheduled downtime
    def raise_exit_downtime_log_entry(self):
        """Emit the Nagios-format log line announcing that this contact
        exited its period of scheduled downtime."""
        naglog_result('info', "CONTACT DOWNTIME ALERT: %s;STOPPED; Contact has "
                      "exited from a period of scheduled downtime"
                      % self.get_name())
# Raise a log entry when a downtime prematurely ends
# CONTACT DOWNTIME ALERT: test_con |
import torch
from transformers import PreTrainedModel
from .custom_configuration import CustomConfig, NoSuperInitConfig
class CustomModel(PreTrainedModel):
    """Minimal test model: a single hidden_size -> hidden_size linear layer."""
    # Ties this model class to its configuration class.
    config_class = CustomConfig
    def __init__(self, config):
        super().__init__(config)
        self.linear = torch.nn.Linear(config.hidden_size, config.hidden_size)
    def forward(self, x):
        # Plain linear projection; last dim must equal config.hidden_size.
        return self.linear(x)
    def _init_weights(self, module):
        # Intentionally a no-op: weights keep torch's default initialization.
        pass
class NoSuperInitModel(PreTrainedModel):
| config_class = NoSuperInitConfig
def __init__(self, config):
super().__init__(config)
self.linear = torch.nn.Linear(config.attribute, config.attribute)
def forward(self, x):
return self.linear(x)
def _init_weights(self, module):
pass
|
import unittest
try:
from unittest import mock
except ImportError:
import mock
from pi3bar.plugins.uptime import get_uptime_seconds, uptime_format, Uptime
class GetUptimeSecondsTestCase(unittest.TestCase):
    def test(self):
        """get_uptime_seconds() reads its number from the patched open()."""
        open_mock = mock.mock_open(read_data='5')
        open_mock.return_value.readline.return_value = '5'  # py33
        with mock.patch('pi3bar.plugins.uptime.open', open_mock, create=True):
            self.assertEqual(5, get_uptime_seconds())
class UptimeFormatTestCase(unittest.TestCase):
    """uptime_format() renders seconds through a strftime-like pattern."""
    def test_seconds(self):
        self.assertEqual('0:00:00:05', uptime_format(5))
    def test_minutes(self):
        self.assertEqual('0:00:59:00', uptime_format(3540))
    def test_hours(self):
        self.assertEqual('0:13:37:00', uptime_format(49020))
    def test_days(self):
        self.assertEqual('1:13:37:00', uptime_format(135420))
    def test_format_days_applied_to_hours(self):
        # Without %d, whole days overflow into the hours field.
        self.assertEqual('37:37:00', uptime_format(135420, '%H:%M:%S'))
    def test_format_hours_applied_to_minutes(self):
        # Without %H, whole hours overflow into the minutes field.
        self.assertEqual('817:00', uptime_format(49020, '%M:%S'))
class UptimeTestCase(unittest.TestCase):
    def test(self):
        """Default full/short format strings of the plugin."""
        plugin = Uptime()
        self.assertEqual('%d days %H:%M:%S up', plugin.full_format)
        self.assertEqual('%dd %H:%M up', plugin.short_format)
    @mock.patch('pi3bar.plugins.uptime.get_uptime_seconds')
    def test_cycle(self, mock_get_uptime_seconds):
        """cycle() formats the mocked uptime into both text fields."""
        mock_get_uptime_seconds.return_value = 49020
        plugin = Uptime()
        plugin.cycle()
        self.assertEqual('0 days 13:37:00 up', plugin.full_text)
        self.assertEqual('0d 13:37 up', plugin.short_text)
|
# -*- coding: utf-8 -*-
import os
import re
try:
import simplejson as json
except ImportError:
import json
from ToolBoxAssistant.app import AppFactory
from ToolBoxAssistant.helpers import get_svn_url, readfile, find_versionned_folders, yes_no, Color
from ToolBoxAssistant.log import logger
VERSION = '0.1'
class ToolBoxAssistant(object):
    """
    The main class.

    Synchronizes a toolbox of version-controlled applications against a
    JSON specifications file ("specfile"), and can generate such a file
    by scanning a folder for existing checkouts.
    """
    config_basedir = os.path.join(os.path.expanduser('~'), '.tba')
    tba_required_fields = ['path', 'apps']
    app_required_fields = ['type', 'url', 'path']
    # vcs type -> (config file relative path, url-extracting regex, extractor)
    vcs_repo_finders = {
        'git': (
            '.git/config',
            re.compile(r'\[remote "origin"\]\s+url = (.*)$', re.M),
            lambda regex, cfg: regex.search(readfile(cfg)).group(1)
        ),
        'hg': (
            '.hg/hgrc',
            re.compile(r'default = (.*)$'),
            lambda regex, cfg: regex.search(readfile(cfg)).group(1)
        ),
        'svn': (
            '',
            re.compile(r'Repository Root: (.*)$', re.M),
            get_svn_url
        )
    }
    def __init__(self):
        self.config_dir = None
    def setup_config_dir(self, path):
        """Compute and create (if needed) the per-toolbox config folder.

        :param str path: toolbox root path, flattened into a folder name.
        """
        self.config_dir = os.path.join(
            self.config_basedir,
            path.replace(os.path.sep, '_').strip('_')
        )
        if not os.path.exists(self.config_dir):
            # Parenthesized: '%' binds tighter than '+', the old form only
            # worked because the pieces concatenated to the same string.
            logger.debug('creating configuration folder: %s' % (Color.GREEN + self.config_dir + Color.END))
            os.makedirs(self.config_dir)
    def load_specs(self, fpath):
        """
        Loads a specifications file and checks for missing fields.

        :param str fpath: path to the JSON specfile.
        :return: the parsed specs dict, or None when a field is missing.
        """
        with open(fpath) as ifile:
            logger.debug('loading specfile: %s' % (Color.GREEN + fpath + Color.END))
            data = json.load(ifile)
            for field in self.tba_required_fields:
                if field not in data:
                    logger.error('missing top-level field in specs: %s' % (Color.GREEN + field + Color.END))
                    return None
            for app_name in data['apps']:
                app_specs = data['apps'][app_name]
                for app_field in self.app_required_fields:
                    if app_field not in app_specs:
                        logger.error('missing app field in specs: %s' % (Color.GREEN + app_field + Color.END))
                        return None
            return data
    def do_sync(self, args):
        """
        Synchronizes installed application with the specfile.

        With ``args.unlisted`` set, also reports versioned folders that
        are not listed in the specfile.
        """
        if (not os.path.exists(args.file)) or (not os.path.isfile(args.file)):
            logger.error('file not found: %s' % (Color.GREEN + args.file + Color.END))
            return
        specs = self.load_specs(args.file)
        if specs is None:
            return
        self.setup_config_dir(specs['path'])
        rootpath = specs['path']
        for app_name in specs['apps']:
            app_specs = specs['apps'][app_name]
            # Relative app paths are anchored at the toolbox root.
            if not app_specs['path'].startswith(os.path.sep):
                app_specs['path'] = os.path.join(rootpath, app_specs['path'])
            app = AppFactory.load(self, app_name, app_specs)
            app.sync()
            if app.is_updated:
                app.build()
        if args.unlisted:
            for _, folder in find_versionned_folders(rootpath):
                folder, app_name = os.path.split(folder)
                # logger.warn() is a deprecated alias of logger.warning().
                logger.warning('found unlisted application in %s: %s' % (
                    folder, Color.GREEN + app_name + Color.END
                ))
    def do_genspec(self, args):
        """
        Scans current folder for versionned applications and
        creates a specfile accordingly.

        With ``args.merge`` set, new apps are merged into that specfile.
        """
        self.setup_config_dir(args.path)
        new_specs = {
            'path': args.path,
            'apps': {}
        }
        if args.merge is not None:
            new_specs = self.load_specs(args.merge)
        apps_specs = new_specs['apps']
        new_apps_found = False
        for vcs_type, app_folder in find_versionned_folders(args.path):
            app_path = app_folder[len(args.path) + 1:]
            if app_path not in [apps_specs[a]['path'] for a in apps_specs]:
                new_apps_found = True
                folder, app_name = os.path.split(app_folder)
                logger.info('found%s application in %s: %s (%s)' % (
                    ' new' if args.merge is not None else '',
                    folder, Color.GREEN + app_name + Color.END, vcs_type
                ))
                cfg_file, regex, handler = self.vcs_repo_finders[vcs_type]
                cfg_path = os.path.join(app_folder, cfg_file)
                app_specs = {
                    'type': vcs_type,
                    'url': handler(regex, cfg_path),
                    'path': app_path,
                }
                apps_specs[app_name] = app_specs
        if new_apps_found:
            outfile = args.merge or args.file
            if os.path.exists(outfile):
                logger.warning('file already exists: %s' % (Color.GREEN + outfile + Color.END))
                if not yes_no('Overwrite ?'):
                    logger.error('operation aborted by user')
                    return
            with open(outfile, 'w') as ofile:
                json.dump(new_specs, ofile, sort_keys=True, indent=2, separators=(',', ': '))
            logger.info('specfile written to %s' % (Color.GREEN + outfile + Color.END))
            logger.info('you may now add build information to the new specfile')
        else:
            logger.info('no new application found')
|
import numpy as np
import random
class ReplayBuffer:
    """Abstract buffer for storing values over timesteps.

    Subclasses must implement both `batch_sample` and `put`.
    """
    def __init__(self):
        """Initializes the buffer; the base class keeps no state."""
    def batch_sample(self, batch_size):
        """Randomly sample a batch of values from the buffer."""
        raise NotImplementedError
    def put(self, *value):
        """Put values into the replay buffer."""
        raise NotImplementedError
class ExperienceReplay(ReplayBuffer):
"""
Experience Replay stores action, state, reward and terminal signal
for each time step.
"""
def __init__(self, state_size, action_size, capacity):
""" Creates an Experience Replay of certain capacity.
Acts like a circular buffer.
Args:
state_size: The size of the state to be stored.
action_size: The size of the action to be stored.
capacity: The capacity of the experience replay buffer.
"""
self.state_size = state_size
self.action_size = action_size
self.length = 0
self.capacity = capacity
self.actions = np.empty((self.capacity, self.action_size), dtype = np.float16)
self.states = np.empty((self.capacity, self.state_size), dtype = np.float16)
self.rewards = np.empty(self.capacity, dtype = np.float16)
self.dones = np.empty(self.capacity, dtype = np.bool)
self.current_index = 0
self.staged = False
def batch_sample(self, batch_size):
""" Sample a batch of experiences from the replay.
Args:
batch_size: The number of batches to select
Returns:
s_t
a_t
r_t
s_t1
done
"""
if batch_size > self.length-3:
# we might not have enough experience
raise IOError('batch_size out of range')
idxs = []
while len(idxs) < batch_size:
while True:
# keep trying random indices
idx = random.randint(1, self.length - 1)
# don't want to grab current index since it wraps
if not( idx == self.current_index and idx == self.current_index - 1 ):
idxs.append(idx)
break
s_t = self.states[idxs]
s_t1 = self.states[[(x+1) for x in idxs]]
a_t = self.actions[idxs]
r_t = np.expand_dims(self.rewards[idxs], axis = 1)
done = self.dones[idxs]
'''
j = 0
print(s_t[j], s_t1[j], a_t[j], r_t[j], done[j])
j = 1
print(s_t[j], s_t1[j], a_t[j], r_t[j], done[j])
raw_input("Press Enter to continue...")
'''
return s_t, a_t, r_t, s_t1, done
def _put(self, s_t, a_t, reward, done):
self.actions[self.current_index] = a_t
self.states[self.current_index] = s_t
self.rewards[self.current_index] = reward
self.dones[self.current_index] = done
self._icrement_index()
def put_act(self, s_t, a_t):
""" Puts the current state and the action taking into Experience Replay.
Args:
s_t: Current state.
a_t: Action taking at this state.
Raises:
IOError: If trying to overwrite previously staged action and state.
"""
if not self.staged:
self.actions[self.current_index] = a_t
self.states[self.current_index] = s_t
# stage to prevent double staging
self.staged = True
else:
# already staged an action and state
raise IOError('Trying to override previously staged action and state.')
def put_rew(self, reward, done):
""" Completes a staged insertion by adding reward and
terminal signal to Experience Replay
Args:
reward: Reward received in this step.
done: Bool signalling terminal step.
Raises:
IOError: If trying to complete insertion without having staged first.
"""
if(self.staged):
self.rewards[self.current_index] = reward
| self.dones[self.current_index] = done
# unstage and increment index
self.staged = False
self._increment_index()
else:
# not yet staged state and action
raise IOError( 'Trying to complete unstaged insertion. Must insert action and state first.')
def unstage(self):
""" Unstages any currently staged insertion
"""
if(self.staged):
# stage to prevent double staging
self.staged = False
self.actions[self.current_index] = None
self.states[self.current_index] = None
def _increment_index(self):
self.current_index = (self.current_index + 1) % self.capacity
self.length = min(self.capacity-1, self.length + 1)
| |
cadata.extend(
ssl.PEM_cert_to_DER_cert(
to_native(b_cert, errors='surrogate_or_strict')
)
)
except Exception:
continue
else:
os.write(tmp_fd, b_cert)
os.write(tmp_fd, b'\n')
except (OSError, IOError):
pass
if HAS_SSLCONTEXT:
default_verify_paths = ssl.get_default_verify_paths()
paths_checked[:0] = [default_verify_paths.capath]
else:
os.close(tmp_fd)
return (tmp_path, cadata, pa | ths_checked)
def validate_proxy_response(self, response, valid_codes=None):
'''
make sure we get back a valid code from the proxy
'''
valid_codes = [200] if valid_codes is None else valid_codes
try:
(http_version, resp_code, msg) = re.match(br'(HTTP/\d\.\d) (\d\d\d) (.*)', response).groups()
if int(re | sp_code) not in valid_codes:
raise Exception
except Exception:
raise ProxyError('Connection to proxy failed')
def detect_no_proxy(self, url):
'''
Detect if the 'no_proxy' environment variable is set and honor those locations.
'''
env_no_proxy = os.environ.get('no_proxy')
if env_no_proxy:
env_no_proxy = env_no_proxy.split(',')
netloc = urlparse(url).netloc
for host in env_no_proxy:
if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
# Our requested URL matches something in no_proxy, so don't
# use the proxy for this
return False
return True
def make_context(self, cafile, cadata):
cafile = self.ca_path or cafile
if self.ca_path:
cadata = None
else:
cadata = cadata or None
if HAS_SSLCONTEXT:
context = create_default_context(cafile=cafile)
elif HAS_URLLIB3_PYOPENSSLCONTEXT:
context = PyOpenSSLContext(PROTOCOL)
else:
raise NotImplementedError('Host libraries are too old to support creating an sslcontext')
if cafile or cadata:
context.load_verify_locations(cafile=cafile, cadata=cadata)
return context
    def http_request(self, req):
        """Validate the TLS certificate of self.hostname:self.port (optionally
        through an https_proxy CONNECT tunnel) before letting the request
        proceed. Returns *req* unchanged on success; raises ProxyError /
        ConnectionError / an SSL validation error otherwise.
        """
        tmp_ca_cert_path, cadata, paths_checked = self.get_ca_certs()
        # Detect if 'no_proxy' environment variable is set and if our URL is included
        use_proxy = self.detect_no_proxy(req.get_full_url())
        https_proxy = os.environ.get('https_proxy')
        context = None
        try:
            context = self.make_context(tmp_ca_cert_path, cadata)
        except NotImplementedError:
            # We'll make do with no context below
            pass
        try:
            if use_proxy and https_proxy:
                proxy_parts = generic_urlparse(urlparse(https_proxy))
                port = proxy_parts.get('port') or 443
                proxy_hostname = proxy_parts.get('hostname', None)
                if proxy_hostname is None or proxy_parts.get('scheme') == '':
                    raise ProxyError("Failed to parse https_proxy environment variable."
                                     " Please make sure you export https proxy as 'https_proxy=<SCHEME>://<IP_ADDRESS>:<PORT>'")
                s = socket.create_connection((proxy_hostname, port))
                if proxy_parts.get('scheme') == 'http':
                    # Tunnel through the proxy with an HTTP CONNECT, then
                    # perform the TLS handshake over the tunneled socket.
                    s.sendall(to_bytes(self.CONNECT_COMMAND % (self.hostname, self.port), errors='surrogate_or_strict'))
                    if proxy_parts.get('username'):
                        credentials = "%s:%s" % (proxy_parts.get('username', ''), proxy_parts.get('password', ''))
                        s.sendall(b'Proxy-Authorization: Basic %s\r\n' % base64.b64encode(to_bytes(credentials, errors='surrogate_or_strict')).strip())
                    s.sendall(b'\r\n')
                    connect_result = b""
                    while connect_result.find(b"\r\n\r\n") <= 0:
                        connect_result += s.recv(4096)
                        # 128 kilobytes of headers should be enough for everyone.
                        if len(connect_result) > 131072:
                            raise ProxyError('Proxy sent too verbose headers. Only 128KiB allowed.')
                    self.validate_proxy_response(connect_result)
                    if context:
                        ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
                    elif HAS_URLLIB3_SSL_WRAP_SOCKET:
                        ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
                    else:
                        # Legacy path: wrap_socket does not check the hostname
                        # itself, so verify the peer certificate explicitly.
                        ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
                        match_hostname(ssl_s.getpeercert(), self.hostname)
                else:
                    raise ProxyError('Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
            else:
                s = socket.create_connection((self.hostname, self.port))
                if context:
                    ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
                elif HAS_URLLIB3_SSL_WRAP_SOCKET:
                    ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
                else:
                    ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
                    match_hostname(ssl_s.getpeercert(), self.hostname)
            # close the ssl connection
            # ssl_s.unwrap()
            s.close()
        except (ssl.SSLError, CertificateError) as e:
            build_ssl_validation_error(self.hostname, self.port, paths_checked, e)
        except socket.error as e:
            raise ConnectionError('Failed to connect to %s at port %s: %s' % (self.hostname, self.port, to_native(e)))
        return req
https_request = http_request
def maybe_add_ssl_handler(url, validate_certs, ca_path=None):
    """Return an SSLValidationHandler for https URLs when certificate
    validation is requested; otherwise return None.

    Raises NoSSLError when validation is requested but ssl is unavailable.
    """
    parsed = generic_urlparse(urlparse(url))
    if parsed.scheme != 'https' or not validate_certs:
        return None
    if not HAS_SSL:
        raise NoSSLError('SSL validation is not available in your version of python. You can use validate_certs=False,'
                         ' however this is unsafe and not recommended')
    # create the SSL validation handler and
    # add it to the list of handlers
    return SSLValidationHandler(parsed.hostname, parsed.port or 443, ca_path=ca_path)
def getpeercert(response, binary_form=False):
    """ Attempt to get the peer certificate of the response from urlopen. """
    # The response from urllib2.open() is different across Python 2 and 3.
    # Renamed the local so it no longer shadows the `socket` module.
    if PY3:
        sock = response.fp.raw._sock
    else:
        sock = response.fp._sock.fp._sock
    try:
        return sock.getpeercert(binary_form)
    except AttributeError:
        pass  # Not HTTPS
def get_channel_binding_cert_hash(certificate_der):
""" Gets the channel binding app data for a TLS connection using the peer cert. """
if not HAS_CRYPTOGRAPHY:
return
# Logic documented in RFC 5929 section 4 https://tools.ietf.org/html/rfc5929#section-4
cert = x509.load_der_x509_certificate(certificate_der, default_backend())
hash_algorithm = None
try:
hash_algorithm = cert.signature_hash_algorithm
except UnsupportedAlgorithm:
pass
# If the signature hash algorithm is unknown/unsupported or md5/sha1 we must use SHA256.
if not hash_algorithm or hash_algorithm.name in ['md5', 'sha1']:
|
# -*- coding: utf-8 -*-
from __future__ import unicode | _literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Replace the single 'metrics' field on Results with one integer
    column per metric. Each new counter is backfilled with 0 for existing
    rows; preserve_default=False drops that default afterwards.
    """
    dependencies = [
        ('simsoexp', '0005_schedulingpolicy_class_name'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='results',
            name='metrics',
        ),
        migrations.AddField(
            model_name='results',
            name='aborted_jobs',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='jobs',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='migrations',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='norm_laxity',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='on_schedule',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='preemptions',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='sys_preempt',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='task_migrations',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='results',
            name='timers',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
    ]
|
lf.name, val)
self._name = val
@property
def descr(self):
"""Array-interface compliant full description of the column.
This returns a 3-tuple (name, type, shape) that can always be
used in a structured array dtype definition.
"""
return (self.name, self.dtype.str, self.shape[1:])
def iter_str_vals(self):
"""
Return an iterator that yields the string-formatted values of this
column.
| Returns
-------
str_vals : iterator
Column values formatted as strings
"""
# Iterate over formatted values with no max number of lines, no column
# name, no unit, and ignoring the returned header info in outs.
_pformat_col_iter = self._formatter._pfo | rmat_col_iter
for str_val in _pformat_col_iter(self, -1, show_name=False, show_unit=False,
show_dtype=False, outs={}):
yield str_val
def attrs_equal(self, col):
"""Compare the column attributes of ``col`` to this object.
The comparison attributes are: ``name``, ``unit``, ``dtype``,
``format``, ``description``, and ``meta``.
Parameters
----------
col : Column
Comparison column
Returns
-------
equal : boolean
True if all attributes are equal
"""
if not isinstance(col, BaseColumn):
raise ValueError('Comparison `col` must be a Column or '
'MaskedColumn object')
attrs = ('name', 'unit', 'dtype', 'format', 'description', 'meta')
equal = all(getattr(self, x) == getattr(col, x) for x in attrs)
return equal
@property
def _formatter(self):
return FORMATTER if (self.parent_table is None) else self.parent_table.formatter
def pformat(self, max_lines=None, show_name=True, show_unit=False, show_dtype=False,
html=False):
"""Return a list of formatted string representation of column values.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default will be
determined using the ``astropy.conf.max_lines`` configuration
item. If a negative value of ``max_lines`` is supplied then
there is no line limit applied.
Parameters
----------
max_lines : int
Maximum lines of output (header + data rows)
show_name : bool
Include column name (default=True)
show_unit : bool
Include a header row for unit (default=False)
show_dtype : bool
Include column dtype (default=False)
html : bool
Format the output as an HTML table (default=False)
Returns
-------
lines : list
List of lines with header and formatted column values
"""
_pformat_col = self._formatter._pformat_col
lines, outs = _pformat_col(self, max_lines, show_name=show_name,
show_unit=show_unit, show_dtype=show_dtype,
html=html)
return lines
def pprint(self, max_lines=None, show_name=True, show_unit=False, show_dtype=False):
"""Print a formatted string representation of column values.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default will be
determined using the ``astropy.conf.max_lines`` configuration
item. If a negative value of ``max_lines`` is supplied then
there is no line limit applied.
Parameters
----------
max_lines : int
Maximum number of values in output
show_name : bool
Include column name (default=True)
show_unit : bool
Include a header row for unit (default=False)
show_dtype : bool
Include column dtype (default=True)
"""
_pformat_col = self._formatter._pformat_col
lines, outs = _pformat_col(self, max_lines, show_name=show_name, show_unit=show_unit,
show_dtype=show_dtype)
n_header = outs['n_header']
for i, line in enumerate(lines):
if i < n_header:
color_print(line, 'red')
else:
print(line)
def more(self, max_lines=None, show_name=True, show_unit=False):
"""Interactively browse column with a paging interface.
Supported keys::
f, <space> : forward one page
b : back one page
r : refresh same page
n : next row
p : previous row
< : go to beginning
> : go to end
q : quit browsing
h : print this help
Parameters
----------
max_lines : int
Maximum number of lines in table output
show_name : bool
Include a header row for column names (default=True)
show_unit : bool
Include a header row for unit (default=False)
"""
_more_tabcol = self._formatter._more_tabcol
_more_tabcol(self, max_lines=max_lines, show_name=show_name,
show_unit=show_unit)
@property
def unit(self):
"""
The unit associated with this column. May be a string or a
`astropy.units.UnitBase` instance.
Setting the ``unit`` property does not change the values of the
data. To perform a unit conversion, use ``convert_unit_to``.
"""
return self._unit
@unit.setter
def unit(self, unit):
if unit is None:
self._unit = None
else:
self._unit = Unit(unit, parse_strict='silent')
@unit.deleter
def unit(self):
self._unit = None
def convert_unit_to(self, new_unit, equivalencies=[]):
"""
Converts the values of the column in-place from the current
unit to the given unit.
To change the unit associated with this column without
actually changing the data values, simply set the ``unit``
property.
Parameters
----------
new_unit : str or `astropy.units.UnitBase` instance
The unit to convert to.
equivalencies : list of equivalence pairs, optional
A list of equivalence pairs to try if the unit are not
directly convertible. See :ref:`unit_equivalencies`.
Raises
------
astropy.units.UnitsError
If units are inconsistent
"""
if self.unit is None:
raise ValueError("No unit set on column")
self.data[:] = self.unit.to(
new_unit, self.data, equivalencies=equivalencies)
self.unit = new_unit
@property
def groups(self):
if not hasattr(self, '_groups'):
self._groups = groups.ColumnGroups(self)
return self._groups
    def group_by(self, keys):
        """
        Group this column by the specified ``keys``

        This effectively splits the column into groups which correspond to
        unique values of the ``keys`` grouping object. The output is a new
        `Column` or `MaskedColumn` which contains a copy of this column but
        sorted by row according to ``keys``.

        The ``keys`` input to ``group_by`` must be a numpy array with the
        same length as this column.

        Parameters
        ----------
        keys : numpy array
            Key grouping object

        Returns
        -------
        out : Column
            New column with groups attribute set accordingly
        """
        # All of the work (sorting, group boundary detection) is done by
        # the shared grouping helper.
        return groups.column_group_by(self, keys)
def _copy_groups(self, out):
"""
Copy current groups into a copy of self ``out``
"""
if self.parent_table:
if hasattr(self.parent_table, '_groups'):
out._groups = groups. |
#!/usr/bin/env python
from .util import Spec
class Port(Spec):
    """Spec describing the observable state of a local TCP/TCP6 port.

    The state is captured once, from ``netstat -tnle``, when the object
    is constructed (so only listening TCP sockets can be detected).
    """
    STATES = [
        "listening", "closed", "open",
        "bound_to",
        "tcp", "tcp6", "udp"
    ]

    def __init__(self, portnumber):
        self.portnumber = portnumber
        # Default to 'closed' first; get_state() overwrites this when the
        # port shows up in the netstat listing. (Previously get_state()
        # was also called *before* this default, so its result was
        # immediately clobbered and the scan ran twice.)
        self.state = {
            'state': 'closed',
            'bound': False,
            'uid': None,
            'inode': None,
            'proto': None,
        }
        self.get_state()
        self.WIN = "Port %s is %%s" % self.portnumber

    def get_state(self):
        """Scan `netstat -tnle` output for this port and record its state."""
        import os
        for line in os.popen("netstat -tnle").readlines():
            fields = line.strip().split()
            # Header/irrelevant lines don't have the 8 expected columns.
            if len(fields) != 8:
                continue
            (proto, _, _, local, foreign, state, uid, inode) = fields
            if proto not in ('tcp', 'tcp6'):
                continue
            # rsplit on the last ':' handles both 'addr:port' and IPv6
            # forms like ':::8080' or '::1:8080'. (The old code left
            # `bound` unset for tcp6 lines and crashed on real IPv6
            # addresses.)
            (bound, port) = local.rsplit(':', 1)
            try:
                port = int(port)
            except ValueError:
                continue
            if port == self.portnumber:
                self.state = {
                    'state': 'listening',
                    'bound': bound,
                    'uid': uid,
                    'inode': inode,
                    'proto': proto,
                }

    def _make_sure(self, x, y):
        # Plain equality; kept as a helper for readability of the sb_* checks.
        return x == y

    def sb_listening(self, *args):
        if self._make_sure(self.state['state'], "listening"):
            return True, "Port %s is listening" % self.portnumber
        return False, "Port %s is current %s not listening" % (
            self.portnumber,
            self.state['state']
        )

    def sb_closed(self, *args):
        if self._make_sure(self.state['state'], "closed"):
            return True, "Port %s is closed" % self.portnumber
        return False, "Port %s is current %s not closed" % (
            self.portnumber, self.state['state']
        )

    def sb_tcp(self, *args):
        if self._make_sure(self.state['proto'], "tcp"):
            return True
        return "Port %s is using protocol %s not TCP" % (
            self.portnumber, self.state['proto']
        )

    def sb_udp(self, *args):
        # NOTE(review): `netstat -tnle` lists TCP sockets only, so this
        # check can never observe a UDP port — kept for API compatibility.
        if self._make_sure(self.state['proto'], "udp"):
            return True
        return "Port %s is using protocol %s not udp" % (
            self.portnumber, self.state['proto']
        )

    def sb_tcp6(self, *args):
        if self._make_sure(self.state['proto'], "tcp6"):
            return True
        return "Port %s is using protocol %s not TCP6" % (
            self.portnumber, self.state['proto']
        )

    def sb_bound_to(self, bound_ip):
        if self._make_sure(self.state['bound'], bound_ip):
            return True, "Port %s is bound to %s" % (self.portnumber, bound_ip)
        return False, "The port currently bound to %s not %s" % (
            self.state['bound'], bound_ip
        )
|
import logging
import warnings
from collections import namedtuple
logger = logging.getLogger(__name__)
Field = namedtuple('Field', ('name', 'type_', 'default', 'desc', 'warn'))
class Config:
    """Configuration module.

    Users can configure the value of each option in the rc file.
    """
    def __init__(self):
        # Bypass our own __setattr__ (which consults _fields) so the
        # fields registry can be created before any field is declared.
        object.__setattr__(self, '_fields', {})
    def __getattr__(self, name):
        # tips: we can't use getattr to fetch the value here, otherwise
        # it would recurse forever
        if name == '_fields':
            return object.__getattribute__(self, '_fields')
        if name in self._fields:
            # NOTE: __getattr__ only runs after normal lookup failed, so
            # for a declared-but-unset field this falls through to the
            # declared default.
            try:
                object.__getattribute__(self, name)
            except AttributeError:
                return self._fields[name].default
        # Undeclared and unset: raise the usual AttributeError.
        return object.__getattribute__(self, name)
    def __setattr__(self, name, value):
        if name in self._fields:
            field = self._fields[name]
            # Deprecated fields still accept values but emit a warning.
            if field.warn is not None:
                warnings.warn('Config field({}): {}'.format(name, field.warn),
                              stacklevel=2)
            # TODO: validate the value type
            object.__setattr__(self, name, value)
        else:
            # Assignments to undeclared keys are ignored (only logged).
            logger.warning('Assign to an undeclared config key.')
    def deffield(self, name, type_=None, default=None, desc='', warn=None):
        """Define a configuration field

        :param str name: the field name. It SHOULD be capitalized except the field
            refers to a sub-config.
        :param type_: field type.
        :param default: default value for the field.
        :param desc: description for the field.
        :param warn: if field is deprecated, set a warn message.
        """
        if name not in self._fields:
            self._fields[name] = Field(name=name,
                                       type_=type_,
                                       default=default,
                                       desc=desc,
                                       warn=warn)
        else:
            raise ValueError('Field({}) is already defined.'.format(name))
|
f | rom sklearn2sql_heroku.tests.regression import generic as reg_gen
reg_gen.test_model("XGBRegressor" , "RandomReg_500" , "db2" | )
|
# https://leetcode.com/problems/valid-parentheses/
class Solution(object):
    def isValid(self, s):
        """
        :type s: str
        :rtype: bool

        A string is valid when every bracket is closed by the matching
        bracket type in the correct order. Classic stack solution: push
        openers, pop and compare on closers. O(n) time, O(n) space.
        """
        stack = []
        # Iterate characters directly: `xrange` was Python 2 only and the
        # index was used solely to fetch s[i].
        for ch in s:
            # opening bracket: getting deeper, push onto the stack
            if ch in "([{":
                stack.append(ch)
            # otherwise it must close the most recent unclosed opener
            else:
                if not stack:
                    return False
                last = stack.pop()
                if ch == ")" and last != "(":
                    return False
                if ch == "]" and last != "[":
                    return False
                if ch == "}" and last != "{":
                    return False
        # Valid only if every opener was closed (empty string is valid too).
        return len(stack) == 0
|
import pytest
from cleo.exceptions import LogicException
from cleo.exceptions import ValueException
from cleo.io.inputs.option import Option
def test_create():
    # A bare option defaults to a value-less flag with no shortcut.
    opt = Option("option")
    assert "option" == opt.name
    assert opt.shortcut is None
    assert opt.is_flag()
    assert not opt.accepts_value()
    assert not opt.requires_value()
    assert not opt.is_list()
    assert not opt.default
def test_dashed_name():
    # Leading dashes are stripped from the option name.
    opt = Option("--option")
    assert "option" == opt.name
def test_fail_if_name_is_empty():
    with pytest.raises(ValueException):
        Option("")
def test_fail_if_default_value_provided_for_flag():
    # A flag carries no value, so providing a default is a logic error.
    with pytest.raises(LogicException):
        Option("option", flag=True, default="default")
def test_fail_if_wrong_default_value_for_list_option():
    # A list option's default must itself be a list.
    with pytest.raises(LogicException):
        Option("option", flag=False, is_list=True, default="default")
def test_shortcut():
    opt = Option("option", "o")
    assert "o" == opt.shortcut
def test_dashed_shortcut():
    # A leading dash on the shortcut is stripped.
    opt = Option("option", "-o")
    assert "o" == opt.shortcut
def test_multiple_shortcuts():
    # '|'-separated shortcuts are normalized (dashes removed, order kept).
    opt = Option("option", "-o|oo|-ooo")
    assert "o|oo|ooo" == opt.shortcut
def test_fail_if_shortcut_is_empty():
    with pytest.raises(ValueException):
        Option("option", "")
def t | est_optional_value():
opt = Option("option", flag=False, requires_value=False)
assert not opt.is_flag()
assert opt.accepts_value()
assert not opt.requires_value()
assert not opt.is_list()
assert opt.default is None
def test_optional_value_with_default():
opt = Option("option", flag=False, requires_value=False, default="Default")
assert not opt.is_flag()
assert opt.accepts_value()
as | sert not opt.requires_value()
assert not opt.is_list()
assert opt.default == "Default"
def test_required_value():
opt = Option("option", flag=False, requires_value=True)
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert not opt.is_list()
assert opt.default is None
def test_required_value_with_default():
opt = Option("option", flag=False, requires_value=True, default="Default")
assert not opt.is_flag()
assert opt.accepts_value()
assert opt.requires_value()
assert not opt.is_list()
assert "Default" == opt.default
def test_list():
    """A list option requires values and defaults to an empty list."""
    option = Option("option", flag=False, is_list=True)
    assert not option.is_flag()
    assert option.accepts_value()
    assert option.requires_value()
    assert option.is_list()
    assert option.default == []
def test_multi_valued_with_default():
    """A list option keeps the list default it was given."""
    option = Option("option", flag=False, is_list=True, default=["foo", "bar"])
    assert not option.is_flag()
    assert option.accepts_value()
    assert option.requires_value()
    assert option.is_list()
    assert option.default == ["foo", "bar"]
|
from ctypes.util import find_library
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.base import (
DatabaseWrapper as SQLiteDatabaseWrapper, SQLiteCursorWrapper,
)
from .client import SpatiaLiteClient
from .features import DatabaseFeatures
from .introspection import SpatiaLiteIntrospection
from .operations import SpatiaLiteOperations
from .schema import SpatialiteSchemaEditor
class DatabaseWrapper(SQLiteDatabaseWrapper):
    """SQLite database wrapper with SpatiaLite support.

    Locates the SpatiaLite shared library at construction time, loads it
    as an SQLite extension on each new connection, and initializes the
    spatial metadata tables on first use.
    """

    SchemaEditorClass = SpatialiteSchemaEditor
    # Classes instantiated in __init__().
    client_class = SpatiaLiteClient
    features_class = DatabaseFeatures
    introspection_class = SpatiaLiteIntrospection
    ops_class = SpatiaLiteOperations

    def __init__(self, *args, **kwargs):
        # Trying to find the location of the SpatiaLite library.
        # Here we are figuring out the path to the SpatiaLite library
        # (`libspatialite`). If it's not in the system library path (e.g., it
        # cannot be found by `ctypes.util.find_library`), then it may be set
        # manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting.
        self.spatialite_lib = getattr(settings, 'SPATIALITE_LIBRARY_PATH',
                                      find_library('spatialite'))
        if not self.spatialite_lib:
            raise ImproperlyConfigured('Unable to locate the SpatiaLite library. '
                                       'Make sure it is in your library path, or set '
                                       'SPATIALITE_LIBRARY_PATH in your settings.'
                                       )
        # Zero-argument super() is safe: the file already requires Python 3
        # (it uses `raise ... from exc` below).
        super().__init__(*args, **kwargs)

    def get_new_connection(self, conn_params):
        """Return a new SQLite connection with the SpatiaLite extension loaded.

        Raises ImproperlyConfigured if the SQLite build disallows extension
        loading or the SpatiaLite library itself cannot be loaded.
        """
        conn = super().get_new_connection(conn_params)
        # Enabling extension loading on the SQLite connection.
        try:
            conn.enable_load_extension(True)
        except AttributeError:
            raise ImproperlyConfigured(
                'SpatiaLite requires SQLite to be configured to allow '
                'extension loading.'
            )
        # Load the SpatiaLite library extension through a throwaway cursor;
        # the configured connection (not the cursor) is returned.
        cur = conn.cursor(factory=SQLiteCursorWrapper)
        try:
            cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,))
        except Exception as exc:
            raise ImproperlyConfigured(
                'Unable to load the SpatiaLite library extension "%s"' % self.spatialite_lib
            ) from exc
        cur.close()
        return conn

    def prepare_database(self):
        """Initialize the SpatiaLite metadata tables if they are missing."""
        super().prepare_database()
        # Check if spatial metadata have been initialized in the database
        with self.cursor() as cursor:
            cursor.execute("PRAGMA table_info(geometry_columns);")
            if not cursor.fetchall():
                # "1" asks newer SpatiaLite versions to initialize metadata
                # within a single transaction.
                arg = "1" if self.features.supports_initspatialmetadata_in_one_transaction else ""
                cursor.execute("SELECT InitSpatialMetaData(%s)" % arg)
|
# Xlib.__init__ -- glue for Xlib package
#
# Copyright (C) 2000-2002 Peter Liljenberg <petli@ctrl-c.liu.se>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software | Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
# Package version, exposed both as a tuple and as a dotted string.
__version__ = (0, 31)
__version_extra__ = ''
__version_string__ = '.'.join(str(part) for part in __version__) + __version_extra__

__all__ = [
    'X',
    'XK',
    'Xatom',
    'Xcursorfont',
    'Xutil',
    'display',
    'error',
    'rdb',
    # Explicitly exclude threaded, so that it isn't imported by
    # from Xlib import *
]
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at you | r option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330 | , Boston, MA 02111-1307, USA.
#
# Authors:
# Santiago Dueñas <sduenas@bitergia.com>
# Alvaro del Castillo San Felix <acs@bitergia.com>
#
import logging
import pickle
import rq
from .common import CH_PUBSUB
logger = logging.getLogger(__name__)
class ArthurWorker(rq.Worker):
    """Worker class for Arthur"""

    def __init__(self, queues, **kwargs):
        super().__init__(queues, **kwargs)
        self.__pubsub_channel = CH_PUBSUB

    @property
    def pubsub_channel(self):
        # Redis channel on which job results are announced.
        return self.__pubsub_channel

    @pubsub_channel.setter
    def pubsub_channel(self, value):
        self.__pubsub_channel = value

    def perform_job(self, job, queue):
        """Custom method to execute a job and notify of its result

        :param job: Job object
        :param queue: the queue containing the object
        """
        result = super().perform_job(job, queue)

        status = job.get_status()
        if status == 'finished':
            outcome = job.return_value
        else:
            outcome = None

        payload = pickle.dumps({
            'job_id': job.id,
            'status': status,
            'result': outcome
        })
        self.connection.publish(self.pubsub_channel, payload)

        return result
|
from django.conf import settings
def mask_toggle(number_to_mask_or_unmask):
    """XOR the given number with ``settings.MASKING_KEY``.

    XOR is its own inverse, so applying this function twice restores the
    original value — the same call both masks and unmasks.
    """
    masking_key = settings.MASKING_KEY
    return masking_key ^ int(number_to_mask_or_unmask)
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import unittest
import frappe
from frappe.utils import cstr, flt, nowdate, random_string
from erpnext.hr.doctype.employee.test_employee import make_employee
from erpnext.hr.doctype.vehicle_log.vehicle_log import make_expense_claim
class TestVehicleLog(unittest.TestCase):
	def setUp(self):
		# Reuse the test driver employee if a previous run already created it.
		existing = frappe.db.sql("""select name from `tabEmployee` where name='testdriver@example.com'""")
		if existing:
			self.employee_id = existing[0][0]
		else:
			self.employee_id = make_employee("testdriver@example.com", company="_Test Company")
		self.license_plate = get_vehicle(self.employee_id)

	def tearDown(self):
		frappe.delete_doc("Vehicle", self.license_plate, force=1)
		frappe.delete_doc("Employee", self.employee_id, force=1)

	def test_make_vehicle_log_and_syncing_of_odometer_value(self):
		log = make_vehicle_log(self.license_plate, self.employee_id)

		# On submit, the vehicle's odometer is synced with the log entry.
		vehicle = frappe.get_doc("Vehicle", self.license_plate)
		self.assertEqual(vehicle.last_odometer, log.odometer)

		# On cancellation, the odometer is rolled back by the distance travelled.
		distance_travelled = log.odometer - log.last_odometer
		odometer_at_submit = log.odometer
		log.cancel()
		vehicle.reload()
		self.assertEqual(vehicle.last_odometer, odometer_at_submit - distance_travelled)

		log.delete()

	def test_vehicle_log_fuel_expense(self):
		log = make_vehicle_log(self.license_plate, self.employee_id)
		expense_claim = make_expense_claim(log.name)
		# Fuel cost = qty * price as set in make_vehicle_log (50 litres @ 500).
		self.assertEqual(expense_claim.expenses[0].amount, 50 * 500)

		log.cancel()
		frappe.delete_doc("Expense Claim", expense_claim.name)
		frappe.delete_doc("Vehicle Log", log.name)

	def test_vehicle_log_with_service_expenses(self):
		log = make_vehicle_log(self.license_plate, self.employee_id, with_services=True)
		expense_claim = make_expense_claim(log.name)
		# Fuel (25000) plus the two service rows (500 + 1500).
		self.assertEqual(expense_claim.expenses[0].amount, 27000)

		log.cancel()
		frappe.delete_doc("Expense Claim", expense_claim.name)
		frappe.delete_doc("Vehicle Log", log.name)
def get_vehicle(employee_id):
	"""Create a test Vehicle for the employee and return its license plate.

	If a vehicle with the generated plate already exists, it is reused.
	"""
	plate = random_string(10).upper()
	vehicle = frappe.get_doc({
		"doctype": "Vehicle",
		"license_plate": cstr(plate),
		"make": "Maruti",
		"model": "PCM",
		"employee": employee_id,
		"last_odometer": 5000,
		"acquisition_date": nowdate(),
		"location": "Mumbai",
		"chassis_no": "1234ABCD",
		"uom": "Litre",
		"vehicle_value": flt(500000)
	})
	try:
		vehicle.insert()
	except frappe.DuplicateEntryError:
		# Vehicle already present from an earlier run; reuse it.
		pass
	return plate
def make_vehicle_log(license_plate, employee_id, with_services=False):
	"""Create, save, and submit a Vehicle Log for the given plate/employee.

	When ``with_services`` is true, two service detail rows are appended
	before saving.
	"""
	log = frappe.get_doc({
		"doctype": "Vehicle Log",
		"license_plate": cstr(license_plate),
		"employee": employee_id,
		"date": nowdate(),
		"odometer": 5010,
		"fuel_qty": flt(50),
		"price": flt(500)
	})

	if with_services:
		services = (
			{
				"service_item": "Oil Change",
				"type": "Inspection",
				"frequency": "Mileage",
				"expense_amount": flt(500)
			},
			{
				"service_item": "Wheels",
				"type": "Change",
				"frequency": "Half Yearly",
				"expense_amount": flt(1500)
			},
		)
		for service in services:
			log.append("service_detail", service)

	log.save()
	log.submit()
	return log
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.