prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
# coding=utf-8
from safe.common.exceptions import NoAttributeInLayerError
from safe.impact_functions.bases.utilities import check_attribute_exist

__author__ = 'Rizky Maulana Nugraha "lucernae" <lana.pcfre@gmail.com>'
__date__ = '08/05/15'


class ClassifiedVectorExposureMixin(object):
    """Mixin holding the exposure class attribute and its unique values."""

    def __init__(self):
        self._exposure_class_attribute = None
        self._exposure_unique_values = None

    @property
    def exposure_class_attribute(self):
        """The attribute name used to classify the exposure layer."""
        return self._exposure_class_attribute

    @exposure_class_attribute.setter
    def exposure_class_attribute(self, value):
        """Set the class attribute after validating it exists in the layer.

        As a side effect, collects every distinct value found under that
        attribute into :attr:`exposure_unique_values`.

        :raises NoAttributeInLayerError: if the attribute is missing.
        """
        # self.exposure is from base IF.
        exposure_layer = self.exposure.qgis_vector_layer()
        attribute_ok = (
            exposure_layer and check_attribute_exist(exposure_layer, value))
        if not attribute_ok:
            message = ('The attribute "%s" does not exist in the exposure '
                       'layer.') % value
            raise NoAttributeInLayerError(message)
        self._exposure_class_attribute = value
        # finding unique values in layer
        if exposure_layer:
            attr_index = exposure_layer.dataProvider().fieldNameIndex(value)
            seen = []
            for feature in exposure_layer.getFeatures():
                feature_value = feature.attributes()[attr_index]
                if feature_value not in seen:
                    seen.append(feature_value)
            self.exposure_unique_values = seen

    @property
    def exposure_unique_values(self):
        """Unique values found for the exposure class attribute."""
        return self._exposure_unique_values

    @exposure_unique_values.setter
    def exposure_unique_values(self, value):
        self._exposure_unique_values = value
# Copyright (C) 2010 Trinity Western University
from cube.books.models import Book
from cube.twupass.settings import TWUPASS_LOGOUT_URL
from django.contrib.auth.models import User
from django.contrib import admin
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template, redirect_to

admin.autodiscover()

urlpatterns = patterns('',
    url(r'^twupass-logout/$', redirect_to, {'url': TWUPASS_LOGOUT_URL},
        name="twupass-logout"),
    url(r'^help/$', direct_to_template, {'template': 'help.html'},
        name="help"),
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    (r'^admin/(.*)', admin.site.root),
)

urlpatterns += patterns('cube.twupass.views',
    (r'^$', 'login_cube'),
    (r'^logout/$', 'logout_cube')
)

urlpatterns += patterns('cube.books.views.books',
    url(r'^books/$', 'book_list', name="list"),
    url(r'^books/update/book/$', 'update_book', name="update_book"),
    url(r'^books/update/book/edit/$', 'update_book_edit',
        name="update_book_edit"),
    # FIX: pattern previously lacked the leading ^ anchor, so it could match
    # anywhere inside a request path instead of only at its start.
    url(r'^books/update/remove_holds_by_user/$', 'remove_holds_by_user',
        name="remove_holds_by_user"),
    url(r'^add_book/$', 'add_book', name="add_book"),
    url(r'^add_new_book/$', 'add_new_book', name="add_new_book"),
    url(r'^attach_book/$', 'attach_book', name="attach_book"),
    url(r'^my_books/$', 'my_books', name="my_books"),
)

urlpatterns += patterns('cube.books.views.reports',
    url(r'^reports/$', 'menu', name="reports_menu"),
    url(r'^reports/per_status/$', 'per_status', name='per_status'),
    url(r'^reports/books_sold_within_date/$', 'books_sold_within_date',
        name='books_sold_within_date'),
    url(r'^reports/user/(\d+)/$', 'user', name='user'),
    url(r'^reports/book/(\d+)/$', 'book', name='book'),
    url(r'^reports/metabook/(\d+)/$', 'metabook', name='metabook'),
    url(r'^reports/holds_by_user/$', 'holds_by_user', name='holds_by_user'),
)

urlpatterns += patterns('cube.books.views.metabooks',
    url(r'^metabooks/$', 'metabook_list', name="list_metabooks"),
    # FIX: pattern previously lacked the leading ^ anchor (see above).
    url(r'^metabooks/update/$', 'update', name="update_metabooks"),
)

urlpatterns += patterns('cube.books.views.staff',
    url(r'^staff/$', 'staff_list', name="staff"),
    url(r'^staff_edit/$', 'staff_edit', name="staff_edit"),
    url(r'^update_staff/$', 'update_staff', name="update_staff"),
)

urlpatterns += patterns('cube.books.views.admin',
    url(r'^books/admin/dumpdata/$', 'dumpdata', name='dumpdata'),
    url(r'^books/admin/bad_unholds/$', 'bad_unholds', name='bad_unholds'),
)

urlpatterns += patterns('cube.users.views',
    url(r'^profile/$', 'profile', name='profile'),
    url(r'^profile/edit/$', 'edit_profile', name='edit_profile')
)

urlpatterns += patterns('cube.appsettings.views',
    url(r'^appsettings/$', 'setting_list', name='appsettings'),
    url(r'^appsettings/(\d+)/$', 'edit_setting', name='edit_setting'),
    url(r'^appsettings/save/$', 'save_setting', name="save_setting"),
)
'''
https://leetcode.com/problems/path-sum/#/description

Given a binary tree and a sum, determine if the tree has a root-to-leaf
path such that adding up all the values along the path equals the given
sum.

For example:
Given the below binary tree and sum = 22,
              5
             / \
            4   8
           /   / \
          11  13  4
         /  \      \
        7    2      1
return true, as there exist a root-to-leaf path 5->4->11->2 which sum is 22.
'''
# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None


class Solution(object):
    def hasPathSum(self, root, sum):
        """Return True when some root-to-leaf path adds up to `sum`.

        :type root: TreeNode
        :type sum: int
        :rtype: bool
        """
        if root is None:
            return False
        return self.sumAndCheck(root, 0, sum, False)

    def sumAndCheck(self, node, total, sum, answer):
        """Depth-first search for a root-to-leaf path totalling `sum`.

        `total` is the sum of the values above `node`; `answer`
        short-circuits the search once a match was already found (the
        parameter is kept for backward compatibility of the signature).
        """
        if answer:
            return True
        if node is None:
            return False
        total += node.val
        if self.isLeaf(node):
            # A path only counts when it ends exactly at a leaf.
            return total == sum
        # `or` short-circuits, so the right subtree is skipped as soon as
        # the left one contains a matching path (the original version
        # always recursed into both children, even for non-matching leaves).
        return (self.sumAndCheck(node.left, total, sum, answer) or
                self.sumAndCheck(node.right, total, sum, answer))

    def isLeaf(self, node):
        """True when `node` has neither a left nor a right child."""
        return not node.left and not node.right
import csv import math import numpy as np from PIL import Image width = 854 height = 480 fov_multiplier = 1.73 # For 60 degrees, set to 1.73. For 90 degrees, set to 1. minwh2 = 0.5 * min(width, height) class Star: def __init__(self, ra, dec, parallax, g_flux, bp_flux, rp_flux): self.ra = ra self.dec = dec self.parallax = parallax self.g_flux = g_flux self.bp_flux = bp_flux self.rp_flux = rp_flux distance_parsecs = 1000 / parallax distance_ly = distance_parsecs * 3.26156 ra_rad = ra * math.pi / 180 dec_rad = (dec + 90) * math.pi / 180 self.x = distance_ly * math.sin(dec_rad) * math.cos(ra_rad) self.y = distance_ly * math.sin(dec_rad) * math.sin(ra_rad) self.z = distance_ly * math.cos(dec_rad) self.absolute_luminosity = g_flux * distance_ly**2 def ParseFloat(s): try: return float(s) except: return 0 stars = [] with open('lmc-stars.csv', 'rb') as input_file: reader = csv.DictReader(input_file) for row in reader: stars.append(Star( ParseFloat(row['ra']), ParseFloat(row['dec']), ParseFloat(row['parallax']), ParseFloat(row['phot_g_mean_flux']), ParseFloat(row['phot_bp_mean_flux']), ParseFloat(row['phot_rp_mean_flux']) )) def ProjectPointOntoVector(p, v): return np.dot(p, v) / dot(v, v) def IntegrateFromPointOfView(position, direction, up): g_flux = np.zeros((width, height)) red_flux = np.zeros((width, height)) blue_flux = np.zeros((width, height)) right = -np.cross(direction, up) for s in stars: transformed = [s.x - position[0], s.y - position[1], s.z - position[2]] x = np.dot(transformed, right) y = np.dot(transformed, up) z = np.dot(transformed, direction) if z < 1: continue sx = int(width / 2 + fov_multiplier * minwh2 * x / z) sy = int(height / 2 - fov_multiplier * minwh2 * y / z) if sx < 0 or sx >= width or sy < 0 or sy >= height: continue d2 = x**2 + y**2 + z**2 apparent_luminosity = s.absolute_luminosity / d2 g_flux[sx,sy] += apparent_luminosity redness = 0.5 if s.rp_flux + s.bp_flux > 0: redness = s.rp_flux / (s.rp_flux + s.bp_flux) red_flux[sx,sy] += 
apparent_luminosity * redness blue_flux[sx,sy] += apparent_luminosity * (1 - redness) return g_flux, red_flux, blue_flux # Mix the two colors in the proportion specified by the ratio. def MixColors(color1, color2, ratio): r = ratio * color2[0] + (1 - ratio) * color1[0] g = ratio * color2[1] + (1 - ratio) * color1[1] b = ratio * color2[2] + (1 - ratio) * color1[2] return r, g, b # Converts a color's components to integer values. def IntColor(c): return (int(c[0]), int(c[1]), int(c[2])) # What fraction of the way between lo and hi is the value? If outside the # range of (lo,hi), it's capped to 0 and 1 respectively. def CappedRange(lo, hi, value): if value < lo: return float(0) elif value > hi: return float(1) else: return float(value - lo) / (hi - lo) # redness is a number between 0 and 1. It's the ratio of red to blue light. def RednessRatioToColor(redness): red = (255, 0, 0) blue = (0, 0, 255) return MixColors(red, blue, CappedRange(0.3, 0.9, redness)) # g_normalized: a number between 0 and 1 representing the percentile # brightness of a pixel. # red_flux: how much total red flux in a pixel. No need to normalize. # blue_flux: how much total blue flux in a pixel. No need to normalize. def FluxToColor(g_normalized, red_flux, blue_flux): redness = 0.6 if red_flux + blue_flux > 0: redness = red_flux / (red_flux + blue_flux) base_color = RednessRatioToColor(redness) black = (0, 0, 0) white = (255, 255, 255) if g_normalized < 0.5: return MixColors(bla
ck, base_color, CappedRange(0, 0.5, g_normalized)) else: return MixColors(bas
e_color, white, CappedRange(0.5, 1, g_normalized)) # Normalizes a raw flux value into the range [0,1]. def FluxPercentile(flux, sorted_sample): lo = 0 hi = len(sorted_sample) while hi - lo > 1: mid = int((lo + hi) / 2) if flux >= sorted_sample[mid]: lo = mid else: hi = mid return 1.0 * lo / len(sorted_sample) frame_number = 1 def RenderImageFromFlux(g_flux, red_flux, blue_flux): global frame_number sorted_flux = [] for i in range(width): for j in range(height): flux = g_flux[i,j] if flux > 0.000000001: sorted_flux.append(flux) sorted_flux.sort() image = Image.new('RGB', (width, height)) for i in range(width): for j in range(height): p = FluxPercentile(g_flux[i,j], sorted_flux) color = FluxToColor(p, red_flux[i,j], blue_flux[i,j]) image.putpixel((i, j), IntColor(color)) image.save('frames/lmc%05d.png' % frame_number) frame_number += 1 def RenderFrameFromPointOfView(position, direction, up): g_flux, red_flux, blue_flux = IntegrateFromPointOfView(position, direction, up) RenderImageFromFlux(g_flux, red_flux, blue_flux) num_frames = 10 * 30 up = np.array([0, 1, 0]) lmc = np.array([8950, 59000, 152880]) orbit_radius = 100 * 1000 for i in range(num_frames): print 'Frame', (i + 1), 'of', num_frames angle = 2 * math.pi * i / num_frames direction = np.array([math.sin(angle), 0, -math.cos(angle)]) position = lmc - orbit_radius * direction RenderFrameFromPointOfView(position, direction, up)
# -*- coding: utf-8 -*-
# Some utils
import hashlib
import uuid


def get_hash(data):
    """Return the hex SHA-256 digest of `data`.

    Accepts bytes directly; a str is UTF-8 encoded first (previously a str
    argument raised TypeError under Python 3, since hashlib requires bytes).
    """
    if isinstance(data, str):
        data = data.encode('utf-8')
    return hashlib.sha256(data).hexdigest()


def get_token():
    """Return a fresh random token (a UUID4 string)."""
    return str(uuid.uuid4())
se the casenames in public.resource.org can be so # long this varies too much. # if stats['case_name_similarities'][i] < 0.125: # # The case name is wildly different # continue if stats['length_diffs'][i] > 400: # The documents have wildly different lengths continue # Commented out because the headnotes sometimes included in Resource.org made this calculation vary too much. #elif stats['gestalt_diffs'][i] < 0.4: # # The contents are wildly different # continue elif stats['cos_sims'][i] < 0.90: # Very different cosine similarities continue else: # It's a reasonably close match. filtered_candidates.append(candidates[i]) filtered_stats['case_name_similarities'].append(stats['case_name_similarities'][i]) filtered_stats['length_diffs'].append(stats['length_diffs'][i]) filtered_stats['gestalt_diffs'].append(stats['gestalt_diffs'][i]) filtered_stats['cos_sims'].append(stats['cos_sims'][i]) filtered_stats['candidate_count'] = len(filtered_candidates) return filtered_candidates, filtered_stats class Case(object): def _get_case_name_and_status(self): case_name = self.url_element.get('title').lower() ca1regex = re.compile('(unpublished disposition )?notice: first circuit local rule 36.2\(b\)6 states unpublished opinions may be cited only in related cases.?') ca2regex = re.compile('(unpublished disposition )?notice: second circuit local rule 0.23 states unreported opinions shall not be cited or otherwise used in unrelated cases.?') ca2regex2 = re.compile('(unpublished disposition )?notice: this summary order may not be cited as precedential authority, but may be called to the attention of the court in a subsequent stage of this case, in a related case, or in any case for purposes of collateral estoppel or res judicata. 
see second circuit rule 0.23.?') ca3regex = re.compile('(unpublished disposition )?notice: third circuit rule 21\(i\) states citations to federal decisions which have not been formally reported should identify the court, docket number and date.?') ca4regex = re.compile('(unpublished disposition )?notice: fourth circuit (local rule 36\(c\)|i.o.p. 36.6) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the fourth circuit.?') ca5regex = re.compile('(unpublished disposition )?notice: fifth circuit local rule 47.5.3 states that unpublished opinions should normally be cited only when they establish the law of the case, are relied upon as a basis for res judicata or collateral estoppel, or involve related facts. if an unpublished opinion is cited, a copy shall be attached to each copy of the brief.?') ca6regex = re.compile('(unpublished disposition )?notice: sixth circuit rule 24\(c\) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the sixth circuit.?') ca7regex = re.compile('(unpublished disposition )?notice: seventh circuit rule 53\(b\)\(2\) states unpublished orders shall not be cited or used as precedent except to support a claim of res judicata, collateral estoppel or law of the case in any federal court within the circuit.?') ca8regex = re.compile('(unpublished disposition )?notice: eighth circuit rule 28a\(k\) governs citation of unpublished opinions and provides that (no party may cite an opinion not intended for publication unless the cases are related by identity between the parties or the causes of action|they are not precedent and generally should not be cited unless relevant to establishing the doctrines of res judicata, collateral estoppel, the law of the 
case, or if the opinion has persuasive value on a material issue and no published opinion would serve as well).?') ca9regex = re.compile('(unpublished disposition )?notice: ninth circuit rule 36-3 provides that dispositions other than opinions or orders designated for publication are not precedential and should not be cited except when relevant under the doctrines of law of the case, res judicata, or collateral estoppel.?') ca10regex = re.compile('(unpublished disposition )?notice: tenth circuit rule 36.3 states that unpublished opinions and orders and judgments have no precedential value and shall not be cited except for purposes of establishing the doctrines of the law of the case, res judicata, or collateral estoppel.?') cadcregex = re.compile('(unpublished disposition )?notice: d.c. circuit local rule 11\(c\) states that unpublished orders, judgments, and explanatory memoranda may not be cited as precedents, but counsel may refer to unpublished dispositions when the binding or preclusive effect of the disposition, rather than its quality as precedent, is relevant.?') cafcregex = re.compile('(unpublished disposition )?notice: federal circuit local rule 47.(6|8)\(b\) states that opinions and orders which are designated as not citable as precedent shall not be employed or cited as precedent. 
this does not preclude assertion of issues of claim preclusion, issue preclusion, judicial estoppel, law of the case or the like based on a decision of the court rendered in a nonprecedential opinion or order.?') # Clean off special cases if 'first circuit' in case_name: case_name = re.sub(ca1regex, '', case_name) status = 'Unpublished' elif 'second circuit' in case_name: case_name = re.sub(ca2regex, '', case_name) case_name = re.sub(ca2regex2, '', case_name) status = 'Unpublished' elif 'third circuit' in case_name: case_name = re.sub(ca3regex, '', case_name) status = 'Unpublished' elif 'fourth circuit' in case_name: case_name = re.sub(ca4regex, '', case_name) status = 'Unpublished' elif 'fifth circuit' in case_name: case_name = re.sub(ca5regex, '', case_name) status = 'Unpublished' elif 'sixth circuit' in case_name: case_name = re.sub(ca6regex, '', case_name) status = 'Unpublished' elif 'seventh circuit' in case_name: case_name = re.sub(ca7regex, '', case_name) status = 'Unpublished' elif 'eighth circuit' in case_name: case_name = re.sub(ca8regex, '', case_name) status = 'Unpublished' elif 'ninth circuit' in case_name: case_name = re.sub(ca9regex, '', case_name) status = 'Unpublished' elif 'tenth circuit' in case_name: case_name = re.sub(ca10regex, '', case_name) status = 'Unpublished' elif 'd.c. circuit' in case_name: case_name = re.sub(cadcregex, '', case_name) status = 'Unpublished' elif 'federal circuit' in case_name: case_name = re.sub(cafcregex, '', case_name) status = 'Unpublished' else: status = 'Published' case_name = titlecase(harmonize(clean_string(case_name))) if case_name == '' or case_name == 'unpublished disposition': # No luck getting the case name saved_case_name =
self._check_fix_list(self.sha1_hash, self.case_name_dict) if saved_case_name: case_name = saved_case_name else:
print self.url if BROWSER: subprocess.Popen([BROWSER, self.url], shell=False).communicate() case_name = raw_input("Short case name: ") self.case_name_fix_file.write("%s|%s\n" % (self.sha1_hash, case_name)) return case_name, status def get_html_from_raw_text(raw_text): """Using the raw_text, creates four useful variables: 1. complete_html_tree: A t
from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
from typing import Optional
from urllib.parse import urlparse

from httpobs.conf import (RETRIEVER_CONNECT_TIMEOUT,
                          RETRIEVER_CORS_ORIGIN,
                          RETRIEVER_READ_TIMEOUT,
                          RETRIEVER_USER_AGENT)
from httpobs.scanner.utils import parse_http_equiv_headers

import logging
import requests

# Disable the requests InsecureRequestWarning -- we will track certificate errors manually when
# verification is disabled. Also disable requests errors at levels lower than CRITICAL, see:
# https://github.com/celery/celery/issues/3633 for crashy details
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
logging.getLogger('requests').setLevel(logging.CRITICAL)

# MIME types for HTML requests
HTML_TYPES = ('text/html', 'application/xhtml+xml')

# Maximum timeout for requests for all GET requests for anything but the TLS Observatory
# The default ConnectionTimeout is something like 75 seconds, which means that things like
# tiles can take ~600s to timeout, since they have 8 DNS entries.  Setting it to lower
# should hopefully keep requests from taking forever
TIMEOUT = (RETRIEVER_CONNECT_TIMEOUT, RETRIEVER_READ_TIMEOUT)


# Create a session, returning the session and the HTTP response in a dictionary
# Don't create the sessions if it can't connect and retrieve the root of the website
# TODO: Allow people to scan a subdirectory instead of using '/' as the default path?
def __create_session(url: str, **kwargs) -> dict:
    """Open a requests.Session against `url`.

    Returns {'session': Session or None, 'response': Response or None};
    both are None when the site can't be reached at all. On SSL errors a
    second, unverified attempt is made and `response.verified` is set
    accordingly.
    """
    s = requests.Session()

    # Allow certificate verification to be disabled on the initial request, which means that
    # sites won't get penalized on things like HSTS, even for self-signed certificates
    s.verify = kwargs['verify']

    # Add the headers to the session
    if kwargs['headers']:
        s.headers.update(kwargs['headers'])

    # Set all the cookies and force them to be sent only over HTTPS; this
    # might change in the future
    if kwargs['cookies']:
        s.cookies.update(kwargs['cookies'])
        for cookie in s.cookies:
            cookie.secure = True

    # Override the User-Agent; some sites (like twitter) don't send the CSP header unless you
    # have a modern user agent
    s.headers.update({
        'User-Agent': RETRIEVER_USER_AGENT,
    })

    try:
        r = s.get(url, timeout=TIMEOUT)

        # No tls errors
        r.verified = True
    # Let celery exceptions percolate upward
    except (SoftTimeLimitExceeded, TimeLimitExceeded):
        raise
    # We can try again if there's an SSL error, making sure to note it in the session
    except requests.exceptions.SSLError:
        try:
            r = s.get(url, timeout=TIMEOUT, verify=False)
            r.verified = False
        except (KeyboardInterrupt, SystemExit):
            raise
        # Deliberate best-effort: any other failure means "site unreachable".
        except:
            r = None
            s = None
    except (KeyboardInterrupt, SystemExit):
        raise
    # Deliberate best-effort: any other failure means "site unreachable".
    except:
        r = None
        s = None

    # Store the domain name and scheme in the session
    if r is not None and s is not None:
        s.url = urlparse(r.url)

    return {'session': s, 'response': r}


def __get(session, relative_path='/', headers=None, cookies=None):
    """GET `relative_path` on the session's host; None on any failure."""
    if not headers:
        headers = {}

    if not cookies:
        cookies = {}

    try:
        # TODO: limit the maximum size of the response, to keep malicious site operators from killing us
        # TODO: Perhaps we can naively do it for now by simply setting a timeout?
        # TODO: catch TLS errors instead of just setting it to None?
        return session.get(session.url.scheme + '://' + session.url.netloc + relative_path,
                           headers=headers,
                           cookies=cookies,
                           timeout=TIMEOUT)
    # Let celery exceptions percolate upward
    except (SoftTimeLimitExceeded, TimeLimitExceeded):
        raise
    except (KeyboardInterrupt, SystemExit):
        raise
    # Deliberate best-effort: a missing resource is simply reported as None.
    except:
        return None


def __get_page_text(response: requests.Response, force: bool = False) -> Optional[str]:
    """Extract the body text of a response, or None when unusable.

    FIXES: the return annotation previously claimed `str` although None is
    returned on failure; the Content-Type comparison now strips any
    parameters (e.g. '; charset=utf-8') before matching HTML_TYPES, matching
    how retrieve_all() performs the same check.
    """
    if response is None:
        return None
    elif response.status_code == 200 or force:  # Some pages we want to get the page text even with non-200s
        # A quick and dirty check to make sure that somebody's 404 page didn't actually return 200 with html
        ext = (response.history[0].url if response.history else response.url).split('.')[-1]
        content_type = response.headers.get('Content-Type', '').split(';')[0]
        if content_type in HTML_TYPES and ext in ('json', 'txt', 'xml'):
            return None

        return response.text
    else:
        return None


def retrieve_all(hostname, **kwargs):
    """Retrieve the root page and well-known resources for `hostname`.

    Keyword args: cookies, headers, http_port, https_port, path, verify.
    Returns a dict with the hostname, fetched resources, per-scheme
    responses (https preferred for 'auto') and the reusable session.
    """
    kwargs['cookies'] = kwargs.get('cookies', {})    # HTTP cookies to send, instead of from the database
    kwargs['headers'] = kwargs.get('headers', {})    # HTTP headers to send, instead of from the database

    # This way of doing it keeps the urls tidy even if makes the code ugly
    kwargs['http_port'] = ':' + str(kwargs.get('http_port', '')) if 'http_port' in kwargs else ''
    kwargs['https_port'] = ':' + str(kwargs.get('https_port', '')) if 'https_port' in kwargs else ''
    kwargs['path'] = kwargs.get('path', '/')
    kwargs['verify'] = kwargs.get('verify', True)

    retrievals = {
        'hostname': hostname,
        'resources': {
        },
        'responses': {
            'auto': None,  # whichever of 'http' or 'https' actually works, with 'https' as higher priority
            'cors': None,  # CORS preflight test
            'http': None,
            'https': None,
        },
        'session': None,
    }

    # The list of resources to get
    resources = (
        '/clientaccesspolicy.xml',
        '/contribute.json',
        '/crossdomain.xml',
        '/robots.txt'
    )

    # Create some reusable sessions, one for HTTP and one for HTTPS
    http_session = __create_session('http://' + hostname + kwargs['http_port'] + kwargs['path'], **kwargs)
    https_session = __create_session('https://' + hostname + kwargs['https_port'] + kwargs['path'], **kwargs)

    # If neither one works, then the site just can't be loaded
    if http_session['session'] is None and https_session['session'] is None:
        return retrievals

    else:
        # Store the HTTP only and HTTPS only responses (some things can only be retrieved over one or the other)
        retrievals['responses']['http'] = http_session['response']
        retrievals['responses']['https'] = https_session['response']

        if https_session['session'] is not None:
            retrievals['responses']['auto'] = https_session['response']
            retrievals['session'] = https_session['session']
        else:
            retrievals['responses']['auto'] = http_session['response']
            retrievals['session'] = http_session['session']

        # Store the contents of the "base" page
        retrievals['resources']['__path__'] = __get_page_text(retrievals['responses']['auto'], force=True)

        # Do a CORS preflight request
        retrievals['responses']['cors'] = __get(retrievals['session'],
                                                kwargs['path'],
                                                headers={'Origin': RETRIEVER_CORS_ORIGIN})

        # Store all the files we retrieve
        for resource in resources:
            resp = __get(retrievals['session'], resource)
            retrievals['resources'][resource] = __get_page_text(resp)

        # Parse out the HTTP meta-equiv headers
        if (retrievals['responses']['auto'].headers.get('Content-Type', '').split(';')[0] in HTML_TYPES and
                retrievals['resources']['__path__']):
            retrievals['responses']['auto'].http_equiv = parse_http_equiv_headers(retrievals['resources']['__path__'])
        else:
            retrievals['responses']['auto'].http_equiv = {}

    return retrievals
import unittest
import datetime
import httpretty as HP
import json
from urllib.parse import parse_qsl

from malaysiaflights.aa import AirAsia as AA


class AARequestTests(unittest.TestCase):
    """Verifies the outgoing AirAsia search request."""

    def url_helper(self, from_, to, date):
        """Build the expected search host, path and POST body."""
        host = 'https://argon.airasia.com'
        path = '/api/7.0/search'
        body = {'origin': from_,
                'destination': to,
                'depart': date,
                'passenger-count': '1',
                'infant-count': '0',
                'currency': 'MYR'}
        return host, path, body

    @HP.activate
    def test_search_calls_api_using_correct_path_and_body(self):
        host, path, body = self.url_helper('KUL', 'TGG', '18-06-2015')
        HP.register_uri(HP.POST, host+path, status=200)
        d = datetime.datetime(2015, 6, 18)
        AA.search('KUL', 'TGG', d)
        mocked_request = HP.last_request()
        actual_body = dict(parse_qsl(mocked_request.body.decode()))
        self.assertEqual(path, mocked_request.path)
        self.assertEqual(body, actual_body)


class ResponseExtractionTests(unittest.TestCase):
    """Extraction of flight details from canned API responses."""

    def fixture_loader(self, path):
        """Load a JSON fixture relative to the fixtures folder."""
        prefix = 'malaysiaflights/fixtures/'
        with open(prefix + path, 'r') as file_:
            return json.loads(file_.read())

    def setUp(self):
        self.single = self.fixture_loader('aa-single.json')
        self.zero = self.fixture_loader('aa-no-flights.json')

    # FIX applied throughout this class: local variables were named `json`,
    # shadowing the imported json module; renamed to `data`.

    def test_get_number_of_results_for_valid_response(self):
        data = self.single
        actual = AA.get_number_of_results(data)
        self.assertEqual(4, actual)

    def test_get_number_of_results_for_no_flights_on_date(self):
        data = self.zero
        actual = AA.get_number_of_results(data)
        self.assertEqual(0, actual)

    def test_get_flight_details_using_index_0_should_return_results(self):
        data = self.single
        expected = {
            'flight_number': 'AK6225',
            'departure_airport': 'TGG',
            'arrival_airport': 'KUL',
            'departure_time': 'Sat, 20 Jun 2015 08:20:00 +0800',
            'arrival_time': 'Sat, 20 Jun 2015 09:15:00 +0800',
            'total_fare': 133.99,
            'fare_currency': 'MYR'}
        actual = AA.get_direct_flight_details(data, 0)
        self.assertEqual(expected, actual)

    def test_get_flight_details_using_index_1_should_return_results(self):
        data = self.single
        expected = {
            'flight_number': 'AK6229',
            'departure_airport': 'TGG',
            'arrival_airport': 'KUL',
            'departure_time': 'Sat, 20 Jun 2015 13:10:00 +0800',
            'arrival_time': 'Sat, 20 Jun 2015 14:05:00 +0800',
            'total_fare': 133.99,
            'fare_currency': 'MYR'}
        actual = AA.get_direct_flight_details(data, 1)
        self.assertEqual(expected, actual)

    @unittest.skip('no-data-yet')
    def test_is_connecting_flights_should_return_true_for_connecting(self):
        data = ''
        actual = AA.is_connecting_flights(data, 0)
        self.assertTrue(actual)

    def test_is_connecting_flights_should_return_false_for_direct(self):
        data = self.single
        actual = AA.is_connecting_flights(data, 2)
        self.assertFalse(actual)


class TimeConversionTest(unittest.TestCase):
    """Date formatting/parsing helpers on the AirAsia adapter."""

    def test_convert_to_api_format_returns_correct_output(self):
        date_object = datetime.datetime(2015, 9, 25)
        expected = '25-09-2015'
        actual = AA.to_api(date_object)
        self.assertEqual(expected, actual)

    def test_convert_extracted_time_to_datetime_returns_correct_object(self):
        offset = datetime.timedelta(hours=8)
        expected = datetime.datetime(2015, 6, 20, 13, 10,
                                     tzinfo=datetime.timezone(offset))
        actual = AA.to_datetime('Sat, 20 Jun 2015 13:10:00 +0800')
        self.assertEqual(expected, actual)
, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) def lazy_import(): from plaid.model.distribution_breakdown import DistributionBreakdown globals()['DistributionBreakdown'] = DistributionBreakdown class PayPeriodDetails(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. 
""" allowed_values = { ('pay_frequency',): { 'None': None, 'PAY_FREQUENCY_UNKNOWN': "PAY_FREQUENCY_UNKNOWN", 'PAY_FREQUENCY_WEEKLY': "PAY_FREQUENCY_WEEKLY", 'PAY_FREQUENCY_BIWEEKLY': "PAY_FREQUENCY_BIWEEKLY", 'PAY_FREQUENCY_SEMIMONTHLY': "PAY_FREQUENCY_SEMIMONTHLY", 'PAY_FREQUENCY_MONTHLY': "PAY_FREQUENCY_MONTHLY", 'NULL': "null", }, } validations = { } @cached_property def additional_properties_type(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'check_amount': (float, none_type,), # noqa: E501 'distribution_breakdown': ([DistributionBreakdown],), # noqa: E501 'end_date': (date, none_type,), # noqa: E501 'gross_earnings': (float, none_type,), # noqa: E501 'pay_date': (date, none_type,), # noqa: E501 'pay_frequency': (str, none_type,), # noqa: E501 'pay_day': (date, none_type,), # noqa: E501 'start_date': (date, none_type,), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'check_amount': 'check_amount', # noqa: E501 'distribution_breakdown': 'distribution_breakdown', # noqa: E501 'end_date': 'end_date', # noqa: E501 'gross_earnings': 'gross_earnings', # noqa: E501 'pay_date': 'pay_date', # noqa: E501 'pay_frequency': 'pay_frequency', # noqa: E501 'pay_day': 'pay_day', # noqa: E501 'start_date': 'start_date', # noqa: E501 } _composed_schemas = {} required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): 
# noqa: E501 """PayPeriodDetails - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the Open
API document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Confi
guration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) check_amount (float, none_type): The amount of the paycheck.. [optional] # noqa: E501 distribution_breakdown ([DistributionBreakdown]): [optional] # noqa: E501 end_date (date, none_type): The pay period end date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. [optional] # noqa: E501 gross_earnings (float, none_type): Total earnings before tax/deductions.. [optional] # noqa: E501 pay_date (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional] # noqa: E501 pay_frequency (str, none_type): The frequency at which an individual is paid.. [optional] # noqa: E501 pay_day (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional] # noqa: E501 start_date (date, none_type): The pay period start date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. 
[optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data
# Evaluation driver: runs AVP (azimuth+detection) and VP (viewpoint accuracy)
# tests over PASCAL VOC12 val images, using paths/settings pulled in from the
# project's global_variables module.
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Make the project package and its parent importable when run as a script.
sys.path.append(BASE_DIR)
sys.path.append(os.path.dirname(BASE_DIR))
# NOTE(review): star-imports supply g_* paths, test_avp_nv and test_vp_acc —
# presumably defined in global_variables / evaluation_helper; verify there.
from global_variables import *
from evaluation_helper import *

cls_names = g_shape_names
# One image-list file per class for the detector-provided bounding boxes.
img_name_file_list = [
    os.path.join(g_real_images_voc12val_det_bbox_folder, name + '.txt')
    for name in cls_names]
# One detection-result .mat file per line of the filelist.
# NOTE(review): the file handle from open() is never closed; best-effort here.
det_bbox_mat_file_list = [
    os.path.join(g_detection_results_folder, x.rstrip())
    for x in open(g_rcnn_detection_bbox_mat_filelist)]
result_folder = os.path.join(BASE_DIR, 'avp_test_results')
# Average Viewpoint Precision test on detected boxes.
test_avp_nv(cls_names, img_name_file_list, det_bbox_mat_file_list,
            result_folder)

# Second pass: viewpoint accuracy on easy ground-truth boxes.
img_name_file_list = [
    os.path.join(g_real_images_voc12val_easy_gt_bbox_folder, name + '.txt')
    for name in cls_names]
view_label_folder = g_real_images_voc12val_easy_gt_bbox_folder
result_folder = os.path.join(BASE_DIR, 'vp_test_results')
test_vp_acc(cls_names, img_name_file_list, result_folder, view_label_folder)
# Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.

# hgweb/__init__.py - web interface to a mercurial repository
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import os

from .. import error, pycompat, util
from ..i18n import _
from . import hgweb_mod, hgwebdir_mod, server


def hgweb(config, name=None, baseui=None):
    """create an hgweb wsgi object

    config can be one of:
    - repo object (single repo view)
    - path to repo (single repo view)
    - path to config file (multi-repo view)
    - dict of virtual:real pairs (multi-repo view)
    - list of virtual:real tuples (multi-repo view)

    name, if given, labels the single-repo view; baseui seeds the ui config.
    """
    # A dict, a list, or a path that is not a repo directory all describe a
    # multi-repo configuration; anything else is treated as a single repo.
    if (
        (isinstance(config, str) and not os.path.isdir(config))
        or isinstance(config, dict)
        or isinstance(config, list)
    ):
        # create a multi-dir interface
        return hgwebdir_mod.hgwebdir(config, baseui=baseui)
    return hgweb_mod.hgweb(config, name=name, baseui=baseui)


def hgwebdir(config, baseui=None):
    # Thin convenience wrapper: always build the multi-repo WSGI app.
    return hgwebdir_mod.hgwebdir(config, baseui=baseui)


class httpservice(object):
    """Run a WSGI app (hgweb/hgwebdir) on an HTTP server with ui reporting."""

    def __init__(self, ui, app, opts):
        self.ui = ui      # ui object used for status output
        self.app = app    # WSGI application to serve
        self.opts = opts  # option dict: expects "port", optional "port_file"

    def init(self):
        """Create the server and announce the listening address."""
        util.setsignalhandler()
        self.httpd = server.create_server(self.ui, self.app)

        # Optionally record the actual bound port (useful when port 0 was
        # requested and the OS picked one).
        portfile = self.opts.get("port_file")
        if portfile:
            util.writefile(portfile, "%s" % self.httpd.port)

        # With an explicit port (or port file) and non-verbose ui, stay quiet.
        if (self.opts["port"] or portfile) and not self.ui.verbose:
            return

        if self.httpd.prefix:
            prefix = self.httpd.prefix.strip("/") + "/"
        else:
            prefix = ""

        port = r":%d" % self.httpd.port
        if port == r":80":
            # Default HTTP port is implied; omit it from the displayed URL.
            port = r""

        bindaddr = self.httpd.addr
        if bindaddr == r"0.0.0.0":
            # Wildcard bind: show "*" rather than the unroutable 0.0.0.0.
            bindaddr = r"*"
        elif r":" in bindaddr:  # IPv6
            bindaddr = r"[%s]" % bindaddr

        fqaddr = self.httpd.fqaddr
        if r":" in fqaddr:
            # Bracket IPv6 literals so the URL is well-formed.
            fqaddr = r"[%s]" % fqaddr

        if self.opts["port"] or portfile:
            write = self.ui.status
        else:
            write = self.ui.write

        write(
            _("listening at http://%s%s/%s (bound to %s:%d)\n")
            % (fqaddr, port, prefix, bindaddr, self.httpd.port)
        )
        self.ui.flush()  # avoid buffering of status message

    def run(self):
        # Blocks until the server is shut down.
        self.httpd.serve_forever()


def createapp(baseui, repo, webconf):
    """Build the WSGI app: multi-repo if webconf given, else single repo.

    Raises RepoError when neither a webconf nor a repo is available.
    """
    if webconf:
        return hgwebdir_mod.hgwebdir(webconf, baseui=baseui)
    else:
        if not repo:
            raise error.RepoError(
                _("there is no Mercurial repository" " here (.hg not found)")
            )
        return hgweb_mod.hgweb(repo, baseui=baseui)
rn value a struct with bit fields") def test_inspecttype(self): ffi = FFI(backend=self.Backend()) assert ffi.typeof("long").kind == "primitive" assert ffi.typeof("long(*)(long, long**, ...)").cname == ( "long(*)(long, long * *, ...)") assert ffi.typeof("long(*)(long, long**, ...)").ellipsis is True def test_new_handle(self): ffi = FFI(backend=self.Backend()) o = [2, 3, 4] p = ffi.new_handle(o) assert ffi.typeof(p) == ffi.typeof("void *") assert ffi.from_handle(p) is o assert ffi.from_handle(ffi.cast("char *", p)) is o py.test.raises(RuntimeError, ffi.from_handle, ffi.NULL) class TestBitfield: def check(self, source, expected_ofs_y, expected_align, expected_size): # NOTE: 'expected_*' is the numbers expected from GCC. # The numbers expected from MSVC are not explicitly written # in this file, and will just be taken from the compiler. ffi = FFI() ffi.cdef("struct s1 { %s };" % source) ctype = ffi.typeof("struct s1") # verify the information with gcc ffi1 = FFI() ffi1.cdef(""" static const int Gofs_y, Galign, Gsize; struct s1 *try_wit
h_value(int fieldnum, long long value); """) fnames = [name for name, cfield in ctype.fields if name and cfield.bitsize > 0] setters = ['case %d: s.%s = value; break;' % iname
for iname in enumerate(fnames)] lib = ffi1.verify(""" struct s1 { %s }; struct sa { char a; struct s1 b; }; #define Gofs_y offsetof(struct s1, y) #define Galign offsetof(struct sa, b) #define Gsize sizeof(struct s1) struct s1 *try_with_value(int fieldnum, long long value) { static struct s1 s; memset(&s, 0, sizeof(s)); switch (fieldnum) { %s } return &s; } """ % (source, ' '.join(setters))) if sys.platform == 'win32': expected_ofs_y = lib.Gofs_y expected_align = lib.Galign expected_size = lib.Gsize else: assert (lib.Gofs_y, lib.Galign, lib.Gsize) == ( expected_ofs_y, expected_align, expected_size) # the real test follows assert ffi.offsetof("struct s1", "y") == expected_ofs_y assert ffi.alignof("struct s1") == expected_align assert ffi.sizeof("struct s1") == expected_size # compare the actual storage of the two for name, cfield in ctype.fields: if cfield.bitsize < 0 or not name: continue if int(ffi.cast(cfield.type, -1)) == -1: # signed min_value = -(1 << (cfield.bitsize-1)) max_value = (1 << (cfield.bitsize-1)) - 1 else: min_value = 0 max_value = (1 << cfield.bitsize) - 1 for t in [1, 2, 4, 8, 16, 128, 2813, 89728, 981729, -1,-2,-4,-8,-16,-128,-2813,-89728,-981729]: if min_value <= t <= max_value: self._fieldcheck(ffi, lib, fnames, name, t) def _fieldcheck(self, ffi, lib, fnames, name, value): s = ffi.new("struct s1 *") setattr(s, name, value) assert getattr(s, name) == value raw1 = ffi.buffer(s)[:] t = lib.try_with_value(fnames.index(name), value) raw2 = ffi.buffer(t, len(raw1))[:] assert raw1 == raw2 def test_bitfield_basic(self): self.check("int a; int b:9; int c:20; int y;", 8, 4, 12) self.check("int a; short b:9; short c:7; int y;", 8, 4, 12) self.check("int a; short b:9; short c:9; int y;", 8, 4, 12) def test_bitfield_reuse_if_enough_space(self): self.check("int a:2; char y;", 1, 4, 4) self.check("int a:1; char b ; int c:1; char y;", 3, 4, 4) self.check("int a:1; char b:8; int c:1; char y;", 3, 4, 4) self.check("char a; int b:9; char y;", 3, 4, 4) 
self.check("char a; short b:9; char y;", 4, 2, 6) self.check("int a:2; char b:6; char y;", 1, 4, 4) self.check("int a:2; char b:7; char y;", 2, 4, 4) self.check("int a:2; short b:15; char c:2; char y;", 5, 4, 8) self.check("int a:2; char b:1; char c:1; char y;", 1, 4, 4) @pytest.mark.skipif("platform.machine().startswith('arm')") def test_bitfield_anonymous_no_align(self): L = FFI().alignof("long long") self.check("char y; int :1;", 0, 1, 2) self.check("char x; int z:1; char y;", 2, 4, 4) self.check("char x; int :1; char y;", 2, 1, 3) self.check("char x; long long z:48; char y;", 7, L, 8) self.check("char x; long long :48; char y;", 7, 1, 8) self.check("char x; long long z:56; char y;", 8, L, 8 + L) self.check("char x; long long :56; char y;", 8, 1, 9) self.check("char x; long long z:57; char y;", L + 8, L, L + 8 + L) self.check("char x; long long :57; char y;", L + 8, 1, L + 9) @pytest.mark.skipif("not platform.machine().startswith('arm')") def test_bitfield_anonymous_align_arm(self): L = FFI().alignof("long long") self.check("char y; int :1;", 0, 4, 4) self.check("char x; int z:1; char y;", 2, 4, 4) self.check("char x; int :1; char y;", 2, 4, 4) self.check("char x; long long z:48; char y;", 7, L, 8) self.check("char x; long long :48; char y;", 7, 8, 8) self.check("char x; long long z:56; char y;", 8, L, 8 + L) self.check("char x; long long :56; char y;", 8, L, 8 + L) self.check("char x; long long z:57; char y;", L + 8, L, L + 8 + L) self.check("char x; long long :57; char y;", L + 8, L, L + 8 + L) @pytest.mark.skipif("platform.machine().startswith('arm')") def test_bitfield_zero(self): L = FFI().alignof("long long") self.check("char y; int :0;", 0, 1, 4) self.check("char x; int :0; char y;", 4, 1, 5) self.check("char x; int :0; int :0; char y;", 4, 1, 5) self.check("char x; long long :0; char y;", L, 1, L + 1) self.check("short x, y; int :0; int :0;", 2, 2, 4) self.check("char x; int :0; short b:1; char y;", 5, 2, 6) self.check("int a:1; int :0; int b:1; char 
y;", 5, 4, 8) @pytest.mark.skipif("not platform.machine().startswith('arm')") def test_bitfield_zero_arm(self): L = FFI().alignof("long long") self.check("char y; int :0;", 0, 4, 4) self.check("char x; int :0; char y;", 4, 4, 8) self.check("char x; int :0; int :0; char y;", 4, 4, 8) self.check("char x; long long :0; char y;", L, 8, L + 8) self.check("short x, y; int :0; int :0;", 2, 4, 4) self.check("char x; int :0; short b:1; char y;", 5, 4, 8) self.check("int a:1; int :0; int b:1; char y;", 5, 4, 8) def test_error_cases(self): ffi = FFI() py.test.raises(TypeError, 'ffi.cdef("struct s1 { float x:1; };"); ffi.new("struct s1 *")') py.test.raises(TypeError, 'ffi.cdef("struct s2 { char x:0; };"); ffi.new("struct s2 *")') py.test.raises(TypeError, 'ffi.cdef("struct s3 { char x:9; };"); ffi.new("struct s3 *")') def test_struct_with_typedef(self): ffi = FFI() ffi.cdef("typedef struct { float x; } foo_t;") p = ffi.new("foo_t *", [5.2]) assert repr(p).startswith("<cdata 'foo_t *' ") def test_struct_array_no_length(self): ffi = FFI() ffi.cdef("struct foo_s { int x; int a[]; };") p = ffi.new("struct foo_s *", [100, [200, 300, 400]]) assert p.x == 100 assert ffi.typeof(p.a) is ffi.typeof("int *") # no length available assert p.a[0] == 200 assert p.a[1] == 300 assert p.a[2] == 400 @pytest.mark.skipif("sys.platform != 'win32'") def test_getwinerror(self): ffi = FFI() code, message = ffi.getwinerror(1155) assert code == 1155 assert message == ("No application is ass
#!/usr/bin/python
# coding: utf-8


class Solution(object):
    def convertToTitle(self, n):
        """Convert a positive column number to its Excel column title.

        Excel columns are a bijective base-26 numbering:
        1 -> 'A', 26 -> 'Z', 27 -> 'AA', 702 -> 'ZZ', 703 -> 'AAA'.

        :type n: int
        :rtype: str
        """
        # Base case: 0 terminates the recursion (and, as in the original,
        # yields "" for a direct call with n == 0).
        if n == 0:
            return ""
        # Shift to 0-based before dividing so 26 maps to 'Z' rather than
        # rolling over.  divmod with floor division fixes the original
        # "(n - 1) / 26", which under Python 3 is true division and makes
        # chr() receive a float (TypeError).
        quotient, remainder = divmod(n - 1, 26)
        return self.convertToTitle(quotient) + chr(remainder + ord('A'))
ismethoddescrip
tor(obj) or inspect.isfunction(obj): return 'function' # Everything else... return 'instance' @property def type(self): """Imitate the tree.Node.type values.""" cls = self._
get_class() if inspect.isclass(cls): return 'classdef' elif inspect.ismodule(cls): return 'file_input' elif inspect.isbuiltin(cls) or inspect.ismethod(cls) or \ inspect.ismethoddescriptor(cls): return 'funcdef' @underscore_memoization def _cls(self): """ We used to limit the lookups for instantiated objects like list(), but this is not the case anymore. Python itself """ # Ensures that a CompiledObject is returned that is not an instance (like list) return self def _get_class(self): if not fake.is_class_instance(self.obj) or \ inspect.ismethoddescriptor(self.obj): # slots return self.obj try: return self.obj.__class__ except AttributeError: # happens with numpy.core.umath._UFUNC_API (you get it # automatically by doing `import numpy`. return type @property def names_dict(self): # For compatibility with `representation.Class`. return self.names_dicts(False)[0] def names_dicts(self, search_global, is_instance=False): return self._names_dict_ensure_one_dict(is_instance) @memoize_method def _names_dict_ensure_one_dict(self, is_instance): """ search_global shouldn't change the fact that there's one dict, this way there's only one `object`. """ return [LazyNamesDict(self._evaluator, self, is_instance)] def get_subscope_by_name(self, name): if name in dir(self.obj): return CompiledName(self._evaluator, self, name).parent else: raise KeyError("CompiledObject doesn't have an attribute '%s'." % name) @CheckAttribute def py__getitem__(self, index): if type(self.obj) not in (str, list, tuple, unicode, bytes, bytearray, dict): # Get rid of side effects, we won't call custom `__getitem__`s. return set() return set([create(self._evaluator, self.obj[index])]) @CheckAttribute def py__iter__(self): if type(self.obj) not in (str, list, tuple, unicode, bytes, bytearray, dict): # Get rid of side effects, we won't call custom `__getitem__`s. 
return for part in self.obj: yield set([create(self._evaluator, part)]) @property def name(self): try: name = self._get_class().__name__ except AttributeError: name = repr(self.obj) return FakeName(name, self) def _execute_function(self, params): if self.type != 'funcdef': return for name in self._parse_function_doc()[1].split(): try: bltn_obj = getattr(_builtins, name) except AttributeError: continue else: if bltn_obj is None: # We want to evaluate everything except None. # TODO do we? continue bltn_obj = create(self._evaluator, bltn_obj) for result in self._evaluator.execute(bltn_obj, params): yield result @property @underscore_memoization def subscopes(self): """ Returns only the faked scopes - the other ones are not important for internal analysis. """ module = self.get_parent_until() faked_subscopes = [] for name in dir(self.obj): try: faked_subscopes.append( fake.get_faked(module.obj, self.obj, parent=self, name=name) ) except fake.FakeDoesNotExist: pass return faked_subscopes def is_scope(self): return True def get_self_attributes(self): return [] # Instance compatibility def get_imports(self): return [] # Builtins don't have imports class CompiledName(FakeName): def __init__(self, evaluator, compiled_obj, name): super(CompiledName, self).__init__(name) self._evaluator = evaluator self._compiled_obj = compiled_obj self.name = name def __repr__(self): try: name = self._compiled_obj.name # __name__ is not defined all the time except AttributeError: name = None return '<%s: (%s).%s>' % (type(self).__name__, name, self.name) def is_definition(self): return True @property @underscore_memoization def parent(self): module = self._compiled_obj.get_parent_until() return _create_from_name(self._evaluator, module, self._compiled_obj, self.name) @parent.setter def parent(self, value): pass # Just ignore this, FakeName tries to overwrite the parent attribute. class LazyNamesDict(object): """ A names_dict instance for compiled objects, resembles the parser.tree. 
""" name_class = CompiledName def __init__(self, evaluator, compiled_obj, is_instance=False): self._evaluator = evaluator self._compiled_obj = compiled_obj self._is_instance = is_instance def __iter__(self): return (v[0].value for v in self.values()) @memoize_method def __getitem__(self, name): try: getattr(self._compiled_obj.obj, name) except AttributeError: raise KeyError('%s in %s not found.' % (name, self._compiled_obj)) except Exception: # This is a bit ugly. We're basically returning this to make # lookups possible without having the actual attribute. However # this makes proper completion possible. return [FakeName(name, create(self._evaluator, None), is_definition=True)] return [self.name_class(self._evaluator, self._compiled_obj, name)] def values(self): obj = self._compiled_obj.obj values = [] for name in dir(obj): try: values.append(self[name]) except KeyError: # The dir function can be wrong. pass is_instance = self._is_instance or fake.is_class_instance(obj) # ``dir`` doesn't include the type names. if not inspect.ismodule(obj) and obj != type and not is_instance: values += create(self._evaluator, type).names_dict.values() return values def dotted_from_fs_path(fs_path, sys_path): """ Changes `/usr/lib/python3.4/email/utils.py` to `email.utils`. I.e. compares the path with sys.path and then returns the dotted_path. If the path is not in the sys.path, just returns None. """ if os.path.basename(fs_path).startswith('__init__.'): # We are calculating the path. __init__ files are not interesting. 
fs_path = os.path.dirname(fs_path) # prefer # - UNIX # /path/to/pythonX.Y/lib-dynload # /path/to/pythonX.Y/site-packages # - Windows # C:\path\to\DLLs # C:\path\to\Lib\site-packages # over # - UNIX # /path/to/pythonX.Y # - Windows # C:\path\to\Lib path = '' for s in sys_path: if (fs_path.startswith(s) and len(path) < len(s)): path = s # - Window # X:\path\to\lib-dynload/datetime.pyd => datetime module_path = fs_path[len(path):].lstrip(os.path.sep).lstrip('/') # - Window # Replace like X:\path\to\something/foo/bar.py return _path_re.sub('', module_path).replace(os.path.sep, '.').replace('/', '.') def load_module(evaluator, path=None, name=None): sys_path = evaluator.sys_path if path is not None: dotted_path = dotted_from_fs_path(path, sys_path=sys_path) else: dotted_path = name if dotted_path is None: p, _, dotted_path = path.partition(os.path.sep) sys_path.insert(0, p)
AINAGE, format_str='{:>10}', default=None, no_of_dps=3) obj2 = do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3) obj3 = do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3) localcol = rdc.RowDataCollection() localcol._collection.append(obj1) localcol._collection.append(obj2) localcol._collection.append(obj3) # Initiliase a real collection col = rdc.RowDataCollection() col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3)) # Check that they're the same col_eq, msg = self.checkCollectionEqual(localcol, col) self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg) def test_bulkInitCollection(self): objs = [ do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3), do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3), do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3), ] col = rdc.RowDataCollection.bulkInitCollection(objs) localcol = rdc.RowDataCollection() localcol._collection.append(objs[0]) localcol._collection.append(objs[1]) localcol._collection.append(objs[2]) # Check they're the same col_eq, msg = self.checkCollectionEqual(localcol, col) self.assertTrue(col_eq, 'rdc.RowDataCollection initialisation fail - ' + msg) def checkCollectionEqual(self, c1, c2): '''Check the two given collections to make sure that they contain the same data. @param c1: First rdc.RowDataCollection object @param c2: Second rdc.RowDataCollection object @return: True if they're equal False and reason if not. ''' if not len(c1._collection) == len(c2._collection):
return False, 'Collections are different lengths' for i in range(0, len
(c1._collection)): if not c1._collection[i].data_type == c2._collection[i].data_type: return False, 'Collections have different data_types' if not c1._collection[i].format_str == c2._collection[i].format_str: return False, 'Collections have different format_str' if not c1._collection[i].default == c2._collection[i].default: return False, 'Collections have different default' for j in range(0, len(c1._collection[i].data_collection)): if not c1._collection[i].data_collection[j] == c1._collection[i].data_collection[j]: return False, 'Collections have different data' return True, '' def test_indexOfDataObject(self): """Should return the corrent index of a particular ADataObject in colleciton.""" index1 = self.testcol.indexOfDataObject(rdt.CHAINAGE) index2 = self.testcol.indexOfDataObject(rdt.ELEVATION) index3 = self.testcol.indexOfDataObject(rdt.ROUGHNESS) self.assertEquals(index1, 0) self.assertEquals(index2, 1) self.assertEquals(index3, 2) def test_iterateRows(self): """Test generator for complete row as a list""" testrows = [ [0.00, 32.345, 0.035], [3.65, 33.45, 0.035], ] i = 0 for row in self.testcol.iterateRows(): self.assertListEqual(row, testrows[i]) i += 1 def test_iterateRowsWithKey(self): """Test generator for a single DataObject""" testrows = [ 32.345, 33.45, ] i = 0 for row in self.testcol.iterateRows(rdt.ELEVATION): self.assertEqual(row, testrows[i]) i += 1 def test_rowAsDict(self): """Shoud return a row as a dict of single values.""" test_dict = {rdt.CHAINAGE: 0.00, rdt.ELEVATION: 32.345, rdt.ROUGHNESS: 0.035} row = self.testcol.rowAsDict(0) self.assertDictEqual(row, test_dict) def test_rowAsList(self): test_list = [0.00, 32.345, 0.035] row = self.testcol.rowAsList(0) self.assertListEqual(row, test_list) def test_dataObject(self): """Should return the correct ADataObject.""" test_vals = [0.00, 3.65] obj = self.testcol.dataObject(rdt.CHAINAGE) self.assertEqual(obj.data_type, rdt.CHAINAGE) for i, o in enumerate(obj): self.assertEqual(o, test_vals[i]) def 
test_dataObjectAsList(self): """Should return the contents of a DataObject as a list.""" test_list = [0.00, 3.65] obj_list = self.testcol.dataObjectAsList(rdt.CHAINAGE) self.assertListEqual(obj_list, test_list) def test_toList(self): test_list = [ [0.00, 3.65], [32.345, 33.45], [0.035, 0.035] ] row_list = self.testcol.toList() self.assertListEqual(row_list, test_list) def test_toDict(self): test_dict = { rdt.CHAINAGE: [0.00, 3.65], rdt.ELEVATION: [32.345, 33.45], rdt.ROUGHNESS: [0.035, 0.035], } row_dict = self.testcol.toDict() self.assertDictEqual(row_dict, test_dict) def test_addValue(self): # Initiliase a real collection col = rdc.RowDataCollection() col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3)) col._addValue(rdt.CHAINAGE, 2.5) self.assertEqual(col._collection[0][0], 2.5) def test_setValue(self): # Initiliase a real collection col = rdc.RowDataCollection() col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3)) col._collection[0].addValue(2.5) self.assertEqual(col._collection[0][0], 2.5) col._setValue(rdt.CHAINAGE, 3.5, 0) self.assertEqual(col._collection[0][0], 3.5) def test_getPrintableRow(self): test_row = ' 0.000 32.345 0.035' row = self.testcol.getPrintableRow(0) self.assertEqual(row, test_row) def test_updateRow(self): new_row = {rdt.CHAINAGE: 0.1, rdt.ELEVATION: 40, rdt.ROUGHNESS: 0.06} self.testcol.updateRow(new_row, 0) row = self.testcol.rowAsDict(0) self.assertDictEqual(row, new_row) with self.assertRaises(IndexError): self.testcol.updateRow(new_row, 3) fake_row = {'fakekey': 4.3, 
'andagain': 3454} with self.assertRaises(KeyError): self.testcol.updateRow(fake_row, 0) def test_addRow(self): # Initiliase a real collection col = rdc.RowDataCollection() col.addToCollection(do.FloatData(rdt.CHAINAGE, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ELEVATION, format_str='{:>10}', default=None, no_of_dps=3)) col.addToCollection(do.FloatData(rdt.ROUGHNESS, format_str='{:>10}', default=0.0, no_of_dps=3)) new_row = {rdt.CHAINAGE: 3.0, rdt.ELEVATION: 41, rdt.ROUGHNESS: 0.06} new_row2 = {rdt.CHAINAGE: 6.0, rdt.ELEVATION: 42, rdt.ROUGHNESS: 0.07} new_row3 = {rdt.CHAINAGE: 10.0, rdt.ELEVATION: 43, rdt.ROUGHNESS: 0.08} new_row4 = {rdt.CHAINAGE: 20.0, rdt.ELEVATION: 44, rdt.ROUGHNESS: 0.09} # append and insert rows col.addRow(new_row2) col.addRow(new_row, 0) # append and insert again col.addRow(new_row4) col.addRow(new_row3, 2)
# coding: utf-8
"""Packaging script for django-nomad-activity-feed."""
import os
from setuptools import setup, find_packages

# Read the long description up front.  Using a context manager (instead of
# the original bare open(...).read()) guarantees the file handle is closed
# instead of leaking until garbage collection.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme_file:
    README = readme_file.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='django-nomad-activity-feed',
    version='0.1.1',
    packages=find_packages(),
    description='A simple Django app attach an activity feed to any Django model.',
    long_description=README,
    url='https://github.com/Nomadblue/django-activity-feed',
    author='José Sazo',
    author_email='jose@nomadblue.com',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Internet :: WWW/HTTP :: WSGI',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
# Tests for sqlobject.boundattributes: attributes that learn, at class
# creation time, which class they were attached to and under what name.
import pytest
from sqlobject import boundattributes
from sqlobject import declarative

# The entire module is skipped unconditionally — the feature under test was
# never finished upstream.
pytestmark = pytest.mark.skipif(
    True,
    reason='The module "boundattributes" and its tests were not finished yet')


class SOTestMe(object):
    # Plain host class that bound attributes get attached to in the tests.
    pass


class AttrReplace(boundattributes.BoundAttribute):
    """Bound attribute that replaces itself with the object passed as
    ``replace``, recording the owning class, attribute name and extra attrs
    on that object."""

    __unpackargs__ = ('replace',)
    replace = None

    @declarative.classinstancemethod
    def make_object(self, cls, added_class, attr_name, **attrs):
        # When called on the class (self is None), delegate to the
        # singleton instance so both call styles behave the same.
        if not self:
            return cls.singleton().make_object(
                added_class, attr_name, **attrs)
        # Record binding metadata on the replacement object.
        self.replace.added_class = added_class
        self.replace.name = attr_name
        assert attrs['replace'] is self.replace
        del attrs['replace']
        # Remaining attrs (minus 'replace') are exposed for inspection.
        self.replace.attrs = attrs
        return self.replace


class Holder:
    """Trivial marker object used as a replacement target in the tests."""

    def __init__(self, name):
        self.holder_name = name

    def __repr__(self):
        return '<Holder %s>' % self.holder_name


def test_1():
    v1 = Holder('v1')
    v2 = Holder('v2')
    v3 = Holder('v3')

    # Subclass carrying extra declarative attrs that should surface in .attrs.
    class V2Class(AttrReplace):
        arg1 = 'nothing'
        arg2 = ['something']

    class A1(SOTestMe):
        a = AttrReplace(v1)
        v = V2Class(v2)

        # Inline-subclass form: 'replace' given as a class attribute.
        class inline(AttrReplace):
            replace = v3
            arg3 = 'again'
            arg4 = 'so there'

    # Each bound attribute must have been replaced by its Holder, and the
    # Holder must know its attribute name and owning class.
    for n in ('a', 'v', 'inline'):
        assert getattr(A1, n).name == n
        assert getattr(A1, n).added_class is A1
    assert A1.a is v1
    assert A1.a.attrs == {}
    assert A1.v is v2
    assert A1.v.attrs == {'arg1': 'nothing', 'arg2': ['something']}
    assert A1.inline is v3
    assert A1.inline.attrs == {'arg3': 'again', 'arg4': 'so there'}
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Consume and serialize all of the data from a running TensorBoard instance.

This program connects to a live TensorBoard backend at given port, and saves
all of the data to local disk JSON in a predictable format.

This makes it easy to mock out the TensorBoard backend so that the frontend
may be tested in isolation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import json
import os
import os.path
import shutil
import threading
import urllib

import six
from six.moves import http_client
import tensorflow as tf

from tensorflow.python.summary import event_multiplexer
from tensorflow.tensorboard.backend import server

tf.flags.DEFINE_string('logdir', None, """the logdir to pass to the TensorBoard
backend; data will be read from this logdir for serialization.""")

tf.flags.DEFINE_string('target', None, """The directoy where serialized data
will be written""")

tf.flags.DEFINE_boolean('overwrite', False, """Whether to remove and overwrite
TARGET if it already exists.""")

tf.flags.DEFINE_boolean(
    'purge_orphaned_data', True, 'Whether to purge data that '
    'may have been orphaned due to TensorBoard restarts. '
    'Disabling purge_orphaned_data can be used to debug data '
    'disappearance.')

FLAGS = tf.flags.FLAGS

# Characters that are unsafe in filenames; Clean() replaces each with '_'.
BAD_CHARACTERS = "#%&{}\\/<>*? $!'\":@+`|="


def Url(route, params):
  """Takes route and query params, and produce encoded url for that asset."""
  out = route
  if params:
    # sorting ensures a unique filename for each query
    sorted_params = sorted(six.iteritems(params))
    out += '?' + urllib.urlencode(sorted_params)
  return out


def Clean(s):
  """Clean a string so it can be used as a filepath."""
  for c in BAD_CHARACTERS:
    s = s.replace(c, '_')
  return s


class TensorBoardStaticSerializer(object):
  """Serialize all the routes from a TensorBoard server to static json."""

  def __init__(self, connection, target_path):
    # connection: an open http_client.HTTPConnection to the backend.
    self.connection = connection
    # All payloads go under <target_path>/data/.
    EnsureDirectoryExists(os.path.join(target_path, 'data'))
    self.path = target_path

  def GetAndSave(self, url):
    """GET the given url. Serialize the result at clean path version of url.

    Raises IOError on any non-200 response.
    """
    self.connection.request('GET', '/data/' + url)
    response = self.connection.getresponse()
    # Filename mirrors the URL (query string included), sanitized by Clean().
    destination = self.path + '/data/' + Clean(url)

    if response.status != 200:
      raise IOError(url)

    content = response.read()

    with open(destination, 'w') as f:
      f.write(content)
    return content

  def GetRouteAndSave(self, route, params=None):
    """GET given route and params. Serialize the result. Return as JSON."""
    url = Url(route, params)
    return json.loads(self.GetAndSave(url))

  def Run(self):
    """Serialize everything from a TensorBoard backend."""
    # get the runs object, which is an index for every tag.
    runs = self.GetRouteAndSave('runs')

    # collect sampled data.
    self.GetRouteAndSave('scalars')

    # now let's just download everything!
    for run, tag_type_to_tags in six.iteritems(runs):
      for tag_type, tags in six.iteritems(tag_type_to_tags):
        try:
          if tag_type == 'graph':
            # in this case, tags is a bool which specifies if graph is present.
            if tags:
              # NOTE(review): {run: run} uses the run name as the query KEY —
              # looks like it should be {'run': run}; confirm against backend.
              self.GetRouteAndSave('graph', {run: run})
          elif tag_type == 'images':
            for t in tags:
              images = self.GetRouteAndSave('images', {'run': run, 'tag': t})
              for im in images:
                url = 'individualImage?' + im['query']
                # pull down the images themselves.
                self.GetAndSave(url)
          else:
            for t in tags:
              # Save this, whatever it is :)
              self.GetRouteAndSave(tag_type, {'run': run, 'tag': t})
        except IOError as e:
          # Best-effort: log the failure and keep serializing other tags.
          PrintAndLog('Retrieval failed for %s/%s/%s' % (tag_type, run, tags),
                      tf.logging.WARN)
          PrintAndLog('Got Exception: %s' % e, tf.logging.WARN)
          PrintAndLog('continuing...', tf.logging.WARN)
          continue


def EnsureDirectoryExists(path):
  # Create path (and parents) if missing; no-op when it already exists.
  if not os.path.exists(path):
    os.makedirs(path)


def PrintAndLog(msg, lvl=tf.logging.INFO):
  # Emit msg both to the TF log (at lvl) and to stdout.
  tf.logging.log(lvl, msg)
  print(msg)


def main(unused_argv=None):
  target = FLAGS.target
  logdir = FLAGS.logdir
  if not target or not logdir:
    PrintAndLog('Both --target and --logdir are required.', tf.logging.ERROR)
    return -1

  # Refuse to clobber an existing target unless --overwrite was passed.
  if os.path.exists(target):
    if FLAGS.overwrite:
      if os.path.isdir(target):
        shutil.rmtree(target)
      else:
        os.remove(target)
    else:
      PrintAndLog('Refusing to overwrite target %s without --overwrite' %
                  target, tf.logging.ERROR)
      return -2

  path_to_run = server.ParseEventFilesSpec(FLAGS.logdir)

  PrintAndLog('About to load Multiplexer. This may take some time.')
  multiplexer = event_multiplexer.EventMultiplexer(
      size_guidance=server.TENSORBOARD_SIZE_GUIDANCE,
      purge_orphaned_data=FLAGS.purge_orphaned_data)
  server.ReloadMultiplexer(multiplexer, path_to_run)

  PrintAndLog('Multiplexer load finished. Starting TensorBoard server.')
  # Port 0 lets the OS pick a free port; the chosen one is read back from
  # s.server_address below.
  s = server.BuildServer(multiplexer, 'localhost', 0)
  server_thread = threading.Thread(target=s.serve_forever)
  server_thread.daemon = True
  server_thread.start()
  connection = http_client.HTTPConnection('localhost', s.server_address[1])

  PrintAndLog('Server setup! Downloading data from the server.')
  x = TensorBoardStaticSerializer(connection, target)
  x.Run()
  PrintAndLog('Done downloading data.')
  connection.close()
  s.shutdown()
  s.server_close()


if __name__ == '__main__':
  tf.app.run()
# This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2015 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
"""add custom head tags

add setting for custom tags (HTML) in <head> section

added: 2012-02-13 (v0.10dev)
previously migrate script v054

Revision ID: 280565a54124
Revises: 4d27ff5680e5
Create Date: 2013-05-14 22:38:02.552230

"""

# revision identifiers, used by Alembic.
revision = '280565a54124'
down_revision = '4d27ff5680e5'

from alembic.op import execute, inline_literal
from sqlalchemy import Integer, Unicode, UnicodeText
from sqlalchemy import Column, MetaData, Table

# -- table definition ---------------------------------------------------------
# Lightweight table definition used only to build INSERT/DELETE statements;
# it is not used to create or reflect the actual table.
metadata = MetaData()
settings = Table('settings', metadata,
    Column('id', Integer, autoincrement=True, primary_key=True),
    Column('key', Unicode(255), nullable=False, unique=True),
    Column('value', UnicodeText),
    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

# -- helpers ------------------------------------------------------------------
def insert_setting(key, value):
    # Insert a settings row; inline_literal embeds the values directly in
    # the emitted SQL (required for offline/--sql migrations).
    execute(
        settings.insert().\
            values({
                'key': inline_literal(key),
                'value': inline_literal(value),
            })
    )

def delete_setting(key):
    # Remove the settings row with the given key.
    execute(
        settings.delete().\
            where(settings.c.key==inline_literal(key))
    )

# -----------------------------------------------------------------------------
# (key, initial value) pairs managed by this revision.
SETTINGS = [
    (u'appearance_custom_head_tags', u''),
]

def upgrade():
    # Add each setting with its default value.
    for key, value in SETTINGS:
        insert_setting(key, value)

def downgrade():
    # Remove exactly the settings this revision added.
    for key, value in SETTINGS:
        delete_setting(key)
if not isinstance(loglevel, int): loglevel = getattr(logging, loglevel.upper(), None) if not isinstance(loglevel, int): print('Invalid log level: %s' % loglevel) exit(1) # remove logfile if it already exists if logfname is not None and os.path.exists(logfname): os.remove(logfname) logging.basicConfig(filename=logfname, format="%(levelname)s %(message)s", level=loglevel) def cubefit(argv=None): DESCRIPTION = "Fit SN + galaxy model to SNFactory data cubes." parser = ArgumentParser(prog="cubefit", description=DESCRIPTION) parser.add_argument("configfile", help="configuration file name (JSON format)") parser.add_argument("outfile", help="Output file name (FITS format)") parser.add_argument("--dataprefix", default="", help="path prepended to data file names; default is " "empty string") parser.add_argument("--logfile", help="Write log to this file " "(default: print to stdout)", default=None) parser.add_argument("--loglevel", default="info", help="one of: debug, info, warning (default is info)") parser.add_argument("--diagdir", default=None, help="If given, write intermediate diagnostic results " "to this directory") parser.add_argument("--refitgal", default=False, action="store_true", help="Add an iteration where galaxy model is fit " "using all epochs and then data/SN positions are " "refit") parser.add_argument("--mu_wave", default=0.07, type=float, help="Wavelength regularization parameter. " "Default is 0.07.") parser.add_argument("--mu_xy", default=0.001, type=float, help="Spatial regularization parameter. " "Default is 0.001.") parser.add_argument("--psftype", default="gaussian-moffat", help="Type of PSF: 'gaussian-moffat' or 'tabular'. 
" "Currently, tabular means generate a tabular PSF from " "gaussian-moffat parameters.") args = parser.parse_args(argv) setup_logging(args.loglevel, logfname=args.logfile) # record start time tstart = datetime.now() logging.info("cubefit v%s started at %s", __version__, tstart.strftime("%Y-%m-%d %H:%M:%S")) tsteps = OrderedDict() # finish time of each step. logging.info("parameters: mu_wave={:.3g} mu_xy={:.3g} refitgal={}" .format(args.mu_wave, args.mu_xy, args.refitgal)) logging.info(" psftype={}".format(args.psftype)) logging.info("reading config file") with open(args.configfile) as f: cfg = json.load(f) # basic checks on config contents. assert (len(cfg["filenames"]) == len(cfg["xcenters"]) == len(cfg["ycenters"]) == len(cfg["psf_params"])) # ------------------------------------------------------------------------- # Load data cubes from the list of FITS files. nt = len(cfg["filenames"]) logging.info("reading %d data cubes", nt) cubes = [] for fname in cfg["filenames"]: logging.debug(" reading %s", fname) cubes.append(read_datacube(os.path.join(args.dataprefix, fname))) wave = cubes[0].wave nw = len(wave) # assign some local variables for convenience refs = cfg["refs"] master_ref
= cfg["master_ref"] if master_ref not in refs: raise ValueError("master ref choice must be one of the final refs (" + " ".join(refs.astype(str)) + ")") nonmaster_refs = [i for i in refs if i != master_ref] nonrefs = [i for i in range(nt) if i not in refs] # Ensure that all
cubes have the same wavelengths. if not all(np.all(cubes[i].wave == wave) for i in range(1, nt)): raise ValueError("all data must have same wavelengths") # ------------------------------------------------------------------------- # PSF for each observation logging.info("setting up PSF for all %d epochs", nt) psfs = [snfpsf(wave, cfg["psf_params"][i], cubes[i].header, args.psftype) for i in range(nt)] # ------------------------------------------------------------------------- # Initialize all model parameters to be fit yctr0 = np.array(cfg["ycenters"]) xctr0 = np.array(cfg["xcenters"]) galaxy = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64) sn = np.zeros((nt, nw), dtype=np.float64) # SN spectrum at each epoch skys = np.zeros((nt, nw), dtype=np.float64) # Sky spectrum at each epoch yctr = yctr0.copy() xctr = xctr0.copy() snctr = (0., 0.) # For writing out to FITS modelwcs = {"CRVAL1": -SPAXEL_SIZE * (MODEL_SHAPE[0] - 1) / 2., "CRPIX1": 1, "CDELT1": SPAXEL_SIZE, "CRVAL2": -SPAXEL_SIZE * (MODEL_SHAPE[1] - 1) / 2., "CRPIX2": 1, "CDELT2": SPAXEL_SIZE, "CRVAL3": cubes[0].header["CRVAL3"], "CRPIX3": cubes[0].header["CRPIX3"], "CDELT3": cubes[0].header["CDELT3"]} # ------------------------------------------------------------------------- # Position bounds # Bounds on data position: shape=(nt, 2) xctrbounds = np.vstack((xctr - POSITION_BOUND, xctr + POSITION_BOUND)).T yctrbounds = np.vstack((yctr - POSITION_BOUND, yctr + POSITION_BOUND)).T snctrbounds = (-POSITION_BOUND, POSITION_BOUND) # For data positions, check that bounds do not extend # past the edge of the model and adjust the minbound and maxbound. # This doesn't apply to SN position. 
gshape = galaxy.shape[1:3] # model shape for i in range(nt): dshape = cubes[i].data.shape[1:3] (yminabs, ymaxabs), (xminabs, xmaxabs) = yxbounds(gshape, dshape) yctrbounds[i, 0] = max(yctrbounds[i, 0], yminabs) yctrbounds[i, 1] = min(yctrbounds[i, 1], ymaxabs) xctrbounds[i, 0] = max(xctrbounds[i, 0], xminabs) xctrbounds[i, 1] = min(xctrbounds[i, 1], xmaxabs) # ------------------------------------------------------------------------- # Guess sky logging.info("guessing sky for all %d epochs", nt) for i, cube in enumerate(cubes): skys[i, :] = guess_sky(cube, npix=30) # ------------------------------------------------------------------------- # Regularization penalty parameters # Calculate rough average galaxy spectrum from all final refs. spectra = np.zeros((len(refs), len(wave)), dtype=np.float64) for j, i in enumerate(refs): avg_spec = np.average(cubes[i].data, axis=(1, 2)) - skys[i] mean_spec, bins, bn = scipy.stats.binned_statistic(wave, avg_spec, bins=len(wave)/10) spectra[j] = np.interp(wave, bins[:-1] + np.diff(bins)[0]/2., mean_spec) mean_gal_spec = np.average(spectra, axis=0) # Ensure that there won't be any negative or tiny values in mean: mean_floor = 0.1 * np.median(mean_gal_spec) mean_gal_spec[mean_gal_spec < mean_floor] = mean_floor galprior = np.zeros((nw, MODEL_SHAPE[0], MODEL_SHAPE[1]), dtype=np.float64) regpenalty = RegularizationPenalty(galprior, mean_gal_spec, args.mu_xy, args.mu_wave) tsteps["setup"] = datetime.now() # ------------------------------------------------------------------------- # Fit just the galaxy model to just the master ref. data = cubes[master_ref].data - skys[master_ref, :, None, None] weight = cubes[master_ref].weight logging.info("fitting galaxy to master ref [%d]", master_ref) galaxy = fit_galaxy_single(galaxy, data, weight, (yctr[master_ref], xctr[master_ref]), psfs[master_ref], regpenalty, LBFGSB_FACTOR) if args.diagdir: fname = os.path.join(args.diagdir, 'ste
#!/usr/bin/env python
# coding=utf-8
import struct
from twisted.internet import defer
from txportal.packet import cmcc, huawei
from txportal.simulator.handlers import base_handler
import functools


class AuthHandler(base_handler.BasicHandler):
    """Simulator handler that answers portal AUTH requests.

    Each ``proc_*`` method builds an ACK_AUTH response for one protocol
    dialect (CMCC v1/v2, Huawei v1/v2), mirroring the request's address,
    serial number and request id, and reports success through attribute
    0x05.
    """

    def proc_cmccv1(self, req, rundata):
        """Build an ACK_AUTH success response for CMCC portal v1."""
        resp = cmcc.Portal.newMessage(
            cmcc.ACK_AUTH,
            req.userIp,
            req.serialNo,
            req.reqId,
            secret=self.secret
        )
        resp.attrNum = 1
        resp.attrs = [
            (0x05, 'success'),
        ]
        return resp

    def proc_cmccv2(self, req, rundata):
        """Build an ACK_AUTH success response for CMCC portal v2."""
        resp = cmcc.Portal.newMessage(
            cmcc.ACK_AUTH,
            req.userIp,
            req.serialNo,
            req.reqId,
            secret=self.secret
        )
        resp.attrNum = 1
        resp.attrs = [
            (0x05, 'success'),
        ]
        return resp

    def proc_huaweiv1(self, req, rundata):
        """Build an ACK_AUTH success response for Huawei portal v1."""
        resp = huawei.Portal.newMessage(
            huawei.ACK_AUTH,
            req.userIp,
            req.serialNo,
            req.reqId,
            secret=self.secret
        )
        resp.attrNum = 1
        resp.attrs = [
            (0x05, 'success'),
        ]
        return resp

    def proc_huaweiv2(self, req, rundata):
        """Build a signed ACK_AUTH success response for Huawei portal v2.

        Bug fix: this method used to carry @defer.inlineCallbacks but had
        no ``yield``; inlineCallbacks requires a generator function and
        raises TypeError when handed a plain function, so the handler
        could never run. The decorator is removed and the response is
        returned directly, consistent with the other proc_* methods.
        """
        resp = huawei.PortalV2.newMessage(
            huawei.ACK_AUTH,
            req.userIp,
            req.serialNo,
            req.reqId,
            self.secret,
            auth=req.auth,
            # NOTE(review): CHAP mode is signalled by isChap == 0x00 here;
            # confirm against the packet definition in txportal.packet.
            chap=(req.isChap == 0x00)
        )
        resp.attrNum = 1
        resp.attrs = [
            (0x05, 'success'),
        ]
        # Sign the packet before it is sent back.
        resp.auth_packet()
        return resp
"""Support for Satel Integra zone states- represented as binary sensors.""" import logging from homeassistant.components.binary_sensor import BinarySensorDevice from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from . import ( CONF_OUTPUTS, CONF_ZONE_NAME, CONF_ZONE_TYPE, CONF_ZONES, DATA_SATEL, SIGNAL_OUTPUTS_UPDATED, SIGNAL_ZONES_UPDATED) DEPENDENCIES = ['satel_integra'] _LOGGER = logging.getLogger(__name__) async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the Satel Integra binary sensor devices.""" if not discovery_info: return configured_zones = discovery_info[CONF_ZONES] devices = [] for zone_num, device_config_data in configured_zones.items(): zone_type = device_config_data[CONF_ZONE_TYPE] zone_name = device_config_data[CONF_ZONE_NAME] device = SatelIntegraBinarySensor( zone_num, zone_name, zone_type, SIGNAL_ZONES_UPDATED) devices.append(device) configured_outputs = discovery_info[CONF_OUTPUTS] for zone_num, device_config_data in configured_outputs.items(): zone_type = device_config_data[CONF_ZONE_TYPE] zone_name = device_config_data[CONF_ZONE_NAME] device = SatelIntegraBinarySensor( zone_num, zone_name, zone_type, SIGNAL_OUTPUTS_UPDATED) devices.append(device) async_add_entities(devices) class SatelIntegraBinarySensor(Bin
arySensorDevice): """Representation of an Satel Integra binary sensor.""" def __init__(self, device_number, device_name, zone_type, react_to_signal): """Initialize the
binary_sensor.""" self._device_number = device_number self._name = device_name self._zone_type = zone_type self._state = 0 self._react_to_signal = react_to_signal async def async_added_to_hass(self): """Register callbacks.""" if self._react_to_signal == SIGNAL_OUTPUTS_UPDATED: if self._device_number in\ self.hass.data[DATA_SATEL].violated_outputs: self._state = 1 else: self._state = 0 else: if self._device_number in\ self.hass.data[DATA_SATEL].violated_zones: self._state = 1 else: self._state = 0 async_dispatcher_connect( self.hass, self._react_to_signal, self._devices_updated) @property def name(self): """Return the name of the entity.""" return self._name @property def icon(self): """Icon for device by its type.""" if self._zone_type == 'smoke': return "mdi:fire" @property def should_poll(self): """No polling needed.""" return False @property def is_on(self): """Return true if sensor is on.""" return self._state == 1 @property def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return self._zone_type @callback def _devices_updated(self, zones): """Update the zone's state, if needed.""" if self._device_number in zones \ and self._state != zones[self._device_number]: self._state = zones[self._device_number] self.async_schedule_update_ha_state()
()) def _get_ordinals_from_text(self, input): # https://github.com/IronLanguages/main/issues/1237 if IRONPYTHON and isinstance(input, bytearray): input = bytes(input) for char in input: ordinal = char if is_integer(char) else ord(char) yield self._test_ordinal(ordinal, char, 'Character') def _test_ordinal(self, ordinal, original, type): if 0 <= ordinal <= 255: return ordinal raise RuntimeError("%s '%s' cannot be represented as a byte." % (type, original)) def _get_ordinals_from_int(self, input): if is_string(input): input = input.split() elif is_integer(input): input = [input] for integer in input: ordinal = self._convert_to_integer(integer) yield self._test_ordinal(ordinal, integer, 'Integer') def _get_ordinals_from_hex(self, input): for token in self._input_to_tokens(input, length=2): ordinal = self._convert_to_integer(token, base=16) yield self._test_ordinal(ordinal, token, 'Hex value') def _get_ordinals_from_bin(self, input): for token in self._input_to_tokens(input, length=8): ordinal = self._convert_to_integer(token, base=2) yield self._test_ordinal(ordinal, token, 'Binary value') def _input_to_token
s(self, input, length
): if not is_string(input): return input input = ''.join(input.split()) if len(input) % length != 0: raise RuntimeError('Expected input to be multiple of %d.' % length) return (input[i:i+length] for i in range(0, len(input), length)) def create_list(self, *items): """Returns a list containing given items. The returned list can be assigned both to ``${scalar}`` and ``@{list}`` variables. Examples: | @{list} = | Create List | a | b | c | | ${scalar} = | Create List | a | b | c | | ${ints} = | Create List | ${1} | ${2} | ${3} | """ return list(items) @run_keyword_variant(resolve=0) def create_dictionary(self, *items): """Creates and returns a dictionary based on the given ``items``. Items are typically given using the ``key=value`` syntax same way as ``&{dictionary}`` variables are created in the Variable table. Both keys and values can contain variables, and possible equal sign in key can be escaped with a backslash like ``escaped\\=key=value``. It is also possible to get items from existing dictionaries by simply using them like ``&{dict}``. Alternatively items can be specified so that keys and values are given separately. This and the ``key=value`` syntax can even be combined, but separately given items must be first. If same key is used multiple times, the last value has precedence. The returned dictionary is ordered, and values with strings as keys can also be accessed using a convenient dot-access syntax like ``${dict.key}``. 
Examples: | &{dict} = | Create Dictionary | key=value | foo=bar | | | # key=value syntax | | Should Be True | ${dict} == {'key': 'value', 'foo': 'bar'} | | &{dict2} = | Create Dictionary | key | value | foo | bar | # separate key and value | | Should Be Equal | ${dict} | ${dict2} | | &{dict} = | Create Dictionary | ${1}=${2} | &{dict} | foo=new | | # using variables | | Should Be True | ${dict} == {1: 2, 'key': 'value', 'foo': 'new'} | | Should Be Equal | ${dict.key} | value | | | | # dot-access | This keyword was changed in Robot Framework 2.9 in many ways: - Moved from ``Collections`` library to ``BuiltIn``. - Support also non-string keys in ``key=value`` syntax. - Returned dictionary is ordered and dot-accessible. - Old syntax to give keys and values separately was deprecated, but deprecation was later removed in RF 3.0.1. """ separate, combined = self._split_dict_items(items) result = DotDict(self._format_separate_dict_items(separate)) combined = DictVariableTableValue(combined).resolve(self._variables) result.update(combined) return result def _split_dict_items(self, items): separate = [] for item in items: name, value = split_from_equals(item) if value is not None or VariableSplitter(item).is_dict_variable(): break separate.append(item) return separate, items[len(separate):] def _format_separate_dict_items(self, separate): separate = self._variables.replace_list(separate) if len(separate) % 2 != 0: raise DataError('Expected even number of keys and values, got %d.' % len(separate)) return [separate[i:i+2] for i in range(0, len(separate), 2)] class _Verify(_BuiltInBase): def _set_and_remove_tags(self, tags): set_tags = [tag for tag in tags if not tag.startswith('-')] remove_tags = [tag[1:] for tag in tags if tag.startswith('-')] if remove_tags: self.remove_tags(*remove_tags) if set_tags: self.set_tags(*set_tags) def fail(self, msg=None, *tags): """Fails the test with the given message and optionally alters its tags. 
The error message is specified using the ``msg`` argument. It is possible to use HTML in the given error message, similarly as with any other keyword accepting an error message, by prefixing the error with ``*HTML*``. It is possible to modify tags of the current test case by passing tags after the message. Tags starting with a hyphen (e.g. ``-regression``) are removed and others added. Tags are modified using `Set Tags` and `Remove Tags` internally, and the semantics setting and removing them are the same as with these keywords. Examples: | Fail | Test not ready | | | # Fails with the given message. | | Fail | *HTML*<b>Test not ready</b> | | | # Fails using HTML in the message. | | Fail | Test not ready | not-ready | | # Fails and adds 'not-ready' tag. | | Fail | OS not supported | -regression | | # Removes tag 'regression'. | | Fail | My message | tag | -t* | # Removes all tags starting with 't' except the newly added 'tag'. | See `Fatal Error` if you need to stop the whole test execution. Support for modifying tags was added in Robot Framework 2.7.4 and HTML message support in 2.8. """ self._set_and_remove_tags(tags) raise AssertionError(msg) if msg else AssertionError() def fatal_error(self, msg=None): """Stops the whole test execution. The test or suite where this keyword is used fails with the provided message, and subsequent tests fail with a canned message. Possible teardowns will nevertheless be executed. See `Fail` if you only want to stop one test case unconditionally. """ error = AssertionError(msg) if msg else AssertionError() error.ROBOT_EXIT_ON_FAILURE = True raise error def should_not_be_true(self, condition, msg=None): """Fails if the given condition is true. See `Should Be True` for details about how ``condition`` is evaluated and how ``msg`` can be used to override the default error message. """ if self._is_true(condition): raise AssertionError(msg or "'%s' should not be true." 
% condition) def should_be_true(self, condition, msg=None): """Fails if the given condition is not true. If ``condition`` is a string (e.g. ``${rc} < 10``), it is evaluated as a Python expression as explained in `Evaluating expressions` and the keyword status is decided based on the result. If a non-string item is given, the status is got directly fro
self.description = "Remove a package required by other packages"

# The package that everything else depends on.
lp1 = pmpkg("pkg1")
self.addpkg2db("local", lp1)

# Three installed packages, each depending on pkg1.
for pkgname in ("pkg2", "pkg3", "pkg4"):
    dependent = pmpkg(pkgname)
    dependent.depends = ["pkg1"]
    self.addpkg2db("local", dependent)

self.args = "-R pkg1 pkg2"

# Removal must be refused and every package must survive.
self.addrule("!PACMAN_RETCODE=0")
for pkgname in ("pkg1", "pkg2", "pkg3", "pkg4"):
    self.addrule("PKG_EXIST=%s" % pkgname)
class A(Aa): @property def <warning descr="Getter signatu
re should be (self)">x<caret></warning>(self, r):
return "" @x.setter def <warning descr="Setter should not return a value">x</warning>(self, r): return r
ement_shape(element_shape), element_dtype=element_dtype, max_num_elements=max_num_elements, name=name) def _set_handle_data(list_handle, element_shape, element_dtype): """Sets type information on `list_handle` for consistency with graphs.""" # TODO(b/169968286): It would be better if we had a consistent story for # creating handle data from eager operations (shared with VarHandleOp). if isinstance(list_handle, ops.EagerTensor): if tensor_util.is_tf_type(element_shape): element_shape = tensor_shape.TensorShape(None) elif not isinstance(element_shape, tensor_shape.TensorShape): element_shape = tensor_shape.TensorShape(element_shape) handle_data = cpp_shape_inference_pb2.CppShapeInferenceResult.HandleData() handle_data.is_set = True handle_data.shape_and_type.append( cpp_shape_inference_pb2.CppShapeInferenceResult.HandleShapeAndType( shape=element_shape.as_proto(), dtype=element_dtype.as_datatype_enum, specialized_type=types_pb2.ST_TENSOR_LIST)) list_handle._handle_data = handle_data # pylint: disable=protected-access def tensor_list_reserve(element_shape, num_elements, element_dtype, name=None): result = gen_list_ops.tensor_list_reserve( element_shape=_build_element_shape(element_shape), num_elements=num_elements, element_dtype=element_dtype, name=name) # TODO(b/169968286): gen_ops needs to ensure the metadata is properly # populated for eager operations. 
_set_handle_data(result, element_shape, element_dtype) return result def tensor_list_from_tensor(tensor, element_shape, name=None): tensor = ops.convert_to_tensor(tensor) result = gen_list_ops.tensor_list_from_tensor( tensor=tensor, element_shape=_build_element_shape(element_shape), name=name) _set_handle_data(result, tensor.shape, tensor.dtype) return result def tensor_list_get_item(input_handle, index, element_dtype, element_shape=None, name=None): return gen_list_ops.tensor_list_get_item( input_handle=input_handle, index=index, element_shape=_build_element_shape(element_shape), element_dtype=element_dtype, name=name) def tensor_list_pop_back(input_handle, element_dtype, name=None): return gen_list_ops.tensor_list_pop_back( input_handle=input_handle, element_shape=-1, element_dtype=element_dtype, name=name) def tensor_list_gather(input_handle, indices, element_dtype, element_shape=None, name=None): return gen_list_ops.tensor_list_gather( input_handle=input_handle, indices=indices, element_shape=_build_element_shape(element_shape), element_dtype=element_dtype, name=name) def tensor_list_scatter(tensor, indices, element_shape=None, input_handle=None, name=None): """Returns a TensorList created or updated by scattering `tensor`.""" tensor = ops.convert_to_tensor(tensor) if input_handle is not None: output_handle = gen_list_ops.tensor_list_scatter_into_existing_list( input_handle=input_handle, tensor=tensor, indices=indices, name=name) handle_data_util.copy_handle_data(input_handle, output_handle) return output_handle else: output_handle = gen_list_ops.tensor_list_scatter_v2( tensor=tensor, indices=indices, element_shape=_build_element_shape(element_shape), num_elements=-1, name=name) _set_handle_data(output_handle, element_shape, tensor.dtype) return output_handle def tensor_list_stack(input_handle, element_dtype, num_elements=-1, element_shape=None, name=None): return gen_list_ops.tensor_list_stack( input_handle=input_handle, 
element_shape=_build_element_shape(element_shape), element_dtype=element_dtype, num_elements=num_elements, name=name) def tensor_list_concat(input_handle, element_dtype, element_shape=None, name=None): # Ignore the lengths output of TensorListConcat. It is only used during # gradient computation. return gen_list_ops.tensor_list_concat_v2( input_handle=input_handle, element_dtype=element_dtype, element_shape=_build_element_shape(element_shape), leading_dims=ops.convert_to_tensor([], dtype=dtypes.int64), name=name)[0] def tensor_list_split(tensor, element_shape, lengths, name=None): return gen_list_ops.tensor_list_split( tensor=tensor, element_shape=_build_element_shape(element_shape), lengths=lengths, name=name) def tensor_list_set_item(input_handle, index, item, resize_if_index_out_of_bounds=False, name=None): """Sets `item` at `index` in input list.""" if resize_if_index_out_of_bounds: input_list_size = gen_list_ops.tensor_list_length(input_handle) # TODO(srbs): This could cause some slowdown. Consider fusing resize # functionality in the SetItem op. 
input_handle = control_flow_ops.cond( index >= input_list_size, lambda: gen_list_ops.tensor_list_resize( # pylint: disable=g-long-lambda input_handle, index + 1), lambda: input_handle) output_handle = gen_list_ops.tensor_list_set_item( input_handle=input_handle, index=index, item=item, name=name) handle_data_util.copy_handle_data(input_handle, output_handle) return output_handle @ops.RegisterGradient("TensorListPushBack") def _PushBackGrad(op, dresult): return gen_list_ops.tensor_list_pop_back( dresult, element_shape=array_ops.shape(op.inputs[1]), element_dtype=op.get_attr("element_dtype")) @ops.RegisterGradient("TensorListPopBack") def _PopBackGrad(op, dlist, delement): if dlist is None: dlist = empty_tensor_list( element_dtype=delement.dtype, element_shape=gen_list_ops.tensor_list_element_shape( op.outputs[0], shape_type=dtypes.int32)) if delement is None: delement = array_ops.zeros_like(op.outputs[1]) return gen_list_ops.tensor_list_push_back(dlist, delement), None @ops.RegisterGradient("TensorListStack") def _TensorListStackGrad(unused_op, dtensor): return tensor_list_from_tensor(dtensor, element_shape=dtensor.shape[1:]), None @ops.RegisterGradient("TensorListConcat") @ops.RegisterGradient("TensorListConcatV2") def _TensorListConcatGrad(op, dtensor, unused_dlengths): """Gradient function for TensorListConcat.""" dlist = tensor_list_split( dtensor, element_shape=gen_list_ops.tensor_list_element_shape( op.inputs[0], shape_type=dtypes.int32), lengths=op.outputs[1]) if op.type == "TensorListConcatV2": return dlist, None, None else: return dlist @ops.RegisterGradient("TensorListSplit") def _TensorListSplitGrad(op, dlist): tensor, _, lengths = op.inputs element_shape = array_ops.slice(array_ops.shape(tensor), [1], [-1]) element_shape = array_ops.concat([[-1], element_shape], axis=0) return gen_list_ops.tensor_list_concat_v2( dlist, element_shape=element_shape, leading_dims=lengths, element_dtype=op.inputs[0].dtype)[0], None, None 
@ops.RegisterGradient("TensorListFromTensor") def _TensorListFromTensorGrad(op, dlist): """Gradient for TensorListFromTensor.""" t = op.inputs[0] if t.shape.dims and t.shape.dims[0].value is not None: num_elements = t.shape.dims[0].value el
se: num_elements = None if dlist is None: dlist = empty_tensor_list( element_dtype=t.dtype,
element_shape=gen_list_ops.tensor_list_element_shape( op.outputs[0], shape_type=dtypes.int32)) tensor_grad = gen_list_ops.tensor_list_stack( dlist, element_shape=array_ops.slice(array_ops.shape(t), [1], [-1]), element_dtype=t.dtype, num_elements=num_elements) shape_grad = None return tensor_grad, shape_
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Changing field 'SocialAccount.uid' db.alter_column(u'socialaccount_socialaccount', 'uid', self.gf('django.db.models.fields.CharField')(max_length=191)) # Changing field 'SocialApp.secret' db.alter_column(u'socialaccount_socialapp', 'secret', self.gf('django.db.models.fields.CharField')(max_length=191)) # Changing field 'SocialApp.client_id' db.alter_column(u'socialaccount_socialapp', 'client_id', self.gf('django.db.models.fields.CharField')(max_length=191)) # Changing field 'SocialApp.key' db.alter_column(u'socialaccount_socialapp', 'key', self.gf('django.db.models.fields.CharField')(max_length=191)) def backwards(self, orm): # Changing field 'SocialAccount.uid' db.alter_column(u'socialaccount_socialaccount', 'uid', self.gf('django.db.models.fields.CharField')(max_length=255)) # Changing field 'SocialApp.secret' db.alter_column(u'socialaccount_socialapp', 'secret', self.gf('django.db.models.fields.CharField')(max_length=100)) # Changing field 'SocialApp.client_id' db.alter_column(u'socialaccount_socialapp', 'client_id', self.gf('django.db.models.fields.CharField')(max_length=100)) # Changing field 'SocialApp.key' db.alter_column(u'socialaccount_socialapp', 'key', self.gf('django.db.models.fields.CharField')(max_length=100)) models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', 
u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', 
[], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'sites.site': { 'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"}, 'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'socialaccount.socialaccount': { 'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'SocialAccount'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}
), 'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}), 'uid': ('django.db.models.fields.CharField', [], {'max_length': '191'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}) }, u'socialaccount.socialapp': { 'Meta': {'object_name': 'SocialApp'}, 'client_id': ('django.db.models.fields.CharField', [], {'max_length': '191'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '191', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}), 'secret': ('django.db.models.fields.CharField', [], {'max_length': '191'}), 'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'}) }, u'socialaccount.socialtoken': { 'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'}, 'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialaccount.SocialAccount']"}), 'app': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialaccount.SocialApp']"}), 'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'token': ('django.db.models.fields.TextField', [], {}), 'token_secret': ('django.db.models.fields.TextField', [], {'blank': 'True'}) } } complete_apps = ['socialaccount']
"""This demo demonstrates how to move the vertex coordinates of a boundary mesh and then updating the interior vertex coordinates of the original mesh by suitably interpolating the vertex coordinates (useful for implementation of ALE methods).""" # Copyright (C) 2008 Solveig Bruvoll and Anders Logg # # This file is part of DOLFIN. # # DOLFIN is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DOLFIN is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should h
ave received a copy of the GNU Lesser General Public License # along with DOLFIN. If not, see <http://www.gnu.org/licenses/>. # # First added: 2008-05-02 # Last changed: 2008-12-12 from dolfin import * print "This demo is presently broken. See https://bugs.launchpad.net/dolfin/+bug/1047641
" exit() # Create mesh mesh = UnitSquareMesh(20, 20) # Create boundary mesh boundary = BoundaryMesh(mesh) # Move vertices in boundary for x in boundary.coordinates(): x[0] *= 3.0 x[1] += 0.1*sin(5.0*x[0]) # Move mesh mesh.move(boundary) # Plot mesh plot(mesh, interactive=True)
#!/usr/bin/env python # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # Michael A.G. Aivazis # California Institute of Technology # (C) 1998-2005 All Rights Reserved # # <LicenseText> # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # from pyre.components.Component import Component
class Device(Component):
    """Abstract base component for journal output devices.

    Subclasses must override createDevice() to build the concrete
    low-level device; _init() then wires the configured renderer into
    that device. (pyre framework component — lifecycle hooks such as
    _init() are invoked by the framework; TODO confirm exact call order
    against the pyre Component documentation.)
    """

    class Inventory(Component.Inventory):
        # User-configurable traits of this component.

        from RendererFacility import RendererFacility

        # Facility that selects/builds the message renderer.
        renderer = RendererFacility()
        renderer.meta['tip'] = 'the facility that controls how the messages are formatted'

    def createDevice(self):
        """Create the concrete output device; must be overridden."""
        raise NotImplementedError("class '%s' must override 'device'" % self.__class__.__name__)

    def __init__(self, name):
        """Construct the component under the 'journal-device' family."""
        Component.__init__(self, name, "journal-device")
        # Concrete device instance; populated later by _init().
        self.device = None
        return

    def _init(self):
        """Build the device and attach the configured renderer to it."""
        device = self.createDevice()
        # The facility resolves to a renderer component; its .renderer
        # attribute is the actual renderer object handed to the device.
        renderer = self.inventory.renderer.renderer
        device.renderer = renderer
        self.device = device
        return


# version
__id__ = "$Id: Device.py,v 1.2 2005/03/10 06:16:37 aivazis Exp $"

# End of file
import random
import datetime
import time
import hashlib

from django.db import models
from django.conf import settings
from django.urls import reverse
from django.contrib.auth.models import User, Group
from django.db.models.signals import post_save
from djangopress.core.models import Property
from django.utils import timezone

from PIL import Image

# Name of the Group new users are automatically added to; None disables it.
DEFAULT_USER_GROUP = getattr(settings, 'DEFAULT_USER_GROUP', None)


def avatar_path(instance, filename):
    """Build the upload path for a user's avatar image.

    Layout: avatars/<yy/mm>/<user-pk>-<username>-<filename> (lowercased).
    """
    return ("avatars/%s/%s-%s-%s" % (time.strftime("%y/%m"), instance.user.pk,
            instance.user.username.lower(), filename.lower()))


class UserProfile(models.Model):
    """Extra per-user data attached one-to-one to ``auth.User``."""

    EMAIL_SETTINGS = (
        ('HI', 'Hide Email'),
        ('SW', 'Show Email'),
        ('HB', 'Use Web Form')
    )
    title = models.CharField(max_length=100, default="New member")
    homepage = models.CharField(max_length=100, blank=True, null=True)
    #IM contact (jabber, icq, msn, aim, yahoo, gtalk, twitter, facebook)
    location = models.CharField(max_length=50, blank=True, null=True)
    avatar = models.ImageField(blank=True, null=True, upload_to=avatar_path)
    signature = models.TextField(blank=True, null=True)
    timezone = models.CharField(max_length=50, null=True, blank=True)
    language = models.CharField(max_length=50, null=True, blank=True)
    registration_ip = models.GenericIPAddressField(blank=True, null=True)
    last_ip_used = models.GenericIPAddressField(blank=True, null=True)
    admin_note = models.TextField(blank=True, null=True)
    activate_key = models.CharField(max_length=127, blank=True, editable=False)
    activate_key_expirary = models.DateTimeField(blank=True, editable=False)
    banned = models.BooleanField(default=False)
    #remember_between_visits = models.BooleanField(default=True)
    user = models.OneToOneField(User, related_name="profile", on_delete=models.CASCADE)
    email_settings = models.CharField(choices=EMAIL_SETTINGS, default='HI', max_length=2)
    gender = models.CharField(max_length=1, blank=True, null=True, default=None,
                              choices=(('', 'Private'), ('M', 'Male'), ('F', 'Female')))
    date_of_birth = models.DateTimeField(blank=True, null=True)

    def get_ip(self):
        """Return the most recent known IP, falling back to the one
        recorded at registration (may be None if neither is set)."""
        if self.last_ip_used:
            return self.last_ip_used
        return self.registration_ip

    def __getattr__(self, name):
        """Expose virtual ``social_<account>`` attributes that resolve to
        the user's first matching UserSocial row (e.g. ``social_twitter``)."""
        if name.startswith("social_"):
            try:
                return self.user.social.filter(account=name[7:])[0]
            except Exception:
                # No such social account (or no related user yet).
                raise AttributeError(name)
        # BUG FIX: the original called super().__getattr__(), but neither
        # models.Model nor object defines __getattr__, which itself raised
        # a confusing AttributeError. Raise a clean one for `name` instead.
        raise AttributeError(name)

    def get_absolute_url(self):
        """URL of this user's public profile page."""
        return reverse('accounts-profile', kwargs={"username": self.user.username})

    def __init__(self, *args, **kwargs):
        super(UserProfile, self).__init__(*args, **kwargs)
        # Snapshot fields so save() can detect changes made since load.
        self._banned = self.banned
        self._avatar = self.avatar

    def save(self, force_insert=False, force_update=False, **kwargs):
        """Persist the profile, with two side effects:

        - newly banned users are also deactivated so they cannot log in;
        - a newly uploaded avatar is resized to the configured square size.

        BUG FIX: accepts **kwargs so Django-supplied arguments such as
        ``using`` and ``update_fields`` are no longer silently dropped.
        """
        if self._banned == False and self.banned == True:
            # if we banned them, they can't then login
            self.user.is_active = False
            self.user.save()
        if self._avatar != self.avatar and self.avatar:
            image = Image.open(self.avatar)
            size = settings.ACCOUNTS_USER_LIMITS.get('avatar', {}).get('size', 50)
            # BUG FIX: Image.resize() returns a NEW image and does not
            # modify in place; the original discarded the result and
            # saved the full-size upload unchanged.
            image = image.resize((size, size), Image.ANTIALIAS)
            image.save(self.avatar.path)
        super(UserProfile, self).save(force_insert=force_insert,
                                      force_update=force_update, **kwargs)
        self._banned = self.banned
        self._avatar = self.avatar

    def set_activate_key(self):
        """Generate a random account-activation key valid for 7 days."""
        salt = hashlib.sha1((str(random.random())
                             + str(random.random())).encode('utf-8')).hexdigest()[:5]
        key = "".join(str(item) for item in
                      (self.user.username, self.user.email, datetime.datetime.now()))
        hsh = hashlib.sha1((salt + key).encode('utf-8')).hexdigest()
        self.activate_key = hsh
        # BUG FIX: use timezone.now() so the stored expiry is aware when
        # USE_TZ is enabled; the old naive datetime.fromtimestamp() value
        # could not be compared against timezone.now() in
        # check_activate_key() (TypeError on naive/aware comparison).
        self.activate_key_expirary = timezone.now() + datetime.timedelta(days=7)

    def check_activate_key(self, hsh):
        """True if `hsh` matches the stored key and it has not expired."""
        return (hsh == self.activate_key
                and timezone.now() <= self.activate_key_expirary)


class UserSocial(models.Model):
    """A social-network handle attached to a user."""

    ACCOUNTS = (
        ('twitter', 'Twitter'),
        ('google_plus', 'Google Plus'),
        ('facebook', 'Facebook'),
        ('linkedin', 'Linked In'),
        ('pinterest', 'Pinterest'),
    )
    account = models.CharField(max_length=20, choices=ACCOUNTS)
    value = models.CharField(max_length=100)
    user_profile = models.ForeignKey(User, related_name="social", on_delete=models.CASCADE)


class UserProperty(Property):
    """Arbitrary key/value property attached to a user."""

    user_profile = models.ForeignKey(User, related_name="properties", on_delete=models.CASCADE)


def create_profile(sender, **kargs):
    """post_save(User) handler: create a UserProfile for new users."""
    if kargs.get("created", False):
        profile = UserProfile(user=kargs.get("instance"))
        profile.set_activate_key()
        profile.save()

post_save.connect(create_profile, User, dispatch_uid="djangopress.accounts.create_profile")


def add_to_group(sender, **kargs):
    """post_save(User) handler: add new users to the default group."""
    if DEFAULT_USER_GROUP and kargs.get("created", False):
        user = kargs.get("instance")
        user.groups.add(Group.objects.get(name=DEFAULT_USER_GROUP))

post_save.connect(add_to_group, User, dispatch_uid="djangopress.accounts.add_to_group")
#!/usr/bin/env python
#-------------------------------------------------------------------------------
import os
import sys

# Make the package importable when running from a source checkout:
# add the parent of this bin/ directory to the module search path.
bin_dir = os.path.dirname(os.path.abspath(__file__))
pkg_dir = os.path.abspath(os.path.join(bin_dir, ".."))
sys.path.append(pkg_dir)

#-------------------------------------------------------------------------------
import argparse
import collections

import cktapps
from cktapps import apps
from cktapps.formats import spice

#-------------------------------------------------------------------------------
def main(args=None):
    """Read spice netlist(s), flatten the selected cell and report net
    capacitances and fanout.

    args: optional argv list (defaults to sys.argv[1:] via argparse).
    Returns the populated cktapps.Ckt instance.
    """
    parser = argparse.ArgumentParser(description="Report net capacitances "
                                                 "and fanout")

    parser.add_argument('spice_files', metavar='file', nargs='+',
                        type=argparse.FileType('r'),
                        help='spice netlist file(s)')

    parser.add_argument('--lib', type=argparse.FileType('r'),
                        help='lib file(s) with model (e.g. nch, pch) defintions')

    parser.add_argument('--cell', help='name of the cell to be analyzed '
                                       '(top cell by default)')

    arg_ns = parser.parse_args(args)

    #---------------------------------------------------------------------------
    ckt = cktapps.Ckt()

    # The library is optional; only read it when supplied.
    if arg_ns.lib:
        ckt.read_spice(arg_ns.lib)

    for spice_file in arg_ns.spice_files:
        ckt.read_spice(spice_file)

    ckt.link()

    # Pick the cell to analyze: explicit --cell, else the first top cell,
    # else fall back to the whole circuit.
    if arg_ns.cell:
        cell = ckt.get_cell(arg_ns.cell)
    else:
        topcells = ckt.get_topcells()
        if topcells:
            cell = topcells[0]
        else:
            cell = ckt

    cell.ungroup(flatten=True)

    # BUG FIX: --lib is optional (guarded above), but the original did
    # `lib = arg_ns.lib.name` unconditionally, raising AttributeError on
    # None whenever --lib was omitted.
    lib = arg_ns.lib.name if arg_ns.lib else None
    netlists = [f.name for f in arg_ns.spice_files]
    apps.report_net(cell, lib, netlists)

    return ckt

#-------------------------------------------------------------------------------
if __name__ == "__main__":
    ckt = main()
ted with ACI Fabric 1.0(3f)+ notes: - The C(tenant) and C(filter) used must exist before using this module in your playbook. The M(aci_tenant) and M(aci_filter) modules can be used for this. options: arp_flag: description: - The arp flag to use when the ether_type is arp. choices: [ arp_reply, arp_request, unspecified ] description: description: - Description for the Filter Entry. aliases: [ descr ] dst_port: description: - Used to set both destination start and end ports to the same value when ip_protocol is tcp or udp. choices: [ Valid TCP/UDP Port Ranges] dst_port_end: description: - Used to set the destination end port when ip_protocol is tcp or udp. choices: [ Valid TCP/UDP Port Ranges] dst_port_start: description: - Used to set the destination start port when ip_protocol is tcp or udp. choices: [ Valid TCP/UDP Port Ranges] entry: description: - Then name of the Filter Entry. aliases: [ entry_name, name ] ether_type: description: - The Ethernet type. choices: [ arp, fcoe, ip, mac_security, mpls_ucast, trill, unspecified ] filter_name: description: The name of Filter that the entry should belong to. icmp_msg_type: description: - ICMPv4 message type; used when ip_protocol is icmp. choices: [ dst_unreachable, echo, echo_reply, src_quench, time_exceeded, unspecified ] icmp6_msg_type: description: - ICMPv6 message type; used when ip_protocol is icmpv6. choices: [ dst_unreachable, echo_request, echo_reply, neighbor_advertisement, neighbor_solicitation, redirect, time_exceeded, unspecified ] ip_protocol: description: - The IP Protocol type when ether_type is ip. choices: [ eigrp, egp, icmp, icmpv6, igmp, igp, l2tp, ospfigp, pim, tcp, udp, unspecified ] state: description: - present, absent, query default: present choices: [ absent, present, query ] stateful: description: - Determines the statefulness of the filter entry. tenant: description: - The name of the tenant. 
aliases: [ tenant_name ] extends_documentation_fragment: aci ''' EXAMPLES = r''' - aci_filter_entry: action: "{{ action }}" entry: "{{ entry }}" tenant: "{{ tenant }}" ether_name: "{{ ether_name }}" icmp_msg_type: "{{ icmp_msg_type }}" filter_name: "{{ filter_name }}" descr: "{{ descr }}" host: "{{ inventory_hostname }}" username: "{{ user }}" password: "{{ pass }}" protocol: "{{ protocol }}" ''' RETURN = ''' # ''' from ansible.module_utils.aci import ACIModule, aci_argument_spec from ansible.module_utils.basic import AnsibleModule VALID_ARP_FLAGS = ['arp_reply', 'arp_request', 'unspecified'] VALID_ETHER_TYPES = ['arp', 'fcoe', 'ip', 'mac_security', 'mpls_ucast', 'trill', 'unspecified'] VALID_ICMP_TYPES = ['dst_unreachable', 'echo', 'echo_reply', 'src_quench', 'time_exceeded', 'unspecified', 'echo-rep', 'dst-unreach'] VALID_ICMP6_TYPES = ['dst_unreachable', 'echo_request', 'echo_reply', 'neighbor_advertisement', 'neighbor_solicitation', 'redirect', 'time_exceeded', 'unspecified'] VALID_IP_PROTOCOLS = ['eigrp', 'egp', 'icmp', 'icmpv6', 'igmp', 'igp', 'l2tp', 'ospfigp', 'pim', 'tcp', 'udp', 'unspecified'] # mapping dicts are used to normalize the proposed data to what the APIC expects, which will keep diffs accurate ARP_FLAG_MAPPING = dict(arp_reply='reply', arp_request='req', unspecified=None) FILTER_PORT_MAPPING = {'443': 'https', '25': 'smtp', '80': 'http', '20': 'ftpData', '53': 'dns', '110': 'pop3', '554': 'rtsp'} ICMP_MAPPING = {'dst_unreachable': 'dst-unreach', 'echo': 'echo', 'echo_reply': 'echo-rep', 'src_quench': 'src-quench', 'time_exceeded': 'time-exceeded', 'unspecified': 'unspecified', 'echo-re': 'echo-rep', 'dst-unreach': 'dst-unreach'} ICMP6_MAPPING = dict(dst_unreachable='dst-unreach', echo_request='echo-req', echo_reply='echo-rep', neighbor_advertisement='nbr-advert', neighbor_solicitation='nbr-solicit', redirect='redirect', time_exceeded='time-exceeded', unspecified='unspecified') def main(): argument_spec = aci_argument_spec argument_spec.update( 
arp_flag=dict(type='str', choices=VALID_ARP_FLAGS), description=dict(type='str'), dst_port=dict(type='str'), dst_port_end=dict(type='str'), dst_port_start=dict(type='str'), entry=dict(type='str', aliases=['entry_name', 'name']), ether_type=dict(choices=VALID_ETHER_TYPES, type='str'), filter_name=dict(type='str'), icmp_msg_type=dict(type='str', choices=VALID_ICMP_TYPES), icmp6_msg_type=dict(type='str', choices=VALID_ICMP6_TYPES), ip_protocol=dict(choices=VALID_IP_PROTOCOLS, type='str'), state=dict(type='str', default
='present', choices=['absent', 'present', 'query']), stateful=dict(type='str', choices=['no', 'yes']), tenant=dict(type="str", aliases=['tenant_name']) ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) arp_flag = module.params['arp_flag'] if arp_flag is not None: arp_flag = ARP_FLAG_MAPPING[arp_flag] description = module.params['description'] dst_
port = module.params['dst_port'] if dst_port in FILTER_PORT_MAPPING.keys(): dst_port = FILTER_PORT_MAPPING[dst_port] dst_end = module.params['dst_port_end'] if dst_end in FILTER_PORT_MAPPING.keys(): dst_end = FILTER_PORT_MAPPING[dst_end] dst_start = module.params['dst_port_start'] if dst_start in FILTER_PORT_MAPPING.keys(): dst_start = FILTER_PORT_MAPPING[dst_start] entry = module.params['entry'] ether_type = module.params['ether_type'] filter_name = module.params['filter_name'] icmp_msg_type = module.params['icmp_msg_type'] if icmp_msg_type is not None: icmp_msg_type = ICMP_MAPPING[icmp_msg_type] icmp6_msg_type = module.params['icmp6_msg_type'] if icmp6_msg_type is not None: icmp6_msg_type = ICMP6_MAPPING[icmp6_msg_type] ip_protocol = module.params['ip_protocol'] state = module.params['state'] stateful = module.params['stateful'] tenant = module.params['tenant'] aci = ACIModule(module) # validate that dst_port is not passed with dst_start or dst_end if dst_port is not None and (dst_end is not None or dst_start is not None): module.fail_json(msg="Parameter 'dst_port' cannot be used with 'dst_end' and 'dst_start'") elif dst_port is not None: dst_end = dst_port dst_start = dst_port # validate that filter_name is not passed without tenant if filter_name is not None and tenant is None: module.fail_json(msg="Parameter 'filter_name' cannot be used without 'tenant'") # TODO: Think through the logic here and see if there is a better way if entry is not None: # fail when entry is provided without tenant and filter_name if tenant is not None and filter_name is not None: path = 'api/mo/uni/tn-%(tenant)s/flt-%(filter_name)s/e-%(entry)s.json' % module.params elif tenant is not None and state == 'query': path = 'api/mo/uni/tn-%(tenant)s.json?rsp-subtree=full&rsp-subtree-class=vzEntry&rsp-subtree-filter=eq(vzEntry.name, \ \"%(entry)s\")&rsp-subtree-include=no-scoped' % module.params else: path = 'api/class/vzEntry.json?query-target-filter=eq(vzEntry.name, \"%(entry)s\")' % 
module.params elif state == 'query': if tenant is None: path = 'api/class/vzEntry.json' else: path = 'api/mo/uni/tn-%(tenant)s.json?rsp-subtree=full&rsp-subtree-class=vzEntry&rsp-subtree-include=no-scoped' % module.params else: module.fail_json(msg="Parameters 'tenant', 'filter_name', and 'entry' are required for state 'absent' or 'present'") aci.result['url'] = '%(protocol)s://%(hostname)s/' % aci.params + path aci.get_existing() if state == 'present': # Filter out module params with null values aci.payload(aci_class='vzEntry', class_
# -*- coding: utf-8 -*-
from django.db import models, migrations


class Migration(migrations.Migration):
    # Auto-generated schema migration: adds/updates `choices` (and one
    # max_length) on several InductionInterest CharFields. Do not edit the
    # choice values by hand — they must match data already stored.

    dependencies = [
        ('events', '0021_auto_20171023_1358'),
    ]

    operations = [
        migrations.AlterField(
            model_name='inductioninterest',
            name='age',
            field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'20to25', b'20 to 25 years'), (b'26to30', b'26 to 30 years'), (b'31to35', b'31 to 35 years'), (b'35andabove', b'Above 35 years')]),
        ),
        migrations.AlterField(
            model_name='inductioninterest',
            name='designation',
            field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Lecturer', b'Lecturer'), (b'AssistantProfessor', b'Assistant Professor'), (b'AssociateProfessor', b'Associate Professor'), (b'Professor', b'Professor'), (b'Other', b'Other')]),
        ),
        migrations.AlterField(
            model_name='inductioninterest',
            name='experience_in_college',
            field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Lessthan1year', b'Less than 1 year'), (b'Morethan1yearbutlessthan2years', b'More than 1 year, but less than 2 years'), (b'Morethan2yearsbutlessthan5years', b'More than 2 years but less than 5 years'), (b'Morethan5years', b'More than 5 years')]),
        ),
        migrations.AlterField(
            model_name='inductioninterest',
            name='gender',
            field=models.CharField(max_length=50, choices=[(b'', b'-----'), (b'Male', b'Male'), (b'Female', b'Female')]),
        ),
        migrations.AlterField(
            model_name='inductioninterest',
            name='medium_of_studies',
            field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'English', b'English'), (b'Other', b'Other')]),
        ),
        migrations.AlterField(
            model_name='inductioninterest',
            name='phonemob',
            field=models.CharField(max_length=100),
        ),
        migrations.AlterField(
            model_name='inductioninterest',
            name='specialisation',
            # NOTE(review): b'EngineeringorComputerScience ' has a trailing
            # space — presumably it matches values already in the database;
            # confirm before "fixing" it.
            field=models.CharField(max_length=100, choices=[(b'', b'-----'), (b'Arts', b'Arts'), (b'Science', b'Science'), (b'Commerce', b'Commerce'), (b'EngineeringorComputerScience ', b'Engineering or Computer Science'), (b'Management', b'Management'), (b'Other', b'Other')]),
        ),
    ]
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015
#
# STIC - Universidad de La Laguna (ULL) <gesinv@ull.edu.es>
#
# This file is part of Modelado de Servicios TIC.
#
# Modelado de Servicios TIC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Modelado de Servicios TIC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Modelado de Servicios TIC. If not, see
# <http://www.gnu.org/licenses/>.
#

import funcionesxml
import generacionpaginas

# Accumulates the service names of each probe (sonda).
# NOTE(review): this list is module-level and never cleared between probes,
# so later probes also include earlier probes' services — presumably not
# intended; confirm before changing behaviour.
ServicioSonda = []


# Generate the Nagios configuration files (service / host / host_group),
# one set per probe found in the XML configuration.
def GeneraNagios():
    nagstr1 = ""
    nagstr2 = ""
    nagstr3 = ""
    funcionesxml.inicializacion()
    for i in funcionesxml.SondaArray:
        # Open the three Nagios config files for this probe.
        ficheroservicio = open("./confgSonda/servicio/" + generacionpaginas.formatstring(i[3][0]) + ".cfg", "w")
        ficherohost = open("./confgSonda/host/" + generacionpaginas.formatstring(i[3][0]) + ".cfg", "w")
        ficherohost_group = open("./confgSonda/host_group/" + generacionpaginas.formatstring(i[3][0]) + ".cfg", "w")

        # --- service definitions file ---
        nagstr1 += "## services/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
        for j in funcionesxml.getGroupServices(funcionesxml.getGroupID(i[4])):
            ServicioSonda.append(funcionesxml.getBusinessServiceName(j))
        for k in ServicioSonda:
            nagstr1 += "define service{\n    use: "
            nagstr1 += k + "\n" + "    host_name: " + "---\n" + "    contact_groups: " + "---\n"
            nagstr1 += "}\n\n"

        # --- host_group definitions file ---
        nagstr2 += "## host_group/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
        nagstr2 += "define hostgroup{\n hostgroup_name: " + "---\n " + "alias: " + "---\n " + "members: " + "---\n"
        nagstr2 += "}\n\n"

        # --- host definitions file (placeholder content) ---
        nagstr3 += "## host/" + generacionpaginas.formatstring(i[3][0]) + ".cfg \n\n"
        nagstr3 += " "

        ficheroservicio.write(nagstr1)
        ficherohost.write(nagstr3)
        ficherohost_group.write(nagstr2)
        # BUG FIX: the original said `fichero*.close` (no parentheses),
        # which merely references the bound method and never closes the
        # files; the handles leaked and buffered data relied on GC flush.
        ficheroservicio.close()
        ficherohost.close()
        ficherohost_group.close()


GeneraNagios()
args.get('reboot') force_s = request.args.get('force') template = request.args.get('template') reboot=False force=False skipscan=True skip_del=False if template != 'dom0': usage += "\n The only supported template is 'dom0'" return json_msg(usage) if reboot_s == "True": print "do reboot" if force_s == "True": print "force upgrade" force=True overc._system_upgrade(template, reboot, force, skipscan, skip_del) return json_msg(overc.message) @app.route('/host/rollback') @auth.login_required def host_rollback(): overc=Overc.Overc() overc.host_rollback() return json_msg(overc.message) @app.route('/host/upgrade') @auth.login_required def host_upgrade(): usage = 'Usage: ' + request.url_root + 'host/upgrade?reboot=[True|False]&force=[True|False]' overc=Overc.Overc() reboot_s = request.args.get('reboot') force_s = request.args.get('force') reboot=False force=False if reboot_s == "True": print "do reboot" reboot = True if force_s == "True": print "do force to upgrade" force=True overc._host_upgrade(reboot, force) return json_msg(overc.message) @app.route('/host/update') @auth.login_required def host_update(): overc=Overc.Overc() overc.host_update() return json_msg(overc.message) @app.route('/host/newer') @auth.login_required def host_newer(): overc=Overc.Overc() overc.host_newer() return json_msg(overc.message) @app.route('/container/rollback') @auth.login_required def container_rollback(): usage = 'Usage: ' + request.url_root + 'container/rollback?name=<container name>&snapshot=<snapshot name>&template=<template name> [snapshot optional]' overc=Overc.Overc() container_name = request.args.get('name') snapshot = request.args.get('snapshot') template = request.args.get('template') if container_name is None or template is None: return json_msg(usage) overc._container_rollback(container_name, snapshot, template) return json_msg(overc.message) @app.route('/container/update') @auth.login_required def container_update(): usage = 'Usage: ' + request.url_root + 
'container/update?template=<template name>' overc=Overc.Overc() template = request.args.get('template') if template is None: return json_msg(usage) overc._container_update(template) return json_msg(overc.message) @app.route('/container/list') @auth.login_required def container_list(): usage = 'Usage: ' + request.url_root + 'container/list?template=<template name>' overc=Overc.Overc() template = request.args.get('template') if template is None: return json_msg(usage) overc._container_list(template) return json_msg(overc.message) @app.route('/container/snapshot') @auth.login_required def container_snapshot(): usage = 'Usage: ' + request.url_root + 'container/snapshot?name=<container name>&template=<template name>' overc=Overc.Overc() template = request.args.get('template') container_name = request.args.get('name') if template is None or container_name is None: return json_msg(usage) overc._container_snapshot(container_name, template) return json_msg(overc.message) @app.route('/container/list_snapshots') @auth.login_required def container_list_snapshots(): usage = 'Usage: ' + request.url_root + 'container/list_snapshots?name=<container name>&template=<template name>' overc=Overc.Overc() container_name = request.args.get('name') template = request.args.get('template') if container_name is None or template is None: return json_msg(usage) overc._container_snapshot_list(container_name, template) return json_msg(overc.message) @app.route('/container/send_image') @auth.login_required def container_send_image(): usage
= 'Usage: ' + request.url_root + 'container/send_image?url=<image url>&template=<template name>' overc=Overc.Overc() url = request.args.get('url') template = request.args.
get('template') if url is None or template is None: return json_msg(usage) template_list = os.listdir("/etc/overc/container") if template not in template_list: usage += "\n The template name is not valid" return json_msg(usage) req = urllib2.Request(url) req.get_method = lambda: 'HEAD' try: status = urllib2.urlopen(req) except Exception,e: usage += "\n The image url is not valid" return json_msg(usage) re_code = status.getcode() if ((re_code != None) and (re_code != 200)): usage += "\n The image url is not valid, http status code is: %s" % re_code return json_msg(usage) overc._container_send_image(template, url) return json_msg(overc.message) @app.route('/container/activate') @auth.login_required def container_activate(): usage = 'Usage: ' + request.url_root + 'container/activate?name=<container name>&template=<template name>' overc=Overc.Overc() container_name = request.args.get('name') template = request.args.get('template') if container_name is None or template is None: return json_msg(usage) force = True overc._container_activate(container_name, template, force) return json_msg(overc.message) @app.route('/container/start') @auth.login_required def container_start(): usage = 'Usage: ' + request.url_root + 'container/start?name=<container name>&template=<template name>' overc=Overc.Overc() container_name = request.args.get('name') template = request.args.get('template') if container_name is None or template is None: return json_msg(usage) overc._container_start(container_name, template) return json_msg(overc.message) @app.route('/container/stop') @auth.login_required def container_stop(): usage = 'Usage: ' + request.url_root + 'container/stop?name=<container name>&template=<template name>' overc=Overc.Overc() container_name = request.args.get('name') template = request.args.get('template') if container_name is None or template is None: return json_msg(usage) overc._container_stop(container_name, template) return json_msg(overc.message) 
@app.route('/container/upgrade') @auth.login_required def container_upgrade(): usage = 'Usage: ' + request.url_root + 'container/upgrade?name=<container name>&template=<template name>&rpm=yes|no&image=yes|no' overc=Overc.Overc() container_name = request.args.get('name') template = request.args.get('template') rpm = request.args.get('rpm') image = request.args.get('image') if container_name is None or template is None: return json_msg(usage) if rpm is None or rpm == 'no': rpm_upgrade = False elif rpm == 'yes': rpm_upgrade = True elif rpm != 'no': return json_msg(usage) if image is None or image == 'no': image_upgrade = False elif image == 'yes': image_upgrade = True elif image != 'no': return json_msg(usage) overc._container_upgrade(container_name, template, rpm_upgrade, image_upgrade) return json_msg(overc.message) @app.route('/container/delete') @auth.login_required def container_delete(): usage = 'Usage: ' + request.url_root + 'container/delete?name=<container name>&template=<template name>' overc=Overc.Overc() container_name = request.args.get('name') template = request.args.get('template') if container_name is None or template is None: return json_msg(usage) force = True overc._container_delete(container_name, template, force) return json_msg(overc.message) @app.route('/container/delete_snapshots') @auth.login_required def container_delete_snapshots(): usage = 'Usage: ' + request.url_root + 'container/delete_snapshots?name=<container name>&template=<template name>' overc=Overc.Overc() container_name = request.args.get('name') template = request.args.get('template') if container_name is None or template is None: return json_msg(usage) overc._container_delete_snapshots(c
from django.contrib import admin
from . import models
from django_markdown.admin import MarkdownModelAdmin
from django_markdown.widgets import AdminMarkdownWidget
from django.db.models import TextField

# Register your models here.


class SnippetTagAdmin(admin.ModelAdmin):
    # Tag changelist shows only the slug column.
    list_display = ('slug',)


class SnippetAdmin(admin.ModelAdmin):
    # Edit form layout: main fields, a collapsible date section, and tags.
    fieldsets = [
        (None, {'fields': ['snippet_title', 'snippet_body', 'author', 'publish']}),
        ('Date Information', {'fields': ['modified_date'], 'classes': ['collapse']}),
        ('Tag Library', {'fields': ['snippet_tags']})
    ]
    list_display = ('snippet_title', 'author', 'create_date', 'modified_date')
    search_fields = ['snippet_title']
    # Render every TextField (i.e. snippet_body) with the Markdown editor.
    formfield_overrides = {TextField: {'widget': AdminMarkdownWidget}}
    list_filter = ['create_date', 'publish']


# register the classes with the Admin site
admin.site.register(models.Snippet, SnippetAdmin)
admin.site.register(models.SnippetTag, SnippetTagAdmin)
for time_record in time_records: response_object.append(time_record.json_object()) if more: self.response.headers.add('X-Cursor', next_cursor.urlsafe()) else: # List all Time Records time_records, next_cursor, more = model.TimeRecord.query( ancestor=project_key).order(-model.TimeRecord.created)\ .fetch_page(15) response_object = [] for time_record in time_records: response_object.append(time_record.json_object()) if more: self.response.headers.add('X-Cursor', next_cursor.urlsafe()) # Send response self.response.content_type = 'application/json' self.response.out.write(json.dumps(response_object)) def post(self, project_id): """ Create a new Time Record associated with this Project. """ response_object = {} user = users.get_current_user() if not user: self.abort(401) if not project_id: self.abort(400) project_key = utilities.key_for_urlsafe_id(project_id) if not project_key: self.abort(400) project = project_key.get() if not (project and isinstance(project, model.Project)): self.abort(404) if ((user.email not in project.contributors) and not project.is_owner(user.email)): self.abort(401) request_object = {} if self.request.body: request_object = json.loads(self.request.body) completed = request_object.get('completed') new_time_record_key = model.TimeRecord.create_time_record( project_key, user.email, completed=request_object.get('completed'), name=request_object.get('name')) else: new_time_record_key = model.TimeRecord.create_time_record( project_key, user.email) new_time_record = new_time_record_key.get() response_object = new_time_record.json_object() # Send response self.response.content_type = 'application/json' self.response.out.write(json.dumps(response_object)) def put(self, project_id, time_record_id): """ Update the Time Record. 
""" response_object = {} user = users.get_current_user() if not user: self.abort(401) if not project_id or not time_record_id or not self.request.body: self.abort(400) request_object = json.loads(self.request.body) project_key = utilities.key_for_urlsafe_id(project_id) time_record_key = utilities.key_for_urlsafe_id(time_record_id) if (not project_key or not time_record_key or (project_key != time_record_key.parent())): self.abort(400) project = project_key.get() time_record = time_record_key.get() if (not (project and isinstance(project, model.Project)) or not (time_record and isinstance(time_record, model.TimeRecord))): self.abort(404) if ((user.email not in project.contributors) and not project.is_owner(user.email)): self.abort(401) # Process optional items... name = request_object.get('name') if name: time_record.name = name project.put() end = request_object.get('end') time_record.put() # Check `end` after updating the Project and Time Record; # avoids a bug whereby the Project's original `completed` time is saved. if end: if end is True: time_record.complete_time_record() response_object = time_record.json_object() # Send response self.response.content_type = 'application/json' self.response.out.write(json.dumps(response_object)) class Comments(webapp2.RequestHandler): def get(self, project_id, parent_type=None, parent_id=None): response_object = {} user = users.get_current_user() if not user: self.abort(401) if not project_id: self.abort(400) project_key = utilities.key_for_urlsafe_id(project_id) if not project_key: self.abort(400) project = project_key.get() if not (project and isinstance(project, model.Project)): self.abort(404) if parent_id: # Fetch by Parent ID if parent_type == 'milestones': # Milestones milestone = model.Milestone.for_number(project_key, int(parent_id)) if not milestone: self.abort(404) parent_key = milestone.key else: # assume other... 
parent_key = utilities.key_for_urlsafe_id(parent_id) if not parent_key or (project_key != parent_key.parent()): self.abort(400) parent = parent_key.get() if not parent and not isinstance(parent, model.TimeRecord): self.abort(404) comments = model.Comment.query(ancestor=parent_key) response_object = [] for comment in comments: response_object.append(comment.json_object()) else: # Rely upon Project comments = model.Comment.query(ancestor=project_key) response_object = [] for comment in comments: response_object.append(comment.json_object()) # Send response self.response.content_type = 'application/json' self.response.out.write(json.dumps(response_object)) def post(self, project_id, parent_type=None, parent_id=None): """ Create a new Comment in the specified Project, bound to another object (either a Time Record or a Milestone. """ response_object = {} user = users.get_current_user() if not user: self.abort(401) # Get JSON request body if not project_id or not self.request.body: self.abort(400) request_object = json.loads(self.request.body) comment_content = request_object.get('comment') if not comment_content: self.abort(400) project_key = utilities.key_for_urlsafe_id(project_id) if not project_key: self.abort(400) project = project_key.get() if not (project and isinstance(project, model.Project)): self.abort(404) if ((user.email not in project.contributors) and not project.is_owner(user.email)): self.abort(401) if parent_id: #
Create with a Object other than the Project as this Comment's parent if parent_type == 'milestones': # Milestones milestone = model.Milestone.for_number(project_key, int(parent_id)) if not milestone: self.abort(404) parent_key = milestone.key else: # assume other... parent_key = utilities.key_for_urlsafe_id(parent_i
d) if (not parent_key or (project_key != parent_key.parent())): self.abort(400) parent = parent_key.get() if not (parent and isinstance(parent, model.TimeRecord)): self.abort(404) # Create with `Project` and `Parent` new_comment_key = model.Comment.create_comment( comment_content, parent_key, project_key, user.email) comment = new_comment_key.get() response_object = comment.json_object() else: # Create with `Project` as parent new_comment_key = model.Comment.create_comment( comment_content, project_key, project_key, user.email) comment = new_comment_key.get() response_object = comment.json_object() # Send response
from setuptools import setup
from ast import literal_eval


def get_version(source='xpclr/__init__.py'):
    """Extract ``__version__`` from *source* without importing the package.

    Scans the file line by line and evaluates the right-hand side of the
    first ``__version__ = ...`` assignment with ``ast.literal_eval`` so no
    package code runs at build time.

    Raises:
        ValueError: if no ``__version__`` assignment is found.
    """
    with open(source) as sf:
        for line in sf:
            if line.startswith('__version__'):
                return literal_eval(line.split('=')[-1].lstrip())
    raise ValueError("__version__ not found")


VERSION = get_version()

DISTNAME = 'xpclr'

PACKAGE_NAME = 'xpclr'

DESCRIPTION = 'Code to compute xpclr as described in Chen 2010'

with open('README.md') as f:
    LONG_DESCRIPTION = f.read()

# BUG FIX: both values previously ended with a trailing comma, which made
# them 1-tuples rather than strings; setuptools expects plain strings here.
MAINTAINER = 'Nicholas Harding'

MAINTAINER_EMAIL = 'nicholas.harding@bdi.ox.ac.uk'

URL = 'https://github.com/hardingnj/xpclr'

DOWNLOAD_URL = 'http://github.com/hardingnj/xpclr'

LICENSE = 'MIT'

# strictly speaking, allel requires numpy, scipy and numexpr, but numexpr
# won't install unless numpy is already installed, so leave this blank for now
# and require user to pre-install numpy, scipy and numexpr themselves
INSTALL_REQUIRES = []

CLASSIFIERS = []


def setup_package():
    """Assemble the package metadata and invoke ``setuptools.setup``."""
    metadata = dict(
        name=DISTNAME,
        maintainer=MAINTAINER,
        maintainer_email=MAINTAINER_EMAIL,
        description=DESCRIPTION,
        long_description=LONG_DESCRIPTION,
        license=LICENSE,
        url=URL,
        download_url=DOWNLOAD_URL,
        version=VERSION,
        package_dir={'': '.'},
        packages=['xpclr'],
        scripts=['bin/xpclr'],
        classifiers=CLASSIFIERS,
        install_requires=INSTALL_REQUIRES,
    )
    setup(**metadata)


if __name__ == '__main__':
    setup_package()
#!/usr/bin/python import apt import apt.progress import apt_pkg import logging import re import sys logging.basicConfig(filename1='/var/log/supervisor/rps.log', format='%(asctime)s %(levelname)s: deb_install: %(message)s', level=logging.INFO) logging.getLogger().setLevel(logging.INFO) class control_parser(): def __init__(self): apt_pkg.init() self.cache = apt.Cache() self.cache.update() self.cache.open() def parse(self, path = 'debian/control'): try: tagfile = apt_pkg.TagFile(path) for section in tagfile: deps = section.get('Build-Depends', None) if not deps: continue packages = deps.split(',') for p in packages: self.mark_install(p) self.install() except Exception as e: print "E: %s" % e def mark_install(self, pstr): deps = apt_pkg.parse_depends(pstr) have_version = False for ord in deps: if have_version: break print pstr, ord for d in ord: name = d[0] version_num = d[1] version_op = d[2] p = self.cache[name] if not p: logging.error("Could not find package %s in cache", name) continue if len(version_num) > 0: highest_v = None highest_vnum = 0 for version in p.versions: if apt_pkg.check_dep(version.version, version_op, version_num): have_version = True logging.info("package: %s, version: %s, priority: %s/%d", name, version.version, version.priority, version.policy_priority) if (version.policy_priority > highest_vnum): highest_vnum = version.policy_priority highest_v = version if not have_version: logging.error("Could not required version of the package %s, must be %s %s", name, version_op, version_num) # going for the next ORed version if any
continue p.candidate = highest_v logging.info("package %s, selected version: %s, priority: %s/%d", name, p.candidate.version, p.candidate.priority, p.candidate.policy_priority) logging.info("Going to install package %s", name) p.mark_install(auto_fix=True, auto_inst=True) have_version = True
# do not run for the subsequent ORed packages break if not have_version: logging.fatal("Could not find suitable package %s", pstr) def install(self): self.cache.commit() if __name__ == '__main__': if len(sys.argv) != 2: print "E: usage: %s /path/to/debian/control" % sys.argv[0] cp = control_parser() cp.parse(path = sys.argv[1])
from django.db import models
from django.utils import timezone

import datetime


class Question(models.Model):
    """
    Question object model
    """
    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')

    def __unicode__(self):  # __unicode__ on Python 2
        return self.question_text

    def was_published_recently(self):
        """Return True iff pub_date falls within the last day.

        BUG FIX: the original only checked the lower bound
        (``pub_date >= now - 1 day``), so questions dated in the
        future were reported as "recently published". Bound the
        check on both sides.
        """
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
    # Admin list-display metadata for the method column.
    was_published_recently.admin_order_field = 'pub_date'
    was_published_recently.boolean = True
    was_published_recently.short_description = 'Published recently?'


class Choice(models.Model):
    """
    Choice object model
    """
    question = models.ForeignKey(Question)
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)

    def __unicode__(self):  # __unicode__ on Python 2
        return self.choice_text
#!/usr/bin/python
"""Generate a cgminer pool config whose worker name is derived from the
Raspberry Pi CPU serial number (read from /proc/cpuinfo)."""

import simplejson as json

# Read the whole cpuinfo once; the file is tiny.
# FIX: use context managers so the handles are closed on every path.
with open('/proc/cpuinfo') as i:
    my_text = i.readlines()

username = ""
for line in my_text:
    line = line.strip()
    ar = line.split(' ')
    # cpuinfo lines look like "Serial\t\t: 00000000xxxxxxxx"; after
    # splitting on spaces ar[0] is "Serial...:" and ar[1] is the value.
    if ar[0].startswith('Serial'):
        username = "a" + ar[1]

if not username:
    # No serial number found: abort with a non-zero exit status.
    # (raise SystemExit instead of the interactive-only exit() builtin)
    raise SystemExit(-1)

pools = [{"url": "stratum+tcp://ghash.io:3333",
          "user": username,
          "pass": "12345"}]
conf = {"pools": pools,
        "api-listen": "true",
        "api-port": "4028",
        "api-allow": "W:127.0.0.1"}

with open('/home/pi/.cgminer/cgminer.conf', 'w') as o:
    o.write(json.dumps(conf, sort_keys=True, indent=4 * ' '))
    o.write("\n")
from django.conf.urls.defaults import *
from django.contrib import admin
from fumblerooski.feeds import CoachesFeed

# Syndication feeds served under /feeds/<url>/.
feeds = {
    'coaches': CoachesFeed,
}

admin.autodiscover()

# Site-wide routes: admin, blog, college, rankings, API, feeds and homepage.
urlpatterns = patterns('',
    url(r'^admin/coach_totals/', "fumblerooski.college.views.admin_coach_totals"),
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r"^admin/(.*)", admin.site.root),
    url(r"^blog/", include("fumblerooski.blog.urls")),
    url(r"^college/", include("fumblerooski.college.urls")),
    url(r"^rankings/", include("fumblerooski.rankings.urls")),
    url(r"^api/", include("fumblerooski.api.urls")),
    url(r"^$", "fumblerooski.college.views.homepage"),
    (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed',
        {'feed_dict': feeds}),
)

# Coach-centric views: listings, head-to-head comparisons, hires/departures.
# The (?P<coach>\d+-[-a-z]+) pattern matches "<id>-<slug>" style identifiers.
urlpatterns += patterns('fumblerooski.college.views',
    url(r'^coaches/$', 'coach_index'),
    url(r'^coaches/active/$', 'active_coaches'),
    url(r'^coaches/feeds/recent_hires/$', 'recent_hires_feed'),
    url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/$', 'coach_detail', name="coach_detail"),
    url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/vs/$', 'coach_vs', name="coach_vs"),
    url(r'^coaches/detail/(?P<coach>\d+-[-a-z]+)/vs/(?P<coach2>\d+-[-a-z]+)/$', 'coach_compare', name="coach_compare"),
    url(r'^coaches/assistants/$', 'assistant_index'),
    url(r'^coaches/common/(?P<coach>\d+-[-a-z]+)/(?P<coach2>\d+-[-a-z]+)/$', 'coach_common'),
    url(r'^coaches/departures/(?P<year>\d\d\d\d)/$', 'departures'),
    url(r'^coaches/hires/(?P<year>\d\d\d\d)/$', 'coaching_hires'),
)
# -*- coding: utf-8 -*-
'''
 (c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights
 Reserved.
 The copyright to the software program(s) is property of Telefonica I+D.
 The program(s) may be used and or copied only with the express written
 consent of Telefonica I+D or in accordance with the terms and conditions
 stipulated in the agreement/contract under which the program(s) have
 been supplied.
'''

# Default headers for JSON requests.
HEADERS={'content-type': 'application/json'}

# MQTT broker endpoint.
MQTT_BROKER_HOSTNAME='iotagent'
MQTT_BROKER_PORT='1883'

# IoT agent gateway host and the ports of its services.
GW_HOSTNAME='iotagent'
GW_PORT='8002'
IOT_PORT='8080'
MANAGER_PORT='8081'

# Base URLs derived from the host/port constants above.
GW_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, GW_PORT)
IOT_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, IOT_PORT)
MANAGER_SERVER_ROOT = 'http://{}:{}'.format(GW_HOSTNAME, MANAGER_PORT)

# Context broker endpoint and the FIWARE multi-tenancy headers.
CBROKER_URL='http://10.95.213.159:6500'
CBROKER_HEADER='Fiware-Service'
CBROKER_PATH_HEADER='Fiware-ServicePath'

# SMPP (SMS) gateway endpoint and sender number.
SMPP_URL='http://sbc04:5371'
SMPP_FROM='682996050'

# Defaults applied when an entity/attribute type is not given.
DEF_ENTITY_TYPE='thing'
DEF_TYPE='string'

# Service paths for UL2.0 / MQTT commands and the UL2.0 device simulator.
PATH_UL20_COMMAND='/iot/ngsi/d/updateContext'
PATH_MQTT_COMMAND='/iot/ngsi/mqtt/updateContext'
PATH_UL20_SIMULATOR='/simulaClient/ul20Command'

# Command timeout in seconds.
TIMEOUT_COMMAND=10

# API keys used by the MQTT and UL2.0 test devices.
MQTT_APIKEY='1234'
UL20_APIKEY='apikey3'
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Unit tests for RPCServer's receive/dispatch/reply loop, driving it
with mocked transport, protocol and dispatcher collaborators."""

import pytest
from unittest.mock import Mock, call

from tinyrpc.server import RPCServer
from tinyrpc.transports import ServerTransport
from tinyrpc.protocols import RPCProtocol, RPCResponse
from tinyrpc.dispatch import RPCDispatcher

CONTEXT='sapperdeflap'
RECMSG='out of receive_message'
PARMSG='out of parse_request'
SERMSG='out of serialize'


@pytest.fixture
def transport():
    """Transport stub whose receive_message yields one (context, msg) pair."""
    mock_transport = Mock(ServerTransport)
    mock_transport.receive_message.return_value = (CONTEXT, RECMSG)
    return mock_transport


@pytest.fixture
def protocol():
    """Protocol stub that 'parses' any raw message into PARMSG."""
    mock_protocol = Mock(RPCProtocol)
    mock_protocol.parse_request.return_value = PARMSG
    return mock_protocol


@pytest.fixture
def response():
    """Response stub that serializes to SERMSG."""
    mock_response = Mock(RPCResponse)
    mock_response.serialize.return_value = SERMSG
    return mock_response


@pytest.fixture
def dispatcher(response):
    """Dispatcher stub returning the canned response for every request."""
    mock_dispatcher = Mock(RPCDispatcher)
    mock_dispatcher.dispatch.return_value = response
    return mock_dispatcher


def test_handle_message(transport, protocol, dispatcher):
    """One message flows transport -> protocol -> dispatcher -> transport."""
    server = RPCServer(transport, protocol, dispatcher)
    server.receive_one_message()

    transport.receive_message.assert_called()
    protocol.parse_request.assert_called_with(RECMSG)
    dispatcher.dispatch.assert_called_with(PARMSG, None)
    dispatcher.dispatch().serialize.assert_called()
    transport.send_reply.assert_called_with(CONTEXT, SERMSG)


def test_handle_message_callback(transport, protocol, dispatcher):
    """server.trace is invoked once for the inbound and once for the
    outbound message, in that order."""
    server = RPCServer(transport, protocol, dispatcher)
    server.trace = Mock(return_value=None)
    server.receive_one_message()

    assert server.trace.call_args_list == [
        call('-->', CONTEXT, RECMSG),
        call('<--', CONTEXT, SERMSG),
    ]
    server.trace.assert_called()
# -*- coding: utf-8 -*- # # Copyright (C) 2017 European Synchrotron Radiation Facility, Grenoble, France # # Principal author: Wout De Nolf (wout.de_nolf@esrf.eu) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, mod
ify, merge, publish, distribute, sublicense, and/or sell # copies of the
Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.
#!/usr/bin/env python

"""Grover's quantum search algorithm example."""

from sympy import pprint
from sympy.physics.quantum import qapply
from sympy.physics.quantum.qubit import IntQubit
from sympy.physics.quantum.grover import (OracleGate, superposition_basis,
        WGate, grover_iteration)


def demo_vgate_app(v):
    """Apply the oracle gate *v* to every computational basis state of its
    register and pretty-print the result."""
    for i in range(2**v.nqubits):
        print('qapply(v*IntQubit(%i, %r))' % (i, v.nqubits))
        # FIX: the expression was previously evaluated twice per state
        # (once for pprint, once discarded); evaluate it a single time.
        pprint(qapply(v*IntQubit(i, nqubits=v.nqubits)))


def black_box(qubits):
    """Oracle predicate: True only for the marked state |1>."""
    # The comparison already yields a bool; no ternary needed.
    return qubits == IntQubit(1, nqubits=qubits.nqubits)


def main():
    """Run Grover's search on 2- and 3-qubit registers, printing each step."""
    print()
    print('Demonstration of Grover\'s Algorithm')
    print('The OracleGate or V Gate carries the unknown function f(x)')
    print('> V|x> = ((-1)^f(x))|x> where f(x) = 1 when x = a (True in our case)')
    print('> and 0 (False in our case) otherwise')
    print()

    nqubits = 2
    print('nqubits = ', nqubits)

    v = OracleGate(nqubits, black_box)
    print('Oracle or v = OracleGate(%r, black_box)' % nqubits)
    print()

    psi = superposition_basis(nqubits)
    print('psi:')
    pprint(psi)
    demo_vgate_app(v)
    print('qapply(v*psi)')
    pprint(qapply(v*psi))
    print()

    w = WGate(nqubits)
    print('WGate or w = WGate(%r)' % nqubits)
    print('On a 2 Qubit system like psi, 1 iteration is enough to yield |1>')
    print('qapply(w*v*psi)')
    pprint(qapply(w*v*psi))
    print()

    # On 3 qubits a single Grover iteration is not enough; apply two.
    nqubits = 3
    print('On a 3 Qubit system, it requires 2 iterations to achieve')
    print('|1> with high enough probability')
    psi = superposition_basis(nqubits)
    print('psi:')
    pprint(psi)

    v = OracleGate(nqubits, black_box)
    print('Oracle or v = OracleGate(%r, black_box)' % nqubits)
    print()

    print('iter1 = grover.grover_iteration(psi, v)')
    iter1 = qapply(grover_iteration(psi, v))
    pprint(iter1)
    print()

    print('iter2 = grover.grover_iteration(iter1, v)')
    iter2 = qapply(grover_iteration(iter1, v))
    pprint(iter2)
    print()

if __name__ == "__main__":
    main()
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import numpy as np
import numba

from src_legacy.fourier_series.buffer.ringbuffer import Ringbuffer


@numba.vectorize(nopython=True)
def legendre_recursion(n, x, p1, p2):
    """One step of the Bonnet recursion for Legendre polynomials.

    P_0(x) = 1, P_1(x) = x, and for n >= 2:
        P_n(x) = ((2n - 1)/n) * x * P_{n-1}(x) - ((n - 1)/n) * P_{n-2}(x)

    Arguments are scalars (numba vectorizes elementwise):
    n  -- degree being computed
    x  -- evaluation point
    p1 -- P_{n-1}(x)
    p2 -- P_{n-2}(x)
    """
    if n == 0:
        return 1
    elif n == 1:
        return x
    else:
        c0 = (2*n-1)/n
        c1 = (n-1)/n
        return c0 * x * p1 - c1 * p2


class FastLegendreEval:
    """
    Pure float-64 class for evaluation of legendre polynomials recursively.
    """
    # Degree at which generation starts (also passed as the ring buffer's
    # start index).
    start_index = 0

    def __init__(self, arg, max_degree):
        # arg: evaluation point(s) -- an ndarray or a single float.
        if isinstance(arg, np.ndarray):
            self.arg = arg
            self.size = self.arg.size
        elif isinstance(arg, float):
            self.arg = arg
            self.size = 1
        else:
            raise ValueError()
        # Highest degree to generate; None means generate indefinitely.
        self.max_degree = max_degree

    # @profile
    def generator(self, skip=0):
        """Yield ``(degree, buffer)`` pairs for successive degrees.

        Only the last 3 degrees are kept, in a ring buffer of size 3
        (the recursion needs P_{n-1} and P_{n-2} plus the new row).
        ``skip`` suppresses the first *skip* yields while still advancing
        the recursion.

        NOTE(review): the exact indexing semantics of ``Ringbuffer``
        (``buffer[deg - 1, :]`` returning the row for that degree, and
        ``buffer[:] = arr`` appending the new row) are defined in
        src_legacy.fourier_series.buffer.ringbuffer -- confirm there.
        """
        buffer = Ringbuffer(buffer_size=3,
                            array_size=self.size,
                            dtype=float,
                            start_index=self.start_index,
                            array_size_increment=None,
                            array_margin=0)
        deg = self.start_index
        while self.max_degree is None or deg <= self.max_degree - 1:
            p1 = buffer[deg - 1, :]
            p2 = buffer[deg - 2, :]
            arr = legendre_recursion(deg, self.arg, p1, p2)  # ~73%
            buffer[:] = arr  # ~27%
            if skip == 0:
                yield deg, buffer
            else:
                skip -= 1
            deg += 1
# encoding: utf-8
"""
corduroy.config

Internal state
"""
from __future__ import with_statement
import os, sys
from .atoms import odict, adict, Document

# LATER: add some sort of rcfile support...
# from inspect import getouterframes, currentframe
# _,filename,_,_,_,_ = getouterframes(currentframe())[-1]
# print "from", os.path.dirname(os.path.abspath(filename))

# Library-wide default settings. ``adict`` provides attribute-style access.
defaults = adict({
    "host":"http://127.0.0.1",    # default server url (port appended below)
    "port":5984,
    "uuid_cache":50,              # presumably uuids fetched per batch -- see callers
    "types":adict({
        "doc":Document,           # NOTE(review): assumed wrapper class for documents
        "dict":adict              # mapping type produced when decoding JSON objects
    }),
    "http":adict({
        "max_clients":10,
        "max_redirects":6,
        "timeout":60*60,          # seconds
        "io_loop":None
    })
})

# Prefer simplejson when available, fall back to the stdlib json module.
try:
    import simplejson as _json
except ImportError:
    import json as _json


class json(object):
    """Thin wrapper pinning library-wide JSON options.

    NOTE: intentionally shadows the stdlib ``json`` name within this module;
    the real module is held as ``_json``.
    """
    @classmethod
    def decode(cls, string, **opts):
        """Decode the given JSON string.

        :param string: the JSON string to decode
        :type string: basestring
        :return: the corresponding Python data structure
        :rtype: object
        """
        # object_hook turns every decoded JSON object into the configured
        # mapping type (adict by default).
        return _json.loads(string, object_hook=defaults.types.dict, **opts)

    @classmethod
    def encode(cls, obj, **opts):
        """Encode the given object as a JSON string.

        :param obj: the Python data structure to encode
        :type obj: object
        :return: the corresponding JSON string
        :rtype: basestring
        """
        # NOTE(review): the ``encoding`` kwarg is Python 2 / simplejson
        # only; the Python 3 stdlib json.dumps rejects it.
        return _json.dumps(obj, allow_nan=False, ensure_ascii=False,
                           encoding='utf-8', **opts)
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from django.core.urlresolvers import reverse  # noqa
from django.template.defaultfilters import title  # noqa
from django.utils.translation import ugettext_lazy as _  # noqa

from horizon import tables
from horizon.utils import filters

from openstack_dashboard import api

# Backup status -> row state flag (per horizon's status_choices convention:
# True = done, False = failed, None = transient/still polling).
STATUS_CHOICES = (
    ("BUILDING", None),
    ("COMPLETED", True),
    ("DELETE_FAILED", False),
    ("FAILED", False),
    ("NEW", None),
    ("SAVING", None),
)


class LaunchLink(tables.LinkAction):
    """Table action opening the backup-creation modal."""
    name = "create"
    verbose_name = _("Create Backup")
    url = "horizon:project:database_backups:create"
    classes = ("btn-launch", "ajax-modal")


class RestoreLink(tables.LinkAction):
    """Row action launching a new database instance from this backup."""
    name = "restore"
    verbose_name = _("Restore Backup")
    url = "horizon:project:databases:launch"
    classes = ("btn-launch", "ajax-modal")

    def get_link_url(self, datam):
        # Pre-select this backup in the launch form via a query parameter.
        url = reverse(self.url)
        return url + '?backup=%s' % datam.id


class DeleteBackup(tables.BatchAction):
    """Batch/row action deleting the selected backup(s) via trove."""
    name = "delete"
    action_present = _("Delete")
    action_past = _("Scheduled deletion of")
    data_type_singular = _("Backup")
    data_type_plural = _("Backups")
    classes = ('btn-danger', 'btn-terminate')

    def action(self, request, obj_id):
        api.trove.backup_delete(request, obj_id)


class UpdateRow(tables.Row):
    """Ajax row updater that re-fetches a single backup."""
    ajax = True

    def get_data(self, request, backup_id):
        backup = api.trove.backup_get(request, backup_id)
        try:
            # The owning instance may already be gone; render without it.
            backup.instance = api.trove.instance_get(request,
                                                     backup.instance_id)
        except Exception:
            pass
        return backup


def db_link(obj):
    # Link to the instance detail page; returns None when the instance is
    # no longer available (UpdateRow could not attach it).
    if not hasattr(obj, 'instance'):
        return
    if hasattr(obj.instance, 'name'):
        return reverse(
            'horizon:project:databases:detail',
            kwargs={'instance_id': obj.instance_id})


def db_name(obj):
    # Prefer the human-readable instance name, fall back to its id.
    # NOTE(review): unlike db_link, this does not guard against a missing
    # ``instance`` attribute -- verify obj always carries it here.
    if hasattr(obj.instance, 'name'):
        return obj.instance.name
    return obj.instance_id


class BackupsTable(tables.DataTable):
    """Horizon table listing database backups for the current project."""
    name = tables.Column("name",
                         link=("horizon:project:database_backups:detail"),
                         verbose_name=_("Name"))
    created = tables.Column("created", verbose_name=_("Created At"),
                            filters=[filters.parse_isotime])
    location = tables.Column(lambda obj: _("Download"),
                             link=lambda obj: obj.locationRef,
                             verbose_name=_("Backup File"))
    instance = tables.Column(db_name, link=db_link,
                             verbose_name=_("Database"))
    status = tables.Column("status",
                           filters=(title, filters.replace_underscores),
                           verbose_name=_("Status"),
                           status=True,
                           status_choices=STATUS_CHOICES)

    class Meta:
        name = "backups"
        verbose_name = _("Backups")
        status_columns = ["status"]
        row_class = UpdateRow
        table_actions = (LaunchLink, DeleteBackup)
        row_actions = (RestoreLink, DeleteBackup)
""" Forum attachments models ======================== This module defines models provided by the ``forum_attac
hments`` application. """ from machina.apps.forum_conversation.forum_attachments.abstract_models import AbstractAttachment from machina.core.db.models import model_factory A
ttachment = model_factory(AbstractAttachment)
from nose.tools import eq_, ok_

from remo.base.tests import RemoTestCase
from remo.base.utils import get_date
from remo.profiles.forms import ChangeUserForm, UserStatusForm
from remo.profiles.models import UserStatus
from remo.profiles.tests import UserFactory, UserStatusFactory


class ChangeUserFormTest(RemoTestCase):
    """Validation tests for ChangeUserForm's login e-mail handling."""

    def test_change_valid_login_email(self):
        """Test change login email with a valid one."""
        mentor = UserFactory.create(groups=['Mentor'],
                                    userprofile__initial_council=True)
        rep = UserFactory.create(groups=['Rep'], userprofile__mentor=mentor,
                                 last_name='Doe')
        # Re-submitting the rep's own e-mail must be accepted.
        data = {'first_name': rep.first_name,
                'last_name': rep.last_name,
                'email': rep.email}
        form = ChangeUserForm(data=data, instance=rep)
        ok_(form.is_valid())

    def test_change_invalid_login_email(self):
        """Test change login email with an invalid one."""
        mentor = UserFactory.create(groups=['Mentor'],
                                    userprofile__initial_council=True)
        rep = UserFactory.create(groups=['Rep'], userprofile__mentor=mentor)
        # Another user's e-mail must be rejected.
        data = {'first_name': rep.first_name,
                'last_name': rep.last_name,
                'email': mentor.email}
        form = ChangeUserForm(data=data, instance=rep)
        ok_(not form.is_valid())


class UserStatusFormTests(RemoTestCase):
    """Validation tests for the unavailability (UserStatus) form."""

    def test_base(self):
        """A valid start/expected date pair saves successfully."""
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        start_date = get_date()
        expected_date = get_date(days=1)
        data = {'start_date': start_date,
                'expected_date': expected_date}
        form = UserStatusForm(data, instance=UserStatus(user=user))
        ok_(form.is_valid())
        db_obj = form.save()
        eq_(db_obj.expected_date, get_date(days=1))
        eq_(db_obj.user.get_full_name(), user.get_full_name())

    def test_invalid_expected_date(self):
        """An expected date too far in the future is rejected."""
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        start_date = get_date()
        expected_date = get_date(weeks=15)
        data = {'start_date': start_date,
                'expected_date': expected_date}
        form = UserStatusForm(data, instance=UserStatus(user=user))
        ok_(not form.is_valid())
        ok_('expected_date' in form.errors)

    def test_start_date_in_the_past(self):
        """A start date before today is rejected."""
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        start_date = get_date(-1)
        expected_date = get_date(days=2)
        data = {'start_date': start_date,
                'expected_date': expected_date}
        form = UserStatusForm(data, instance=UserStatus(user=user))
        ok_(not form.is_valid())
        ok_('start_date' in form.errors)

    def test_expected_date_before_start_date(self):
        """An expected return date earlier than the start date is rejected."""
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        start_date = get_date(4)
        expected_date = get_date(days=2)
        data = {'start_date': start_date,
                'expected_date': expected_date}
        form = UserStatusForm(data, instance=UserStatus(user=user))
        ok_(not form.is_valid())
        ok_('expected_date' in form.errors)

    def remove_unavailability_status(self):
        """Saving the form over an existing status keeps it valid.

        NOTE(review): this method name lacks the ``test_`` prefix, so the
        test runner will not collect or run it -- confirm whether that is
        intentional (the trailing assertions may not hold as written).
        """
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        start_date = get_date()
        expected_date = get_date(days=1)
        data = {'start_date': start_date,
                'expected_date': expected_date}
        user_status = UserStatusFactory.create(user=user,
                                               expected_date=expected_date,
                                               start_date=start_date)
        form = UserStatusForm(data, instance=user_status)
        ok_(form.is_valid())
        ok_(not user_status.end_date)
        db_obj = form.save()
        eq_(db_obj.expected_date, get_date())
        eq_(db_obj.user.get_full_name(), user.get_full_name())
        ok_(db_obj.return_date)
"""Public API for Fortran parser. Module content -------------- """ from __future__ import absolute_import #Author: Pearu Peterson <pearu@cens.ioc.ee> #Created: Oct 2006 __autodoc__ = ['get_reader', 'parse', 'walk'] from . import Fortran2003 # import all Statement classes: from .base_classes import EndStatement, classes from .block_statements import * # CHAR_BIT is used to convert object bit sizes to byte sizes from .utils import CHAR_BIT def get_reader(input, isfree=None, isstrict=None, include_dirs = None, source_only = None, ignore_comments = True): """ Returns Fortran reader instance. Parameters ---------- input : str Specify a string or filename containing Fortran code. isfree, isstrict : {None, bool} Specify input Fortran format. The values are determined from the input. If that fails then isfree=True and isstrict=False is assumed. include_dirs : {None, list} Specify a list of include directories. The default list (when include_dirs=None) contains the current working directory and the directory of ``filename``. source_only : {None, list} Specify a list of Fortran file names that are searched when the ``USE`` statement is encountered. Returns ------- reader : `FortranReader` Notes ----- If ``input`` is a C filename then the functions searches for comment lines starting with ``/*f2py`` and reads following lines as PYF file content until a line ``*/`` is found. See also -------- parse """ import os import re from .readfortran import FortranFileReader, FortranStringReader if os.path.isfile(input): name,ext = os.path.splitext(input) if ext.lower() in ['.c']: # get signatures from C file comments starting with `/*f2py` and ending with `*/`. # TODO: improve parser to take line number offset making line numbers in # parser messages correct. 
f2py_c_comments = re.compile('/[*]\s*f2py\s.*[*]/',re.I | re.M) f = open(filename,'r') c_input = '' for s1 in f2py_c_comments.findall(f.read()): c_input += s1[2:-2].lstrip()[4:] + '\n' f.close() if isfree is None: isfree = True if isstrict is None: isstrict = True return parse(c_input, isfree, isstrict, include_dirs) reader = FortranFileReader(input, include_dirs = include_dirs, source_only = source_only) elif isinstance(input, str): reader = FortranStringReader(input, include_dirs = include_dirs, source_only = source_only) else: raise TypeError('Expected string or filename input but got %s' % (type(input))) if isfree is None: isfree = reader.isfree if isstrict is None: isstrict = reader.isstrict reader.set_mode(isfree, isstrict) return reader def parse(input, isfree=None, isstrict=None, include_dirs = None, source_only = None, ignore_comments = True, analyze=True): """ Parse input and return Statement tree. Parameters ---------- input : str Specify a string or filename containing Fortran code. isfree, isstrict : {None, bool} Specify input Fortran format. The values are determined from the input. If that fails then isfree=True and isstrict=False is assumed. include_dirs : {None, list} Specify a list of include directories. The default list (when include_dirs=None) contains the current working directory and the directory of ``filename``. source_only : {None, list} Specify a list of Fortran file names that are searched when the ``USE`` statement is encountered. ignore_comments : bool When True then discard all comment lines in the Fortran code. analyze : bool When True then apply run analyze method on the Fortran code tree. Returns ------- block : `fparser.api.BeginSource` Examples -------- >>> code = ''' ... c comment ... subroutine foo(a) ... integer a ... print*, "a=",a ... end ... 
''' >>> tree = parse(code,isfree=False) >>> print tree !BEGINSOURCE <cStringIO.StringI object at 0x1798030> mode=fix90 SUBROUTINE foo(a) INTEGER a PRINT *, "a=", a END SUBROUTINE foo >>> print
`tree` BeginSource blocktype='beginsource' name='<cStringIO.StringI object at 0x1798030> mode=fix90' a=AttributeHolder: external_subprogram=<dict with keys ['foo']>
content: Subroutine args=['a'] item=Line('subroutine foo(a)',(3, 3),'') a=AttributeHolder: variables=<dict with keys ['a']> content: Integer selector=('', '') entity_decls=['a'] item=Line('integer a',(4, 4),'') Print item=Line('print*, "a=",a',(5, 5),'') EndSubroutine blocktype='subroutine' name='foo' item=Line('end',(6, 6),'') See also -------- get_reader """ from .parsefortran import FortranParser reader = get_reader(input, isfree, isstrict, include_dirs, source_only) parser = FortranParser(reader, ignore_comments = ignore_comments) parser.parse() if analyze: parser.analyze() return parser.block def walk(stmt, depth=-1, _initial_depth = None): """ Generate Fortran statements by walking the stmt tree until given depth. For each block statement in stmt, the walk functions yields a tuple ``(statement, depth)`` where ``depth`` is the depth of tree stucture for statement. Parameters ---------- stmt : Statement depth : int If depth is positive then walk in the tree until given depth. If depth is negative then walk the whole tree. 
Returns ------- generator Examples -------- :: from fparser import api source_str = ''' subroutine foo integer i, r do i=1,100 r = r + i end do end ''' tree = api.parse(source_str) for stmt, depth in api.walk(tree): print depth, stmt.item that will print:: 1 line #2'subroutine foo' 2 line #3'integer i, r' 2 line #4'do i=1,100' 3 line #5'r = r + i' 2 line #6'end do' 1 line #7'end' """ if _initial_depth is None: if depth==0: return _initial_depth = depth if not isinstance(stmt, classes.BeginSource): yield stmt, _initial_depth - depth if isinstance(stmt, classes.BeginStatement): last_stmt = stmt.content[-1] last_index = len(stmt.content) if isinstance(last_stmt, classes.EndStatement): last_index -= 1 else: last_stmt = None if depth != 0: for substmt in stmt.content[:last_index]: for statement, statement_depth in walk(substmt, depth-1, _initial_depth): yield statement, statement_depth if last_stmt is not None: yield last_stmt, _initial_depth - depth
#!/usr/bin/env python3
'''
Make a stream emit at the pace of a slower stream

Pros: Introduce a delay between events in an otherwise rapid stream (like range)

Cons: When the stream being delayed runs out of events to push, the zipped
stream will keep pushing events, defined with the lambda fn passed to the zip
operation.
'''
from time import sleep

from rx import Observable

# Generate an interval sequence, firing once each second
interval = Observable.interval(1000)

# 5..10
numbers = Observable.from_(range(5, 11))

# Zip two streams together so it emits at the pace of the slowest stream
source = Observable.zip(
    interval,
    numbers,
    # Because we only push the elements of the `numbers` stream,
    # As soon as it runs out of events, it will keep sending empty
    # events to the subscribers
    lambda _, n: n
)

# Subscriber callbacks are: on_next, on_error, on_completed.
sub1 = source.subscribe(
    lambda v : print("Value published to observer 1: {0}".format(v)),
    lambda e : print("Error! {0}".format(e)),
    lambda : print("Completed!")
)

sub2 = source.subscribe(
    lambda v : print("Value published to observer 2: {0}".format(v)),
    lambda e : print("Error! {0}".format(e)),
    lambda : print("Completed!")
)

# As noted above, we have to dispose the subscriptions before the `numbers`
# streams runs out, or the program will get stuck listening to empty events
sleep(5)
sub1.dispose()
sub2.dispose()

# => Value published to observer 1: 5
# => Value published to observer 2: 5
# => Value published to observer 1: 6
# => Value published to observer 2: 6
# => Value published to observer 2: 7
# => Value published to observer 1: 7
# => Value published to observer 2: 8
# => Value published to observer 1: 8
alt13 = 1 elif ((FIRST <= LA13_0 <= THIRD)) : alt13 = 2 else: nvae = NoViableAltException("", 13, 0, self.input) raise nvae if alt13 == 1: pass self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec583) self.month_set = self.month_set.union(set([ self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY), self.ValueOf(OCTOBER)])) elif alt13 == 2: pass pass self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec595) self.quarter_ordinals() self._state.following.pop() self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec597) self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec599) self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec601) except RecognitionException, re: self.reportError(re) self.recover(self.input, re) finally: pass return def quarter_ordinals(self, ): try: try: pass pass self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals620) self.month_of_quarter_ordinal() self._state.following.pop() while True: alt14 = 2 LA14_0 = self.input.LA(1) if (LA14_0 == COMMA) : alt14 = 1 if alt14 == 1: pass self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals623) self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals625) self.month_of_quarter_ordinal() self._state.following.pop() else: break except RecognitionException, re: self.reportError(re) self.recover(self.input, re) finally: pass return def month_of_quarter_ordinal(self, ): offset = None try: try: pass offset = self.input.LT(1) if (FIRST <= self.input.LA(1) <= THIRD): self.input.consume() self._state.errorRecovery = False else: mse = MismatchedSetException(None, self.input) raise mse jOffset = self.ValueOf(offset.type) - 1 self.month_set = self.month_set.union(set([ jOffset + self.ValueOf(JANUARY), jOffset + self.ValueOf(APRIL), jOffset + self.ValueOf(JULY), jOffset + self.ValueOf(OCTOBER)])) except RecognitionException, re: self.reportError(re) self.recover(self.input, re) 
finally: pass return def time_range(self, ): start_time = None end_time = None try: try: pass pass self.match(self.input, FROM, self.FOLLOW_FROM_in_time_range673) pass start_time=self.match(self.input, TIME, self.FOLLOW_TIME_in_time_range680) self.start_time_string = start_time.text self.match(self.input, TO, self.FOLLOW_TO_in_time_range691) pass end_time=self.match(self.input, TIME, self.FOLLOW_TIME_in_time_range698) self.end_time_string = end_time.text except RecognitionException, re: self.reportError(re) self.recover(self.input, re) finally: pass return DFA4_eot = DFA.unpack( u"\13\uffff" ) DFA4_eof = DFA.unpack( u"\13\uffff" ) DFA4_min = DFA.unpack( u"\1\6\1\23\1\12\1\uffff\2\4\1\13\1\uffff\1\24\1\12\1\4" ) DFA4_max = DFA.unpack( u"\1\20\2\32\1\uffff\1\5\1\12\1\20\1\uffff\2\32\1\12" ) DFA4_accept = DFA.unpack( u"\3\uffff\1\1\3\uffff\1\2\3\uffff" ) DFA4_special = DFA.unpack( u"\13\uffff" ) DFA4_transition = [ DFA.unpack(u"\1\1\2\3\2\uffff\6\2"), DFA.unpack(u"\1\4\7\5"), DFA.unpack(u"\1\6\10\uffff\1\4\7\5"), DFA.unpack(u""), DFA.unpack(u"\1\3\1\7"), DFA.unpack(u"\1\3\1\7\4\uffff\1\10"), DFA.unpack(u"\6\11"), DFA.unpack(u""), DFA.unpack(u"\7\12"), DFA.unpack(u"\1\6\10\uffff\1\4\7\5"), DFA.unpack(u"\1\3\1\7\4\uffff\1\10") ] DFA4 = DFA FOLLOW_specifictime_in_timespec44 = frozenset([]) FOLLOW_interval_in_timespec48 = frozenset([]) FOLLOW_EOF_in_timespec52 = frozenset([1]) FOLLOW_ordinals_in_specifictime72 = frozenset([19, 20, 21, 22, 23, 24, 25, 26]) FOLLOW_weekdays_in_specifictime74 = frozenset([4]) FOLLOW_monthdays_in_specifictime77 = frozenset([4]) FOLLOW_OF_in_specifictime80 = frozenset([11, 12, 13, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40]) FOLLOW_monthspec_in_specifictime83 = frozenset([5]) FOLLOW_quarterspec_in_specifictime85 = frozenset([5]) FOLLOW_ordinals_in_specifictime101 = frozenset([19, 20, 21, 22, 23, 24, 25, 26]) FOLLOW_weekdays_in_specifictime103 = frozenset([5]) FOLLOW_TIME_in_specifictime117 = frozenset([1]) 
FOLLOW_EVERY_in_interval136 = frozenset([7, 8]) FOLLOW_set_in_interval146 = frozenset([17, 18]) FOLLOW_period_in_interval164 = frozenset([1, 9, 41]) FOLLOW_time_range_in_interval176 = frozenset([1]) FOLLOW_SYNCHRONIZED_in_interval189 = frozenset([1]) FOLLOW_EVERY_in_ordinals218 = frozenset([1]) FOLLOW_ordinal_in_ordinals226 = frozenset([1, 10]) FOLLOW_COMMA_in_ordinals229 = frozenset([11, 12, 13, 14, 15, 16]) FOLLOW_ordinal_in_ordinals231 = frozenset([1, 10]) FOLLOW_set_in_ordinal252 = frozenset([1]) FOLLOW_set_in_period291 = frozenset([1]) FOLLOW_monthday_in_monthdays314 = frozenset([1, 10]) FOLLOW_COMMA_in_monthdays318 = frozens
et([7, 8]) FOLLOW_monthday_in_monthdays320 = frozenset([1, 10]) FOLLOW_set_in_monthday340 = frozenset([1]) FOLLOW_DAY_in_weekdays365 = frozenset([1]) FOLLOW_weekday_in_weekdays373 = frozenset([1, 10]) FOLLOW_COMMA_in_weekdays376 = frozenset([19, 20, 21, 22, 23, 24, 25, 26]) FOLLOW_weekday_in_weekdays378 = frozenset([1, 10]) FOLLOW_set_in_weekday400 = frozenset([1]) FOLLOW_MONTH_in_monthspec459 = frozenset([1]) FOLLOW_months_in_monthspec469 = frozenset([1]) FOLLO
W_month_in_months486 = frozenset([1, 10]) FOLLOW_COMMA_in_months489 = frozenset([27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39]) FOLLOW_month_in_months491 = frozenset([1, 10]) FOLLOW_set_in_month510 = frozenset([1]) FOLLOW_QUARTER_in_quarterspec583 = frozenset([1]) FOLLOW_quarter_ordinals_in_quarterspec595 = frozenset([27]) FOLLOW_MONTH_in_quarterspec597 = frozenset([4]) FOLLOW_OF_in_quarterspec599 = frozenset([40]) FOLLOW_QUARTER_in_quarterspec601 = frozenset([1]) FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals620 = frozenset([1, 10]) FOLLOW_COMMA_in_quarter_ordinals623 = frozenset([11, 12, 13, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40]) FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals625 = frozenset([1, 10]) FOLLOW_set_in_month_of_quarter_ordinal644 = frozenset([1]) FOLLOW_FROM_in_time_range673 = frozenset([5]) FOLLOW_TIME_in_time_range680 = frozenset([42]) FOLLOW_TO_in_time_range691 = frozenset([5]) FOLLOW_TIME_in_time_range698 = frozenset([1]) def main(argv, stdin=sys.stdin, st
"""Deprecated import support. Auto-generated by import_shims/generate_shims.sh.""" # pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long from import_shims.warn import warn_deprecated_import warn_deprecated_import('contentstore.management.commands.tests.test_sync_courses', 'cms.djangoapps.contentstore.management.commands.tests.test_sync_courses')
from cms.djangoapps.contentst
ore.management.commands.tests.test_sync_courses import *
) self.max_dist = max_dist self.hicmesh = None def _plot(self, region=None, cax=None): if region is None: raise ValueError("Cannot plot triangle plot for whole genome.") hm, sr = sub_matrix_regions(self.hic_matrix, self.regions, region) hm[np.tril_indices(hm.shape[0])] = np.nan # Remove part of matrix further away than max_dist if self.max_dist is not None: for i in range(hm.shape[0]): i_region = sr[i] for j in range(hm.shape[1]): j_region = sr[j] if j_region.start-i_region.end > self.max_dist: hm[i, j] = np.nan hm_masked = np.ma.MaskedArray(hm, mask=np.isnan(hm)) # prepare an array of the corner coordinates of the Hic-matrix # Distances have to be scaled by sqrt(2), because the diagonals of the bins # are sqrt(2)*len(bin_size) sqrt2 = math.sqrt(2) bin_coords = np.r_[[(x.start - 1) for x in sr], sr[-1].end]/sqrt2 X, Y = np.meshgrid(bin_coords, bin_coords) # rotatate coordinate matrix 45 degrees sin45 = math.sin(math.radians(45)) X_, Y_ = X*sin45 + Y*sin45, X*sin45 - Y*sin45 # shift x coords to correct start coordinate and center the diagonal directly on the # x-axis X_ -= X_[1, 0] - (sr[0].start - 1) Y_ -= .5*np.min(Y_) + .5*np.max(Y_) # create plot self.hicmesh = self.ax.pcolormesh(X_, Y_, hm_masked, cmap=self.colormap, norm=self.norm) # set limits and aspect ratio #self.ax.set_aspect(aspect="equal") ylim_max = 0.5*(region.end-region
.start) if self.max_dist is not None and self.max_dist/2 < ylim_max: ylim_max = self.max_dist/2 self.ax.set_ylim(0, ylim_max) # remove y ticks self.ax.set_yticks([])
# hide background patch self.ax.patch.set_visible(False) if self.show_colorbar: self.add_colorbar(cax) def set_clim(self, vmin, vmax): self.hicmesh.set_clim(vmin=vmin, vmax=vmax) if self.colorbar is not None: self.colorbar.vmin = vmin self.colorbar.vmax = vmax self.colorbar.draw_all() class DataArrayPlot(BasePlotter1D): def __init__(self, data, window_sizes=None, regions=None, title='', midpoint=None, colormap='coolwarm_r', vmax=None, current_window_size=0, log_y=True): if regions is None: regions = [] for i in range(data.shape[1]): regions.append(GenomicRegion(chromosome='', start=i, end=i)) self.regions = regions BasePlotter1D.__init__(self, title=title) self.da = data if window_sizes is None: window_sizes = [] try: l = len(data) except TypeError: l = data.shape[0] for i in range(l): window_sizes.append(i) self.window_sizes = window_sizes self.colormap = colormap self.midpoint = midpoint self.mesh = None self.vmax = vmax self.window_size_line = None self.current_window_size = current_window_size self.log_y = log_y def _plot(self, region=None, cax=None): da_sub, regions_sub = sub_data_regions(self.da, self.regions, region) da_sub_masked = np.ma.MaskedArray(da_sub, mask=np.isnan(da_sub)) bin_coords = np.r_[[(x.start - 1) for x in regions_sub], regions_sub[-1].end] x, y = np.meshgrid(bin_coords, self.window_sizes) self.mesh = self.ax.pcolormesh(x, y, da_sub_masked, cmap=self.colormap, vmax=self.vmax) self.colorbar = plt.colorbar(self.mesh, cax=cax, orientation="vertical") self.window_size_line = self.ax.axhline(self.current_window_size, color='red') if self.log_y: self.ax.set_yscale("log") self.ax.set_ylim((np.nanmin(self.window_sizes), np.nanmax(self.window_sizes))) def set_clim(self, vmin, vmax): self.mesh.set_clim(vmin=vmin, vmax=vmax) if self.colorbar is not None: self.colorbar.vmin = vmin self.colorbar.vmax = vmax self.colorbar.draw_all() def update(self, window_size): self.window_size_line.set_ydata(window_size) class TADPlot(BasePlotter1D): def __init__(self, 
regions, title='', color='black'): BasePlotter1D.__init__(self, title=title) self.regions = regions self.color = color self.current_region = None def _plot(self, region=None, cax=None): self.current_region = region try: sr, start_ix, end_ix = sub_regions(self.regions, region) trans = self.ax.get_xaxis_transform() for r in sr: region_patch = patches.Rectangle( (r.start, .2), width=abs(r.end - r.start), height=.6, transform=trans, facecolor=self.color, edgecolor='white', linewidth=2. ) self.ax.add_patch(region_patch) except ValueError: pass self.ax.axis('off') def update(self, regions): self.regions = regions self.ax.cla() self.plot(region=self.current_region, ax=self.ax) class DataLinePlot(BasePlotter1D): def __init__(self, data, regions=None, title='', init_row=0, is_symmetric=False): BasePlotter1D.__init__(self, title=title) if regions is None: regions = [] for i in range(len(data)): regions.append(GenomicRegion(chromosome='', start=i, end=i)) self.init_row = init_row self.data = data self.sr = None self.da_sub = None self.regions = regions self.current_region = None self.line = None self.current_ix = init_row self.current_cutoff = None self.cutoff_line = None self.cutoff_line_mirror = None self.is_symmetric = is_symmetric def _new_region(self, region): self.current_region = region self.da_sub, self.sr = sub_data_regions(self.data, self.regions, region) def _plot(self, region=None, cax=None): self._new_region(region) bin_coords = [(x.start - 1) for x in self.sr] ds = self.da_sub[self.init_row] self.line, = self.ax.plot(bin_coords, ds) if not self.is_symmetric: self.current_cutoff = (self.ax.get_ylim()[1] - self.ax.get_ylim()[0]) / 2 + self.ax.get_ylim()[0] else: self.current_cutoff = self.ax.get_ylim()[1]/ 2 self.ax.axhline(0.0, linestyle='dashed', color='grey') self.cutoff_line = self.ax.axhline(self.current_cutoff, color='r') if self.is_symmetric: self.cutoff_line_mirror = self.ax.axhline(-1*self.current_cutoff, color='r') self.ax.set_ylim((np.nanmin(ds), 
np.nanmax(ds))) def update(self, ix=None, cutoff=None, region=None, update_canvas=True): if region is not None: self._new_region(region) if ix is not None and ix != self.current_ix: ds = self.da_sub[ix] self.current_ix = ix self.line.set_ydata(ds) self.ax.set_ylim((np.nanmin(ds), np.nanmax(ds))) if cutoff is None: if not self.is_symmetric: self.update(cutoff=(self.ax.get_ylim()[1]-self.ax.get_ylim()[0])/2 + self.ax.get_ylim()[0], update_canvas=False) else: self.update(cutoff=self.ax.get_ylim()[1] / 2, update_canvas=False) if update_canvas: self.fig.canvas.draw() if cutoff is not None and cutoff != self.current_cutoff: if self.is_symmetric: self.current_cutoff = abs(cutoff) else: self.current_cutoff = cutoff self.cutoff_line.set_ydata(self.current_cutoff) if self.is_symmetric: self.cutoff_line_mirro
from django.db import models
from django.contrib.auth.models import User


class OrganisationType(models.Model):
    """Lookup table for the kinds of organisation known to the system."""

    type_desc = models.CharField(max_length=200)

    def __unicode__(self):
        return self.type_desc


class Address(models.Model):
    """Postal address attached to the user-profile models below."""

    street_address = models.CharField(max_length=100)
    city = models.CharField(max_length=100)
    pin = models.CharField(max_length=10)
    province = models.CharField(max_length=100)
    nationality = models.CharField(max_length=100)

    def __unicode__(self):
        return self.street_address + ',' + self.city


class HattiUser(models.Model):
    """Common profile fields layered on top of Django's ``User``.

    Abstract base: each concrete subclass (``AdminOrganisations``,
    ``Customer``) gets its own table containing these columns.
    """

    user = models.OneToOneField(User)
    address = models.ForeignKey(Address)
    telephone = models.CharField(max_length=500)
    date_joined = models.DateTimeField(auto_now_add=True)
    fax = models.CharField(max_length=100)
    avatar = models.CharField(max_length=100, null=True, blank=True)
    tagline = models.CharField(max_length=140)

    class Meta:
        abstract = True


class AdminOrganisations(HattiUser):
    """Profile for an organisation managed by an administrator."""

    title = models.CharField(max_length=200)
    organisation_type = models.ForeignKey(OrganisationType)

    def __unicode__(self):
        return self.title


class Customer(HattiUser):
    """Customer profile; ``is_org`` marks organisation (vs individual) accounts."""

    title = models.CharField(max_length=200, blank=True, null=True)
    is_org = models.BooleanField()
    org_type = models.ForeignKey(OrganisationType)
    company = models.CharField(max_length=200)

    # BUG FIX: __unicode__ previously took a spurious second positional
    # parameter (``arg``); Django invokes unicode(obj) with no arguments,
    # which raised TypeError for every Customer instance.
    def __unicode__(self):
        return unicode(self.user)
# Please see the file LICENSE.txt for details. # from __future__ import print_function import sys, os import logging, logging.handlers from ginga import AstroImage from ginga.gtkw import GtkHelp from ginga.gtkw.ImageViewGtk import CanvasView from ginga.canvas.CanvasObject import get_canvas_types from ginga import colors from ginga.misc import log import gtk STD_FORMAT = '%(asctime)s | %(levelname)1.1s | %(filename)s:%(lineno)d (%(funcName)s) | %(message)s' class FitsViewer(object): def __init__(self, logger): self.logger = logger self.drawcolors = colors.get_colors() self.dc = get_canvas_types() root = gtk.Window(gtk.WINDOW_TOPLEVEL) root.set_title("Gtk2 CanvasView Example") root.set_border_width(2) root.connect("delete_event", lambda w, e: quit(w)) self.root = root self.select = GtkHelp.FileSelection(root) vbox = gtk.VBox(spacing=2) fi = CanvasView(logger) fi.enable_autocuts('on') fi.set_autocut_params('zscale') fi.enable_autozoom('on') fi.set_zoom_algorithm('rate') fi.set_zoomrate(1.4) fi.show_pan_mark(True) fi.set_callback('drag-drop', self.drop_file) fi.set_callback('none-move', self.motion) fi.set_bg(0.2, 0.2, 0.2) fi.ui_setActive(True) self.fitsimage = fi bd = fi.get_bindings() bd.enable_all(True) # canvas that we will draw on canvas = self.dc.DrawingCanvas() canvas.enable_draw(True) canvas.set_drawtype('rectangle', color='lightblue') canvas.setSurface(fi) self.canvas = canvas # add canvas to view private_canvas = fi.get_canvas() private_canvas.register_for_cursor_drawing(fi) private_canvas.add(canvas) canvas.ui_setActive(True) self.drawtypes = canvas.get_drawtypes() self.drawtypes.sort() # add a color bar #fi.show_color_bar(True) fi.show_focus_indicator(True) # add little mode indicator that shows keyboard modal states fi.show_mode_indicator(True, corner='ur') w = fi.get_widget() w.set_size_request(512, 512) vbox.pack_start(w, fill=True, expand=True) self.readout = gtk.Label("") vbox.pack_start(self.readout, fill=True, expand=False) hbox = 
gtk.HBox(spacing=5) wdrawtype = GtkHelp.combo_box_new_text() index = 0 for name in self.drawtypes: wdrawtype.insert_text(index, name) index += 1 index = self.drawtypes.index('rectangle') wdrawtype.set_active(index) wdrawtype.connect('changed', self.set_drawparams) self.wdrawtype = wdrawtype wdrawcolor = GtkHelp.combo_box_new_text() index = 0 for name in self.drawcolors: wdrawcolor.insert_text(index, name) index += 1 index = self.drawcolors.index('lightblue') wdrawcolor.set_active(index) wdrawcolor.connect('changed', self.set_drawparams) self.wdrawcolor = wdrawcolor wfill = GtkHelp.CheckButton("Fill") wfill.sconnect('toggled', self.set_drawparams) self.wfill = wfill walpha = GtkHelp.SpinButton() adj = walpha.get_adjustment() adj.configure(0.0, 0.0, 1.0, 0.1, 0.1, 0) walpha.set_value(1.0) walpha.set_digits(1) walpha.sconnect('value-changed', self.set_drawparams) self.walpha = walpha wclear = gtk.Button("Clear Canvas") wclear.connect('clicked', self.clear_canvas) wopen = gtk.Button("Open File") wopen.connect('clicked', self.open_file) wquit = gtk.Button("Quit") wquit.connect('clicked', quit) for w in (wquit, wclear, walpha, gtk.Label("Alpha:"), wfill, wdrawcolor, wdrawtype, wopen): hbox.pack_end(w, fill=False, expand=False) vbox.pack_start(hbox, fill=False, expand=
False) roo
t.add(vbox) def get_widget(self): return self.root def set_drawparams(self, w): index = self.wdrawtype.get_active() kind = self.drawtypes[index] index = self.wdrawcolor.get_active() fill = self.wfill.get_active() alpha = self.walpha.get_value() params = { 'color': self.drawcolors[index], 'alpha': alpha, #'cap': 'ball', } if kind in ('circle', 'rectangle', 'polygon', 'triangle', 'righttriangle', 'ellipse', 'square', 'box'): params['fill'] = fill params['fillalpha'] = alpha self.canvas.set_drawtype(kind, **params) def clear_canvas(self, w): self.canvas.delete_all_objects() def load_file(self, filepath): image = AstroImage.AstroImage(logger=self.logger) image.load_file(filepath) self.fitsimage.set_image(image) self.root.set_title(filepath) def open_file(self, w): self.select.popup("Open FITS file", self.load_file) def drop_file(self, fitsimage, paths): fileName = paths[0] self.load_file(fileName) def motion(self, fitsimage, button, data_x, data_y): # Get the value under the data coordinates try: #value = fitsimage.get_data(data_x, data_y) # We report the value across the pixel, even though the coords # change halfway across the pixel value = fitsimage.get_data(int(data_x+0.5), int(data_y+0.5)) except Exception: value = None fits_x, fits_y = data_x + 1, data_y + 1 # Calculate WCS RA try: # NOTE: image function operates on DATA space coords image = fitsimage.get_image() if image is None: # No image loaded return ra_txt, dec_txt = image.pixtoradec(fits_x, fits_y, format='str', coords='fits') except Exception as e: self.logger.warning("Bad coordinate conversion: %s" % ( str(e))) ra_txt = 'BAD WCS' dec_txt = 'BAD WCS' text = "RA: %s DEC: %s X: %.2f Y: %.2f Value: %s" % ( ra_txt, dec_txt, fits_x, fits_y, value) self.readout.set_text(text) def quit(self, w): gtk.main_quit() return True def main(options, args): logger = log.get_logger("example2", options=options) # Check whether user wants to use OpenCv if options.opencv: from ginga import trcalc try: trcalc.use('opencv') 
except Exception as e: logger.warning("failed to set OpenCv preference: %s" % (str(e))) # Check whether user wants to use OpenCL elif options.opencl: from ginga import trcalc try: trcalc.use('opencl') except Exception as e: logger.warning("failed to set OpenCL preference: %s" % (str(e))) fv = FitsViewer(logger) root = fv.get_widget() root.show_all() if len(args) > 0: fv.load_file(args[0]) gtk.main() if __name__ == "__main__": # Parse command line options with nifty optparse module from optparse import OptionParser usage = "usage: %prog [options] cmd [args]" optprs = OptionParser(usage=usage, version=('%%prog')) optprs.add_option("--debug", dest="debug", default=False, action="store_true", help="Enter the pdb debugger on main()") optprs.add_option("--opencv", dest="opencv", default=False, action="store_true", help="Use OpenCv acceleration") optprs.add_option("--opencl", dest="opencl", default=False, action="store_true", help="Use OpenCL acceleration") optprs.add_option("--profile", dest="profile", action="store_true", default=False, help="Run the profiler on main()") log.addlogopts(optprs) (options, args) = optprs.parse_args(sys.argv[1:]) # Are we debu
# (c) 2016 Red Hat Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import json import os from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name)
if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except: pas
s fixture_data[path] = data return data class TestMlnxosModule(ModuleTestCase): def execute_module(self, failed=False, changed=False, commands=None, is_updates=False, sort=True, transport='cli'): self.load_fixtures(commands, transport=transport) if failed: result = self.failed() self.assertTrue(result['failed'], result) else: result = self.changed(changed) self.assertEqual(result['changed'], changed, result) if commands is not None: if is_updates: commands_res = result.get('updates') else: commands_res = result.get('commands') if sort: self.assertEqual(sorted(commands), sorted(commands_res), commands_res) else: self.assertEqual(commands, commands_res, commands_res) return result def failed(self): with self.assertRaises(AnsibleFailJson) as exc: self.module.main() result = exc.exception.args[0] self.assertTrue(result['failed'], result) return result def changed(self, changed=False): with self.assertRaises(AnsibleExitJson) as exc: self.module.main() result = exc.exception.args[0] self.assertEqual(result['changed'], changed, result) return result def load_fixtures(self, commands=None, transport='cli'): pass
"""Various useful tools.""" import copy import datetime import logging # FIXME: temporary backward compatibility from eaf.core import Vec3 as Point LOG_FORMAT = ( "[%(asctime)s] %(levelname)-8s %(name)s[%(funcName)s]:%(lineno)s: " "%(message)s" ) """Log message format string.""" TIME_FORMAT = "%H:%M:%S,%03d" """Log time format string.""" DATE_FORMAT = "%Y-%m-%d %a" """Initial log entry date format string.""" def setup_logger(name, debug=False, msgfmt=None, timefmt=None): """Setup logger with linked log file. Do not use it for getting logger, call this once on init, then use logging.getLogger(__name__) for getting actual logger. :param str name: logger relative name :param bool debug: debug mode :param str msgfmt: message format :param str timefmt: time format :return: prepared logger instance :rtype: `logging.Logger` """ logger = logging.getLogger(name) logger.propagate = False level = logging.DEBUG if debug else logging.INFO logger.setLevel(level) handler = logging.FileHandler("{0}.log".format(name)) handler.setLevel(level) formatter = logging.Formatter(msgfmt or LOG_FORMAT, timefmt or TIME_FORMAT) handler.setFormatter(formatter) logger.addHandler(handler) date = datetime.date.today().strftime(DATE_FORMAT) logger.info("*** (%s) Initializing XOInvader ***", date) return logger def clamp(val, min_val, max_val): """Clamp value between boundaries.""" if max_val < min_val: raise ValueError("max_val must be >= min
_val") return min(max(val, min_val), max_val) class dotdict(dict): # pylint: disable=invalid-name """Container for dot element
s access.""" def __init__(self, *args, **kwargs): super(dotdict, self).__init__(*args, **kwargs) self.__dict__ = self self._wrap_nested() def _wrap_nested(self): """Wrap nested dicts for deep dot access.""" for key, value in self.items(): if isinstance(value, dict): self[key] = dotdict(value) def fullcopy(self): """Return full copy of internal structure as dotdict. :return :class:`xoinvader.utils.dotdict`: full copy """ return dotdict(copy.deepcopy(self)) class InfiniteList(list): """Infinite list container.""" def __init__(self, *args, **kwargs): super(InfiniteList, self).__init__(*args, **kwargs) self._index = 0 def select(self, index: int) -> object: """Set index and return selected element.""" if not len(self): raise IndexError("List is empty") if not (0 <= index < len(self)): raise IndexError("Index out of bounds.") self._index = index return self[self._index] def current(self) -> object: """Return current element.""" return self[self._index] def next(self) -> object: """Select next element and return it.""" try: self._index = (self._index + 1) % len(self) except ZeroDivisionError: raise IndexError("List is empty.") return self[self._index] def prev(self) -> object: """Select previous element and return it.""" try: self._index = (self._index - 1) % len(self) except ZeroDivisionError: raise IndexError("List is empty.") return self[self._index]
# -*- coding: utf-8 -*-
import json

from flask import jsonify
from flask import render_template, request, url_for, redirect

import time, random


# ---------------------------------------------------------------------------
def get_desktop_items_data():
    """Return the desktop icon descriptors serialized as a JSON array."""
    items = [
        {'title': 'OS/2 System',
         'icon': '/appmedia/imgs/system_folder.png',
         'left': '0px', 'top': '40px',
         'action': '/system_folder/'},
        {'title': 'Information',
         'icon': '/appmedia/imgs/help.png',
         'left': '0px', 'top': '120px',
         'action': '/appmedia/help/desktop.html'},
        {'title': 'Virtual PC',
         'icon': '/appmedia/imgs/system/minimized.png',
         'left': '0px', 'top': '200px',
         'action': '/'},
        {'title': 'WebExplorer',
         'icon': '/appmedia/imgs/web/explore.gif',
         'left': '0px', 'top': '280px',
         'action': '/webexplorer/'},
        {'title': 'WIN-OS/2 Window',
         'icon': '/appmedia/imgs/cmd/win_wnd.png',
         'left': '0px', 'top': '360px',
         'action': '/cmd/?cmd=win_wnd', 'app': 'yes'},
        {'title': 'Solitaire',
         'icon': '/appmedia/imgs/files/sol.jpg',
         'left': '0px', 'top': '440px',
         'action': 'http://www.webolog.com/online_games/solitaire/loaderwm.swf',
         'app': 'yes'},
    ]
    return json.dumps(items)


# ---------------------------------------------------------------------------
def get_lanchpad_data():
    """Render the launchpad panel."""
    return render_template("lanchpad.html")


# ---------------------------------------------------------------------------
def get_window_data():
    """Render a window frame hosting an iframe with the requested source."""
    args = request.args
    title = args.get("title", "")
    src = args.get("src", "")
    width = args.get("width", "634")
    height = args.get("height", "450")

    # Window ids are derived from the current timestamp.
    win_id = int(time.time())

    # "win_*" sources get the WIN-OS/2 chrome instead of the default frame.
    template = ("pm/win_window.html" if "win_" in src
                else "pm/base_window.html")

    return render_template(
        template,
        title=title,
        src=src,
        win_id=win_id,
        wnd_left=random.randint(120, 300),
        wnd_top=random.randint(20, 100),
        width=width,
        height=height,
    )


# ---------------------------------------------------------------------------
def get_dialog_data():
    """Render the dialog named by the ``dlg`` query parameter."""
    dlg = request.args.get("dlg", "")
    title = request.args.get("title", "")
    win_id = int(time.time())

    return render_template(
        "dialogs/%s.html" % dlg,
        title=title,
        dlg=dlg,
        win_id=win_id,
        wnd_left=400,
        wnd_top=300,
        width=290,
        height=150,
    )
# ---------------------------------------------------------------------------
#!/usr/bin/env python3
"""Serve the current directory (with CGI support) over HTTP."""
from http.server import HTTPServer, CGIHTTPRequestHandler

port = 8000

# Bind to every interface; CGIHTTPRequestHandler additionally executes
# scripts found under cgi-bin/.
httpd = HTTPServer(('', port), CGIHTTPRequestHandler)
print(f"Starting simple_httpd on port: {httpd.server_port}")
httpd.serve_forever()
""" Unittest for time.strftime """ import calendar import sys import os import re from test import test_support import time import unittest # helper functions def fixasctime(s): if s[8] == ' ': s = s[:8] + '0' + s[9:] return s def escapestr(text, ampm): """ Escape text to deal with possible locale values that have regex syntax while allowing regex syntax used for comparison. """ new_text = re.escape(text) new_text = new_text.replace(re.escape(ampm), ampm) new_text = new_text.replace('\%', '%') new_text = new_text.replace('\:', ':') new_text = new_text.replace('\?', '?') return new_text class StrftimeTest(unittest.TestCase): def __init__(self, *k, **kw): unittest.TestCase.__init__(self, *k, **kw) def _update_variables(self, now): # we must update the local variables on every cycle self.gmt = time.gmtime(now) now = time.localtime(now) if now[3] < 12: self.ampm='(AM|am)' else: self.ampm='(PM|pm)' self.jan1 = time.localtime(time.mktime((now[0], 1, 1, 0, 0, 0, 0, 1, 0))) try: if now[8]: self.tz = time.tzname[1] else: self.tz = time.tzname[0] except AttributeError: self.tz = '' if now[3] > 12: self.clock12 = now[3] - 12 elif now[3] > 0: self.clock12 = now[3] else: self.clock12 = 12 self.now = now def setUp(self): try: import java java.util.Locale.setDefault(java.util.Locale.US) except ImportError: import locale locale.setlocale(locale.LC_TIME, 'C') def test_strftime(self): now = time.time() self._update_variables(now) self.strftest1(now) self.strftest2(now) if test_support.verbose: print "Strftime test, platform: %s, Python version: %s" % \ (sys.platform, sys.version.split()[0]) for j in range(-5, 5): for i in range(25): arg = now + (i+j*100)*23*3603 self._update_variables(arg) self.strftest1(arg) self.strftest2(arg) def strftest1(self, now): if test_support.verbose: print "strftime test for", time.ctime(now) now = self.now # Make sure any characters that could be taken as regex syntax is # escaped in escapestr() expectations = ( ('%a', calendar.day_abbr[now[6]], 
'abbreviated weekday name'), ('%A', calendar.day_name[now[6]], 'full weekday name'), ('%b', calendar.month_abbr[now[1]], 'abbreviated month name'), ('%B', calendar.month_name[now[1]], 'full month name'), # %c see below ('%d', '%02d' % now[2], 'day of month as number (00-31)'), ('%H', '%02d' % now[3], 'hour (00-23)'), ('%I', '%02d' % self.clock12, 'hour (01-12)'), ('%j', '%03d' % now[7], 'julian day (001-366)'), ('%m', '%02d' % now[1], 'month as number (01-12)'), ('%M', '%02d' % now[4], 'minute, (00-59)'), ('%p', self.ampm, 'AM or PM as appropriate'), ('%S', '%02d' % now[5], 'seconds of current time (00-60)'), ('%U', '%02d' % ((now[7] + self.jan1[6])//7), 'week number of the year (Sun 1st)'), ('%w', '0?%d' % ((1+now[6]) % 7), 'weekday as a number (Sun 1st)'), ('%W', '%02d' % ((now[7] + (self.jan1[6] - 1)%7)//7), 'week number of the year (Mon 1st)'),
# %x see below ('%X', '%02d:%02d:%02d' % (now[3], now[4], now[5]), '%H:%M:%S'), ('%y', '%02d' % (now[0]%100), 'year without century'), ('%Y', '%d' % now[0], 'year with century'), # %Z see below ('%%', '%', 'single percent sign'), ) for e in expectations: # musn't raise a
value error try: result = time.strftime(e[0], now) except ValueError, error: self.fail("strftime '%s' format gave error: %s" % (e[0], error)) if re.match(escapestr(e[1], self.ampm), result): continue if not result or result[0] == '%': self.fail("strftime does not support standard '%s' format (%s)" % (e[0], e[2])) else: self.fail("Conflict for %s (%s): expected %s, but got %s" % (e[0], e[2], e[1], result)) def strftest2(self, now): nowsecs = str(long(now))[:-1] now = self.now nonstandard_expectations = ( # These are standard but don't have predictable output ('%c', fixasctime(time.asctime(now)), 'near-asctime() format'), ('%x', '%02d/%02d/%02d' % (now[1], now[2], (now[0]%100)), '%m/%d/%y %H:%M:%S'), ('%Z', '%s' % self.tz, 'time zone name'), # These are some platform specific extensions ('%D', '%02d/%02d/%02d' % (now[1], now[2], (now[0]%100)), 'mm/dd/yy'), ('%e', '%2d' % now[2], 'day of month as number, blank padded ( 0-31)'), ('%h', calendar.month_abbr[now[1]], 'abbreviated month name'), ('%k', '%2d' % now[3], 'hour, blank padded ( 0-23)'), ('%n', '\n', 'newline character'), ('%r', '%02d:%02d:%02d %s' % (self.clock12, now[4], now[5], self.ampm), '%I:%M:%S %p'), ('%R', '%02d:%02d' % (now[3], now[4]), '%H:%M'), ('%s', nowsecs, 'seconds since the Epoch in UCT'), ('%t', '\t', 'tab character'), ('%T', '%02d:%02d:%02d' % (now[3], now[4], now[5]), '%H:%M:%S'), ('%3y', '%03d' % (now[0]%100), 'year without century rendered using fieldwidth'), ) for e in nonstandard_expectations: try: result = time.strftime(e[0], now) except ValueError, result: msg = "Error for nonstandard '%s' format (%s): %s" % \ (e[0], e[2], str(result)) if test_support.verbose: print msg continue if re.match(escapestr(e[1], self.ampm), result): if test_support.verbose: print "Supports nonstandard '%s' format (%s)" % (e[0], e[2]) elif not result or result[0] == '%': if test_support.verbose: print "Does not appear to support '%s' format (%s)" % \ (e[0], e[2]) else: if test_support.verbose: print "Conflict 
for nonstandard '%s' format (%s):" % \ (e[0], e[2]) print " Expected %s, but got %s" % (e[1], result) def test_main(): test_support.run_unittest(StrftimeTest) if __name__ == '__main__': test_main()
# pylint: disable=I0011,C0301 from __future__ import absolute_import, unicode_literals import os from setuptools import find_packages, setup from namespaced_session import __version__ with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.a
bspath(__file__), os.pard
ir))) setup( name='django-namespaced-session', version=__version__, packages=find_packages(exclude=['tests']), include_package_data=True, test_suite="runtests.main", license='MIT', description='Django app which makes it easier to work with dictionaries in sessions', long_description=README, url='https://github.com/ckot/django-namespaced-session/', author='Scott Silliman', author_email='scott.t.silliman@gmail.com', classifiers=[ 'Development Status :: 4 - Beta', 'Framework :: Django', 'Framework :: Django :: 1.7', 'Framework :: Django :: 1.8' 'Framework :: Django :: 1.9' 'Framework :: Django :: 1.10', 'Intended Audience :: Developers', 'License :: OSI Approved', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7' ], )
test = { 'name': '', 'points': 1, 'suites': [ { 'cases': [ { 'code': r""" >>> type(imdb_by_year) == tables.Table True
>>> imdb_by_year.column('Title').take(range(3)) array(['The Kid (1921)', 'The Gold Rush (1925
)', 'The General (1926)'], dtype='<U75') """, 'hidden': False, 'locked': False }, ], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest' } ] }
. By default this will create: * User `testuser` with password `testpassword` * User `certuser` with password `certpass` * Two security sources * Permissions on * riak_kv.get * riak_kv.put * riak_kv.delete * riak_kv.index * riak_kv.list_keys * riak_kv.list_buckets * riak_kv.mapreduce * riak_core.get_bucket * riak_core.set_bucket * riak_core.get_bucket_type * riak_core.set_bucket_type * search.admin * search.query """ description = "create security settings used in integration tests" user_options = [ ('riak-admin=', None, 'path to the riak-admin script'), ('username=', None, 'test user account'), ('password=', None, 'password for test user account'), ('certuser=', None, 'certificate test user account'), ('certpass=', None, 'password for certificate test user account') ] _commands = [ "add-user $USERNAME password=$PASSWORD", "add-source $USERNAME 127.0.0.1/32 password", "add-user $CERTUSER password=$CERTPASS", "add-source $CERTUSER 127.0.0.1/32 certificate" ] _grants = { "riak_kv.get": ["any"], "riak_kv.put": ["any"], "riak_kv.delete": ["any"], "riak_kv.index": ["any"], "riak_kv.list_keys": ["any"], "riak_kv.list_buckets": ["any"], "riak_kv.mapreduce": ["any"], "riak_core.get_bucket": ["any"], "riak_core.set_bucket": ["any"], "riak_core.get_bucket_type": ["any"], "riak_core.set_bucket_type": ["any"], "search.admin": ["index", "schema"], "search.query": ["index", "schema"] } def initialize_options(self): self.riak_admin = None self.username = None self.password = None self.certuser = None self.certpass = None def finalize_options(self): if self.riak_admin is None: raise DistutilsOptionError("riak-admin option not set") if self.username is None: self.username = 'testuser' if self.password is None: self.password = 'testpassword' if self.certuser is None: self.certuser = 'certuser' if self.certpass is None: self.certpass = 'certpass' def run(self): if self._check_available(): for cmd in self._commands: # Replace the username and password if specified s = Template(cmd) 
newcmd = s.substitute(USERNAME=self.username, PASSWORD=self.password, CERTUSER=self.certuser, CERTPASS=self.certpass) log.info("Security command: {0}".format(repr(newcmd))) self.run_security_command(tuple(newcmd.split(' '))) for perm in self._grants: self._apply_grant(perm, self._grants[perm]) def _check_available(self): try: self.check_security_command("status") return True except CalledProcessError: log.error("Security is not supported on this Riak node!") return False def _apply_grant(self, perm, targets): for target in targets: cmd = ["grant", perm, "on", target, "to", self.username] log.info("Granting permission {0} on {1} to {2}" .format(repr(perm), repr(target), repr(self.username))) self.run_security_command(cmd) cmd = ["grant", perm, "on", target, "to", self.certuser] log.info("Granting permission {0} on {1} to {2}" .format(repr(perm), repr(target), repr(self.certuser))) self.run_security_command(cmd) class enable_security(Command, security_commands): """ Actually turn on security. """ description = "turn on security within Riak" user_options = [ ('riak-admin=', None, 'path to the riak-admin script'), ] def initialize_options(self): self.riak_admin = None def finalize_options(self): if self.riak_admin is None: raise DistutilsOptionError("riak-admin option not set") def run(self): cmd = "enable" self.run_security_command(tuple(cmd.split(' '))) class disable_security(Command, security_commands): """ Actually turn off security. """ description = "turn off security within Riak" user_options = [ ('riak-admin=', None, 'path to the riak-admin script'), ] def initialize_options(self): self.riak_admin = None def finalize_options(self): if self.riak_admin is None: raise DistutilsOptionError("riak-admin option not set") def run(self): cmd = "disable" self.run_security_command(tuple(cmd.split(' '))) class preconfigure(Command): """ Sets up security configuration. 
* Update these lines in riak.conf * storage_backend = leveldb * search = on * listener.protobuf.internal = 127.0.0.1:8087 * listener.http.internal = 127.0.0.1:8098 * listener.https.internal = 127.0.0.1:18098 * ssl.certfile = $pwd/tests/resources/server.crt * ssl.keyfile = $pwd/tests/resources/server.key * ssl.cacertfile = $pwd/tests/resources/ca.crt * check_crl = off """ description = "preconfigure security settings used in integration tests" user_options = [ ('riak-conf=', None, 'path to the riak.conf file'), ('host=', None, 'IP of host running Riak'), ('pb-port=', None, 'protocol buffers port number'), ('https-port=', None, 'https port number') ] def initialize_options(self): self.riak_conf = None self.host = "127.0.0.1" self.pb_port = "8087" self.http_port = "8098" self.https_port = "18098" def finalize_options(self): if self.riak_conf is None: raise DistutilsOptionError("riak-conf option not set") def run(self): self.cert_dir = os.path.dirname(os.path.realpath(__file__)) + \ "/riak/tests/resources" self._update_riak_conf() def _update_riak_conf(self): http_host = self.host + ':' + self.http_port https_host = self.host + ':' + self.https_port pb_host = self.host + ':' + self.pb_port self._backup_file(self.riak_conf) f = open(self.riak_conf, 'r', buffering=1) conf = f.read() f.close() conf = re.sub(r'search\s+=\s+off', r'search = on', conf) conf = re.sub(r'##[ ]+ssl\.', r'ssl.', conf) conf = re.sub(r'ssl.certfile\s+=\s+\S+', r'ssl.certfile = ' + self.cert_dir + '/server.crt',
conf) conf = re.sub(r'storage_backend\s+=\s+\S+', r'storage_backend = leveldb', conf) conf = re.sub(r's
sl.keyfile\s+=\s+\S+', r'ssl.keyfile = ' + self.cert_dir + '/server.key', conf) conf = re.sub(r'ssl.cacertfile\s+=\s+\S+', r'ssl.cacertfile = ' + self.cert_dir + '/ca.crt', conf) conf = re.sub(r'#*[ ]*listener.http.internal\s+=\s+\S+', r'listener.http.internal = ' + http_host, conf) conf = re.sub(r'#*[ ]*listener.https.internal\s+=\s+\S+', r'listener.https.internal = ' + https_host, conf) conf = re.sub(r'listener.protobuf.internal\s+=\s+\S+', r'listener.protobuf.internal = ' + pb_host, conf) conf += 'check_crl = off\n' # Older versions of OpenSSL client library need to match on the server conf += 'tls_protocols.tlsv1 = on\n' conf += 'tls_protocols.tlsv1.1 = on\n' f = open(self.riak_conf, 'w', buffering=1) f.write(conf) f.close() def _backup_file(self, name): backup = name + ".bak"
# -*- coding: utf-8 -*-
# Copyright 2015-2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).

from odoo.addons.connector_carepoint.unit import mapper

from .common import SetUpCarepointBase


class TestCarepointImporterMapper(SetUpCarepointBase):
    """Exercise the direct mappings of ``CarepointImportMapper``."""

    def setUp(self):
        super(TestCarepointImporterMapper, self).setUp()
        self.Importer = mapper.CarepointImportMapper
        self.model = 'carepoint.carepoint.store'
        self.mock_env = self.get_carepoint_helper(self.model)
        self.importer = self.Importer(self.mock_env)

    def test_backend_id(self):
        """ It should map backend_id correctly """
        expected = {'backend_id': self.importer.backend_record.id}
        self.assertDictEqual(expected, self.importer.backend_id(True))

    def test_company_id(self):
        """ It should map company_id correctly """
        expected = {'company_id': self.importer.backend_record.company_id.id}
        self.assertDictEqual(expected, self.importer.company_id(True))
r_node(node) if item is None: return parent_index = self.index_for_item(item.parent_item) if parent_index is None: return row = item.row() self.removeRow(row, parent_index) self.insertRow(row, parent_index, node) # CK: This is a pretty ineffective method of finding the node <-> item mapping. # A dictionary mapping would be better. def _item_for_node(self, parent_item, node): ''' Depth first search for the XmlItem containing a given node. @param parent_item (XmlItem): parent of nodes to scan. @param node (Element): the node to locate @return: the found node (Element) if found, None otherwise ''' for child_item in parent_item.child_items: if child_item.node is node: return child_item found_item = self._item_for_node(child_item, node) if found_item is not None: return found_item return None def item_for_node(self, node): ''' Return the item for a given node. @param node (Element): The node to locate. @return: The item containing the given node (XmlItem) or None ''' return self._item_for_node(self._root_item, node) def index_for_node(self, node): ''' Return the qt index for a given node. @param node (Element): The node to locate. @return: The item containing the given node (XmlItem) or None ''' item = self._item_for_node(self._root_item, node) return self.index_for_item(item) def add_node(self, parent_node, node): ''' Adds a child node (may contain a subtree) to a given parent node and updates the model. For efficient insertion of entire trees; first construct the subtree to insert using ElementTree, and then call this method once with the root node for it. @param parent_node (Element): parent node @param node (Element): node to insert ''' parent_item = self.item_for_node(parent_node) parent_index = self.index_for_item(parent_item) self.insertRow(0, parent_index, node) if self.project: self.project.dirty = True # TODO update comments to xml 2.0 def insert_node(self, node, parent_node): ''' Insert a node into the XML and into the model. 
This method automatically finds the qt index for the parent index so that the item can be inserted. @param node (Element): node to insert @param parent_node (Element): Parent node to append @node @return: True if the node was inserted ''' parent_item = self.item_for_node(parent_node) if parent_item is None: msg = ('Tried to insert a node under <%s>, but that node is not in this XmlModel' % parent_node.tag) return (False, msg) parent_index = self.index_for_item(parent_item) if parent_index is not None: self.insertRow(0, parent_index, node) self.project.dirty = True return (True, 'OK') else: msg = ('Tried to insert a node under <%s>, but could not find its index.' % parent_node.tag) return (False, msg) def flags(self, index): ''' PyQt API Method -- See the PyQt documentation for a description ''' if not index.isValid(): return None node = index.internalPointer().node is_checkbox_node = node.tag == 'selectable' or node.get('type') == 'boolean' # Inherited nodes if node.get('inherited'): # inherited nodes are generally only selectable and enabled, with the exception # of checkboxes that are clickable even when they are inherited if is_checkbox_node: return (Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable) return Qt.ItemIsEnabled | Qt.ItemIsSelectable # Set flags on a per column basis if index.column() == 0: return Qt.ItemIsEnabled | Qt.ItemIsSelectable elif index.column() == 1: if is_checkbox_node: return (Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable) return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable # Unhandled index return QVariant() def headerData(self, section, orientation, role): ''' PyQt API Method -- See the PyQt documentation for a description ''' if orientation == Qt.Horizontal and role == Qt.DisplayRole: return QVariant(self._headers[section]) else: return QVariant() def index(self, row, column, parent_index = QModelIndex()): ''' PyQt API Method -- See the PyQt documentation for a description ''' if not 
parent_index.isValid(): parent_item = self._root_item else: parent_item = parent_index.internalPointer() child_item = parent_item.child_item(row) if child_item: return self.createIndex(row, column, child_item) else: return QModelIndex() def setData(self, index, value, role): ''' PyQt API Method -- See the PyQt documentation for a description ''' if not index.isValid(): return False item = index.internalPointer() node = item.node is_checkbox_node = node.tag == 'selectable' or node.get('type') == 'boolean' # only allow editing in second column if index.column() != 1: return False # user clicking on a checkbox if role == Qt.CheckStateRole and is_checkbox_node: # ask the users if they want to make inherited nodes local first if node.get('inherited'): title = 'Editing inherited node' msg = ("'%s' is inherited from a parent project. \n\n" "Do you want to make this node part of thi
s project " "so that you can edit it?" % node.get('name') or node.tag) b = (QMessageBox.Yes, QMessageBox.No) ans = QMessageBox.question(None, title, msg, *b) if ans == QMessageBox.Yes: self.make_item_local(i
tem) else: return False del title, msg, b, ans # Clean up namespace if value.toInt()[0] == Qt.Checked: value = QVariant('True') else: value = QVariant('False') # convert the value to a string and set the nodes text value value = value.toString() changed_value = node.text != value if changed_value: node.text = str(value) # avoid QString's in the xml self.dirty = True s = SIGNAL("dataChanged(const QModelIndex &, const QModelIndex &)") self.emit(s, index, index) return True def make_item_local(self, item): if not self.project: return self.project.make_local(item.node) def insertRow(self, row, parent_index, node, reinserting = False): ''' Insert a row into the data model @param row (int): row to insert into. @param parent_index (QModelIndex): index of parent item @param node (Element): node to insert @param reinserting (bool): if True; assume that the project has already reinserted the node and just insert it into the internal model. Also skip making it local after inserting. @return: True if the sibling was inserted, False otherwise ''' if row < 0 or row > self.rowCount(parent_index): return False self.emit(SIGNAL("layoutAboutToBeChanged()")) self.beginInsertRows(parent_index, row, row) # Get a valid parent_item if parent_index == QModelIndex(): parent_item = self._root_item else: parent_item = parent_index.internalPointer() parent_node = parent_item.node if se
from setuptools import setup

# Environments and Python versions this distribution advertises support for.
CLASSIFIERS = [
    'Environment :: Web Environment',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.5',
    'Topic :: Communications :: Email',
    'Topic :: Software Development :: Libraries :: Python Modules',
]

setup(
    name='quotequail',
    version='0.2.3',
    url='http://github.com/closeio/quotequail',
    license='MIT',
    author='Thomas Steinacher',
    author_email='engineering@close.io',
    maintainer='Thomas Steinacher',
    maintainer_email='engineering@close.io',
    description='A library that identifies quoted text in plain text and '
                'HTML email messages.',
    long_description=__doc__,
    packages=['quotequail'],
    test_suite='tests',
    tests_require=['lxml'],
    platforms='any',
    classifiers=CLASSIFIERS,
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import time


class CameraClass(object):
    """Thin camera abstraction used for target detection."""

    def __init__(self):
        super(CameraClass, self).__init__()

    def visible_target(self):
        """Return True when a target is visible (currently always True)."""
        return True


if __name__ == '__main__':
    # Manual smoke test: show the Raspberry Pi camera preview for ten
    # seconds, always releasing the camera. Silently skipped on machines
    # where the picamera package is not installed.
    try:
        from picamera import PiCamera

        camera = PiCamera()
        try:
            camera.start_preview()
            time.sleep(10)
            camera.stop_preview()
        finally:
            camera.close()
    except ImportError:
        pass
import sys
import socket

from PyQt5.QtWidgets import QApplication

from qt_DisplayWindow import DisplayWindow
from Server import Server


def main(camID):
    """Start the network server and the Qt display window for camera camID.

    Blocks in the Qt event loop; exits the process with the loop's
    return code once the window closes and the server thread joins.
    """
    host = socket.gethostname()
    # Last address reported for this host; used to bind the server.
    address = socket.gethostbyname_ex(host)[2][-1]
    print(host, address)
    port = 12349

    app = QApplication(sys.argv)
    server = Server(address, port)

    # Main display window polls the server for its current state.
    display = DisplayWindow(camID, server.get_state)
    display.show()

    # Wire server signals to the display's slots.
    server.selfie.connect(display.selfie)
    server.email.connect(display.email)
    server.status.connect(display.show_msg)
    server.start()

    ret = app.exec_()
    server.join()
    sys.exit(ret)


if __name__ == '__main__':
    main(0)
import os
from pathlib import Path

from PIL import Image

import pyconfig
import pydice


class ImageNotSupported(Exception):
    """Raised when no image asset exists for a die face."""


class BeardedDie:
    """Wrapper around a ``pydice`` die that can render itself as an image."""

    def __init__(self, die):
        self.die = die
        # Attach an image search path to pydice's Die class so to_image()
        # can resolve assets; overridable via the 'dicebeard.images_path'
        # config key, defaulting to the bundled images/ directory.
        configured = pyconfig.get('dicebeard.images_path')
        if configured:
            pydice.dice.Die.images_path = Path(configured)
        else:
            pydice.dice.Die.images_path = (
                Path(os.path.dirname(__file__)) / 'images')

    def __getattr__(self, attr):
        # Delegate unknown attributes to the wrapped pydice die.
        return getattr(self.die, attr)

    def to_image(self):
        """Return a PIL.Image of the die's current result, if possible.

        Raises ImageNotSupported when no asset exists for this die/result.
        """
        folder = 'd{}'.format(self.faces.stop - 1)
        filename = '{}.png'.format(self.result)
        asset = self.images_path / folder / filename
        try:
            return Image.open(str(asset))
        except FileNotFoundError:
            raise ImageNotSupported(
                '{} is not currently supported.'.format(self.name))
# encoding: utf-8
"""South schema migration: add the boolean ``auth`` flag to ``people.Group``."""
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        """Apply: create the ``auth`` column on ``people_group`` (default True)."""
        # Adding field 'Group.auth'
        db.add_column('people_group', 'auth', self.gf('django.db.models.fields.BooleanField')(default=True), keep_default=False)

    def backwards(self, orm):
        """Revert: drop the ``auth`` column from ``people_group``."""
        # Deleting field 'Group.auth'
        db.delete_column('people_group', 'auth')

    # Frozen ORM snapshot used by South to reconstruct the app's models at
    # migration time. Auto-generated; do not edit by hand.
    models = {
        'people.address': {
            'Meta': {'ordering': "('address1',)", 'object_name': 'Address'},
            'address1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'address2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'atype': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Household']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notes': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '25'})
        },
        'people.group': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Group'},
            'auth': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'desc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'gtype': ('django.db.models.fields.CharField', [], {'default': "'general'", 'max_length': '10'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'people.groupadmin': {
            'Meta': {'ordering': "('group__name', 'person__lname', 'person__fname')", 'object_name': 'GroupAdmin'},
            'can_send': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"})
        },
        'people.household': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Household'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'anniversary': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'barcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'first_visit': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'image_temp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.TempImage']", 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
        },
        'people.person': {
            'Meta': {'ordering': "('lname', 'fname')", 'object_name': 'Person'},
            'alerts': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'allergies': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'bdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'ddate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'fname': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'gender': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['people.Group']", 'null': 'True', 'blank': 'True'}),
            'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Household']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'image_temp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.TempImage']", 'null': 'True', 'blank': 'True'}),
            'lname': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'mname': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'role': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
        },
        'people.phone': {
            'Meta': {'ordering': "('person__lname', 'person__fname', 'number')", 'object_name': 'Phone'},
            'alerts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'number': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20'}),
            'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"}),
            'type1': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'}),
            'type2': ('django.db.models.fields.CharField', [], {'default': "'ns'", 'max_length': '10'})
        },
        'people.tempimage': {
            'Meta': {'ordering': "('-ts',)", 'object_name': 'TempImage'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['people']
from pprint import pprint

from base.models import Colaborador


def get_create_colaborador_by_user(user):
    """Return the Colaborador linked to ``user``, creating one if missing.

    New records derive matricula and cpf from ``72000 + user.id``.
    """
    try:
        return Colaborador.objects.get(user__username=user.username)
    except Colaborador.DoesNotExist:
        new_colab = Colaborador(
            user=user,
            matricula=72000 + user.id,
            cpf=72000 + user.id,
        )
        new_colab.save()
        return new_colab
"""Restful handlers and helpers for course checklists in Studio."""
import json
import copy
from util.json_request import JsonResponse
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from django.http import HttpResponseNotFound
from django.core.exceptions import PermissionDenied
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from contentstore.utils import reverse_course_url
from .access import has_course_access
from xmodule.course_module import CourseDescriptor
from django.utils.translation import ugettext

__all__ = ['checklists_handler']


# pylint: disable=unused-argument
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def checklists_handler(request, course_key_string, checklist_index=None):
    """
    The restful handler for checklists.

    GET
        html: return html page for all checklists
        json: return json representing all checklists.
              checklist_index is not supported for GET at this time.
    POST or PUT
        json: updates the checked state for items within a particular
              checklist. checklist_index is required.

    Raises PermissionDenied when the requesting user lacks course access.
    """
    course_key = CourseKey.from_string(course_key_string)
    if not has_course_access(request.user, course_key):
        raise PermissionDenied()

    course_module = modulestore().get_course(course_key)

    # Default to a JSON response when no Accept header is present.
    json_request = 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json')
    if request.method == 'GET':
        # If course was created before checklists were introduced, copy them over
        # from the template.
        if not course_module.checklists:
            course_module.checklists = CourseDescriptor.checklists.default
            modulestore().update_item(course_module, request.user.id)

        expanded_checklists = expand_all_action_urls(course_module)
        if json_request:
            return JsonResponse(expanded_checklists)
        else:
            handler_url = reverse_course_url('checklists_handler', course_key)
            return render_to_response('checklists.html',
                                      {
                                          'handler_url': handler_url,
                                          # context_course is used by analytics
                                          'context_course': course_module,
                                          'checklists': expanded_checklists
                                      })
    elif json_request:
        # Can now assume POST or PUT because GET handled above.
        if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
            index = int(checklist_index)
            persisted_checklist = course_module.checklists[index]
            modified_checklist = json.loads(request.body)
            # Only thing the user can modify is the "checked" state.
            # We don't want to persist what comes back from the client because it will
            # include the expanded action URLs (which are non-portable).
            # NOTE(review): assumes the client sends items in the same order and
            # count as the persisted checklist — an out-of-range index here
            # would raise IndexError; confirm against the client contract.
            for item_index, item in enumerate(modified_checklist.get('items')):
                persisted_checklist['items'][item_index]['is_checked'] = item['is_checked']
            # seeming noop which triggers kvs to record that the metadata is
            # not default
            course_module.checklists = course_module.checklists
            course_module.save()
            modulestore().update_item(course_module, request.user.id)
            expanded_checklist = expand_checklist_action_url(course_module, persisted_checklist)
            return JsonResponse(localize_checklist_text(expanded_checklist))
        else:
            return HttpResponseBadRequest(
                ("Could not save checklist state because the checklist index "
                 "was out of range or unspecified."),
                content_type="text/plain"
            )
    else:
        # Non-JSON POST/PUT is not supported.
        return HttpResponseNotFound()


def expand_all_action_urls(course_module):
    """
    Gets the checklists out of the course module and expands their action
    urls. Returns a copy of the checklists with modified urls, without
    modifying the persisted version of the checklists.
    """
    expanded_checklists = []
    for checklist in course_module.checklists:
        expanded_checklists.append(localize_checklist_text(expand_checklist_action_url(course_module, checklist)))
    return expanded_checklists


def expand_checklist_action_url(course_module, checklist):
    """
    Expands the action URLs for a given checklist and returns the modified
    version. The method does a copy of the input checklist and does not
    modify the input argument.
    """
    expanded_checklist = copy.deepcopy(checklist)
    # Known symbolic action names -> their URL handler names.
    urlconf_map = {
        "ManageUsers": "course_team_handler",
        "CourseOutline": "course_handler",
        "SettingsDetails": "settings_handler",
        "SettingsGrading": "grading_handler",
    }
    for item in expanded_checklist.get('items'):
        action_url = item.get('action_url')
        if action_url in urlconf_map:
            item['action_url'] = reverse_course_url(urlconf_map[action_url], course_module.id)
    return expanded_checklist


def localize_checklist_text(checklist):
    """
    Localize texts for a given checklist and returns the modified version.
    The method does an in-place operation so the input checklist is
    modified directly.
    """
    # Localize checklist name
    checklist['short_description'] = ugettext(checklist['short_description'])
    # Localize checklist items
    for item in checklist.get('items'):
        item['short_description'] = ugettext(item['short_description'])
        item['long_description'] = ugettext(item['long_description'])
        # Guard against empty strings: ugettext("") returns gettext metadata.
        item['action_text'] = ugettext(item['action_text']) if item['action_text'] != "" else u""
    return checklist
"""Write an identity matrix to ``<out_dir>/R.txt``.

Usage: python <script> OUT_DIR SIZE
"""
import sys

import numpy as np


def write_identity(out_dir, size):
    """Save a ``size`` x ``size`` identity matrix as text to ``out_dir/R.txt``.

    Args:
        out_dir: directory (string path) where R.txt is written.
        size: matrix dimension; coerced with int() so strings are accepted.
    """
    R = np.eye(int(size))
    np.savetxt(out_dir + '/R.txt', R)


if __name__ == "__main__":
    # argv[1] = output directory, argv[2] = matrix dimension (same CLI as
    # before, but the module is now importable without side effects).
    write_identity(sys.argv[1], sys.argv[2])
"""Faster R-CNN (DCN ResNet-50 C4 backbone) training/testing configuration."""
from symbol.builder import FasterRcnn as Detector
from models.dcn.builder import DCNResNetC4 as Backbone
from symbol.builder import Neck
from symbol.builder import RpnHead
from symbol.builder import RoiAlign as RoiExtractor
from symbol.builder import BboxC5V1Head as BboxHead
from mxnext.complicate import normalizer_factory


def get_config(is_train):
    """Build the full config for training (is_train=True) or testing.

    Returns the parameter namespaces, network symbols, data pipeline
    transforms, input/label names and metric list consumed by the runner.
    """
    class General:
        log_frequency = 10
        name = __name__.rsplit("/")[-1].rsplit(".")[-1]
        batch_image = 2 if is_train else 1
        fp16 = False

    class KvstoreParam:
        kvstore = "local"
        batch_image = General.batch_image
        gpus = [0, 1, 2, 3, 4, 5, 6, 7]
        fp16 = General.fp16

    class NormalizeParam:
        # normalizer = normalizer_factory(type="syncbn", ndev=len(KvstoreParam.gpus))
        normalizer = normalizer_factory(type="fixbn")

    class BackboneParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        depth = 50
        num_c3_block = 4
        num_c4_block = 6

    class NeckParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer

    class RpnParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        batch_image = General.batch_image

        class anchor_generate:
            scale = (2, 4, 8, 16, 32)
            ratio = (0.5, 1.0, 2.0)
            stride = 16
            image_anchor = 256

        class head:
            conv_channel = 512
            mean = (0, 0, 0, 0)
            std = (1, 1, 1, 1)

        class proposal:
            pre_nms_top_n = 12000 if is_train else 6000
            post_nms_top_n = 2000 if is_train else 300
            nms_thr = 0.7
            min_bbox_side = 0

        class subsample_proposal:
            proposal_wo_gt = False
            image_roi = 512
            fg_fraction = 0.25
            fg_thr = 0.5
            bg_thr_hi = 0.5
            bg_thr_lo = 0.0

        class bbox_target:
            num_reg_class = 2
            class_agnostic = True
            weight = (1.0, 1.0, 1.0, 1.0)
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class BboxParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        num_class = 1 + 80
        image_roi = 512
        batch_image = General.batch_image

        class regress_target:
            class_agnostic = True
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class RoiParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        out_size = 7
        stride = 16

    class DatasetParam:
        if is_train:
            image_set = ("coco_train2017", )
        else:
            image_set = ("coco_val2017", )

    # Assemble the detector from its building blocks.
    backbone = Backbone(BackboneParam)
    neck = Neck(NeckParam)
    rpn_head = RpnHead(RpnParam)
    roi_extractor = RoiExtractor(RoiParam)
    bbox_head = BboxHead(BboxParam)
    detector = Detector()
    if is_train:
        train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
        rpn_test_sym = None
        test_sym = None
    else:
        train_sym = None
        rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head)
        test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)

    class ModelParam:
        train_symbol = train_sym
        test_symbol = test_sym
        rpn_test_symbol = rpn_test_sym

        from_scratch = False
        random = True
        memonger = False
        memonger_until = "stage3_unit21_plus"

        class pretrain:
            prefix = "pretrain_model/resnet%s_v1b" % BackboneParam.depth
            epoch = 0
            fixed_param = ["conv0", "stage1", "gamma", "beta"]

    class OptimizeParam:
        class optimizer:
            type = "sgd"
            # Base lr 0.01 per 8 images, scaled linearly with global batch.
            lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
            momentum = 0.9
            wd = 0.0001
            clip_gradient = 35

        class schedule:
            begin_epoch = 0
            end_epoch = 6
            lr_iter = [60000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
                       80000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]

        class warmup:
            type = "gradual"
            lr = 0.0
            iter = 1000

    class TestParam:
        min_det_score = 0.05
        max_det_per_image = 100

        process_roidb = lambda x: x
        process_output = lambda x, y: x

        class model:
            prefix = "experiments/{}/checkpoint".format(General.name)
            epoch = OptimizeParam.schedule.end_epoch

        class nms:
            type = "nms"
            thr = 0.5

        class coco:
            annotation = "data/coco/annotations/instances_minival2014.json"

    # data processing
    class NormParam:
        mean = tuple(i * 255 for i in (0.485, 0.456, 0.406))  # RGB order
        std = tuple(i * 255 for i in (0.229, 0.224, 0.225))

    class ResizeParam:
        short = 800
        long = 1200 if is_train else 2000

    class PadParam:
        short = 800
        long = 1200
        max_num_gt = 100

    class AnchorTarget2DParam:
        class generate:
            short = 800 // 16
            long = 1200 // 16
            stride = 16
            scales = (2, 4, 8, 16, 32)
            aspects = (0.5, 1.0, 2.0)

        class assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0

        class sample:
            image_anchor = 256
            pos_fraction = 0.5

    class RenameParam:
        mapping = dict(image="data")

    from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
        ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
        RenameRecord, AnchorTarget2D, Norm2DImage

    # Training additionally flips/pads images and generates anchor targets.
    if is_train:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            Flip2DImageBbox(),
            Pad2DImageBbox(PadParam),
            ConvertImageFromHwcToChw(),
            AnchorTarget2D(AnchorTarget2DParam),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data", "im_info", "gt_bbox"]
        label_name = ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
    else:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            ConvertImageFromHwcToChw(),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data", "im_info", "im_id", "rec_id"]
        label_name = []

    import core.detection_metric as metric

    rpn_acc_metric = metric.AccWithIgnore(
        "RpnAcc",
        ["rpn_cls_loss_output"],
        ["rpn_cls_label"]
    )
    rpn_l1_metric = metric.L1(
        "RpnL1",
        ["rpn_reg_loss_output"],
        ["rpn_cls_label"]
    )
    # for bbox, the label is generated in network so it is an output
    box_acc_metric = metric.AccWithIgnore(
        "RcnnAcc",
        ["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
        []
    )
    box_l1_metric = metric.L1(
        "RcnnL1",
        ["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
        []
    )
    metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]

    return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
        ModelParam, OptimizeParam, TestParam, \
        transform, data_name, label_name, metric_list
# -*- coding: utf-8 -*-
"""
    :copyright: 2005-2008 by The PIDA Project
    :license: GPL 2 or later (see README/COPYING/LICENSE)
"""

import gtk

from pygtkhelpers.delegates import SlaveView

# locale
from pida.core.locale import Locale
locale = Locale('pida')
_ = locale.gettext


class PidaView(SlaveView):
    """Base class for PIDA views hosted in a dock pane."""

    # Set this to make your views memorable.
    key = None

    # Default icon/label; subclasses or __init__ arguments override these.
    icon_name = gtk.STOCK_INFO
    label_text = _('Pida View')

    pane = None

    def create_ui(self):
        """Create the user interface here"""

    def create_tab_label_icon(self):
        """Return a menu-sized gtk image for this view's tab label."""
        return gtk.image_new_from_stock(self.icon_name, gtk.ICON_SIZE_MENU)

    def get_parent_window(self):
        """Return the gdk window of this view's toplevel widget."""
        return self.toplevel.get_parent_window()

    parent_window = property(get_parent_window)

    def on_remove_attempt(self, pane):
        """Veto pane removal unless the view says it can be closed."""
        return not self.can_be_closed()

    def can_be_closed(self):
        """Override to allow the view's pane to be closed; default is no."""
        return False

    # Legacy alias consulted by __init__ when builder_file is unset.
    gladefile = None

    def __init__(self, service, title=None, icon=None, *args, **kw):
        # NOTE(review): attributes are configured before super().__init__()
        # — presumably SlaveView's setup reads them; confirm before reordering.
        if not self.builder_file:
            self.builder_file = self.gladefile
        self.svc = service
        self.label_text = title or self.label_text
        self.icon_name = icon or self.icon_name
        if self.key:
            pass
            #self.toplevel.set_name(self.key.replace(".", "_"))
        super(PidaView, self).__init__()

    def get_toplevel(self):
        """Return the root widget of this view."""
        return self.widget

    toplevel = property(get_toplevel)

    def add_main_widget(self, widget, *args, **kw):
        """Pack a widget into the view's main container."""
        self.widget.pack_start(widget, *args, **kw)


class WindowConfig(object):
    """
    WindowConfig objects are used to register a window in the windows service
    so they can get proper shortcuts
    """
    key = None
    label_text = ""
    description = ""
    default_shortcut = ""
    action = None
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Notebook front-end to TensorFlow.

When you run this binary, you'll see something like below, which indicates
the serving URL of the notebook:

    The IPython Notebook is running at: http://127.0.0.1:8888/

Press "Shift+Enter" to execute a cell
Press "Enter" on a cell to go into edit mode.
Press "Escape" to go back into command mode and use arrow keys to navigate.
Press "a" in command mode to insert cell above or "b" to insert cell below.

Your root notebooks directory is FLAGS.notebook_dir
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import socket
import sys

# pylint: disable=g-import-not-at-top
# Official recommended way of turning on fast protocol buffers as of 10/21/14
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "cpp"
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION"] = "2"

from tensorflow.python.platform import app
from tensorflow.python.platform import flags

FLAGS = flags.FLAGS

flags.DEFINE_string(
    "password", None,
    "Password to require. If set, the server will allow public access."
    " Only used if notebook config file does not exist.")

flags.DEFINE_string("notebook_dir", "experimental/brain/notebooks",
                    "root location where to store notebooks")

# Snapshot of the original command line; main() restores it after app.run()
# has consumed/stripped the flags.
ORIG_ARGV = sys.argv
# Main notebook process calls itself with argv[1]="kernel" to start kernel
# subprocesses.
IS_KERNEL = len(sys.argv) > 1 and sys.argv[1] == "kernel"


def main(unused_argv):
  # Entry point for BOTH roles: the notebook server process and the kernel
  # subprocesses it spawns. IS_KERNEL selects the role; the sys.argv
  # manipulation order below is deliberate and must not be rearranged.
  sys.argv = ORIG_ARGV
  if not IS_KERNEL:
    # Drop all flags.
    sys.argv = [sys.argv[0]]
    # NOTE(sadovsky): For some reason, putting this import at the top level
    # breaks inline plotting.  It's probably a bug in the stone-age version of
    # matplotlib.
    from IPython.html.notebookapp import NotebookApp  # pylint: disable=g-import-not-at-top
    notebookapp = NotebookApp.instance()
    notebookapp.open_browser = True

    # password functionality adopted from quality/ranklab/main/tools/notebook.py
    # add options to run with "password"
    if FLAGS.password:
      from IPython.lib import passwd  # pylint: disable=g-import-not-at-top
      # Binding to 0.0.0.0 makes the server reachable from other machines,
      # hence the password requirement.
      notebookapp.ip = "0.0.0.0"
      notebookapp.password = passwd(FLAGS.password)
    else:
      print("\nNo password specified; Notebook server will only be available"
            " on the local machine.\n")
    notebookapp.initialize(argv=["--notebook-dir", FLAGS.notebook_dir])

    if notebookapp.ip == "0.0.0.0":
      proto = "https" if notebookapp.certfile else "http"
      url = "%s://%s:%d%s" % (proto, socket.gethostname(), notebookapp.port,
                              notebookapp.base_project_url)
      print("\nNotebook server will be publicly available at: %s\n" % url)
    notebookapp.start()
    return

  # Kernel subprocess path.
  # Drop the --flagfile flag so that notebook doesn't complain about an
  # "unrecognized alias" when parsing sys.argv.
  sys.argv = ([sys.argv[0]] +
              [z for z in sys.argv[1:] if not z.startswith("--flagfile")])
  from IPython.kernel.zmq.kernelapp import IPKernelApp  # pylint: disable=g-import-not-at-top
  kernelapp = IPKernelApp.instance()
  kernelapp.initialize()

  # Enable inline plotting. Equivalent to running "%matplotlib inline".
  ipshell = kernelapp.shell
  ipshell.enable_matplotlib("inline")

  kernelapp.start()


if __name__ == "__main__":
  # When the user starts the main notebook process, we don't touch sys.argv.
  # When the main process launches kernel subprocesses, it writes all flags
  # to a tmpfile and sets --flagfile to that tmpfile, so for kernel
  # subprocesses here we drop all flags *except* --flagfile, then call
  # app.run(), and then (in main) restore all flags before starting the
  # kernel app.
  if IS_KERNEL:
    # Drop everything except --flagfile.
    sys.argv = ([sys.argv[0]] +
                [x for x in sys.argv[1:] if x.startswith("--flagfile")])
  app.run()
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class REvd(RPackage):
    """evd: Functions for Extreme Value Distributions"""

    # CRAN landing page and source tarball for the 'evd' R package.
    homepage = "https://cloud.r-project.org/package=evd"
    url      = "https://cloud.r-project.org/src/contrib/evd_2.3-3.tar.gz"
    # Archive directory lets Spack locate older releases of the package.
    list_url = "https://cloud.r-project.org/src/contrib/Archive/evd"

    version('2.3-3', sha256='2fc5ef2e0c3a2a9392425ddd45914445497433d90fb80b8c363877baee4559b4')
-------------------------------------------------- from __future__ import absolute_import, division, print_function import unittest import inspect import warnings from skbio.util._decorator import classproperty, overrides from skbio.util._decorator import (stable, experimental, deprecated, _state_decorator) from skbio.util._exception import OverrideError class TestOverrides(unittest.TestCase): def test_raises_when_missing(self): class A(object): pass with self.assertRaises(OverrideError): class B(A): @overrides(A) def test(self): pass def test_doc_inherited(self): class A(object): def test(self): """Docstring""" pass class B(A): @overrides(A) def test(self): pass self.assertEqual(B.test.__doc__, "Docstring") def test_doc_not_inherited(self): class A(object): def test(self): """Docstring""" pass class B(A): @overrides(A) def test(self): """Different""" pass self.assertEqual(B.test.__doc__, "Different") class TestClassProperty(unittest.TestCase): def test_getter_only(self): class Foo(object): _foo = 42 @classproperty def foo(cls): return cls._foo # class-level getter self.assertEqual(Foo.foo, 42) # instance-level getter f = Foo() self.assertEqual(f.foo, 42) with self.assertRaises(AttributeError): f.foo = 4242 class TestStabilityState(unittest.TestCase): # the indentation spacing gets weird, so I'm defining the # input doc string explicitly and adding it after function # defintion _test_docstring = (" Add 42, or something else, to x.\n" "\n" " Parameters\n" " ----------\n" " x : int, x\n" " y : int, optional\n") class TestBase(TestStabilityState): def test_get_indentation_level(self): c = _state_decorator() self.assertEqual(c._get_indentation_level([]), 0) self.assertEqual( c._get_indentation_level([], default_no_existing_docstring=3), 3) self.assertEqual(c._get_indentation_level([""]), 4) self.assertEqual( c._get_indentation_level([""], default_existing_docstring=3), 3) in_ = (["summary"]) self.assertEqual(c._get_indentation_level(in_), 4) in_ = (["summary", "", "", 
" ", "", " ", ""]) self.assertEqual(c._get_indentation_level(in_), 4) in_ = (["summary", " More indentation", " Less indentation"]) self.assertEqual(c._get_indentation_level(in_), 5) def test_update_docstring(self): c = _state_decorator() in_ = None exp = ("""State: Test!!""") self.assertEqual(c._update_docstring(in_, "Test!!"), exp) in_ = """""" exp = ("""\n\n State: Test!!""") self.assertEqual(c._update_docstring(in_, "Test!!"), exp) in_ = ("""Short summary\n\n Parameters\n\n----------\n """ """x : int\n""") exp = ("""Short summary\n\n State: Test!!\n\n""" """ Parameters\n\n----------\n x : int\n""") self.assertEqual(c._update_docstring(in_, "Test!!"), exp) in_ = ("""Short summary\n\n Parameters\n\n----------\n """ """x : int\n""") exp = ("""Short summary\n\n State: Test!!\n\n""" """ Parameters\n\n----------\n x : int\n""") self.assertEqual(c._update_docstring(in_, "Test!!"), exp) in_ = ("""Short summary\n\n Parameters\n\n----------\n """ """x : int\n""") exp = ("""Short summary\n\n State: Test!!Test!!Test!!Test!!Test!!""" """Test!!Test!!Test!!Test!!Test!!Test!!Te\n st!!T""" """est!!Test!!Test!!Test!!Test!!Test!!Test!!Test!!\n\n""" """ Parameters\n\n----------\n x : int\n""") self.assertEqual(c._update_docstring(in_, "Test!!"*20), exp) class TestStable(TestStabilityState): def _get_f(self, as_of): def f(x, y=42): return x + y f.__doc__ = self._test_docstring f = stable(as_of=as_of)(f) return f def test_function_output(self): f = self._get_f('0.1.0') self.assertEqual(f(1), 43) def test_function_docstring(self): f = self._get_f('0.1.0') e1 = (" Add 42, or something else, to x.\n\n" " State: Stable as of 0.1.0.\n\n" " Parameters") self.assertTrue(f.__doc__.startswith(e1)) f = self._get_f('0.1.1') e1 = ("
Add 42, or something else, to x.\n\n" " State: Stable as of 0.1.1.\n\n"
" Parameters") self.assertTrue(f.__doc__.startswith(e1)) def test_function_signature(self): f = self._get_f('0.1.0') expected = inspect.ArgSpec( args=['x', 'y'], varargs=None, keywords=None, defaults=(42,)) self.assertEqual(inspect.getargspec(f), expected) self.assertEqual(f.__name__, 'f') def test_missing_kwarg(self): self.assertRaises(ValueError, stable) self.assertRaises(ValueError, stable, '0.1.0') class TestExperimental(TestStabilityState): def _get_f(self, as_of): def f(x, y=42): return x + y f.__doc__ = self._test_docstring f = experimental(as_of=as_of)(f) return f def test_function_output(self): f = self._get_f('0.1.0') self.assertEqual(f(1), 43) def test_function_docstring(self): f = self._get_f('0.1.0') e1 = (" Add 42, or something else, to x.\n\n" " State: Experimental as of 0.1.0.\n\n" " Parameters") self.assertTrue(f.__doc__.startswith(e1)) f = self._get_f('0.1.1') e1 = (" Add 42, or something else, to x.\n\n" " State: Experimental as of 0.1.1.\n\n" " Parameters") self.assertTrue(f.__doc__.startswith(e1)) def test_function_signature(self): f = self._get_f('0.1.0') expected = inspect.ArgSpec( args=['x', 'y'], varargs=None, keywords=None, defaults=(42,)) self.assertEqual(inspect.getargspec(f), expected) self.assertEqual(f.__name__, 'f') def test_missing_kwarg(self): self.assertRaises(ValueError, experimental) self.assertRaises(ValueError, experimental, '0.1.0') class TestDeprecated(TestStabilityState): def _get_f(self, as_of, until, reason): def f(x, y=42): return x + y f.__doc__ = self._test_docstring f = deprecated(as_of=as_of, until=until, reason=reason)(f) return f def test_function_output(self): f = self._get_f('0.1.0', until='0.1.4', reason='You should now use skbio.g().') self.assertEqual(f(1), 43) def test_deprecation_warning(self): f = self._get_f('0.1.0', until='0.1.4', reason='You should now use skbio.g().') # adapted from SO example here: http://stackoverflow.com/a/3892301 with warnings.catch_warnings(record=True) as w: 
warnings.simplefilter("always") f(1) self.assertTrue(issubclass(w[0].category, DeprecationWarning)) expected_str = "is deprecated as of scikit-bio version 0.1.0" self.assertTrue(expected_str in str(w[0].message)) def test_function_docstring(self): f = self._get_f('0.1.0', until='0.1.4', reason='You should now use skbio.g().') e1 = (" Add 42, or something else, to x.\n\n" " .. note:: Deprecated as of 0.1.0 for " "removal in 0.1.4. You should now use\n" "
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Packaging script for django-castle. Besides the usual `setup()` call it
# supports two command-line shortcuts:
#   python setup.py publish  -> upload sdist + wheel to PyPI
#   python setup.py tag      -> create and push a git tag for the version

import os
import sys

import django_castle

try:
    from setuptools import setup
except ImportError:
    # Fall back to distutils when setuptools is unavailable.
    from distutils.core import setup

# Single-source the version from the package itself.
version = django_castle.__version__

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    os.system('python setup.py bdist_wheel upload')
    sys.exit()

if sys.argv[-1] == 'tag':
    print("Tagging the version on github:")
    os.system("git tag -a %s -m 'version %s'" % (version, version))
    os.system("git push --tags")
    sys.exit()

# Long description is README plus HISTORY with the changelog marker removed.
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')

setup(
    name='django-castle',
    version=version,
    description="""A django integration for the castle.io service""",
    long_description=readme + '\n\n' + history,
    author='Jens Alm',
    author_email='jens.alm@prorenata.se',
    url='https://github.com/ulmus/django-castle',
    packages=[
        'django_castle',
    ],
    include_package_data=True,
    install_requires=[
    ],
    license="BSD",
    zip_safe=False,
    keywords='django-castle',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
)
# The MIT License
#
# Copyright (c) 2008 James Piechota
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import sys
import os.path


# Node definition
class WReader:
    """Reader for Massive .w (skin weights) files.

    After a successful read():
      deformers       -- deformer (joint) names indexed by deformer id
      weights         -- one list per vertex line, with a weight slot for
                         every deformer known at that point in the file
      _maxInfluences  -- largest number of non-zero influences on any vertex
    """

    def __init__(self):
        self._fullName = ""        # full path of the file last read
        self._path = ""            # directory portion of _fullName
        self._maxInfluences = 0
        self.deformers = []
        self.weights = []

    def name(self):
        """Return the full path of the weights file that was read."""
        return self._fullName

    def read(self, fullName):
        '''Load skin weights from a Massive .w (weights) file.

        Silently returns if the file does not exist (preserved behaviour).
        Any parse error is reported on stderr and re-raised.
        '''
        try:
            if not os.path.isfile(fullName):
                return
            self._fullName = fullName
            self._path = os.path.dirname(fullName)
            # 'with' guarantees the handle is closed even when parsing
            # raises; the original leaked the file on error.
            with open(self._fullName, "r") as fileHandle:
                for line in fileHandle:
                    tokens = line.strip().split()
                    if not tokens:
                        continue
                    if tokens[0][0] == "#":
                        # Comment
                        continue
                    elif tokens[0] == "deformer":
                        # "deformer <id> <name>": ids may arrive out of order
                        # or with gaps, so grow the table as needed.
                        # (renamed from 'id', which shadowed the builtin)
                        deformerId = int(tokens[1])
                        numDeformers = len(self.deformers)
                        if deformerId >= numDeformers:
                            self.deformers.extend([""] * (deformerId - numDeformers + 1))
                        self.deformers[deformerId] = tokens[2]
                    else:
                        # Vertex line: "<vtx>: <id> <weight> [<id> <weight> ...]"
                        # TODO: see if storing 0s for joints that have
                        # no influence is a problem. Storing the influences
                        # sparsely may make applying the weights later more
                        # complex
                        int(tokens[0][:-1])  # validate the vertex index token
                        influences = [0] * len(self.deformers)
                        count = 0
                        for i in range(1, len(tokens), 2):
                            influences[int(tokens[i])] = float(tokens[i + 1])
                            count += 1
                        # keep track of the maximum number of influences on a
                        # given vertex so we can use it to optimize the skin
                        # deformers later
                        if count > self._maxInfluences:
                            self._maxInfluences = count
                        self.weights.append(influences)
        except Exception:
            # sys.stderr.write works on both Python 2 and 3, unlike the
            # original "print >> sys.stderr" statement.
            sys.stderr.write("Error reading Weights file: %s\n" % self._fullName)
            raise
# Generated by Django 2.2.11 on 2020-11-09 17:00

import daphne_context.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    # Moves the Mycroft session id off UserInformation and onto a new
    # MycroftUser model linked one-to-one with the auth user.
    # NOTE(review): the old field's data is dropped, not migrated — confirm
    # that is intended.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('daphne_context', '0010_userinformation_mycroft_connection'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='userinformation',
            name='mycroft_session',
        ),
        migrations.CreateModel(
            name='MycroftUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # New rows get a generated session id (9 chars max).
                ('mycroft_session', models.CharField(default=daphne_context.utils.generate_mycroft_session, max_length=9)),
                # Deleting the user cascades to this row.
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    # Initial migration: creates the Archive table (one row per EAD
    # repository/subarea) and the Deleted table (tombstones for removed
    # finding aids). The b'...' help texts are kept exactly as generated.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Archive',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('label', models.CharField(help_text=b'Short label to identify an archive', max_length=10)),
                ('name', models.CharField(help_text=b'repository name (subarea) in EAD to identify finding aids associated with this archive', max_length=255)),
                ('svn', models.URLField(help_text=b'URL to subversion repository containing EAD for this archive', verbose_name=b'Subversion Repository')),
                # Auto-generated from label; the help text warns against
                # editing it after the archive is defined.
                ('slug', models.SlugField(help_text=b'shorthand id\n (auto-generated from label; do not modify after initial archive definition)')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Deleted',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('eadid', models.CharField(unique=True, max_length=50, verbose_name=b'EAD Identifier')),
                ('title', models.CharField(max_length=200)),
                ('date', models.DateTimeField(auto_now_add=True, verbose_name=b'Date removed')),
                ('note', models.CharField(help_text=b'Optional: Enter the reason this document is being deleted. These comments will be displayed to anyone who had the finding aid bookmarked and returns after it is gone.', max_length=400, blank=True)),
            ],
            options={
                'verbose_name': 'Deleted Record',
            },
            bases=(models.Model,),
        ),
    ]
import logging
import inspect

import numpy as np

from pybar.analysis.analyze_raw_data import AnalyzeRawData
from pybar.fei4.register_utils import invert_pixel_mask, make_xtalk_mask, make_pixel_mask
from pybar.fei4_run_base import Fei4RunBase
from pybar.fei4.register_utils import scan_loop
from pybar.run_manager import RunManager
from pybar.analysis.plotting.plotting import plot_occupancy


class CrosstalkScan(Fei4RunBase):
    '''Crosstalk Scan

    Implementation of a crosstalk scan. Injection in long edge pixels (row - 1, row + 1).
    Crosstalk exists when a threshold higher 0 can be measured (s-curve fit successful).
    '''
    _default_run_conf = {
        "broadcast_commands": True,
        "threaded_scan": False,
        "mask_steps": 6,  # number of injections per PlsrDAC step
        "n_injections": 100,  # number of injections per PlsrDAC step
        "scan_parameters": [('PlsrDAC', [None, 800])],  # the PlsrDAC range
        "step_size": 10,  # step size of the PlsrDAC during scan
        "use_enable_mask": False,  # if True, use Enable mask during scan, if False, all pixels will be enabled
        "enable_shift_masks": ["Enable"],  # enable masks shifted during scan
        "disable_shift_masks": [],  # disable masks shifted during scan
        "xtalk_shift_mask": ["C_High", "C_Low"],  # crosstalk mask derived from enable_shift_masks
        "pulser_dac_correction": False  # PlsrDAC correction for each double column
    }

    def configure(self):
        # Write the injection-capacitor pixel registers: a capacitor is set
        # to 1 only when its mask is listed in enable_shift_masks (case-
        # insensitive match), otherwise it is explicitly cleared to 0.
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        # C_Low
        if "C_Low".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
            self.register.set_pixel_register_value('C_Low', 1)
            commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
        else:
            self.register.set_pixel_register_value('C_Low', 0)
            commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
        # C_High
        if "C_High".lower() in map(lambda x: x.lower(), self.enable_shift_masks):
            self.register.set_pixel_register_value('C_High', 1)
            commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
        else:
            self.register.set_pixel_register_value('C_High', 0)
            commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

    def scan(self):
        # Build the PlsrDAC sweep: default range is the register's full
        # bit range, optionally clipped by the configured scan_parameters.
        scan_parameter_range = [0, (2 ** self.register.global_registers['PlsrDAC']['bitlength'])]
        if self.scan_parameters.PlsrDAC[0]:
            scan_parameter_range[0] = self.scan_parameters.PlsrDAC[0]
        if self.scan_parameters.PlsrDAC[1]:
            scan_parameter_range[1] = self.scan_parameters.PlsrDAC[1]
        scan_parameter_range = range(scan_parameter_range[0], scan_parameter_range[1] + 1, self.step_size)
        logging.info("Scanning %s from %d to %d", 'PlsrDAC', scan_parameter_range[0], scan_parameter_range[-1])

        def set_xtalk_mask():
            # Begin-of-loop callback for scan_loop. HACK: it reads the
            # variables 'index' and 'mask_step' out of the CALLER's frame
            # (scan_loop's internals) via inspect — fragile; any rename
            # inside scan_loop breaks this. Runs only at index == 0, i.e.
            # once per mask step.
            frame = inspect.currentframe()
            if frame.f_back.f_locals['index'] == 0:
                mask = make_pixel_mask(steps=self.mask_steps, shift=frame.f_back.f_locals['mask_step'])
                mask = make_xtalk_mask(mask)
                # map() used for its side effect (Python 2 evaluates eagerly).
                map(lambda mask_name: self.register.set_pixel_register_value(mask_name, mask), self.disable_shift_masks)
                commands = []
                commands.append(self.register.get_commands("ConfMode")[0])
                commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name=self.xtalk_shift_mask, joint_write=True))
                commands.append(self.register.get_commands("RunMode")[0])
                self.register_utils.send_commands(commands, concatenate=True)

        for scan_parameter_value in scan_parameter_range:
            if self.stop_run.is_set():
                break
            # Program the new PlsrDAC value before taking data.
            commands = []
            commands.extend(self.register.get_commands("ConfMode"))
            self.register.set_global_register_value('PlsrDAC', scan_parameter_value)
            commands.extend(self.register.get_commands("WrRegister", name=['PlsrDAC']))
            self.register_utils.send_commands(commands)
            with self.readout(PlsrDAC=scan_parameter_value):
                # CAL + 40 zero cycles + LV1: calibration pulse followed by
                # a trigger after a fixed delay.
                cal_lvl1_command = self.register.get_commands("CAL")[0] + self.register.get_commands("zeros", length=40)[0] + self.register.get_commands("LV1")[0]
                scan_loop(self, cal_lvl1_command, repeat_command=self.n_injections, use_delay=True, mask_steps=self.mask_steps, enable_mask_steps=None, enable_double_columns=None, same_mask_for_all_dc=False, fast_dc_loop=False, bol_function=set_xtalk_mask, eol_function=None, digital_injection=False, enable_shift_masks=self.enable_shift_masks, disable_shift_masks=self.disable_shift_masks, restore_shift_masks=False, mask=invert_pixel_mask(self.register.get_pixel_register_value('Enable')) if self.use_enable_mask else None, double_column_correction=self.pulser_dac_correction)

    def analyze(self):
        # Interpret the raw data, fit s-curves, and flag every pixel with a
        # fitted threshold > 0 as showing crosstalk.
        with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
            analyze_raw_data.create_tot_hist = False
            analyze_raw_data.create_fitted_threshold_hists = True
            analyze_raw_data.create_threshold_mask = True
            analyze_raw_data.n_injections = 100
            analyze_raw_data.interpreter.set_warning_output(False)  # so far the data structure in a threshold scan was always bad, too many warnings given
            analyze_raw_data.interpret_word_table()
            analyze_raw_data.interpreter.print_summary()
            analyze_raw_data.plot_histograms()
            thr_hist = analyze_raw_data.out_file_h5.root.HistThresholdFitted[:, :].T
            # 1 where an s-curve fit succeeded with threshold > 0, else 0.
            xtalk_mask = np.zeros(shape=thr_hist.shape, dtype=np.dtype('>u1'))
            xtalk_mask[thr_hist > 0.0] = 1
            plot_occupancy(xtalk_mask.T, title='Crosstalk', z_max=1, filename=analyze_raw_data.output_pdf)


if __name__ == "__main__":
    with RunManager('configuration.yaml') as runmngr:
        runmngr.run_run(CrosstalkScan)
#!/usr/bin/env python
"""
Parse a file and write output to another.

Extracts ednKey="..."/ednvalue="..." pairs from the input file, sorts them
by numeric key, and writes them out as PHP-style `key => __( 'value', 'ev' ),`
lines. Python 2 script (uses the print statement).
"""
from optparse import OptionParser
import re
from collections import OrderedDict

parser = OptionParser()
parser.add_option("-i", "--input", dest="input_filepath", help="input filepath")
parser.add_option("-o", "--output", dest="output_filepath", help="output filepath")
(options, args) = parser.parse_args()
#print options
#print args

input_filepath = options.input_filepath
output_filepath = options.output_filepath

# key -> value, collected from lines that contain BOTH attributes.
lines = {}
# Non-greedy groups capture the quoted attribute contents.
pattern_key = re.compile(r'ednKey="(.*?)"')
pattern_value = re.compile(r'ednvalue="(.*?)"')

with open(input_filepath, 'r') as input_file:
    for line in input_file:
        line = line.strip()
        key = pattern_key.search(line)
        value = pattern_value.search(line)
        if (key and value):
            # Later duplicates of a key overwrite earlier ones.
            lines[key.group(1)] = value.group(1)

# Keys are assumed to be integers; sort numerically, not lexically.
ordered_lines = OrderedDict(sorted(lines.items(), key = lambda t: int(t[0])))

with open(output_filepath, 'w') as output_file:
    for line in ordered_lines.items():
        #output_file.write('%s,%s\n' % (line[0], line[1]))
        output_file.write("{0} => __( '{1}', 'ev' ),\n".format(line[0], line[1]))

print "Completed"
def f(m, n):
    """Double an accumulator once for every time n fits into m.

    For m >= 0 and n > 0 this equals 2 ** (m // n). The loop condition
    mirrors the original's (m - n >= 0), so behaviour for non-positive n
    (including non-termination) is unchanged.
    """
    result = 1
    remaining = m
    while remaining >= n:
        result, remaining = result * 2, remaining - n
    return result
"""Test that resize event works correctly.

Expected behaviour:
    One window will be opened.  Resize the window and ensure that the
    dimensions printed to the terminal are correct.  You should see
    a green border inside the window but no red.

Close the window or press ESC to end the test.
"""

import unittest

from pyglet import window
from tests.interactive.window import window_util


class EVENT_RESIZE(unittest.TestCase):
    # Interactive test: the test case registers ITSELF as an event handler,
    # so pyglet dispatches resize events to on_resize below.

    def on_resize(self, width, height):
        # Printed dimensions are verified by eye against the actual window.
        print('Window resized to %dx%d.' % (width, height))

    def test_resize(self):
        w = window.Window(200, 200, resizable=True)
        w.push_handlers(self)
        # Pump events until the tester closes the window or presses ESC.
        while not w.has_exit:
            w.dispatch_events()
            window_util.draw_client_border(w)
            w.flip()
        w.close()
from zope.interface import Interface


class IUWOshThemeLayer(Interface):
    """ Marker interface that defines a browser layer """
    # Intentionally empty: views/resources registered for this layer apply
    # only when it is active.
    # NOTE(review): presumably activated via this theme's ZCML/browserlayer
    # registration — confirm in the package's configure.zcml.
ylename", "Style name", "StyleName", None), ("width", "Width", "Width", None), ("height", "Height", "Height", None), ("size", "Size", "Size", None), ("title", "Title", "Title", None), ("zindex", "Z Index", "zIndex", None), ] @classmethod def _getProps(self): return Applier._getProps() + self._props def __init__(self, **kwargs): # do not initialise element, here, to None, whatever you do. # there are circumstances where UIObject.__init__ is the last # thing that is done in derived classes, where self.setElement # will _already_ have been called. Applier.__init__(self, **kwargs) def getAbsoluteLeft(self): return DOM.getAbsoluteLeft(self.getElement()) def getAbsoluteTop(self): return DOM.getAbsoluteTop(self.getElement()) def getElement(self): """Get the DOM element associated with the UIObject, if any""" return self.element def getOffsetHeight(self): return DOM.getIntAttribute(self.element, "offsetHeight") def getOffsetWidth(self): return DOM.getIntAttribute(self.element, "offsetWidth") def getStyleName(self): return DOM.getAttribute(self.element, "className") def getStylePrimaryName(self): """Return with the first className if there are multiples""" fullClassName = self.getStyleName() if fullClassName: return fullClassName.split()[0] def getStyleAttribute(self, attribute): """ can be called with two forms: getStyleAttribute(self, attr) - returns value getStyleAttribute(self, (attr1,attr2,...)) - returns dictionary of attr:value pairs """ if isinstance(attribute, basestring): return DOM.getStyleAttribute(self.getElement(), attribute) # if attribute is not a string, assume it is iterable, # and return the multi-attribute form el = self.getElement() result = {} for attr in attribute: result[attr] = DOM.getStyleAttribute(el,attr) return result def
getTitle(self): return DOM.getAttribute(self.element, "title") def setElement(self, element):
"""Set the DOM element associated with the UIObject.""" self.element = element def setHeight(self, height): """Set the height of the element associated with this UIObject. The value should be given as a CSS value, such as 100px, 30%, or 50pi """ if height is None: height = "" DOM.setStyleAttribute(self.element, "height", str(height)) def getHeight(self): return DOM.getStyleAttribute(self.element, "height") def setPixelSize(self, width, height): """Set the width and height of the element associated with this UIObject in pixels. Width and height should be numbers. """ if width >= 0: self.setWidth("%dpx" % width) if height >= 0: self.setHeight("%dpx" % height) def setSize(self, width, height): """Set the width and height of the element associated with this UIObject. The values should be given as a CSS value, such as 100px, 30%, or 50pi """ self.setWidth(width) self.setHeight(height) def addStyleName(self, style): """Append a style to the element associated with this UIObject. This is a CSS class name. It will be added after any already-assigned CSS class for the element. """ self.setStyleName(self.element, style, True) def addStyleDependentName(self, styleSuffix): """Adds a secondary or dependent style name to this element. For example if the primary stylename is gwt-TextBox, self.addStyleDependentName("readonly") will return gwt-TextBox-readonly. """ self.addStyleName(self.getStylePrimaryName()+"-"+styleSuffix) def removeStyleName(self, style): """Remove a style from the element associated with this UIObject. This is a CSS class name.""" self.setStyleName(self.element, style, False) def removeStyleDependentName(self, styleSuffix): """Remove a dependent style name by specifying the style name's suffix. 
""" self.removeStyleName(self.getStylePrimaryName()+"-"+styleSuffix) # also callable as: setStyleName(self, style) def setStyleName(self, element, style=None, add=True): """When called with a single argument, this replaces all the CSS classes associated with this UIObject's element with the given parameter. Otherwise, this is assumed to be a worker function for addStyleName and removeStyleName. """ # emulate setStyleName(self, style) if style is not None: setStyleName(element, style, add) return style = element DOM.setAttribute(self.element, "className", style) def setStyleAttribute(self, attribute, value=None): """ can be called with two forms: single attr: setStyleAttribute(self, attr, value) multi attr: setStyleAttribute(self, {attr1:val1, attr2:val2, ...}) """ if value is not None: # assume single attr form DOM.setStyleAttribute(self.getElement(), attribute, value) return # assume multi value form el = self.getElement() for attr, val in attribute.items(): DOM.setStyleAttribute(el, attr, val) def setTitle(self, title): DOM.setAttribute(self.element, "title", title) def setWidth(self, width): """Set the width of the element associated with this UIObject. The value should be given as a CSS value, such as 100px, 30%, or 50pi """ if width is None: width = "" DOM.setStyleAttribute(self.element, "width", str(width)) def getWidth(self): return DOM.getStyleAttribute(self.element, "width") def sinkEvents(self, eventBitsToAdd): """Request that the given events be delivered to the event handler for this element. The event bits passed are added (using inclusive OR) to the events already "sunk" for the element associated with the UIObject. The event bits are a combination of values from class L{Event}. 
""" if self.element: DOM.sinkEvents(self.getElement(), eventBitsToAdd | DOM.getEventsSunk(self.getElement())) def setzIndex(self, index): DOM.setIntStyleAttribute(self.element, "zIndex", index) def isVisible(self, element=None): """ XXX DEPRECATED - use getVisible """ return self.getVisible(element) def getVisible(self, element=None): """Determine whether this element is currently visible, by checking the CSS property 'display' """ if not element: element = self.element try: # yuk! return element.style.display != "none" except AttributeError: # not been set (yet?) return True # also callable as: setVisible(visible) def setVisible(self, element, visible=None): """Set whether this element is visible or not. If a single parameter is given, the self.element is used. This modifies the CSS property 'display', which means that an invisible element not only is not drawn, but doesn't occupy any space on the page. """ if visible is None: visible = element element = self.element if visible: DOM.setStyleAttribute(element, 'display', "") else: DOM.setStyleAttribute(element, 'display', "none") def unsinkEvents(self, eventBitsToRemove): """Reverse the operation of sinkEvents. See L{UIObject.sinkevents}. """ DOM.sinkEvents(self.getElement(), ~
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. from __future__ import division, print_function, unicode_literals, \ absolute_import import os import unittest from pymatgen.io.lammps.sets import LammpsInputSet __author__ = 'Kiran Mathew' __email__ = 'kmathew@lbl.gov' test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..", "test_files", "lammps") class TestLammpsInputSet(unittest.TestCase): def setUp(self): template_file = os.path.join(test_dir, "in.peptide.template")
data_file = os.path.join(test_dir, "data.peptide") self.data_filename = "test_data.peptide" self.inpu
t_filename = "test_input.peptide" self.settings = { "pair_style": "lj/charmm/coul/long 8.0 10.0 10.0", "kspace_style": "pppm 0.0001", "fix_1": "1 all nvt temp 275.0 275.0 100.0 tchain 1", "fix_2": "2 all shake 0.0001 10 100 b 4 6 8 10 12 14 18 a 31" } self.lammps_input_set = LammpsInputSet.from_file( "test", template_file, self.settings, lammps_data=data_file, data_filename=self.data_filename) def test_input(self): self.assertEqual(self.lammps_input_set.lammps_input.settings["data_file"], self.data_filename) for k, v in self.settings.items(): self.assertEqual(self.lammps_input_set.lammps_input.settings[k], v) def test_write_input_set(self): self.lammps_input_set.write_input(self.input_filename) self.assertTrue(os.path.exists(self.input_filename)) self.assertTrue(os.path.exists(self.data_filename)) os.remove(self.input_filename) os.remove(self.data_filename) # now change both input and data filenames self.lammps_input_set.write_input("xxxx.input", "yyy.data") self.assertTrue(os.path.exists("xxxx.input")) self.assertTrue(os.path.exists("yyy.data")) os.remove("xxxx.input") os.remove("yyy.data") if __name__ == "__main__": unittest.main()
from csv import DictReader
import os

from rest_framework import status
from rest_framework.viewsets import ViewSet
from rest_framework.exceptions import NotFound
from rest_framework.response import Response

import odatagym_app.settings as ods

import logging

logger = logging.getLogger('odata_gym')


class DatasetsHandler(ViewSet):
    """Serves CSV datasets stored under ``ods.DATASETS_DIR`` as JSON rows."""

    # Short codes accepted by the 'file_delimiter' query parameter.
    # Hoisted to a class attribute so the dict is not rebuilt per request.
    DELIMITERS_MAP = {
        'c': ',',    # comma (default)
        'sc': ';',   # semicolon
        'sp': ' '    # space
    }

    def get(self, request, dataset_folder, dataset_name, format=None):
        """Return the rows of <dataset_folder>/<dataset_name> as a JSON list.

        The CSV delimiter is chosen with the optional 'file_delimiter'
        query parameter ('c' comma, 'sc' semicolon, 'sp' space). Unknown
        codes fall back to comma instead of raising KeyError (HTTP 500).

        :raises NotFound: (HTTP 404) when the dataset file does not exist.
        """
        dataset_path = os.path.join(ods.DATASETS_DIR, dataset_folder,
                                    dataset_name)
        # Debug prints replaced with the module logger so output honours
        # the 'odata_gym' logging configuration.
        logger.debug('Dataset path: %s', dataset_path)
        if os.path.exists(dataset_path):
            logger.debug('Query params: %s', request.query_params)
            delimiter = request.query_params.get('file_delimiter', 'c')
            logger.debug('Delimiter is %s', delimiter)
            with open(dataset_path) as dataset:
                reader = DictReader(
                    dataset,
                    delimiter=self.DELIMITERS_MAP.get(delimiter, ','))
                data = list(reader)
            return Response(data, status=status.HTTP_200_OK)
        else:
            raise NotFound('There is no dataset %s for %s'
                           % (dataset_name, dataset_folder))
#!/usr/bin/env python
import re
import os
import time
import sys
import unittest
import ConfigParser

from setuptools import setup, Command


def read(fname):
    # Read a file that sits next to this setup script (e.g. README.rst).
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


class SQLiteTest(Command):
    """
    Run the tests on SQLite
    """
    description = "Run tests on SQLite"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        if self.distribution.tests_require:
            # Make test-only dependencies importable before running.
            self.distribution.fetch_build_eggs(self.distribution.tests_require)
        # An in-memory SQLite database keeps the run self-contained.
        os.environ['TRYTOND_DATABASE_URI'] = 'sqlite://'
        os.environ['DB_NAME'] = ':memory:'
        from tests import suite
        test_result = unittest.TextTestRunner(verbosity=3).run(suite())
        if test_result.wasSuccessful():
            sys.exit(0)
        sys.exit(-1)


class PostgresTest(Command):
    """
    Run the tests on Postgres.
    """
    description = "Run tests on Postgresql"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        if self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)
        os.environ['TRYTOND_DATABASE_URI'] = 'postgresql://'
        # A timestamped database name avoids clashes between runs.
        os.environ['DB_NAME'] = 'test_' + str(int(time.time()))
        from tests import suite
        test_result = unittest.TextTestRunner(verbosity=3).run(suite())
        if test_result.wasSuccessful():
            sys.exit(0)
        sys.exit(-1)


# Module metadata (version, depends, xml data files) comes from tryton.cfg.
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
    if key in info:
        info[key] = info[key].strip().splitlines()

major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)

requires = []

# Non-trytond distribution prefixes for third-party Tryton modules.
MODULE2PREFIX = {
    'report_webkit': 'openlabs'
}

MODULE = "waiting_customer_shipment_report"
PREFIX = "fio"
for dep in info.get('depends', []):
    # ir/res/webdav ship with trytond itself; pin everything else to the
    # current major.minor series.
    if not re.match(r'(ir|res|webdav)(\W|$)', dep):
        requires.append(
            '%s_%s >= %s.%s, < %s.%s' % (
                MODULE2PREFIX.get(dep, 'trytond'), dep,
                major_version, minor_version, major_version,
                minor_version + 1
            )
        )
requires.append(
    'trytond >= %s.%s, < %s.%s' % (
        major_version, minor_version, major_version, minor_version + 1
    )
)

setup(
    name='%s_%s' % (PREFIX, MODULE),
    version=info.get('version', '0.0.1'),
    description="",
    author="Fulfil.IO Inc., Openlabs Technologies and Consulting (P) Ltd.",
    author_email='info@fulfil.io',
    url='http://www.fulfil.io/',
    package_dir={'trytond.modules.%s' % MODULE: '.'},
    packages=[
        'trytond.modules.%s' % MODULE,
        'trytond.modules.%s.tests' % MODULE,
    ],
    package_data={
        'trytond.modules.%s' % MODULE: info.get('xml', [])
        + info.get('translation', [])
        + ['tryton.cfg', 'locale/*.po', 'tests/*.rst', 'reports/*.odt']
        + ['view/*.xml', 'reports/*.html', 'reports/css/bootstrap/css/*']
        + ['reports/css/bootstrap/fonts/*', 'reports/css/font-awesome/css/*']
        + ['reports/css/font-awesome/fonts/*', 'reports/js/*.js']
    },
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Plugins',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Tryton',
        'Topic :: Office/Business',
    ],
    long_description=open('README.rst').read(),
    license='BSD',
    install_requires=requires,
    zip_safe=False,
    entry_points="""
    [trytond.modules]
    %s = trytond.modules.%s
    """ % (MODULE, MODULE),
    test_suite='tests',
    test_loader='trytond.test_loader:Loader',
    cmdclass={
        'test': SQLiteTest,
        'test_on_postgres': PostgresTest,
    }
)
#!/usr/bin/env python
# Django management entry point; defaults to the production settings module.
import os
import sys

if __name__ == '__main__':
    # setdefault() lets an already-exported DJANGO_SETTINGS_MODULE win.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.prod')

    # Imported lazily so the environment is configured before Django loads.
    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
e terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """ This module contains functions and methods to authenticate with OAuth1 providers. """ __revision__ = \ "$Id$" from invenio.containerutils import get_substructure from invenio.dbquery import run_sql from invenio.external_authentication import ExternalAuth class ExternalOAuth1(ExternalAuth): """ Contains methods for authenticate with an OpenID provider. """ @staticmethod def __init_req(req): req.g['oauth1_provider_name'] = '' req.g['oauth1_debug'] = 0 req.g['oauth1_msg'] = '' req.g['oauth1_debug_msg'] = '' req.g['oauth1_response'] = None def auth_user(self, username, password, req=None): """ Tries to find email and identity of the user from OAuth1 provider. 
If it doesn't find any of them, returns (None, None) @param username: Isn't used in this function @type username: str @param password: Isn't used in this function @type password: str @param req: request @type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest @rtype: str|NoneType, str|NoneType """ from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS from invenio.access_control_config import CFG_OAUTH1_PROVIDERS from invenio.webinterface_handler import wash_urlargd from rauth.service import OAuth1Service self.__init_req(req) args = wash_urlargd(req.form, {'provider': (str, ''), 'login_method': (str, ''), 'oauth_token': (str, ''), 'oauth_verifier': (str, ''), 'denied': (str, '') }) provider_name = req.g['oauth1_provider_name'] = args['provider'] if not provider_name in CFG_OAUTH1_PROVIDERS: req.g['oauth1_msg'] = 22 return None, None # Load the configurations to construct OAuth1 service config = CFG_OAUTH1_CONFIGURATIONS[args['provider']] req.g['oauth1_debug'] = config.get('debug', 0) if not args['oauth_token']: # In case of an error, display corresponding message if args['denied']: req.g['oauth1_msg'] = 21 return None, None else: req.g['oauth1_msg'] = 22 return None, None provider = OAuth1Service( name = req.g['oauth1_provider_name'], consumer_key = config['consumer_key'], consumer_secret = config['consumer_secret'], request_token_url = config['request_token_url'], access_token_url = config['access_token_url'], authorize_url = config['authorize_url'], header_auth = True) # Get the request token secret from database and exchange it with the # access token. 
query = """SELECT secret FROM oauth1_storage WHERE token = %s""" params = (args['oauth_token'],) try: # If the request token is already used, return request_token_secret = run_sql(query, params)[0][0] except IndexError: req.g['oauth1_msg'] = 22 return None, None response = provider.get_access_token( 'GET', request_token = args['oauth_token'], request_token_secret = request_token_secret, params = { 'oauth_verifier': args['oauth_verifier'] } ) if req.g['oauth1_debug']: req.g['oauth1_debug_msg'] = str(response.content) + "<br/>" # Some providers send the identity and access token together. email, identity = self._get_user_email_and_id(response.content, req) if not identity and config.has_key('request_url'): # For some providers, to reach user profile we need to make request # to a specific url. params = config.get('request_parameters', {}) response = provider.get(config['request_url'], params = params, access_token = response.content['oauth_token'], acc
ess_token_secret = response.content['oauth_token_secret'] ) if req.oauth1_debug: req.g['oauth1_debug_msg'] += str(response.content) + "<br/>" email, identity = self._get_user_email_and_id(response.content, req) if identity: # If identity i
s found, add the name of the provider at the # beginning of the identity because different providers may have # different users with same id. identity = "%s:%s" % (req.g['oauth1_provider_name'], identity) else: req.g['oauth1_msg'] = 23 # Delete the token saved in the database since it is useless now. query = """ DELETE FROM oauth1_storage WHERE token=%s OR date_creation < DATE_SUB(NOW(), INTERVAL 1 HOUR) """ params = (args['oauth_token'],) run_sql(query, params) if req.g['oauth1_debug']: req.g['oauth1_msg'] = "<code>%s</code>" % req.g['oauth1_debug_msg'].replace("\n", "<br/>") return None, None return email, identity def fetch_user_nickname(self, username, password=None, req=None): """ Fetches the OAuth1 provider for nickname of the user. If it doesn't find any, returns None. This function doesn't need username, password or req. They are exist just because this class is derived from ExternalAuth @param username: Isn't used in this function @type username: str @param password: Isn't used in this function @type password: str @param req: Isn't used in this function @type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest @rtype: str or NoneType """ from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS if req.g['oauth1_provider_name']: path = None if CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']].has_key( 'nickname' ): path = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']]['nickname'] if path: return get_substructure(req.oauth1_response, path) else: return None def _get_user_email_and_id(self, container, req): """ Returns external identity and email address together. Since identity is essential for OAuth1 authentication, if it doesn't find external identity returns None, None. 
@param container: container which contains email and id @type container: list|dict @rtype str|NoneType, str|NoneType """ from invenio.access_control_config import CFG_OAUTH1_CONFIGURATIONS identity = None email = None if CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_provider_name']].has_key('id'): path = CFG_OAUTH1_CONFIGURATIONS[req.g['oauth1_
# All nodes are of the form [path1, child1, path2, child2] # or <value> from ethereum import utils from ethereum.db import EphemDB, ListeningDB import rlp, sys import copy hashfunc = utils.sha3 HASHLEN = 32 # 0100000101010111010000110100100101001001 -> ASCII def decode_bin(x): return ''.join([chr(int(x[i:i+8], 2)) for i in range(0, len(x), 8)]) # ASCII -> 0100000101010111010000110100100101001001 def encode_bin(x): o = '' for c in x: c = ord(c) p = '' for i in range(8): p = str(c % 2) + p c /= 2 o += p return o # Encodes a binary list [0,1,0,1,1,0] of any length into bytes def encode_bin_path(li): if li == []: return '' b = ''.join([str(x) for x in li]) b2 = '0' * ((4 - len(b)) % 4) + b prefix = ['00', '01', '10', '11'][len(b) % 4] if len(b2) % 8 == 4: return decode_bin('00' + prefix + b2) else: return decode_bin('100000' + prefix + b2) # Decodes bytes into a binary list def decode_bin_path(p): if p == '': return [] p = encode_bin(p) if p[0] == '1': p = p[4:] assert p[0:2] == '00' L = ['00', '01', '10', '11'].index(p[2:4]) p = p[4+((4 - L) % 4):] return [(1 if x == '1' else 0) for x in p] # Get a node from a database if needed def dbget(node, db): if len(node) == HASHLEN: return rlp.decode(db.get(node)) return node # Place a node into a database if needed def dbput(node, db): r = rlp.encode(node) if len(r) == HASHLEN or len(r) > HASHLEN * 2: h = hashfunc(r) db.put(h, r) return h return node # Get a value from a tree def get(node, db, key): node = dbget(node, db) if key == []: return node[0] elif len(node) == 1 or len(node) == 0: return '' else: sub = dbget(node[key[0]], db) if len(sub) == 2: subpath, subnode = sub else: subpath, subnode = '', sub[0] subpath = decode_bin_path(subpath) if key[1:len(subpath)+1] != subpath: return '' return get(subnode, db, key[len(subpath)+1:]) # Get length of shared prefix of inputs def get_shared_length(l1, l2): i = 0 while i < len(l1) and i < len(l2) and l1[i] == l2[i]: i += 1 return i # Replace ['', v] with [v] and compact nodes 
into hashes
# if needed
def contract_node(n, db):
    # Collapse the redundant ['', v] -> [v] form in each child, then store
    # oversized children (and the node itself) in the db by hash.
    if len(n[0]) == 2 and n[0][0] == '':
        n[0] = [n[0][1]]
    if len(n[1]) == 2 and n[1][0] == '':
        n[1] = [n[1][1]]
    if len(n[0]) != 32:
        n[0] = dbput(n[0], db)
    if len(n[1]) != 32:
        n[1] = dbput(n[1], db)
    return dbput(n, db)


# Update a trie
def update(node, db, key, val):
    node = dbget(node, db)
    # Unfortunately this particular design does not allow
    # a node to have one child, so at the root for empty
    # tries we need to add two dummy children
    if node == '':
        node = [dbput([encode_bin_path([]), ''], db),
                dbput([encode_bin_path([1]), ''], db)]
    if key == []:
        node = [val]
    elif len(node) == 1:
        raise Exception("DB must be prefix-free")
    else:
        assert len(node) == 2, node
        sub = dbget(node[key[0]], db)
        if len(sub) == 2:
            _subpath, subnode = sub
        else:
            _subpath, subnode = '', sub[0]
        subpath = decode_bin_path(_subpath)
        sl = get_shared_length(subpath, key[1:])
        if sl == len(subpath):
            # The stored edge is a full prefix of the key: recurse below it.
            node[key[0]] = [_subpath, update(subnode, db, key[sl+1:], val)]
        else:
            # Edge and key diverge: split the edge at the fork point.
            subpath_next = subpath[sl]
            n = [0, 0]
            n[subpath_next] = [encode_bin_path(subpath[sl+1:]), subnode]
            n[(1 - subpath_next)] = [encode_bin_path(key[sl+2:]), [val]]
            n = contract_node(n, db)
            node[key[0]] = dbput([encode_bin_path(subpath[:sl]), n], db)
    return contract_node(node, db)


# Compression algorithm specialized for merkle proof databases
# The idea is similar to standard compression algorithms, where
# you replace an instance of a repeat with a pointer to the repeat,
# except that here you replace an instance of a hash of a value
# with the pointer of a value. This is useful since merkle branches
# usually include nodes which contain hashes of each other
# NOTE: this is Python 2 code; strings below are byte strings.
magic = '\xff\x39'


def compress_db(db):
    # Replace every embedded 32-byte hash of another stored value with a
    # 2-byte index (prefixed by the magic marker); escape literal markers
    # by doubling them.
    out = []
    values = db.kv.values()
    keys = [hashfunc(x) for x in values]
    # Pointers are two bytes, so only ~65k values can be referenced.
    assert len(keys) < 65300
    for v in values:
        o = ''
        pos = 0
        while pos < len(v):
            done = False
            if v[pos:pos+2] == magic:
                # Literal occurrence of the marker: escape by doubling.
                o += magic + magic
                done = True
                pos += 2
            for i, k in enumerate(keys):
                if v[pos:].startswith(k):
                    # Embedded hash of value i: emit marker + 2-byte index.
                    o += magic + chr(i // 256) + chr(i % 256)
                    done = True
                    pos += len(k)
                    break
            if not done:
                o += v[pos]
                pos += 1
        out.append(o)
    return rlp.encode(out)


def decompress_db(ins):
    # Inverse of compress_db: resolve pointers (recursively, since values
    # may point at each other) and rebuild the keyed database.
    ins = rlp.decode(ins)
    vals = [None] * len(ins)

    def decipher(i):
        # Lazily reconstruct value i, memoized in vals.
        if vals[i] is None:
            v = ins[i]
            o = ''
            pos = 0
            while pos < len(v):
                if v[pos:pos+2] == magic:
                    if v[pos+2:pos+4] == magic:
                        # Escaped literal marker.
                        o += magic
                    else:
                        # Pointer: substitute the hash of the referenced value.
                        ind = ord(v[pos+2]) * 256 + ord(v[pos+3])
                        o += hashfunc(decipher(ind))
                    pos += 4
                else:
                    o += v[pos]
                    pos += 1
            vals[i] = o
        return vals[i]
    for i in range(len(ins)):
        decipher(i)
    o = EphemDB()
    for v in vals:
        o.put(hashfunc(v), v)
    return o


# Convert a merkle branch directly into RLP (ie. remove
# the hashing indirection). As it turns out, this is a
# really compact way to represent a branch
def compress_branch(db, root):
    o = dbget(copy.copy(root), db)

    def evaluate_node(x):
        # Inline every child that is a hash present in the database.
        # NOTE(review): the elif tests isinstance(x, list) — it looks like
        # it was meant to test x[i]; confirm before relying on it.
        for i in range(len(x)):
            if len(x[i]) == HASHLEN and x[i] in db.kv:
                x[i] = evaluate_node(dbget(x[i], db))
            elif isinstance(x, list):
                x[i] = evaluate_node(x[i])
        return x
    o2 = rlp.encode(evaluate_node(o))
    return o2


def decompress_branch(branch):
    # Inverse of compress_branch: re-hash every nested node back into a DB.
    branch = rlp.decode(branch)
    db = EphemDB()

    def evaluate_node(x):
        if isinstance(x, list):
            x = [evaluate_node(n) for n in x]
            x = dbput(x, db)
        return x
    evaluate_node(branch)
    return db


# Test with n nodes and k branch picks
def test(n, m=100):
    assert m <= n
    db = EphemDB()
    x = ''
    # Build a trie of n key/value pairs.
    for i in range(n):
        k = hashfunc(str(i))
        v = hashfunc('v'+str(i))
        x = update(x, db, [int(a) for a in encode_bin(rlp.encode(k))], v)
    print(x)
    print(sum([len(val) for key, val in db.db.items()]))
    l1 = ListeningDB(db)
    o = 0
    p = 0
    q = 0
    ecks = x
    # For m keys, record the proof and check both compression round trips.
    for i in range(m):
        x = copy.deepcopy(ecks)
        k = hashfunc(str(i))
        v = hashfunc('v'+str(i))
        l2 = ListeningDB(l1)
        v2 = get(x, l2, [int(a) for a in encode_bin(rlp.encode(k))])
        assert v == v2
        o += sum([len(val) for key, val in l2.kv.items()])
        cdb = compress_db(l2)
        p += len(cdb)
        assert decompress_db(cdb).kv == l2.kv
        cbr = compress_branch(l2, x)
        q += len(cbr)
        dbranch = decompress_branch(cbr)
        assert v == get(x, dbranch, [int(a) for a in encode_bin(rlp.encode(k))])
        # for k in l2.kv:
        #     assert k in dbranch.kv
    # NOTE(review): 'avg_proof_size' reports the same total as
    # 'total_db_size'; presumably it was meant to be o // min(n, m) — confirm.
    o = {
        'total_db_size': sum([len(val) for key, val in l1.kv.items()]),
        'avg_proof_size': sum([len(val) for key, val in l1.kv.items()]),
        'avg_compressed_proof_size': (p // min(n, m)),
        'avg_branch_size': (q // min(n, m)),
        'compressed_db_size': len(compress_db(l1))
    }
    return o
# Copyright 2010 Ramon Xuriguera
#
# This file is part of BibtexIndexMaker.
#
# BibtexIndexMaker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BibtexIndexMaker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BibtexIndexMaker. If not, see <http://www.gnu.org/licenses/>.

from PyQt4 import QtCore, QtGui #@UnresolvedImport

from bibim.gui.ui.ui_file_chooser import Ui_FileChooser
from bibim.gui.ui.ui_new_collection_dialog import Ui_NewWrapperCollection


class FileChooser(QtGui.QWidget):
    """Small widget pairing a line edit with a browse button; emits
    pathChanged whenever the selected path is assigned."""

    # Selection modes: choose a directory or a single file.
    DIR = 0
    FILE = 1

    pathChanged = QtCore.pyqtSignal()

    def __init__(self):
        super(FileChooser, self).__init__()
        # Setup ui
        self.ui = Ui_FileChooser()
        self.ui.setupUi(self)

        # Assigning self.path goes through the pyqtProperty setter below.
        self.path = QtCore.QString()
        self.mode = self.DIR

        # Connect signals and slots
        #self.connect(self.ui.browseButton, QtCore.SIGNAL('clicked()'), self.chooseFile)
        self.ui.browseButton.clicked.connect(self.chooseFile)

    def get_path(self):
        return self.__path

    def set_path(self, value):
        # Store the value and notify listeners of the change.
        self.__path = value
        self.pathChanged.emit()

    # Qt property so that 'path' assignments trigger set_path/pathChanged.
    path = QtCore.pyqtProperty(QtCore.QString, get_path, set_path)

    @QtCore.pyqtSlot()
    def chooseFile(self):
        # Open the dialog matching the configured mode. An empty result
        # means the user cancelled; the line edit is then left untouched.
        if self.mode == self.DIR:
            self.path = QtGui.QFileDialog.getExistingDirectory(self)
        else:
            self.path = QtGui.QFileDialog.getOpenFileName(self)

        if self.path:
            self.ui.pathLine.setText(self.path)


class LogsTextEdit(QtGui.QTextEdit):
    """Read-only text area that renders log messages colour-coded by level."""

    # Text colour per log level name.
    colors = {'DEBUG':QtGui.QColor(100, 100, 100),
              'INFO':QtGui.QColor(0, 0, 0),
              'WARNING':QtGui.QColor(222, 145, 2),
              'ERROR':QtGui.QColor(191, 21, 43),
              'CRITICAL':QtGui.QColor(191, 21, 43)}

    def __init__(self, parent):
        QtGui.QTextEdit.__init__(self, parent)
        self.setReadOnly(True)

    @QtCore.pyqtSlot(QtCore.QString, QtCore.QString)
    def updateText(self, message, level='INFO'):
        # Append the message using the colour of its severity level.
        self.setTextColor(self.colors[str(level)])
        self.append(message)


class WrapperCollectionBox(QtGui.QDialog):
    """Modal dialog for creating a wrapper collection; OK stays disabled
    until both the url and field inputs are non-empty."""

    def __init__(self, parent=None):
        super(WrapperCollectionBox, self).__init__()
        self.ui = Ui_NewWrapperCollection()
        self.ui.setupUi(self)
        self.setModal(True)

        # OK Button disabled until both url and field are not empty
        self.ok_button = self.ui.buttonBox.button(QtGui.QDialogButtonBox.Ok)
        self.ok_button.setEnabled(False)
        self.ui.urlLine.textChanged.connect(self._enable_ok_button)
        self.ui.fieldLine.textChanged.connect(self._enable_ok_button)

    def _enable_ok_button(self):
        # Re-evaluated on every text change in either input.
        if not (self.ui.urlLine.text() and self.ui.fieldLine.text()):
            self.ok_button.setEnabled(False)
        else:
            self.ok_button.setEnabled(True)


class ConfirmMessageBox(QtGui.QMessageBox):
    """Modal OK/Cancel question box defaulting to Cancel."""

    def __init__(self, parent=None):
        super(ConfirmMessageBox, self).__init__(parent)
        self.setModal(True)
        self.setStandardButtons(QtGui.QMessageBox.Ok |
                                QtGui.QMessageBox.Cancel)
        self.setDefaultButton(QtGui.QMessageBox.Cancel)
        self.setIcon(QtGui.QMessageBox.Question)
#!/usr/bin/python
# Interactive helper that downloads the Intel perfmon event lists matching
# the local CPU (detected via pcm-core) and lets the user query events by
# name. NOTE: Python 2 only (urllib2, raw_input, 'except E, err' syntax).
import urllib2
import json, csv
import subprocess
import sys
import platform
import getopt

all_flag = False        # -a/--all: dump every event instead of prompting
download_flag = False   # -d/--download: also save the JSON files locally
filename = None         # -f/--file: read events from a local file instead
offcore_events = []

try:
    opts, args = getopt.getopt(sys.argv[1:], 'a,f:,d', ['all', 'file=', 'download'])
    for o, a in opts:
        if o in ('-a', '--all'):
            all_flag = True
        if o in ('-f', '--file'):
            filename = a
        if o in ('-d', '--download'):
            download_flag = True
except getopt.GetoptError, err:
    print("parse error: %s\n" % (str(err)))
    exit(-2)

if filename == None:
    # No local file given: fetch the model -> event-file map from 01.org.
    map_file_raw = urllib2.urlopen('https://download.01.org/perfmon/mapfile.csv')
    map_dict = csv.DictReader(map_file_raw)
    map_file = []
    core_path = ''
    offcore_path = ''
    while True:
        try:
            map_file.append(map_dict.next())
        except StopIteration:
            break

    # Run the bundled pcm-core binary to learn the local CPU family-model.
    if platform.system() == 'CYGWIN_NT-6.1':
        p = subprocess.Popen(['./pcm-core.exe -c'], stdout=subprocess.PIPE, shell=True)
    elif platform.system() == 'Windows':
        p = subprocess.Popen(['pcm-core.exe -c'], stdout=subprocess.PIPE, shell=True)
    else:
        p = subprocess.Popen(['./pcm-core.x -c'], stdout=subprocess.PIPE, shell=True)
    (output, err) = p.communicate()
    p_status = p.wait()

    # Pick the core/offcore event files whose family-model matches.
    for model in map_file:
        if model['Family-model'] in output:
            if(model['EventType'] == 'core'):
                core_path = model['Filename']
            elif(model['EventType'] == 'offcore'):
                offcore_path = model['Filename']
            print (model)

    if core_path != '':
        json_core_data = urllib2.urlopen('https://download.01.org/perfmon'+core_path)
        core_events = json.load(json_core_data)
        if(download_flag == True):
            with open(core_path.split('/')[-1], 'w') as outfile:
                json.dump(core_events, outfile, sort_keys=True, indent=4)
    else:
        print ('no core event found for %s CPU, program abort...' % (output))
        exit(-1)
    if offcore_path != '':
        json_offcore_data = urllib2.urlopen('https://download.01.org/perfmon'+offcore_path)
        offcore_events = json.load(json_offcore_data)
        if(download_flag == True):
            with open(offcore_path.split('/')[-1], 'w') as outfile:
                json.dump(offcore_events, outfile, sort_keys=True, indent=4)
else:
    # Local file mode: only core events are loaded.
    core_events = json.load(open(filename))

if all_flag == True:
    # Dump every known event name with its description, then exit.
    for event in core_events+offcore_events:
        if event.has_key('EventName') and event.has_key('BriefDescription'):
            print (event['EventName']+':'+event['BriefDescription'])
    sys.exit(0)

# Interactive loop: substring-match on event names; print the matching
# events as perf-style 'cpu/.../' encodings. Empty input quits.
name = raw_input("Event to query (empty enter to quit):")
while(name != ''):
    for event in core_events+offcore_events:
        if event.has_key('EventName') and name.lower() in event['EventName'].lower():
            print (event['EventName']+':'+event['BriefDescription'])
            for ev_code in event['EventCode'].split(', '):
                # Optional attributes are appended only when non-zero.
                print ('cpu/umask=%s,event=%s,name=%s%s%s%s%s/' % (
                    event['UMask'], ev_code, event['EventName'],
                    (',offcore_rsp=%s' % (event['MSRValue'])) if event['MSRValue'] != '0' else '',
                    (',inv=%s' % (event['Invert'])) if event['Invert'] != '0' else '',
                    (',any=%s' % (event['AnyThread'])) if event['AnyThread'] != '0' else '',
                    (',edge') if event['EdgeDetect'] != '0' else ''))
    name = raw_input("Event to query (empty enter to quit):")
from __future__ import absolute_import

from django.core.urlresolvers import reverse
from django.template.response import TemplateResponse
from django.test import TestCase
from django.test.utils import override_settings

from .models import Action


@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminCustomUrlsTest(TestCase):
    """Tests for a ModelAdmin whose add_view URL was customised to '!add/'
    and whose model uses a CharField primary key.
    """
    fixtures = ['users.json', 'actions.json']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def testBasicAddGet(self):
        """GET on the customised add_view renders a template successfully."""
        add_url = '/custom_urls/admin/admin_custom_urls/action/!add/'
        response = self.client.get(add_url)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)

    def testAddWithGETArgs(self):
        """GET parameters pre-populate the add form fields."""
        add_url = '/custom_urls/admin/admin_custom_urls/action/!add/'
        response = self.client.get(add_url, {'name': 'My Action'})
        self.assertEqual(response.status_code, 200)
        self.assertTrue(
            'value="My Action"' in response.content,
            "Couldn't find an input with the right value in the response."
        )

    def testBasicAddPost(self):
        """POST on the customised add_view creates the object (popup flow)."""
        add_url = '/custom_urls/admin/admin_custom_urls/action/!add/'
        post_data = {
            '_popup': u'1',
            "name": u'Action added through a popup',
            "description": u"Description of added action",
        }
        response = self.client.post(add_url, post_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddAnotherPopup')
        self.assertContains(response, 'Action added through a popup')

    def testAdminUrlsNoClash(self):
        """Admin URLs resolve correctly despite the customised add_view:
        a PK literally named 'add' and a slash-containing PK both reach
        the change_view.
        """
        # A PK named 'add' must hit the change_view, not the add_view.
        response = self.client.get('/custom_urls/admin/admin_custom_urls/action/add/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')

        # Same check, with the URL built via reverse().
        change_name = 'admin:%s_action_change' % Action._meta.app_label
        response = self.client.get(reverse(change_name, args=('add',)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')

        # The funny-looking slash-containing PK must also reach change_view.
        funny_pk = "path/to/html/document.html"
        response = self.client.get(reverse(change_name, args=(funny_pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')
        self.assertContains(response, 'value="path/to/html/document.html"')