"""Graph debug results dumping class.""" import os import json import tvm GRAPH_DUMP_FILE_NAME = '_tvmdbg_graph_dump.json' class DebugResult(object): """Graph debug data module. Data dump module manage all the debug data formatting. Output data and input graphs are formatted and dumped to file. Frontend read these data and graph for visualization. Parameters ---------- graph_json : str The graph to be deployed in json format output by nnvm graph. Each operator (tvm_op) in the graph will have a one to one mapping with the symbol in libmod which is used to construct a "PackedFunc" . dump_path : str Output data path is read/provided from frontend """ def __init__(self, graph_json, dump_path): self._dump_path = dump_path self._output_tensor_list = [] self._time_list = [] self._parse_graph(graph_json) # dump the json information self.dump_graph_json(graph_json) def _parse_graph(self, graph_json): """Parse and extract the NNVM graph and update the nodes, shapes and dltype. Parameters ---------- graph_json : str or graph class The graph to be deployed in json format output by nnvm graph. """ json_obj = json.loads(graph_json) self._nodes_list = json_obj['nodes'] self._shapes_list = json_obj['attrs']['shape'] self._dtype_list = json_obj['attrs']['dltype'] self._update_graph_json() def _update_graph_json(self): """update the nodes_list with name, shape and data type, for temporarily storing the output. """ nodes_len = len(self._nodes_list) for i in range(nodes_len): node = self._nodes_list[i] input_list = [] for input_node in node['inputs']: inp
ut_list.append(self._nodes_list[input_node[0]]['name']) node['inputs'] = input_list dtype = str("type: " + self._dtype_list[1][i]) if 'attrs' not in node: node['attrs'] = {} node['op'] = "param" else: node['op'] = node['attrs']['func_name'
] node['attrs'].update({"T": dtype}) node['shape'] = self._shapes_list[1][i] def _cleanup_tensors(self): """Remove the tensor dump file (graph wont be removed) """ for filename in os.listdir(self._dump_path): if os.path.isfile(filename) and not filename.endswith(".json"): os.remove(filename) def get_graph_nodes(self): """Return the nodes list """ return self._nodes_list def get_graph_node_shapes(self): """Return the nodes shapes list """ return self._shapes_list def get_graph_node_output_num(self, node): """Return the number of outputs of a node """ return 1 if node['op'] == 'param' else int(node['attrs']['num_outputs']) def get_graph_node_dtypes(self): """Return the nodes dtype list """ return self._dtype_list def dump_output_tensor(self): """Dump the outputs to a temporary folder, the tensors are in numpy format """ #cleanup existing tensors before dumping self._cleanup_tensors() eid = 0 order = 0 output_tensors = {} for node, time in zip(self._nodes_list, self._time_list): num_outputs = self.get_graph_node_output_num(node) for j in range(num_outputs): order += time[0] key = node['name'] + "_" + str(j) + "__" + str(order) output_tensors[key] = self._output_tensor_list[eid] eid += 1 with open(os.path.join(self._dump_path, "output_tensors.params"), "wb") as param_f: param_f.write(save_tensors(output_tensors)) def dump_graph_json(self, graph): """Dump json formatted graph. Parameters ---------- graph : json format json formatted NNVM graph contain list of each node's name, shape and type. """ graph_dump_file_name = GRAPH_DUMP_FILE_NAME with open(os.path.join(self._dump_path, graph_dump_file_name), 'w') as outfile: json.dump(graph, outfile, indent=4, sort_keys=False) def display_debug_result(self): """Displays the debugger result" """ header = ["Node Name", "Ops", "Time(us)", "Time(%)", "Start Time", \ "End Time", "Shape", "Inputs", "Outputs"] lines = ["---------", "---", "--------", "-------", "----------", \ "--------", "-----", "------", "-------"] eid = 0 data = [] total_time = sum(time[0] for time in self._time_list) for node, time in zip(self._nodes_list, self._time_list): num_outputs = self.get_graph_node_output_num(node) for j in range(num_outputs): op = node['op'] if node['op'] == 'param': continue name = node['name'] shape = str(self._output_tensor_list[eid].shape) time_us = round(time[0] * 1000000, 2) time_percent = round(((time[0] / total_time) * 100), 2) inputs = str(node['attrs']['num_inputs']) outputs = str(node['attrs']['num_outputs']) node_data = [name, op, time_us, time_percent, str(time[1]), str(time[2]), \ shape, inputs, outputs] data.append(node_data) eid += 1 fmt = "" for i, _ in enumerate(header): max_len = len(header[i]) for j, _ in enumerate(data): item_len = len(str(data[j][i])) if item_len > max_len: max_len = item_len fmt = fmt + "{:<" + str(max_len + 2) + "}" print(fmt.format(*header)) print(fmt.format(*lines)) for row in data: print(fmt.format(*row)) def save_tensors(params): """Save parameter dictionary to binary bytes. The result binary bytes can be loaded by the GraphModule with API "load_params". Parameters ---------- params : dict of str to NDArray The parameter dictionary. Returns ------- param_bytes: bytearray Serialized parameters. """ _save_tensors = tvm.get_global_func("_save_param_dict") args = [] for k, v in params.items(): args.append(k) args.append(tvm.nd.array(v)) return _save_tensors(*args)
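# A minimal usage sketch for DebugResult. The one-node graph below is made up,
# but it matches the fields _parse_graph() expects (nodes, attrs.shape,
# attrs.dltype); dumping output tensors additionally requires a TVM runtime
# that has populated _output_tensor_list and _time_list.
import tempfile

graph_json = json.dumps({
    "nodes": [{"name": "x", "inputs": []}],
    "attrs": {"shape": [1, [[1, 3]]], "dltype": [1, ["float32"]]},
})
debug_result = DebugResult(graph_json, tempfile.mkdtemp())
debug_result.display_debug_result()   # prints the (empty) per-op timing table
# debug_result.dump_output_tensor()   # needs recorded outputs + a tvm runtime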
import sys
import os
import pandas as pd
import multiprocessing as mp
import csv

# this code is written for the merged file with combined pval & fdr. although it
# could have been written for the file without comb fisher and fdr, it is easier
# to have the output with the comb pval and fdr and use what we need rather than
# have to search them in the merged file with comb pval and fdr or run the next
# (create network) command to calc the combined pval and fdr.

# cd /nfs3/PHARM/Morgun_Lab/richrr/Cervical_Cancer/analysis/merged/corr/gexpress/stage-ltest_corr/p1

"""
SGE_Batch -c "python ~/Morgun_Lab/richrr/scripts/python/merging-python-script.py merged_gexp_sp_corr_p1_FolChMedian_merged-parallel-output.csv-comb-pval-output.csv 1 2" -m 150G -F 100G -r log_merge-py_1 -q biomed -M rodrrich@oregonstate.edu -P 8

SGE_Batch -c "python ~/Morgun_Lab/richrr/scripts/python/merging-python-script.py merged_gexp_sp_corr_p1_FolChMedian_merged-parallel-output.csv-comb-pval-output.csv 2 2" -m 150G -F 100G -r log_merge-py_2 -q biomed -M rodrrich@oregonstate.edu -P 8

SGE_Batch -c "python ~/Morgun_Lab/richrr/scripts/python/merging-python-script.py merged_gexp_sp_corr_p1_FolChMedian_merged-parallel-output.csv-comb-pval-output.csv 3 2" -m 150G -F 100G -r log_merge-py_3 -q biomed -M rodrrich@oregonstate.edu -P 8

SGE_Batch -c "python ~/Morgun_Lab/richrr/scripts/python/merging-python-script.py merged_gexp_sp_corr_p1_FolChMedian_merged-parallel-output.csv-comb-pval-output.csv 4 2" -m 150G -F 100G -r log_merge-py_4 -q biomed -M rodrrich@oregonstate.edu -P 8
"""

infile = sys.argv[1]
analysis = "Analys " + sys.argv[2] + " "
numb_datasets = int(sys.argv[3])

# get the header line from the big file and decide which (analysis) columns to use
header_line = ''
with open(infile, 'r') as f:
    header_line = f.readline().strip()

selcted_cols = [i for i, s in enumerate(header_line.split(',')) if analysis in s]
#[s for s in header_line.split(',') if analysis in s]

# get the lowest and highest and make a range out of it;
# this way you get the combined pval and combined fdr cols
selcted_cols = range(min(selcted_cols), max(selcted_cols) + 1)
selcted_cols.insert(0, 0)  # explicitly adding the row id col
print selcted_cols

header_for_print = [header_line.split(',')[i] for i in selcted_cols]
print header_for_print


def process(df):
    res = list()
    for row in df.itertuples():
        #print row
        corrs = row[1:numb_datasets + 1]
        corrs_flag = 0

        # write some condition to check for NA
        pos = sum(float(num) > 0 for num in corrs)
        neg = sum(float(num) < 0 for num in corrs)
        #print pos, neg

        if len(corrs) == pos and not len(corrs) == neg:
            #print "pos"
            corrs_flag = 1
        if len(corrs) == neg and not len(corrs) == pos:
            #print "neg"
            corrs_flag = 1

        if corrs_flag == 1:
            res.append(row)
    return res


counter = 0
pool = mp.Pool(30)  # use 30 processes
funclist = []

# http://gouthamanbalaraman.com/blog/distributed-processing-pandas.html
#for chunck_df in pd.read_csv(infile, chunksize=100, usecols=range(5), index_col=0):
for chunck_df in pd.read_csv(infile, chunksize=100000, usecols=selcted_cols, index_col=0):
    counter = counter + 1
    print counter
    #print chunck_df
    # process each data frame
    f = pool.apply_async(process, [chunck_df])
    funclist.append(f)

#result = list()
OUTfile = infile + analysis.replace(" ", "_") + '-same-dir-corrs.csv'
with open(OUTfile, 'w') as of:
    writer = csv.writer(of, delimiter=',', lineterminator='\n')
    writer.writerow(header_for_print)
    for f in funclist:
        csvd = f.get(timeout=10000)  # timeout in 10000 seconds
        #result.extend(csvd)
        writer.writerows(csvd)
#print result

# quick and dirty command to get the first column of the file:
cutcmd = "cut -d, -f 1 " + OUTfile + " > " + OUTfile + "-ids.csv"
os.system(cutcmd)

print "Done"

"""
# sequential
corrs_dict = dict()  # satisfies corr direction
counter = 0
# with open(in_filename) as in_f, open(out_filename, 'w') as out_f
with open(infile) as f:
    for line in f:
        counter = counter + 1
        line = line.strip()
        print line
        contents = line.split(",")
        corrs = contents[1:numb_datasets+1]
        corrs_flag = 0

        if counter == 1:
            # move to next iteration
            of.write(line)
            continue

        # write some condition to check for NA
        pos = sum(float(num) > 0 for num in corrs)
        neg = sum(float(num) < 0 for num in corrs)
        #print pos, neg

        if len(corrs) == pos and not len(corrs) == neg:
            print "pos"
            corrs_flag = 1
        if len(corrs) == neg and not len(corrs) == pos:
            print "neg"
            corrs_flag = 1

        if corrs_flag == 1:
            corrs_dict[contents[0]] = contents[1:]

        '''
        if corrs_flag == 0:
            # no point in analyzing pvals, move to next iteration
            continue

        pvals = contents[numb_datasets+1:]
        print pvals
        pvals_flag = 0

        # write some condition to check for NA
        sig = sum(float(num) < 1 for num in pvals)
        #print sig
        if len(corrs) == sig:
            print "sig"
            pvals_flag = 1

        if corrs_flag == 1 and pvals_flag == 1:
            corrs_dict[contents[0]] = contents[1:]

        if counter == 5:
            sys.exit(0)
        '''
print corrs_dict
"""
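# A toy demonstration of the same-direction filter implemented by process()
# above: only rows whose correlation columns all share one sign are kept.
# The column and index names here are made up; process() reads the
# module-level numb_datasets, set explicitly for the sketch.
numb_datasets = 2
toy = pd.DataFrame({'corr_ds1': [0.8, -0.5, 0.3],
                    'corr_ds2': [0.6, -0.7, -0.2]},
                   index=['gene_a', 'gene_b', 'gene_c'])
# gene_a (both positive) and gene_b (both negative) pass; gene_c (mixed) is dropped.
print([row[0] for row in process(toy)])  # -> ['gene_a', 'gene_b']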
#!/usr/bin/python

import os
import urllib
import socket

from gi.repository import GObject, Nautilus


class ownCloudExtension(GObject.GObject, Nautilus.ColumnProvider, Nautilus.InfoProvider):

    nautilusVFSFile_table = {}
    registered_paths = {}
    remainder = ''
    connected = False
    watch_id = 0

    def __init__(self):
        self.connectToOwnCloud()
        if not self.connected:
            # try again in 5 seconds - attention, logic inverted!
            GObject.timeout_add(5000, self.connectToOwnCloud)

    def port(self):
        return 34001  # Fixme, read from config file.

    def connectToOwnCloud(self):
        try:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.sock.connect(("localhost", self.port()))
            self.sock.settimeout(5)
            self.connected = True
            self.watch_id = GObject.io_add_watch(self.sock, GObject.IO_IN, self.handle_notify)
        except:
            print "Connect could not be established, try again later!"
            self.sock.close()
        return not self.connected

    def sendCommand(self, cmd):
        if self.connected:
            try:
                self.sock.send(cmd)
            except:
                print "Sending failed."
                GObject.source_remove(self.watch_id)
                self.connected = False
                GObject.timeout_add(5000, self.connectToOwnCloud)

    def find_item_for_file(self, path):
        if path in self.nautilusVFSFile_table:
            return self.nautilusVFSFile_table[path]
        else:
            return None

    def askForOverlay(self, file):
        if os.path.isdir(file):
            folderStatus = self.sendCommand("RETRIEVE_FOLDER_STATUS:" + file + "\n")
        if os.path.isfile(file):
            fileStatus = self.sendCommand("RETRIEVE_FILE_STATUS:" + file + "\n")

    def invalidate_items_underneath(self, path):
        update_items = []
        for p in self.nautilusVFSFile_table:
            if p == path or p.startswith(path):
                item = self.nautilusVFSFile_table[p]
                update_items.append(item)

        for item in update_items:
            item.invalidate_extension_info()
            # self.update_file_info(item)

    # Handles a single line of server response and sets the emblem
    def handle_server_response(self, l):
        Emblems = {'OK': 'oC_ok',
                   'SYNC': 'oC_sync',
                   'NEW': 'oC_sync',
                   'IGNORE': 'oC_warn',
                   'ERROR': 'oC_error',
                   'OK+SWM': 'oC_ok_shared',
                   'SYNC+SWM': 'oC_sync_shared',
                   'NEW+SWM': 'oC_sync_shared',
                   'IGNORE+SWM': 'oC_warn_shared',
                   'ERROR+SWM': 'oC_error_shared',
                   'NOP': 'oC_error'}

        print "Server response: " + l
        parts = l.split(':')
        if len(parts) > 0:
            action = parts[0]

            # file = parts[1]
            # print "Action for " + file + ": " + parts[0]
            if action == 'STATUS':
                emblem = Emblems[parts[1]]
                if emblem:
                    item = self.find_item_for_file(parts[2])
                    if item:
                        item.add_emblem(emblem)
            elif action == 'UPDATE_VIEW':
                # Search all items underneath this path and invalidate them
                if parts[1] in self.registered_paths:
                    self.invalidate_items_underneath(parts[1])
            elif action == 'REGISTER_PATH':
                self.registered_paths[parts[1]] = 1
                self.invalidate_items_underneath(parts[1])
            elif action == 'UNREGISTER_PATH':
                del self.registered_paths[parts[1]]
                self.invalidate_items_underneath(parts[1])

                # check if there are no paths registered any more; if so, it
                # usually means that mirall went away. Try to reconnect.
                if not self.registered_paths:
                    self.sock.close()
                    self.connected = False
                    GObject.source_remove(self.watch_id)
                    GObject.timeout_add(5000, self.connectToOwnCloud)
            else:
                # print "We got unknown action " + action
                pass

    # notify is the raw answer from the socket
    def handle_notify(self, source, condition):
        data = source.recv(1024)
        # prepend the remaining data from last call
        if len(self.remainder) > 0:
            data = self.remainder + data
            self.remainder = ''

        if len(data) > 0:
            # remember the remainder for next round
            lastNL = data.rfind('\n')
            if lastNL > -1 and lastNL < len(data):
                self.remainder = data[lastNL + 1:]
                data = data[:lastNL]

            for l in data.split('\n'):
                self.handle_server_response(l)
        else:
            return False

        return True  # run again

    def get_local_path(self, path):
        return path.replace("file://", "")

    def update_file_info(self, item):
        if item.get_uri_scheme() != 'file':
            return

        filename = urllib.unquote(item.get_uri()[7:])
        if item.is_directory():
            filename += '/'

        for reg_path in self.registered_paths:
            if filename.startswith(reg_path):
                self.nautilusVFSFile_table[filename] = item

                # item.add_string_attribute('share_state', "share state")
                self.askForOverlay(filename)
                break
        else:
            print "Not in scope:" + filename
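# A minimal sketch of a standalone client exercising the same line-based
# protocol the extension speaks. It assumes a desktop client is listening on
# localhost:34001; the file path is hypothetical, and the expected reply format
# follows the STATUS:<state>:<file> parsing in handle_server_response above.
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 34001))
sock.send("RETRIEVE_FILE_STATUS:/home/user/ownCloud/file.txt\n")
print(sock.recv(1024))  # e.g. "STATUS:OK:/home/user/ownCloud/file.txt"
sock.close()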
""" PyroScope - Controller "torrent". Copyright (c) 2009 The PyroScope Project <pyroscope.project@gmail.com> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICU
LAR PURPOSE. See the GNU General Public License for more details. You should have received a co
py of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ import logging from pylons import request, response, session, tmpl_context as c from pylons.controllers.util import abort, redirect_to from pyroscope.lib.base import render, PageController from pyroscope.engines import rtorrent log = logging.getLogger(__name__) class TorrentController(PageController): def __init__(self): self.proxy = rtorrent.Proxy() def index(self): # Redirect to view page return redirect_to(action="view") #, id="HelpIndex") def view(self, id): c.hash = id c.name = id c.torrents = list(rtorrent.View(self.proxy, "incomplete").items()) # Return a rendered template return render("pages/torrent.mako")
# 24.05.2007, c
# last revision: 25.02.2008
from sfepy import data_dir
from sfepy.fem.periodic import *

filename_mesh = data_dir + '/meshes/2d/special/channels_symm944t.mesh'

if 'symm' in filename_mesh:
    region_1 = {
        'name' : 'Y1',
        'select' : """elements of group 3""",
    }

    region_2 = {
        'name' : 'Y2',
        'select' : """elements of group 4 +e elements of group 6
                      +e elements of group 8""",
    }

    region_4 = {
        'name' : 'Y1Y2',
        'select' : """r.Y1 +e r.Y2""",
    }

    region_5 = {
        'name' : 'Walls',
        'select' : """r.EBCGamma1 +n r.EBCGamma2""",
    }

    region_310 = {
        'name' : 'EBCGamma1',
        'select' : """(elements of group 1 *n elements of group 3)
                      +n
                      (elements of group 2 *n elements of group 3)
                      """,
    }

    region_320 = {
        'name' : 'EBCGamma2',
        'select' : """(elements of group 5 *n elements of group 4)
                      +n
                      (elements of group 1 *n elements of group 4)
                      +n
                      (elements of group 7 *n elements of group 6)
                      +n
                      (elements of group 2 *n elements of group 6)
                      +n
                      (elements of group 9 *n elements of group 8)
                      +n
                      (elements of group 2 *n elements of group 8)
                      """,
    }

w2 = 0.499

# Sides.
region_20 = {
    'name' : 'Left',
    'select' : 'nodes in (x < %.3f)' % -w2,
}
region_21 = {
    'name' : 'Right',
    'select' : 'nodes in (x > %.3f)' % w2,
}
region_22 = {
    'name' : 'Bottom',
    'select' : 'nodes in (y < %.3f)' % -w2,
}
region_23 = {
    'name' : 'Top',
    'select' : 'nodes in (y > %.3f)' % w2,
}

field_1 = {
    'name' : '2_velocity',
    'dtype' : 'real',
    'shape' : (2,),
    'region' : 'Y1Y2',
    'approx_order' : 2,
}

field_2 = {
    'name' : 'pressure',
    'dtype' : 'real',
    'shape' : (1,),
    'region' : 'Y1Y2',
    'approx_order' : 1,
}

variable_1 = {
    'name' : 'u',
    'kind' : 'unknown field',
    'field' : '2_velocity',
    'order' : 0,
}
variable_2 = {
    'name' : 'v',
    'kind' : 'test field',
    'field' : '2_velocity',
    'dual' : 'u',
}
variable_3 = {
    'name' : 'p',
    'kind' : 'unknown field',
    'field' : 'pressure',
    'order' : 1,
}
variable_4 = {
    'name' : 'q',
    'kind' : 'test field',
    'field' : 'pressure',
    'dual' : 'p',
}

integral_1 = {
    'name' : 'i1',
    'kind' : 'v',
    'quadrature' : 'gauss_o2_d2',
}

equations = {
    'balance' :
    """dw_div_grad.i1.Y1Y2( fluid.viscosity, v, u )
       - dw_stokes.i1.Y1Y2( v, p ) = 0""",
    'incompressibility' :
    """dw_stokes.i1.Y1Y2( u, q ) = 0""",
}

material_1 = {
    'name' : 'fluid',
    'values' : {
        'viscosity' : 1.0,
        'density' : 1e0,
    },
}

ebc_1 = {
    'name' : 'walls',
    'region' : 'Walls',
    'dofs' : {'u.all' : 0.0},
}
ebc_2 = {
    'name' : 'top_velocity',
    'region' : 'Top',
    'dofs' : {'u.1' : -1.0, 'u.0' : 0.0},
}
ebc_10 = {
    'name' : 'bottom_pressure',
    'region' : 'Bottom',
    'dofs' : {'p.0' : 0.0},
}

epbc_1 = {
    'name' : 'u_rl',
    'region' : ['Left', 'Right'],
    'dofs' : {'u.all' : 'u.all', 'p.0' : 'p.0'},
    'match' : 'match_y_line',
}

functions = {
    'match_y_line' : (match_y_line,),
}

##
# FE assembling parameters.
fe = {
    'chunk_size' : 100,
    'cache_override' : True,
}

solver_0 = {
    'name' : 'ls',
    'kind' : 'ls.scipy_direct',
}

solver_1 = {
    'name' : 'newton',
    'kind' : 'nls.newton',
    'i_max' : 2,
    'eps_a' : 1e-8,
    'eps_r' : 1e-2,
    'macheps' : 1e-16,
    'lin_red' : 1e-2,  # Linear system error < (eps_a * lin_red).
    'ls_red' : 0.1,
    'ls_red_warp' : 0.001,
    'ls_on' : 1.1,
    'ls_min' : 1e-5,
    'check' : 0,
    'delta' : 1e-6,
    'is_plot' : False,
    'problem' : 'nonlinear',  # 'nonlinear' or 'linear' (ignore i_max)
}

save_format = 'hdf5'  # 'hdf5' or 'vtk'
#!/usr/bin/env python

from datetime import date
import os
import re

SRC_DIR = 'src/main/java/'
ABSTRACT = re.compile(r'public abstract class Abstract')
TYPE = re.compile(r'class [A-Za-z0-9]+(<[^>]+?(?: extends ([A-Za-z0-9_]+))?>)?')
TARGET = re.compile(r'\s[A-Z][A-Za-z0-9_]+<[A-Z][A-Za-z0-9_]+(?:<.+?>)?, (([A-Z][A-Za-z0-9_]+).*?)(<.+?>)?(?:, [A-Z])*> {')
IMPORT = re.compile(r'import (?:static )?((?:com\.google\.)?android\..*?);')
ASSERTIONS = 'Assertions.java'

projects = []
for candidate in filter(os.path.isdir, os.listdir('.')):
    if candidate.startswith('assertj-android'):
        projects.append(candidate)
print('Projects: %s\n' % projects)


def _find_assertions(path):
    for root, dirs, files in os.walk(path):
        if ASSERTIONS in files:
            return os.path.join(root, ASSERTIONS)
    raise Exception('Could not locate Assertions.java in %s.' % path)


for project in projects:
    src_dir = os.path.join(project, SRC_DIR)
    assertions_file = _find_assertions(src_dir)
    assertions_dir = os.path.dirname(assertions_file)
    classes_package = assertions_dir[len(src_dir):].replace(os.sep, '.')

    print('\n' * 3)
    print(project)
    print('')
    print('src_dir = %s' % src_dir)
    print('assertions_file = %s' % assertions_file)
    print('assertions_dir = %s' % assertions_dir)
    print('classes_package = %s' % classes_package)
    print('')

    assertions = []
    for root, dirs, files in os.walk(assertions_dir):
        for f in files:
            if not f.endswith('Assert.java'):
                continue

            print('-' * 80)

            local_package = root[len(src_dir):].replace(os.sep, '.')
            package = '%s.%s' % (local_package, f[:-5])
            print('package : %s' % package)

            with open(os.path.join(root, f)) as j:
                java = j.read()

            if ABSTRACT.search(java) is not None:
                print('SKIP (abstract)')
                continue  # Abstract class.

            target_match = TARGET.search(java)
            import_type = target_match.group(2)
            target_type = target_match.group(1)
            generics = target_match.group(3)
            print('import type: %s' % import_type)
            print('target type: %s' % target_type)
            print('generics   : %s' % generics)

            for match in IMPORT.finditer(java):
                if match.group(1).endswith(import_type):
                    import_package = match.group(1)
                    break
            else:
                raise Exception('Could not find target package for %s' % import_type)

            type_match = TYPE.search(java)
            bounds_type = type_match.group(1)
            bounds_ext = type_match.group(2)
            if generics:
                print('bounds type: %s' % bounds_type)
                print('bounds ext : %s' % bounds_ext)
                if bounds_ext:
                    for match in IMPORT.finditer(java):
                        if match.group(1).endswith(bounds_ext):
                            bounds_type = bounds_type.replace(bounds_ext, match.group(1))
                            break
                    else:
                        raise Exception('Could not find target package for %s' % bounds_ext)
                    print('bounds fqcn: %s' % bounds_type)

            target_package = import_package.replace(import_type, target_type)
            print('import pkg : %s' % import_package)
            print('target pkg : %s' % target_package)

            assertions.append(
                (package, target_package, bounds_type or '', generics or '')
            )

    print('-' * 80)

    with open(assertions_file, 'w') as out:
        out.write('// Copyright %s Square, Inc.\n' % date.today().year)
        out.write('//\n')
        out.write('// This class is generated. Do not modify directly!\n')
        out.write('package %s;\n\n' % classes_package)
        out.write('/** Assertions for testing Android classes. */\n')
        out.write('@SuppressWarnings("deprecation")\n')
        out.write('public final class Assertions {')

        for package, target_package, bounds_type, generic_keys in sorted(assertions, key=lambda x: x[0]):
            out.write('\n')
            out.write('  public static %s%s%s assertThat(\n' % (bounds_type + ' ' if bounds_type else '', package, generic_keys))
            out.write('      %s%s actual) {\n' % (target_package, generic_keys))
            out.write('    return new %s%s(actual);\n' % (package, '<>' if generic_keys else ''))
            out.write('  }\n')

        out.write('\n')
        out.write('  private Assertions() {\n')
        out.write('    throw new AssertionError("No instances.");\n')
        out.write('  }\n')
        out.write('}\n')

print('\nNew Assertions.java files written!\n')
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-01 09:32
from __future__ import unicode_literals

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [("modelview", "0009_auto_20160301_1030")]

    operations = [
        migrations.AlterField(
            model_name="energyscenario",
            name="tools_models",
            field=models.ForeignKey(
                help_text="Which model(s) and other tools have been used?",
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="modelview.Energymodel",
                verbose_name="Tools",
            ),
        )
    ]
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from heat.db import api as db_api
from heat.engine import dependencies
from heat.engine import resource
from heat.engine import scheduler

from heat.openstack.common import log as logging
from heat.openstack.common.gettextutils import _

logger = logging.getLogger(__name__)


class StackUpdate(object):
    """
    A Task to perform the update of an existing stack to a new template.
    """

    def __init__(self, existing_stack, new_stack, previous_stack, rollback=False):
        """Initialise with the existing stack and the new stack."""
        self.existing_stack = existing_stack
        self.new_stack = new_stack
        self.previous_stack = previous_stack

        self.rollback = rollback

        self.existing_snippets = dict((n, r.parsed_template())
                                      for n, r in self.existing_stack.items())

    def __repr__(self):
        if self.rollback:
            return '%s Rollback' % str(self.existing_stack)
        else:
            return '%s Update' % str(self.existing_stack)

    @scheduler.wrappertask
    def __call__(self):
        """Return a co-routine that updates the stack."""

        cleanup_prev = scheduler.DependencyTaskGroup(
            self.previous_stack.dependencies,
            self._remove_backup_resource,
            reverse=True)

        update = scheduler.DependencyTaskGroup(self.dependencies(),
                                               self._resource_update)

        if not self.rollback:
            yield cleanup_prev()

        try:
            yield update()
        finally:
            self.previous_stack.reset_dependencies()

    def _resource_update(self, res):
        if res.name in self.new_stack and self.new_stack[res.name] is res:
            return self._process_new_resource_update(res)
        else:
            return self._process_existing_resource_update(res)

    @scheduler.wrappertask
    def _remove_backup_resource(self, prev_res):
        if prev_res.state not in ((prev_res.INIT, prev_res.COMPLETE),
                                  (prev_res.DELETE, prev_res.COMPLETE)):
            logger.debug(_("Deleting backup resource %s") % prev_res.name)
            yield prev_res.destroy()

    @staticmethod
    def _exchange_stacks(existing_res, prev_res):
        db_api.resource_exchange_stacks(existing_res.stack.context,
                                        existing_res.id, prev_res.id)
        prev_stack, existing_stack = prev_res.stack, existing_res.stack
        prev_stack[existing_res.name] = existing_res
        existing_stack[prev_res.name] = prev_res

    @scheduler.wrappertask
    def _create_resource(self, new_res):
        res_name = new_res.name

        # Clean up previous resource
        if res_name in self.previous_stack:
            prev_res = self.previous_stack[res_name]

            if prev_res.state not in ((prev_res.INIT, prev_res.COMPLETE),
                                      (prev_res.DELETE, prev_res.COMPLETE)):
                # Swap in the backup resource if it is in a valid state,
                # instead of creating a new resource
                if prev_res.status == prev_res.COMPLETE:
                    logger.debug(_("Swapping in backup Resource %s") % res_name)
                    self._exchange_stacks(self.existing_stack[res_name],
                                          prev_res)
                    return

                logger.debug(_("Deleting backup Resource %s") % res_name)
                yield prev_res.destroy()

        # Back up existing resource
        if res_name in self.existing_stack:
            logger.debug(_("Backing up existing Resource %s") % res_name)
            existing_res = self.existing_stack[res_name]
            self.previous_stack[res_name] = existing_res
            existing_res.state_set(existing_res.UPDATE, existing_res.COMPLETE)

        self.existing_stack[res_name] = new_res

        yield new_res.create()

    @scheduler.wrappertask
    def _process_new_resource_update(self, new_res):
        res_name = new_res.name

        if res_name in self.existing_stack:
            existing_res = self.existing_stack[res_name]
            try:
                yield self._update_in_place(existing_res, new_res)
            except resource.UpdateReplace:
                pass
            else:
                logger.info(_("Resource %(res_name)s for stack %(stack_name)s"
                              " updated") % {
                                  'res_name': res_name,
                                  'stack_name': self.existing_stack.name})
                return

        yield self._create_resource(new_res)

    def _update_in_place(self, existing_res, new_res):
        # Note the new resource snippet is resolved in the context
        # of the existing stack (which is the stack being updated)
        existing_snippet = self.existing_snippets[existing_res.name]
        new_snippet = self.existing_stack.resolve_runtime_data(new_res.t)
        prev_res = self.previous_stack.get(new_res.name)

        return existing_res.update(new_snippet, existing_snippet,
                                   prev_resource=prev_res)

    @scheduler.wrappertask
    def _process_existing_resource_update(self, existing_res):
        res_name = existing_res.name

        if res_name in self.previous_stack:
            yield self._remove_backup_resource(self.previous_stack[res_name])

        if res_name in self.new_stack:
            new_res = self.new_stack[res_name]
            if new_res.state == (new_res.INIT, new_res.COMPLETE):
                # Already updated in-place
                return

        if existing_res.stack is not self.previous_stack:
            yield existing_res.destroy()

        if res_name not in self.new_stack:
            del self.existing_stack[res_name]

    def dependencies(self):
        '''
        Return a Dependencies object representing the dependencies between
        update operations to move from an existing stack definition to a
        new one.
        '''
        existing_deps = self.existing_stack.dependencies
        new_deps = self.new_stack.dependencies

        def edges():
            # Create/update the new stack's resources in create order
            for e in new_deps.graph().edges():
                yield e
            # Destroy/cleanup the old stack's resources in delete order
            for e in existing_deps.graph(reverse=True).edges():
                yield e
            # Don't cleanup old resources until after they have been replaced
            for name, res in self.existing_stack.iteritems():
                if name in self.new_stack:
                    yield (res, self.new_stack[name])

        return dependencies.Dependencies(edges())
""" USA-specific Form helpers """ from django.forms import ValidationError from django.forms.fields import Field, RegexField, Select, EMPTY_VALUES from django.utils.encoding import smart_unicode from django.utils.translation import ugettext_lazy as _ import re phone_digits_re = re.compile(r'^(?:1-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$') ssn_re = re.compile(r"^(?P<area>\d{3})[-\ ]?(?P<group>\d{2})[-\ ]?(?P<serial>\d{4})$") class USZipCodeField(RegexField): default_error_messages = { 'invalid': _('Enter a zip code in the format XXXXX or XXXXX-XXXX.'), } def __init__(self, *args, **kwargs): super(USZipCodeField, self).__init__(r'^\d{5}(?:-\d{4})?$', max_length=None, min_length=None, *args, **kwargs) class USPhoneNumberField(Field): default_error_messages = { 'invalid': u'Phone numbers must be in XXX-XXX-XXXX format.', } def clean(self, value): super(USPhoneNumberField, self).clean(value) if value in EMPTY_VALUES: return u'' value = re.sub('(\(|\)|\s+)', '', smart_unicode(value)) m = phone_digits_re.search(value) if m: return u'%s-%s-%s' % (m.group(1), m.group(2), m.group(3)) raise ValidationError(self.error_messages['invalid']) class USSocialSecurityNumberField(Field): """ A United States Social Security number. Checks the following rules to determine whether the number is valid: * Conforms to the XXX-XX-XXXX format. * No group consists entirely of zeroes. * The leading group is not "666" (block "666" will never be allocated). * The number is not
in the promotional block 987-65-43
20 through 987-65-4329, which are permanently invalid. * The number is not one known to be invalid due to otherwise widespread promotional use or distribution (e.g., the Woolworth's number or the 1962 promotional number). """ default_error_messages = { 'invalid': _('Enter a valid U.S. Social Security number in XXX-XX-XXXX format.'), } def clean(self, value): super(USSocialSecurityNumberField, self).clean(value) if value in EMPTY_VALUES: return u'' match = re.match(ssn_re, value) if not match: raise ValidationError(self.error_messages['invalid']) area, group, serial = match.groupdict()['area'], match.groupdict()['group'], match.groupdict()['serial'] # First pass: no blocks of all zeroes. if area == '000' or \ group == '00' or \ serial == '0000': raise ValidationError(self.error_messages['invalid']) # Second pass: promotional and otherwise permanently invalid numbers. if area == '666' or \ (area == '987' and group == '65' and 4320 <= int(serial) <= 4329) or \ value == '078-05-1120' or \ value == '219-09-9999': raise ValidationError(self.error_messages['invalid']) return u'%s-%s-%s' % (area, group, serial) class USStateField(Field): """ A form field that validates its input is a U.S. state name or abbreviation. It normalizes the input to the standard two-leter postal service abbreviation for the given state. """ default_error_messages = { 'invalid': u'Enter a U.S. state or territory.', } def clean(self, value): from us_states import STATES_NORMALIZED super(USStateField, self).clean(value) if value in EMPTY_VALUES: return u'' try: value = value.strip().lower() except AttributeError: pass else: try: return STATES_NORMALIZED[value.strip().lower()].decode('ascii') except KeyError: pass raise ValidationError(self.error_messages['invalid']) class USStateSelect(Select): """ A Select widget that uses a list of U.S. states/territories as its choices. """ def __init__(self, attrs=None): from us_states import STATE_CHOICES super(USStateSelect, self).__init__(attrs, choices=STATE_CHOICES)
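# A quick usage sketch for the phone and SSN fields, assuming a configured
# Django environment; the input values are illustrative:
phone = USPhoneNumberField()
print(phone.clean(u'(415) 555.1212'))   # -> u'415-555-1212'

ssn = USSocialSecurityNumberField()
print(ssn.clean(u'123 45 6789'))        # -> u'123-45-6789'
# ssn.clean(u'666-12-3456') would raise ValidationError (blocked "666" area).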
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-19 18:47
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('judge', '0018_django_1_9'),
    ]

    operations = [
        migrations.AddField(
            model_name='blogpost',
            name='og_image',
            field=models.CharField(default=b'', help_text='', max_length=150, verbose_name=b'OpenGraph image'),
        ),
        migrations.AddField(
            model_name='contest',
            name='og_image',
            field=models.CharField(default=b'', help_text='', max_length=150, verbose_name=b'OpenGraph image'),
        ),
    ]
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
#
# Astropy documentation build configuration file.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this file.
#
# All configuration values have a default. Some values are defined in
# the global Astropy configuration which is loaded here before anything else.
# See astropy.sphinx.conf for which values are set there.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('..'))
# IMPORTANT: the above commented section was generated by sphinx-quickstart, but
# is *NOT* appropriate for astropy or Astropy affiliated packages. It is left
# commented out with this explanation to make it clear why this should not be
# done. If the sys.path entry above is added, when the astropy.sphinx.conf
# import occurs, it will import the *source* version of astropy instead of the
# version installed (if invoked as "make html" or directly with sphinx), or the
# version in the build directory (if "python setup.py build_sphinx" is used).
# Thus, any C-extensions that are needed to build the documentation will *not*
# be accessible, and the documentation will not build correctly.

import datetime
import os
import sys

try:
    from sphinx_astropy.conf.v1 import *  # noqa
except ImportError:
    print('ERROR: the documentation requires the sphinx-astropy package to be installed')
    sys.exit(1)

# Get configuration information from setup.cfg
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser
conf = ConfigParser()

conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))

# -- General configuration ----------------------------------------------------

# By default, highlight as Python 3.
highlight_language = 'python3'

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'

# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("x.y.z")` here.
# check_sphinx_version("1.2.1")

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns.append('_templates')

# This is added to the end of RST files - a good place to put substitutions to
# be used globally.
rst_epilog += """
"""

# -- Project information ------------------------------------------------------

# This does not *have* to match the package name, but typically does
project = setup_cfg['name']
author = setup_cfg['author']
copyright = '{0}, {1}'.format(
    datetime.datetime.now().year, setup_cfg['author'])

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.

__import__(project)
package = sys.modules[project]

ver = package.__version__
version = '.'.join(ver.split('.'))[:5]
release = ver

# -- Options for HTML output --------------------------------------------------

# A NOTE ON HTML THEMES
# The global astropy configuration uses a custom theme, 'bootstrap-astropy',
# which is installed along with astropy. A different theme can be used or
# the options for this theme can be modified by overriding some of the
# variables set in the global configuration. The variables set in the
# global configuration are listed below, commented out.

# Add any paths that contain custom themes here, relative to this directory.
# To use a different custom theme, add the directory containing the theme.
#html_theme_path = []

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes. To override the custom theme, set this to the
# name of a builtin theme or the name of a custom theme in html_theme_path.
#html_theme = 'bootstrap-ccdproc'

html_theme_options = {
    'logotext1': 'ccd',    # white,  semi-bold
    'logotext2': 'proc',   # orange, light
    'logotext3': ':docs'   # white,  light
}

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = ''

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = ''
from os.path import join
html_favicon = join('_static', 'ccd_proc.ico')

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = ''

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
html_title = '{0} v{1}'.format(project, release)

# Output file base name for HTML help builder.
htmlhelp_basename = project + 'doc'

# Static files to copy after template files
html_static_path = ['_static']
html_style = 'ccdproc.css'

# -- Options for LaTeX output -------------------------------------------------

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [('index', project + '.tex', project + u' Documentation',
                    author, 'manual')]

# -- Options for manual page output -------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), project + u' Documentation',
              [author], 1)]

# -- Options for the edit_on_github extension ----------------------------------

if eval(setup_cfg.get('edit_on_github')):
    extensions += ['sphinx_astropy.ext.edit_on_github']

    versionmod = __import__(setup_cfg['name'] + '.version')
    edit_on_github_project = setup_cfg['github_project']
    if versionmod.version.release:
        edit_on_github_branch = "v" + versionmod.version.version
    else:
        edit_on_github_branch = "master"

    edit_on_github_source_root = ""
    edit_on_github_doc_root = "docs"

# -- Resolving issue number to links in changelog -----------------------------
github_issues_url = 'https://github.com/astropy/ccdproc/issues/'

# -- Turn on nitpicky mode for sphinx (to warn about references not found) ----
#
# nitpicky = True
# nitpick_ignore = []
#
# for line in open('nitpick-exceptions'):
#     if line.strip() == "" or line.startswith("#"):
#         continue
#     dtype, target = line.split(None, 1)
#     target = target.strip()
#     nitpick_ignore.append((dtype, six.u(target)))
from __future__ import absolute_import, division, print_function

import ast

from jaspyx.ast_util import ast_load, ast_call
from jaspyx.visitor import BaseVisitor


class BinOp(BaseVisitor):
    def visit_BinOp(self, node):
        attr = getattr(self, 'BinOp_%s' % node.op.__class__.__name__, None)
        attr(node.left, node.right)

    # Generate one BinOp_<Op> handler per simple operator at class-definition time.
    for key, value in {
        'Add': '+',
        'Sub': '-',
        'Mult': '*',
        'Div': '/',
        'Mod': '%',
        'BitAnd': '&',
        'BitOr': '|',
        'BitXor': '^',
        'LShift': '<<',
        'RShift': '>>',
    }.items():
        def gen_op(op):
            def f_op(self, left, right):
                self.group([left, op, right])
            return f_op

        exec('BinOp_%s = gen_op("%s")' % (key, value))

    def BinOp_Pow(self, left, right):
        pow_func = ast_load('Math.pow')
        self.visit(ast_call(pow_func, left, right))

    def BinOp_FloorDiv(self, left, right):
        floor = ast_load('Math.floor')
        self.visit(ast_call(floor, ast.BinOp(left, ast.Div(), right)))
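# The exec loop above stamps out one handler per arithmetic/bitwise operator;
# each generated method is equivalent to writing this by hand (shown for
# ast.Add; this expanded form is a sketch, not part of the original module):
class BinOpExpanded(BinOp):
    def BinOp_Add(self, left, right):
        self.group([left, '+', right])  # emits "left + right" in the output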
age": "sql" } }, { "name": "dep", "class": "org.apache.zeppelin.spark.DepInterpreter", "defaultInterpreter": false, "editor": { "language": "scala" } }, { "name": "pyspark", "class": "org.apache.zeppelin.spark.PySparkInterpreter", "defaultInterpreter": false, "editor": { "language": "python" } }, { "name": "r", "class": "org.apache.zeppelin.spark.SparkRInterpreter", "defaultInterpreter": false, "editor": { "language": "r" } } ], "dependencies": [], "option": { "remote": true, "port": -1, "perNote": "shared", "perUser": "shared", "isExistingProcess": false, "setPermission": false, "users": [], "isUserImpersonate": false } }, "2C4U48MY3_spark2": { "id": "2C4U48MY3_spark2", "name": "spark2", "group": "spark", "properties": { "spark.executor.memory": "", "args": "", "zeppelin.spark.printREPLOutput": "true", "spark.cores.max": "", "zeppelin.dep.additionalRemoteRepository": "spark-packages,http://dl.bintray.com/spark-packages/maven,false;", "zeppelin.spark.importImplicit": "true", "zeppelin.spark.sql.stacktrace": "false", "zeppelin.spark.concurrentSQL": "false", "zeppelin.spark.useHiveContext": "true", "zeppelin.pyspark.python": "python", "zeppelin.dep.localrepo": "local-repo", "zeppelin.R.knitr": "true", "zeppelin.spark.maxResult": "1000", "master": "local[*]", "spark.app.name": "Zeppelin", "zeppelin.R.image.width": "100%", "zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F", "zeppelin.R.cmd": "R" }, "status": "READY", "interpreterGroup": [ { "name": "spark", "class": "org.apache.zeppelin.spark.SparkInterpreter", "defaultInterpreter": true }, { "name": "sql", "class": "org.apache.zeppelin.spark.SparkSqlInterpreter", "defaultInterpreter": false }, { "name": "dep", "class": "org.apache.zeppelin.spark.DepInterpreter", "defaultInterpreter": false }, { "name": "pyspark", "class": "org.apache.zeppelin.spark.PySparkInterpreter", "defaultInterpreter": false }, { "name": "r", "class": "org.apache.zeppelin.spark.SparkRInterpreter", "defaultInterpreter": false } ], "dependencies": [], "option": { "remote": true, "port": -1, "perNoteSession": false, "perNoteProcess": false, "isExistingProcess": false, "setPermission": false } }, "2CK8A9MEG": { "id": "2CK8A9MEG", "name": "jdbc", "group": "jdbc", "properties": { "default.password": "", "zeppelin.jdbc.auth.type": "", "common.max_count": "1000", "zeppelin.jdbc.principal": "", "default.user": "gpadmin", "default.url": "jdbc:postgresql://localhost:5432/", "default.driver": "org.postgresql.Driver", "zeppelin.jdbc.keytab.location": "", "zeppelin.jdbc.concurrent.use": "true", "zeppelin.jdbc.concurrent.max_connection": "10" }, "status": "READY", "interpreterGroup": [ { "name": "sql", "class": "org.apache.zeppelin.jdbc.JDBCInterpreter", "defaultInterpreter": false, "editor": { "language": "sql", "editOnDblClick": false } } ], "dependencies": [], "option": { "remote": true, "port": -1, "perNote": "shared", "perUser": "shared", "isExistingProcess": false, "setPermission": false, "users": [], "isUserImpersonate": false } }, "2CKX6DGQZ": { "id": "2CKX6DGQZ", "name": "livy", "group": "livy", "properties": { "zeppelin.livy.pull_status.interval.millis": "1000", "livy.spark.executor.memory": "", "zeppelin.livy.session.create_timeout": "120", "zeppelin.livy.principal": "", "zeppelin.livy.spark.sql.maxResult": "1000", "zeppelin.livy.keytab": "", "zeppelin.livy.concurrentSQL": "false", "zeppelin.livy.spark.sql.field.truncate": "true", 
"livy.spark.executor.cores": "", "zeppelin.livy.displayAppInfo": "true", "zeppelin.livy.url": "http://localhost:8998", "livy.spark.dynamicAllocation.minExecutors": "", "livy.spark.driver.cores": "", "livy.spark.jars.packages": "", "livy.spark.dynamicAllocation.enabled": "", "livy.spark.executor.instances": "", "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "", "livy.spark.dynamicAllocation.maxExecutors": "", "livy.spark.dynamicAllocation.initialExecutors": "", "livy.spark.driver.memory": "" }, "status": "READY", "interpreterGroup": [ { "name": "spark", "class": "org.apache.zeppelin.livy.LivySparkInterpreter", "defaultInterpreter": true, "editor": { "language": "scala", "editOnDblClick": false } }, { "name": "sql", "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter", "defaultInterpreter": false, "editor": { "language": "sql", "editOnDblClick": false } }, { "name": "pyspark", "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter", "defaul
tInterpreter": false, "editor": { "language": "python", "editOnDblClick": false } }, { "name": "pyspark3", "class": "org.apache.zeppelin.livy.LivyPySpark3Inte
rpreter", "defaultInterpreter": false, "editor": { "language": "python", "editOnDblClick": false } }, { "name": "sparkr", "class": "org.apache.zeppelin.livy.LivySparkRInterpreter", "defaultInterpreter": false, "editor": { "language": "r", "editOnDblClick": false } }, { "name": "shared", "class": "org.apache.zeppelin.livy.LivySharedInterpreter", "defaultInterpreter": false } ], "dependencies": [], "option": { "remote": true, "port": -1, "perNote": "shared", "perUser": "scoped", "isExistingProcess": false, "setPermission": false, "users": [], "isUserImpersonate": false } }, "2C8A4SZ9T_livy2": { "id": "2C8A4SZ9T_livy2", "status": "READY", "group": "livy", "name": "livy2", "properties": { "zeppelin.livy.keytab": "", "zeppelin.livy.spark.sql.maxResult": "1000", "livy.spark.executor.instances": "", "livy.spark.executor.memory": "", "livy.spark.dynamicAllocation.enabled": "", "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "", "livy.spark.dynamicAllocation.initialExecutors": "", "zeppelin.livy.session.create_timeout": "120", "livy.spark.driver.memory": "", "zeppelin.livy.displayAppInfo": "true", "livy.spark.jars.packages": "", "livy.spark.dynamicAllocation.maxExecutors": "", "zeppelin.livy.concurrentSQL": "false", "zeppelin.livy.principal": "", "livy.spark.executor.cores": "", "zeppelin.livy.url": "http://localhost:8998", "zeppelin.livy.pull_status.interval.millis": "1000",
e must also match it.

    Raises RuntimeError if output capturing was never on for this test.
    """
    check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
                                           regexp)
    return self._AssertOutputEndsInMsg(check_msg_func,
                                       check_stdout, check_stderr)

  def AssertOutputEndsInLine(self, regexp, check_stdout=True, check_stderr=False):
    """Assert requested output ends in line matching |regexp|.

    Raises RuntimeError if output capturing was never on for this test.
    """
    check_msg_func = self._GenCheckMsgFunc(None, regexp)
    return self._AssertOutputEndsInMsg(check_msg_func,
                                       check_stdout, check_stderr)

  def FuncCatchSystemExit(self, func, *args, **kwargs):
    """Run |func| with |args| and |kwargs| and catch exceptions.SystemExit.

    Return tuple (return value or None, SystemExit number code or None).
    """
    try:
      returnval = func(*args, **kwargs)
      return returnval, None
    except exceptions.SystemExit as ex:
      exit_code = ex.args[0]
      return None, exit_code

  def AssertFuncSystemExitZero(self, func, *args, **kwargs):
    """Run |func| with |args| and |kwargs| catching exceptions.SystemExit.

    If the func does not raise a SystemExit with exit code 0 then assert.
    """
    exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
    self.assertFalse(exit_code is None,
                     msg='Expected system exit code 0, but caught none')
    self.assertTrue(exit_code == 0,
                    msg='Expected system exit code 0, but caught %d' % exit_code)

  def AssertFuncSystemExitNonZero(self, func, *args, **kwargs):
    """Run |func| with |args| and |kwargs| catching exceptions.SystemExit.

    If the func does not raise a non-zero SystemExit code then assert.
    """
    exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
    self.assertFalse(exit_code is None,
                     msg='Expected non-zero system exit code, but caught none')
    self.assertFalse(exit_code == 0,
                     msg='Expected non-zero system exit code, but caught %d' % exit_code)

  def AssertRaisesAndReturn(self, error, func, *args, **kwargs):
    """Like assertRaises, but return exception raised."""
    try:
      func(*args, **kwargs)
      self.assertTrue(False, msg='Expected %s but got none' % error)
    except error as ex:
      return ex


class TempDirTestCase(TestCase):
  """Mixin used to give each test a tempdir that is cleansed upon finish"""

  sudo_cleanup = False

  def __init__(self, *args, **kwds):
    TestCase.__init__(self, *args, **kwds)
    self.tempdir = None

  def setUp(self):
    # pylint: disable=W0212
    osutils._TempDirSetup(self)

  def tearDown(self):
    # pylint: disable=W0212
    osutils._TempDirTearDown(self, self.sudo_cleanup)


class _RunCommandMock(mox.MockObject):
  """Custom mock class used to suppress arguments we don't care about"""

  DEFAULT_IGNORED_ARGS = ('print_cmd',)

  def __call__(self, *args, **kwds):
    for arg in self.DEFAULT_IGNORED_ARGS:
      kwds.setdefault(arg, mox.IgnoreArg())
    return mox.MockObject.__call__(self, *args, **kwds)


class LessAnnoyingMox(mox.Mox):
  """Mox derivative that slips in our suppressions to mox.

  This is used by default via MoxTestCase; namely, this suppresses certain
  arguments awareness that we don't care about via switching in (dependent on
  the namespace requested) overriding MockObject classing.

  Via this, it makes maintenance much simpler- simplest example, if code
  doesn't explicitly assert that print_cmd must be true/false... then we
  don't care about what argument is set (it has no effect beyond output).
  Mox normally *would* care, making it a pita to maintain.  This selectively
  suppresses that awareness, making it maintainable.
  """

  mock_classes = {}.fromkeys(
      ['chromite.lib.cros_build_lib.%s' % x
       for x in dir(cros_build_lib) if "RunCommand" in x],
      _RunCommandMock)

  @staticmethod
  def _GetNamespace(obj):
    return '%s.%s' % (obj.__module__, obj.__name__)

  def CreateMock(self, obj, attrs=None):
    if attrs is None:
      attrs = {}
    kls = self.mock_classes.get(
        self._GetNamespace(obj), mox.MockObject)
    # Copy attrs; I don't trust mox to not be stupid here.
    new_mock = kls(obj, attrs=attrs)
    self._mock_objects.append(new_mock)
    return new_mock


class MoxTestCase(TestCase):
  """Mox based test case; compatible with StackedSetup"""

  mox_suppress_verify_all = False

  def setUp(self):
    self.mox = LessAnnoyingMox()
    self.stubs = mox.stubout.StubOutForTesting()

  def tearDown(self):
    try:
      if self.__test_was_run__ and not self.mox_suppress_verify_all:
        # This means the test code was actually ran.
        # force a verifyall
        self.mox.VerifyAll()
    finally:
      if hasattr(self, 'mox'):
        self.mox.UnsetStubs()
      if hasattr(self, 'stubs'):
        self.stubs.UnsetAll()
        self.stubs.SmartUnsetAll()


class MoxTempDirTestCase(TempDirTestCase, MoxTestCase):
  """Convenience class mixing TempDir and Mox"""


class MoxOutputTestCase(OutputTestCase, MoxTestCase):
  """Convenience class mixing OutputTestCase and MoxTestCase."""


class MockTestCase(TestCase):
  """Python-mock based test case; compatible with StackedSetup"""

  def setUp(self):
    self._patchers = []

  def tearDown(self):
    # We can't just run stopall() by itself, and need to stop our patchers
    # manually since stopall() doesn't handle repatching.
    cros_build_lib.SafeRun([p.stop for p in reversed(self._patchers)] +
                           [mock.patch.stopall])

  def StartPatcher(self, patcher):
    """Call start() on the patcher, and stop() in tearDown."""
    m = patcher.start()
    self._patchers.append(patcher)
    return m

  def PatchObject(self, *args, **kwargs):
    """Create and start a mock.patch.object().

    stop() will be called automatically during tearDown.
    """
    return self.StartPatcher(mock.patch.object(*args, **kwargs))


# MockTestCase must be before TempDirTestCase in this inheritance order,
# because MockTestCase.StartPatcher() calls may be for PartialMocks, which
# create their own temporary directory.  The teardown for those directories
# occurs during MockTestCase.tearDown(), which needs to be run before
# TempDirTestCase.tearDown().
class MockTempDirTestCase(MockTestCase, TempDirTestCase):
  """Convenience class mixing TempDir and Mock."""


def FindTests(directory, module_namespace=''):
  """Find all *_unittest.py, and return their python namespaces.

  Args:
    directory: The directory to scan for tests.
    module_namespace: What namespace to prefix all found tests with.

  Returns:
    A list of python unittests in python namespace form.
  """
  results = cros_build_lib.RunCommandCaptureOutput(
      ['find', '.', '-name', '*_unittest.py', '-printf', '%P\n'],
      cwd=directory, print_cmd=False).output.splitlines()
  # Drop the trailing .py, inject in the name if one was given.
  if module_namespace:
    module_namespace += '.'
  return [module_namespace + x[:-3].replace('/', '.') for x in results]


@contextlib.contextmanager
def DisableLogging():
  """Temporarily disable chromite logging."""
  backup = cros_build_lib.logger.disabled
  try:
    cros_build_lib.logger.disabled = True
    yield
  finally:
    cros_build_lib.logger.disabled = backup


def main(**kwds):
  """Helper wrapper around unittest.main.  Invoke this, not unittest.main.

  Any passed in kwds are passed directly down to unittest.main; via this, you
  can inject custom argv for example (to limit what tests run).
  """
  # Default to exit=True; this matches old behaviour, and allows unittest
  # to trigger sys.exit on its own.  Unfortunately, the exit keyword is only
  # available in 2.7- as such, handle it ourselves.
  allow_exit = kwds.pop('exit', True)
  cros_build_lib.SetupBasicLogging()
  try:
    unittest.main(**kwds)
    raise SystemExit(0)
  except SystemExit, e:
    if e.__class__ != Sy
import sys
import math as ma
import numpy as np

params = ["$\\varepsilon_{sph}$ & ", "$q$ & ",
          "$\\varepsilon$ & ", "$\mu$ & ", "R & ", "$\\theta$ & ", "$\phi$ &", " $\sigma$ & ",
          "$\\varepsilon$ & ", "$\mu$ & ", "R & ", "$\\theta$ & ", "$\phi$ & ", "$\sigma$ & ",
          "$\\varepsilon$ & ", "$\mu$ & ", "R & ", "$\\theta$ & ", "$\phi$ & ", "$\sigma$ & "]

count = 0
for i in range(1, len(sys.argv)):
    if i == 3:
        count += 1
        print "\cline{1-9}"
    if i == 9:
        count += 1
        print "\cline{1-1}\cline{4-15}"
    if i == 15:
        count += 1
        print "\cline{1-1} \cline{10-21}"

    a = [(("%.2g" % k) if (abs(k) < 1000 and abs(k) > .01) else ("%.1e" % k))
         for k in [j / (84046.0) for j in map(float, sys.argv[i].split())]]
    a[i - 1] = "\textbf{" + a[i - 1] + "}"

    placeholder = ["& ---" for j in range(i + 1, len(sys.argv))]

    if count == 0:
        placeholder[2 - i] = "& \multicolumn{1}{|c}{---}"
    if count == 1:
        a[2] = "\multicolumn{1}{|c}{" + a[2] + "} "
        placeholder[8 - i] = "& \multicolumn{1}{|c}{---}"
    if count == 2:
        a[8] = "\multicolumn{1}{|c}{" + a[8] + "}"
        placeholder[14 - i] = "& \multicolumn{1}{|c}{---}"
    if count == 3:
        a[14] = "\multicolumn{1}{|c}{" + a[14] + "}"

    #print str(a[:i]).replace('[', '').replace(',', ' &').replace("'", '').replace(']', ' ')
    print params[i - 1] \
        + str(a[:i]).replace('[', '').replace(',', ' &').replace("'", '').replace(']', ' ') \
        + str(placeholder).replace('[', '').replace(',', '').replace("'", '').replace(']', '\\\\')
from setuptools import setup

setup(
    name="SULI",
    packages=['SULI'],
    version='0.0.1',
    description="SULI project",
    author='Kelin Kurzer-Ogul',
    author_email='kelin.kurzerogul@gmail.com',
    url='https://github.com/k340/SULI',
    keywords=['SULI', 'Fermi', 'LAT', 'transients'],
    classifiers=[],
    install_requires=['numpy', 'astropy'],
)
import os
from itertools import imap
from multiprocessing import Pool

from config import config
from document_topic import Document, Topic


class Collection(object):
    def __init__(self, list_file, prefix=''):
        self.names_list = [os.path.join(prefix, file_name.strip())
                           for file_name in list_file]

    def __iter__(self):
        pool = Pool(min(config.workers, len(self.names_list)))
        return pool.imap(self.type, self.names_list)
        #return imap(self.type, self.names_list)


class TopicCollection(Collection):
    type = Topic


class DocumentCollection(Collection):
    type = Document
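# A usage sketch: iterating a collection fans document parsing out over a
# process pool. The list-file name and prefix below are hypothetical; the
# sketch assumes Document can be constructed from a path, per the import above.
with open('documents.list') as list_file:
    for doc in DocumentCollection(list_file, prefix='/data/corpus'):
        print(doc)  # each item is a Document built in a worker process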
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import unicode_literals

import setuptools

import caniusepypy as ciu
import caniusepypy.__main__ as ciu_main
from caniusepypy import pypi


class Command(setuptools.Command):

    description = """Run caniusepypy over a setup.py file."""
    user_options = []

    def _dependencies(self):
        projects = []
        for attr in ('install_requires', 'tests_require'):
            requirements = getattr(self.distribution, attr, None) or []
            for project in requirements:
                if not project:
                    continue
                projects.append(pypi.just_name(project))
        extras = getattr(self.distribution, 'extras_require', None) or {}
        for value in extras.values():
            projects.extend(map(pypi.just_name, value))
        return projects

    def initialize_options(self):
        pass

    def run(self):
        ciu_main.check(self._dependencies())

    def finalize_options(self):
        pass
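# A sketch of wiring the Command above into a consuming project's setup.py;
# the import path and the command name 'caniusepypy' are assumptions, not
# confirmed API:
from setuptools import setup
from caniusepypy.command import Command  # hypothetical module path

setup(
    name='example',
    install_requires=['requests'],
    cmdclass={'caniusepypy': Command},  # enables `python setup.py caniusepypy`
)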
""" Various tests for class members access. """ # pylint: disable=R0903,print-statement,no-absolute-import, metaclass-assignment,import-error,no-init,missing-docstring, wrong-import-order,wrong-import-position from missing import Missing class MyClass(object): """class docstring""" def __init__(self): """init""" self.correct = 1 def test(self): """test""" self.correct += 2 self.incorrect += 2 # [no-member] del self.havenot # [no-member] self.nonexistent1.truc() # [no-member] self.nonexistent2[1] = 'hehe' # [no-member] class XYZMixin(object): """access to undefined members should be ignored in mixin classes by default """ def __init__(self): print self.nonexistent class NewClass(object): """use object.__setattr__""" def __init__(self): self.__setattr__('toto', 'tutu') from abc import ABCMeta class TestMetaclass(object): """ Test attribute access for metaclasses. """ __metaclass__ = ABCMeta class Metaclass(type): """ metaclass """ @classmethod def test(mcs): """ classmethod """ class UsingMetaclass(object): """ empty """ __m
etaclass__ = Metaclass #TestMetaclass.register(int) #UsingMetacla
ss.test() TestMetaclass().register(int) # [no-member] UsingMetaclass().test() # [no-member] class NoKnownBases(Missing): """Don't emit no-member if we don't know the bases of a class.""" NoKnownBases().lalala() # Should be enabled on astroid > 1.4.0 #class MetaClass(object): # """Look some methods in the implicit metaclass.""" # # @classmethod # def whatever(cls): # return cls.mro() + cls.missing()
# This python module is part of the oocs scanner for Linux.
# Copyright (C) 2015 Davide Madrisan <davide.madrisan.gmail.com>

import glob
from os import sep
from os.path import join
from pwd import getpwuid

from oocs.filesystem import Filesystems, UnixCommand, UnixFile
from oocs.io import Config, message_add, quote, unlist
from oocs._oocsext import runlevel

class Services(object):
    module_name = 'services'

    def __init__(self, verbose=False):
        self.verbose = verbose
        self.scan = {
            'module': self.module_name,
            'checks': {},
            'status': {}
        }
        try:
            self.cfg = Config().module(self.module_name)
        except KeyError:
            message_add(self.scan['status'], 'warning',
                        self.module_name +
                        ' directive not found in the configuration file')
            self.cfg = {}
        self.required = self.cfg.get("required", [])
        self.forbidden = self.cfg.get("forbidden", [])
        self.runlevel = self.cfg.get("runlevel", '')
        self.enabled = (self.cfg.get('enable', 1) == 1)
        self.verbose = (self.cfg.get('verbose', verbose) == 1)

    def configuration(self):
        return self.cfg

    def sysv_runlevel(self):
        try:
            return runlevel()
        except Exception:
            return ''

class Service(Services):
    def __init__(self, service):
        """
        Note: service can be a chain of commands as in the following example:
            'syslogd|/sbin/rsyslogd'
        The service string must match the one displayed by 'ps'.
        """
        Services.__init__(self)
        self.service = service
        self.procfs = Filesystems().procfs
        # Stored as _state so that the attribute does not shadow the state()
        # accessor defined below.
        self._state, self.fullstatus = self._status()

    def _proc_status_parser(self, pid):
        procfile = glob.glob(join(self.procfs, str(pid), 'status'))[0]
        rawdata = UnixFile(procfile).readlines() or []
        data = {}
        for line in rawdata:
            cols = line.split(':')
            key = cols[0].lower()
            values = cols[1].rstrip('\n').split()
            data[key] = values
        return data

    def _status(self):
        """
        Return a tuple (state-string, full-status-infos-dict).
        state-string will be 'running' or 'down' and will reflect the state
        of the 'self.service' process(es).
        full-status-infos-dict is a dictionary containing the information
        provided by /proc/<pid>/status of each process pid:
            srv_full_status[pidnum] = dictionary containing the status of
            the process with pid equal to pidnum
        """
        cmdlines = glob.glob(join(self.procfs, '*', 'cmdline'))
        srv_state = 'down'
        srv_full_status = {}
        for f in cmdlines:
            for srv in self.service.split('|'):
                cmdlinefile = UnixFile(f)
                if not cmdlinefile.isfile():
                    continue
                if cmdlinefile.readfile().startswith(srv):
                    # FIXME
                    pid = f.split(sep)[2]
                    proc_pid_status = self._proc_status_parser(pid)
                    srv_full_status[pid] = proc_pid_status
                    srv_state = 'running'
        return (srv_state, srv_full_status)

    def name(self):
        return self.service

    def pid(self):
        """Return the list of pid numbers or an empty list when the process
        is not running"""
        return self.fullstatus.keys()

    def ppid(self):
        ppids = []
        for pid in self.pid():
            ppid = self.fullstatus.get(pid)['ppid'][0]
            ppids.append(ppid)
        return ppids

    def state(self):
        return self._state

    def uid(self):
        real_uids = []
        for pid in self.pid():
            # Real, effective, saved set, and file system UIDs
            uids = self.fullstatus.get(pid)['uid']
            real_uids.append(uids[0])
        return real_uids

    def gid(self):
        real_gids = []
        for pid in self.pid():
            # Real, effective, saved set, and file system GIDs
            gids = self.fullstatus.get(pid)['gid']
            real_gids.append(gids[0])
        return real_gids

    def owner(self):
        owners = []
        for uid in self.uid():
            owners.append(getpwuid(int(uid)).pw_name)
        return owners

    def threads(self):
        threads_num = 0
        for pid in self.pid():
            threads_num += int(self.fullstatus.get(pid)['threads'][0])
        return threads_num

def check_services(verbose=False):
    services = Services(verbose=verbose)
    localscan = {}

    curr_runlevel = services.sysv_runlevel()
    if services.runlevel and curr_runlevel != services.runlevel:
        message_add(localscan, 'warning',
                    'the current runlevel is ' + quote(curr_runlevel) +
                    ' but should be ' + quote(services.runlevel))

    if not services.enabled:
        if verbose:
            message_add(localscan, 'info',
                        'Skipping ' + quote(services.module_name) +
                        ' (disabled in the configuration)')
        return

    for srv in services.required:
        service = Service(srv)
        pids = service.pid()
        owners = service.owner()
        if pids and services.verbose:
            message_add(localscan, 'info',
                        'the service ' + quote(service.name()) +
                        ' is running (with pid:%s owner:%s)' % (
                            unlist(pids, sep=','), unlist(owners, sep=',')))
        elif not pids:
            message_add(localscan, 'critical',
                        'the service ' + quote(service.name()) +
                        ' is not running')

    for srv in services.forbidden:
        service = Service(srv)
        pids = service.pid()
        if pids:
            message_add(localscan, 'critical',
                        'the service ' + quote(service.name()) +
                        ' should not be running')
        elif services.verbose:
            message_add(localscan, 'info',
                        'the service ' + quote(service.name()) +
                        ' is not running as required')

    message_add(services.scan['checks'], 'running services', localscan)
    return services.scan
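# Typical entry point (a sketch): run the scan and inspect the aggregated
# result, whose {'module', 'checks', 'status'} layout comes from
# Services.__init__ above. Note check_services returns None when the module
# is disabled in the configuration.
#
#   scan = check_services(verbose=True)
#   if scan:
#       print(scan['checks'])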
import zeit.newsletter.testing class MetadataTest(zeit.newsletter.testing.SeleniumTestCase): def test_form_should_save_entered_data_on_blur(self): s = self.selenium self.open('/repository/newsletter/@@checkout') s.waitForElementPresent('id=metadata
.subject') s.assertValue('id=metadata.subject', '') s.type('id=metadata.subject', 'flubber\t') s.waitForElementNotPresent('css=.field.dirty') # Re-open the page and verify that the data is still there s.clickAndWait('link=Edit contents') s.waitForElementPresent('id=metadata.subject') s
.assertValue('id=metadata.subject', 'flubber')
""" ==================================================================== Linear and Quadratic Discriminant Analysis with confidence ellipsoid ==================================================================== Plot the confidence ellipsoids of each class and decision boundary """ print(__doc__) from scipy import linalg import numpy as np import matplotlib.pyplot as plt import matplotlib as mpl from matplotlib import colors from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis ############################################################################### # colormap cmap = colors.LinearSegmentedColormap( 'red_blue_classes', {'red': [(0, 1, 1), (1, 0.7, 0.7)], 'green': [(0, 0.7, 0.7), (1, 0.7, 0.7)], 'blue': [(0, 0.7, 0.7), (1, 1, 1)]}) plt.cm.register_cmap(cmap=cmap) ############################################################################### # generate datasets def dataset_fixed_cov(): '''Generate 2 Gaussians samples with the same covariance matrix''' n, dim = 300, 2 np.random.seed(0) C = np.array([[0., -0.23], [0.83, .23]]) X = np.r_[np.dot(np.random.randn(n, dim), C), np.dot(np.random.randn(n, dim), C) + np.array([1, 1])] y = np.hstack((np.zeros(n), np.ones(n))) return X, y def dataset_cov(): '''Generate 2 Gaussians samples with different covariance matrices''' n, dim = 300, 2 np.random.seed(0) C = np.array([[0., -1.], [2.5, .7]]) * 2. X = np.r_[np.dot(np.random.randn(n, dim), C), np.dot(np.random.randn(n, dim), C.T) + np.array([1, 4])] y = np.hstack((np.zeros(n), np.ones(n))) return X, y ############################################################################### # plot functions def plot_data(lda, X, y, y_pre
d, fig_index): splot = plt.subplot(2, 2, fig_index) if fig_index == 1: plt.title('Linear Discriminant Analysis') plt.ylabel('Data with fixed covariance') elif fig_index == 2: plt.title('Quadratic Discriminant Analysis') elif fig_index == 3: plt.ylabel('Data with varying covariances') tp = (y
== y_pred) # True Positive tp0, tp1 = tp[y == 0], tp[y == 1] X0, X1 = X[y == 0], X[y == 1] X0_tp, X0_fp = X0[tp0], X0[~tp0] X1_tp, X1_fp = X1[tp1], X1[~tp1] xmin, xmax = X[:, 0].min(), X[:, 0].max() ymin, ymax = X[:, 1].min(), X[:, 1].max() # class 0: dots plt.plot(X0_tp[:, 0], X0_tp[:, 1], 'o', color='red') plt.plot(X0_fp[:, 0], X0_fp[:, 1], '.', color='#990000') # dark red # class 1: dots plt.plot(X1_tp[:, 0], X1_tp[:, 1], 'o', color='blue') plt.plot(X1_fp[:, 0], X1_fp[:, 1], '.', color='#000099') # dark blue # class 0 and 1 : areas nx, ny = 200, 100 x_min, x_max = plt.xlim() y_min, y_max = plt.ylim() xx, yy = np.meshgrid(np.linspace(x_min, x_max, nx), np.linspace(y_min, y_max, ny)) Z = lda.predict_proba(np.c_[xx.ravel(), yy.ravel()]) Z = Z[:, 1].reshape(xx.shape) plt.pcolormesh(xx, yy, Z, cmap='red_blue_classes', norm=colors.Normalize(0., 1.)) plt.contour(xx, yy, Z, [0.5], linewidths=2., colors='k') # means plt.plot(lda.means_[0][0], lda.means_[0][1], 'o', color='black', markersize=10) plt.plot(lda.means_[1][0], lda.means_[1][1], 'o', color='black', markersize=10) return splot def plot_ellipse(splot, mean, cov, color): v, w = linalg.eigh(cov) u = w[0] / linalg.norm(w[0]) angle = np.arctan(u[1] / u[0]) angle = 180 * angle / np.pi # convert to degrees # filled Gaussian at 2 standard deviation ell = mpl.patches.Ellipse(mean, 2 * v[0] ** 0.5, 2 * v[1] ** 0.5, 180 + angle, color=color) ell.set_clip_box(splot.bbox) ell.set_alpha(0.5) splot.add_artist(ell) splot.set_xticks(()) splot.set_yticks(()) def plot_lda_cov(lda, splot): plot_ellipse(splot, lda.means_[0], lda.covariance_, 'red') plot_ellipse(splot, lda.means_[1], lda.covariance_, 'blue') def plot_qda_cov(qda, splot): plot_ellipse(splot, qda.means_[0], qda.covariances_[0], 'red') plot_ellipse(splot, qda.means_[1], qda.covariances_[1], 'blue') ############################################################################### for i, (X, y) in enumerate([dataset_fixed_cov(), dataset_cov()]): # Linear Discriminant Analysis lda = LinearDiscriminantAnalysis(solver="svd", store_covariance=True) y_pred = lda.fit(X, y).predict(X) splot = plot_data(lda, X, y, y_pred, fig_index=2 * i + 1) plot_lda_cov(lda, splot) plt.axis('tight') # Quadratic Discriminant Analysis qda = QuadraticDiscriminantAnalysis() y_pred = qda.fit(X, y, store_covariances=True).predict(X) splot = plot_data(qda, X, y, y_pred, fig_index=2 * i + 2) plot_qda_cov(qda, splot) plt.axis('tight') plt.suptitle('Linear Discriminant Analysis vs Quadratic Discriminant Analysis') plt.show()
num_points_delta': 800,
    'hdawg_ch0_amplitude': 1.0,
    'hdawg_ch1_amplitude': 1.0,
    'hdawg_ch2_amplitude': 0.8,
    'hdawg_ch3_amplitude': 0.8,
    'hdawg_ch4_amplitude': 1.0,
    'hdawg_ch5_amplitude': 1.0,
    'hdawg_ch6_amplitude': 1.0,
    'hdawg_ch7_amplitude': 1.0,
    'lo1_freq': 3.3e9,  # 3.70e9
    'pna_freq': 6.06e9,
    # 'calibrate_delay_nop': 65536,
    'calibrate_delay_nums': 200,
    'trigger_readout_channel_name': 'ro_trg',
    'trigger_readout_length': 200e-9,
    'modem_dc_calibration_amplitude': 1.0,
    'adc_nop': 1024,
    'adc_nums': 10000,  ## Do we need control over this? Probably, but not now.
}


class hardware_setup():
    def __init__(self, device_settings, pulsed_settings):
        self.device_settings = device_settings
        self.pulsed_settings = pulsed_settings
        self.cw_settings = cw_settings
        self.hardware_state = 'undefined'

        self.pna = None
        self.lo1 = None
        self.rf_switch = None
        self.sa = None
        self.coil_device = None
        self.hdawg = None
        self.adc_device = None
        self.adc = None
        self.ro_trg = None
        self.coil = None
        self.iq_devices = None

    def open_devices(self):
        # RF switch for making sure we know what sample we are measuring
        self.pna = Agilent_N5242A('pna', address=self.device_settings['vna_address'])
        self.lo1 = Agilent_E8257D('lo1', address=self.device_settings['lo1_address'])
        self.lo1._visainstrument.timeout = self.device_settings['lo1_timeout']
        if self.device_settings['use_rf_switch']:
            self.rf_switch = nn_rf_switch('rf_switch', address=self.device_settings['rf_switch_address'])
        self.sa = Agilent_N9030A('pxa', address=self.device_settings['sa_address'])
        self.hdawg = Zurich_HDAWG1808(self.device_settings['hdawg_address'])
        self.dummy_awg = dummy_awg.DummyAWG(channels=1)
        self.coil_device = self.hdawg

        self.adc_device = TSW14J56_evm()
        self.adc_device.timeout = self.device_settings['adc_timeout']
        self.adc = TSW14J56_evm_reducer(self.adc_device)
        self.adc.output_raw = True
        self.adc.last_cov = False
        self.adc.avg_cov = False
        self.adc.resultnumber = False

        self.adc_device.set_trig_src_period(self.device_settings['adc_trig_rep_period'])  # 10 kHz period rate
        self.adc_device.set_trig_src_width(self.device_settings['adc_trig_width'])  # 80 ns trigger length
        # self.hardware_state = 'undefined'

    def set_pulsed_mode(self):
        self.lo1.set_status(1)  # turn on lo1 output
        self.lo1.set_power(self.pulsed_settings['lo1_power'])
        self.lo1.set_frequency(self.pulsed_settings['lo1_freq'])

        self.pna.set_power(self.pulsed_settings['vna_power'])
        self.pna.write("OUTP ON")
        self.pna.write("SOUR1:POW1:MODE ON")
        self.pna.write("SOUR1:POW2:MODE OFF")
        self.pna.set_sweep_mode("CW")
        self.pna.set_frequency(self.pulsed_settings['pna_freq'])

        self.hdawg.stop()
        self.hdawg.set_clock(self.pulsed_settings['ex_clock'])
        self.hdawg.set_clock_source(1)

        # Setting the repetition period for slave devices.
        # 'global_num_points_delta' is needed to verify that the M3202A and
        # other slave devices will be free when the next trigger arrives.
        global_num_points = int(np.round(
            self.pulsed_settings['ex_clock'] / self.pulsed_settings['rep_rate']
            - self.pulsed_settings['global_num_points_delta']))
        # global_num_points = 20000

        self.hdawg.set_nop(global_num_points)
        self.hdawg.clear()

        # The sample length, which is obviously the same quantity, has to be
        # set on every AWG; fortunately there is only one of them for now.
        # This is ugly and should be cleaned up.
        self.hdawg.set_trigger_impedance_1e3()
        self.hdawg.set_dig_trig1_source([0, 0, 0, 0])
        self.hdawg.set_dig_trig1_slope([1, 1, 1, 1])  # 0 - Level sensitive trigger, 1 - Rising edge trigger,
                                                      # 2 - Falling edge trigger, 3 - Rising or falling edge trigger
        self.hdawg.set_dig_trig1_source([0, 0, 0, 0])
        self.hdawg.set_dig_trig2_slope([1, 1, 1, 1])
        self.hdawg.set_trig_level(0.6)

        for sequencer in range(4):
            self.hdawg.send_cur_prog(sequencer=sequencer)
            self.hdawg.set_marker_out(channel=int(2 * sequencer),
                                      source=4)  # set marker 1 to awg mark out 1 for sequencer
            self.hdawg.set_marker_out(channel=int(2 * sequencer + 1),
                                      source=7)  # set marker 2 to awg mark out 2 for sequencer

        for channel in range(8):
            self.hdawg.set_amplitude(channel=channel,
                                     amplitude=self.pulsed_settings['hdawg_ch%d_amplitude' % channel])
            self.hdawg.set_offset(channel=channel, offset=0 * 1.0)
            self.hdawg.set_digital(channel=channel, marker=[0] * global_num_points)
            self.hdawg.daq.set([['/{}/sigouts/{}/range'.format(self.hdawg.device, channel), 0.6]])

        self.hdawg.daq.set([['/{}/sigouts/4/range'.format(self.hdawg.device), 2]])
        self.hdawg.set_all_outs()
        self.hdawg.run()

        self.ro_trg = awg_digital.awg_digital(self.hdawg, 1, delay_tolerance=20e-9)  # triggers readout card
        self.coil = awg_channel.awg_channel(self.hdawg, 5)  # coil control
        # ro_trg.mode = 'set_delay' #M3202A
        # ro_trg.delay_setter = lambda x: adc.set_trigger_delay(int(x*adc.get_clock()/iq_ex.get_clock()-readout_trigger_delay)) #M3202A
        self.ro_trg.mode = 'waveform'  # AWG5014C

        self.adc.set_nop(self.pulsed_settings['adc_nop'])
        self.adc.set_nums(self.pulsed_settings['adc_nums'])

    def set_switch_if_not_set(self, value, channel):
        if self.rf_switch.do_get_switch(channel=channel) != value:
            self.rf_switch.do_set_switch(value, channel=channel)

    def setup_iq_channel_connections(self, exdir_db):
        # intermediate frequencies for the heterodyne scheme (new):
        self.iq_devices = {
            'iq_ex1': awg_iq_multi.Awg_iq_multi(self.hdawg, self.hdawg, 2, 3, self.lo1, exdir_db=exdir_db),  # M3202A
            'iq_ex2': awg_iq_multi.Awg_iq_multi(self.hdawg, self.dummy_awg, 5, 0, self.lo1, exdir_db=exdir_db),
            'iq_ex3': awg_iq_multi.Awg_iq_multi(self.hdawg, self.hdawg, 6, 7, self.lo1, exdir_db=exdir_db),  # M3202A
            'iq_ro': awg_iq_multi.Awg_iq_multi(self.hdawg, self.hdawg, 0, 1, self.pna, exdir_db=exdir_db)}  # M3202A
        # iq_pa = awg_iq_multi.Awg_iq_multi(awg_tek, awg_tek, 3, 4, lo_ro) #M3202A

        self.iq_devices['iq_ex1'].name = 'ex1'
        self.iq_devices['iq_ex2'].name = 'ex2'
        self.iq_devices['iq_ex3'].name = 'ex3'
        # iq_pa.name='pa'
        self.iq_devices['iq_ro'].name = 'ro'

        self.iq_devices['iq_ex1'].calibration_switch_setter = lambda: self.set_switch_if_not_set(1, channel=1)
        self.iq_devices['iq_ex2'].calibration_switch_setter = lambda: self.set_switch_if_not_set(2, channel=1)
        self.iq_devices['iq_ex3'].calibration_switch_setter = lambda: self.set_switch_if_not_set(3, channel=1)
        self.iq_devices['iq_ro'].calibration_switch_setter = lambda: self.set_switch_if_not_set(4, channel=1)

        self.iq_devices['iq_ex1'].sa = self.sa
        self.iq_devices['iq_ex2'].sa = self.sa
        self.iq_devices['iq_ex3'].sa = self.sa
        self.iq_devices['iq_ro'].sa = self.sa

        self.fast_controls = {'coil': awg_channel.awg_channel(self.hdawg, 4)}  # coil control

    def get_readout_trigger_pulse_length(self):
        return self.pulsed_settings['trigger_readout_length']

    def get_modem_dc_calibration_amplitude(self):
        return self.pulsed_settings['modem_dc_calibration_amplitude']
the License, or # (at your option) any later version. # # This software is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this software. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: os_nova_flavor short_description: Manage OpenStack compute flavors extends_documentation_fragment: openstack version_added: "2.0" author: "David Shrewsbury (@Shrews)" description: - Add or remove flavors from OpenStack. options: state: description: - Indicate desired state of the resource. When I(state) is 'present', then I(ram), I(vcpus), and I(disk) are all required. There are no default values for those parameters. choices: ['present', 'absent'] required: false default: present name: description: - Flavor name. required: true ram: description: - Amount of memory, in MB. required: false default: null vcpus: description: - Number of virtual CPUs. required: false default: null disk: description: - Size of local disk, in GB. required: false default: null ephemeral: description: - Ephemeral space size, in GB. required: false default: 0 swap: description: - Swap space size, in MB. required: false default: 0 rxtx_factor: description: - RX/TX factor. required: false default: 1.0 is_public: description: - Make flavor accessible to the public. required: false default: true flavorid: description: - ID for the flavor. This is optional as a unique UUID will be assigned if a value is not specified. required: false default: "auto" availability_zone: description: - Ignored. Present for backwards compatability required: false extra_specs: description: - Metadata dictionary required: false default: None version_added: "2.3" requirements: ["shade"] ''' EXAMPLES = ''' - name: "Create 'tiny' flavor with 1024MB of RAM, 1 virtual CPU, and 10GB of local disk, and 10GB of ephemeral." os_nova_flavor: cloud: mycloud state: present name: tiny ram: 1024 vcpus: 1 disk: 10 ephemeral: 10 - name: "Delete 'tiny' flavor" os_nova_flavor: cloud: mycloud state: absent name: tiny - name: Create flavor with metadata os_nova_flavor: cloud: mycloud state: present name: tiny ram: 1024 vcpus: 1 disk: 10 extra_specs: "quota:disk_read_iops_sec": 5000 "aggregate_instance_extra_specs:pinned": false ''' RETURN = ''' flavor: description: Dictionary describing the flavor. returned: On success when I(state) is 'present' type: dictionary contains: id: description: Flavor ID. returned: success type: string sample: "515256b8-7027-4d73-aa54-4e30a4a4a339" name: description: Flavor name. returned: success type: string sample: "tiny" disk: description: Size of local disk, in GB. returned: success type: int sample: 10 ephemeral: description: Ephemeral space size, in GB. returned: success type: int sample: 10 ram: description: Amount of memory, in MB. returned: success type: int sample: 1024 swap: description: Swap space size, in MB. returned: success type: int sample: 100 vcpus: description: Number of virtual CPUs. returned: success type: int sample: 2 is_public: description: Make flavor accessible to the public. 
returned: success type: bool sample: true extra_specs: description: Flavor metadata returned: success type: dict sample: "quota:disk_read_iops_sec": 5000 "aggregate_instance_extra_specs:pinned": false ''' try: import shade HAS_SHADE = True except ImportError: HAS_SHADE = False def _system_state_change(module, flavor): state = module.params['state'] if state == 'present' and not flavor: return True if state == 'absent' and flavor: return True return False def main(): argument_spec = openstack_full_argument_spec( state = dict(required=False, default='present', choices=['absent', 'present']), name = dict(required=False), # required when state is 'present' ram = dict(required=False, type='int'), vcpus = dict(required=False, type='int'), disk = dict(required=False, type='int'), ephemeral = dict(required=False, default=0, type='int'), swap = dict(required=False, default=0, type='int'), rxtx_factor = dict(required=False, default=1.0, type='float'), is_public = dict(required=False, default=True, type='bool'), flavorid = dict(required=False, default="auto"), extra_specs = dict(required=False, default=None, type='dict'), ) module_kwargs = openstack_module_kwargs() module = AnsibleModule( argument_spec, supports_check_mode=True, required_if=[ ('state', 'present', ['ram', 'vcpus', 'disk']) ], **module_kwargs) if not HAS_SHADE: module.fail_json(msg='shade is required for this module') state = module.params['state'] name = module.params['name'] extra_specs = module.params['extra_specs'] or {} try: cloud = shade.operator_cloud(**module.params) flavor = cloud.get_flavor(name) if module.check_mode: module.exit_json(changed=_system_state_change(module, flavor)) if state == 'present': if not flavor: flavor = cloud.create_flavor( name=name, ram=module.params['ram'], vcpus=module.params['vcpus'
], disk=module.params['disk'], flavorid=module.params[
'flavorid'],
                    ephemeral=module.params['ephemeral'],
                    swap=module.params['swap'],
                    rxtx_factor=module.params['rxtx_factor'],
                    is_public=module.params['is_public']
                )
                changed = True
            else:
                changed = False

            old_extra_specs = flavor['extra_specs']
            new_extra_specs = dict([(k, str(v)) for k, v in extra_specs.items()])
            unset_keys = set(flavor['extra_specs'].keys()) - set(extra_specs.keys())
            if unset_keys:
                cloud.unset_flavor_specs(flavor['id'], unset_keys)
            if old_extra_specs != new_extra_specs:
                cloud.set_flavor_specs(flavor['id'], extra_specs)
            changed = (changed or old_extra_specs != new_extra_specs)

            module.exit_json(changed=changed, flavor=flavor, id=flavor['id'])

        elif state == 'absent':
            if flavor:
                cloud.delete_flavor(name)
                module.exit_json(changed=True)
            module.exit_json(changed=False)

    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e))


# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *

if __name__ == '__main__':
    main()
from .. utils import TranspileTestCase, BuiltinFunctionTestCase class InputTests(TranspileTestCase): pass # FIXME: This test can't run without a redirecti
on for stdin. # class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): # functions = ["input"] # not_implemented = [ #
'test_bool', # 'test_bytearray', # 'test_bytes', # 'test_class', # 'test_complex', # 'test_dict', # 'test_float', # 'test_frozenset', # 'test_int', # 'test_list', # 'test_None', # 'test_NotImplemented', # 'test_set', # 'test_str', # 'test_tuple', # ]
# -*- coding: utf-8 -*- """ """ import logging import logging.handlers import sys STANDARD_FORMAT = '%(name)s [%(levelname)s] %(message)s' MESSAGE_ONLY_FORMAT = '%(message)s' def get_level(level_string): """ Returns an appropriate logging level integer from a string name """ levels = {'debug': logging.DEBUG, 'info': logging.INFO, 'warning': logging.WARNING, 'error': logging.ERROR, 'critical': logging.CRITICAL} try: level = levels[level_string.lower()] except KeyError: sys.exit('{0} is not a recognized logging level'.format(level_string)) else: return level def activate_logging(level=None): log = logging.getLogger('npyscreen2') if level is None: log.setLevel(logging.DEBUG) else: log.setLevel(get_level(level)) def add_rotating_file_handler(filename, frmt=None, level=None, filtr=None, max_bytes=0, backup_count=0, mode='a'): log = logging.getLogger('npyscreen2') handler = logging.handlers.RotatingFileHandler(filename, maxBytes=max_bytes, backupCount=backup_count,
encoding='utf-8', mode=mode) if level is None: handler.setLevel(logging.WARNING) else: handler.setLevel(get_level(level)) if filtr is not None: handler.addFilter(logging.Filter(filtr)) if frmt is None: handl
er.setFormatter(logging.Formatter(STANDARD_FORMAT)) else: handler.setFormatter(logging.Formatter(frmt)) log.addHandler(handler) #def deactivate_logging(): #log = logging.getLogger('npyscreen2')
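# Example wiring (a sketch; the log file name is arbitrary): enable the
# 'npyscreen2' logger at DEBUG, then attach a 1 MiB rotating file handler
# that keeps three backups and only records INFO and above.
#
#   activate_logging('debug')
#   add_rotating_file_handler('npyscreen2.log', level='info',
#                             max_bytes=1024 * 1024, backup_count=3)
#   logging.getLogger('npyscreen2').info('logging configured')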
# Copyright (C) 2008-2012 Chris Ball <cjb@laptop.org> # Gianluca Montecchi <gian@grys.it> # W. Trevor King <wking@tremily.us> # # This file is part of Bugs Everywhere. # # Bugs Everywhere is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the Free # Software Foundation, either version 2 of the License, or (at your option) any # later version. # # Bugs Everywhere is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along with # Bugs Everywhere. If not, see <http://www.gnu.org/licenses/>. import copy import os import libbe import libbe.command import libbe.command.util class Merge (libbe.command.Command): """Merge duplicate bugs >>> import sys >>> import libbe.bugdir >>> import libbe.comment >>> bd = libbe.bugdir.SimpleBugDir(memory=False) >>> io = libbe.command.StringInputOutput() >>> io.stdout = sys.stdout >>> ui = libbe.command.UserInterface(io=io) >>> ui.storage_callbacks.set_storage(bd.storage) >>> cmd = Merge(ui=ui) >>> a = bd.bug_from_uuid('a') >>> a.comment_root.time = 0 >>> dummy = a.new_comment('Testing') >>> dummy.time = 1 >>> dummy = dummy.new_reply('Testing...') >>> dummy.time = 2 >>> b = bd.bug_from_uuid('b') >>> b.status = 'open' >>> b.comment_root.time = 0 >>> dummy = b.new_comment('1 2') >>> dummy.time = 1 >>> dummy = dummy.new_reply('1 2 3 4')
>>> dummy.time = 2 >>> ret = ui.run(cmd
, args=['/a', '/b']) Merged bugs #abc/a# and #abc/b# >>> bd.flush_reload() >>> a = bd.bug_from_uuid('a') >>> a.load_comments() >>> a_comments = sorted([c for c in a.comments()], ... cmp=libbe.comment.cmp_time) >>> mergeA = a_comments[0] >>> mergeA.time = 3 >>> print a.string(show_comments=True) ... # doctest: +ELLIPSIS, +REPORT_UDIFF ID : a Short name : abc/a Severity : minor Status : open Assigned : Reporter : Creator : John Doe <jdoe@example.com> Created : ... Bug A --------- Comment --------- Name: abc/a/... From: ... Date: ... <BLANKLINE> Testing --------- Comment --------- Name: abc/a/... From: ... Date: ... <BLANKLINE> Testing... --------- Comment --------- Name: abc/a/... From: ... Date: ... <BLANKLINE> Merged from bug #abc/b# --------- Comment --------- Name: abc/a/... From: ... Date: ... <BLANKLINE> 1 2 --------- Comment --------- Name: abc/a/... From: ... Date: ... <BLANKLINE> 1 2 3 4 >>> b = bd.bug_from_uuid('b') >>> b.load_comments() >>> b_comments = sorted([c for c in b.comments()], ... libbe.comment.cmp_time) >>> mergeB = b_comments[0] >>> mergeB.time = 3 >>> print b.string(show_comments=True) ... # doctest: +ELLIPSIS, +REPORT_UDIFF ID : b Short name : abc/b Severity : minor Status : closed Assigned : Reporter : Creator : Jane Doe <jdoe@example.com> Created : ... Bug B --------- Comment --------- Name: abc/b/... From: ... Date: ... <BLANKLINE> 1 2 --------- Comment --------- Name: abc/b/... From: ... Date: ... <BLANKLINE> 1 2 3 4 --------- Comment --------- Name: abc/b/... From: ... Date: ... <BLANKLINE> Merged into bug #abc/a# >>> print b.status closed >>> ui.cleanup() >>> bd.cleanup() """ name = 'merge' def __init__(self, *args, **kwargs): libbe.command.Command.__init__(self, *args, **kwargs) self.args.extend([ libbe.command.Argument( name='bug-id', metavar='BUG-ID', default=None, completion_callback=libbe.command.util.complete_bug_id), libbe.command.Argument( name='bug-id-to-merge', metavar='BUG-ID', default=None, completion_callback=libbe.command.util.complete_bug_id), ]) def _run(self, **params): storage = self._get_storage() bugdirs = self._get_bugdirs() bugdirA,bugA,comment = ( libbe.command.util.bugdir_bug_comment_from_user_id( bugdirs, params['bug-id'])) bugA.load_comments() bugdirB,bugB,dummy_comment = ( libbe.command.util.bugdir_bug_comment_from_user_id( bugdirs, params['bug-id-to-merge'])) bugB.load_comments() mergeA = bugA.new_comment('Merged from bug #%s#' % bugB.id.long_user()) newCommTree = copy.deepcopy(bugB.comment_root) for comment in newCommTree.traverse(): # all descendant comments comment.bug = bugA # uuids must be unique in storage if comment.alt_id == None: comment.storage = None comment.alt_id = comment.uuid comment.storage = storage comment.uuid = libbe.util.id.uuid_gen() comment.save() # force onto disk under bugA for comment in newCommTree: # just the child comments mergeA.add_reply(comment, allow_time_inversion=True) bugB.new_comment('Merged into bug #%s#' % bugA.id.long_user()) bugB.status = 'closed' print >> self.stdout, 'Merged bugs #%s# and #%s#' \ % (bugA.id.user(), bugB.id.user()) return 0 def _long_help(self): return """ The second bug (B) is merged into the first (A). This adds merge comments to both bugs, closes B, and appends B's comment tree to A's merge comment. """
HttpResponse from django.template import loader import random import string #import user from .forms import UserForm from stack_configs.stack_functions import createInfluxDB from stack_configs.ldap_functions import addToLDAPGroup,resetLDAPpassword,createLDAPuser from stack_configs.grafana_functions import GrafanaUser,testObj import logging logger = logging.getLogger(__name__) # Create your views here. # Create your views here. #from django.contrib.auth.forms import UserCreationForm def index(request): template = loader.get_template('welcome.html') result="welcome" context = { 'content':result, 'has_permission':request.user.is_authenticated, 'is_popup':False, 'title':'welcome!', 'site_title':'zibawa', 'site_url':settings.SITE_URL } return HttpResponse(template.render(context, request)) def create_account(request): template = loader.get_template('admin/base_site.html') if request.method == "POST": form = UserForm(request.POST) if form.is_valid(): password = form.cleaned_data['password'] new_user = User.objects.create_user(**form.cleaned_data) #new_user.is_staff=True #new_user.save() if (createLDAPuser(new_user,password)): if (addToLDAPGroup(new_user.username,'active')): if (addToLDAPGroup(new_user.username,'editor')): result=createAndConfigureGrafana(new_user,password) if (result.status): if createInfluxDB(new_user): #creates a user database in influx return HttpResponseRedirect('/thanks/') return HttpResponseRedirect('/account_create_error/') else: form = UserForm() context = { 'has_permission':request.user.is_authenticated, 'is_popup':False, 'form':form, 'title':'New User Creation', 'site_title':'zibawa', 'site_url':settings.SITE_URL } return render(request,'form.html',context) def thanks(request): template = loader.get_template('thanks.html') context = { 'content':'Thanks. Please log in to your dashboard', 'title':'Your account has been created', 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } return HttpResponse(template.render(context, request)) def account_create_error(request): template = loader.get_template('admin/base_site.html') context = { 'content':'Sorry. Something went wrong during the creation of your account. Please contact your administrator', 'title':'Error', 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } return HttpResponse(template.render(context, request)) def id_generator(size=10, chars=string.ascii_uppercase + string.digits): return ''.join(random.choice(chars) for _ in range(size)) # Doesn't need csrf_protect since no-one can guess the URL @sensitive_post_parame
ters() @never_cache def zibawa_password_reset_confirm(request, uidb64=None, tok
en=None, template_name='registration/password_reset_confirm.html', token_generator=default_token_generator, set_password_form=SetPasswordForm, post_reset_redirect=None, extra_context=None): """ ZIBAWA NOTE. THIS VIEW CODE IS COPIED FROM DJANGO DEFAULT VIEW WITH MINOR MODIFICATIONS TO UPDATE PASSWORD IN LDAP (INSTEAD OF THE DJANGO DATABASE) https://github.com/django/django/blob/master/django/contrib/auth/views.py Check the hash in a password reset link and present a form for entering a new password. warnings.warn("The password_reset_confirm() view is superseded by the " "class-based PasswordResetConfirmView().", RemovedInDjango21Warning, stacklevel=2)""" assert uidb64 is not None and token is not None # checked by URLconf if post_reset_redirect is None: post_reset_redirect = reverse('password_reset_complete') else: post_reset_redirect = resolve_url(post_reset_redirect) try: # urlsafe_base64_decode() decodes to bytestring uid = force_text(urlsafe_base64_decode(uidb64)) user = UserModel._default_manager.get(pk=uid) except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist): user = None if user is not None and token_generator.check_token(user, token): validlink = True title = _('Enter new password') if request.method == 'POST': form = set_password_form(user, request.POST) if form.is_valid(): form.save() #ZIBAWA MODIFICATIONS START HERE new_password = form.cleaned_data['new_password1'] if(resetLDAPpassword(user.username,new_password)): #change Grafana password grafana_user=GrafanaUser(request.user.id, request.user.username,new_password,request.user.email) logger.debug('resetting Grafana password for %s',request.user.username) if not (grafana_user.changeGrafanaPassword()): #if fails, currently we log but carry on regardless. logger.warning('couldnt reset Grafana password for %s',request.user.username) return HttpResponseRedirect(post_reset_redirect) else: #if result from LDAP is not what we expect, or if no result logger.warning('couldnt reset LDAP password') title = _('Could not reset LDAP password') #ZIBAWA MODIFICATIONS END HERE else: form = set_password_form(user) else: validlink = False form = None title = _('Password reset unsuccessful') context = { 'form': form, 'title': title, 'validlink': validlink, 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } if extra_context is not None: context.update(extra_context) return TemplateResponse(request, template_name, context) @sensitive_post_parameters() @csrf_protect @login_required def zibawa_password_change(request, template_name='registration/password_change_form.html', post_change_redirect=None, password_change_form=SetPasswordForm, extra_context=None): '''warnings.warn("The password_change() view is superseded by the " "class-based PasswordChangeView().", RemovedInDjango21Warning, stacklevel=2)''' if post_change_redirect is None: post_change_redirect = reverse('password_change_done') else: post_change_redirect = resolve_url(post_change_redirect) if request.method == "POST": form = password_change_form(user=request.user, data=request.POST) if form.is_valid(): form.save() # Updating the password logs out all other sessions for the user # except the current one. #ZIBAWA MODIFICATIONS START HERE new_password = form.cleaned_data['new_password1'] if(resetLDAPpassword(request.user.username,new_password)): logger.debug('reset LDAP password') update_session_auth_hash(request, form.user) #change Grafana password grafana_user=GrafanaUser(request.user
from __future__ import ( unicode_literals, absolute_import, division, print_function, ) # Make Py2's str type like Py3's str = type('') # Rules that take into account part of speech to alter text structure_rules = [ ((["JJ*","NN*"],), (["chuffing",0,1],), 0.1), ((["."],), (["our","kid",0],["init",0],["and","that",0],["and","stuff",0]), 0.1), ((["NN"],), (["thing"],), 0.05), ((["START"],), ([0,"here","yar","."],), 0.05), ] # Words to be ignored by the translator ignores = [ "i","a","be","will" ] # Direct word substitutions word_rules = [ (("and",), ("n'",)), (("of",), ("ov",)), (("her",), ("'er",)), (("my",), ("me",)), (("what",), ("wot",)), (("our",), ("ah",)), (("acceptable","ace","awesome","brilliant","excellent","fantastic","good", "great","likable","lovely","super","smashing","nice","pleasing", "rad","superior","worthy","admirable","agreeable","commendable", "congenial","deluxe","honorable","honourable","neat","precious", "reputable","splendid","stupendous","exceptional","favorable", "favourable","marvelous","satisfactory","satisfying","valuable", "wonderful","fine","perfect","special","exciting","amazing","succeeded", "worked","successful"), ("buzzin'","top","mint","boss","sound","fit","sweet","madferit","safe","raz", "bob on","bangin'","peach","bazzin'","kewl","quality")), (("anything",), ("owt",)), (("nothing","none","zero","blank","null","void","nought",), ("nowt",)), (("disappointed","unhappy","sad","melancholy",), ("gutted",)), (("break","damage","smash","crack","destroy","annihilate","obliterate", "corrupt","ruin","spoil","wreck","trash","fail",), ("knacker","bugger",)), (("bad","poor","rubbish","broken","errored","damaged","atrocious","awful", "cheap","crummy","dreadful","lousy","rough","unacceptable", "garbage","inferior","abominable","amiss","beastly","careless", "cheesy","crap","crappy","cruddy","defective","deficient", "erroneous","faulty","incorrect","inadequate","substandard", "unsatisfactory","dysfunctional","malfunctioning","corrupt","failed",), ("naff","shit","knackered","buggered","pants","pear-shaped","tits up", "ragged","devilled","out of order","bang out of order","biz","kippered", "bobbins")), (("error","mistake","problem",), ("cock up","balls up")), (("very","exceedingly","mostly","sheer","exceptionally","genuinely", "especially","really"), ("well","bare","pure","dead","proper",)), (("numerous","many","all","most",), ("bare","pure",)), (("mad","crazy","insane","crazed","kooky","nuts","nutty","silly","wacky", "beserk","cuckoo","potty","batty","bonkers","unhinged","mental", "idiotic","stupid","moronic","dumb","foolish",), ("barmy",)), (("delighted","pleased","happy","cheerful","contented","ecstatic","elated", "glad"
,"joyful","joyous","jubilant","lively","merry","overjoyed", "peaceful","pleasant","pleased","thrilled","upbeat","blessed", "blest","blissful","captivated","gleeful","gratified","jolly", "mirthful","playful","proud",), ("chuffed","buzzin'")), (("things","stuff","elements","parts","pieces","facts","subjects","situations", "concepts","concerns","items","materials","objects","files",), ("shit
",)), (("attractive","alluring","beautiful","charming","engaging","enticing", "glamorous","gorgeous","handsome","inviting","tempting","adorable", "agreeable","enchanting","enthralling","hunky","pretty","seductive", "provocative","tantalizing","teasing","stunning",), ("fit",)), (("any",), ("whatever",)), (("unattractive","ugly","horrible","nasty","unpleasant","hideous","gross", "unsightly","horrid","unseemly","grisly","awful","foul","repelling", "repulsive","repugnant","revolting","uninviting","monstrous",), ("mingin'","rancid","'angin","rank","manky")), (("fast","quick","swift","brief",), ("rapid",)), (("pound",), ("quid","squid",)), (("man",), ("bloke", "fella",)), (("men",), ("blokes", "fellas",)), (("mate", "friend"), ("pal","mate",)), (("hello","greetings","welcome","hi","howdy",), ("arrite","how do","hiya",)), (("bye","goodbye","farewell",), ("ta-ra",)), (("kiss",), ("snog",)), (("sandwich",), ("butty","barm")), (("sandwiches",), ("butties","barms")), (("eat","consume","absorb","digest","food","sustinance",), ("scran",)), (("lunch",), ("dinner",)), (("dinner",), ("tea",)), (("you",), ("youse",)), (("idiot","moron","fool","buffoon","clown","jerk","nerd","nitwit","stooge", "sucker","twit","clod","cretin","dolt","dope","dunce","oaf","twerp", "imbecile","ignoramus","loon","ninny","numskull",), ("scrote","muppet","knobber","spanner","gonk","cabbage")), (("police","law","cop","cops","policeman","policewoman","constable","officer", "detective","bobby","copper",), ("dibble",)), (("house","dwelling","appartment","building","home","mansion","residence", "shack","abode","castle","cave","coop","flat","habitation","pad", "residency","place",), ("gaff",)), (("was",), ("were",)), (("were",), ("was",)), (("yes","ok",), ("aye",)), (("are",), ("iz",)), (("no",), ("nah",)), (("haven't",), ("ain't",)), (("right",), ("reet",)), (("the",), ("t'",)), (("?",), ("eh?","or wot?","yeah?")), ] # Alterations to the sound of a word based on its consonant and vowel sounds phoneme_rules = [ ((["START","HH"],), ["START","'"]), ((["ER","END"],), ["AA","'","END"]), ((["T","END"],), ["'","END"],), ((["AE","R"],), ["AE"]), ((["AA","R"],), ["AE","R"]), ((["AH1"],), ["UW"],), ((["AO","R","END"],["UH","R","END"],), ["AH","R"]), ((["AO"],), ["AA"],), ((["NG","END"],), ["N","'","END"]), ((["T","UW","END"],), ["T","AH","END"]), ((["START","DH"],), ["START","D"]), ((["TH","END"],), ["F","END"],), ((["DH","END"],), ["V","END"],), ((["START","TH"],), ["START","F"]), ((["VOWEL","T","VOWEL"],), [0,"R",2]), ] if __name__ == "__main__": import re,random,sys text = sys.argv[1] for patts,repls in words: for patt in patts: text = re.sub(r'\b'+patt+r'\b',lambda m: random.choice(repls),text) print(text)
########################################################################## # # Copyright (c) 2015, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of John Haddon nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ####################
######################################################

import unittest

import Gaffer
import GafferTest

class ApplicationTest( GafferTest.TestCase ) :

	def testTaskSchedulerInitDoesntSuppressExceptions( self ) :

		def f() :
			import Gaffer._Gaffer as _Gaffer
			with _Gaffer._tbb_task_scheduler_init( _Gaffer._tbb_task_scheduler_init.automatic ) :
				raise Exception( "Woops!" )

		self.assertRaises( Exception, f )

if __name__ == "__main__":
	unittest.main()
#
-*- coding: utf-8 -*- from __future__ import unicode_literals from django.apps import AppConfig class ConsultantformConfig(AppConfig): name = 'consultantform'
class RuntimeValidationError(Exception):
    pass
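# Minimal usage sketch (the validator below is illustrative, not from the
# original module):
#
#   def require_positive(value):
#       if value <= 0:
#           raise RuntimeValidationError('expected a positive value, got %r' % value)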
"""Provides the constants needed for com
ponent.""" from typing import Final SUPPORT_ALARM_ARM_HOME: Final = 1 SUPPORT_ALARM_ARM_AWAY: Final = 2 SUPPORT_ALARM_ARM_NIGHT: Final = 4 SUPPORT_AL
ARM_TRIGGER: Final = 8 SUPPORT_ALARM_ARM_CUSTOM_BYPASS: Final = 16 SUPPORT_ALARM_ARM_VACATION: Final = 32 CONDITION_TRIGGERED: Final = "is_triggered" CONDITION_DISARMED: Final = "is_disarmed" CONDITION_ARMED_HOME: Final = "is_armed_home" CONDITION_ARMED_AWAY: Final = "is_armed_away" CONDITION_ARMED_NIGHT: Final = "is_armed_night" CONDITION_ARMED_VACATION: Final = "is_armed_vacation" CONDITION_ARMED_CUSTOM_BYPASS: Final = "is_armed_custom_bypass"
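# Illustration (not part of the original file): the SUPPORT_* constants are
# bit flags, so a supported-features mask is composed with | and tested
# with &.
#
#   features = SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY
#   can_arm_home = bool(features & SUPPORT_ALARM_ARM_HOME)    # True
#   can_arm_night = bool(features & SUPPORT_ALARM_ARM_NIGHT)  # False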
# -*
- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.core.urlresolvers import reverse from django.views.generic import DetailView, ListView,
RedirectView, UpdateView from django.contrib.auth.mixins import LoginRequiredMixin from .models import User class UserDetailView(LoginRequiredMixin, DetailView): model = User # These next two lines tell the view to index lookups by username slug_field = "username" slug_url_kwarg = "username" class UserRedirectView(LoginRequiredMixin, RedirectView): permanent = False def get_redirect_url(self): return reverse("users:detail", kwargs={"username": self.request.user.username}) class UserUpdateView(LoginRequiredMixin, UpdateView): fields = ['name', ] # we already imported User in the view code above, remember? model = User # send the user back to their own page after a successful update def get_success_url(self): return reverse("users:detail", kwargs={"username": self.request.user.username}) def get_object(self): # Only get the User record for the user making the request return User.objects.get(username=self.request.user.username) class UserListView(LoginRequiredMixin, ListView): model = User # These next two lines tell the view to index lookups by username slug_field = "username" slug_url_kwarg = "username"
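# Hypothetical URL wiring for the views above (a sketch in the
# cookiecutter-django style; the patterns and names are assumptions):
#
#   from django.conf.urls import url
#   from . import views
#
#   urlpatterns = [
#       url(r'^$', views.UserListView.as_view(), name='list'),
#       url(r'^~redirect/$', views.UserRedirectView.as_view(), name='redirect'),
#       url(r'^(?P<username>[\w.@+-]+)/$', views.UserDetailView.as_view(), name='detail'),
#       url(r'^~update/$', views.UserUpdateView.as_view(), name='update'),
#   ]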
# This file is part of fedmsg. # Copyright (C) 2012 Red Hat, Inc. # # fedmsg is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # fedmsg is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with fedmsg; if not, write to the Free Software # Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA #
# Authors: Ralph Bean <rbean@redhat.com> #
#!/usr/bin/python # System of three linear equations # ax + by + cz = j # dx + ey + fz = k # gx + hy + iz = l # System of three linear equations in matrix notation # - - - - - - # | a b c | | x | | j | # | | | | | | # | d e f | | y | = | k | # | | | | | | # | g h i | | z | | l | # - - - - - - # Matrix of Coefficients # a b c # d e f # g h i # Matrix of Variables # x # y # z # Matrix of Resulting Values # j # k # l # Rule of Sarrus # a b c|a b # d e f|d e # g h i|g h # Rule of Sarrus Index Values # 0 1 2|0 1 # 3 4 5|3 4 # 6 7 8|6 7 # Determinant # det(M) = aei + bfg + cdh - gec - hfa - idb # Cramer's Rule # | j b c | | a j c | | a b j | # | k e f | | d k f | | d e k | # | l h i | | g l i | | g h l | # ---------, ---------, --------- # | a b c | | a b c | | a b c | # | d e f | | d e f | | d e f | # | g h i | | g h i | | g h i | import sys def main(): inputs_dict = {'a':int(raw_input("a:")), 'b':int(raw_input("b:")), 'c':int(raw_input("c:")), 'j':int(raw_input("j:")), 'd':int(raw_input("d:")), 'e':int(raw_input("e:")), 'f':int(raw_input("f:")), 'k':int(raw_input("k:")), 'g':int(raw_input("g:")), 'h':int(raw_input("h:")), 'i':int(raw_input("i:")), 'l':int(raw_input("l:"))} coeffs_matrix = {'a':inputs_dict['a'], 'b':inputs_dict['b'], 'c':inputs_dict['c'], 'd':inputs_dict['d'], 'e':inputs_dict['e'], 'f':inputs_dict['f'], 'g':inputs_dict['g'], 'h':inputs_dict['h'], 'i':inputs_dict['i']} x_numerator_matrix = {'j':inputs_dict['j'], 'b':inputs_dict['b'], 'c':inputs_dict['c'], 'k':inputs_dict['k'], 'e':inputs_dict['e'], 'f':inputs_dict['f'], 'l':inputs_dict['l'], 'h':inputs_dict['h'], 'i':inputs_dict['i']} y_numerator_matrix = {'a':inputs_dict['a'], 'j':inputs_dict['j'], 'c':inputs_dict['c'], 'd':inputs_dict['d'], 'k':inputs_dict['k'], 'f':inputs_dict['f'], 'g':inputs_dict['g'], 'l':inputs_dict['l'], 'i':inputs_dict['i']} z_numerator_matrix = {'a':inputs_dict['a'], 'b':inputs_dict['b'], 'j':inputs_dict['j'], 'd':inputs_dict['d'], 'e':inputs_dict['e'], 'k':inputs_dict['k'], 'g':inputs_dict['g'], 'h':inputs_dict['h'], 'l':inputs_dict['l']} # Rule of Sarrus for det_coeffs_matrix # a b c|a b # d e f|d e # g h i|g h # det_coeffs_matrix = (coeffs_matrix['a'] * coeffs_matrix['e'] * coeffs_matrix['i'] + coeffs_matrix['b'] * coeffs_matrix['f'] * coeffs_matrix['g'] + coeffs_matrix['c'] * coeffs_matrix['d'] * coeffs_matrix['h'] - coeffs_matrix['g'] * coeffs_matrix['e'] * coeffs_matrix['c'] - coeffs_matrix['h'] * coeffs_matrix['f'] * coeffs_matrix['a'] - coeffs_matrix['i'] * coeffs_matrix['d'] * coeffs_matrix['b']) # Rule of Sarrus for det_x_numerator_matrix # j b c|j b # k e f|k e # l h i|l h # det_x_numerator_matrix = (x_numerator_matrix['j'] * x_numerator_matrix['e'] * x_numerator_matrix['i'] + x_numerator_matrix['b'] * x_numerator_matrix['f'] * x_numerator_matrix['l'] + x_numerator_matrix['c'] * x_numerator_matrix['k'] * x_numerator_matrix['h'] - x_numerator_matrix['l'] * x_numerator_matrix['e'] * x_numerator_matrix['c'] - x_numerator_matrix['h'] * x_numerator_matrix['f'] * x_numerator_matrix['j'] - x_numerator_matrix['i'] * x_numerator_matrix['k'] * x_numerator_matrix['b'] ) # Rule of Sarrus for det_y_numerator_matrix # a j c|a j # d k f|d k # g l i|g l # det_y_numerator_matrix = (y_numerator_matrix['a'] * y_numerator_matri
x['k'] * y_numerator_matrix['i'] + y_numerator_matrix['j'] * y_numerator_matrix['f'] * y_numerator_matrix['g'] + y_numerator_matrix['c'] * y_numerator_matrix['d'] * y_numerator_matrix['l'] - y_numerator_matrix['g'] * y_numerator_matrix['k'] * y_numerator_matrix['c'] - y_numerator_matrix['l'] * y_numerator_matrix['f'] * y_numerator_matrix['a'] - y_numerator_matrix['i'] * y_numerator_matrix['d'] * y_numerator_matrix['j'])
    # Rule of Sarrus for det_z_numerator_matrix
    # a b j|a b
    # d e k|d e
    # g h l|g h
    det_z_numerator_matrix = (z_numerator_matrix['a'] * z_numerator_matrix['e'] * z_numerator_matrix['l'] +
                              z_numerator_matrix['b'] * z_numerator_matrix['k'] * z_numerator_matrix['g'] +
                              z_numerator_matrix['j'] * z_numerator_matrix['d'] * z_numerator_matrix['h'] -
                              z_numerator_matrix['g'] * z_numerator_matrix['e'] * z_numerator_matrix['j'] -
                              z_numerator_matrix['h'] * z_numerator_matrix['k'] * z_numerator_matrix['a'] -
                              z_numerator_matrix['l'] * z_numerator_matrix['d'] * z_numerator_matrix['b'])

    if det_coeffs_matrix == 0:
        sys.exit("No unique solution: the determinant of the coefficient matrix is zero.")

    # Use float division: the determinants are ints, so Python 2's '/' would
    # otherwise truncate the results.
    x = float(det_x_numerator_matrix) / det_coeffs_matrix
    y = float(det_y_numerator_matrix) / det_coeffs_matrix
    z = float(det_z_numerator_matrix) / det_coeffs_matrix

    print
    print "results: "
    print "x = " + str(x)
    print "y = " + str(y)
    print "z = " + str(z)

# Run main() when executed as a script.
if __name__ == "__main__":
    sys.exit(main())
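# Quick cross-check of the method (illustrative, not part of the original
# script): for 2x + y + z = 7, x + 3y + 2z = 13, x + y + 4z = 15 the
# determinant of coefficients is 16 and Cramer's rule gives x=1, y=2, z=3,
# which numpy confirms:
#
#   import numpy as np
#   A = np.array([[2., 1., 1.], [1., 3., 2.], [1., 1., 4.]])
#   b = np.array([7., 13., 15.])
#   print(np.linalg.solve(A, b))   # [ 1.  2.  3.]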
# -*- coding: utf-8
-*- # This software may be modified and distributed under the terms # of the BSD license. See the LICENSE file for details. from aniso8601 import compat class DateResolution(object): Year, Month, Week, Weekday, Day, Ordinal = list(compat.range(6)) class TimeResolution(object): Seconds, Minutes, Hours = list(compat.range(
3))
attributes and items:: def _object_getattr(obj, field): # check whether this key is an attribute if hasattr(obj, field): value = getattr(obj, field) # if not, perhaps it is an item (raw dicts, etc) elif field in obj: value = obj[field] # return whatever we've got. return value Or consider a more complex, application-specific structure:: def _object_getattr(version, field): if field in ['key', 'committed', 'created', 'hash']: return getattr(version, field) else: return version.attributes[field]['value'] """ # TODO: consider changing this to raise an exception if no value is found. value = None # check whether this key is an attribute if hasattr(obj, field): value = getattr(obj, field) # if not, perhaps it is an item (raw dicts, etc) elif field in obj: value = obj[field] # return whatever we've got. return value def limit_gen(limit, iterable): """A generator that applies a count `limit`.""" limit = int(limit) assert limit >= 0, 'negative limit' for item in iterable: if limit <= 0: break yield item limit -= 1 def offset_gen(offset, iterable, skip_signal=None): """A generator that applies an `offset`, skipping `offset` elements from `iterable`. If skip_signal is a callable, it will be called with every skipped element. """ offset = int(offset) assert offset >= 0, 'negative offset' for item in iterable: if offset > 0: offset -= 1 if callable(skip_signal): skip_signal(item) else: yield item def chain_gen(iterables): """A generator that chains `iterables`.""" for iterable in iterables: for item in iterable: yield item class Filter(object): """Represents a Filter for a specific field and its value. Filters are used on queries to narrow down the set of matching objects. Args: field: the attribute name (string) on which to apply the filter. op: the conditional operator to apply (one of ['<', '<=', '=', '!=', '>=', '>']). value: the attribute value to compare against. Examples:: Filter('name', '=', 'John Cleese') Filter('age', '>=', 18) """ conditional_operators = ['<', '<=', '=', '!=', '>=', '>'] """Conditional operators that Filters support.""" _conditional_cmp = { "<": lambda a, b: a < b, "<=": lambda a, b: a <= b, "=": lambda a, b: a == b, "!=": lambda a, b: a != b, ">=": lambda a, b: a >= b, ">": lambda a, b: a > b } object_getattr = staticmethod(_object_getattr) """Object attribute getter. Can be overridden to match client data model. See :py:meth:`datastore.query._object_getattr`. """ def __init__(self, field, op, value): if op not in self.conditional_operators: raise ValueError( '"%s" is not a valid filter Conditional Operator' % op) self.field = field self.op = op self.value = value def __call__(self, obj): """Returns whether this object passes this filter. This method aggressively tries to find the appropriate value. """ value = self.object_getattr(obj, self.field) # TODO: which way should the direction go here? it may make more sense to # convert the passed-in value instead. Or try both? Or not at all? 
if not isinstance(value, self.value.__class__) and not self.value is None and not value is None: value = self.value.__class__(value) return self.valuePasses(value) def valuePasses(self, value): """Returns whether this value passes this filter""" return self._conditional_cmp[self.op](value, self.value) def __str__(self): return '%s %s %s' % (self.field, self.op, self.value) def __repr__(self): return "Filter('%s', '%s', %s)" % (self.field, self.op, repr(self.value)) def __eq__(self, o): return self.field == o.field and self.op == o.op and self.value == o.value def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return hash(repr(self)) def generator(self, iterable): """Generator function that iteratively filters given `items`.""" for item in iterable: if self(item): yield item @classmethod def filter(cls, filters, iterable): """Returns the elements in `iterable` that pass given `filters`""" if isinstance(filters, Filter): filters = [filters] for filter in filters: iterable = filter.generator(iterable) return iterable class Order(object): """Represents an Order upon a specific field, and a direction. Orders are used on queries to define how they operate on objects Args: order: an order in string form. This follows the format: [+-]name where + is ascending, - is descending, and name is the name of the field to order by. Note: if no ordering operator is specified, + is default. Examples:: Order('+name') # ascending order by name Order('-age') # descending order by age Order('score') # ascending order by score """ order_operators = ['-', '+'] """Ordering operators: + is ascending, - is descending.""" object_getattr = staticmethod(_object_getattr) """Object attribute getter. Can be overridden to match client data model. See :py:meth:`datastore.query._object_getattr`. """ def __init__(self, order): self.op = '+' try: if order[0] in self.order_operators: self.op = order[0] order = order[1:] except IndexError: raise ValueError('Order input be at least two characters long.') self.field = order if self.op not in self.order_operators: raise ValueError('"%s" is not a valid Order Operator.' % op) def __str__(self): return '%s%s' % (self.op, self.field) def __repr__(self): return "Order('%s%s')" % (self.op, self.field) def __eq__(self, other): return self.field == other.field an
d self.op == other.op def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return hash(repr(self)) def isAscending(self): return self.op == '+' def isDescending(self): return not self.isAscending() def keyfn(self, obj): """A key function to be used in pythonic sort operations.""" return self.object_getattr(obj,
self.field)

  @classmethod
  def multipleOrderComparison(cls, orders):
    """Returns a function that will compare two items according to `orders`."""
    comparers = [(o.keyfn, 1 if o.isAscending() else -1) for o in orders]

    def cmpfn(a, b):
      for keyfn, ascOrDesc in comparers:
        comparison = cmp(keyfn(a), keyfn(b)) * ascOrDesc
        if comparison != 0:
          return comparison
      return 0

    return cmpfn

  @classmethod
  def sorted(cls, items, orders):
    """Returns the elements in `items` sorted according to `orders`.

    Note: relies on the Python 2 `cmp` builtin and `sorted(cmp=...)`.
    """
    return sorted(items, cmp=cls.multipleOrderComparison(orders))


class Query(object):
  """A Query describes a set of objects.

  Queries are used to retrieve objects and instances matching a set of
  criteria from Datastores. Query objects themselves are simply
  descriptions; the actual Query implementations are left up to the
  Datastores.
  """

  """Object attribute getter. Can be overridden to match client data model."""
  object_getattr = staticmethod(_object_getattr)

  def __init__(self, key=Key('/'), limit=None, offset=0, offset_key=None, object_getattr=None):
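# --- Hedged usage sketch (added for illustration; not part of the original
# datastore module). Plain dicts work as the stored objects because
# _object_getattr falls back to item access. Note that Order.sorted relies
# on the Python 2 sorted(cmp=...) form.
people = [
    {'name': 'John Cleese', 'age': 74},
    {'name': 'Terry Gilliam', 'age': 73},
    {'name': 'Baby', 'age': 1},
]
adults = list(Filter.filter(Filter('age', '>=', 18), people))
oldest_first = Order.sorted(adults, [Order('-age')])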
"decodebin", connectToOutput=rtsp, connectWhenAvailable="audio", async_handling=True) self.addElement("audioconvert") self.addElement("audiorate") self.addElement("voaacenc") self.addElement("aacparse") self.mp3src = self.addElement("queue", max_size_time=10000000) elif s == "screen": self.addElement("ximagesrc") self.addElement("capsfilter", caps="video/x-raw,framerate=" + (self.config.get('device.fps', '4') or '4') + "/1") self.addElement("videoconvert") self.addElement("queue", max_size_time=10000000) tr
y:
            self.addElement("omxh264enc", interval_intraframes=int(
                (self.config.get('device.fps', '4') or '4')))
        except Exception:
            self.addElement("x264enc", tune="zerolatency", rc_lookahead=0,
                            bitrate=int(self.dev.config['device.bitrate']),
                            key_int_max=int((self.config.get('device.fps', '4') or '4')) * 2)

        self.addElement("capsfilter", caps="video/x-h264, profile=main")
        self.addElement("h264parse")

        self.h264source = self.addElement("tee")
        # Tested
        # rtspsrc location=rtsp://192.168.1.6:8080/h264_pcm.sdp latency=100 ! queue ! rtph264depay ! h264parse

        return s


import iot_devices.device as devices


class NVRChannel(devices.Device):
    device_type = 'NVRChannel'
    readme = os.path.join(os.path.dirname(__file__), "README.md")
    defaultSubclassCode = defaultSubclassCode

    def putTrashInBuffer(self):
        "Force a wake up of a thread sitting around waiting for the pipe"
        if os.path.exists(self.rawFeedPipe):
            import select
            try:
                # os.open() returns a raw file descriptor, so write through
                # os.write() rather than a file-object method.
                f = os.open(self.rawFeedPipe, flags=os.O_NONBLOCK | os.O_APPEND)
                s = 0
                for i in range(188 * 42):
                    r, w, x = select.select([], [f], [], 0.2)
                    if w:
                        os.write(f, b'b')
                    else:
                        s += 1
                        if s > 15:
                            return
            except Exception:
                print(traceback.format_exc())

    def thread(self):
        self.threadExited = False
        b = b''
        while not os.path.exists(self.rawFeedPipe):
            time.sleep(1)
        f = open(self.rawFeedPipe, 'rb')
        initialValue = self.runWidgetThread
        lp = time.monotonic()

        while self.runWidgetThread and (self.runWidgetThread == initialValue):
            try:
                b += f.read(188 * 32)
            except OSError:
                time.sleep(0.2)
            except TypeError:
                time.sleep(1)
                try:
                    f = open(self.rawFeedPipe, 'rb')
                except Exception:
                    print(traceback.format_exc())
            except Exception:
                time.sleep(0.5)
                print(traceback.format_exc())

            if self.runWidgetThread:
                if len(b) > (188 * 256) or (lp < (time.monotonic() - 0.2) and b):
                    lp = time.monotonic()
                    self.push_bytes("raw_feed", b)
                    self.lastPushedWSData = time.monotonic()
                    b = b''
        self.threadExited = True

    def close(self):
        self.closed = True
        try:
            self.process.stop()
        except Exception:
            print(traceback.format_exc())

        self.runWidgetThread = False
        try:
            self.putTrashInBuffer()
        except Exception:
            print(traceback.format_exc())

        try:
            os.remove(self.rawFeedPipe)
        except Exception:
            print(traceback.format_exc())

        s = 10
        while s:
            s -= 1
            if self.threadExited:
                break
            time.sleep(0.1)
        devices.Device.close(self)
        try:
            shutil.rmtree("/dev/shm/knvr_buffer/" + self.name)
        except Exception:
            pass
        try:
            self.checker.unregister()
        except Exception:
            logger.exception("Unregistering")

    def __del__(self):
        self.close()

    def onRawTSData(self, data):
        pass

    def getSnapshot(self):
        if hasattr(self, 'snapshotter'):
            x = self.snapshotter.pullToFile("/dev/shm/knvr_buffer/" + self.name + ".bmp")
            if x:
                with open("/dev/shm/knvr_buffer/" + self.name + ".bmp", 'rb') as f:
                    x = f.read()
                os.remove("/dev/shm/knvr_buffer/" + self.name + ".bmp")
                return x

    def connect(self, config):
        if self.closed:
            return
        self.config = config
        if time.monotonic() - self.lastStart < 15:
            return

        # When we reconnect we stop the recording and motion
        self.set_data_point("record", False, None, automated_record_uuid)
        self.set_data_point("raw_motion_value", 0)
        self.set_data_point("motion_detected", 0)
        self.activeSegmentDir = self.segmentDir = None

        self.lastStart = time.monotonic()

        if self.process:
            try:
                self.process.stop()
            except Exception:
                print(traceback.format_exc())

        # Used to check that things are actually still working.
        # Set them to prevent a loop.
self.lastSegment = time.monotonic() self.lastPushedWSData = time.monotonic() # Can't stop as soon as they push stop, still need to capture # the currently being recorded segment self.stoprecordingafternextsegment = 0 try: shutil.rmtree("/dev/shm/knvr_buffer/" + self.name) except Exception: pass os.makedirs("/dev/shm/knvr_buffer/" + self.name) try: # Make it so nobody else can read the files os.chmod("/dev/shm/knvr_buffer/" + self.name, 0o700) except Exception: pass # Close the old thread self.runWidgetThread = time.monotonic() self.putTrashInBuffer() s = 10 while s: s -= 1 if self.threadExited: break time.sleep(0.1) # Exec is needed so we can kill it # self.process = reap.Popen("exec gst-launch-1.0 -q "+getGstreamerSourceData(self.data.get('device.source','')) +"! ",shell=True) self.process = Pipeline() self.process.dev = self self.process.getGstreamerSourceData( self.config.get('device.source', ''), self.config, self.config.get('device.username', ''), self.config.get('device.password', '')) x = self.process.addElement( "queue", connectToOutput=self.process.h264source, max_size_time=10000000) self.process.addElement("mpegtsmux", connectToOutput=( x, self.process.mp3src)) self.mpegtssrc = self.process.addElement("tee") # Path to be created path = self.rawFeedPipe # Get rid of the old one, it could be clogged try: os.remove(path) except OSError: pass try: os.mkfifo(path) except OSError: print("Failed to create FIFO") os.chmod(path, 0o700) self.process.addElement("queue", max_size_time=10000000) self.process.addElement("filesink", location=path, buffer_mode=2, sync=self.process.syncFile) # # Motion detection part of the graph # # This flag discards every unit that cannot be handled individually self.process.addElement( "identity", drop_buffer_flags=8192, connectToOutput=self.process.h264source) self.process.addElement("queue", max_size_time=20000000, leaky=2) self.process.addElement("capsfilter", caps="video/x-h264") try: self.process.addElement("omxh264dec") except:
def
main(): a1="X" TextOut(10, LCD_LI
NE1, a1)
# # Copyright (c) 2017 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from typing import Union import numpy as np from rl_coach.agents.policy_optimization_agent import PolicyOptimizationAgent from rl_coach.agents.value_optimization_agent import ValueOptimizationAgent from rl_coach.architectures.embedder_parameters import InputEmbedderParameters from rl_coach.architectures.head_parameters import QHeadParameters from rl_coach.architectures.middleware_parameters import FCMiddlewareParameters from rl_coach.base_parameters import AlgorithmParameters, AgentParameters, NetworkParameters from rl_coach.core_types import EnvironmentSteps from rl_coach.exploration_policies.e_greedy import EGreedyParameters from rl_coach.memories.episodic.single_episode_buffer import SingleEpisodeBufferParameters from rl_coach.utils import last_sample class NStepQNetworkParameters(NetworkParameters): def __init__(self): super().__init__() self.input_embedders_parameters = {'observation': InputEmbedderParameters()} self.middleware_parameters = FCMiddlewareParameters() self.heads_parameters = [QHeadParameters()] self.optimizer_type = 'Adam' self.async_training = True self.shared_optimizer = True self.create_target_network = True class NStepQAlgorithmParameters(AlgorithmParameters): """ :param num_steps_between_copying_online_weights_to_target: (StepMethod) The number of steps between copying the online network weights to the target network weights. :param apply_gradients_every_x_episodes: (int) The number of episodes between applying the accumulated gradients to the network. After every num_steps_between_gradient_updates steps, the agent will calculate the gradients for the collected data, it will then accumulate it in internal accumulators, and will only apply them to the network once in every apply_gradients_every_x_episodes episodes. :param num_steps_between_gradient_updates: (int) The number of steps between calculating gradients for the collected data. In the A3C paper, this parameter is called t_max. Since this algorithm is on-policy, only the steps collected between each two gradient calculations are used in the batch. :param targets_horizon: (str) Should be either 'N-Step' or '1-Step', and defines the length for which to bootstrap the network values over. Essentially, 1-Step follows the regular 1 step bootstrapping Q learning update. 
For more information, please refer to the original paper (https://arxiv.org/abs/1602.01783) """ def __init__(self): super().__init__() self.num_steps_between_copying_online_weights_to_target = EnvironmentSteps(10000) self.apply_gradients_every_x_episodes = 1 self.num_steps_between_gradient_updates = 5 # this is called t_max in all the papers self.targets_horizon = 'N-Step' class NStepQAgentParameters(AgentParameters): def __init__(self): super().__init__(algorithm=NStepQAlgorithmParameters(), exploration=EGreedyParameters(), memory=SingleEpisodeBufferParameters(), networks={"main": NStepQNetworkParameters()}) @property def path(self): return 'rl_coach.agents.n_step_q_agent:NStepQAgent' # N Step Q Learning Agent - https://arxiv.org/abs/1602.01783 class NStepQAgent(ValueOptimizationAgent, PolicyOptimizationAgent): def __init__(self, agent_parameters, parent: Union['LevelManager', 'CompositeAgent']=None): super().__init__(agent_parameters, parent) self.last_gradient_update_step_idx = 0 self.q_values = self.register_signal('Q Values') self.value_loss = self.register_signal('Value Loss') @property def is_on_policy(self) -> bool: return False def learn_from_batch(self, batch): # batch contains a list of episodes to learn from network_keys = self.ap.network_wrappers['main'].input_embedders_parameters.keys() # get the values for the current states state_value_head_targets = self.networks['main'].online_network.predict(batch.states(network_keys)) # the targets for the state value estimator if self.ap.algorithm.targets_horizon == '1-Step': # 1-Step Q learning q_st_plus_1 = self.networks['main'].target_network.predict(batch.next_states(network_keys)) for i in reversed(range(batch.size)): state_value_head_targets[i][batch.actions()[i]] = \ batch.rewards()[i] \ + (1.0 - batch.game_overs()[i]) * self.ap.algorithm.discount * np.max(q_st_plus_1[i], 0) elif self.ap.algorithm.targets_horizon
== 'N-Step': # N-Step Q learning if batch.game_overs()[-1]: R = 0 else: R = np.max(self.networks['main'].target_network.predict(last_sample(batch.next_states(network_keys)))) for i in reversed(range(batch.size)): R = batch.rewards()[i] + self.ap.algorithm.discount * R state_value_head_targets[i][batch.actions()[i]] = R else:
raise ValueError('The available values for targets_horizon are: 1-Step, N-Step')

        # add Q value samples for logging
        self.q_values.add_sample(state_value_head_targets)

        # train
        result = self.networks['main'].online_network.accumulate_gradients(
            batch.states(network_keys), [state_value_head_targets])

        # logging
        total_loss, losses, unclipped_grads = result[:3]
        self.value_loss.add_sample(losses[0])

        return total_loss, losses, unclipped_grads

    def train(self):
        # update the target network of every network that has a target network
        if any([network.has_target for network in self.networks.values()]) \
                and self._should_update_online_weights_to_target():
            for network in self.networks.values():
                network.update_target_network(self.ap.algorithm.rate_for_copying_weights_to_target)

            self.agent_logger.create_signal_value('Update Target Network', 1)
        else:
            self.agent_logger.create_signal_value('Update Target Network', 0, overwrite=False)

        return PolicyOptimizationAgent.train(self)
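# --- Hedged sketch (illustration only; not part of rl_coach). The N-Step
# branch above accumulates the discounted return backwards through the
# episode: R = r_i + discount * R, seeded with 0 for a terminal episode or
# with max_a Q_target(s_T, a) otherwise. On toy numbers, assuming a
# 3-step terminal episode and discount 0.9:
rewards = [1.0, 0.0, 2.0]
discount = 0.9
R = 0.0              # terminal episode, so no bootstrap value
targets = []
for r in reversed(rewards):
    R = r + discount * R
    targets.insert(0, R)
# targets == [2.62, 1.8, 2.0]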
from pycp2k.inputsection import InputSection class _each438(InputSection): def __init__(self): InputSection.__init__(self) self.Just_energy = None self.Powell_opt = None self.Qs_scf = None self.Xas_scf = None self.Md = None self.Pint = None self.Metadynamics = None self.Geo_opt = None self.Rot_opt = None self.Cell_opt = None self.Band = None self.Ep_lin_solver = None self.Spline_find_coeffs = None self.Replica_eval = None self.Bsse = None self.Shell_opt = None self.Tddft_scf = None self._name = "EACH" self._keywords = {'Bsse': 'BSSE', 'Cell_opt': 'CELL_OPT', 'Just_energy': 'JUST_ENERGY', 'Band': 'BAND', 'Xas_scf': 'XAS_SCF', 'Rot_opt': 'ROT_OPT', 'Replica_eval': 'REPLICA_EVAL', 'Tddft_scf': 'TDDFT_SCF', 'Shell_opt': 'SHELL_OPT', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt':
'GEO_OPT', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Powell_opt': 'POWELL_OPT'
, 'Qs_scf': 'QS_SCF', 'Ep_lin_solver': 'EP_LIN_SOLVER'}
# Embedded file name: b.py
from a import x

def x():
    print 'b'


x()
from a import x
x()  # okay
) class BinarySensorSchema: """Voluptuous schema for KNX binary sensors.""" CONF_STATE_ADDRESS = CONF_STATE_ADDRESS CONF_SYNC_STATE = CONF_SYNC_STATE CONF_IGNORE_INTERNAL_STATE = "ignore_internal_state" CONF_AUTOMATION = "automation" CONF_HOOK = "hook" CONF_DEFAULT_HOOK = "on" CONF_COUNTER = "counter" CONF_DEFAULT_COUNTER = 1 CONF_ACTION = "action" CONF_RESET_AFTER = "reset_after" DEFAULT_NAME = "KNX Binary Sensor" AUTOMATION_SCHEMA = vol.Schema( { vol.Optional(CONF_HOOK, default=CONF_DEFAULT_HOOK): cv.string, vol.Optional(CONF_COUNTER, default=CONF_DEFAULT_COUNTER): cv.port, vol.Required(CONF_ACTION): cv.SCRIPT_SCHEMA, } ) AUTOMATIONS_SCHEMA = vol.All(cv.ensure_list, [AUTOMATION_SCHEMA]) SCHEMA = vol.All( cv.deprecated("significant_bit"), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_SYNC_STATE, default=True): vol.Any( vol.All(vol.Coerce(int), vol.Range(min=2, max=1440)), cv.boolean, cv.string, ), vol.Optional(CONF_IGNORE_INTERNAL_STATE, default=False): cv.boolean, vol.Required(CONF_STATE_ADDRESS): cv.string, vol.Optional(CONF_DEVICE_CLASS): cv.string, vol.Optional(CONF_RESET_AFTER): cv.positive_int, vol.Optional(CONF_AUTOMATION): AUTOMATIONS_SCHEMA, } ), ) class LightSchema: """Voluptuous schema for KNX lights.""" CONF_STATE_ADDRESS = CONF_STATE_ADDRESS CONF_BRIGHTNESS_ADDRESS = "brightness_address" CONF_BRIGHTNESS_STATE_ADDRESS = "brightness_state_address" CONF_COLOR_ADDRESS = "color_address" CONF_COLOR_STATE_ADDRESS = "color_state_address" CONF_COLOR_TEMP_ADDRESS = "color_temperature_address" CONF_COLOR_TEMP_STATE_ADDRESS = "color_temperature_state_address" CONF_COLOR_TEMP_MODE = "color_temperature_mode" CONF_RGBW_ADDRESS = "rgbw_address" CONF_RGBW_STATE_ADDRESS = "rgbw_state_address" CONF_MIN_KELVIN = "min_kelvin" CONF_MAX_KELVIN = "max_kelvin" DEFAULT_NAME = "KNX Light" DEFAULT_COLOR_TEMP_MODE = "absolute" DEFAULT_MIN_KELVIN = 2700 # 370 mireds DEFAULT_MAX_KELVIN = 6000 # 166 mireds SCHEMA = vol.Schema( { vol.Required(CONF_ADDRESS): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_STATE_ADDRESS): cv.string, vol.Optional(CONF_BRIGHTNESS_ADDRESS): cv.string, vol.Optional(CONF_BRIGHTNESS_STATE_ADDRESS): cv.string, vol.Optional(CONF_COLOR_ADDRESS): cv.string, vol.Optional(CONF_COLOR_STATE_ADDRESS): cv.string, vol.Optional(CONF_COLOR_TEMP_ADDRESS): cv.string, vol.Optional(CONF_COLOR_TEMP_STATE_ADDRESS): cv.string, vol.Optional( CONF_COLOR_TEMP_MODE, default=DEFAULT_COLOR_TEMP_MODE ): cv.enum(ColorTempModes), vol.Optional(CONF_RGBW_ADDRESS): cv.string, vol.Optional(CONF_RGBW_STAT
E_ADDRESS): cv.string, vol.Optional(CONF_MIN_KELVIN, default=DEFAULT_MIN_KELVIN): vol.All( vol.Co
erce(int), vol.Range(min=1) ), vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All( vol.Coerce(int), vol.Range(min=1) ), } ) class ClimateSchema: """Voluptuous schema for KNX climate devices.""" CONF_SETPOINT_SHIFT_ADDRESS = "setpoint_shift_address" CONF_SETPOINT_SHIFT_STATE_ADDRESS = "setpoint_shift_state_address" CONF_SETPOINT_SHIFT_MODE = "setpoint_shift_mode" CONF_SETPOINT_SHIFT_MAX = "setpoint_shift_max" CONF_SETPOINT_SHIFT_MIN = "setpoint_shift_min" CONF_TEMPERATURE_ADDRESS = "temperature_address" CONF_TEMPERATURE_STEP = "temperature_step" CONF_TARGET_TEMPERATURE_ADDRESS = "target_temperature_address" CONF_TARGET_TEMPERATURE_STATE_ADDRESS = "target_temperature_state_address" CONF_OPERATION_MODE_ADDRESS = "operation_mode_address" CONF_OPERATION_MODE_STATE_ADDRESS = "operation_mode_state_address" CONF_CONTROLLER_STATUS_ADDRESS = "controller_status_address" CONF_CONTROLLER_STATUS_STATE_ADDRESS = "controller_status_state_address" CONF_CONTROLLER_MODE_ADDRESS = "controller_mode_address" CONF_CONTROLLER_MODE_STATE_ADDRESS = "controller_mode_state_address" CONF_HEAT_COOL_ADDRESS = "heat_cool_address" CONF_HEAT_COOL_STATE_ADDRESS = "heat_cool_state_address" CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS = ( "operation_mode_frost_protection_address" ) CONF_OPERATION_MODE_NIGHT_ADDRESS = "operation_mode_night_address" CONF_OPERATION_MODE_COMFORT_ADDRESS = "operation_mode_comfort_address" CONF_OPERATION_MODE_STANDBY_ADDRESS = "operation_mode_standby_address" CONF_OPERATION_MODES = "operation_modes" CONF_ON_OFF_ADDRESS = "on_off_address" CONF_ON_OFF_STATE_ADDRESS = "on_off_state_address" CONF_ON_OFF_INVERT = "on_off_invert" CONF_MIN_TEMP = "min_temp" CONF_MAX_TEMP = "max_temp" DEFAULT_NAME = "KNX Climate" DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010" DEFAULT_SETPOINT_SHIFT_MAX = 6 DEFAULT_SETPOINT_SHIFT_MIN = -6 DEFAULT_TEMPERATURE_STEP = 0.1 DEFAULT_ON_OFF_INVERT = False SCHEMA = vol.All( cv.deprecated("setpoint_shift_step", replacement_key=CONF_TEMPERATURE_STEP), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional( CONF_SETPOINT_SHIFT_MODE, default=DEFAULT_SETPOINT_SHIFT_MODE ): cv.enum(SetpointShiftMode), vol.Optional( CONF_SETPOINT_SHIFT_MAX, default=DEFAULT_SETPOINT_SHIFT_MAX ): vol.All(int, vol.Range(min=0, max=32)), vol.Optional( CONF_SETPOINT_SHIFT_MIN, default=DEFAULT_SETPOINT_SHIFT_MIN ): vol.All(int, vol.Range(min=-32, max=0)), vol.Optional( CONF_TEMPERATURE_STEP, default=DEFAULT_TEMPERATURE_STEP ): vol.All(float, vol.Range(min=0, max=2)), vol.Required(CONF_TEMPERATURE_ADDRESS): cv.string, vol.Required(CONF_TARGET_TEMPERATURE_STATE_ADDRESS): cv.string, vol.Optional(CONF_TARGET_TEMPERATURE_ADDRESS): cv.string, vol.Optional(CONF_SETPOINT_SHIFT_ADDRESS): cv.string, vol.Optional(CONF_SETPOINT_SHIFT_STATE_ADDRESS): cv.string, vol.Optional(CONF_OPERATION_MODE_ADDRESS): cv.string, vol.Optional(CONF_OPERATION_MODE_STATE_ADDRESS): cv.string, vol.Optional(CONF_CONTROLLER_STATUS_ADDRESS): cv.string, vol.Optional(CONF_CONTROLLER_STATUS_STATE_ADDRESS): cv.string, vol.Optional(CONF_CONTROLLER_MODE_ADDRESS): cv.string, vol.Optional(CONF_CONTROLLER_MODE_STATE_ADDRESS): cv.string, vol.Optional(CONF_HEAT_COOL_ADDRESS): cv.string, vol.Optional(CONF_HEAT_COOL_STATE_ADDRESS): cv.string, vol.Optional(CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS): cv.string, vol.Optional(CONF_OPERATION_MODE_NIGHT_ADDRESS): cv.string, vol.Optional(CONF_OPERATION_MODE_COMFORT_ADDRESS): cv.string, vol.Optional(CONF_OPERATION_MODE_STANDBY_ADDRESS): cv.string, 
vol.Optional(CONF_ON_OFF_ADDRESS): cv.string, vol.Optional(CONF_ON_OFF_STATE_ADDRESS): cv.string, vol.Optional( CONF_ON_OFF_INVERT, default=DEFAULT_ON_OFF_INVERT ): cv.boolean, vol.Optional(CONF_OPERATION_MODES): vol.All( cv.ensure_list, [vol.In({**OPERATION_MODES, **PRESET_MODES})] ), vol.Optional(CONF_MIN_TEMP): vol.Coerce(float), vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
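# --- Hedged sketch (illustration only; not part of the Home Assistant
# schemas above). How a voluptuous schema validates a raw config dict and
# fills in defaults, using a simplified stand-in for AUTOMATION_SCHEMA
# rather than the real validators:
import voluptuous as vol

demo_schema = vol.Schema({
    vol.Optional('hook', default='on'): str,
    vol.Optional('counter', default=1): vol.All(vol.Coerce(int),
                                                vol.Range(min=0)),
    vol.Required('action'): list,
})
print(demo_schema({'counter': '3', 'action': []}))
# -> {'hook': 'on', 'counter': 3, 'action': []}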
from networkx.al
gorithms.flow.maxflow import * from networkx.algorithms.flow
.mincost import *
# -*- coding: utf-8 -*- # # Clawpack documentation build configuration file, created by # sphinx-quickstart on Wed Mar 25 12:07:14 2009. # # This file is execfile()d with the current directory set to its containing dir. # # The contents of this file are pickled, so don't put values in the namespace # that aren't pickleable (module imports are okay, they're removed automatically). # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If your extensions are in another directory, add it here. If the directory # is relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. sys.path.append(os.path.abspath('../..')) sys.path.append(os.path.abspath('./ext')) # General configuration # --------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx','plot_directive','only_directives', 'sphinx.ext.inheritance_diagram'] # ext
ensions.append('sphinx.ext.jsmath') extensions.append('s
phinx.ext.pngmath') # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Clawpack' copyright = u'2009, Randall J. LeVeque and others' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '4.6' # The full version, including alpha/beta/rc tags. release = '4.6.3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ['users'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = 'math' # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # Options for HTML output # ----------------------- # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. html_style = 'default.css' # html_style = 'mpl.css' # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = 'clawlogo.jpg' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = 'clawicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. 
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'Clawpackdoc'


# Options for LaTeX output
# ------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
  ('index', 'Clawpack.tex', ur'Clawpack Documentation',
   ur'RJL', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True


# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/dev': None}

jsmath_path = 'jsmath/easy/load.js'
# jsmath_path = '_static/jsMath/easy/load.js'

keep_warnings = True
from dj
ango.forms import HiddenInput from .base import WidgetTest class HiddenInputTest(WidgetTest): widget = HiddenInput() def test_render(self): self.check_html(self.widget, 'email', '', html='<input type="hidden" name="email" />') def test_use_required_attribute(self): # Always False to avoid browser validation on inputs hidden from the # user. self.assertIs(self.widget.use_required_attribute(None), False) self.assertIs(self.widget.use_required_attribute
(''), False) self.assertIs(self.widget.use_required_attribute('foo'), False)
################################################################################ # # # Folding@Home Client Control (FAHControl) # # Copyright (C) 2016-2020 foldingathome.org # # Copyright (C) 2010-2016 Stanford University # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # # # ################################################################################ from fah.db import Column, Table import sqlite3 class Database: tables = [ Table('config', [ Column('name', 'Text', 'NOT NULL'), Column('value', 'Text', 'NOT NULL'), ], 'PRIMARY KEY (name)'), Table('clients', [ Column('name', 'Text', 'NOT NULL'), Column('address', 'Text', 'NOT NULL'), Column('port', 'Integer', 'NOT NULL'), Column('password', 'Text', 'NOT NULL'), ], 'PRIMARY KEY (name)'), ] def __init__(self, filename): self.filename = filename self.conn = sqlite3.connect(filename) self.conn.row_factory = sqlite3.Row self.queue = {} def get_table(self, name): for table in self.tables: if table.name == name: return table raise Exception('Table "%s" not found' % name) def get_version(self): return 6 def get_current_version(self): return int(self.execute_one('PRAGMA user_version')[0]) def set_current_version(self, version): self.write('PRAGMA user_version=%d' % version, True) def set(self, name, value, commit = True, queue = False): if queue: self.queue[name] = value else: self.insert('config', name = name, value = value) if commit: self.commit() def clear(self, name, commit = True): self.delete('config', name = name) if commit: self.commit() def get(self, name): c = self.get_table('config').select(self, 'value', name = name) result = c.fetchone() c.close() if result: return result[0] def has(self, name): return self.get(name) != None def default(self, name, default, commit = True): if not self.has(name): self.set(name, default, commit) def flush_queued(self): if len(self.queue) == 0: return for name, value in self.queue.items(): self.set(name, value, commit = False) self.commit() self.queue.clear() def execute(self, sql): #print 'SQL:', sql c = self.conn.cursor() c.execute(sql) return c def execute_one(self, sql): c = self.execute(sql) result = c.fetchone() c.close() return result def write(self, sql, commit = False): self.execute(sql).close() if commit: self.commit() def commit(self): self.conn.commit() def rollback(self): self.conn.rollback() def insert(self, table, **kwargs): self.get_table(table).insert(self, **kwargs) def delete(self, table, **kwargs): self.get_table(table).delete(self, **kwargs) def select(self, table, cols = None, **kwargs): return self.get_table(table).select(self, cols, **kwargs) def create(self): for table in self.tables: table.create(self) self.commit() def validate(self): current = self.get_current_version() if self.get_version() < current: raise Exception('Configuration database "%s" version %d is newer than is supported %d' % (self.filename, current, self.get_version())) elif self.get_version() != current: # 
Create or upgrade DB if current == 0: self.create() else: if current <= 2: # Just drop and recreate the clients table self.execute('DROP TABLE IF EXISTS clients') for table in self.tables: if table.name == 'clients': table.create(self) if current <= 5:
self.execute('DROP TABLE IF EXISTS projects') self.set_current_version(self.get_version()
) self.commit()
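# --- Hedged sketch (illustration only; not part of FAHControl). The
# version checks in Database.validate() are built on SQLite's
# PRAGMA user_version, which can be exercised directly with the stdlib
# sqlite3 module:
import sqlite3

conn = sqlite3.connect(':memory:')
print(conn.execute('PRAGMA user_version').fetchone()[0])  # 0 on a fresh DB
conn.execute('PRAGMA user_version=6')                     # stamp the schema version
print(conn.execute('PRAGMA user_version').fetchone()[0])  # 6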
# -*- coding: utf-8 -*- """ Written by Daniel M. Aukes and CONTRIBUTORS Email: danaukes<at>asu.edu. Please see LICENSE for full license. """ import sys import popupcad import qt.QtCore as qc import qt.QtGui as qg i
f __name__=='__main__':
    # QApplication expects the full argv list, not just the program name
    app = qg.QApplication(sys.argv)

    filename_from = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R08.cad'
    filename_to = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R09.cad'

    d = popupcad.filetypes.design.Design.load_yaml(filename_from)

    widget = qg.QDialog()
layout = qg.QVBoxLayout() layout1 = qg.QHBoxLayout() layout2 = qg.QHBoxLayout() list1 = qg.QListWidget() list2 = qg.QListWidget() button_ok = qg.QPushButton('Ok') button_cancel = qg.QPushButton('Cancel') subdesign_list = list(d.subdesigns.values()) for item in subdesign_list: list1.addItem(str(item)) list2.addItem(str(item)) layout1.addWidget(list1) layout1.addWidget(list2) layout2.addWidget(button_ok) layout2.addWidget(button_cancel) layout.addLayout(layout1) layout.addLayout(layout2) widget.setLayout(layout) button_ok.pressed.connect(widget.accept) button_cancel.pressed.connect(widget.reject) if widget.exec_(): if len(list1.selectedIndexes())==1 and len(list2.selectedIndexes())==1: ii_from = list1.selectedIndexes()[0].row() ii_to = list2.selectedIndexes()[0].row() print(ii_from,ii_to) d.replace_subdesign_refs(subdesign_list[ii_from].id,subdesign_list[ii_to].id) d.subdesigns.pop(subdesign_list[ii_from].id) d.save_yaml(filename_to) sys.exit(app.exec_())
# -*- coding: utf-8 -*- """Create an application instan
ce.""" from flask.helpers import get_debug_flag from myflaskapp.app import create_app from myflaskapp.settings import DevConfig, ProdConfig CONFIG = DevConfig if get_de
bug_flag() else ProdConfig app = create_app(CONFIG)
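# --- Note (added for illustration; not part of the original file). In the
# Flask versions that provide it, get_debug_flag() reads the FLASK_DEBUG
# environment variable, so the config choice above can be driven entirely
# from the environment, e.g.:
#
#   $ FLASK_DEBUG=1 python autoapp.py   # CONFIG = DevConfig
#   $ python autoapp.py                 # CONFIG = ProdConfig
#
# (the module filename autoapp.py is hypothetical here)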
df_monthl
y = df.resample('M').sum() df_monthly.plo
t()
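# --- Hedged sketch (illustration only): a self-contained version of the
# resample('M') pattern above, assuming `df` carries a DatetimeIndex.
import numpy as np
import pandas as pd

idx = pd.date_range('2020-01-01', periods=90, freq='D')
df = pd.DataFrame({'value': np.ones(90)}, index=idx)
df_monthly = df.resample('M').sum()   # month-end bins: Jan 31, Feb 29, Mar 30
print(df_monthly)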
import copy from fmiapi.fmixmlparser import FMIxmlParser from tests.testUtils import * from tests.fmiapi.testdata.expected_data import * def describe_fmi_xml_parser(): parser = FMIxmlParser() def describe_daily_data(): test_data1 = load_xml('./tests/fmiapi/testdata/daily_12_days.xml') test_data2 = load_xml('./tests/fmiapi/testdata/daily_4_days.xml') test_data3 = load_xml('./tests/fmiapi/testdata/daily_14_days.xml') test_1965 = load_xml('./tests/fmiapi/testdata/daily_11_days_1965.xml') def should_parse_xml(): result = parser.parse([test_data1]) assert_equal(12, len(result['time'])) assert 'time' in result assert 'rrday' in result assert 'tday' in result assert 'snow' in result assert 'tmin' in result assert 'tmax' in result assert 'place' in result verify_dataframe(result, EXPECTED_DAILY_12_DAYS) def should_parse_dates_before_1970_correctly(): result = parser.parse([test_1965]) assert_equal(11, len(result['time'])) verify_dataframe(result, EXPECTED_DAILY_1965)
def should_parse_multipart_request_correctly(): result = parser.parse([test_data1, test_data2, test_data3]) assert_equal(30, len(result['time'])) # concat three different dicts to one df
expected_df = copy.deepcopy(EXPECTED_DAILY_12_DAYS) for key in EXPECTED_DAILY_4_DAYS: expected_df[key] = expected_df[key] + EXPECTED_DAILY_4_DAYS[key] for key in EXPECTED_DAILY_14_DAYS: expected_df[key] = expected_df[key] + EXPECTED_DAILY_14_DAYS[key] verify_dataframe(result, expected_df) def describe_realtime_data(): test_data1 = load_xml('./tests/fmiapi/testdata/realtime_1_day.xml') def should_parse_xml_and_remove_full_nan_columns(): result = parser.parse([test_data1]) assert_equal(153, len(result['time'])) assert 'time' in result assert 't2m' in result assert 'rh' in result assert 'td' in result assert 'snow_aws' in result assert 'place' in result verify_dataframe(result, EXPECTED_REALTIME_1_DAY)
# -*- coding: utf-8 -*- # Generated by Django 1.11.18 on 2019
-02-06 21:49 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('zerver', '0206_stream_rendered_description'), ] operations = [ migrations.AddField( model_name='multiuseinvite', name='invited_as', field=models.PositiveSmallIntegerFiel
d(default=1), ), ]
# -*- coding: utf-8 -*- # Config file handling module # Copyright (C) 2014 Yury Gavrilov <yuriy@igavrilov.ru> # This file is part of VKBuddy. # VKBuddy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (
at your option) any later version. # VKBuddy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PA
RTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with VKBuddy. If not, see <http://www.gnu.org/licenses/>.

import yaml


class IncorrectConfig(Exception):
    pass


class BareConfig:
    def __init__(self):
        self.config = {}
        self.required_list = []

    def add_parameter(self, name, required=False, description='',
                      default=None, typ=str):
        if required:
            self.required_list.append(name)
        self.config[name] = {
            'description': description,
            'default': default,
            'type': typ
        }


class Config:
    def __init__(self, filename, bare):
        # Config files contain plain data only, so the safe loader suffices
        # (yaml.load without an explicit Loader is unsafe and deprecated).
        with open(filename, 'r') as cfile:
            self.__config = yaml.safe_load(cfile)
        self.bare = bare
        if not self.__config:
            self.__config = {}
        for param in bare.required_list:
            if param not in self.__config:
                raise IncorrectConfig(
                    'Required parameter \'{}\' not found'.format(param)
                )

    def __getitem__(self, item):
        if item in self.__config:
            if item in self.bare.config:
                return self.bare.config[item]['type'](self.__config[item])
            else:
                return self.__config[item]
        elif item in self.bare.config:
            return self.bare.config[item]['default']
        else:
            raise KeyError(item)
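# --- Hedged usage sketch (illustration only; not part of VKBuddy). Wiring
# BareConfig and Config together; the file name vkbuddy.yml and its
# contents are hypothetical. Assuming the file contains just `port: "8080"`:
#
#   bare = BareConfig()
#   bare.add_parameter('port', required=True, typ=int, default=80)
#   bare.add_parameter('host', default='localhost')
#   cfg = Config('vkbuddy.yml', bare)
#   cfg['port']   # -> 8080, coerced to int via the declared type
#   cfg['host']   # -> 'localhost', the default for a key absent from the file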
# -*- coding: utf-8 -*- """ werkzeug ~~~~~~~~ Werkzeug is the Swiss Army knife of Python web development. It provides useful classes and functions for any WSGI application to make the life of a python web developer much easier. All of the provided classes are independent from each other so you can mix it with any other library. :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from types import ModuleType import sys # the version. Usually set automatically by a script. __version__ = '0.8.1' # This import magic raises concerns quite often which is why the implementation # and motivation is explained here in detail now. # # The majority of the functions and classes provided by Werkzeug work on the # HTTP and WSGI layer. There is no useful grouping for those which is why # they are all importable from "werkzeug" instead of the modules where they are # implemented. The downside of that is, that now everything would be
loaded at # once, even if unused. # # The implementation of a lazy-loading module in this file replaces the # werkzeug package when imported from within. Attribute access to the werkzeug # module will then lazily import from the modules that implement the objects. # import mapping to objects in other modules all_by_module = { 'werkzeug.debug': ['Debugge
dApplication'], 'werkzeug.local': ['Local', 'LocalManager', 'LocalProxy', 'LocalStack', 'release_local'], 'werkzeug.templates': ['Template'], 'werkzeug.serving': ['run_simple'], 'werkzeug.test': ['Client', 'EnvironBuilder', 'create_environ', 'run_wsgi_app'], 'werkzeug.testapp': ['test_app'], 'werkzeug.exceptions': ['abort', 'Aborter'], 'werkzeug.urls': ['url_decode', 'url_encode', 'url_quote', 'url_quote_plus', 'url_unquote', 'url_unquote_plus', 'url_fix', 'Href', 'iri_to_uri', 'uri_to_iri'], 'werkzeug.formparser': ['parse_form_data'], 'werkzeug.utils': ['escape', 'environ_property', 'append_slash_redirect', 'redirect', 'cached_property', 'import_string', 'dump_cookie', 'parse_cookie', 'unescape', 'format_string', 'find_modules', 'header_property', 'html', 'xhtml', 'HTMLBuilder', 'validate_arguments', 'ArgumentValidationError', 'bind_arguments', 'secure_filename'], 'werkzeug.wsgi': ['get_current_url', 'get_host', 'pop_path_info', 'peek_path_info', 'SharedDataMiddleware', 'DispatcherMiddleware', 'ClosingIterator', 'FileWrapper', 'make_line_iter', 'LimitedStream', 'responder', 'wrap_file', 'extract_path_info'], 'werkzeug.datastructures': ['MultiDict', 'CombinedMultiDict', 'Headers', 'EnvironHeaders', 'ImmutableList', 'ImmutableDict', 'ImmutableMultiDict', 'TypeConversionDict', 'ImmutableTypeConversionDict', 'Accept', 'MIMEAccept', 'CharsetAccept', 'LanguageAccept', 'RequestCacheControl', 'ResponseCacheControl', 'ETags', 'HeaderSet', 'WWWAuthenticate', 'Authorization', 'FileMultiDict', 'CallbackDict', 'FileStorage', 'OrderedMultiDict', 'ImmutableOrderedMultiDict'], 'werkzeug.useragents': ['UserAgent'], 'werkzeug.http': ['parse_etags', 'parse_date', 'http_date', 'cookie_date', 'parse_cache_control_header', 'is_resource_modified', 'parse_accept_header', 'parse_set_header', 'quote_etag', 'unquote_etag', 'generate_etag', 'dump_header', 'parse_list_header', 'parse_dict_header', 'parse_authorization_header', 'parse_www_authenticate_header', 'remove_entity_headers', 'is_entity_header', 'remove_hop_by_hop_headers', 'parse_options_header', 'dump_options_header', 'is_hop_by_hop_header', 'unquote_header_value', 'quote_header_value', 'HTTP_STATUS_CODES'], 'werkzeug.wrappers': ['BaseResponse', 'BaseRequest', 'Request', 'Response', 'AcceptMixin', 'ETagRequestMixin', 'ETagResponseMixin', 'ResponseStreamMixin', 'CommonResponseDescriptorsMixin', 'UserAgentMixin', 'AuthorizationMixin', 'WWWAuthenticateMixin', 'CommonRequestDescriptorsMixin'], 'werkzeug.security': ['generate_password_hash', 'check_password_hash'], # the undocumented easteregg ;-) 'werkzeug._internal': ['_easteregg'] } # modules that should be imported when accessed as attributes of werkzeug attribute_modules = frozenset(['exceptions', 'routing', 'script']) object_origins = {} for module, items in all_by_module.iteritems(): for item in items: object_origins[item] = module class module(ModuleType): """Automatically import objects from the modules.""" def __getattr__(self, name): if name in object_origins: module = __import__(object_origins[name], None, None, [name]) for extra_name in all_by_module[module.__name__]: setattr(self, extra_name, getattr(module, extra_name)) return getattr(module, name) elif name in attribute_modules: __import__('werkzeug.' 
+ name) return ModuleType.__getattribute__(self, name) def __dir__(self): """Just show what we want to show.""" result = list(new_module.__all__) result.extend(('__file__', '__path__', '__doc__', '__all__', '__docformat__', '__name__', '__path__', '__package__', '__version__')) return result # keep a reference to this module so that it's not garbage collected old_module = sys.modules['werkzeug'] # setup the new module and patch it into the dict of loaded modules new_module = sys.modules['werkzeug'] = module('werkzeug') new_module.__dict__.update({ '__file__': __file__, '__package__': 'werkzeug', '__path__': __path__, '__doc__': __doc__, '__version__': __version__, '__all__': tuple(object_origins) + tuple(attribute_modules), '__docformat__': 'restructuredtext en' })
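# --- Hedged sketch (illustration only; Python 3 spelling, whereas the
# module above is Python 2 era). The same module-replacement trick in
# miniature: a ModuleType subclass whose __getattr__ imports lazily and
# caches the attribute. The module name lazy_demo is hypothetical.
import sys
from types import ModuleType

class _LazyModule(ModuleType):
    _origins = {'sqrt': 'math'}          # attribute name -> implementing module

    def __getattr__(self, name):
        if name in self._origins:
            mod = __import__(self._origins[name], None, None, [name])
            value = getattr(mod, name)
            setattr(self, name, value)   # cache so __getattr__ runs only once
            return value
        raise AttributeError(name)

demo = sys.modules['lazy_demo'] = _LazyModule('lazy_demo')
print(demo.sqrt(9.0))                    # math is imported on first access -> 3.0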
nit__()
        self.host_name = host_name


class RomanaAnyAddressRequest(ipam_req.AnyAddressRequest):
    """Used to request any available address from the pool."""

    def __init__(self, host_name, tenant_id, segment_name):
        """Initialize RomanaAnyAddressRequest."""
        # Call our own MRO parent; naming ipam_req.AnyAddressRequest here
        # would skip that class's own __init__.
        super(RomanaAnyAddressRequest, self).__init__()
        self.host_name = host_name
        self.tenant_id = tenant_id
        self.segment_name = segment_name


class RomanaAddressRequestFactory(ipam_req.AddressRequestFactory):
    """Builds address request using ip information."""

    _db_url = None
    _db_conn_dict = None

    @classmethod
    def get_request(cls, context, port, ip_dict):
        """Get a prepared Address Request.

        :param context: context
        :param port: port dict
        :param ip_dict: dict that can contain 'ip_address', 'mac' and
            'subnet_cidr' keys. Request to generate is selected depending
            on this ip_dict keys.
        :return: returns prepared AddressRequest (specific or any)
        """
        mac = port['mac_address']
        owner = port.get('device_owner')
        LOG.debug("AAA: \tTenant %s, is admin %s\n\tdevice owner: %s\n\t%s\n\t%s",
                  context.tenant, context.is_admin, owner, port, ip_dict)
        if owner == constants.DEVICE_OWNER_DHCP:
            return RomanaDhcpAddressRequest(port.get('binding:host_id'))

        # Lazily instantiate DB connection info.
        if cls._db_url is None:
            cls._db_url = cfg.CONF.database.connection
            _parsed_db_url = urlparse(cls._db_url)
            cls._db_conn_dict = {'host': _parsed_db_url.hostname,
                                 'user': _parsed_db_url.username,
                                 'passwd': _parsed_db_url.password,
                                 'db': _parsed_db_url.path[1:]}
        LOG.debug("Connecting to %s" % cls._db_url)
        con = MySQLdb.connect(**cls._db_conn_dict)
        cur = con.cursor()
        # FIXIT! TODO(gg)
        # What a hack! This is being written by Neutron within a transaction,
        # so we have to do a dirty read. However, there is no other [good] way
        # of getting the information about the instance ID in Neutron-land at
        # this point without patching Nova. The only fix I can think of is
        # actually an enhancement/blueprint to OpenStack for more flexible
        # ways of creating requests. In other words, the decision that only
        # host ID and tenant ID (but not instance ID, for one) should go into
        # a request for an IP address lies right now with Nova, but why can't
        # it be made more pluggable/flexible, sort of akin to
        # https://review.openstack.org/#/c/192663/
        cur.execute("SET LOCAL TRANSACTION ISOLATION LEVEL READ UNCOMMITTED")
        query = ("SELECT `key`, value FROM neutron.ports p JOIN "
                 "nova.instance_metadata im ON p.device_id = im.instance_uuid "
                 "WHERE mac_address = '%s' AND `key` = 'romanaSegment'" % mac)
        LOG.debug("DB Query: %s" % query)
        cur.execute(query)
        rows = [row for row in cur.fetchall()]
        cur.close()
        con.close()
        LOG.debug("Found segments for instance: %s" % rows)
        if rows:
            segment_name = rows[0][1]
        else:
            msg = "Cannot find romanaSegment value for mac_address %s." % mac
            raise exceptions.RomanaException(msg)
            #raise ipam_exc.IpAddressGenerationFailure()
        LOG.debug("segment_id: %s" % segment_name)

        if ip_dict.get('ip_address'):
            return ipam_req.SpecificAddressRequest(ip_dict['ip_address'])
        elif ip_dict.get('eui64_address'):
            return ipam_req.AutomaticAddressRequest(
                prefix=ip_dict['subnet_cidr'],
                mac=ip_dict['mac'])
        else:
            return RomanaAnyAddressRequest(
                port.get('binding:host_id'),
                port.get('tenant_id'),
                segment_name)


class RomanaAnySubnetRequest(ipam_req.AnySubnetRequest):
    """A template for allocating an unspecified subnet from IPAM."""

    WILDCARDS = {constants.IPv4: '0.0.0.0',
constants.IPv6: '::'}

    def __init__(self, tenant_id, subnet_id, version, prefixlen,
                 gateway_ip=None, allocation_pools=None):
        """Initialize RomanaAnySubnetRequest.

        :param version: Either constants.IPv4 or constants.IPv6
        :param prefixlen: The prefix len requested. Must be within the min
            and max allowed.
        :type prefixlen: int
        """
        super(RomanaAnySubnetRequest, self).__init__(
            tenant_id=tenant_id,
            subnet_id=subnet_id,
            gateway_ip=gateway_ip,
            allocation_pools=allocation_pools)

        net = netaddr.IPNetwork(self.WILDCARDS[version] + '/' + str(prefixlen))
        self._validate_with_subnet(net)

        self._prefixlen = prefixlen

    @property
    def prefixlen(self):
        """Return Prefix Length."""
        return self._prefixlen


class RomanaSubnetRequestFactory(ipam_req.SubnetRequestFactory):
    """Builds request using subnet information."""

    @classmethod
    def get_request(cls, context, subnet, subnetpool):
        """Return RomanaAnySubnetRequest."""
        LOG.debug("RomanaSubnetRequestFactory.get_request()")
        cidr = subnet.get('cidr')
        subnet_id = subnet.get('id', uuidutils.generate_uuid())
        is_any_subnetpool_request = not attributes.is_attr_set(cidr)

        if is_any_subnetpool_request:
            prefixlen = subnet['prefixlen']
            if not attributes.is_attr_set(prefixlen):
                prefixlen = int(subnetpool['default_prefixlen'])

            return RomanaAnySubnetRequest(
                subnet['tenant_id'],
                subnet_id,
                common_utils.ip_version_from_int(subnetpool['ip_version']),
                prefixlen)
        else:
            return ipam_req.SpecificSubnetRequest(subnet['tenant_id'],
                                                  subnet_id,
                                                  cidr,
                                                  subnet.get('gateway_ip'),
                                                  subnet.get('allocation_pools'))


class RomanaDbPool(subnet_alloc.SubnetAllocator):
    """Class for handling allocation of subnet prefixes from a subnet pool."""

    def get_address_request_factory(self):
        """Return RomanaAddressRequestFactory."""
        LOG.debug("RomanaDbPool.get_address_request_factory")
        return RomanaAddressRequestFactory

    def get_subnet_request_factory(self):
        """Return RomanaSubnetRequestFactory."""
        LOG.debug("RomanaDbPool.get_subnet_request_factory()")
        return RomanaSubnetRequestFactory

    def get_subnet(self, subnet_id):
        """Retrieve an IPAM subnet.

        :param subnet_id: Neutron subnet identifier
        :returns: a RomanaDbSubnet instance
        """
        LOG.debug("RomanaDbPool.get_subnet(%s)" % subnet_id)
        return RomanaDbSubnet.load(subnet_id, self._context)

    def allocate_subnet(self, subnet_request):
        """Create an IPAM Subnet object for the provided request.

        :param subnet_request: a SpecificSubnetRequest describing the subnet
        :returns: a RomanaDbSubnet instance
        """
        LOG.debug("RomanaDbPool.allocate_subnet(%s)" % subnet_request)
        if not isinstance(subnet_request, ipam_req.SpecificSubnetRequest):
            raise ipam_exc.InvalidSubnetRequestType(
                subnet_type=type(subnet_request))
        return RomanaDbSubnet.create_from_subnet_request(subnet_request,
                                                         self._context)

    def update_subnet(self, subnet_request):
        """Update subnet info in the IPAM driver.

        The only subnet information the driver needs to update is the
        allocation pools.
        """
        LOG.debug("RomanaDbPool.update_subnet(%s)" % subnet_request)
        if not subnet_request.subnet_id:
            raise ipam_exc.InvalidSubnetRequest(
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings # noqa from django.utils.translation import uget
text_lazy as _
# noqa from appconf import AppConf class BlogConf(AppConf): DISQUS_SHORTNAME = 'django-staticgen' POST_IDENTIFIER = 'current_post' class Meta: prefix = 'blog'
"""Functional
ity for determining logic satisfiability."""
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agr
eements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file exc
ept in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Functions defined in MXNet. Acknowledgement: This file originates from incubator-tvm""" from ._ffi.function import _init_api _init_api("mxnet.api")
import os import argparse import datetime import yaml import api.src.common.initial_environment_config from ..models.dense import create_model from ..data_processing.data_generator import DataGenerator from ..common.config import TrainingConfig, DataConfig, Config from ..common.utils import print_info, ensure_dir from .plot_trainings import get_description_string from keras.callbacks import ModelCheckpoint, CSVLogger, TensorBoard, LearningRateScheduler, EarlyStopping RUNNING_TIME = datetime.datetime.now().strftime("%H_%M_%d_%m_%y") def train(num_epochs, batch_size, input_size, num_workers): if not Config.NO_SAVE: ensure_dir(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME)) model = create_model((2592,)) callbacks = [ ModelCheckpoint(os.path
.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'weights.h5'), save_best_only=True, monitor=TrainingConfig.callbacks_monitor), CSVLogger(os.path.join(Training
Config.PATHS['MODELS'], RUNNING_TIME, 'history.csv')), LearningRateScheduler(TrainingConfig.schedule), EarlyStopping(patience=5) ]if not Config.NO_SAVE else [] if not Config.NO_SAVE: introduced_change = input("What new was introduced?: ") with open(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'change.txt'), 'w') as f: f.write(introduced_change) with open(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'config.yml'), 'w') as f: yaml.dump(list([TrainingConfig.get_config(), Config.get_config(), DataConfig.get_config()]), f, default_flow_style=False) with open(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'model.txt'), 'w') as f: f.write(get_description_string(model)) optimizer = TrainingConfig.optimizer data_generator_train = DataGenerator(DataConfig.PATHS['TRAINING_PROCESSED_DATA'], batch_size, input_size, False, True) data_generator_valid = DataGenerator(DataConfig.PATHS['VALID_PROCESSED_DATA'], batch_size, input_size, True, True) model.compile(optimizer, TrainingConfig.loss, metrics=TrainingConfig.metrics) model.fit_generator(data_generator_train, samples_per_epoch=data_generator_train.samples_per_epoch, nb_epoch=num_epochs, validation_data=data_generator_valid, nb_val_samples=data_generator_valid.samples_per_epoch, callbacks=callbacks) def main(args): print_info("Training") train(args.num_epochs, args.batch_size, args.input_size, args.num_workers) print_info("Finished") if __name__ == '__main__': argparser = argparse.ArgumentParser(description='Script performing training') argparser.add_argument('--num_epochs', default=TrainingConfig.NB_EPOCHS, type=int, help='Number of training epochs') argparser.add_argument('--num_workers', type=int, default=TrainingConfig.NUM_WORKERS, help='Number of workers during training') argparser.add_argument('--batch_size', type=int, default=TrainingConfig.BATCH_SIZE, help='Batch size') argparser.add_argument('--input_size', type=int, default=Config.IMAGE_SIZE, help='Image size to input') arguments = argparser.parse_args() main(arguments)
# Copyright (c) 2014 - 2016 townhallpinball.org # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. from pin.lib import p, ui, util class Classic(object): initial = True def __init__(self, handler): self.handler = handler self.display = ui.Panel() self.player = ui.Text(top=4) self.players = [ ui.Text(left=0, top=0), ui.Text(right=0, top=0, x_align="right"), ui.Text(left=0, bottom=7), ui.Text(right=0, bottom=7, x_align="right") ] self.ball = ui.Text(bottom=0, left=0, width=54, font="bm3", x_align="right") self.credits_right = ui.Text(bottom=0, left=64, font="bm3") self.credits_
center = ui.Text(bottom=0, font="bm3") self.display.add((self.player, self.players[0], self.players[1], self.players[2], self.players[3], self.ball, self.credits_right, self.credits_center)) self.handler.on("data_credits", self.update) self
.handler.on("add_player", self.update) self.handler.on("next_player", self.next_player) self.handler.on("player_score", self.score) self.update() def next_player(self): self.initial = True self.update() def score(self): self.initial = False self.update() def update(self, *args, **kwargs): self.update_score(self.player, 0, single=True) for index, player in enumerate(self.players): self.update_score(player, index, single=False) if p.game: self.ball.show("BALL {}".format(p.game.ball)) self.credits_right.show(util.credits_string()) self.credits_center.hide() else: self.ball.hide() self.credits_right.hide() self.credits_center.show(util.credits_string()) def update_score(self, text, index, single): show = True if single and len(p.players) > 1: show = False if not single and len(p.players) == 1: show = False if index >= len(p.players): show = False if show: score = p.players[index]["score"] self.update_score_size(text, single, index) text.show(util.format_score(score)) if index == p.player["index"] and self.initial and p.game: text.effect("blink", duration=0.15, repeat=True) else: text.effect_cancel() else: text.hide() # Adapted from # https://github.com/preble/pyprocgame/blob/master/procgame/modes/scoredisplay.py#L104 def update_score_size(self, text, single, index): score = p.players[index]["score"] if single: if score < 1e9: text.update(font="bm10w") elif score < 1e10: text.update(font="bm10") else: text.update(font="bm10n") elif not single and p.game and p.player["index"] == index: if score < 1e6: text.update(font="bm8w") elif score < 1e7: text.update(font="bm8") else: text.update(font="bm8n") else: if score < 1e6: text.update(font="bm5w") elif score < 1e7: text.update(font="bm5") else: text.update(font="bm5n")
import numpy as np import scipy.optimize from scipy.signal import lombscargle import batman import ctools import matplotlib.pyplot as plt def periodogram(time, data, periods): freq = 1/periods nfactor = 2/(data.size * np.var(data)) power = nfactor * lombscargle(time, data-np.mean(data), freq*2*np.pi) return power def phase_dispersion_minimization(time, data, period): mask = time > period mtime = time.copy() mtime[mask] = time[mask] % period inds = np.argsort(mtime, kind='mergesort') data = data[inds] val = np.sum(np.abs(data[1:] - data[:-1])) return val def fit_transit(time, flux, period=None): if period is None: time_range = time.max()-time.min() avg_spacing = time_range/time.size start =
avg_spacing
        stop = time_range
        periods = np.arange(start, stop, avg_spacing)
        phase_dispersion = ctools.phase_dispersion(time, flux, periods)
        power = periodogram(time, flux, periods)
        # NOTE: the periodogram and phase-dispersion statistics above are not
        # yet used to choose the period; it is currently hard-coded.
        period = 25.0

    time %= period
    inds = np.argsort(time, kind='mergesort')
    time = time[inds]
    flux = flux[inds]
    flux /= np.median(flux)  # Data must be normalized to use the rp parameter
    in_transit = flux < 1-(1-flux.min())/2
    # Estimate planet radius from the transit depth
    planet_radius = np.sqrt(1-np.median(flux[in_transit]))
    # Estimate the location of the only dip
    t0 = np.median(time[in_transit])
    # Estimate semi-major axis from transit duration
    duration = time[in_transit].max()-time[in_transit].min()
    semi_major_axis = 1 / np.sin(duration * np.pi / period)

    def transit_model_partial(time, *params):
        return transit_model(time, period, t0, *params)

    # Assume inclination of 90, with 0 eccentricity
    p0 = [planet_radius, semi_major_axis, 90.0, 0.0, 90.0, 0.1, 0.3]

    plt.plot(time, flux, 'k.')
    plt.plot(time, transit_model_partial(time, *p0))
    plt.show()

    p, cov = scipy.optimize.curve_fit(transit_model_partial, time, flux, p0=p0)
    p0 = [period, t0] + list(p)
    p, cov = scipy.optimize.curve_fit(transit_model, time, flux, p0=p0)

    #plt.plot(time, flux, 'k.')
    #plt.plot(time, transit_model(time, *p0))
    #plt.plot(time, transit_model(time, *p))
    #plt.show()

    return p


def transit_model(time, period, t0, planet_radius, semi_major_axis,
                  inclination, eccentricity, longitude_of_periastron,
                  limb_linear, limb_quadratic):
    params = batman.TransitParams()
    params.per = period
    params.t0 = t0
    params.rp = planet_radius
    params.a = semi_major_axis
    params.inc = inclination
    params.ecc = abs(eccentricity) % 1
    params.w = longitude_of_periastron
    params.u = [limb_linear, limb_quadratic]
    params.limb_dark = 'quadratic'

    model = batman.TransitModel(params, time)
    return model.light_curve(params)


if __name__ == '__main__':
    np.random.seed(1)

    params = batman.TransitParams()
    params.t0 = 1.0
    params.per = 25.0
    params.rp = 0.1
    params.a = 15.0
    params.inc = 90.0
    params.ecc = 0.0
    params.w = 90.0
    params.u = [0.1, 0.3]
    params.limb_dark = 'quadratic'

    time = np.linspace(0, 100, 10000)
    model = batman.TransitModel(params, time)
    flux = model.light_curve(params)
    flux += np.random.randn(time.size) * 0.001

    print(fit_transit(time, flux))
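# Quick sanity check of the semi-major-axis estimate used in fit_transit: for
# a central transit with the planetary radius and limb darkening ignored, the
# transit duration is T ~ (P / pi) * arcsin(1 / a) with a in stellar radii, so
# a ~ 1 / sin(pi * T / P). With the synthetic parameters from __main__
# (P = 25, a = 15) the inversion is exact:

import numpy as np

P, a = 25.0, 15.0
T = (P / np.pi) * np.arcsin(1.0 / a)            # expected central-transit duration
assert abs(1 / np.sin(T * np.pi / P) - a) < 1e-9  # recovers a = 15.0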
import numpy as np import pandas as pd import pytest ROWID_ZERO_INDEXED_BACKENDS = ('omniscidb',) @pytest.mark.parametrize( 'column', [ 'string_col', 'double_col', 'date_string_col', pytest.param('timestamp_col', marks=pytest.mark.skip(reason='hangs')), ], ) @pytest.mark.xfail_unsupported def test_distinct_column(backend, alltypes, df, column): expr = alltypes[column].distinct() result = expr.execute() expected = df[column].unique() assert set(result) == set(expected) @pytest.mark.xfail_unsupported def test_rowid(con, backend): t = con.table('functional_alltypes') result = t[t.rowid()].execute() first_value = 0 if backend.name() in ROWID_ZERO_INDEXED_BACKENDS else 1 expected = pd.Series( range(first_value, first_value + len(result)), dtype=np.int64, name='rowid', ) pd.testing.assert_series_equal(result.iloc[:, 0], expected) @pytest.mark.xfail_unsupported def test_named_rowid(con, backend): t = con.table('functional_alltypes') result = t[t.rowid().name('number')].execute() first_value = 0 if backend.name() in ROWID_ZERO_INDEXED_BACKENDS else 1 expected = pd.Series( range(first_value, first_value + len(result)), dtype=np.int64,
name='number', ) pd.testing.assert_series_equal(result.iloc[:, 0], expec
ted)
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# @Author: Adrien Chardon
# @Date:   2014-11-16 14:29:10
# @Last Modified by:   Adrien Chardon
# @Last Modified time: 2014-11-16 16:31:32


################################################################################
# Useful functions
################################################################################

# big number
infinity = 1000


# wrapper
def getObjectInList(nodes, id):
    return nodes[id-1]


# Return all child nodes that can be reached directly from the node
def getChildOfNode(data, node):
    ret = []
    for dic in data:
        if dic['start'] == node:
            ret.append(dic['end'])
    return ret


# Return the time for traveling directly from node1 to node2
def getDist(data, node1, node2):
    for dic in data:
        if (dic['start'] == node1) and (dic['end'] == node2):
            return dic['time']
    return infinity


################################################################################
# main
################################################################################

# get the time for one request
def expert_itinerant_one(nb_node, nb_link, nb_request, data, start, end):
    nodes = []
    for _ in range(nb_node):
        tmp = {
            'distanceFromStart': infinity,
            'origin': -1,
        }
        nodes.append(tmp)

    getObjectInList(nodes, start)['distanceFromStart'] = 0
    getObjectInList(nodes, start)['origin'] = 0

    notVisited = [start]

    while notVisited != []:
        cur_id = notVisited.pop()
        cur = getObjectInList(nodes, cur_id)

        for child_id in getChildOfNode(data, cur_id):
            child = getObjectInList(nodes, child_id)
            if (child['origin'] == -1) or (cur['distanceFromStart'] + getDist(data, cur_id, child_id) < child['distanceFromStart']):
                child['distanceFromStart'] = cur['distanceFromStart'] + getDist(data, cur_id, child_id)
                child['origin'] = cur_id
                if child_id not in notVisited:
                    notVisited.append(child_id)

    return getObjectInList(nodes, end)['distanceFromStart']


def expert_itinerant(nb_node, nb_link, nb_request, data, request):
    for dic in request:
        print expert_itinerant_one(nb_node, nb_link, nb_request, data, dic['start'], dic['end'])


if __name__ == '__main__':
    nb_node, nb_link, nb_request = (int(i) for i in raw_input().split())

    data = []
    for _ in range(nb_link):
        start, end, time = (int(i) for i in raw_input().split())
        tmp = {
            'start': start,
            'end': end,
            'time': time,
        }
        data.append(tmp)
    data.sort(key=lambda tup: tup['start'])

    request = []
    for _ in range(nb_request):
        start, end = (int(i) for i in raw_input().split())
        tmp = {
            'start': start,
            'end': end,
        }
        request.append(tmp)

    expert_itinerant(nb_node, nb_link, nb_request, data, request)
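# The relaxation loop in expert_itinerant_one is a label-correcting search: it
# pops nodes off a plain list, so a node may be re-expanded several times. A
# sketch of the same single-pair query with a binary heap (standard Dijkstra;
# shortest_path_length is a hypothetical helper, not part of the script above):

import heapq

INF = 1000  # mirrors the script's "infinity" sentinel


def shortest_path_length(data, start, end):
    # data is the same list of {'start': u, 'end': v, 'time': w} dicts
    # that __main__ builds from stdin.
    graph = {}
    for edge in data:
        graph.setdefault(edge['start'], []).append((edge['end'], edge['time']))
    dist = {start: 0}
    heap = [(0, start)]
    while heap:
        d, node = heapq.heappop(heap)
        if node == end:
            return d
        if d > dist.get(node, INF):
            continue  # stale heap entry
        for child, weight in graph.get(node, []):
            candidate = d + weight
            if candidate < dist.get(child, INF):
                dist[child] = candidate
                heapq.heappush(heap, (candidate, child))
    return INF  # unreachable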
: (c) 2014 by the FlaskBB Team. :license: BSD, see LICENSE for more details. """ import sys from flask import (Blueprint, current_app, request, redirect, url_for, flash, jsonify, __version__ as flask_version) from flask_login import current_user, login_fresh from flask_plugins import get_all_plugins, get_plugin, get_plugin_from_all from flask_babelplus import gettext as _ from flask_allows import Permission, Not from flaskbb import __version__ as flaskbb_version from flaskbb._compat import iteritems from flaskbb.forum.forms import UserSearchForm from flaskbb.utils.settings import flaskbb_config from flaskbb.utils.requirements import (IsAtleastModerator, IsAdmin, CanBanUser, CanEditUser, IsAtleastSuperModerator) from flaskbb.extensions import db, allows from flaskbb.utils.helpers import (render_template, time_diff, time_utcnow, get_online_users) from flaskbb.user.models import Guest, User, Group from flaskbb.forum.models import Post, Topic, Forum, Category, Report from flaskbb.management.models import Setting, SettingsGr
oup from flaskbb.management.forms import (AddUserForm, EditUserForm, AddGroupForm,
EditGroupForm, EditForumForm, AddForumForm, CategoryForm) management = Blueprint("management", __name__) @management.before_request def check_fresh_login(): """Checks if the login is fresh for the current user, otherwise the user has to reauthenticate.""" if not login_fresh(): return current_app.login_manager.needs_refresh() @management.route("/") @allows.requires(IsAtleastModerator) def overview(): # user and group stats banned_users = User.query.filter( Group.banned == True, Group.id == User.primary_group_id ).count() if not current_app.config["REDIS_ENABLED"]: online_users = User.query.filter(User.lastseen >= time_diff()).count() else: online_users = len(get_online_users()) stats = { # user stats "all_users": User.query.count(), "banned_users": banned_users, "online_users": online_users, "all_groups": Group.query.count(), # forum stats "report_count": Report.query.count(), "topic_count": Topic.query.count(), "post_count": Post.query.count(), # misc stats "plugins": get_all_plugins(), "python_version": "%s.%s" % (sys.version_info[0], sys.version_info[1]), "flask_version": flask_version, "flaskbb_version": flaskbb_version } return render_template("management/overview.html", **stats) @management.route("/settings", methods=["GET", "POST"]) @management.route("/settings/<path:slug>", methods=["GET", "POST"]) @allows.requires(IsAdmin) def settings(slug=None): slug = slug if slug else "general" # get the currently active group active_group = SettingsGroup.query.filter_by(key=slug).first_or_404() # get all groups - used to build the navigation all_groups = SettingsGroup.query.all() SettingsForm = Setting.get_form(active_group) old_settings = Setting.get_settings(active_group) new_settings = {} form = SettingsForm() if form.validate_on_submit(): for key, values in iteritems(old_settings): try: # check if the value has changed if values['value'] == form[key].data: continue else: new_settings[key] = form[key].data except KeyError: pass Setting.update(settings=new_settings, app=current_app) flash(_("Settings saved."), "success") else: for key, values in iteritems(old_settings): try: form[key].data = values['value'] except (KeyError, ValueError): pass return render_template("management/settings.html", form=form, all_groups=all_groups, active_group=active_group) # Users @management.route("/users", methods=['GET', 'POST']) @allows.requires(IsAtleastModerator) def users(): page = request.args.get("page", 1, type=int) search_form = UserSearchForm() if search_form.validate(): users = search_form.get_results().\ paginate(page, flaskbb_config['USERS_PER_PAGE'], False) return render_template("management/users.html", users=users, search_form=search_form) users = User.query. 
\ order_by(User.id.asc()).\ paginate(page, flaskbb_config['USERS_PER_PAGE'], False) return render_template("management/users.html", users=users, search_form=search_form) @management.route("/users/<int:user_id>/edit", methods=["GET", "POST"]) @allows.requires(IsAtleastModerator) def edit_user(user_id): user = User.query.filter_by(id=user_id).first_or_404() if not Permission(CanEditUser, identity=current_user): flash(_("You are not allowed to edit this user."), "danger") return redirect(url_for("management.users")) member_group = db.and_(*[db.not_(getattr(Group, p)) for p in ['admin', 'mod', 'super_mod', 'banned', 'guest']]) filt = db.or_( Group.id.in_(g.id for g in current_user.groups), member_group ) if Permission(IsAtleastSuperModerator, identity=current_user): filt = db.or_(filt, Group.mod) if Permission(IsAdmin, identity=current_user): filt = db.or_(filt, Group.admin, Group.super_mod) if Permission(CanBanUser, identity=current_user): filt = db.or_(filt, Group.banned) group_query = Group.query.filter(filt) form = EditUserForm(user) form.primary_group.query = group_query form.secondary_groups.query = group_query if form.validate_on_submit(): form.populate_obj(user) user.primary_group_id = form.primary_group.data.id # Don't override the password if form.password.data: user.password = form.password.data user.save(groups=form.secondary_groups.data) flash(_("User updated."), "success") return redirect(url_for("management.edit_user", user_id=user.id)) return render_template("management/user_form.html", form=form, title=_("Edit User")) @management.route("/users/delete", methods=["POST"]) @management.route("/users/<int:user_id>/delete", methods=["POST"]) @allows.requires(IsAdmin) def delete_user(user_id=None): # ajax request if request.is_xhr: ids = request.get_json()["ids"] data = [] for user in User.query.filter(User.id.in_(ids)).all(): # do not delete current user if current_user.id == user.id: continue if user.delete(): data.append({ "id": user.id, "type": "delete", "reverse": False, "reverse_name": None, "reverse_url": None }) return jsonify( message="{} users deleted.".format(len(data)), category="success", data=data, status=200 ) user = User.query.filter_by(id=user_id).first_or_404() if current_user.id == user.id: flash(_("You cannot delete yourself.", "danger")) return redirect(url_for("management.users")) user.delete() flash(_("User deleted."), "success") return redirect(url_for("management.users")) @management.route("/users/add", methods=["GET", "POST"]) @allows.requires(IsAdmin) def add_user(): form = AddUserForm() if form.validate_on_submit(): form.save() flash(_("User added."), "success") return redirect(url_for("management.users")) return render_template("management/user_form.html", form=form, title=_("Add User")) @management.route("/users/banned", methods=["GET", "POST"]) @allows.requires(IsAtleastModerator) def banned_
from classytags.helpers import InclusionTag from django import template from django.conf import settings from django.template.loader import render_to_string register = template.Library() class Banner(InclusionTag): """ Displays
a checkout mode banner. """ template = 'sagepay/checkout_mode_banner.html' def render_tag(self, context, **kwargs): template = self.get_template(context, **kwargs) if settings.SAGEPAY_MODE == "Live": return '' data = self.get_context(context, **kwargs) return render_to_string(template, data) regist
er.tag(Banner)
= dict(required=False, type='bool'), alias_hosted_zone_id = dict(required=False), alias_evaluate_target_health = dict(required=False, type='bool', default=False), value
= dict(required=False),
overwrite = dict(required=False, type='bool'), retry_interval = dict(required=False, default=500), private_zone = dict(required=False, type='bool', default=False), identifier = dict(required=False, default=None), weight = dict(required=False, type='int'), region = dict(required=False), health_check = dict(required=False), failover = dict(required=False,choices=['PRIMARY','SECONDARY']), vpc_id = dict(required=False), wait = dict(required=False, type='bool', default=False), wait_timeout = dict(required=False, type='int', default=300), ) ) module = AnsibleModule(argument_spec=argument_spec) if not HAS_BOTO: module.fail_json(msg='boto required for this module') if distutils.version.StrictVersion(boto.__version__) < distutils.version.StrictVersion(MINIMUM_BOTO_VERSION): module.fail_json(msg='Found boto in version %s, but >= %s is required' % (boto.__version__, MINIMUM_BOTO_VERSION)) command_in = module.params.get('command') zone_in = module.params.get('zone').lower() hosted_zone_id_in = module.params.get('hosted_zone_id') ttl_in = module.params.get('ttl') record_in = module.params.get('record').lower() type_in = module.params.get('type') value_in = module.params.get('value') alias_in = module.params.get('alias') alias_hosted_zone_id_in = module.params.get('alias_hosted_zone_id') alias_evaluate_target_health_in = module.params.get('alias_evaluate_target_health') retry_interval_in = module.params.get('retry_interval') private_zone_in = module.params.get('private_zone') identifier_in = module.params.get('identifier') weight_in = module.params.get('weight') region_in = module.params.get('region') health_check_in = module.params.get('health_check') failover_in = module.params.get('failover') vpc_id_in = module.params.get('vpc_id') wait_in = module.params.get('wait') wait_timeout_in = module.params.get('wait_timeout') region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module) value_list = () if type(value_in) is str: if value_in: value_list = sorted([s.strip() for s in value_in.split(',')]) elif type(value_in) is list: value_list = sorted(value_in) if zone_in[-1:] != '.': zone_in += "." if record_in[-1:] != '.': record_in += "." 
if command_in == 'create' or command_in == 'delete': if not value_in: module.fail_json(msg = "parameter 'value' required for create/delete") elif alias_in: if len(value_list) != 1: module.fail_json(msg = "parameter 'value' must contain a single dns name for alias create/delete") elif not alias_hosted_zone_id_in: module.fail_json(msg = "parameter 'alias_hosted_zone_id' required for alias create/delete") elif ( weight_in!=None or region_in!=None or failover_in!=None ) and identifier_in==None: module.fail_json(msg= "If you specify failover, region or weight you must also specify identifier") if command_in == 'create': if ( weight_in!=None or region_in!=None or failover_in!=None ) and identifier_in==None: module.fail_json(msg= "If you specify failover, region or weight you must also specify identifier") elif ( weight_in==None and region_in==None and failover_in==None ) and identifier_in!=None: module.fail_json(msg= "You have specified identifier which makes sense only if you specify one of: weight, region or failover.") if vpc_id_in and not private_zone_in: module.fail_json(msg="parameter 'private_zone' must be true when specifying parameter" " 'vpc_id'") # connect to the route53 endpoint try: conn = Route53Connection(**aws_connect_kwargs) except boto.exception.BotoServerError as e: module.fail_json(msg = e.error_message) # Find the named zone ID zone = get_zone_by_name(conn, module, zone_in, private_zone_in, hosted_zone_id_in, vpc_id_in) # Verify that the requested zone is already defined in Route53 if zone is None: errmsg = "Zone %s does not exist in Route53" % zone_in module.fail_json(msg = errmsg) record = {} found_record = False wanted_rset = Record(name=record_in, type=type_in, ttl=ttl_in, identifier=identifier_in, weight=weight_in, region=region_in, health_check=health_check_in, failover=failover_in) for v in value_list: if alias_in: wanted_rset.set_alias(alias_hosted_zone_id_in, v, alias_evaluate_target_health_in) else: wanted_rset.add_value(v) sets = conn.get_all_rrsets(zone.id, name=record_in, type=type_in, identifier=identifier_in) for rset in sets: # Due to a bug in either AWS or Boto, "special" characters are returned as octals, preventing round # tripping of things like * and @. 
decoded_name = rset.name.replace(r'\052', '*') decoded_name = decoded_name.replace(r'\100', '@') #Need to save this changes in rset, because of comparing rset.to_xml() == wanted_rset.to_xml() in next block rset.name = decoded_name if identifier_in is not None: identifier_in = str(identifier_in) if rset.type == type_in and decoded_name.lower() == record_in.lower() and rset.identifier == identifier_in: found_record = True record['zone'] = zone_in record['type'] = rset.type record['record'] = decoded_name record['ttl'] = rset.ttl record['value'] = ','.join(sorted(rset.resource_records)) record['values'] = sorted(rset.resource_records) if hosted_zone_id_in: record['hosted_zone_id'] = hosted_zone_id_in record['identifier'] = rset.identifier record['weight'] = rset.weight record['region'] = rset.region record['failover'] = rset.failover record['health_check'] = rset.health_check if hosted_zone_id_in: record['hosted_zone_id'] = hosted_zone_id_in if rset.alias_dns_name: record['alias'] = True record['value'] = rset.alias_dns_name record['values'] = [rset.alias_dns_name] record['alias_hosted_zone_id'] = rset.alias_hosted_zone_id record['alias_evaluate_target_health'] = rset.alias_evaluate_target_health else: record['alias'] = False record['value'] = ','.join(sorted(rset.resource_records)) record['values'] = sorted(rset.resource_records) if command_in == 'create' and rset.to_xml() == wanted_rset.to_xml(): module.exit_json(changed=False) break if command_in == 'get': if type_in == 'NS': ns = record['values'] else: # Retrieve name servers associated to the zone. ns = conn.get_zone(zone_in).get_nameservers() module.exit_json(changed=False, set=record, nameservers=ns) if command_in == 'delete' and not found_record: module.exit_json
def load_config(default_values, user_values):
    if user_values is None:
        return default_values
    config = {}
    for k, v in user_values.items():
        if k in default_values:
            if isinstance(v, dict):
                # Start from the user's nested dict, then fill in any default
                # key that is missing or set to an empty string.
                cloned = user_values[k].copy()
                for key, value in default_values[k].items():
                    if (key is not None and key not in user_values[k]) \
                            or user_values[k][key] == '':
                        cloned[key] = value
                config[k] = cloned
            else:
                config[k] = v
        else:
            config[k] = v
    # Top-level defaults the user did not mention are copied over.
    for k, v in default_values.items():
        if k not in config:
            config[k] = v
    return config


def import_class(full_path):
    """Import and return the class (or any attribute) named by a dotted path."""
    path_split = full_path.split('.')
    path = ".".join(path_split[:-1])
    klass = path_split[-1]
    mod = __import__(path, fromlist=[klass])
    return getattr(mod, klass)
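# A quick illustration of the merge semantics (the dicts here are made up):
# user values win, except empty strings and keys missing from a nested dict,
# which fall back to the defaults; unknown user keys pass through unchanged.

defaults = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
user = {'db': {'host': 'example.com', 'port': ''}, 'extra': 1}

config = load_config(defaults, user)
assert config['db'] == {'host': 'example.com', 'port': 5432}  # '' falls back
assert config['debug'] is False  # missing top-level key copied from defaults
assert config['extra'] == 1      # unknown user keys are kept as-is

# import_class resolves a dotted path to an attribute:
Path = import_class('pathlib.Path')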
from django.conf import settings from django.conf.urls import static from django.urls import include, path, re_path from django.contrib import admin urlpatterns = [ path(r"admin/", admin.site.urls), path(r"flickr/", include("ditto.flickr.urls")), path(r"lastfm/", include("ditto.lastfm.urls")), path(r"pinboard/", include("ditto.pinboard.urls")), path(r"twitter/", include("ditto.twitter.urls")), path(r"", include("ditto.core.ur
ls")), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ re_path(r"^__debug__/", include(debug_toolbar.urls)), ] urlpatterns += static.sta
tic(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) urlpatterns += static.static( settings.STATIC_URL, document_root=settings.STATIC_ROOT )
, {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'cms.cmsplugin': { 'Meta': {'object_name': 'CMSPlugin'}, 'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}), 'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}), 'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}), 'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}), 'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, 'cms.placeholder': { 'Meta': {'object_name': 'Placeholder'}, 'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'slot': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}) }, u'cmsplugin_filer_file.filerfile': { 'Meta': {'object_name': 'FilerFile', '_ormbases': ['cms.CMSPlugin']}, u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}), 'file': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['filer.File']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'style': 
('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}), 'target_blank': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'filer.file': { 'Meta': {'object_name': 'File'}, '_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'all_files'", 'null': 'True', 'to': u"orm['filer.Folder']"}), 'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_publi
c': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharF
ield', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}), 'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}), 'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'polymorphic_filer.file_set+'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}), 'sha1': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '40', 'blank': 'True'}), 'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}) }, u'filer.folder': { 'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'parent', u'name'),)", 'object_name': 'Folder'}, 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'children'", 'null': 'True', 'to': u"orm['filer.Folder']"}), u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}) } }
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 """ Stub functions that are used by the Amazon Pinpoint unit tests. When tests are run against an actual AWS account, the stubber class does not set up stubs and passes all calls through to the Boto 3 client. """ from test_tools.example_stubber import ExampleStubber class PinpointStubber(ExampleStubber): """ A class that implements a variety of stub functions that are used by the Amazon Pinpoint unit tests. The stubbed functions all expect certain parameters to be passed to them as part of the tests, and will raise errors when the actual parameters differ from the expected. """ def __init__(self, client, use_stubs=True): """ Initializes the object with a specific client and configures it for stubbing or AWS passthrough. :param client: A Boto 3 Pinpoint client. :param use_stubs: When True, use stubs to intercept requests. Otherwise, pass requests through to AWS. """ super().__init__(client, use_stubs) def stub_create_app(self, name): self.add_response( 'create_app', expected_params={'CreateApplicationRequest': {'Name': name}}, service_response={ 'ApplicationResponse': { 'Arn': 'arn:aws:mobiletargeting:us-west-2:111122223333:apps/d41d8cd98f00b204e9800998ecf8427e', 'Id': 'd41d8cd98f00b204e9800998ecf8427e', 'Name': name } } ) def stub_create_app_error(self, name, error_code): self.add_client_error( 'create_app', expected_params={'CreateApplicationRequest': {'Name': name}}, service_error_code=error_code ) def stub_get_apps(self, apps): self.add_response( 'get_apps', expected_params={}, service_response={'ApplicationsResponse': {'Item': apps}} ) def stub_get_apps_error(self, error_code): self.add_client_error( 'get_apps', expected_params={}, service_error_code=error_code ) def stub_delete_app(self, app): self.add_response( 'delete_app', expected_params={'ApplicationId': app['Id']}, service_response={'ApplicationResponse': app} ) def stub_delete_app_error(self, app, error_code): self.add_client_error( 'delete_app', expected_params={'ApplicationId': app['Id']}, service_error_code=error_code ) def stub_send_email_messages( self, app_id, sender, to_addresses, char_set, subject, html_message, text_message, message_ids, error_code=None): expected_params = { 'ApplicationId': app_id, 'MessageRequest': { 'Addresses': { to_address: {'ChannelType': 'EMAIL'} for to_address in to_addresses }, 'MessageConfiguration': { 'EmailMessage': { 'FromAddress': sender, 'SimpleEmail': { 'Subject': {'Charset': char_set, 'Data': subject}, 'HtmlPart': {'Charset': char_set, 'Data': html_message}, 'TextPart': {'Charset': char_set, 'Data': text_message}}}}}} response = { 'MessageResponse': { 'ApplicationId': app_id, 'Result': { to_address: { 'MessageId': message_id, 'DeliveryStatus': 'SUCCESSFUL', 'StatusCode': 200 } for to_address, message_id in zip(to_addresses, message_ids) } } } self._stub_bifurcator( 'send_messages', expected_params, response, error_code=error_code) def stub_send_templated_email_messages( self, app_id, sender, to_addresses, template_name, template_version, message_ids, error_code=None): expected_params = { 'ApplicationId': app_id, 'MessageRequest': { 'Addresses': { to_address: {'ChannelType': 'EMAIL'} for to_address in to_addresses }, 'MessageConfiguration': {'EmailMessage': {'FromAddress': sender}}, 'TemplateConfiguration': { 'EmailTemplate': { 'Name': template_name, 'Version': template_version}}}} response = { 'MessageResponse': { 'ApplicationId': app_id, 'Result': { to_address: { 'MessageId': 
message_id, 'DeliveryStatus': 'SUCCESSFUL', 'StatusCode': 200 } for to_address, message_id in zip(to_addresses, message_ids) } } } self._stub_bifurcator( 'send_messages', expected_params, response, error_code=error_code) def stub_send_sms_message( self, app_id, origination_number, destination_number, message, message_type, message_id, error_code=None): expected_params = { 'ApplicationId': app_id, 'MessageRequest': { 'Addresses': {destination_number: {'ChannelType': 'SMS'}}, 'MessageConfiguration': { 'SMSMessage': { 'Body': message, 'MessageType': message_type,
'OriginationNumber': origination_number}}}} response = {'MessageResponse': { 'ApplicationId': app_id, 'Result': { destination_number: { 'DeliveryStatus': 'SUCCESSFUL', 'StatusCode': 200, 'MessageId': message_id}}}} self._stub_bifurcator( 'send_messages', expected_params, response, error_code=error_code) def stub_send_templated_sms_messa
ge( self, app_id, origination_number, destination_number, message_type, template_name, template_version, message_id, error_code=None): expected_params = { 'ApplicationId': app_id, 'MessageRequest': { 'Addresses': {destination_number: {'ChannelType': 'SMS'}}, 'MessageConfiguration': { 'SMSMessage': { 'MessageType': message_type, 'OriginationNumber': origination_number}}, 'TemplateConfiguration': { 'SMSTemplate': { 'Name': template_name, 'Version': template_version}}}} response = {'MessageResponse': { 'ApplicationId': app_id, 'Result': { destination_number: { 'DeliveryStatus': 'SUCCESSFUL', 'StatusCode': 200, 'MessageId': message_id}}}} self._stub_bifurcator( 'send_messages', expected_params, response, error_code=error_code)
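# A sketch of how a stubber like this is typically wired into a test. Hedged:
# this assumes ExampleStubber activates the underlying botocore stubber when
# use_stubs is True, which is not shown in this file.

import boto3

pinpoint = boto3.client('pinpoint', region_name='us-west-2')
stubber = PinpointStubber(pinpoint, use_stubs=True)
stubber.stub_create_app('demo-app')

response = pinpoint.create_app(CreateApplicationRequest={'Name': 'demo-app'})
assert response['ApplicationResponse']['Name'] == 'demo-app'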
# -*- coding: utf-8 -*-
from django.test import Client
from django.urls import reverse

from nmkapp import views

from .nmk_unit_test_case import NmkUnitTestCase


class ResultsTests(NmkUnitTestCase):
    def test_anon_user(self):
        """
        Test the results view with an anonymous user
        """
        self.client = Client()
        response = self.client.get(reverse(views.results))
        self.assertEqual(response.status_code, 302)

    def test_regular_user(self):
        """
        Test the results view with a logged-in user
        """
        self.client = Client()
        self.assertTrue(self.client.login(username='kokan@mail.com', password='12345'))
        response = self.client.get(reverse(views.results))
        self.assertEqual(response.status_code, 200)
# -*- coding: utf-8 -*- # Copyright 2016 Acsone SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). { 'name': 'Account Invoice Check Total', 'summary': """ Check if the verification total is equal to the bill's total""", 'version': '10.0.1.0.0', 'license': 'AGPL-3', 'author': 'Acsone SA/NV,Odoo Community Association (OCA)', 'website': 'https://acsone.eu/', 'depends': [ 'account', ], 'data': [ 'views/account_config_settings.xml', 'security/account_invoice_secu
rity.xml', 'views/account_invoice.xml', ], }
# -*- coding: utf-8 -*-
"""forms"""

import floppyforms.__future__ as floppyforms


class HidableMultipleChoiceField(floppyforms.MultipleChoiceField):
    """
    When hidden, the stock MultipleChoiceField renders an empty string
    rather than an <input type="hidden">.

    Overload the field's hidden widget to restore an <input type="hidden">.
    """
    hidden_widget = floppyforms.HiddenInput
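# Hedged usage sketch: the form below is invented, and rendering assumes a
# configured Django project (floppyforms widgets render through templates).

from django import forms


class TagsForm(forms.Form):
    tags = HidableMultipleChoiceField(
        choices=[('a', 'A'), ('b', 'B')], required=False)

# Rendering the bound field hidden (boundfield.as_hidden()) now emits an
# <input type="hidden"> element instead of the empty string the stock
# floppyforms field produced.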
#!/usr/bin/env python
from pylab import * fig = figure() subplot(221) imshow(rand(100,100)) subplot(222) imshow(rand(100,100)) subplot(223
) imshow(rand(100,100)) subplot(224) imshow(rand(100,100)) subplot_tool() show()
#!/usr/bin/env python import unittest import os.path import sys import time sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(__file__)), "..")) import base from PyQt4.QtTest import QTest from enki.core.core import core class Test(base.TestCase): def _verifyText(self, fileName, text): with open(os.path.join(self.TEST_FILE_DIR, fileName)) as file_: actualText = file_.read() self.assertEqual(text, actualText) def test_1(self): # Close all, no modified files self.createFile('file1.rb', 'asdf\nfdsa') self.createFile('file2.rb', 'asdf\nfdsa') self.keyClick('Ctrl+Shift+W') self.assertIsNone(core.workspace().currentDocument()) @base.inMainLoop def test_2(self): # Close all, do not save self.createFile('file1.rb', 'asdf\nfdsa') self.create
File('file2.rb', 'fdsa') self.keyClick('Ctrl+Enter') self.keyClicks('new text') self.openDialog(lambda: self.keyClick('Ctrl+Shift+W'), lambda dialog: self.keyClick('w')) self.assertIsNone(core.workspace().currentDocument()) self._verifyText('file2.rb', 'fdsa') @base.inMainLoop def test_3(self): # Close all, cancel close self.createFile('file1.rb', 'asdf\nfdsa') self.createFile('file2.rb', 'fdsa') sel
f.keyClick('Ctrl+Enter') self.keyClicks('new text') self.openDialog(lambda: self.keyClick('Ctrl+Shift+W'), lambda dialog: self.keyClick('c')) self.assertIsNotNone(core.workspace().currentDocument()) @base.inMainLoop def test_4(self): # Close all, save self.createFile('file1.rb', 'asdf\nfdsa') self.createFile('file2.rb', 'fdsa') self.keyClick('Ctrl+Enter') self.keyClicks('new text+') self.openDialog(lambda: self.keyClick('Ctrl+Shift+W'), lambda dialog: self.keyClick('s')) self.assertIsNone(core.workspace().currentDocument()) self._verifyText('file2.rb', 'new text+fdsa\n') @base.inMainLoop def test_5(self): # Close all, reject save dialog self.createFile('file1.rb', 'asdf\nfdsa') self.createFile('file2.rb', 'fdsa') self.keyClick('Ctrl+N') # file without name self.keyClicks('new text') # but modified def inUiSaveFilesDialog(dialog): # open and reject save dialog for file without name def inSaveFileDialog(saveDialog): QTest.qWait(4000) self.keyClick('Esc') self.openDialog(lambda: self.keyClick('s'), inSaveFileDialog) self.openDialog(lambda: self.keyClick('Ctrl+Shift+W'), inUiSaveFilesDialog) self.assertIsNotNone(core.workspace().currentDocument()) if __name__ == '__main__': unittest.main()
# encoding: utf8 from django.db import models, migrations class Migration(migrations.Migration): dependencies = [("podcasts", "0005_auto_20140610_1854")] operations = [ migrations.AlterField(
model_name="episode", name="outdated", field=models.BooleanField(default=False, db_index=True), ), migrations.AlterField(
model_name="podcast", name="outdated", field=models.BooleanField(default=False, db_index=True), ), migrations.AlterField( model_name="episode", name="guid", field=models.CharField(max_length=100, null=True), ), ]
''' Generic message-based protocol used by Bitcoin and P2Pool for P2P communication ''' import hashlib import struct from twisted.internet import protocol from twisted.python import log import p2pool from p2pool.bitcoin import data as bitcoin_data from p2pool.util import datachunker, variable class TooLong(Exception): pass class Protocol(protocol.Protocol): def __init__(self, message_prefix, max_payload_length, traffic_happened=variable.Event(), ignore_trailing_payload=False): self._message_prefix = message_prefix self._max_payload_length = max_payload_length self.dataReceived2 = datachunker.DataChunker(self.dataReceiver()) self.traffic_happened = traffic_happened self.ignore_trailing_payload = ignore_trailing_payload def dataReceived(self, data): self.traffic_happened.happened('p2p/in', len(data)) self.dataReceived2(data) def dataReceiver(self): while True: start = '' while start != self._message_prefix: start = (start + (yield 1))[-len(self._message_prefix):] command = (yield 12).rstrip('\0') length, = struct.unpack('<I', (yield 4)) if length > self._max_payload_length: print 'length too large' continue checksum = yield 4 payload = yield length if bitcoin_data.grshash(payload)[:4] != checksum: print 'invalid hash for', self.transport.getPeer().host, repr(command), length, checksum.encode('hex') if p2pool.DEBUG: print __import__('groestlcoin_hash').getHash(payload, len(payload))[:4].encode('hex'), payload.encode('hex') self.badPeerHappened() continue type_ = getattr(self, 'message_' + command, None) if type_ is None: if p2pool.DEBUG: print 'no type for', repr(command) continue try: self.packetReceived(command, type_.unpack(payload, self.ignore_trailing_payload)) except: print 'RECV', command, payload[:100].encode('hex') + ('...' if len(payload) > 100 else '') log.err(None, 'Error handling message: (see RECV line)') self.disconnect() def packetReceived(self, command, payload2): handler = getattr(self, 'handle_' + command, None) if handler is None: if p2pool.DEBUG: print 'no handler for', repr(command) return if getattr(self, 'connected', True) and not getattr(self, 'discon
necting', False): handler(**payload2) def disconnect(self): if hasattr(self.transport, 'abortConnection'): # Available since Twisted 11.1 self.transport.abortConnection
() else: # This doesn't always close timed out connections! warned about in main self.transport.loseConnection() def badPeerHappened(self): self.disconnect() def sendPacket(self, command, payload2): if len(command) >= 12: raise ValueError('command too long') type_ = getattr(self, 'message_' + command, None) if type_ is None: raise ValueError('invalid command') #print 'SEND', command, repr(payload2)[:500] payload = type_.pack(payload2) if len(payload) > self._max_payload_length: raise TooLong('payload too long') data = self._message_prefix + struct.pack('<12sI', command, len(payload)) + bitcoin_data.grshash(payload)[:4] + payload self.traffic_happened.happened('p2p/out', len(data)) self.transport.write(data) def __getattr__(self, attr): prefix = 'send_' if attr.startswith(prefix): command = attr[len(prefix):] return lambda **payload2: self.sendPacket(command, payload2) #return protocol.Protocol.__getattr__(self, attr) raise AttributeError(attr)
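# For reference, the frame layout dataReceiver() parses and sendPacket()
# emits is: prefix | 12-byte NUL-padded command | uint32-LE payload length |
# first 4 bytes of the payload hash | payload. A stand-alone sketch, with
# double-SHA256 standing in for bitcoin_data.grshash (an assumption; the
# Groestlcoin fork hashes differently):

import hashlib
import struct


def build_frame(prefix, command, payload):
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    header = prefix + struct.pack('<12sI', command, len(payload))
    return header + checksum + payload

# e.g. an 8-byte ping nonce framed with Bitcoin's mainnet magic:
frame = build_frame(b'\xf9\xbe\xb4\xd9', b'ping', b'\x00' * 8)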
from django.db.models import Q from import_export.admin import ImportExportModelAdmin from models import Vote, Law, PrivateProposal, KnessetProposal, GovProposal, Bill, GovLegislationCommitteeDecision from laws.management.commands.scrape_votes import Command as ScrapeVotesCommand from django.utils.translation import ugettext_lazy as _ from django.contrib import admin class MissingDataVotesFilter(admin.SimpleListFilter): # Human-readable title which will be displayed in the # right admin sidebar just above the filter options. title = _('Missing data votes') # Parameter for the filter that will be used in the URL query. parameter_name = 'is_missing_data_vote' def lookups(self, request, model_admin): """ Returns a list of tuples. The first element in each tuple is the coded value for the option that will appear in the URL query. The second element is the human-readable name for the option that will appear in the right sidebar. """ return ( ('is_missing_data_vote', _('Vote has missing data')), ) def queryset(self, request, queryset): """ Returns the filtered queryset based on the value provided in the query string and retrievable via `self.value()`. """ # Compare the requested value # to decide how to filter the queryset. if self.value() == 'is_missing_data_vote': return queryset.filter(Q(votes_count=0) | Q(votes_count=None)) else: return queryset class VoteAdmin(ImportExportModelAdmin): # filter_horizontal = ('voted_for','voted_against','voted_abstain','didnt_vote') list_display = ( '__unicod
e__', 'short_summary', 'full_text_link', 'votes_count', 'for_votes_count', 'against_votes_count', 'abstain_votes_count') search_fields = ('title', 'summary', 'full_text', 'id', 'src_id') list_filter = (MissingDataVotesFilter, ) def update_vote(self, request, queryset): vote_count = queryset.count() for vote in queryset: vote.update_vote_properties() self.message_user(request, "successfully updated {0} votes".format(vote_count)) upda
te_vote.short_description = 'update vote properties and calculations' def recreate_vote(self, request, queryset): recreated_votes = ScrapeVotesCommand().recreate_objects(queryset.values_list('pk', flat=True)) recreated_vote_ids_string = ', '.join([str(v.pk) for v in recreated_votes]) self.message_user(request, "successfully recreated {0} votes: {1}".format(len(recreated_votes), recreated_vote_ids_string)) recreate_vote.short_description = "recreate vote by deleting and then getting fresh data from knesset api" actions = ['update_vote', 'recreate_vote'] admin.site.register(Vote, VoteAdmin) class LawAdmin(ImportExportModelAdmin): search_fields = ('title',) list_display = ('title', 'merged_into') admin.site.register(Law, LawAdmin) class PrivateProposalAdmin(admin.ModelAdmin): pass admin.site.register(PrivateProposal, PrivateProposalAdmin) class KnessetProposalAdmin(admin.ModelAdmin): pass admin.site.register(KnessetProposal, KnessetProposalAdmin) class GovProposalAdmin(admin.ModelAdmin): search_fields = ('title', 'booklet_number') list_display = ('bill', 'booklet_number', 'knesset_id', 'date') list_filter = ('knesset_id',) admin.site.register(GovProposal, GovProposalAdmin) class MissingLawListFilter(admin.SimpleListFilter): # Human-readable title which will be displayed in the # right admin sidebar just above the filter options. title = _('Missing Laws') # Parameter for the filter that will be used in the URL query. parameter_name = 'is_missing_law' def lookups(self, request, model_admin): """ Returns a list of tuples. The first element in each tuple is the coded value for the option that will appear in the URL query. The second element is the human-readable name for the option that will appear in the right sidebar. """ return ( ('missing_law', _('Has Missing Law')), # ('90s', _('in the nineties')), ) def queryset(self, request, queryset): """ Returns the filtered queryset based on the value provided in the query string and retrievable via `self.value()`. """ # Compare the requested value (either '80s' or '90s') # to decide how to filter the queryset. if self.value() == 'missing_law': return queryset.filter(law=None) else: return queryset class BillAdmin(admin.ModelAdmin): list_display = ('law', 'title', 'stage') search_fields = ('title',) list_filter = ('stage', MissingLawListFilter) admin.site.register(Bill, BillAdmin) class GovLegislationCommitteeDecisionAdmin(admin.ModelAdmin): pass admin.site.register(GovLegislationCommitteeDecision, GovLegislationCommitteeDecisionAdmin)
"""MNE software for MEG and EEG data analysis.""" # PEP0440 compatible formatted version, see: # https://www.python.org/dev/peps/pep-0440/ # # Generic release markers: # X.Y # X.Y.Z # For bugfix releases # # Admissible pre-release markers: # X.YaN # Alpha release # X.YbN # Beta release # X.YrcN # Release Candidate # X.Y # Final release # # Dev branch marker is: 'X.Y.devN' where N is an integer. # from ._version import __version__ # have to import verbose first since it's needed by many things from .utils import (set_log_level, set_log_file, verbose, set_config, get_config, get_config_path, set_cache_dir, set_memmap_min_size, grand_average, sys_info, open_docs) from .io.pick import (pick_types, pick_channels, pick_channels_regexp, pick_channels_forward, pick_types_forward, pick_channels_cov, pick_channels_evoked, pick_info, channel_type, channel_indices_by_type) from .io.base import concatenate_raws from .io.meas_info import create_info, Info from .io.proj import Projection from .io.kit import read_epochs_kit from .io.eeglab import read_epochs_eeglab from .io.reference import (set_eeg_reference, set_bipolar_reference, add_reference_channels) from .io.what import what from .bem import (make_sphere_model, make_bem_model, make_bem_solution, read_bem_surfaces, write_bem_surfaces, write_head_bem, read_bem_solution, write_bem_solution) from .cov import (read_cov, write_cov, Covariance, compute_raw_covariance, compute_covariance, whiten_evoked, make_ad_hoc_cov) from .event import (read_events, write_events, find_events, merge_events, pick_events, make_fixed_length_events, concatenate_events, find_stim_steps, AcqParserFIF) from .forward import (read_forward_solution, apply_forward, apply_forward_raw, average_forward_solutions, Forward, write_forward_solution, make_forward_solution, convert_forward_solution, make_field_map, make_forward_dipole, use_coil_def) from .source_estimate import (read_source_estimate, SourceEstimate, VectorSourceEstimate, VolSourceEstimate, VolVectorSourceEstimate, MixedSourceEstimate, MixedVectorSourceEstimate,
grade_to_tris, spatial_src_adjacency, spatial_tris_adjacency,
spatial_dist_adjacency, spatial_inter_hemi_adjacency, spatio_temporal_src_adjacency, spatio_temporal_tris_adjacency, spatio_temporal_dist_adjacency, extract_label_time_course, stc_near_sensors) from .surface import (read_surface, write_surface, decimate_surface, read_tri, read_morph_map, get_head_surf, get_meg_helmet_surf, dig_mri_distances) from .morph import (SourceMorph, read_source_morph, grade_to_vertices, compute_source_morph) from .source_space import (read_source_spaces, vertex_to_mni, head_to_mni, head_to_mri, read_talxfm, write_source_spaces, setup_source_space, setup_volume_source_space, SourceSpaces, add_source_space_distances, morph_source_spaces, get_volume_labels_from_aseg, get_volume_labels_from_src, read_freesurfer_lut) from .annotations import (Annotations, read_annotations, annotations_from_events, events_from_annotations) from .epochs import (BaseEpochs, Epochs, EpochsArray, read_epochs, concatenate_epochs, make_fixed_length_epochs) from .evoked import Evoked, EvokedArray, read_evokeds, write_evokeds, combine_evoked from .label import (read_label, label_sign_flip, write_label, stc_to_label, grow_labels, Label, split_label, BiHemiLabel, read_labels_from_annot, write_labels_to_annot, random_parcellation, morph_labels, labels_to_stc) from .misc import parse_config, read_reject_parameters from .coreg import (create_default_subject, scale_bem, scale_mri, scale_labels, scale_source_space) from .transforms import (read_trans, write_trans, transform_surface_to, Transform) from .proj import (read_proj, write_proj, compute_proj_epochs, compute_proj_evoked, compute_proj_raw, sensitivity_map) from .dipole import read_dipole, Dipole, DipoleFixed, fit_dipole from .channels import (equalize_channels, rename_channels, find_layout, read_vectorview_selection) from .report import Report, open_report from .io import read_epochs_fieldtrip, read_evoked_fieldtrip, read_evokeds_mff from .rank import compute_rank from . import beamformer from . import channels from . import chpi from . import commands from . import connectivity from . import coreg from . import cuda from . import datasets from . import dipole from . import epochs from . import event from . import externals from . import io from . import filter from . import gui from . import inverse_sparse from . import minimum_norm from . import preprocessing from . import simulation from . import stats from . import surface from . import time_frequency from . import viz from . import decoding # deprecations from .utils import deprecated_alias deprecated_alias('read_selection', read_vectorview_selection) # initialize logging set_log_level(None, False) set_log_file()
# subsystemBonusGallentePropulsion2Agility # # Used by: # Su
bsystem: Proteus Propulsion - Hyperspatial Optimization type = "passive" def handler(fit, src, context): fit.ship.boostItemAttr("agility", src.getModifiedItemAttr("subsystemBonusGallentePropulsion2"), skill="Gallente Propu
lsion Systems")
import numpy as np

# Render each cell value (0..7) as a fixed-width 3-bit string.
vec_bitstring_3 = np.vectorize(lambda x: np.binary_repr(x, width=3))


def board_to_int(v):
    """Pack a 6x6 board of 3-bit cell values into a single 108-bit integer."""
    t = vec_bitstring_3(v)
    return int(''.join(np.apply_along_axis(lambda x: ''.join(x), 1, t)), 2)


def int_to_board(i):
    """Inverse of board_to_int, e.g. i = '154444257952488798331863040'."""
    s = bin(int(i))[2:].zfill(108)
    v = np.array([int(s[j:j+3], 2) for j in range(0, len(s), 3)], dtype=int)
    return v.reshape((6, 6))
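# Round-trip check of the 3-bit packing (36 cells * 3 bits = 108 bits, so
# every cell value must fit in 0..7; the board below is arbitrary):

rng = np.random.default_rng(0)
board = rng.integers(0, 8, size=(6, 6))
packed = board_to_int(board)
assert packed.bit_length() <= 108
assert np.array_equal(int_to_board(packed), board)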
# -*- coding: ut
f-8 -*- # Generated by Django 1.10.3 on 2017-03-07 02:03 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [
('medgointranet', '0012_motivoanulacion'), ] operations = [ migrations.AddField( model_name='atencion', name='formulario', field=models.BooleanField(default=False, verbose_name='¿Doctor completo formulario?'), ), ]
import logging
from datetime import timedelta

from core import Feed
from core.errors import ObservableValidationError
from core.observables import Ip


class AlienVaultIPReputation(Feed):
    default_values = {
        "frequency": timedelta(hours=4),
        "name": "AlienVaultIPReputation",
        "source": "http://reputation.alienvault.com/reputation.data",
        "description": "IP reputation feed generated by AlienVault",
    }

    def update(self):
        for index, line in self.update_csv(
            delimiter="#",
            comment=None,
            header=None,
            names=[
                "IP",
                "number_1",
                "number_2",
                "Tag",
                "Country",
                "City",
                "Coord",
                "number_3",
            ],
        ):
            self.analyze(line)

    def analyze(self, item):
        try:
            context = dict(source=self.name)

            ip_str = item["IP"]
            category = item["Tag"]
            country = item["Country"]

            ip = None
            try:
                ip = Ip.get_or_create(value=ip_str)
            except ObservableValidationError as e:
                logging.error(e)
                return False

            ip.add_source(self.name)
            context["country"] = country
            context["threat"] = category
            context["reliability"] = item["number_1"]
            context["risk"] = item["number_2"]

            ip.tag(category)
            ip.add_context(context)
        except Exception as e:
            logging.error("Error processing item %s: %s" % (item, e))
            return False
        return True
lf.ctrl.ItemCount(), 37) def testGetItem(self): "Test the ItemCount method" self.assertRaises(RuntimeError, self.ctrl.GetItem, "test\here\please") self.assertRaises(IndexError, self.ctrl.GetItem, r"\test\here\please") self.assertEquals( self.ctrl.GetItem((0, 1, 2)).Text(), self.texts[1][3] + " kg") self.assertEquals( self.ctrl.GetItem(r"\The Planets\Venus\4.869").Text(), self.texts[1][3] + " kg") self.assertEquals( self.ctrl.GetItem( ["The Planets", "Venus", "4.869"]).Text(), self.texts[1][3] + " kg") def testItemText(self): "Test the ItemCount method" self.assertEquals(self.ctrl.Root().Text(), self.root_text) self.assertEquals( self.ctrl.GetItem((0, 1, 2)).Text(), self.texts[1][3] + " kg") def testSelect(self): "Test selecting an item" self.ctrl.Select((0, 1, 2)) self.ctrl.GetItem((0, 1, 2)).State() self.assertEquals(True, self.ctrl.IsSelected((0, 1, 2))) def testEnsureVisible(self): "make sure that the item is visible" # note this is partially a fake test at the moment because # just by getting an item - we usually make it visible self.ctrl.EnsureVisible((0, 8, 2)) # make sure that the item is not hidden self.assertNotEqual(None, self.ctrl.GetItem((0, 8, 2)).Rectangle()) def testGetProperties(self): "Test getting the properties for the treeview control" props = self.ctrl.GetProperties() self.assertEquals( "TreeView", props['FriendlyClassName']) self.assertEquals( self.ctrl.Texts(), props['Texts']) for prop_name in props: self.assertEquals(getattr(self.ctrl, prop_name)(), props[prop_name]) class HeaderTestCases(unittest.TestCase): "Unit tests for the Header class" def setUp(self): """Start the application set some data and ensure the application is in the state we want it.""" # start the application from pywinauto.application import Application app = Application() app.start_(os.path.join(controlspy_folder, "Header.exe")) self.texts = [u'Distance', u'Diameter', u'Mass'] self.item_rects = [ RECT(0, 0, 90, 26), RECT(90, 0, 180, 26), RECT(180, 0, 260, 26)] self.app = app self.dlg = app.MicrosoftControlSpy self.ctrl = app.MicrosoftControlSpy.Header.WrapperObject() def tearDown(self): "Close the application after tests" # close the application self.dlg.SendMessage(win32defines.WM_CLOSE) def testFriendlyClass(self): "Make sure the friendly class is set correctly" self.assertEquals (self.ctrl.FriendlyClassName(), "Header") def testTexts(self): "Make sure the texts are set correctly" self.assertEquals (self.ctrl.Texts()[1:], self.texts) def testGetProperties(self): "Test getting the properties for the header control" props = self.ctrl.GetProperties() self.assertEquals( self.ctrl.FriendlyClassName(), props['FriendlyClassName']) self.assertEquals( self.ctrl.Texts(), props['Texts']) for prop_name in p
rops: self.assertEquals(getattr(self.ctrl, prop_name)(), props[prop_name]) def testItemCount(self): self.assertEquals(3, self.ctrl.ItemCount()) def testGetColumnRectangle(self): for i in range(0, 3): self.assertEquals( self.item_rects[i], self.ctrl.GetColumnRectangle(i)) def testClientRects(self): test_rect
s = self.item_rects test_rects.insert(0, self.ctrl.ClientRect()) self.assertEquals( test_rects, self.ctrl.ClientRects()) def testGetColumnText(self): for i in range(0, 3): self.assertEquals( self.texts[i], self.ctrl.GetColumnText(i)) class StatusBarTestCases(unittest.TestCase): "Unit tests for the TreeViewWrapper class" def setUp(self): """Start the application set some data and ensure the application is in the state we want it.""" # start the application from pywinauto.application import Application app = Application() app.start_(os.path.join(controlspy_folder, "Status bar.exe")) self.texts = ["Long text", "", "Status Bar"] self.part_rects = [ RECT(0, 2, 65, 20), RECT(67, 2, 90, 20), RECT(92, 2, 357, 20)] self.app = app self.dlg = app.MicrosoftControlSpy self.ctrl = app.MicrosoftControlSpy.StatusBar.WrapperObject() #self.dlg.MenuSelect("Styles") # select show selection always, and show checkboxes #app.ControlStyles.ListBox1.TypeKeys( # "{HOME}{SPACE}" + "{DOWN}"* 12 + "{SPACE}") #self.app.ControlStyles.ApplyStylesSetWindowLong.Click() #self.app.ControlStyles.SendMessage(win32defines.WM_CLOSE) def tearDown(self): "Close the application after tests" # close the application self.dlg.SendMessage(win32defines.WM_CLOSE) def testFriendlyClass(self): "Make sure the friendly class is set correctly" self.assertEquals (self.ctrl.FriendlyClassName(), "StatusBar") def testTexts(self): "Make sure the texts are set correctly" self.assertEquals (self.ctrl.Texts()[1:], self.texts) def testGetProperties(self): "Test getting the properties for the status bar control" props = self.ctrl.GetProperties() self.assertEquals( self.ctrl.FriendlyClassName(), props['FriendlyClassName']) self.assertEquals( self.ctrl.Texts(), props['Texts']) for prop_name in props: self.assertEquals(getattr(self.ctrl, prop_name)(), props[prop_name]) def testBorderWidths(self): "Make sure the border widths are retrieved correctly" self.assertEquals ( self.ctrl.BorderWidths(), dict( Horizontal = 0, Vertical = 2, Inter = 2, ) ) def testPartCount(self): "Make sure the number of parts is retrieved correctly" self.assertEquals (self.ctrl.PartCount(), 3) def testPartRightEdges(self): "Make sure the part widths are retrieved correctly" for i in range(0, self.ctrl.PartCount()-1): self.assertEquals (self.ctrl.PartRightEdges()[i], self.part_rects[i].right) self.assertEquals(self.ctrl.PartRightEdges()[i+1], -1) def testGetPartRect(self): "Make sure the part rectangles are retrieved correctly" for i in range(0, self.ctrl.PartCount()): self.assertEquals (self.ctrl.GetPartRect(i), self.part_rects[i]) self.assertRaises(IndexError, self.ctrl.GetPartRect, 99) def testClientRects(self): self.assertEquals(self.ctrl.ClientRect(), self.ctrl.ClientRects()[0]) self.assertEquals(self.part_rects, self.ctrl.ClientRects()[1:]) def testGetPartText(self): self.assertRaises(IndexError, self.ctrl.GetPartText, 99) for i, text in enumerate(self.texts): self.assertEquals(text, self.ctrl.GetPartText(i)) class TabControlTestCases(unittest.TestCase): "Unit tests for the TreeViewWrapper class" def setUp(self): """Start the application set some data and ensure the application is in the state we want it.""" # start the application from pywinauto.application import Application app = Application() app.start_(os.path.join(controlspy_folder, "Tab.exe")) self.texts = [ "Pluto", "Neptune", "Uranus", "Saturn", "Jupiter", "Mars", "Earth", "Venus", "Mercury", "Sun"] self.rects = [ RECT(2,2,80,21), RECT(80,2,174,21), RECT(174,2,261,21), RECT(
from rest_framework import status

from test_utils import serialized_time


def test_get_profile_topics(
    api_client, enable_premium_requirement, profile_topic_factory, user_factory
):
    """
    Premium users should be able to list their own profile topics.
    """
    password = "password"
    user = user_factory(has_premium=True, password=password)
    api_client.log_in(user.primary_email.email, password)

    topic = profile_topic_factory(profile__km_user__user=user)

    url = f"/know-me/profile/profiles/{topic.profile.pk}/topics/"
    response = api_client.get(url)

    assert response.status_code == status.HTTP_200_OK
    assert response.json() == [
        {
            "id": topic.pk,
            "url": api_client.build_full_url(
                f"/know-me/profile/profile-topics/{topic.pk}/"
            ),
            "created_at": serialized_time(topic.created_at),
            "updated_at": serialized_time(topic.updated_at),
            "is_detailed": topic.is_detailed,
            "items_url": api_client.build_full_url(
                f"/know-me/profile/profile-topics/{topic.pk}/items/"
            ),
            "name": topic.name,
            "permissions": {"read": True, "write": True},
            "profile_id": topic.profile.pk,
        }
    ]


def test_post_create_topic(
    api_client, enable_premium_requirement, profile_factory, user_factory
):
    """
    Premium users should be able to add new topics to their own profiles.
    """
    password = "password"
    user = user_factory(has_premium=True, password=password)
    api_client.log_in(user.primary_email.email, password)

    profile = profile_factory(km_user__user=user)

    url = f"/know-me/profile/profiles/{profile.pk}/topics/"
    data = {"name": "Test Topic"}
    response = api_client.post(url, data)

    assert response.status_code == status.HTTP_201_CREATED
    assert response.json()["name"] == data["name"]


def test_put_topic_order(
    api_client, enable_premium_requirement, profile_topic_factory, user_factory
):
    """
    Premium users should be able to sort their own profile topics with
    respect to the parent profile.
    """
    password = "password"
    user = user_factory(has_premium=True, password=password)
    api_client.log_in(user.primary_email.email, password)

    t1 = profile_topic_factory(profile__km_user__user=user)
    t2 = profile_topic_factory(profile=t1.profile)

    url = f"/know-me/profile/profiles/{t1.profile.pk}/topics/"
    data = {"order": [t2.pk, t1.pk]}
    response = api_client.put(url, data)

    assert response.status_code == status.HTTP_200_OK

    # The collection should now be sorted
    topics = api_client.get(url).json()
    assert list(map(lambda topic: topic["id"], topics)) == data["order"]
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import textwrap
import unittest

# The sorted one.
from SortKeys.ttypes import SortedStruct, NegativeId
from SortSets.ttypes import SortedSetStruct
from thrift.protocol import TSimpleJSONProtocol
from thrift.transport.TTransport import TMemoryBuffer


def writeToJSON(obj):
    trans = TMemoryBuffer()
    proto = TSimpleJSONProtocol.TSimpleJSONProtocol(trans)
    obj.write(proto)
    return trans.getvalue()


def readStructFromJSON(jstr, struct_type):
    stuff = struct_type()
    trans = TMemoryBuffer(jstr)
    proto = TSimpleJSONProtocol.TSimpleJSONProtocol(trans, struct_type.thrift_spec)
    stuff.read(proto)
    return stuff


class TestSortKeys(unittest.TestCase):
    def testSorted(self):
        static_struct = SortedStruct(aMap={"b": 1.0, "a": 1.0})
        unsorted_blob = b'{\n  "aMap": {\n    "b": 1.0,\n    "a": 1.0\n  }\n}'
        sorted_blob = b'{\n  "aMap": {\n    "a": 1.0,\n    "b": 1.0\n  }\n}'

        sorted_struct = readStructFromJSON(unsorted_blob, SortedStruct)
        blob = writeToJSON(sorted_struct)

        self.assertNotEqual(blob, unsorted_blob)
        self.assertEqual(blob, sorted_blob)
        self.assertEqual(static_struct, sorted_struct)

    def testSetSorted(self):
        unsorted_set = set(["5", "4", "3", "2", "1", "0"])
        static_struct = SortedSetStruct(aSet=unsorted_set)
        unsorted_blob = (
            textwrap.dedent(
                """\
                {{
                  "aSet": [
                    "{}"
                  ]
                }}"""
            )
            .format('",\n    "'.join(unsorted_set))
            .encode()
        )
        sorted_blob = (
            textwrap.dedent(
                """\
                {{
                  "aSet": [
                    "{}"
                  ]
                }}"""
            )
            .format('",\n    "'.join(sorted(unsorted_set)))
            .encode()
        )

        sorted_struct = readStructFromJSON(unsorted_blob, SortedSetStruct)
        blob = writeToJSON(sorted_struct)

        self.assertNotEqual(blob, unsorted_blob)
        self.assertEqual(blob, sorted_blob)
        self.assertEqual(static_struct, sorted_struct)

    def testNegativeId(self):
        obj = NegativeId()
        self.assertEqual(obj.field1, 1)
        self.assertEqual(obj.field2, 2)
        self.assertEqual(obj.field3, 3)
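
# A minimal round-trip sketch built on the helpers above (illustrative, not
# part of the test suite): writing any generated struct through
# TSimpleJSONProtocol yields a blob with sorted keys, and the blob reads
# back into an equal struct.
def _roundtrip_demo():
    s = SortedStruct(aMap={"b": 2.0, "a": 1.0})
    blob = writeToJSON(s)  # keys come back serialized as "a", then "b"
    assert readStructFromJSON(blob, SortedStruct) == s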
0.
    self.z = math_ops.multiply(self.x, self.y, name="z")  # Should be -4.0.

    self.sess = session.Session()
    self.sess.run(variables.global_variables_initializer())

  def tearDown(self):
    ops.reset_default_graph()

  def testContToFetchNotInTransitiveClosureShouldError(self):
    stepper = NodeStepper(self.sess, "e:0")

    sorted_nodes = stepper.sorted_nodes()
    self.assertEqual(7, len(sorted_nodes))
    self.assertLess(sorted_nodes.index("a"), sorted_nodes.index("a/read"))
    self.assertLess(sorted_nodes.index("b"), sorted_nodes.index("b/read"))
    self.assertLess(sorted_nodes.index("a"), sorted_nodes.index("c"))
    self.assertLess(sorted_nodes.index("b"), sorted_nodes.index("c"))
    self.assertLess(sorted_nodes.index("a"), sorted_nodes.index("d"))
    self.assertLess(sorted_nodes.index("d"), sorted_nodes.index("e"))
    self.assertLess(sorted_nodes.index("c"), sorted_nodes.index("e"))

    self.assertSetEqual(
        {"e:0", "d:0", "c:0", "a/read:0", "b/read:0", "b:0", "a:0"},
        set(stepper.closure_elements()))

    with self.assertRaisesRegexp(
        ValueError,
        "Target \"f:0\" is not in the transitive closure for the fetch of the "
        "stepper"):
      stepper.cont("f:0")

  def testContToNodeNameShouldReturnTensorValue(self):
    stepper = NodeStepper(self.sess, "e:0")

    cont_result = stepper.cont("c")
    self.assertAllClose(6.0, cont_result)

  def testUsingNamesNotUsingIntermediateTensors(self):
    stepper = NodeStepper(self.sess, "e:0")

    # The first cont() call should have used no feeds.
    result = stepper.cont("c:0")
    self.assertAllClose(6.0, result)
    self.assertEqual({}, stepper.last_feed_types())

    # The second cont() call should have used the tensor handle from the
    # previous cont() call.
    result = stepper.cont("e:0")
    self.assertAllClose(24.0, result)
    self.assertEqual({
        "c:0": NodeStepper.FEED_TYPE_HANDLE
    }, stepper.last_feed_types())

  def testUsingNodesNotUsingIntermediateTensors(self):
    stepper = NodeStepper(self.sess, self.e)

    # There should be no handles before any cont() calls.
    self.assertEqual([], stepper.handle_names())
    self.assertSetEqual(set(), stepper.handle_node_names())

    # Before the cont() call, the stepper should not have access to the value
    # of c:0.
    with self.assertRaisesRegexp(
        ValueError,
        "This stepper instance does not have access to the value of tensor "
        "\"c:0\""):
      stepper.get_tensor_value("c:0")

    # Using the node/tensor itself, instead of the name str, should work on
    # cont().
    result = stepper.cont(self.c)
    self.assertAllClose(6.0, result)
    self.assertEqual({}, stepper.last_feed_types())

    self.assertEqual(["c:0"], stepper.handle_names())
    self.assertEqual({"c"}, stepper.handle_node_names())

    # After the cont() call, the stepper should have access to the value of
    # c:0 via a tensor handle.
    self.assertAllClose(6.0, stepper.get_tensor_value("c:0"))

    result = stepper.cont(self.e)
    self.assertAllClose(24.0, result)
    self.assertEqual({
        "c:0": NodeStepper.FEED_TYPE_HANDLE
    }, stepper.last_feed_types())

  def testIsFeedableShouldGiveCorrectAnswers(self):
    stepper = NodeStepper(self.sess, self.e)

    self.assertTrue(stepper.is_feedable("a/read:0"))
    self.assertTrue(stepper.is_feedable("b/read:0"))
    self.assertTrue(stepper.is_feedable("c:0"))
    self.assertTrue(stepper.is_feedable("d:0"))

  def testOverrideValue(self):
    stepper = NodeStepper(self.sess, self.e)

    result = stepper.cont(self.c)
    self.assertAllClose(6.0, result)
    self.assertEqual({}, stepper.last_feed_types())

    # There should be no overrides before any cont() calls.
    self.assertEqual([], stepper.override_names())

    # Calling cont() on c again should lead to use of the handle.
    result = stepper.cont(self.c)
    self.assertAllClose(6.0, result)
    self.assertEqual({
        "c:0": NodeStepper.FEED_TYPE_HANDLE
    }, stepper.last_feed_types())

    # Override c:0.
    stepper.override_tensor("c:0", 7.0)

    # After the overriding, calling get_tensor_value() on c:0 should yield the
    # overriding value.
    self.assertEqual(7.0, stepper.get_tensor_value("c:0"))

    # Now c:0 should have only an override value, but no cached handle, because
    # the handle should have been invalidated.
    self.assertEqual([], stepper.handle_names())
    self.assertSetEqual(set(), stepper.handle_node_names())
    self.assertEqual(["c:0"], stepper.override_names())

    # Run a downstream tensor after the value override.
    result = stepper.cont(self.e)
    self.assertAllClose(28.0, result)  # Should reflect the overriding value.

    # Should use override, instead of the handle.
    self.assertEqual({
        "c:0": NodeStepper.FEED_TYPE_OVERRIDE
    }, stepper.last_feed_types())

  def testOverrideValueTwice(self):
    stepper = NodeStepper(self.sess, self.e)

    # Override once.
    stepper.override_tensor("c:0", 7.0)
    self.assertAllClose(28.0, stepper.cont(self.e))
    self.assertEqual({
        "c:0": NodeStepper.FEED_TYPE_OVERRIDE
    }, stepper.last_feed_types())

    self.assertEqual(["e:0"], stepper.handle_names())
    self.assertSetEqual({"e"}, stepper.handle_node_names())
    self.assertEqual(["c:0"], stepper.override_names())

    # Calling cont(self.e) again. This time the cached tensor handle of e
    # should be used.
    self.assertEqual(28.0, stepper.cont(self.e))
    self.assertEqual({
        "e:0": NodeStepper.FEED_TYPE_HANDLE
    }, stepper.last_feed_types())

    # Override c again. This should have invalidated the cache for e.
    stepper.override_tensor("c:0", 8.0)
    self.assertEqual([], stepper.handle_names())
    self.assertEqual(set(), stepper.handle_node_names())
    self.assertEqual(["c:0"], stepper.override_names())

    self.assertAllClose(32.0, stepper.cont(self.e))
    self.assertEqual({
        "c:0": NodeStepper.FEED_TYPE_OVERRIDE
    }, stepper.last_feed_types())

  def testRemoveOverrideValue(self):
    stepper = NodeStepper(self.sess, self.e)

    result = stepper.cont(self.c)
    self.assertAllClose(6.0, result)
    self.assertEqual({}, stepper.last_feed_types())

    # The previous cont() step should have generated a cached tensor handle.
    self.assertEqual(["c:0"], stepper.handle_names())
    self.assertSetEqual({"c"}, stepper.handle_node_names())

    # Override c:0.
    stepper.override_tensor("c:0", 7.0)

    # The overriding should have invalidated the tensor handle.
    self.assertEqual([], stepper.handle_names())
    self.assertSetEqual(set(), stepper.handle_node_names())
    self.assertEqual(["c:0"], stepper.override_names())

    result = stepper.cont(self.e)
    self.assertAllClose(28.0, result)  # Should reflect the overriding value.
    self.assertEqual({
        "c:0": NodeStepper.FEED_TYPE_OVERRIDE
    }, stepper.last_feed_types())

    # The handle to tensor e:0 should have been cached, even though its
    # transitive closure contains an override.
    self.assertIn("e:0", stepper.handle_names())
    self.assertSetEqual({"e"}, stepper.handle_node_names())

    # Remove the override.
    stepper.remove_override("c:0")

    # c:0 should not be in the overrides anymore.
    self.assertEqual([], stepper.override_names())

    # Removing the override should have invalidated the tensor handle for c.
    self.assertNotIn("e:0", stepper.handle_names())
    self.assertNotIn("e", stepper.handle_node_names())

    # Should reflect the non-overriding value.
    self.assertAllClose(24.0, stepper.cont(self.e))

    # This time, the handle to tensor e:0 should have been cached again, even
    # though its transitive closure contains an override.
    self.assertIn("e:0", stepper.handle_names())
    self.assertIn("e", stepper.handle_node_names())

    # Calling cont(self.e) again should have used the tensor handle to e:0.
    self.assertAllClose(24.0, stepper.cont(self.e))
    self.assertEqual({
        "e:0": NodeStepper.FEED_TYPE_HANDLE
    }, stepper.last_feed_types())
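
  # A condensed sketch of the handle/override life cycle the tests above
  # verify (comments only; `sess` holds the same graph as setUp builds):
  #
  #   stepper = NodeStepper(sess, "e:0")
  #   stepper.cont("c:0")                  # computes c, caches a handle for c:0
  #   stepper.cont("e:0")                  # reuses the handle (FEED_TYPE_HANDLE)
  #   stepper.override_tensor("c:0", 7.0)  # invalidates handles downstream of c
  #   stepper.cont("e:0")                  # recomputes using FEED_TYPE_OVERRIDE
  #   stepper.remove_override("c:0")       # back to the true value of c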
pinds[0], epinds[-1] + 1))

    return inds


def linear_interp_basis(toas, dt=30 * 86400):
    """Provides a basis for linear interpolation.

    :param toas: Pulsar TOAs in seconds
    :param dt: Linear interpolation step size in seconds.

    :returns: Linear interpolation basis and nodes
    """

    # evenly spaced points
    x = np.arange(toas.min(), toas.max() + dt, dt)
    M = np.zeros((len(toas), len(x)))

    # make linear interpolation basis
    for ii in range(len(x) - 1):
        idx = np.logical_and(toas >= x[ii], toas <= x[ii + 1])
        M[idx, ii] = (toas[idx] - x[ii + 1]) / (x[ii] - x[ii + 1])
        M[idx, ii + 1] = (toas[idx] - x[ii]) / (x[ii + 1] - x[ii])

    # only return non-zero columns
    idx = M.sum(axis=0) != 0

    return M[:, idx], x[idx]


# overlap reduction functions


@function
def hd_orf(pos1, pos2):
    """Hellings & Downs spatial correlation function."""
    if np.all(pos1 == pos2):
        return 1
    else:
        omc2 = (1 - np.dot(pos1, pos2)) / 2
        return 1.5 * omc2 * np.log(omc2) - 0.25 * omc2 + 0.5


@function
def dipole_orf(pos1, pos2):
    """Dipole spatial correlation function."""
    if np.all(pos1 == pos2):
        return 1 + 1e-5
    else:
        return np.dot(pos1, pos2)


@function
def monopole_orf(pos1, pos2):
    """Monopole spatial correlation function."""
    if np.all(pos1 == pos2):
        return 1.0 + 1e-5
    else:
        return 1.0


@function
def anis_orf(pos1, pos2, params, **kwargs):
    """Anisotropic GWB spatial correlation function."""

    anis_basis = kwargs["anis_basis"]
    psrs_pos = kwargs["psrs_pos"]
    lmax = kwargs["lmax"]

    psr1_index = [ii for ii in range(len(psrs_pos)) if np.all(psrs_pos[ii] == pos1)][0]
    psr2_index = [ii for ii in range(len(psrs_pos)) if np.all(psrs_pos[ii] == pos2)][0]

    clm = np.zeros((lmax + 1) ** 2)
    clm[0] = 2.0 * np.sqrt(np.pi)
    if lmax > 0:
        clm[1:] = params

    return sum(clm[ii] * basis
               for ii, basis in enumerate(anis_basis[: (lmax + 1) ** 2, psr1_index, psr2_index]))


@function
def unnormed_tm_basis(Mmat):
    return Mmat, np.ones_like(Mmat.shape[1])


@function
def normed_tm_basis(Mmat, norm=None):
    if norm is None:
        norm = np.sqrt(np.sum(Mmat ** 2, axis=0))

    nmat = Mmat / norm
    nmat[:, norm == 0] = 0

    return nmat, np.ones_like(Mmat.shape[1])


@function
def svd_tm_basis(Mmat):
    u, s, v = np.linalg.svd(Mmat, full_matrices=False)
    return u, np.ones_like(s)


@function
def tm_prior(weights):
    return weights * 1e40


# Physical ephemeris model utility functions


def get_planet_orbital_elements(model="setIII"):
    """Grab physical ephemeris model files"""
    dpath = enterprise.__path__[0] + "/datafiles/ephemeris/"
    return (
        np.load(dpath + "/jupiter-" + model + "-mjd.npy"),
        np.load(dpath + "/jupiter-" + model + "-xyz-svd.npy"),
        np.load(dpath + "/saturn-" + model + "-xyz-svd.npy"),
    )


def ecl2eq_vec(x):
    """Rotate (n,3) vector time series from ecliptic to equatorial."""
    M_ecl = const.M_ecl
    return np.einsum("jk,ik->ij", M_ecl, x)


def eq2ecl_vec(x):
    """Rotate (n,3) vector time series from equatorial to ecliptic."""
    M_ecl = const.M_ecl
    return np.einsum("kj,ik->ij", M_ecl, x)


def euler_vec(z, y, x, n):
    """Return (n,3,3) tensor with each (3,3) block containing an Euler rotation
    with angles z, y, x. Optionally each of z, y, x can be a vector of length n."""

    L = np.zeros((n, 3, 3), "d")
    cosx, sinx = np.cos(x), np.sin(x)
    L[:, 0, 0] = 1
    L[:, 1, 1] = L[:, 2, 2] = cosx
    L[:, 1, 2] = -sinx
    L[:, 2, 1] = sinx

    N = np.zeros((n, 3, 3), "d")
    cosy, siny = np.cos(y), np.sin(y)
    N[:, 0, 0] = N[:, 2, 2] = cosy
    N[:, 1, 1] = 1
    N[:, 0, 2] = siny
    N[:, 2, 0] = -siny

    ret = np.einsum("ijk,ikl->ijl", L, N)

    M = np.zeros((n, 3, 3), "d")
    cosz, sinz = np.cos(z), np.sin(z)
    M[:, 0, 0] = M[:, 1, 1] = cosz
    M[:, 0, 1] = -sinz
    M[:, 1, 0] = sinz
    M[:, 2, 2] = 1

    ret = np.einsum("ijk,ikl->ijl", ret, M)
    return ret


def ss_framerotate(mjd, planet, x, y, z, dz, offset=None, equatorial=False):
    """Rotate planet trajectory given as (n,3) tensor, by ecliptic Euler
    angles x, y, z, and by z rate dz. The rate has units of rad/year, and is
    referred to offset 2010/1/1. Dates must be given in MJD."""

    t_offset = 55197.0  # MJD 2010/01/01

    if equatorial:
        planet = eq2ecl_vec(planet)

    E = euler_vec(z + dz * (mjd - t_offset) / 365.25, y, x, planet.shape[0])

    planet = np.einsum("ijk,ik->ij", E, planet)

    if offset is not None:
        planet = np.array(offset) + planet

    if equatorial:
        planet = ecl2eq_vec(planet)

    return planet


def dmass(planet, dm_over_Msun):
    return dm_over_Msun * planet


@function
def physicalephem_spectrum(sigmas):
    # note the creative use of the "labels" (the very sigmas, not frequencies)
    return sigmas ** 2


@function
def createfourierdesignmatrix_physicalephem(
    toas,
    planetssb,
    pos_t,
    frame_drift_rate=1e-9,
    d_jupiter_mass=1.54976690e-11,
    d_saturn_mass=8.17306184e-12,
    d_uranus_mass=5.71923361e-11,
    d_neptune_mass=7.96103855e-11,
    jup_orb_elements=0.05,
    sat_orb_elements=0.5,
    model="setIII",
):
    """
    Construct physical ephemeris perturbation design matrix and 'frequencies'.
    Parameters can be excluded by setting the corresponding prior sigma to None.

    :param toas: vector of time series in seconds
    :param pos: pulsar position as Cartesian vector
    :param frame_drift_rate: normal sigma for frame drift rate
    :param d_jupiter_mass: normal sigma for Jupiter mass perturbation
    :param d_saturn_mass: normal sigma for Saturn mass perturbation
    :param d_uranus_mass: normal sigma for Uranus mass perturbation
    :param d_neptune_mass: normal sigma for Neptune mass perturbation
    :param jup_orb_elements: normal sigma for Jupiter orbital elem. perturb.
    :param sat_orb_elements: normal sigma for Saturn orbital elem. perturb.
    :param model: vector basis used by Jupiter and Saturn perturb.;
                  see PhysicalEphemerisSignal, defaults to "setIII"

    :return: F: Fourier design matrix of shape (len(toas), nvecs)
    :return: sigmas: Phi sigmas (nvecs, to be passed to physicalephem_spectrum)
    """

    # Jupiter + Saturn orbit definitions that we pass to physical_ephem_delay
    oa = {}
    (oa["times"], oa["jup_orbit"], oa["sat_orbit"]) = get_planet_orbital_elements(model)

    dpar = 1e-5  # may need finessing

    Fl, Phil = [], []
    for parname in [
        "frame_drift_rate",
        "d_jupiter_mass",
        "d_saturn_mass",
        "d_uranus_mass",
        "d_neptune_mass",
        "jup_orb_elements",
        "sat_orb_elements",
    ]:
        ppar = locals()[parname]
        if ppar:
            if parname not in ["jup_orb_elements", "sat_orb_elements"]:
                # need to normalize?
                Fl.append(physical_ephem_delay(toas, planetssb, pos_t, **{parname: dpar}) / dpar)
                Phil.append(ppar)
            else:
                for i in range(6):
                    c = np.zeros(6)
                    c[i] = dpar

                    # Fl.append(physical_ephem_delay(toas, planetssb, pos_t,
                    #                                **{parname: c}, **oa)/dpar)
                    kwarg_dict = {parname: c}
                    kwarg_dict.update(oa)
                    Fl.append(physical_ephem_delay(toas, planetssb, pos_t, **kwarg_dict) / dpar)

                    Phil.append(ppar)

    return np.array(Fl).T.copy(), np.array(Phil)


@function
def physical_ephem_delay(
    toas,
    planetssb,
    pos_t,
    frame_drift_rate=0,
    d_jupiter_mass=0,
    d_saturn_mass=0,
    d_uranus_mass=0,
    d_neptune_mass=0,
    jup_orb_elements=np.zeros(6, "d"),
    sat_orb_elements=np.zeros(6, "d"),
    times=None,
    jup_orbit=None,
    sat_orbit=None,
    equatorial=True,
):
    # convert toas to MJD
    mjd = toas / 86400

    # grab planet-to-SSB vectors
    earth = pl
one)

    # If there are no unique fields on the model, return false
    if not django_opts.unique_together and not any(x.unique for x in django_opts.fields):
        return False

    opts = getattr(model_or_instance, "Djangae", None)
    if opts:
        if hasattr(opts, "disable_constraint_checks"):
            if opts.disable_constraint_checks:
                return False
            else:
                return True

    return not getattr(settings, "DJANGAE_DISABLE_CONSTRAINT_CHECKS", False)


class KeyProperty(db.Property):
    """A property that stores a datastore.Key reference to another object.

    Think of this as a Django GenericForeignKey which returns only the PK
    value, not the whole object, or a db.ReferenceProperty which can point
    to any model kind, and only returns the Key.
    """

    def validate(self, value):
        if value is None or isinstance(value, Key):
            return value

        raise ValueError("KeyProperty only accepts datastore.Key or None")


class UniqueMarker(db.Model):
    instance = KeyProperty()
    created = db.DateTimeProperty(required=True, auto_now_add=True)

    @staticmethod
    def kind():
        return "_djangae_unique_marker"


@db.transactional(propagation=TransactionOptions.INDEPENDENT, xg=True)
def acquire_identifiers(identifiers, entity_key):
    return _acquire_identifiers(identifiers, entity_key)


def _acquire_identifiers(identifiers, entity_key):
    # This must always be in a cross-group transaction, because even if there's
    # only 1 identifier, in the case where that identifier already exists, we
    # then check if its `instance` exists
    assert entity_key

    namespace = entity_key.namespace() or None
    identifier_keys = [
        Key.from_path(UniqueMarker.kind(), identifier, namespace=namespace)
        for identifier in identifiers
    ]

    existing_markers = UniqueMarker.get(identifier_keys)
    markers_to_create = []
    markers = []

    for identifier_key, existing_marker in zip(identifier_keys, existing_markers):
        # Backwards compatibility: we used to create the markers first in an
        # independent transaction and then create the entity and update the
        # `instance` on the markers. This meant that it was possible that the
        # independent marker creation transaction finished first and the outer
        # transaction failed, causing stale markers to be left behind. We no
        # longer do it this way but we still want to ignore any old stale
        # markers, hence if instance is None we overwrite.
        now = datetime.datetime.utcnow()

        if not existing_marker or existing_marker.instance is None:
            markers_to_create.append(UniqueMarker(
                key=identifier_key,
                instance=entity_key,
                created=now
            ))
        elif existing_marker.instance != entity_key and key_exists(existing_marker.instance):
            fields_and_values = identifier_key.name().split("|")
            table_name = fields_and_values[0]
            fields_and_values = fields_and_values[1:]
            fields = [x.split(":")[0] for x in fields_and_values]

            raise IntegrityError(
                "Unique constraint violation for kind {} on fields: {}".format(
                    table_name, ", ".join(fields)))
        elif existing_marker.instance != entity_key:
            markers_to_create.append(UniqueMarker(
                key=identifier_key,
                instance=entity_key,
                created=now
            ))
        else:
            # The marker is ours anyway
            markers.append(existing_marker)

    db.put(markers_to_create)

    return markers + markers_to_create


def get_markers_for_update(model, old_entity, new_entity):
    """
    Given an old entity state, and the new state, updates the identifiers
    appropriately. Should be called before saving the new_state
    """
    old_ids = set(unique_identifiers_from_entity(model, old_entity, ignore_pk=True))
    new_ids = set(unique_identifiers_from_entity(model, new_entity, ignore_pk=True))

    to_release = old_ids - new_ids
    to_acquire = new_ids - old_ids

    return to_acquire, to_release


def update_instance_on_markers(entity, markers):
    # TODO: fix me!
    def update(marker, instance):
        marker = UniqueMarker.get(marker.key())
        if not marker:
            return

        marker.instance = instance
        marker.put()

    @db.transactional(propagation=TransactionOptions.INDEPENDENT, xg=True)
    def update_all():
        instance = entity.key()

        for marker in markers:
            update(marker, instance)

    update_all()


def acquire(model, entity):
    """
    Given a model and entity, this tries to acquire unique marker locks for
    the instance. If the locks already exist then an IntegrityError will be
    thrown.
    """
    identifiers = unique_identifiers_from_entity(model, entity, ignore_pk=True)
    return acquire_identifiers(identifiers, entity.key())


def release_markers(markers):
    """ Delete the given UniqueMarker objects. """

    # Note that these should all be from the same Django model instance, and
    # therefore there should be a maximum of 25 of them (because everything
    # blows up if you have more than that - limitation)
    @db.transactional(propagation=TransactionOptions.INDEPENDENT, xg=len(markers) > 1)
    def txn():
        Delete([marker.key() for marker in markers])

    txn()


def release_identifiers(identifiers, namespace):

    @db.transactional(propagation=TransactionOptions.INDEPENDENT, xg=len(identifiers) > 1)
    def txn():
        _release_identifiers(identifiers, namespace)

    txn()


def _release_identifiers(identifiers, namespace):
    keys = [Key.from_path(UniqueMarker.kind(), x, namespace=namespace) for x in identifiers]
    Delete(keys)


def release(model, entity):
    """ Delete the UniqueMarker objects for the given entity. """
    if not has_active_unique_constraints(model):
        return

    identifiers = unique_identifiers_from_entity(model, entity, ignore_pk=True)
    # Key.from_path expects None for an empty namespace, but Key.namespace() returns ''
    namespace = entity.key().namespace() or None
    release_identifiers(identifiers, namespace=namespace)


@db.transactional(propagation=TransactionOptions.INDEPENDENT, xg=True)
def update_identifiers(to_acquire, to_release, key):
    """ A combination of acquire_identifiers and release_identifiers in a
    combined transaction. """
    _acquire_identifiers(to_acquire, key)
    _release_identifiers(to_release, key.namespace() or None)


class UniquenessMixin(object):
    """ Mixin overriding the methods checking value uniqueness.

    For models defining unique constraints this mixin should be inherited
    from. When iterable (list or set) fields are marked as unique it must be
    used. This is a copy of Django's implementation, save for the part marked
    by the comment.
    """

    def _perform_unique_checks(self, unique_checks):
        errors = {}

        for model_class, unique_check in unique_checks:
            lookup_kwargs = {}

            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)

                if lookup_value is None:
                    continue

                if f.primary_key and not self._state.adding:
                    continue

                ##########################################################################
                # This is a modification to Django's native implementation of this
                # method; we conditionally build a __in lookup if the value is an
                # iterable.
                lookup = str(field_name)

                if isinstance(lookup_value, (list, set, tuple)):
                    lookup = "%s__overlap" % lookup

                lookup_kwargs[lookup] = lookup_value
                ##########################################################################
                # / end of changes

            if len(unique_check) != len(lookup_kwargs):
                continue

            #######################################################
            # Deal with long __in
# Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Hansson

from m5.objects import *
from base_config import *

root = BaseSESystemUniprocessor(mem_mode='atomic',
                                cpu_class=AtomicSimpleCPU,
                                checker=True).create_root()
#!/usr/bin/env python2
# -*- coding: utf-8 -*-

"""
boTAB
=====

This solver uses the popular TAB model to simulate the atomization of droplets

Author: Adam O'Brien
"""

from input import *
from math import exp, cos, sin, sqrt
from fluid import *
from evaporation import *
from TAB import *
from output import *
import copy as cp

def main():

    print ""
    print "boTAB |"
    print "-------"
    print " Compute the break-up of a drop in a uniform cross-flow", "\n"

    # Open up a configuration file
    userInput = readInputFile()

    freestream = Freestream()
    initialDroplet = Droplet()
    dropletInlet = DropletInlet()

    # Set object parameters from the input file
    setObjectParametersFromInput(userInput, freestream, initialDroplet, dropletInlet)

    # Set-up the simulation parameters in accordance with the input
    maxTime = userInput["maxTime"]
    nTimeSteps = userInput["nTimeSteps"]

    # Initialize a droplet list, with one copy of the initial droplet
    droplets = [cp.deepcopy(initialDroplet)]

    # Initialize misc parameters
    dt = maxTime/nTimeSteps
    t = [0.]
    nChildDroplets = 0

    # Begin the simulation
    print "\nBeginning time-stepping..."

    ###########################################################################
    #                                                                         #
    #                         Main Iteration Loop                             #
    #                                                                         #
    ###########################################################################

    for stepNo in range(1, nTimeSteps + 1):

        for droplet in droplets:
            droplet.advectPredictorCorrector(freestream, dt)

        evaporate(freestream, droplets, dt)
        nChildDroplets += breakupTab(freestream, droplets, dt)
        dropletInlet.addDrops(initialDroplet, droplets, dt)

        t.append(t[-1] + dt)

        if stepNo%(nTimeSteps/20) == 0:
            completionPercentage = float(stepNo)/float(nTimeSteps)*100.
            print "-----------------------------------------------------------"
            print "Time-stepping completion     : %s%%"%(completionPercentage)
            print "Number of droplets in domain :", len(droplets)
            print "Simulation time elapsed      : %s seconds"%(t[-1])
            print "Simulation time remaining    : %s seconds"%(maxTime - t[-1])
            print "Number of child drops        :", nChildDroplets

    print "\nTime-stepping complete. Finalizing output..."

    plotDroplets(droplets)

# Execute the main function
if __name__ == "__main__":
    main()
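# A sketch of the input keys main() consumes (only "maxTime" and "nTimeSteps"
# are confirmed by the code above; the values here are illustrative):
#
#   userInput = {"maxTime": 0.01, "nTimeSteps": 1000, ...}
#
# dt then follows as maxTime/nTimeSteps, so the progress report above fires
# every nTimeSteps/20 steps, i.e. at each 5% of simulated time.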
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#####################################################################
# Author: Cristian Segura L.                                        #
# Email: cristian+dot+segura+dot+lepe+arroba+dot+gmail+dot+com      #
# Creation Date: Sat nov 8 20:11:38 CLST 2014                       #
# Version: 0.1                                                      #
# License: GPL v2.0 (check LICENSE file)                            #
# Usage: Installation of Asterisk IP PBX                            #
# Dependencies:                                                     #
#   + Python 2.7                                                    #
#   + wget                                                          #
# Tested on:                                                        #
#   + Ubuntu Desktop 13.10 amd64                                    #
#####################################################################

import subprocess
import time
import datetime
import os
import sys

# Create working directory to download source code
nowDateTime = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
wrkDir = "%s-asterisk11-sources" % (nowDateTime)

if not os.path.exists(wrkDir):
    os.makedirs(wrkDir)
else:
    print "[+] ERROR: Cannot create directory %s" % (wrkDir)
    sys.exit(1)  # the original `exit -1` was a no-op expression; use sys.exit()

#print str(os.path) + wrkDir
# print str(os.getcwd()) + '/' + wrkDir

print "[+] Changing to directory %s" % (wrkDir)
os.chdir(os.getcwd() + '/' + wrkDir)

# Downloading Files
baseURL = "http://downloads.asterisk.org/pub/telephony"

lAstDir = "asterisk"
lAstTgzFile = "asterisk-11-current.tar.gz"
lAstDownPath = baseURL + '/' + lAstDir + '/' + lAstTgzFile

lPriDir = "libpri"
lPriTgzFile = "libpri-1.4-current.tar.gz"
lPriDownPath = baseURL + '/' + lPriDir + '/' + lPriTgzFile

print ""
print "*****************************************************"
print "*                                                   *"
print "*                Downloading LIBPRI                 *"
print "*                                                   *"
print "*****************************************************"
print "[+] downloading file: %s using wget" % (lPriTgzFile)
print ""

subprocess.call(["wget", lPriDownPath])

print ""
print "*****************************************************"
print "*                                                   *"
print "*               Downloading ASTERISK                *"
print "*                                                   *"
print "*****************************************************"
print "[+] downloading file: %s using wget" % (lAstTgzFile)
print ""

subprocess.call(["wget", lAstDownPath])
import factory

from questionnaire.models import Theme


class ThemeFactory(factory.DjangoModelFactory):
    class Meta:
        model = Theme

    name = "A title"
    description = 'Description'
#!/usr/bin/env python

from server import Serve
from utils import get_authenticated_user

import os
import sys

authenticated_user = None

try:
    authenticated_user = get_authenticated_user('server.cfg')
except IOError:
    print ("File 'server.cfg' doesn't exist on disk. Please ensure that it"
           " does and try again.")
    sys.exit(1)
except ValueError:
    print ("'server.cfg' is empty. Please run 'python get_oauth_token.py' prior.")
    sys.exit(1)

# Start appserver
app = Serve(__name__, authenticated_user)
#! /usr/bin/python

class Indexer:
    def __getitem__(self, index):
        return index ** 2

x = Indexer()

for i in range(5):
    print x[i],

class Stepper:
    def __getitem__(self, index):
        return self.data[index]

s = Stepper()
s.data = "spam"

for x in s:
    print x,

print s.data[0]
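
# The same __getitem__ protocol also drives membership tests and list() in
# Python 2: classic classes fall back to indexing from 0 until IndexError.
print
print 'p' in s    # True: 'in' iterates via __getitem__
print list(s)     # ['s', 'p', 'a', 'm']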
DEBUG = True

USE_TZ = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
    }
}

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'custom_user',
    'test_custom_user_subclass',
]

SECRET_KEY = 'not_random'

AUTH_USER_MODEL = 'test_custom_user_subclass.MyCustomEmailUser'
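# Usage sketch: point Django at this module when running the suite (the
# module path below is an assumption about the project layout):
#
#   DJANGO_SETTINGS_MODULE=test_settings django-admin test test_custom_user_subclass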
#-----------------------------------------------------
#
#  Find the next prime number as the user keeps asking
#
#-----------------------------------------------------

import sys

#------------------------
# is the number a prime?
#------------------------
def is_prime(num):
    for i in range(2, num):
        if num % i == 0:
            return False

    if num != 1:
        return True
    else:
        return False

#--------------------
# main
#--------------------
if __name__ == "__main__":
    curr_prime = 1  # initialize the prime number

    while True:
        response = raw_input("Print the next prime? [Y]es ")

        if response.upper().startswith('Y'):
            while True:
                curr_prime += 1
                if is_prime(curr_prime):
                    print curr_prime
                    break
        else:
            break
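
#------------------------------------------------------
# a faster variant (sketch): trial division only needs
# to check divisors up to the square root of num
#------------------------------------------------------
#def is_prime(num):
#    if num < 2:
#        return False
#    for i in range(2, int(num ** 0.5) + 1):
#        if num % i == 0:
#            return False
#    return True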
"""Views for the monitoring app.""" from django.contrib.auth.decorators import login_required from django.db.models import Count from django.http import Http404 from django.utils.decorators impor
t method_decorator from django.views.generic import ListView, TemplateView from .register import monitor class MonitoringViewMixin(object): """Helper methods that all monitoring base views need.""" view_name = None monitor_titl
e = None @method_decorator(login_required) def dispatch(self, request, *args, **kwargs): # pragma: no cover if not request.user.is_staff: raise Http404 self.request = request self.monitor_name = request.GET.get('monitor') return super(MonitoringViewMixin, self).dispatch( request, *args, **kwargs) def get_context_data(self, **kwargs): ctx = super(MonitoringViewMixin, self).get_context_data(**kwargs) ctx.update({ 'monitor_title': self.monitor_title, 'monitor_name': self.monitor_name, }) return ctx def get_template_names(self): """ Returns the template name for the view based on the view's model. """ return [self.model.get_template_name(), ] def get_view_name(self): """ Returns the view name based on the view's model. If you have set the ``view_name`` attribute on the view, that will be returned instead. """ if self.view_name is not None: return self.view_name return 'monitoring_{0}'.format(self.model.__name__.lower()) class IntegerCountView(MonitoringViewMixin, ListView): """Default view for the ``IntegerCountBase`` monitor model.""" monitor_title = 'Integer Count' def get_queryset(self): qs = super(IntegerCountView, self).get_queryset() qs = qs.values('date_created').annotate( count=Count('date_created')).distinct() return qs class MonitoringView(TemplateView): template_name = 'monitoring/index.html' @method_decorator(login_required) def dispatch(self, request, *args, **kwargs): if not request.user.is_staff: raise Http404 self.request = request return super(MonitoringView, self).dispatch( request, *args, **kwargs) def get_context_data(self, **kwargs): ctx = super(MonitoringView, self).get_context_data(**kwargs) ctx.update({ 'monitor': monitor, }) return ctx
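
# Subclassing sketch (the model name is hypothetical; any concrete
# ``IntegerCountBase`` subclass from your app would slot in here):
#
#   class SignupCountView(IntegerCountView):
#       model = SignupCount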
""" Page classes to test either the Course Team page or the Library Team page. """ from bok_choy.promise import EmptyPromise from bok_choy.page_object import PageObject from ...tests.helpers import disable_animations from . import BASE_URL def wait_for_ajax_or_reload(browser): """ Wait for all ajax requests to finish, OR for the page to reload. Normal wait_for_ajax() chokes on occasion if the pages reloads, giving "WebDriverException: Message: u'jQuery is not defined'" """ def _is_ajax_finished(): """ Wait for jQuery to finish all AJAX calls, if it is present. """ return browser.execute_script("return typeof(jQuery) == 'undefined' || jQuery.active == 0") EmptyPromise(_is_ajax_finished, "Finished waiting for ajax requests.").fulfill() class UsersPage(PageObject): """ Base class for either the Course Team page or the Library Team page """ def __init__(self, browser, locator): super(UsersPage, self).__init__(browser) self.locator = locator @property def url(self): """ URL to this page - override in subclass """ raise NotImplementedError def is_browser_on_page(self): """ Returns True iff the browser has loaded the page. """ return self.q(css='body.view-team').present @property def users(self): """ Return a list of users listed on this page. """ return self.q(css='.user-list .user-item').map( lambda el: UserWrapper(self.browser, e
l.get_attribute('data-email')) ).results @property def has_add_button(self): """ Is the "New Team Member" button present? """ return self.q(css='.create-user-button').present def click_add_butto
n(self): """ Click on the "New Team Member" button """ self.q(css='.create-user-button').click() @property def new_user_form_visible(self): """ Is the new user form visible? """ return self.q(css='.form-create.create-user .user-email-input').visible def set_new_user_email(self, email): """ Set the value of the "New User Email Address" field. """ self.q(css='.form-create.create-user .user-email-input').fill(email) def click_submit_new_user_form(self): """ Submit the "New User" form """ self.q(css='.form-create.create-user .action-primary').click() wait_for_ajax_or_reload(self.browser) class LibraryUsersPage(UsersPage): """ Library Team page in Studio """ @property def url(self): """ URL to the "User Access" page for the given library. """ return "{}/library/{}/team/".format(BASE_URL, unicode(self.locator)) class UserWrapper(PageObject): """ A PageObject representing a wrapper around a user listed on the course/library team page. """ url = None COMPONENT_BUTTONS = { 'basic_tab': '.editor-tabs li.inner_tab_wrap:nth-child(1) > a', 'advanced_tab': '.editor-tabs li.inner_tab_wrap:nth-child(2) > a', 'save_settings': '.action-save', } def __init__(self, browser, email): super(UserWrapper, self).__init__(browser) self.email = email self.selector = '.user-list .user-item[data-email="{}"]'.format(self.email) def is_browser_on_page(self): """ Sanity check that our wrapper element is on the page. """ return self.q(css=self.selector).present def _bounded_selector(self, selector): """ Return `selector`, but limited to this particular user entry's context """ return '{} {}'.format(self.selector, selector) @property def name(self): """ Get this user's username, as displayed. """ return self.q(css=self._bounded_selector('.user-username')).text[0] @property def role_label(self): """ Get this user's role, as displayed. """ return self.q(css=self._bounded_selector('.flag-role .value')).text[0] @property def is_current_user(self): """ Does the UI indicate that this is the current user? """ return self.q(css=self._bounded_selector('.flag-role .msg-you')).present @property def can_promote(self): """ Can this user be promoted to a more powerful role? """ return self.q(css=self._bounded_selector('.add-admin-role')).present @property def promote_button_text(self): """ What does the promote user button say? """ return self.q(css=self._bounded_selector('.add-admin-role')).text[0] def click_promote(self): """ Click on the button to promote this user to the more powerful role """ self.q(css=self._bounded_selector('.add-admin-role')).click() wait_for_ajax_or_reload(self.browser) @property def can_demote(self): """ Can this user be demoted to a less powerful role? """ return self.q(css=self._bounded_selector('.remove-admin-role')).present @property def demote_button_text(self): """ What does the demote user button say? """ return self.q(css=self._bounded_selector('.remove-admin-role')).text[0] def click_demote(self): """ Click on the button to demote this user to the less powerful role """ self.q(css=self._bounded_selector('.remove-admin-role')).click() wait_for_ajax_or_reload(self.browser) @property def can_delete(self): """ Can this user be deleted? """ return self.q(css=self._bounded_selector('.action-delete:not(.is-disabled) .remove-user')).present def click_delete(self): """ Click the button to delete this user. """ disable_animations(self) self.q(css=self._bounded_selector('.remove-user')).click() # We can't use confirm_prompt because its wait_for_ajax is flaky when the page is expected to reload. 
self.wait_for_element_visibility('.prompt', 'Prompt is visible') self.wait_for_element_visibility('.prompt .action-primary', 'Confirmation button is visible') self.q(css='.prompt .action-primary').click() wait_for_ajax_or_reload(self.browser) @property def has_no_change_warning(self): """ Does this have a warning in place of the promote/demote buttons? """ return self.q(css=self._bounded_selector('.notoggleforyou')).present @property def no_change_warning_text(self): """ Text of the warning seen in place of the promote/demote buttons. """ return self.q(css=self._bounded_selector('.notoggleforyou')).text[0]
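
# Usage sketch (assumes a running Studio instance and a bok-choy browser
# session; `library_key` is hypothetical):
#
#   page = LibraryUsersPage(browser, library_key).visit()
#   if page.has_add_button:
#       page.click_add_button()
#       page.set_new_user_email('staff@example.com')
#       page.click_submit_new_user_form()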