id
stringlengths
1
8
text
stringlengths
6
1.05M
dataset_id
stringclasses
1 value
3411259
<gh_stars>0 import logging import json import os import shutil import threading import traceback from typing import Iterable from xmlrpc.client import ServerProxy import numpy as np from skimage.external.tifffile import imread from skimage.transform import AffineTransform import matplotlib.pyplot as plt import matplotlib.patches as mpatches from IPython import embed # for debugging from simple_detection import read_bf import nis_util from annotation import manually_correct_rois from simple_detection import scale_bbox ND2_SUFFIX = '.nd2' TIFF_SUFFIX = '.tif' def copy_lock(src, dst, copyfun=shutil.copy2, lock_ending='lock'): lock_file = '.'.join([dst if not os.path.isdir(dst) else os.path.join(dst, src.rsplit(os.sep, 1)[-1]), lock_ending]) fd = open(lock_file, 'w') fd.close() copyfun(src, dst) os.remove(lock_file) def copy_lock_to_dir(src, dst, copyfun=shutil.copy2, lock_ending='lock'): if not isinstance(src, list): src = [src] if not os.path.exists(dst): os.makedirs(dst) if os.path.isfile(dst): raise ValueError('destination has to be a dirctory') for s in src: shutil.copy2(s, dst)#, copyfun, lock_ending) def _pix2unit(x, transform): """ transform a point from pixel coordinates to NIS stage coordinates, taking into account offsets, fov, camera rotation or image flipping Parameters ---------- x: 2-tuple point to transform, in pixels transform: AffineTransform affine transform pixel -> stage Returns ------- x_tr: array-like transformed point, in units """ logger = logging.getLogger(__name__) res = np.squeeze(transform(x)) logger.debug('transformed point {} (pixels) to {} (units)'.format(x, res)) return res def bbox_pix2unit(bbox, transform): """ Parameters ---------- x: 4-tuple point to transform, in pixels transform: AffineTransform affine transform pixel -> stage Returns ------- bbox_tr: 4-tuple transformed bounding box (ymin, xmin, ymax, xmax - in units) """ logger = logging.getLogger(__name__) # transform bbox (ymin, xmin, ymax, xmax) = bbox bbox_tr = 
np.apply_along_axis(lambda x: _pix2unit(x, transform), 1, np.array([[xmin, ymin], [xmin, ymax], [xmax, ymin], [xmax, ymax]], dtype=float) ) # get new min max min_ = np.apply_along_axis(np.min, 0, bbox_tr) max_ = np.apply_along_axis(np.max, 0, bbox_tr) logger.debug('new min: {}, new max: {}'.format(min_, max_)) # NB: we reverse here to preserve original ymin, xmin, ymax, xmax - order bbox_tr_arr = np.array([list(reversed(list(min_))), list(reversed(list(max_)))], dtype=float) res = bbox_tr_arr.ravel() logger.debug('bbox: {}, toUnit: {}'.format(bbox, res)) return tuple(list(res)) class WidgetProgressIndicator: """ thin wrapper around an ipywidgets widget to display progress, e.g. progress bar and an optional (text) status widget Parameters ---------- progress_widget: ipywidgets widget widget to display progress, e.g. FloatProgress status_widget: ipywidgets widget, optional widget to display a status message, e.g. Label min: numeric, optional value of progress_widget corresponding to 0 max: numeric, optional value of progress_widget corresponding to 1, default 100 """ def __init__(self, progress_widget, status_widget=None, min=0, max=100): self.progress_widget = progress_widget self.status_widget = status_widget self.min = min self.max = max def set_progress(self, p): """ update progress Parameters ---------- p: float \in 0,1 percent complete value to set """ self.progress_widget.value = self.min + p * (self.max - self.min) def set_status(self, status): """ update status status: string status message """ if self.status_widget is not None: self.status_widget.value = status class CommonParameters(object): def __init__(self): self.prefix = 'myExperiment' self.path_to_nis = '/dev/null' self.server_path_local = '/dev/null' self.save_base_path = '/dev/null' self.server_path_remote = '/dev/null' self.progress_indicator : WidgetProgressIndicator = None class OverviewParameters(object): def __init__(self): self.export_as_tiff = False self.field_def_file = '/dev/null' 
self.manual_z = None self.oc_overview = 'oc1' self.return_overview_img = True self._re_use_ov = False class DetectionParameters(object): def __init__(self): self.do_manual_annotation = False self.detector_adress = 'http://eco-gpu:8000/' self.plot_detection = True # TODO: remove self.object_filter = { 'area': (15000, 80000) } class DetailParameters(object): def __init__(self): self.ocs_detail = [] self.stitcher_adress = 'http://eco-gpu:8001/' self.auto_focus_detail = True self.channel_for_autofocus = 0 self.channel_for_stitch = 0 # TODO: make True always self.tiff_export_details = True # TODO: do actual parameters, not just project yes/no self.projection_params = True self.dry_run_details = False self.z_range = 1 self.z_step = 1 self.z_drive = "" def _do_overview(config: CommonParameters, ov_parameters: OverviewParameters): # keep track of all copy threads so we can join on exit logger = logging.getLogger(__name__) # skip if result folder already exists on server if os.path.exists(os.path.join(config.server_path_local, config.prefix)): if not ov_parameters._re_use_ov: # skip overwrite check if we re-use overview (we are probably debuging) raise ValueError( 'Slide {} was already imaged. 
Either rename the acquisition or clean old acquisition from server'.format( config.prefix)) # reset progress indicator progress_indicator = config.progress_indicator if progress_indicator is not None: progress_indicator.set_progress(0.0) progress_indicator.set_status('doing overview scan') with open(ov_parameters.field_def_file, 'r') as fd: field_calib = json.load(fd) # user specified manual focus position if not ov_parameters.manual_z is None: field_calib['zpos'] = ov_parameters.manual_z # go to defined z position nis_util.set_position(config.path_to_nis, pos_z=field_calib['zpos']) # get field and directions # NB: this is not the actual field being scanned, but rather [min+1/2 fov - max-1/2fov] (left, right, top, bottom) = tuple(field_calib['bbox']) # direction of stage movement (y,x) # TODO: remove if not necessary #direction = [1 if top < bottom else -1, 1 if left < right else -1] # set overview optical configuration nis_util.set_optical_configuration(config.path_to_nis, ov_parameters.oc_overview) # get resolution, binning and fov (xres, yres, siz, mag) = nis_util.get_resolution(config.path_to_nis) live_fmt, capture_fmt = nis_util.get_camera_format(config.path_to_nis) color = nis_util.is_color_camera(config.path_to_nis) # we have to parse capture_fmt differently for color and gray camera # TODO: extract to separate function if color: binning_factor = 1.0 if not '1/3' in capture_fmt else 3.0 else: binning_factor = float(capture_fmt.split()[1].split('x')[0]) fov_x = xres * siz / mag * binning_factor fov_y = yres * siz / mag * binning_factor logger.debug('overview resolution: {}, {}, {}, {}'.format(xres, yres, siz, mag)) logger.debug('overview fov: {}, {}'.format(fov_x, fov_y)) # do overview scan # TODO: save directly to server, not c: ov_path = os.path.join(config.save_base_path, config.prefix + '_overview' + ND2_SUFFIX) if not ov_parameters._re_use_ov: nis_util.do_large_image_scan(config.path_to_nis, ov_path, left, right, top, bottom) if 
ov_parameters.export_as_tiff: nis_util.export_nd2_to_tiff(config.path_to_nis, ov_path, combine_c=color) tiff_ov_path = ov_path[:-len(ND2_SUFFIX)] + TIFF_SUFFIX # make root folder for slide on server, skip if it already exists if not os.path.exists(os.path.join(config.server_path_local, config.prefix)): os.makedirs(os.path.join(config.server_path_local, config.prefix)) # export optical configurations to server (we save it for every slide) nis_util.backup_optical_configurations(config.path_to_nis, os.path.join(config.server_path_local, config.prefix, 'optical_configurations.xml')) # copy field definition to server shutil.copy2(ov_parameters.field_def_file, os.path.join(config.server_path_local, config.prefix)) # keep image in memory if we need it # TODO: maybe read directly from server? if ov_parameters.return_overview_img: if ov_parameters.export_as_tiff: img = imread(tiff_ov_path) else: img = read_bf(ov_path) # async copy to server def copy_ov_call(): # copy to server mount _ov_path = ov_path _tiff_ov_path = tiff_ov_path _prefix = config.prefix if not os.path.exists(os.path.join(config.server_path_local, _prefix, 'overviews')): os.makedirs(os.path.join(config.server_path_local, _prefix, 'overviews')) if not ov_parameters.export_as_tiff: copy_lock(_ov_path, os.path.join(config.server_path_local, _prefix, 'overviews')) else: copy_lock(_tiff_ov_path, os.path.join(config.server_path_local, _prefix, 'overviews')) # remove local copies of overviews os.remove(_ov_path) if ov_parameters.export_as_tiff: os.remove(_tiff_ov_path) # copy in separate thread # TODO: probably not really necessary, remove copy_ov_thread = threading.Thread(target=copy_ov_call) copy_ov_thread.start() # NB: we have to wait for copy to complete before we initialize the detection on server copy_ov_thread.join() if ov_parameters.return_overview_img: return img def _do_detection(config: CommonParameters, ov_parameters: OverviewParameters, det_params: DetectionParameters, img=None): logger = 
logging.getLogger(__name__) logger.info('finished overview, detecting wings...') with open(ov_parameters.field_def_file, 'r') as fd: field_calib = json.load(fd) # pixel to world coordinates transformation from 3-point calibration stored in field_calib file coords_px = np.array(field_calib['coords_px'], dtype=np.float) coords_st = np.array(field_calib['coords_st'], dtype=np.float) at = AffineTransform() at.estimate(coords_px, coords_st) _suffix = TIFF_SUFFIX if ov_parameters.export_as_tiff else ND2_SUFFIX remote_path = '/'.join([config.server_path_remote, config.prefix, 'overviews', config.prefix + '_overview' + _suffix]) progress_indicator = config.progress_indicator if progress_indicator is not None: progress_indicator.set_status('detecting wings') # where to save the segmentation to label_export_path = '/'.join([config.server_path_remote, config.prefix, 'overviews', config.prefix + '_segmentation' + TIFF_SUFFIX]) # do the detection with ServerProxy(det_params.detector_adress) as proxy: try: bboxes = proxy.detect_bbox(remote_path, det_params.object_filter, label_export_path) except Exception as e: bboxes = None traceback.print_exc() bboxes = [] if bboxes is None else bboxes print(bboxes) if det_params.do_manual_annotation: if len(bboxes)==0: annots = manually_correct_rois(img, [], [1]) else: annots = manually_correct_rois(img, [[x0, y0, x1 - x0, y1 - y0] for (y0, x0, y1, x1) in bboxes], [1] * len(bboxes)) annotation_out = os.path.join(config.server_path_local, config.prefix, 'overviews', config.prefix + '_manualrois.json') with open(annotation_out, 'w') as fd: json.dump([a.to_json() for a in annots], fd) bboxes = [a.roi for a in annots] logger.debug(bboxes) # change to other format bboxes = [[y0, x0, y0 + h, x0 + w] for (x0, y0, w, h) in bboxes] logger.debug(bboxes) # save rois, regardless of wheter we did manual annotation or not annotation_out = os.path.join(config.server_path_local, config.prefix, 'overviews', config.prefix + '_autorois.json') bboxes_json = 
[{'y0': int(y0), 'y1': int(y1), 'x0': int(x0), 'x1': int(x1)} for (y0, x0, y1, x1) in bboxes] with open(annotation_out, 'w') as fd: json.dump(bboxes_json, fd) if det_params.plot_detection and img is not None: plt.figure() plt.imshow(img) # extract binning factor again # set overview optical configuration nis_util.set_optical_configuration(config.path_to_nis, ov_parameters.oc_overview) # get resolution, binning and fov (xres, yres, siz, mag) = nis_util.get_resolution(config.path_to_nis) live_fmt, capture_fmt = nis_util.get_camera_format(config.path_to_nis) color = nis_util.is_color_camera(config.path_to_nis) # we have to parse capture_fmt differently for color and gray camera # TODO: extract to separate function if color: binning_factor = 1.0 if not '1/3' in capture_fmt else 3.0 else: binning_factor = float(capture_fmt.split()[1].split('x')[0]) bboxes_scaled = [] for bbox in bboxes: # upsample bounding boxes if necessary bbox_scaled = np.array(tuple(bbox)) * binning_factor logger.debug('bbox: {}, upsampled: {}'.format(bbox, bbox_scaled)) bboxes_scaled.append(bbox_scaled) # plot bbox if det_params.plot_detection and img is not None: minr, minc, maxr, maxc = tuple(list(bbox)) rect = mpatches.Rectangle((minc, minr), maxc - minc, maxr - minr, fill=False, edgecolor='red', linewidth=2) plt.gca().add_patch(rect) if det_params.plot_detection and img is not None: plt.show() # use scaled bboxes from here on bboxes = bboxes_scaled # pixels to units bboxes = [bbox_pix2unit(b, at) for b in bboxes] # expand bounding boxes bboxes = [scale_bbox(bbox, expand_factor=.2) for bbox in bboxes] logger.info('detected {} wings:'.format(len(bboxes))) return bboxes def _do_detail(bboxes, config: CommonParameters, detail_params: DetailParameters): logger = logging.getLogger(__name__) threads = [] try: # scan the individual wings for idx, bbox in enumerate(bboxes): logger.info('scanning wing {}: {}'.format(idx, bbox)) (ymin, xmin, ymax, xmax) = bbox # TODO: do we need direction? 
''' (ymin, xmin, ymax, xmax) = (ymin if direction[0] > 0 else ymax, xmin if direction[1] > 0 else xmax, ymin if direction[0] < 0 else ymax, xmin if direction[1] < 0 else xmax) ''' # set overview optical configuration nis_util.set_optical_configuration(config.path_to_nis, detail_params.ocs_detail[detail_params.channel_for_stitch]) color = nis_util.is_color_camera(config.path_to_nis) # set oc so we have correct magnification nis_util.set_optical_configuration(config.path_to_nis, detail_params.ocs_detail[detail_params.channel_for_autofocus]) # do autofocus -> move to wing center and focus if detail_params.auto_focus_detail: x_center = (xmin + xmax) / 2 y_center = (ymin + ymax) / 2 nis_util.set_position(config.path_to_nis, [x_center, y_center]) nis_util.do_autofocus(config.path_to_nis) wing_path = os.path.join(config.save_base_path, config.prefix + '_wing' + str(idx) + ND2_SUFFIX) # set to first oc so we have correct magnification nis_util.set_optical_configuration(config.path_to_nis, detail_params.ocs_detail[detail_params.channel_for_autofocus]) # get resolution (xres, yres, siz, mag) = nis_util.get_resolution(config.path_to_nis) fov = nis_util.get_fov_from_res(nis_util.get_resolution(config.path_to_nis)) logger.debug('detail resolution: {}, {}, {}, {}'.format(xres, yres, siz, mag)) logger.debug('fov: {}'.format(fov)) # get fov fov_x = xres * siz / mag fov_y = yres * siz / mag # generate the coordinates of the tiles grid, tilesX, tilesY, overlap = nis_util.gen_grid(fov, [xmin, ymin], [xmax, ymax], 0.15, True, True, True) for g in grid: logger.debug('wing {}: will scan tile at {}'.format(idx - 1, g)) # do not actually do the detail acquisition if detail_params.dry_run_details: continue # NB: we have multiple channels, so we have to do # manual grid acquisition via multipoint nD acquisition -> has to be stitched afterwards # we scan around current z -> get that pos = nis_util.get_position(config.path_to_nis) # setup nD acquisition nda = nis_util.NDAcquisition(wing_path) 
nda.set_z(int(detail_params.z_range / 2), int(detail_params.z_range / 2), int(detail_params.z_step), detail_params.z_drive) nda.add_points(map(lambda x: (x[0], x[1], pos[2] - pos[3]), grid)) for oc in detail_params.ocs_detail: nda.add_c(oc) nda.prepare(config.path_to_nis) nda.run(config.path_to_nis) if detail_params.tiff_export_details: wing_out_dir = wing_path[:-len(ND2_SUFFIX)] if not os.path.exists(wing_out_dir): os.makedirs(wing_out_dir) nis_util.export_nd2_to_tiff(config.path_to_nis, wing_path, wing_out_dir) def copy_details(): # copy to server mount _wing_path = wing_path _wing_out_dir = wing_out_dir _tilesX, _tilesY, _overlap = tilesX, tilesY, overlap _prefix = config.prefix # make raw data directory on server if not os.path.exists(os.path.join(config.server_path_local, _prefix, 'raw')): os.makedirs(os.path.join(config.server_path_local, _prefix, 'raw')) # copy raw data to server logger.debug('about to copy nd2') shutil.copy2(_wing_path, os.path.join(config.server_path_local, _prefix, 'raw')) logger.debug('done copy nd2') if detail_params.tiff_export_details: files = [os.path.join(_wing_out_dir, f) for f in os.listdir(_wing_out_dir) if os.path.isfile(os.path.join(_wing_out_dir, f))] logger.debug('about to copy tiff') copy_lock_to_dir(files, os.path.join(os.path.join(config.server_path_local, _prefix, 'raw'), _wing_out_dir.rsplit(os.sep)[-1])) logger.debug('done copy') remote_path = '/'.join([config.server_path_remote, _prefix, 'raw', _wing_out_dir.rsplit(os.sep)[-1] if detail_params.tiff_export_details else _wing_path.rsplit(os.sep)[-1]]) # make directories for final stitched files if necessary for oc in detail_params.ocs_detail: if not os.path.exists(os.path.join(config.server_path_local, _prefix, oc)): os.makedirs(os.path.join(config.server_path_local, _prefix, oc)) # parameters for cleanup # move stitching to oc directories, delete raw tiffs and temporary stitching folder # FIXME: check if we mix color and grayscale, error then! 
if color: cleanup_params = {'stitching_path': remote_path + '_stitched', 'outpaths': ['/'.join([config.server_path_remote, _prefix, oc]) for oc in detail_params.ocs_detail] * 3, 'outnames': [_wing_out_dir.rsplit(os.sep)[-1] + '_' + rgb_suffix + TIFF_SUFFIX for rgb_suffix in ['r', 'g', 'b']], 'raw_paths': [remote_path], 'delete_raw': True, 'delete_stitching': True } else: cleanup_params = {'stitching_path': remote_path + '_stitched', 'outpaths': ['/'.join([config.server_path_remote, _prefix, oc]) for oc in detail_params.ocs_detail], 'outnames': [_wing_out_dir.rsplit(os.sep)[-1] + TIFF_SUFFIX] * len(detail_params.ocs_detail), 'raw_paths': [remote_path], 'delete_raw': True, 'delete_stitching': True } with ServerProxy(detail_params.stitcher_adress) as proxy: proxy.stitch([remote_path, _tilesX, _tilesY, _overlap, detail_params.channel_for_stitch + 1 if not color else 'RGB'], detail_params.tiff_export_details, cleanup_params, detail_params.projection_params) # cleanup local os.remove(_wing_path) if detail_params.tiff_export_details: shutil.rmtree(_wing_out_dir) copy_det_thread = threading.Thread(target=copy_details) threads.append(copy_det_thread) copy_det_thread.start() # update progress progress_indicator = config.progress_indicator if progress_indicator is not None: progress_indicator.set_progress((idx + 1) / len(bboxes)) progress_indicator.set_status('scanning wing {}'.format(idx + 1)) except KeyboardInterrupt: logger.info('Interrupted by user, stopping...') except Exception: traceback.print_exc() finally: progress_indicator = config.progress_indicator if progress_indicator is not None: progress_indicator.set_progress(1.0) progress_indicator.set_status('finishing copy to server') logger.info('Waiting for all copy threads to finish...') for t in threads: t.join() logger.info('Done.') def do_scan(configs: Iterable[CommonParameters], ov_params: Iterable[OverviewParameters], det_params: Iterable[DetectionParameters], detail_params: Iterable[DetailParameters], 
callback_aftereach=None, callback_beforeeach=None): for (config, ov_param, det_param, detail_param) in zip (configs, ov_params, det_params, detail_params): if callback_beforeeach is not None: callback_beforeeach() img = _do_overview(config, ov_param) bboxes = _do_detection(config, ov_param, det_param, img) _do_detail(bboxes, config, detail_param) if callback_aftereach is not None: callback_aftereach() def do_scan_detection_first(configs: Iterable[CommonParameters], ov_params: Iterable[OverviewParameters], det_params: Iterable[DetectionParameters], detail_params: Iterable[DetailParameters]): bboxes_acc = [] for (config, ov_param, det_param) in zip(configs, ov_params, det_params): img = _do_overview(config, ov_param) bboxes = _do_detection(config, ov_param, det_param, img) bboxes_acc.append(bboxes) for (config, detail_param, bboxes) in zip(configs, detail_params, bboxes_acc): _do_detail(bboxes, config, detail_param)
StarcoderdataPython
89432
# -*- coding: UTF-8 -*- import arcpy import re import os import codecs #ツール定義 class FeatureToWKTCSV(object): def __init__(self): self.label = _("Feature To UTF-8 WKT CSV") self.description = _("Creates a UTF-8 WKT CSV from specified features.") self.category = _("DataManagement") self.canRunInBackground = False def getParameterInfo(self): param0 = arcpy.Parameter( displayName=_("Input Features"), name="in_layer", datatype="GPFeatureLayer", parameterType="Required", direction="Input") param0.filter.list = ["Point", "Multipoint", "Polyline", "Polygon"] param1 = arcpy.Parameter( displayName=_("Output CSV"), name="out_csv", datatype="DEFile", parameterType="Required", #parameterType="Derived", direction="Output") param1.filter.list = ['csv', 'txt'] params = [param0, param1] return params def isLicensed(self): return True def updateParameters(self, parameters): return def updateMessages(self, parameters): return def execute(self, parameters, messages): inFeatures = parameters[0].valueAsText outCsv = parameters[1].valueAsText inDesc = arcpy.Describe(inFeatures) if (inDesc.dataType == "FeatureLayer"): inDesc = inDesc.featureClass fields = arcpy.ListFields(inFeatures) shpField = "" fieldList = [] typeList = [] for field in fields: if (field.type == "Geometry"): shpField = field.name else: fieldList.append(field.name) typeList.append(field.type) fieldList.append("SHAPE@WKT") with arcpy.da.SearchCursor(inFeatures, fieldList) as cursor: with codecs.open(outCsv, 'w', 'utf-8') as csvFile: #header for field in fieldList: if field != "SHAPE@WKT": csvFile.write( field ) csvFile.write( "," ) else: csvFile.write( "WKT" ) csvFile.write( '\r\n' ) isFirstTime = True for row in cursor: if (isFirstTime): isFirstTime = False else: csvFile.write( '\r\n' ) index = 0 for ftype in typeList: val = row[index] if ftype != 'SmallInteger' \ and ftype != 'Integer' \ and ftype != 'Single' \ and ftype != 'Double' \ and ftype != 'OID': csvFile.write( '"' ) if val in '"': val = val.replace('"', '""') 
csvFile.write( val ) csvFile.write( '"' ) else: csvFile.write( str(val) ) csvFile.write( ',' ) index=index+1 #end loop field if row[-1]: csvFile.write( '"' ) csvFile.write( re.sub( '\r\n|\n|\r', '', row[-1] ) ) csvFile.write( '"' ) else: messages.addWarningMessage( str(row[0]) + " None Geometry")
StarcoderdataPython
245042
<filename>tsp.py # -*- encoding: utf-8 -*- """ Traveling Salesman Problem related utilities. """ import re from random import randint from math import pi as M_PI from math import cos, acos from misc import array_double, array_bool def geo_distance(x1, y1, x2, y2): """ Compute geometric distance between two nodes rounded to next integer for TSPLIB instances. Based on the ACOTSP by <NAME> """ deg = int(x1) minute = x1 - deg lati = M_PI * (deg + 5.0 * minute / 3.0) / 180.0 deg = int(x2) minute = x2 - deg latj = M_PI * (deg + 5.0 * minute / 3.0) / 180.0 deg = int(y1) minute = y1 - deg longi = M_PI * (deg + 5.0 * minute / 3.0) / 180.0 deg = int(y2) minute = y2 - deg longj = M_PI * (deg + 5.0 * minute / 3.0) / 180.0 q1 = cos(longi - longj) q2 = cos(lati - latj) q3 = cos(lati + latj) dd = int(6378.388 * acos(0.5 * ((1.0 + q1) * q2 - (1.0 - q1) * q3)) + 1.0) return dd def euclidean_distance(x1, y1, x2, y2): """ Returns Euclidean distance between two points rounded to the nearest integer. """ return int(((x2-x1)**2 + (y2-y1)**2)**0.5 + 0.5) def calc_dist_matrix_euc_2d(coords, dim, distance_function): """ Calculates a 2d matrix of Euclidean distances from list of coordinates. """ initial_values = [-1.0 for i in range(dim)] matrix = [array_double(initial_values) for j in range(dim)] for i in range(dim): for j in range(dim): if i < j: x1, y1 = coords[i] x2, y2 = coords[j] dist = distance_function(x1, y1, x2, y2) matrix[i][j] = dist elif i > j: matrix[i][j] = matrix[j][i] print('First row sum:', sum(matrix[0])) return matrix def load_tsplib_instance(file_path): """ Loads a (A)TSP instance data from a file using TSPLIB format. The current version supports only EUC_2D, GEO and EXPLICIT format of edge weights. 
""" coord_re = re.compile(r'\s*\d+\s[\d.e+-]+\s[\d.e+-]+\s*') header_re = re.compile(r'\s*([^:]+):\s*(.+)') coordinates = [] desc = {} dist_matrix = None with open(file_path, 'r') as file_: lines = iter(file_.readlines()) # Read header section for line_raw in lines: line = line_raw.strip().lower() if header_re.match(line): match = header_re.match(line) key, val = match.groups() desc[key.strip()] = val.strip() else: break # Now read edge wegiths section dimension = int(desc['dimension']) weights_section_header = line if weights_section_header == 'node_coord_section': for line in lines: if line == 'eof': break if coord_re.match(line): _, x, y = re.split(r'\s+', line.strip()) coordinates.append((float(x), float(y))) elif weights_section_header == 'edge_weight_section': if desc['edge_weight_format'] == 'full_matrix': glued = ' '.join(lines) tokens = re.split(r'\s+', glued.strip()) if tokens[-1] == 'EOF': del tokens[-1] weights = map(float, tokens) dist_matrix = [weights[i:i+dimension] for i in range(0, dimension**2, dimension)] else: raise RuntimeError('Cannot read edge weight section') else: raise RuntimeError('Cannot read edge weight section') desc['coordinates'] = coordinates desc['dimension'] = dimension distance_function = None edge_weight_type = desc['edge_weight_type'] if edge_weight_type == 'euc_2d': distance_function = euclidean_distance elif edge_weight_type == 'geo': distance_function = geo_distance elif edge_weight_type != 'explicit': print("Unknown edge weight type") return None if not dist_matrix: dist_matrix = calc_dist_matrix_euc_2d(coordinates, dimension, distance_function) desc['dist_matrix'] = dist_matrix desc['is_symmetric'] = (desc['type'] == 'tsp') return desc class Ant(object): """ Ant represents a solution to a TSP problem. 
""" def __init__(self, node_count): self.node_count = node_count self.visited = [] self.unvisited_mask = array_bool(True for _ in range(node_count)) self.value = 0.0 def is_visited_node(self, node): """ Returns True if node was already visited. """ return not self.unvisited_mask[node] def add_visited_node(self, node): """ Appends a node to the current (partial) solution. """ assert not self.is_visited_node(node) self.visited.append(node) self.unvisited_mask[node] = False def goto_initial_node(self): self.add_visited_node(randint(0, self.node_count-1)) class TSP(object): """ Holds TSP data. """ def __init__(self, instance_data, is_symmetric=True): self.dimension = instance_data['dimension'] self.dist_matrix = instance_data['dist_matrix'] self.is_symmetric = instance_data['is_symmetric'] print('is_symmetric', self.is_symmetric) def evaluate_solution(self, route): """ Returns a value of a solution - it is equal to the length of the route. """ dist_matrix = self.dist_matrix prev = route[-1] total = 0.0 for node in route: total += dist_matrix[prev][node] prev = node return total def create_ant(self): """ Returns a new ant representing an empty solution to the problem described by this instance. """ return Ant(self.dimension) def build_greedy_solution(self): """ Returns a greedily built solution. It starts from node 0 and then goes to a closest available node, and so forth. """ ant = self.create_ant() start_node = 0 remaining_nodes = range(1, self.dimension) ant.add_visited_node(start_node) prev = start_node while remaining_nodes: distances = self.dist_matrix[prev] closest = min(remaining_nodes, key=lambda node: distances[node]) ant.add_visited_node(closest) remaining_nodes.remove(closest) prev = closest return ant
StarcoderdataPython
4969967
#!/usr/bin/env python3 import initIOCs #------------------------------------------------- #---------------- MAIN GUI CLASSES --------------- #------------------------------------------------- # Include guard in case user doesn't have tkinter installed but still wants to use the CLI version WITH_GUI=True try: from tkinter import * from tkinter import messagebox from tkinter import simpledialog import tkinter.scrolledtext as ScrolledText from tkinter import font as tkFont from tkinter import ttk import threading import webbrowser except ImportError: WITH_GUI=False class ToolTip: """ Class for handling tool tips in the initIOC GUI. Attributes ---------- widget : tkinter widget target widget for which to display tooltip tipwindow : window tooltip window id : int id number x, y : int coordinates of the tooltip """ # Written by <NAME> def __init__(self, widget): """Constructor for ToolTip class """ self.widget = widget self.tipwindow = None self.id = None self.x = 0 self.y = 0 def showtip(self, text): """Function that actually displays the tooltip """ self.text = text if self.tipwindow or not self.text: return x, y, _, cy = self.widget.bbox("insert") x = x + self.widget.winfo_rootx() + 57 y = y + cy + self.widget.winfo_rooty() +27 self.tipwindow = tw = Toplevel(self.widget) tw.wm_overrideredirect(1) tw.wm_geometry("+%d+%d" % (x, y)) label = Label(tw, text=self.text, justify=LEFT, background="#ffffe0", relief=SOLID, borderwidth=1, font=("tahoma", "8", "normal")) label.pack(ipadx=1) def hidetip(self): """Function that destroys the tooltip """ tw = self.tipwindow self.tipwindow = None if tw: tw.destroy() def CreateToolTip(widget, text): """Function that binds the tooltip to a widget Parameters ---------- widget : tkinter widget widget to bind to text : str tooltip text """ toolTip = ToolTip(widget) def enter(event): toolTip.showtip(text) def leave(event): toolTip.hidetip() widget.bind('<Enter>', enter) widget.bind('<Leave>', leave) class InitIOCGui: """Class representing 
the main GUI for initIOCs. Attributes ---------- master : Tk window the containing window frame : tk frame the main frame ioc_num_counter : int counter for number of IOCs deployed. actions, configuration, bin_flat : list of IOCAction, dict of {str,str}, bool configuration of IOCs to generate Methods ------- initWindow() initializes the window writeToIOCPanel() writes to the ioc panel readGUIConfig() parses gui data into actions, configuration, binflat execute() executes the ioc actions save() saves the IOC configuration openAddIOCWindow() opens window to add new IOC """ def __init__(self, master, configuration, actions, manager): """ Constructor for InitIOCGui """ self.master = master self.configuration = configuration self.manager = manager self.master.protocol('WM_DELETE_WINDOW', self.thread_cleanup) self.frame = Frame(self.master) self.frame.pack() self.largeFont = tkFont.Font(size = 12) self.largeFontU = tkFont.Font(size = 12) self.largeFontU.configure(underline = True) self.showPopups = BooleanVar() self.showPopups.set(True) self.askAnother = BooleanVar() self.askAnother.set(False) self.executionThread = threading.Thread() menubar = Menu(self.master) filemenu = Menu(menubar, tearoff=0) filemenu.add_command(label='Save Configuration', command=self.save) filemenu.add_command(label='Save Log', command=self.saveLog) filemenu.add_command(label='Clear Log', command=self.clearLog) filemenu.add_command(label='Exit', command=self.thread_cleanup) menubar.add_cascade(label='File', menu=filemenu) editmenu = Menu(menubar, tearoff=0) editmenu.add_command(label='Add IOC', command=self.openAddIOCWindow) editmenu.add_command(label='Clear IOC table', command=self.initIOCPanel) editmenu.add_checkbutton(label='Toggle Popups', onvalue=True, offvalue=False, variable=self.showPopups) editmenu.add_checkbutton(label='Ask to Add Multiple IOCs', onvalue=True, offvalue=False, variable=self.askAnother) menubar.add_cascade(label='Edit', menu=editmenu) runmenu = Menu(menubar, tearoff=0) 
runmenu.add_command(label='Generate IOCs', command=self.execute) menubar.add_cascade(label='Run', menu=runmenu) helpmenu = Menu(menubar, tearoff=0) helpmenu.add_command(label='Online Docs', command=lambda: webbrowser.open('https://epicsnsls2-deploy.github.io/Deploy-Docs/#initIOC-step-by-step-example', new=2)) helpmenu.add_command(label='initIOC on Github', command = lambda: webbrowser.open('https://github.com/epicsNSLS2-deploy/initIOC', new=2)) helpmenu.add_command(label='Report an Issue', command = lambda: webbrowser.open('https://github.com/epicsNSLS2-deploy/initIOC/issues', new=2)) helpmenu.add_command(label='Supported Drivers', command=print_supported_drivers) helpmenu.add_command(label='About', command=print_start_message) menubar.add_cascade(label='Help', menu=helpmenu) self.master.config(menu=menubar) # Read initial configuration from save file self.actions = actions # User inputs for all configuration options self.text_inputs = {} row_counter = 0 for elem in self.configuration.keys(): self.text_inputs[elem] = StringVar() Label(self.frame, text=elem).grid(row=row_counter, column=0, padx = 10, pady = 10) elem_entry = Entry(self.frame, textvariable=self.text_inputs[elem], width=30) elem_entry.grid(row=row_counter, column=1, columnspan = 2, padx=10, pady=10) elem_entry.insert(0, self.configuration[elem]) CreateToolTip(elem_entry, config_tooltips[elem]) row_counter = row_counter + 1 self.master.title('initIOC GUI') ttk.Separator(self.frame, orient=HORIZONTAL).grid(row=row_counter, columnspan=3, padx = 5, sticky = 'ew') Label(self.frame, text='IOC Generation Table - You may edit this table manually, or add new IOCs with the Add Button').grid(row = 0, column = 3, columnspan = 5, padx = 10, pady = 10) self.iocPanel = ScrolledText.ScrolledText(self.frame, width = '75', height = '15') self.iocPanel.grid(row = 1, column = 3, padx = 15, pady = 15, columnspan = 5, rowspan = row_counter + 1) self.initIOCPanel() for action in self.actions: 
self.writeToIOCPanel(action.ioc_type, action.ioc_name, action.dev_prefix, action.asyn_port, action.ioc_port, action.connection) Label(self.frame, text='Log', font=self.largeFontU).grid(row = row_counter + 1, column = 0, padx = 5, pady = 0) self.logPanel = ScrolledText.ScrolledText(self.frame, width='100', height = '15') self.logPanel.grid(row = row_counter + 2, column = 0, rowspan = 5, columnspan = 4, padx = 10, pady = 10) saveButton = Button(self.frame, text="Save", font=self.largeFont, command=self.save, height='3', width='20') runButton = Button(self.frame, text="Run", font=self.largeFont, command=self.execute, height='3', width='20') addButton = Button(self.frame, text="Add IOC", font=self.largeFont, command=self.openAddIOCWindow, height='3', width='20') saveButton.grid(row=row_counter+3, column=5, columnspan=2, padx=5, pady=5) runButton.grid( row=row_counter+4, column=5, columnspan=2, padx=5, pady=5) addButton.grid( row=row_counter+5, column=5, columnspan=2, padx=5, pady=5) def initIOCPanel(self): """ Function that resets the IOC panel """ self.iocPanel.delete('1.0', END) self.iocPanel.insert(INSERT, '# IOC Type IOC Name Device Prefix Asyn Port IOC Port Cam Connection\n') self.iocPanel.insert(INSERT, '#-----------------------------------------------------------------------------------------\n') def writeToIOCPanel(self, ioc_type, name, dev_prefix, asyn, port, connect): """ Function that writes to the iocPanel """ self.iocPanel.insert(INSERT, '{:<18}{:<15}{:<15}{:<15}{:<12}{}\n'.format(ioc_type, name, dev_prefix, asyn, port, connect)) def writeToLog(self, text): """Function that writes text to the GUI log """ self.logPanel.insert(INSERT, text) self.logPanel.see(END) def showError(self, text): if self.showPopups.get(): messagebox.showerror('ERROR', text) self.writeToLog('ERROR - ' + text + '\n') def showWarning(self, text): if self.showPopups.get(): messagebox.showerror('WARNING', text) self.writeToLog('WARNING - ' + text + '\n') def showMessage(self, text): if 
self.showPopups.get(): messagebox.showerror('Info', text) self.writeToLog(text + '\n') def read_gui_config(self): """Function that reads values entered into gui into actions, configuration, and bin_flat """ for elem in self.text_inputs.keys(): if self.text_inputs[elem].get() != self.configuration[elem]: self.configuration[elem] = self.text_inputs[elem].get() self.manager.ioc_top = self.configuration['IOC_DIR'] self.manager.binary_location = self.configuration['TOP_BINARY_DIR'] self.manager.binaries_flat = self.manager.check_binaries_flat() self.manager.update_mod_paths() del self.actions[:] for line in self.iocPanel.get('1.0', END).splitlines(): if not line.startswith('#') and len(line) > 1: action = parse_line_into_action(line, self.configuration['PREFIX']) action.epics_environment['HOSTNAME'] = self.configuration['HOSTNAME'] action.epics_environment['ENGINEER'] = self.configuration['ENGINEER'] action.epics_environment['EPICS_CA_ADDR_LIST'] = self.configuration['CA_ADDRESS'] if action is not None: self.actions.append(action) self.ioc_num_counter = self.ioc_num_counter + 1 else: self.showWarning('Could not parse one of the IOC lines entered into the table.') def execute(self): """Reads gui info, and runs init_iocs """ if self.executionThread.is_alive(): self.showError('Process thread is already active!') else: self.read_gui_config() self.executionThread = threading.Thread(target=lambda : init_iocs_cli(self.actions, self.manager)) self.executionThread.start() def save(self): """Saves the current IOC configuration """ self.read_gui_config() if os.path.exists('CONFIGURE'): os.remove('CONFIGURE') file = open('CONFIGURE', 'w') file.write('#\n# initIOCs CONFIGURE file autogenerated on {}\n#\n\n'.format(datetime.datetime.now())) for elem in self.configuration.keys(): file.write('# {}\n'.format(config_tooltips[elem])) file.write('{}={}\n\n'.format(elem, self.configuration[elem])) file.write(self.iocPanel.get('1.0', END)) initIOC_print('Saved configuration to CONFIGURE 
file.') def saveLog(self): """Function that saves the current log into a log file """ if not os.path.exists('logs'): os.mkdir('logs') elif not os.path.isdir('logs'): self.showError('logs directory could not be created, logs file exists') stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S") log = open('logs/initIOC-{}.log'.format(stamp), 'w') log.write(self.logPanel.get('1.0', END)) log.close() self.writeToLog('Wrote log file.\n') def clearLog(self): """Reinitializes the log """ self.logPanel.delete('1.0', END) print_start_message() def openAddIOCWindow(self): """Opens an addIOC window """ AddIOCWindow(self) def thread_cleanup(self): if self.executionThread.is_alive(): self.executionThread.join() self.master.destroy() class AddIOCWindow: """Class representing a window for adding a new IOC into the config """ def __init__(self, root): self.root = root self.master = Toplevel() self.master.title('Add New IOC') # Create the entry fields for all the paramters self.ioc_type_var = StringVar() self.ioc_type_var.set(supported_drivers[0]) self.ioc_name_var = StringVar() self.dev_prefix_var = StringVar() self.asyn_port_var = StringVar() self.ioc_port_var = StringVar() self.cam_connect_var = StringVar() Label(self.master, text="IOC Type").grid(row = 0, column = 0, padx = 10, pady = 10) ioc_type_entry = ttk.Combobox(self.master, textvariable=self.ioc_type_var, values=supported_drivers) ioc_type_entry.grid(row = 0, column = 1, columnspan=2, padx = 10, pady = 10) CreateToolTip(ioc_type_entry, 'The IOC type. Must be from list of supported drivers.') Label(self.master, text="IOC Name").grid(row = 1, column = 0, padx = 10, pady = 10) ioc_name_entry = Entry(self.master, textvariable=self.ioc_name_var) ioc_name_entry.grid(row = 1, column = 1, columnspan=2, padx = 10, pady = 10) CreateToolTip(ioc_name_entry, 'The name of the IOC. 
Usually cam-$NAME') Label(self.master, text="Device Prefix").grid(row = 1, column = 0, padx = 10, pady = 10) dev_prefix_entry = Entry(self.master, textvariable=self.dev_prefix_var) dev_prefix_entry.grid(row = 1, column = 1, columnspan=2, padx = 10, pady = 10) CreateToolTip(dev_prefix_entry, 'The device-specific prefix. ex. {{Sim-Cam:1}}') Label(self.master, text="Asyn Port").grid(row = 2, column = 0, padx = 10, pady = 10) asyn_port_entry = Entry(self.master, textvariable=self.asyn_port_var) asyn_port_entry.grid(row = 2, column = 1, columnspan=2, padx = 10, pady = 10) CreateToolTip(asyn_port_entry, 'IOC Asyn port. Usually Shorthand of IOC type and number. ex. SIM1') Label(self.master, text="IOC Port").grid(row = 3, column = 0, padx = 10, pady = 10) ioc_port_entry = Entry(self.master, textvariable=self.ioc_port_var) ioc_port_entry.grid(row = 3, column = 1, columnspan=2, padx = 10, pady = 10) CreateToolTip(ioc_port_entry, 'Telnet port used by softioc when running the IOC') Label(self.master, text="Cam Connection").grid(row = 4, column = 0, padx = 10, pady = 10) cam_connect_entry = Entry(self.master, textvariable=self.cam_connect_var) cam_connect_entry.grid(row = 4, column = 1, columnspan=2, padx = 10, pady = 10) CreateToolTip(cam_connect_entry, 'A general parameter used to connect to camera. 
Typically IP, Serial #, config path, etc.') Button(self.master,text="Submit", command=self.submit).grid(row = 5, column = 0, padx = 10, pady = 10) Button(self.master,text="Cancel", command=self.master.destroy).grid(row = 5, column = 2, padx = 10, pady = 10) def submit(self): """Function that enters the filled IOC values into the configuration """ if self.ioc_type_var.get() not in supported_drivers: self.root.showError('The selected IOC type is not supported.') self.master.destroy() return ioc_type = self.ioc_type_var.get() name = self.ioc_name_var.get() dev_prefix = self.dev_prefix_var.get() asyn = self.asyn_port_var.get() port = self.ioc_port_var.get() connect = self.cam_connect_var.get() if ioc_type == '' or name == '' or dev_prefix == '' or asyn == '' or port == '' or connect == '': self.root.showError('Please enter a valid value for all of the fields.') return self.root.writeToIOCPanel(ioc_type, name, dev_prefix, asyn, port, connect) self.root.writeToLog('Added IOC {} to configuration.\n'.format(name)) if self.root.askAnother.get(): res = messagebox.askyesno('Continue', 'Would you like to add another IOC?') if res is not None and not res: self.master.destroy() elif res is not None: self.ioc_name_var.set('') self.ioc_type_var.set('') self.ioc_port_var.set('') self.asyn_port_var.set('') self.cam_connect_var.set('') else: self.master.destroy() def main(): if not WITH_GUI: initIOC_print('ERROR - TKinter GUI package not installed. Please intall and rerun.') exit() else: root = Tk() USING_GUI = True app = InitIOCGui(root, configuration, actions, manager) GUI_TOP_WINDOW = app print_start_message() root.mainloop()
StarcoderdataPython
11260663
<reponame>vinjn/net-doctor<gh_stars>0 # $Id: dns.py 27 2006-11-21 01:22:52Z dahelder $ # -*- coding: utf-8 -*- """Domain Name System.""" from __future__ import print_function from __future__ import absolute_import import struct import codecs from . import dpkt from .compat import compat_ord DNS_Q = 0 DNS_R = 1 # Opcodes DNS_QUERY = 0 DNS_IQUERY = 1 DNS_STATUS = 2 DNS_NOTIFY = 4 DNS_UPDATE = 5 # Flags DNS_CD = 0x0010 # checking disabled DNS_AD = 0x0020 # authenticated data DNS_Z = 0x0040 # unused DNS_RA = 0x0080 # recursion available DNS_RD = 0x0100 # recursion desired DNS_TC = 0x0200 # truncated DNS_AA = 0x0400 # authoritative answer DNS_QR = 0x8000 # response ( query / response ) # Response codes DNS_RCODE_NOERR = 0 DNS_RCODE_FORMERR = 1 DNS_RCODE_SERVFAIL = 2 DNS_RCODE_NXDOMAIN = 3 DNS_RCODE_NOTIMP = 4 DNS_RCODE_REFUSED = 5 DNS_RCODE_YXDOMAIN = 6 DNS_RCODE_YXRRSET = 7 DNS_RCODE_NXRRSET = 8 DNS_RCODE_NOTAUTH = 9 DNS_RCODE_NOTZONE = 10 # RR types DNS_A = 1 DNS_NS = 2 DNS_CNAME = 5 DNS_SOA = 6 DNS_NULL = 10 DNS_PTR = 12 DNS_HINFO = 13 DNS_MX = 15 DNS_TXT = 16 DNS_AAAA = 28 DNS_SRV = 33 DNS_OPT = 41 # RR classes DNS_IN = 1 DNS_CHAOS = 3 DNS_HESIOD = 4 DNS_ANY = 255 def pack_name(name, off, label_ptrs): name = codecs.encode(name, 'utf-8') if name: labels = name.split(b'.') else: labels = [] labels.append(b'') buf = b'' for i, label in enumerate(labels): key = b'.'.join(labels[i:]).upper() ptr = label_ptrs.get(key) if not ptr: if len(key) > 1: ptr = off + len(buf) if ptr < 0xc000: label_ptrs[key] = ptr i = len(label) buf += struct.pack("B", i) + label else: buf += struct.pack('>H', (0xc000 | ptr)) break return buf def unpack_name(buf, off): name = [] saved_off = 0 start_off = off name_length = 0 while True: if off >= len(buf): raise dpkt.NeedData() n = compat_ord(buf[off]) if n == 0: off += 1 break elif (n & 0xc0) == 0xc0: ptr = struct.unpack('>H', buf[off:off + 2])[0] & 0x3fff if ptr >= start_off: raise dpkt.UnpackError('Invalid label compression pointer') off += 2 if 
not saved_off: saved_off = off start_off = off = ptr elif (n & 0xc0) == 0x00: off += 1 name.append(buf[off:off + n]) name_length += n + 1 if name_length > 255: raise dpkt.UnpackError('name longer than 255 bytes') off += n else: raise dpkt.UnpackError('Invalid label length %02x' % n) if not saved_off: saved_off = off return codecs.decode(b'.'.join(name), 'utf-8'), saved_off class DNS(dpkt.Packet): """Domain Name System. TODO: Longer class information.... Attributes: __hdr__: Header fields of DNS. TODO. """ __hdr__ = ( ('id', 'H', 0), ('op', 'H', DNS_RD), # recursive query # XXX - lists of query, RR objects ('qd', 'H', []), ('an', 'H', []), ('ns', 'H', []), ('ar', 'H', []) ) @property def qr(self): return int((self.op & DNS_QR) == DNS_QR) @qr.setter def qr(self, v): if v: self.op |= DNS_QR else: self.op &= ~DNS_QR @property def opcode(self): return (self.op >> 11) & 0xf @opcode.setter def opcode(self, v): self.op = (self.op & ~0x7800) | ((v & 0xf) << 11) @property def aa(self): return int((self.op & DNS_AA) == DNS_AA) @aa.setter def aa(self, v): if v: self.op |= DNS_AA else: self.op &= ~DNS_AA @property def tc(self): return int((self.op & DNS_TC) == DNS_TC) @tc.setter def tc(self, v): if v: self.op |= DNS_TC else: self.op &= ~DNS_TC @property def rd(self): return int((self.op & DNS_RD) == DNS_RD) @rd.setter def rd(self, v): if v: self.op |= DNS_RD else: self.op &= ~DNS_RD @property def ra(self): return int((self.op & DNS_RA) == DNS_RA) @ra.setter def ra(self, v): if v: self.op |= DNS_RA else: self.op &= ~DNS_RA @property def zero(self): return int((self.op & DNS_Z) == DNS_Z) @zero.setter def zero(self, v): if v: self.op |= DNS_Z else: self.op &= ~DNS_Z @property def rcode(self): return self.op & 0xf @rcode.setter def rcode(self, v): self.op = (self.op & ~0xf) | (v & 0xf) class Q(dpkt.Packet): """DNS question.""" __hdr__ = ( ('name', '1025s', b''), ('type', 'H', DNS_A), ('cls', 'H', DNS_IN) ) # XXX - suk def __len__(self): raise NotImplementedError __str__ = __len__ 
def unpack(self, buf): raise NotImplementedError class RR(Q): """DNS resource record.""" __hdr__ = ( ('name', '1025s', b''), ('type', 'H', DNS_A), ('cls', 'H', DNS_IN), ('ttl', 'I', 0), ('rlen', 'H', 4), ('rdata', 's', b'') ) def pack_rdata(self, off, label_ptrs): # XXX - yeah, this sux if self.rdata: return self.rdata if self.type == DNS_A: return self.ip elif self.type == DNS_NS: return pack_name(self.nsname, off, label_ptrs) elif self.type == DNS_CNAME: return pack_name(self.cname, off, label_ptrs) elif self.type == DNS_PTR: return pack_name(self.ptrname, off, label_ptrs) elif self.type == DNS_SOA: l_ = [] l_.append(pack_name(self.mname, off, label_ptrs)) l_.append(pack_name(self.rname, off + len(l_[0]), label_ptrs)) l_.append(struct.pack('>IIIII', self.serial, self.refresh, self.retry, self.expire, self.minimum)) return b''.join(l_) elif self.type == DNS_MX: return struct.pack('>H', self.preference) + \ pack_name(self.mxname, off + 2, label_ptrs) elif self.type == DNS_TXT or self.type == DNS_HINFO: return b''.join(struct.pack('B', len(x)) + x for x in self.text) elif self.type == DNS_AAAA: return self.ip6 elif self.type == DNS_SRV: return struct.pack('>HHH', self.priority, self.weight, self.port) + \ pack_name(self.srvname, off + 6, label_ptrs) elif self.type == DNS_OPT: return b'' # self.rdata else: raise dpkt.PackError('RR type %s is not supported' % self.type) def unpack_rdata(self, buf, off): if self.type == DNS_A: self.ip = self.rdata elif self.type == DNS_NS: self.nsname, off = unpack_name(buf, off) elif self.type == DNS_CNAME: self.cname, off = unpack_name(buf, off) elif self.type == DNS_PTR: self.ptrname, off = unpack_name(buf, off) elif self.type == DNS_SOA: self.mname, off = unpack_name(buf, off) self.rname, off = unpack_name(buf, off) self.serial, self.refresh, self.retry, self.expire, self.minimum = \ struct.unpack('>IIIII', buf[off:off + 20]) elif self.type == DNS_MX: self.preference = struct.unpack('>H', self.rdata[:2]) self.mxname, off = 
unpack_name(buf, off + 2) elif self.type == DNS_TXT or self.type == DNS_HINFO: self.text = [] buf = self.rdata while buf: n = compat_ord(buf[0]) self.text.append(codecs.decode(buf[1:1 + n], 'utf-8')) buf = buf[1 + n:] elif self.type == DNS_AAAA: self.ip6 = self.rdata elif self.type == DNS_NULL: self.null = codecs.encode(self.rdata, 'hex') elif self.type == DNS_SRV: self.priority, self.weight, self.port = struct.unpack('>HHH', self.rdata[:6]) self.srvname, off = unpack_name(buf, off + 6) elif self.type == DNS_OPT: pass # RFC-6891: OPT is a pseudo-RR not carrying any DNS data else: raise dpkt.UnpackError('RR type %s is not supported' % self.type) def pack_q(self, buf, q): """Append packed DNS question and return buf.""" return buf + pack_name(q.name, len(buf), self.label_ptrs) + struct.pack('>HH', q.type, q.cls) def unpack_q(self, buf, off): """Return DNS question and new offset.""" q = self.Q() q.name, off = unpack_name(buf, off) q.type, q.cls = struct.unpack('>HH', buf[off:off + 4]) off += 4 return q, off def pack_rr(self, buf, rr): """Append packed DNS RR and return buf.""" name = pack_name(rr.name, len(buf), self.label_ptrs) rdata = rr.pack_rdata(len(buf) + len(name) + 10, self.label_ptrs) return buf + name + struct.pack('>HHIH', rr.type, rr.cls, rr.ttl, len(rdata)) + rdata def unpack_rr(self, buf, off): """Return DNS RR and new offset.""" rr = self.RR() rr.name, off = unpack_name(buf, off) rr.type, rr.cls, rr.ttl, rdlen = struct.unpack('>HHIH', buf[off:off + 10]) off += 10 rr.rdata = buf[off:off + rdlen] rr.rlen = rdlen rr.unpack_rdata(buf, off) off += rdlen return rr, off def unpack(self, buf): dpkt.Packet.unpack(self, buf) off = self.__hdr_len__ cnt = self.qd # FIXME: This relies on this being properly set somewhere else self.qd = [] for _ in range(cnt): q, off = self.unpack_q(buf, off) self.qd.append(q) for x in ('an', 'ns', 'ar'): cnt = getattr(self, x, 0) setattr(self, x, []) for _ in range(cnt): rr, off = self.unpack_rr(buf, off) getattr(self, 
x).append(rr) self.data = b'' def __len__(self): # XXX - cop out return len(bytes(self)) def __bytes__(self): # XXX - compress names on the fly self.label_ptrs = {} buf = struct.pack(self.__hdr_fmt__, self.id, self.op, len(self.qd), len(self.an), len(self.ns), len(self.ar)) for q in self.qd: buf = self.pack_q(buf, q) for x in ('an', 'ns', 'ar'): for rr in getattr(self, x): buf = self.pack_rr(buf, rr) del self.label_ptrs return buf # TESTS def define_testdata(): """ Reference test data is stored in the dynamically defined class. It is created in this way so that we can import unhexlify only during testing, and not during normal use. """ from binascii import unhexlify class TestData(object): a_resp = unhexlify( "059c8180000100010000000106676f6f676c6503636f6d0000010001c00c00010" "0010000012b0004d83ace2e0000290200000000000000" ) aaaa_resp = unhexlify( "7f228180000100010000000005676d61696c03636f6d00001c0001c00c001c000" "10000012b00102a001450400908020000000000002005" ) cname_resp = unhexlify( "a154818000010001000000000377777705676d61696c03636f6d0000010001c00" "c000500010000545f000e046d61696c06676f6f676c65c016" ) invalid_rr = unhexlify( "000001000000000100000000046e616d650000150001000000000000" ) mx_resp = unhexlify( "053b8180000100010000000006676f6f676c6503636f6d00000f0001c00c000f0" "001000002570011001e04616c7432056173706d78016cc00c" ) null_resp = unhexlify( "12b0840000010001000000000b626c6168626c616836363606706972617465037" "3656100000a0001c00c000a00010000000000095641434b4403c5e901" ) opt_resp = unhexlify( "8d6e0110000100000000000104783131310678787878313106616b616d6169036" "e657400000100010000290fa0000080000000" ) ptr_resp = unhexlify( "67028180000100010003000001310131033231310331343107696e2d616464720" "46172706100000c0001c00c000c000100000d3600240764656661756c740a762d" "756d63652d69667305756d6e657405756d6963680365647500c00e00020001000" "00d36000d0673686162627903696673c04fc00e0002000100000d36000f0c6669" "73682d6c6963656e7365c06dc00e0002000100000d36000b04646e73320369746" 
"4c04f" ) soa_resp = unhexlify( "851f8180000100010000000006676f6f676c6503636f6d0000060001c00c00060" "001000000230026036e7332c00c09646e732d61646d696ec00c0a747447000003" "8400000384000007080000003c" ) srv_resp = unhexlify( "7f2281800001000100000000075f6a6162626572045f746370066a61626265720" "3636f6d0000210001c00c0021000100000e0f001a000a000014950764656e6a61" "6232066a616262657203636f6d00" ) txt_resp = unhexlify( "10328180000100010000000006676f6f676c6503636f6d0000100001c00c00100" "0010000010e00100f763d7370663120707472203f616c6c" ) return TestData() def test_basic(): buf = define_testdata().a_resp my_dns = DNS(buf) assert my_dns.qd[0].name == 'google.com' assert my_dns.an[0].name == 'google.com' assert bytes(my_dns) == buf class TryExceptException: def __init__(self, exception_type, msg=''): self.exception_type = exception_type self.msg = msg def __call__(self, f, *args, **kwargs): def wrapper(*args, **kwargs): try: f() except self.exception_type as e: if self.msg: assert str(e) == self.msg else: raise Exception("There should have been an Exception raised") return wrapper @TryExceptException(Exception, msg='There should have been an Exception raised') def test_TryExceptException(): """Check that we can catch a function which does not throw an exception when it is supposed to""" @TryExceptException(NotImplementedError) def fun(): pass try: fun() except Exception as e: raise e @TryExceptException(NotImplementedError) def test_Q_len(): """Test in place for when the method is written""" q = DNS.Q() len(q) @TryExceptException(NotImplementedError) def test_Q_unpack(): """Test in place for when the method is written""" q = DNS.Q() q.unpack(None) def property_runner(prop, ops, set_to=None): if set_to is None: set_to = [False, True, False] buf = define_testdata().a_resp dns = DNS(buf) for set_to, op in zip(set_to, ops): setattr(dns, prop, set_to) assert dns.op == op assert getattr(dns, prop) == set_to def test_qr(): property_runner('qr', ops=[384, 33152, 384]) def test_opcode(): 
property_runner('opcode', ops=[33152, 35200, 33152]) def test_aa(): property_runner('aa', ops=[33152, 34176, 33152]) def test_tc(): property_runner('tc', ops=[33152, 33664, 33152]) def test_rd(): property_runner('rd', ops=[32896, 33152, 32896]) def test_ra(): property_runner('ra', ops=[33024, 33152, 33024]) def test_zero(): property_runner('zero', ops=[33152, 33216, 33152]) def test_rcode(): property_runner('rcode', ops=[33152, 33153, 33152]) def test_PTR(): buf = define_testdata().ptr_resp my_dns = DNS(buf) assert my_dns.qd[0].name == '192.168.3.11.in-addr.arpa' and \ my_dns.an[0].ptrname == 'default.v-umce-ifs.umnet.umich.edu' and \ my_dns.ns[0].nsname == 'shabby.ifs.umich.edu' and \ my_dns.ns[1].ttl == 3382 and \ my_dns.ns[2].nsname == 'dns2.itd.umich.edu' assert buf == bytes(my_dns) def test_OPT(): buf = define_testdata().opt_resp my_dns = DNS(buf) my_rr = my_dns.ar[0] assert my_rr.type == DNS_OPT assert my_rr.rlen == 0 and my_rr.rdata == b'' assert bytes(my_dns) == buf my_rr.rdata = b'\x00\x00\x00\x02\x00\x00' # add 1 attribute tlv my_dns2 = DNS(bytes(my_dns)) my_rr2 = my_dns2.ar[0] assert my_rr2.rlen == 6 and my_rr2.rdata == b'\x00\x00\x00\x02\x00\x00' def test_pack_name(): # Empty name is \0 x = pack_name('', 0, {}) assert x == b'\0' @TryExceptException(dpkt.UnpackError) def test_unpack_name(): """If the offset is longer than the buffer, there will be an UnpackError""" unpack_name(b' ', 0) @TryExceptException(dpkt.UnpackError) def test_random_data(): DNS(b'\x83z0\xd2\x9a\xec\x94_7\xf3\xb7+\x85"?\xf0\xfb') @TryExceptException(dpkt.UnpackError) def test_circular_pointers(): DNS(b'\xc0\x00\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x07example\x03com\xc0\x00') @TryExceptException(dpkt.UnpackError) def test_very_long_name(): DNS(b'\x00\x00\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00' + (b'\x10abcdef0123456789' * 16) + b'\x00') def test_null_response(): buf = define_testdata().null_resp my_dns = DNS(buf) assert my_dns.qd[0].name == 'blahblah666.pirate.sea' and \ 
my_dns.an[0].null == b'5641434b4403c5e901' assert str(buf) == str(my_dns) def test_txt_response(): buf = define_testdata().txt_resp my_dns = DNS(buf) my_rr = my_dns.an[0] assert my_rr.type == DNS_TXT assert my_rr.name == 'google.com' assert my_rr.text == ['v=spf1 ptr ?all'] assert str(my_dns) == str(buf) assert bytes(my_dns) == buf def test_rdata_TXT(): rr = DNS.RR( type=DNS_TXT, text=[b'v=spf1 ptr ?all', b'a=something'] ) packdata = rr.pack_rdata(0, {}) correct = b'\x0fv=spf1 ptr ?all\x0ba=something' assert packdata == correct def test_rdata_HINFO(): rr = DNS.RR( type=DNS_HINFO, text=[b'v=spf1 ptr ?all', b'a=something'] ) packdata = rr.pack_rdata(0, {}) correct = b'\x0fv=spf1 ptr ?all\x0ba=something' assert packdata == correct def test_rdata_rdata(): rr = DNS.RR( name='zc.akadns.org', ttl=123446, rdata=b'?\xf1\xc76', ) packdata = rr.pack_rdata(0, {}) correct = b'?\xf1\xc76' assert packdata == correct def test_rdata_A(): rr = DNS.RR( name='zc.akadns.org', ttl=123446, ip=b'?\xf1\xc76', type=DNS_A, ) packdata = rr.pack_rdata(0, {}) correct = b'?\xf1\xc76' assert packdata == correct def test_rdata_NS(): rr = DNS.RR( nsname='zc.akadns.org', ttl=123446, ip=b'?\xf1\xc76', type=DNS_NS, ) packdata = rr.pack_rdata(0, {}) correct = b'\x02zc\x06akadns\x03org\x00' assert packdata == correct def test_rdata_CNAME(): rr = DNS.RR( cname='zc.akadns.org', ttl=123446, ip=b'?\xf1\xc76', type=DNS_CNAME, ) packdata = rr.pack_rdata(0, {}) correct = b'\x02zc\x06akadns\x03org\x00' assert packdata == correct def test_rdata_PTR(): rr = DNS.RR( ptrname='default.v-umce-ifs.umnet.umich.edu', ttl=1236, ip=b'?\xf1\xc76', type=DNS_PTR, ) packdata = rr.pack_rdata(0, {}) correct = b'\x07default\nv-umce-ifs\x05umnet\x05umich\x03edu\x00' assert packdata == correct def test_rdata_SOA(): rr = DNS.RR( mname='blah.google.com', rname='moo.blah.com', serial=12345666, refresh=123463, retry=209834, minimum=9000, expire=28341, type=DNS_SOA, ) packdata = rr.pack_rdata(0, {}) correct = ( 
b'\x04blah\x06google\x03com\x00\x03moo\x04blah\xc0\x0c\x00\xbcaB' b'\x00\x01\xe2G\x00\x033\xaa\x00\x00n\xb5\x00\x00#(') assert packdata == correct def test_rdata_MX(): rr = DNS.RR( type=DNS_MX, preference=2124, mxname='mail.google.com', ) packdata = rr.pack_rdata(0, {}) correct = b'\x08L\x04mail\x06google\x03com\x00' assert packdata == correct def test_rdata_AAAA(): ip6 = b'&\x07\xf8\xb0@\x0c\x0c\x03\x00\x00\x00\x00\x00\x00\x00\x1a' rr = DNS.RR( type=DNS_AAAA, ip6=ip6, ) packdata = rr.pack_rdata(0, {}) correct = ip6 assert packdata == correct def test_rdata_SRV(): rr = DNS.RR( type=DNS_SRV, ttl=86400, priority=0, weight=5, port=5060, srvname='_sip._tcp.example.com', ) packdata = rr.pack_rdata(0, {}) correct = b'\x00\x00\x00\x05\x13\xc4\x04_sip\x04_tcp\x07example\x03com\x00' assert packdata == correct def test_rdata_OPT(): rr = DNS.RR( type=DNS_OPT, ) # TODO: This is hardcoded to return b''. Is this intentional? packdata = rr.pack_rdata(0, {}) correct = b'' assert packdata == correct def test_dns_len(): my_dns = DNS() assert len(my_dns) == 12 @TryExceptException(dpkt.PackError) def test_rdata_FAIL(): DNS.RR(type=12345666).pack_rdata(0, {}) def test_soa(): buf = define_testdata().soa_resp soa = DNS(buf) assert soa.id == 34079 assert soa.op == 33152 assert len(soa.qd) == 1 q = soa.qd[0] assert q.name == 'google.com' assert q.type == DNS_SOA assert q.cls == DNS_IN assert len(soa.an) == 1 a = soa.an[0] assert a.name == 'google.com' assert a.type == DNS_SOA assert a.cls == DNS_IN assert a.ttl == 35 assert a.retry == 900 assert a.mname == 'ns2.google.com' assert a.minimum == 60 assert a.refresh == 900 assert a.expire == 1800 assert a.serial == 175404103 assert a.rlen == 38 assert a.rname == 'dns-admin.google.com' assert a.rdata == b'\x03ns2\xc0\x0c\tdns-admin\xc0\x0c\nttG\x00\x00\x03\x84\x00\x00\x03\x84\x00\x00\x07\x08\x00\x00\x00<' assert soa.ar == [] def test_mx(): buf = define_testdata().mx_resp mx = DNS(buf) assert mx.id == 1339 assert mx.op == 33152 assert len(mx.qd) 
== 1 q = mx.qd[0] assert q.name == 'google.com' assert q.type == DNS_MX assert q.cls == DNS_IN assert len(mx.an) == 1 a = mx.an[0] assert a.type == DNS_MX assert a.cls == DNS_IN assert a.name == 'google.com' assert a.ttl == 599 assert a.mxname == 'alt2.aspmx.l.google.com' assert a.preference == (30,) assert a.rlen == 17 assert a.rdata == b'\x00\x1e\x04alt2\x05aspmx\x01l\xc0\x0c' assert mx.ar == [] def test_aaaa(): buf = define_testdata().aaaa_resp aaaa = DNS(buf) aaaa.id = 32546 aaaa.op = 33152 assert len(aaaa.qd) == 1 q = aaaa.qd[0] assert q.type == DNS_AAAA assert q.name == 'gmail.com' assert len(aaaa.an) == 1 a = aaaa.an[0] assert a.type == DNS_AAAA assert a.cls == DNS_IN assert a.name == 'gmail.com' assert a.ttl == 299 assert a.ip6 == b'*\x00\x14P@\t\x08\x02\x00\x00\x00\x00\x00\x00 \x05' assert a.rlen == 16 assert a.rdata == b'*\x00\x14P@\t\x08\x02\x00\x00\x00\x00\x00\x00 \x05' assert aaaa.ar == [] def test_srv(): buf = define_testdata().srv_resp srv = DNS(buf) srv.id = 32546 srv.op = 33152 assert len(srv.qd) == 1 q = srv.qd[0] assert q.type == DNS_SRV assert q.name == '_jabber._tcp.jabber.com' assert q.cls == DNS_IN assert len(srv.an) == 1 a = srv.an[0] assert a.type == DNS_SRV assert a.cls == DNS_IN assert a.name == '_jabber._tcp.jabber.com' assert a.port == 5269 assert a.ttl == 3599 assert a.srvname == 'denjab2.jabber.com' assert a.priority == 10 assert a.weight == 0 assert a.rlen == 26 assert a.rdata == b'\x00\n\x00\x00\x14\x95\x07denjab2\x06jabber\x03com\x00' assert srv.ar == [] def test_cname(): buf = define_testdata().cname_resp cname = DNS(buf) cname.id = 41300 cname.op = 33152 assert len(cname.qd) == 1 q = cname.qd[0] assert q.type == DNS_A assert q.cls == DNS_IN assert q.name == 'www.gmail.com' assert len(cname.an) == 1 a = cname.an[0] assert a.type == DNS_CNAME assert a.cls == DNS_IN assert a.name == 'www.gmail.com' assert a.ttl == 21599 assert a.cname == 'mail.google.com' assert a.rlen == 14 assert a.rdata == b'\x04mail\x06google\xc0\x16' assert 
cname.ar == [] @TryExceptException(dpkt.UnpackError) def test_invalid_rr(): buf = define_testdata().invalid_rr DNS(buf)
StarcoderdataPython
6616156
# <reponame>MrKosif/Neural-Networks-From-Scratch
"""Minimal neural-network building blocks (dense layer + activations) built on NumPy."""
import numpy as np


class Layer_Dense:
    """A fully connected layer computing ``inputs @ weight + bias``."""

    def __init__(self, no_of_inputs, no_of_neurons):
        # Small random weights keep the initial activations near zero.
        self.weight = 0.10 * np.random.randn(no_of_inputs, no_of_neurons)
        self.bias = np.zeros((1, no_of_neurons))

    def forward(self, inputs):
        """Return the affine transform of *inputs* (shape: batch x no_of_neurons)."""
        # BUG FIX: the original computed the output but never returned it,
        # so the result was silently discarded.
        return np.dot(inputs, self.weight) + self.bias


class Activation_ReLU:
    """Rectified linear unit: element-wise max(0, x)."""

    def forward(self, inputs):
        output = np.maximum(0, inputs)
        print(output)  # kept from the original for script-style inspection
        return output  # BUG FIX: also return the result so layers can be chained


class Softmax_Activation:
    """Softmax over axis 1, turning raw scores into probabilities."""

    def forward(self, inputs):
        # BUG FIX: softmax must exponentiate before normalising; the original
        # divided the raw scores by their sum, which is not softmax and breaks
        # for negative inputs. Subtracting the row max keeps exp() from
        # overflowing without changing the result.
        exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True))
        output = exp_values / np.sum(exp_values, axis=1, keepdims=True)
        print(output)  # kept from the original for script-style inspection
        return output


class Catagorical_Crossentrophy:
    # Planned loss (translated from the original Turkish note): take the
    # one-hot encoding, multiply it by the predicted probability of the true
    # class, sum, then negate. Not implemented yet.
    pass


if __name__ == "__main__":
    # Demo/driver code moved under a main guard so importing this module has no
    # side effects. The original ran at import time and referenced
    # `nnfs.spiral_data`, a NameError given `from nnfs import spiral_data`.
    from nnfs import spiral_data

    sample = [[1, 2, 3, 5, 6, 8, 5], [2, 5, 3, 7, 4, 3, 1]]

    #relu = Activation_ReLU()
    #relu.forward(sample)

    #softi = Softmax_Activation()
    #softi.forward(sample)

    X, y = spiral_data(samples=100, classes=3)
    print(y)

    #layer1 = Layer_Dense(3, 4)
    #layer2 = Layer_Dense(4, 5)
    #layer2.forward(layer1.forward(sample))
StarcoderdataPython
4863966
# <reponame>LucasRR94/RPG_Pirates_and_Fishers
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
from Item import Item
from Weapon import Weapon
import random
import string


def testAssignname_Weapon(weapon, name, numberofsum, num):
    """Check a Weapon's attack value (and, for names of valid length, its name).

    Prints an error line identifying test *num* on the first failing check,
    or a success line when every applicable check passes. The name is only
    verified when its length is between 5 and 32 characters inclusive.
    """
    # The original duplicated the attack assertion in both branches of the
    # length check; the attack is always verified, so check it once up front.
    try:
        assert weapon.getAttack() == numberofsum
    except AssertionError:
        print("Error : Attack failed :"+str(num))
        return
    if 5 <= len(name) <= 32:
        try:
            assert weapon.getName() == name
        except AssertionError:
            print("Error : Attack name:"+str(num))
            return
    print("test successfull, number:",num,'\n')


if __name__ == "__main__":
    # Fixed edge-case weapons: zero/negative/non-numeric impact or speed.
    sword = Weapon("ICE",100,0)
    sword1 = Weapon("ICE","NVO",10)
    sword2 = Weapon("ICE",2,10)
    sword3 = Weapon("ICE",1,10)
    sword4 = Weapon("ICE",0,0)
    sword5 = Weapon("ICE",100,-110)
    print(sword.getDetail())
    print(sword1.getDetail())
    print(sword2.getDetail())
    print(sword3.getDetail())
    print(sword4.getDetail())
    print(sword5.getDetail())
    testAssignname_Weapon(sword,"ICE",100,1)
    testAssignname_Weapon(sword1,"ICE",100,2)
    testAssignname_Weapon(sword2,"ICE",20,3)
    testAssignname_Weapon(sword3,"ICE",10,4)
    testAssignname_Weapon(sword4,"ICE",0,5)
    testAssignname_Weapon(sword5,"ICE",0,6)

    # Alphabet for random names; hoisted out of the loops (the original
    # rebuilt this string once per generated character).
    lettersused = string.ascii_uppercase + string.digits + string.ascii_lowercase

    # Randomised fuzzing: attack is expected to equal impact * speed.
    for i in range(1000):
        randomimpact1 = random.randint(0, 10)
        randomspeed1 = random.randint(0, 10)
        tam = random.randint(0, 100)
        # just to generate random names
        wordgen = ''.join(random.choice(lettersused) for _ in range(tam))
        genericobject = Weapon(wordgen, randomimpact1, randomspeed1)
        testAssignname_Weapon(genericobject, wordgen, randomimpact1 * randomspeed1, i)
    print("Test has finished")
StarcoderdataPython
1619212
# <reponame>extra2000/nginx-podman
# This file is generated from semantic-release bot
# Package version string; bumped automatically on each release -- do not edit by hand.
version = '3.0.0'
StarcoderdataPython
3219408
import requests


def create_header(access_token):
    """Build the HTTP header dict carrying the bearer token.

    Parameters:
        access_token -- OAuth bearer token string.
    Returns:
        dict suitable for the ``headers=`` argument of requests calls.
    """
    return {
        'Authorization': f'Bearer {access_token}'
    }


def call_api(access_token, data_dictionary, method, endpoint, path, mapped_fields, id):
    """Issue a GET request against a mapped API endpoint.

    Parameters:
        access_token    -- bearer token for the Authorization header.
        data_dictionary -- config dict with 'base_url' and an
                           'endpoint_mapping' of endpoint -> {'endpoint', 'paths'}.
        method          -- currently unused; requests are always GET.
                           NOTE(review): confirm whether other verbs are intended.
        endpoint        -- key into data_dictionary['endpoint_mapping'].
        path            -- optional sub-path key, or None.
        mapped_fields   -- iterable of field names for the 'fields=' parameter.
        id              -- optional resource id appended to the URL, or None
                           (name kept for interface compatibility although it
                           shadows the builtin).
    Returns:
        requests.Response from the GET call.
    """
    # assemble the url without the parameters
    url = data_dictionary['base_url'] + data_dictionary['endpoint_mapping'][endpoint]['endpoint']
    # "is not None" rather than "!= None": identity test is the Python idiom
    if id is not None:
        url += '/' + id
    if path is not None:
        url += '/' + data_dictionary['endpoint_mapping'][endpoint]['paths'][path]['path']
    # de-duplicate while preserving order (dict.fromkeys keeps first occurrence)
    unique_fields = list(dict.fromkeys(mapped_fields))
    # single join instead of the quadratic += loop; empty input still yields 'fields='
    parameters = 'fields=' + ','.join(unique_fields)
    headers = create_header(access_token)
    # call API
    response = requests.get(f'{url}?{parameters}', headers=headers)
    return response
StarcoderdataPython
255028
# Manga autoscroller: scrolls the active window continuously and, when the
# user-captured "Next Chapter" button appears on screen, clicks it to advance.
# Windows-only: relies on win32api/win32con mouse and key-state calls.
import pyautogui
import win32api, win32con
from time import sleep
import os

def clear_term():
    """
    Clears the terminal.
    """
    # 'cls' on Windows ('nt'), 'clear' elsewhere
    os.system('cls' if os.name =='nt' else 'clear')

def get_next_click_pos():
    """
    Gets the x, y coordinates of the next left-click.

    Returns:
        (int, int) - the x and y coordinates.
    """
    # 0x01 is VK_LBUTTON; GetKeyState returns a negative value while the
    # button is held down, so this first loop waits for the press...
    while win32api.GetKeyState(0x01) > -127:
        sleep(0.001)
    print("GOT")
    pos = win32api.GetCursorPos()
    # ...and this loop waits for the release, so one click is consumed whole.
    while win32api.GetKeyState(0x01) <= -127:
        sleep(0.001)
    return pos

def click(coords):
    """
    Raises a mouse left-click action at the provided coordinates.

    Params:
        coords (Tuple(int, int)): The x, y coordinates of the click as a tuple.
    """
    x, y = coords
    # Move the cursor first, then synthesize press + release events.
    win32api.SetCursorPos(coords)
    win32api.mouse_event(win32con.MOUSEEVENTF_LEFTDOWN, x, y, 0, 0)
    sleep(0.001)
    win32api.mouse_event(win32con.MOUSEEFF_LEFTUP, x, y, 0, 0) if False else win32api.mouse_event(win32con.MOUSEEVENTF_LEFTUP, x, y, 0, 0)

def scroll(interval):
    """
    Raises a mouse scroll event with the specified scroll amount.

    Params:
        interval (int): How much to scroll.
    """
    # Positive interval scrolls up, negative scrolls down.
    win32api.mouse_event(win32con.MOUSEEVENTF_WHEEL, 0, 0, interval, 0)

if __name__ == '__main__':
    clear_term()
    print('Welcome to the Manga Autoscroller. To begin, please go to your desired manga chapter.')
    print('Once you are there, we will need to determine where the "Next Chapter" button is at the end of a chapter.')
    input('Press Enter when you are ready...')
    clear_term()
    # Capture the two opposite corners of the "Next Chapter" button by
    # recording the user's next two clicks.
    print('Click as close as you can to the top left corner of the "Next Chapter" button as you can without pressing it.')
    tlx, tly = get_next_click_pos()
    clear_term()
    print('Click as close as you can to the bottom right corner of the "Next Chapter" button as you can without pressing it.')
    brx, bry = get_next_click_pos()
    clear_term()
    # region is (left, top, width, height) as expected by pyautogui.screenshot
    region = (tlx, tly, brx - tlx, bry - tly)
    next_button = pyautogui.screenshot(region=region)
    print('Image saved.')
    # Set and bound scrolling speed.
    scroll_speed = int(input('Choose a scroll speed from 1 to 50 (10 is usually good):\n'))
    scroll_speed = min(50, max(0, scroll_speed))
    # Set time delay before next page.
    delay = int(input('Choose a time delay in seconds for how long the bot should wait before moving to the next page:\n'))
    delay = max(0, delay)
    clear_term()
    input("Everything is ready. You can stop this program at any time by left-clicking your mouse.\nWhen you wish for scrolling to start, press Enter in this terminal...")
    sleep(3)
    # Main loop: scroll down until the user left-clicks (kill switch).
    # NOTE(review): locateOnScreen with confidence= requires opencv to be
    # installed alongside pyautogui — confirm in deployment environment.
    while win32api.GetKeyState(0x01) > -127:
        win32api.mouse_event(win32con.MOUSEEVENTF_WHEEL, tlx, tly, -scroll_speed, 0)
        # Search slightly beyond the captured region for the button image.
        pos = pyautogui.locateOnScreen(next_button, confidence=0.9, region=(region[0], region[1] - 100, region[2] + 100, region[3] + 100))
        if pos:
            # Button visible: wait the configured delay, click its center,
            # then park the cursor off the button so the next search is clean.
            sleep(delay)
            click((pos[0] + pos[2] // 2, pos[1] + pos[3] // 2))
            sleep(1)
            win32api.SetCursorPos((pos[0] + 300, pos[1]))
            sleep(1)
StarcoderdataPython
1791886
"""Unit tests for the passlocker class."""
import unittest
from passlocker import passlocker
# NOTE(review): the PyPI clipboard package is lower-case "pyperclip";
# the original "import Pyperclip" raises ImportError on case-sensitive setups.
import pyperclip


class Testpasslockers(unittest.TestCase):
    def setUp(self):
        """Create a fresh passlocker and empty account list before each test.

        unittest only invokes this hook when it is spelled setUp; the
        original lower-case "setup" was never called, so self.new_pass
        was undefined in every test.
        """
        # start each test from a clean class-level list
        passlocker.pass_list = []
        # The original built a tuple via implicit string concatenation
        # (missing commas); construct a real instance instead.
        # NOTE(review): argument order assumed (user_name, passlock,
        # account, email) from test_init below — confirm against passlocker.
        self.new_pass = passlocker("millywayne", "<PASSWORD>", "github", "<EMAIL>")

    def test_init(self):
        """Check initialization of user_name/passlock/account/email."""
        self.assertEqual(self.new_pass.user_name, "millywayne")
        self.assertEqual(self.new_pass.passlock, "<PASSWORD>")
        self.assertEqual(self.new_pass.account, "github")
        self.assertEqual(self.new_pass.email, "<EMAIL>")

    def test_save_passlocker(self):
        """Objects can be saved in the passlocker list."""
        self.new_pass.save_pass()
        self.assertEqual(len(passlocker.pass_list), 1)

    def test_saving_multiple_pass(self):
        """Multiple passlocks can be saved."""
        self.new_pass.save_pass()
        test_pass = passlocker("Facebook", "<PASSWORD>", "passlock")
        test_pass.save_pass()
        # The original asserted on a non-existent "pass_pass_list" attribute.
        self.assertEqual(len(passlocker.pass_list), 2)

    def test_delete_passlockers(self):
        """Saved objects can be deleted."""
        # The original "self.new_pass.save__pass" was a bare attribute
        # access (misspelled, never called), so nothing was ever saved.
        self.new_pass.save_pass()
        test_pass = passlocker("Facebook", "<PASSWORD>", "passlock")
        test_pass.save_pass()
        self.new_pass.delete_pass()
        self.assertEqual(len(passlocker.pass_list), 1)

    def test_search_for_pass(self):
        """An object can be found by account name."""
        # The original called the undefined method self.new_save_pass().
        self.new_pass.save_pass()
        test_pass = passlocker("Facebook", "<PASSWORD>", "passlock")
        test_pass.save_pass()
        find_pass = passlocker.find_account("Facebook")
        self.assertEqual(find_pass.account, test_pass.account)

    def test_confirm_pass_exists(self):
        """pass_exists() confirms the existence of saved accounts."""
        self.new_pass.save_pass()
        test_pass = passlocker("Facebook", "<PASSWORD>", "<PASSWORD>")
        test_pass.save_pass()
        pass_exists = passlocker.pass_exists("Facebook")
        self.assertTrue(pass_exists)

    def test_display_passlock(self):
        """display_pass() returns the stored list."""
        self.assertEqual(passlocker.display_pass(), passlocker.pass_list)

    def test_copy_passlock(self):
        """copy_password() places the password on the clipboard."""
        self.new_pass.save_pass()
        passlocker.copy_password("<PASSWORD>")
        # NOTE(review): reads .password while test_init checks .passlock —
        # confirm the real attribute name on the passlocker class.
        self.assertEqual(self.new_pass.password, pyperclip.paste())


if __name__ == '__main__':
    # The original file never invoked the test runner.
    unittest.main()
StarcoderdataPython
6500369
# <reponame>a6350202/harvester<gh_stars>0
"""Module-level helpers for the pandaharvester HTCondor submitter plugin:
CE (Computing Element) weighting/selection, condor macro substitution and
pilot option mapping."""
import os
import errno
import datetime
import tempfile
import threading
import random
from concurrent.futures import ThreadPoolExecutor
import re
from math import sqrt, log1p

from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestercore.plugin_base import PluginBase
from pandaharvester.harvestermisc.info_utils import PandaQueuesDict
from pandaharvester.harvestermisc.htcondor_utils import get_job_id_tuple_from_batchid
from pandaharvester.harvestermisc.htcondor_utils import CondorJobSubmit

# logger
baseLogger = core_utils.setup_logger('htcondor_submitter')


# Integer division round up
def _div_round_up(a, b):
    """Return ceil(a / b) using integer arithmetic."""
    return a // b + int(a % b > 0)


# Compute weight of each CE according to worker stat, return tuple(dict, total weight score)
def _get_ce_weighting(ce_endpoint_list=[], worker_ce_all_tuple=None):
    """Compute a selection weight for each CE endpoint from worker statistics.

    worker_ce_all_tuple is (worker_limits_dict, worker_ce_stats_dict,
    worker_ce_backend_throughput_dict, time_window, n_new_workers) as
    produced by HTCondorSubmitter.get_ce_statistics.

    Returns:
        (total_score, ce_weight_dict, ce_thruput_dict) where ce_weight_dict
        maps endpoint -> weight normalized so the weights sum to total_score
        (= 1000 * number of CEs), and ce_thruput_dict maps endpoint -> an
        estimated per-day throughput score.
    NOTE(review): mutable default ce_endpoint_list=[] is never mutated here,
    but a None default would be safer.
    """
    multiplier = 1000.
    n_ce = len(ce_endpoint_list)
    worker_limits_dict, worker_ce_stats_dict, worker_ce_backend_throughput_dict, time_window, n_new_workers = worker_ce_all_tuple
    N = float(n_ce)
    Q = float(worker_limits_dict['nQueueLimitWorker'])
    # W is currently unused below; kept for parity with the limits dict
    W = float(worker_limits_dict['maxWorkers'])
    # log1p-damped counts over all CEs: "init" includes everything seen in
    # the window, "fin" only those still submitted
    Q_good_init = float(sum(worker_ce_backend_throughput_dict[_ce][_st]
                            for _st in ('submitted', 'running', 'finished')
                            for _ce in worker_ce_backend_throughput_dict))
    Q_good_fin = float(sum(worker_ce_backend_throughput_dict[_ce][_st]
                           for _st in ('submitted',)
                           for _ce in worker_ce_backend_throughput_dict))
    thruput_avg = (log1p(Q_good_init) - log1p(Q_good_fin))
    n_new_workers = float(n_new_workers)

    def _get_thruput(_ce_endpoint):
        # Per-CE variant of the damped throughput computed above; CEs with
        # no history score 0.
        if _ce_endpoint not in worker_ce_backend_throughput_dict:
            q_good_init = 0.
            q_good_fin = 0.
        else:
            q_good_init = float(sum(worker_ce_backend_throughput_dict[_ce_endpoint][_st]
                                    for _st in ('submitted', 'running', 'finished')))
            q_good_fin = float(sum(worker_ce_backend_throughput_dict[_ce_endpoint][_st]
                                   for _st in ('submitted',)))
        thruput = (log1p(q_good_init) - log1p(q_good_fin))
        return thruput

    def _get_thruput_adj_ratio(thruput):
        # Ratio of this CE's throughput to the global average, floored by a
        # 1/N share; a zero average with zero throughput degrades to 1/N.
        try:
            thruput_adj_ratio = thruput/thruput_avg + 1/N
        except ZeroDivisionError:
            if thruput == 0.:
                thruput_adj_ratio = 1/N
            else:
                raise
        return thruput_adj_ratio
    ce_base_weight_sum = sum((_get_thruput_adj_ratio(_get_thruput(_ce))
                              for _ce in ce_endpoint_list))

    def _get_init_weight(_ce_endpoint):
        # Raw (un-normalized) weight: expected queue share minus what is
        # already queued at this CE; zero if the CE is over the queue limit.
        if _ce_endpoint not in worker_ce_stats_dict:
            q = 0.
            r = 0.
        else:
            q = float(worker_ce_stats_dict[_ce_endpoint]['submitted'])
            r = float(worker_ce_stats_dict[_ce_endpoint]['running'])
        # q_avg = sum(( float(worker_ce_stats_dict[_k]['submitted']) for _k in worker_ce_stats_dict )) / N
        # r_avg = sum(( float(worker_ce_stats_dict[_k]['running']) for _k in worker_ce_stats_dict )) / N
        if ( _ce_endpoint in worker_ce_stats_dict and q > Q ):
            return float(0)
        ce_base_weight_normalized = _get_thruput_adj_ratio(_get_thruput(_ce_endpoint))/ce_base_weight_sum
        q_expected = (Q + n_new_workers) * ce_base_weight_normalized
        # weight by difference
        ret = max((q_expected - q), 2**-10)
        # # Weight by running ratio
        # _weight_r = 1 + N*r/R
        if r == 0:
            # Penalty for dead CE (no running worker)
            ret = ret / (1 + log1p(q)**2)
        return ret
    init_weight_iterator = map(_get_init_weight, ce_endpoint_list)
    sum_of_weights = sum(init_weight_iterator)
    total_score = multiplier * N
    # regulator rescales raw weights so they sum to total_score
    try:
        regulator = total_score / sum_of_weights
    except ZeroDivisionError:
        regulator = 1.
    ce_weight_dict = {_ce: _get_init_weight(_ce) * regulator
                      for _ce in ce_endpoint_list}
    ce_thruput_dict = {_ce: _get_thruput(_ce) * 86400.
                       / time_window for _ce in ce_endpoint_list}
    return total_score, ce_weight_dict, ce_thruput_dict


# Choose a CE accroding to weighting
def _choose_ce(weighting):
    """Pick one CE endpoint by weighted random selection.

    weighting is the (total_score, ce_weight_dict, ce_thruput_dict) tuple
    from _get_ce_weighting. Returns the chosen endpoint, or the last
    positive-weight endpoint seen if the cursor never reaches the random
    threshold, or None when no endpoint has positive weight.
    """
    total_score, ce_weight_dict, ce_thruput_dict = weighting
    lucky_number = random.random() * total_score
    cur = 0.
    ce_now = None
    for _ce, _w in ce_weight_dict.items():
        if _w == 0.:
            continue
        ce_now = _ce
        cur += _w
        if cur >= lucky_number:
            return _ce
    if ce_weight_dict.get(ce_now, -1) > 0.:
        return ce_now
    else:
        return None


# Get better string to display the statistics and weightng of CEs
def _get_ce_stats_weighting_display(ce_list, worker_ce_all_tuple, ce_weighting):
    """Format worker limits plus per-CE stats/weights into one log string."""
    worker_limits_dict, worker_ce_stats_dict, worker_ce_backend_throughput_dict, time_window, n_new_workers = worker_ce_all_tuple
    total_score, ce_weight_dict, ce_thruput_dict = ce_weighting
    # fallbacks for CEs with no recorded stats in the window
    worker_ce_stats_dict_sub_default = {'submitted': 0, 'running': 0}
    worker_ce_backend_throughput_dict_sub_default = {'submitted': 0, 'running': 0, 'finished': 0}
    general_dict = {
            'maxWorkers': int(worker_limits_dict.get('maxWorkers')),
            'nQueueLimitWorker': int(worker_limits_dict.get('nQueueLimitWorker')),
            'nNewWorkers': int(n_new_workers),
            'history_time_window': int(time_window),
        }
    general_str = (
            'maxWorkers={maxWorkers} '
            'nQueueLimitWorker={nQueueLimitWorker} '
            'nNewWorkers={nNewWorkers} '
            'hist_timeWindow={history_time_window} '
        ).format(**general_dict)
    ce_str_list = []
    for _ce in ce_list:
        schema_sub_dict = {
                'submitted_now': int(worker_ce_stats_dict.get(_ce, worker_ce_stats_dict_sub_default).get('submitted')),
                'running_now': int(worker_ce_stats_dict.get(_ce, worker_ce_stats_dict_sub_default).get('running')),
                'submitted_history': int(worker_ce_backend_throughput_dict.get(_ce,
                                            worker_ce_backend_throughput_dict_sub_default).get('submitted')),
                'running_history': int(worker_ce_backend_throughput_dict.get(_ce,
                                            worker_ce_backend_throughput_dict_sub_default).get('running')),
                'finished_history': int(worker_ce_backend_throughput_dict.get(_ce,
                                            worker_ce_backend_throughput_dict_sub_default).get('finished')),
                'thruput_score': ce_thruput_dict.get(_ce),
                'weight_score': ce_weight_dict.get(_ce),
            }
        ce_str = (
                '"{_ce}": '
                'now_S={submitted_now} '
                'now_R={running_now} '
                'hist_S={submitted_history} '
                'hist_R={running_history} '
                'hist_F={finished_history} '
                'T={thruput_score:.02f} '
                'W={weight_score:.03f} '
            ).format(_ce=_ce, **schema_sub_dict)
        ce_str_list.append(ce_str)
    stats_weighting_display_str = general_str + ' ; ' + ' , '.join(ce_str_list)
    return stats_weighting_display_str


# Replace condor Marco from SDF file, return string
def _condor_macro_replace(string, **kwarg):
    """Substitute $(Cluster) and $(Process) condor macros in *string* with
    the ClusterId / ProcId keyword values; returns the new string."""
    new_string = string
    # keys are regex patterns matching the literal $(...) macro text
    macro_map = {
                '\$\(Cluster\)': str(kwarg['ClusterId']),
                '\$\(Process\)': str(kwarg['ProcId']),
                }
    for k, v in macro_map.items():
        new_string = re.sub(k, v, new_string)
    return new_string


# Parse resource type from string for Unified PanDA Queue
def _get_resource_type(string, is_unified_queue, is_pilot_option=False, pilot_version='1'):
    """Return the resource-type token for a unified PanDA queue.

    For non-unified queues or unrecognized values returns ''.  When
    is_pilot_option is True the value is rendered as a pilot command-line
    option ('--resource-type X' for pilot 2, '-R X' for pilot 1);
    otherwise the bare resource-type string is returned.
    """
    string = str(string)
    if not is_unified_queue:
        ret = ''
    elif string in set(['SCORE', 'MCORE', 'SCORE_HIMEM', 'MCORE_HIMEM']):
        if is_pilot_option:
            if pilot_version == '2':
                ret = '--resource-type {0}'.format(string)
            else:
                ret = '-R {0}'.format(string)
        else:
            ret = string
    else:
        ret = ''
    return ret


# Map "pilotType" (defined in harvester) to prodSourceLabel and pilotType option (defined in pilot, -i option)
# and piloturl (pilot option --piloturl)
# Depending on pilot version 1 or 2
def _get_prodsourcelabel_pilotypeopt_piloturlstr(pilot_type, pilot_version='1'):
    """Map a harvester pilotType to (prodSourceLabel, pilot -i option,
    piloturl option string); returns None for unknown pilot types."""
    if pilot_version == '2':
        # pilot 2
        pt_psl_map = {
            'RC': ('rc_test2', 'RC', '--piloturl http://cern.ch/atlas-panda-pilot/pilot2-dev.tar.gz'),
            'ALRB': ('rc_alrb', 'ALRB', ''),
            'PT': ('ptest', 'PR', ''),
            }
    else:
        # pilot 1, need not piloturl since wrapper covers it
        pt_psl_map = {
            'RC': ('rc_test', 'RC', ''),
            'ALRB': ('rc_alrb', 'ALRB', ''),
            'PT': ('ptest', 'PR', ''),
            }
    pilot_opt_tuple = pt_psl_map.get(pilot_type, None)
    return pilot_opt_tuple
# submit a bag of workers
def submit_bag_of_workers(data_list):
    """Submit a batch of workers to HTCondor, grouped by submission host.

    data_list is a list of per-worker dicts produced by
    HTCondorSubmitter.submit_workers/_handle_one_worker (keys include
    'workspec', 'to_submit', 'ce_info_dict', 'batch_log_dict', 'use_spool'
    plus the make_a_jdl keyword arguments).

    Returns a list, in the same order as data_list, of
    ((status, error_string), changed_attributes_dict) tuples where status is
    True on success and None on failure.
    """
    # make logger
    tmpLog = core_utils.make_logger(baseLogger, method_name='submit_bag_of_workers')
    # keep order of workers in data_list
    workerIDs_list = [ data['workspec'].workerID for data in data_list ]
    # initialization
    worker_retval_map = {}
    worker_data_map = {}
    host_jdl_list_workerid_map = {}
    # go
    for data in data_list:
        workspec = data['workspec']
        workerID = workspec.workerID
        worker_data_map[workerID] = data
        to_submit = data['to_submit']
        # no need to submit bad worker
        if not to_submit:
            errStr = '{0} not submitted due to incomplete data of the worker'.format(workerID)
            tmpLog.warning(errStr)
            tmpRetVal = (None, errStr)
            # return tmpRetVal, workspec.get_changed_attributes()
            worker_retval_map[workerID] = (tmpRetVal, workspec.get_changed_attributes())
        # attributes
        # NOTE(review): even when to_submit is False the code falls through
        # to this try block; it only skips JDL creation via the KeyError path
        # when the per-worker dict is incomplete — confirm this is intended.
        try:
            ce_info_dict = data['ce_info_dict']
            batch_log_dict = data['batch_log_dict']
            use_spool = data['use_spool']
        except KeyError:
            errStr = '{0} not submitted due to incomplete data of the worker'.format(workerID)
            tmpLog.warning(errStr)
            tmpRetVal = (None, errStr)
            # return tmpRetVal, workspec.get_changed_attributes()
            worker_retval_map[workerID] = (tmpRetVal, workspec.get_changed_attributes())
        else:
            workspec.reset_changed_list()
            # fill in host_jdl_list_workerid_map
            a_jdl = make_a_jdl(**data)
            val = (workspec, a_jdl)
            try:
                host_jdl_list_workerid_map[workspec.submissionHost].append(val)
            except KeyError:
                host_jdl_list_workerid_map[workspec.submissionHost] = [val]
    # loop over submissionHost
    # NOTE(review): use_spool below is whatever value the *last* worker in the
    # first loop left behind, not necessarily per-host — verify upstream that
    # it is uniform across the bag.
    for host, val_list in host_jdl_list_workerid_map.items():
        # make jdl string of workers
        jdl_list = [ val[1] for val in val_list ]
        # condor job submit object
        tmpLog.debug('submitting to submissionHost={0}'.format(host))
        # submit
        try:
            condor_job_submit = CondorJobSubmit(id=host)
            batchIDs_list, ret_err_str = condor_job_submit.submit(jdl_list, use_spool=use_spool)
        except Exception as e:
            batchIDs_list = None
            ret_err_str = 'Exception {0}: {1}'.format(e.__class__.__name__, e)
        # result
        if batchIDs_list:
            # submitted
            n_workers = len(val_list)
            tmpLog.debug('submitted {0} workers to submissionHost={1}'.format(n_workers, host))
            for val_i in range(n_workers):
                val = val_list[val_i]
                workspec = val[0]
                # got batchID
                workspec.batchID = batchIDs_list[val_i]
                tmpLog.debug('workerID={0} submissionHost={1} batchID={2}'.format(
                                workspec.workerID, workspec.submissionHost, workspec.batchID))
                # get worker data
                data = worker_data_map[workspec.workerID]
                # set computingElement
                ce_info_dict = data['ce_info_dict']
                workspec.computingElement = ce_info_dict.get('ce_endpoint', '')
                # set log: resolve $(Cluster)/$(Process) macros now the real
                # batch id is known
                batch_log_dict = data['batch_log_dict']
                (clusterid, procid) = get_job_id_tuple_from_batchid(workspec.batchID)
                batch_log = _condor_macro_replace(batch_log_dict['batch_log'], ClusterId=clusterid, ProcId=procid)
                batch_stdout = _condor_macro_replace(batch_log_dict['batch_stdout'], ClusterId=clusterid, ProcId=procid)
                batch_stderr = _condor_macro_replace(batch_log_dict['batch_stderr'], ClusterId=clusterid, ProcId=procid)
                # jdl path derived from stderr path by swapping the extension
                try:
                    batch_jdl = '{0}.jdl'.format(batch_stderr[:-4])
                except Exception:
                    batch_jdl = None
                workspec.set_log_file('batch_log', batch_log)
                workspec.set_log_file('stdout', batch_stdout)
                workspec.set_log_file('stderr', batch_stderr)
                workspec.set_log_file('jdl', batch_jdl)
                if not workspec.get_jobspec_list():
                    tmpLog.debug('No jobspec associated in the worker of workerID={0}'.format(workspec.workerID))
                else:
                    for jobSpec in workspec.get_jobspec_list():
                        # using batchLog and stdOut URL as pilotID and pilotLog
                        jobSpec.set_one_attribute('pilotID', workspec.workAttributes['stdOut'])
                        jobSpec.set_one_attribute('pilotLog', workspec.workAttributes['batchLog'])
                tmpLog.debug('Done set_log_file after submission of workerID={0}'.format(workspec.workerID))
                tmpRetVal = (True, '')
                worker_retval_map[workspec.workerID] = (tmpRetVal, workspec.get_changed_attributes())
        else:
            # failed
            tmpLog.debug('failed to submit workers to submissionHost={0} ; {1}'.format(host, ret_err_str))
            for val in val_list:
                workspec = val[0]
                errStr = 'submission failed: {0}'.format(ret_err_str)
                tmpLog.error(errStr)
                tmpRetVal = (None, errStr)
                worker_retval_map[workspec.workerID] = (tmpRetVal, workspec.get_changed_attributes())
    # make return list
    retValList = [ worker_retval_map[w_id] for w_id in workerIDs_list ]
    return retValList


# make a condor jdl for a worker
def make_a_jdl(workspec, template, n_core_per_node, log_dir, panda_queue_name, executable_file,
               x509_user_proxy, log_subdir=None, ce_info_dict=dict(), batch_log_dict=dict(),
               special_par='', harvester_queue_config=None, is_unified_queue=False,
               pilot_version='1', **kwarg):
    """Render the submit description file (JDL) for one worker.

    Fills *template* with resource requests derived from the workspec,
    CE info, pilot options and log paths, writes it to a NamedTemporaryFile
    under the worker's access point, and returns the JDL string.
    """
    # make logger
    tmpLog = core_utils.make_logger(baseLogger, 'workerID={0}'.format(workspec.workerID),
                                    method_name='make_a_jdl')
    # Note: In workspec, unit of minRamCount and of maxDiskCount are both MB.
    #       In HTCondor SDF, unit of request_memory is MB, and request_disk is KB.
    n_core_total = workspec.nCore if workspec.nCore else n_core_per_node
    request_ram = max(workspec.minRamCount, 1 * n_core_total) if workspec.minRamCount \
                    else 1 * n_core_total
    request_disk = workspec.maxDiskCount * 1024 if workspec.maxDiskCount else 1
    request_walltime = workspec.maxWalltime if workspec.maxWalltime else 0
    io_intensity = workspec.ioIntensity if workspec.ioIntensity else 0
    # copy so per-worker tweaks below do not leak into the (mutable default)
    # dict arguments of later calls
    ce_info_dict = ce_info_dict.copy()
    batch_log_dict = batch_log_dict.copy()
    # possible override by AGIS special_par
    if special_par:
        special_par_attr_list = ['queue', 'maxWallTime', 'xcount', ]
        _match_special_par_dict = { attr: re.search('\({attr}=([^)]+)\)'.format(attr=attr), special_par) \
                                        for attr in special_par_attr_list }
        for attr, _match in _match_special_par_dict.items():
            if not _match:
                continue
            elif attr == 'queue':
                ce_info_dict['ce_queue_name'] = str(_match.group(1))
            elif attr == 'maxWallTime':
                request_walltime = int(_match.group(1))
            elif attr == 'xcount':
                n_core_total = int(_match.group(1))
            tmpLog.debug('job attributes override by AGIS special_par: {0}={1}'.format(attr, str(_match.group(1))))
    # derived job attributes
    n_node = _div_round_up(n_core_total, n_core_per_node)
    request_ram_per_core = _div_round_up(request_ram * n_node, n_core_total)
    request_cputime = request_walltime * n_core_total
    request_walltime_minute = _div_round_up(request_walltime, 60)
    request_cputime_minute = _div_round_up(request_cputime, 60)
    # decide prodSourceLabel
    pilot_opt_tuple = _get_prodsourcelabel_pilotypeopt_piloturlstr(workspec.pilotType, pilot_version)
    if pilot_opt_tuple is None:
        prod_source_label = harvester_queue_config.get_source_label()
        pilot_type_opt = workspec.pilotType
        pilot_url_str = ''
    else:
        prod_source_label, pilot_type_opt, pilot_url_str = pilot_opt_tuple
    # open tmpfile as submit description file
    tmpFile = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='_submit.sdf',
                                          dir=workspec.get_access_point())
    # fill in template string
    jdl_str = template.format(
                sdfPath=tmpFile.name,
                executableFile=executable_file,
                nCorePerNode=n_core_per_node,
                nCoreTotal=n_core_total,
                nNode=n_node,
                requestRam=request_ram,
                requestRamPerCore=request_ram_per_core,
                requestDisk=request_disk,
                requestWalltime=request_walltime,
                requestWalltimeMinute=request_walltime_minute,
                requestCputime=request_cputime,
                requestCputimeMinute=request_cputime_minute,
                accessPoint=workspec.accessPoint,
                harvesterID=harvester_config.master.harvester_id,
                workerID=workspec.workerID,
                computingSite=workspec.computingSite,
                pandaQueueName=panda_queue_name,
                x509UserProxy=x509_user_proxy,
                ceEndpoint=ce_info_dict.get('ce_endpoint', ''),
                ceHostname=ce_info_dict.get('ce_hostname', ''),
                ceFlavour=ce_info_dict.get('ce_flavour', ''),
                ceJobmanager=ce_info_dict.get('ce_jobmanager', ''),
                ceQueueName=ce_info_dict.get('ce_queue_name', ''),
                ceVersion=ce_info_dict.get('ce_version', ''),
                logDir=log_dir,
                logSubdir=log_subdir,
                gtag=batch_log_dict.get('gtag', 'fake_GTAG_string'),
                prodSourceLabel=prod_source_label,
                resourceType=_get_resource_type(workspec.resourceType, is_unified_queue),
                pilotResourceTypeOption=_get_resource_type(workspec.resourceType, is_unified_queue, True, pilot_version),
                ioIntensity=io_intensity,
                pilotType=pilot_type_opt,
                pilotUrlOption=pilot_url_str,
                )
    # save jdl to submit description file
    tmpFile.write(jdl_str)
    tmpFile.close()
    tmpLog.debug('saved sdf at {0}'.format(tmpFile.name))
    tmpLog.debug('done')
    return jdl_str


# parse log, stdout, stderr filename
def parse_batch_job_filename(value_str, file_dir, batchID, guess=False):
    """Resolve the actual on-disk filename for a log/stdout/stderr path.

    With guess=True just returns the basename (macros may still be
    unresolved).  Otherwise strips macro-like tokens from the basename,
    then scans file_dir for a file matching prefix.*.batchID.*.suffix;
    returns the first match or None.
    """
    _filename = os.path.basename(value_str)
    if guess:
        # guess file name before files really created; possibly containing condor macros
        return _filename
    else:
        # drop {x}, [x], (x), #x# and $ macro fragments before splitting on '.'
        _sanitized_list = re.sub('\{(\w+)\}|\[(\w+)\]|\((\w+)\)|#(\w+)#|\$', '',  _filename).split('.')
        _prefix = _sanitized_list[0]
        _suffix = _sanitized_list[-1] if len(_sanitized_list) > 1 else ''
        for _f in os.listdir(file_dir):
            if re.match('{prefix}(.*)\.{batchID}\.(.*)\.{suffix}'.format(prefix=_prefix, suffix=_suffix, batchID=batchID), _f):
                return _f
        return None


# submitter for HTCONDOR batch system
class HTCondorSubmitter(PluginBase):
    """Harvester submitter plugin that creates workers as HTCondor jobs,
    optionally routing them to ATLAS Grid CEs chosen by weighted statistics."""
    # constructor
    def __init__(self, **kwarg):
        # PluginBase.__init__ sets attributes from the queue configuration;
        # each try/except below supplies a default when the config omits one.
        self.logBaseURL = None
        PluginBase.__init__(self, **kwarg)
        # number of processes
        try:
            self.nProcesses
        except AttributeError:
            self.nProcesses = 1
        else:
            if (not self.nProcesses) or (self.nProcesses < 1):
                self.nProcesses = 1
        # executable file
        try:
            self.executableFile
        except AttributeError:
            self.executableFile = None
        # condor log directory
        try:
            self.logDir
        except AttributeError:
            self.logDir = os.getenv('TMPDIR') or '/tmp'
        # x509 proxy
        try:
            self.x509UserProxy
        except AttributeError:
            self.x509UserProxy = os.getenv('X509_USER_PROXY')
        # ATLAS AGIS
        try:
            self.useAtlasAGIS = bool(self.useAtlasAGIS)
        except AttributeError:
            self.useAtlasAGIS = False
        # ATLAS Grid CE, requiring AGIS
        try:
            self.useAtlasGridCE = bool(self.useAtlasGridCE)
        except AttributeError:
            self.useAtlasGridCE = False
        finally:
            # Grid CE mode implies AGIS mode
            self.useAtlasAGIS = self.useAtlasAGIS or self.useAtlasGridCE
        # sdf template directories of CEs
        try:
            self.CEtemplateDir
        except AttributeError:
            self.CEtemplateDir = ''
        # remote condor schedd and pool name (collector), and spool option
        try:
            self.condorSchedd
        except AttributeError:
            self.condorSchedd = None
        try:
            self.condorPool
        except AttributeError:
            self.condorPool = None
        # condor spool mechanism. If False, need shared FS across remote schedd
        try:
            self.useSpool
        except AttributeError:
            self.useSpool = False
        # number of workers less than this number will be bulkily submitted in only one schedd
        try:
            self.minBulkToRamdomizedSchedd
        except AttributeError:
            self.minBulkToRamdomizedSchedd = 20
        # record of information of CE statistics
        self.ceStatsLock = threading.Lock()
        self.ceStats = dict()

    # get CE statistics of a site
    def get_ce_statistics(self, site_name, n_new_workers, time_window=21600):
        """Fetch worker limits and per-CE statistics for *site_name* from the
        harvester DB, over the past *time_window* seconds.

        Returns (worker_limits_dict, worker_ce_stats_dict,
        worker_ce_backend_throughput_dict, time_window, n_new_workers).
        NOTE(review): self.ceStats is checked under the lock but never
        populated here, so the cache lookup can never hit — confirm whether
        caching was intended.
        """
        if site_name in self.ceStats:
            return self.ceStats[site_name]
        with self.ceStatsLock:
            # double-check inside the lock in case another thread filled it
            if site_name in self.ceStats:
                return self.ceStats[site_name]
            else:
                worker_limits_dict = self.dbInterface.get_worker_limits(self.queueName)
                worker_ce_stats_dict = self.dbInterface.get_worker_ce_stats(self.queueName)
                worker_ce_backend_throughput_dict = self.dbInterface.get_worker_ce_backend_throughput(self.queueName, time_window=time_window)
                return (worker_limits_dict, worker_ce_stats_dict, worker_ce_backend_throughput_dict,
                        time_window, n_new_workers)

    # submit workers
    def submit_workers(self, workspec_list):
        """Prepare per-worker submit data in parallel, submit all workers via
        submit_bag_of_workers, then propagate changed workspec attributes.

        Returns a list of (status, error_string) tuples, one per workspec.
        """
        tmpLog = self.make_logger(baseLogger, method_name='submit_workers')
        nWorkers = len(workspec_list)
        tmpLog.debug('start nWorkers={0}'.format(nWorkers))
        # whether to submit any worker
        to_submit_any = True
        # get log subdirectory name from timestamp
        timeNow = datetime.datetime.utcnow()
        log_subdir = timeNow.strftime('%y-%m-%d_%H')
        log_subdir_path = os.path.join(self.logDir, log_subdir)
        # local (non-spool) submission needs the log directory to exist here
        if self.condorSchedd is None or not self.useSpool:
            try:
                os.mkdir(log_subdir_path)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
                else:
                    pass
        # get info from harvester queue config
        _queueConfigMapper = QueueConfigMapper()
        harvester_queue_config = _queueConfigMapper.get_queue(self.queueName)
        # get queue info from AGIS by cacher in db
        if self.useAtlasAGIS:
            panda_queues_dict = PandaQueuesDict()
            panda_queue_name = panda_queues_dict.get_panda_queue_name(self.queueName)
            this_panda_queue_dict = panda_queues_dict.get(self.queueName, dict())
            # tmpLog.debug('panda_queues_name and queue_info: {0}, {1}'.format(self.queueName, panda_queues_dict[self.queueName]))
        else:
            panda_queues_dict = dict()
            panda_queue_name = self.queueName
            this_panda_queue_dict = dict()
        # get default information from queue info
        n_core_per_node_from_queue = this_panda_queue_dict.get('corecount', 1) if this_panda_queue_dict.get('corecount', 1) else 1
        is_unified_queue = this_panda_queue_dict.get('capability', '') == 'ucore'
        pilot_version_orig = str(this_panda_queue_dict.get('pilot_version', ''))
        pilot_version_suffix_str = '_pilot2' if pilot_version_orig == '2' else ''
        # get override requirements from queue configured
        try:
            n_core_per_node = self.nCorePerNode if self.nCorePerNode else n_core_per_node_from_queue
        except AttributeError:
            n_core_per_node = n_core_per_node_from_queue
        # deal with Condor schedd and central managers; make a random list the choose
        n_bulks = _div_round_up(nWorkers, self.minBulkToRamdomizedSchedd)
        if isinstance(self.condorSchedd, list) and len(self.condorSchedd) > 0:
            if isinstance(self.condorPool, list) and len(self.condorPool) > 0:
                orig_list = list(zip(self.condorSchedd, self.condorPool))
            else:
                orig_list = [ (_schedd, self.condorPool) for _schedd in self.condorSchedd ]
            if n_bulks < len(orig_list):
                schedd_pool_choice_list = random.sample(orig_list, n_bulks)
            else:
                schedd_pool_choice_list = orig_list
        else:
            schedd_pool_choice_list = [(self.condorSchedd, self.condorPool)]
        # deal with CE
        special_par = ''
        ce_weighting = None
        if self.useAtlasGridCE:
            # If ATLAS Grid CE mode used
            tmpLog.debug('Using ATLAS Grid CE mode...')
            queues_from_queue_list = this_panda_queue_dict.get('queues', [])
            special_par = this_panda_queue_dict.get('special_par', '')
            ce_auxilary_dict = {}
            for _queue_dict in queues_from_queue_list:
                # keep only active CEs of a supported flavour
                if not ( _queue_dict.get('ce_endpoint')
                        and str(_queue_dict.get('ce_state', '')).upper() == 'ACTIVE'
                        and str(_queue_dict.get('ce_flavour', '')).lower() in set(['arc-ce', 'cream-ce', 'htcondor-ce']) ):
                    continue
                ce_endpoint = _queue_dict.get('ce_endpoint')
                # prefer a named queue over the 'default' entry for the same endpoint
                if ( ce_endpoint in ce_auxilary_dict
                    and str(_queue_dict.get('ce_queue_name', '')).lower() == 'default' ):
                    pass
                else:
                    ce_auxilary_dict[ce_endpoint] = _queue_dict
            # qualified CEs from AGIS info
            n_qualified_ce = len(ce_auxilary_dict)
            if n_qualified_ce > 0:
                # Get CE weighting
                tmpLog.debug('Get CE weighting')
                worker_ce_all_tuple = self.get_ce_statistics(self.queueName, nWorkers)
                ce_weighting = _get_ce_weighting(ce_endpoint_list=list(ce_auxilary_dict.keys()),
                                                    worker_ce_all_tuple=worker_ce_all_tuple)
                stats_weighting_display_str = _get_ce_stats_weighting_display(
                                                ce_auxilary_dict.keys(), worker_ce_all_tuple, ce_weighting)
                tmpLog.debug('CE stats and weighting: {0}'.format(stats_weighting_display_str))
            else:
                tmpLog.error('No valid CE endpoint found')
                to_submit_any = False

        def _handle_one_worker(workspec, to_submit=to_submit_any):
            """Assemble the submit-data dict for a single workspec (run in a
            thread pool); returns the dict consumed by submit_bag_of_workers."""
            # make logger
            tmpLog = core_utils.make_logger(baseLogger, 'workerID={0}'.format(workspec.workerID),
                                            method_name='_handle_one_worker')
            ce_info_dict = dict()
            batch_log_dict = dict()
            data = {'workspec': workspec,
                    'to_submit': to_submit,}
            if to_submit:
                if self.useAtlasGridCE:
                    # choose a CE
                    tmpLog.info('choose a CE...')
                    ce_chosen = _choose_ce(ce_weighting)
                    try:
                        ce_info_dict = ce_auxilary_dict[ce_chosen].copy()
                    except KeyError:
                        tmpLog.info('Problem choosing CE with weighting. Choose an arbitrary CE endpoint')
                        ce_info_dict = random.choice(list(ce_auxilary_dict.values())).copy()
                    # go on info of the CE
                    ce_endpoint_from_queue = ce_info_dict.get('ce_endpoint', '')
                    ce_flavour_str = str(ce_info_dict.get('ce_flavour', '')).lower()
                    ce_version_str = str(ce_info_dict.get('ce_version', '')).lower()
                    # hostname = endpoint with any ':port' suffix stripped
                    ce_info_dict['ce_hostname'] = re.sub(':\w*', '',  ce_endpoint_from_queue)
                    if ce_info_dict['ce_hostname'] == ce_endpoint_from_queue:
                        # add default port to ce_endpoint if missing
                        default_port_map = {
                                'cream-ce': 8443,
                                'arc-ce': 2811,
                                'htcondor-ce': 9619,
                            }
                        if ce_flavour_str in default_port_map:
                            default_port = default_port_map[ce_flavour_str]
                            ce_info_dict['ce_endpoint'] = '{0}:{1}'.format(ce_endpoint_from_queue, default_port)
                    tmpLog.debug('For site {0} got pilot version: "{1}"; CE endpoint: "{2}", flavour: "{3}"'.format(
                                    self.queueName, pilot_version_orig, ce_endpoint_from_queue, ce_flavour_str))
                    # pick the per-flavour SDF template from CEtemplateDir
                    if os.path.isdir(self.CEtemplateDir) and ce_flavour_str:
                        sdf_template_filename = '{ce_flavour_str}{pilot_version_suffix_str}.sdf'.format(
                                                    ce_flavour_str=ce_flavour_str, pilot_version_suffix_str=pilot_version_suffix_str)
                        self.templateFile = os.path.join(self.CEtemplateDir, sdf_template_filename)
                else:
                    try:
                        # Manually define site condor schedd as ceHostname and central manager as ceEndpoint
                        if self.ceHostname and isinstance(self.ceHostname, list) and len(self.ceHostname) > 0:
                            if isinstance(self.ceEndpoint, list) and len(self.ceEndpoint) > 0:
                                ce_info_dict['ce_hostname'], ce_info_dict['ce_endpoint'] = random.choice(list(zip(self.ceHostname, self.ceEndpoint)))
                            else:
                                ce_info_dict['ce_hostname'] = random.choice(self.ceHostname)
                                ce_info_dict['ce_endpoint'] = self.ceEndpoint
                        else:
                            ce_info_dict['ce_hostname'] = self.ceHostname
                            ce_info_dict['ce_endpoint'] = self.ceEndpoint
                    except AttributeError:
                        pass
                # template for batch script
                try:
                    tmpFile = open(self.templateFile)
                    sdf_template_raw = tmpFile.read()
                    tmpFile.close()
                except AttributeError:
                    tmpLog.error('No valid templateFile found. Maybe templateFile, CEtemplateDir invalid, or no valid CE found')
                    to_submit = False
                    return data
                else:
                    # get batch_log, stdout, stderr filename, and remobe commented liness
                    # NOTE(review): batch_log_value/stdout_value/stderr_value stay
                    # unbound if the template lacks log/output/error lines, which
                    # would raise NameError below — confirm templates always set them.
                    sdf_template_str_list = []
                    for _line in sdf_template_raw.split('\n'):
                        if _line.startswith('#'):
                            continue
                        sdf_template_str_list.append(_line)
                        _match_batch_log = re.match('log = (.+)', _line)
                        _match_stdout = re.match('output = (.+)', _line)
                        _match_stderr = re.match('error = (.+)', _line)
                        if _match_batch_log:
                            batch_log_value = _match_batch_log.group(1)
                            continue
                        if _match_stdout:
                            stdout_value = _match_stdout.group(1)
                            continue
                        if _match_stderr:
                            stderr_value = _match_stderr.group(1)
                            continue
                    sdf_template = '\n'.join(sdf_template_str_list)
                    # Choose from Condor schedd and central managers
                    condor_schedd, condor_pool = random.choice(schedd_pool_choice_list)
                    # set submissionHost
                    if not condor_schedd and not condor_pool:
                        workspec.submissionHost = 'LOCAL'
                    else:
                        workspec.submissionHost = '{0},{1}'.format(condor_schedd, condor_pool)
                    tmpLog.debug('set submissionHost={0}'.format(workspec.submissionHost))
                    # Log Base URL: substitute [ScheddHostname] placeholder with
                    # the bare hostname (user@ prefix and :port stripped)
                    if self.logBaseURL and '[ScheddHostname]' in self.logBaseURL:
                        schedd_hostname = re.sub(r'(?:[a-zA-Z0-9_.\-]*@)?([a-zA-Z0-9.\-]+)(?::[0-9]+)?',
                                                    lambda matchobj: matchobj.group(1) if matchobj.group(1) else '',
                                                    condor_schedd)
                        log_base_url = re.sub(r'\[ScheddHostname\]', schedd_hostname, self.logBaseURL)
                    else:
                        log_base_url = self.logBaseURL
                    # URLs for log files
                    if not (log_base_url is None):
                        if workspec.batchID:
                            batchID = workspec.batchID
                            guess = False
                        else:
                            batchID = ''
                            guess = True
                        batch_log_filename = parse_batch_job_filename(value_str=batch_log_value, file_dir=log_subdir_path, batchID=batchID, guess=guess)
                        stdout_path_file_name = parse_batch_job_filename(value_str=stdout_value, file_dir=log_subdir_path, batchID=batchID, guess=guess)
                        stderr_path_filename = parse_batch_job_filename(value_str=stderr_value, file_dir=log_subdir_path, batchID=batchID, guess=guess)
                        batch_log = '{0}/{1}/{2}'.format(log_base_url, log_subdir, batch_log_filename)
                        batch_stdout = '{0}/{1}/{2}'.format(log_base_url, log_subdir, stdout_path_file_name)
                        batch_stderr = '{0}/{1}/{2}'.format(log_base_url, log_subdir, stderr_path_filename)
                        workspec.set_log_file('batch_log', batch_log)
                        workspec.set_log_file('stdout', batch_stdout)
                        workspec.set_log_file('stderr', batch_stderr)
                        batch_log_dict['batch_log'] = batch_log
                        batch_log_dict['batch_stdout'] = batch_stdout
                        batch_log_dict['batch_stderr'] = batch_stderr
                        batch_log_dict['gtag'] = workspec.workAttributes['stdOut']
                        tmpLog.debug('Done set_log_file before submission')
                    tmpLog.debug('Done jobspec attribute setting')
                # set data dict
                data.update({
                        'workspec': workspec,
                        'to_submit': to_submit,
                        'template': sdf_template,
                        'executable_file': self.executableFile,
                        'log_dir': self.logDir,
                        'log_subdir': log_subdir,
                        'n_core_per_node': n_core_per_node,
                        'panda_queue_name': panda_queue_name,
                        'x509_user_proxy': self.x509UserProxy,
                        'ce_info_dict': ce_info_dict,
                        'batch_log_dict': batch_log_dict,
                        'special_par': special_par,
                        'harvester_queue_config': harvester_queue_config,
                        'is_unified_queue': is_unified_queue,
                        'condor_schedd': condor_schedd,
                        'condor_pool': condor_pool,
                        'use_spool': self.useSpool,
                        'pilot_version': pilot_version_orig,
                        })
            return data

        def _propagate_attributes(workspec, tmpVal):
            """Apply the changed-attribute dict from submission back onto the
            workspec; returns the (status, error_string) tuple."""
            # make logger
            tmpLog = core_utils.make_logger(baseLogger, 'workerID={0}'.format(workspec.workerID),
                                            method_name='_propagate_attributes')
            (retVal, tmpDict) = tmpVal
            workspec.set_attributes_with_dict(tmpDict)
            tmpLog.debug('Done workspec attributes propagation')
            return retVal

        tmpLog.debug('finished preparing worker attributes')
        # map(_handle_one_worker, workspec_list)
        with ThreadPoolExecutor(self.nProcesses * 4) as thread_pool:
            dataIterator = thread_pool.map(_handle_one_worker, workspec_list)
        tmpLog.debug('{0} workers handled'.format(nWorkers))
        # submit
        retValList = submit_bag_of_workers(list(dataIterator))
        tmpLog.debug('{0} workers submitted'.format(nWorkers))
        # propagate changed attributes
        with ThreadPoolExecutor(self.nProcesses) as thread_pool:
            retIterator = thread_pool.map(lambda _wv_tuple: _propagate_attributes(*_wv_tuple),
                                            zip(workspec_list, retValList))
        retList = list(retIterator)
        tmpLog.debug('done')
        return retList
StarcoderdataPython
6637560
"""Ndnt related classes.""" from pathlib import Path import sys from ndnt.arguments import Arguments from ndnt.extension import Extension from ndnt.paths import ExcludeGitignoredPaths, ExtensionPaths, FilesPaths from ndnt.summary import DirectorySummary, FileSummary class Ndnts: """Main class of this tool.""" def __init__(self, arguments: Arguments): self.arguments = arguments def run(self): """Run itself.""" paths = self.arguments.paths() extension = self.arguments.extension() no_gitignore = self.arguments.no_gitignore() for path in paths: Ndnt(path, extension, no_gitignore).run() class Ndnt: """Analyze of one path.""" def __init__(self, path: Path, extension: Extension, no_gitignore: bool): self.path = path self.extension = extension self.no_gitignore = no_gitignore def run(self): """Run itself.""" no_gitignore = ( self.no_gitignore or not (self.path / ".gitignore").exists() ) if self.path.is_file(): summary = FileSummary(self.path) elif self.path.is_dir() and no_gitignore: summary = DirectorySummary( ExtensionPaths(FilesPaths(self.path), self.extension) ) elif self.path.is_dir(): summary = DirectorySummary( ExtensionPaths( ExcludeGitignoredPaths( self.path, self.path / ".gitignore" ), self.extension, ) ) else: print("Something is wrong with provided path.") return summary.print(sys.stdout)
StarcoderdataPython
60160
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from functools import partial

import torch
from torch import nn
from timm.models.layers import DropPath
from einops.layers.torch import Reduce

from .layers import DWConv, SPATIAL_FUNC, ChannelMLP, STEM_LAYER
from .misc import reshape2n


class MixingBlock(nn.Module):
    """One SPACH mixing block: optional spatial mixing + channel MLP.

    The block applies (in order):
      1. an optional pre-normed, residual spatial function, scaled by a
         learnable per-channel ``gamma_1`` when ``scaled`` is True;
      2. an optional conditional positional encoding (depthwise conv);
      3. a pre-normed, residual channel MLP.
    Inputs are token sequences — presumably shaped (batch, N, dim), which
    matches the (1, 1, dim) broadcast of ``gamma_1``.
    """

    def __init__(self,
                 dim,
                 spatial_func=None, scaled=True, init_values=1e-4, shared_spatial_func=False,
                 norm_layer=partial(nn.LayerNorm, eps=1e-6), act_layer=nn.GELU, drop_path=0., cpe=True,
                 num_heads=None, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.,  # attn
                 in_features=None, hidden_features=None, drop=0.,  # mlp
                 channel_ratio=2.0
                 ):
        super(MixingBlock, self).__init__()
        # Kwargs forwarded to the spatial function constructor; it picks
        # whichever subset it needs (MLP-style vs attention-style args).
        spatial_kwargs = dict(act_layer=act_layer,
                              in_features=in_features, hidden_features=hidden_features, drop=drop,  # mlp
                              dim=dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=proj_drop  # attn
                              )

        self.valid_spatial_func = True

        if spatial_func is not None:
            if shared_spatial_func:
                # Already an instance shared across blocks; use as-is.
                self.spatial_func = spatial_func
            else:
                # A class: instantiate a private copy for this block.
                self.spatial_func = spatial_func(**spatial_kwargs)
            self.norm1 = norm_layer(dim)
            if scaled:
                # Learnable layer-scale (init_values keeps the residual small).
                self.gamma_1 = nn.Parameter(init_values * torch.ones(1, 1, dim), requires_grad=True)
            else:
                self.gamma_1 = 1.
        else:
            # No spatial mixing ("pass" block): only the channel MLP runs.
            self.valid_spatial_func = False

        self.channel_func = ChannelMLP(in_features=dim, hidden_features=int(dim*channel_ratio), act_layer=act_layer,
                                       drop=drop)

        self.norm2 = norm_layer(dim)

        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()

        self.cpe = cpe
        if cpe:
            # Conditional positional encoding via depthwise convolution.
            self.cpe_net = DWConv(dim)

    def forward(self, x):
        """Apply spatial mixing, CPE and channel MLP with residuals."""
        in_x = x
        if self.valid_spatial_func:
            x = x + self.drop_path(self.gamma_1 * self.spatial_func(self.norm1(in_x)))
        if self.cpe:
            # NOTE: the CPE residual is computed from the block input, not
            # from the spatially-mixed activations.
            x = x + self.cpe_net(in_x)

        x = x + self.drop_path(self.channel_func(self.norm2(x)))
        return x

    def flops(self, input_shape):
        """Estimate FLOPs for an input of shape (batch, N, C)."""
        _, N, C = input_shape
        flops = 0
        if self.valid_spatial_func:
            flops += self.spatial_func.flops(input_shape)
            flops += N * C * 2  # norm + skip
        if self.cpe:
            flops += self.cpe_net.flops(input_shape)
        flops += self.channel_func.flops(input_shape)
        flops += N * C * 2  # norm + skip for the channel branch
        return flops


class Spach(nn.Module):
    """SPACH backbone: a stem, a stack of MixingBlocks, and a linear head.

    ``net_arch`` is a sequence of (func_type, depth) pairs; each pair adds
    ``depth`` MixingBlocks whose spatial function is looked up in
    ``SPATIAL_FUNC[func_type]``. With ``downstream=True`` the pooling and
    classification head are omitted (feature-extractor use).
    """

    def __init__(self,
                 num_classes=1000,
                 img_size=224,
                 in_chans=3,
                 hidden_dim=384,
                 patch_size=16,
                 net_arch=None,
                 act_layer=nn.GELU,
                 norm_layer=partial(nn.LayerNorm, eps=1e-6),
                 stem_type='conv1',
                 scaled=True, init_values=1e-4, drop_path_rate=0., cpe=True, shared_spatial_func=False,  # mixing block
                 num_heads=12, qkv_bias=True, qk_scale=None, attn_drop=0., proj_drop=0.,  # attn
                 token_ratio=0.5, channel_ratio=2.0, drop_rate=0.,  # mlp
                 downstream=False,
                 **kwargs
                 ):
        super(Spach, self).__init__()
        self.num_classes = num_classes
        self.hidden_dim = hidden_dim
        self.downstream = downstream

        # Patchify stem; its num_patches determines the token count N.
        self.stem = STEM_LAYER[stem_type](
            img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=hidden_dim, downstream=downstream)
        self.norm1 = norm_layer(hidden_dim)

        block_kwargs = dict(dim=hidden_dim, scaled=scaled, init_values=init_values, cpe=cpe,
                            shared_spatial_func=shared_spatial_func, norm_layer=norm_layer, act_layer=act_layer,
                            num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=proj_drop,  # attn
                            in_features=self.stem.num_patches, hidden_features=int(self.stem.num_patches * token_ratio), channel_ratio=channel_ratio, drop=drop_rate)  # mlp
        self.blocks = self.make_blocks(net_arch, block_kwargs, drop_path_rate, shared_spatial_func)
        self.norm2 = norm_layer(hidden_dim)

        if not downstream:
            # Mean-pool over tokens, then classify.
            self.pool = Reduce('b n c -> b c', reduction='mean')
            self.head = nn.Linear(hidden_dim, self.num_classes)

        self.init_weights()

    def make_blocks(self, net_arch, block_kwargs, drop_path, shared_spatial_func):
        """Build the MixingBlock stack described by ``net_arch``.

        When ``shared_spatial_func`` is set, a single spatial-function
        instance is created here and reused by every block; otherwise each
        block receives the class and instantiates its own copy.
        """
        if shared_spatial_func:
            assert len(net_arch) == 1, '`shared_spatial_func` only support unitary spatial function'
            assert net_arch[0][0] != 'pass', '`shared_spatial_func` do not support pass'
            spatial_func = SPATIAL_FUNC[net_arch[0][0]](**block_kwargs)
        else:
            spatial_func = None
        blocks = []
        for func_type, depth in net_arch:
            for i in range(depth):
                # `spatial_func or ...`: shared instance wins when present,
                # else the per-type class is passed for per-block creation.
                blocks.append(MixingBlock(spatial_func=spatial_func or SPATIAL_FUNC[func_type], drop_path=drop_path,
                                          **block_kwargs))
        return nn.Sequential(*blocks)

    def init_weights(self):
        """Apply the module-level ``_init_weights`` to every submodule."""
        for n, m in self.named_modules():
            _init_weights(m, n)

    def forward_features(self, x):
        """Image tensor -> normalized token features (batch, N, hidden_dim)."""
        x = self.stem(x)
        x = reshape2n(x)
        x = self.norm1(x)

        x = self.blocks(x)
        x = self.norm2(x)
        return x

    def forward(self, x):
        x = self.forward_features(x)
        x = self.pool(x)
        x = self.head(x)
        return x

    def flops(self):
        """Estimate total FLOPs for a batch-1 forward pass."""
        flops = 0
        shape = (1, self.stem.num_patches, self.hidden_dim)
        # stem
        flops += self.stem.flops()
        flops += sum(shape)  # presumably the norm cost — TODO confirm
        # blocks
        flops += sum([i.flops(shape) for i in self.blocks])
        flops += sum(shape)
        # head
        flops += self.hidden_dim * self.num_classes
        return flops


def _init_weights(m, n: str):
    """Initialize one submodule ``m`` whose qualified name is ``n``.

    Linear head weights/biases are zeroed; other Linear layers get Xavier
    weights with near-zero biases (tiny normal noise inside MLPs). Conv2d
    uses Kaiming fan-out; norm layers get unit weight / zero bias.
    """
    if isinstance(m, nn.Linear):
        if n.startswith('head'):
            nn.init.zeros_(m.weight)
            nn.init.zeros_(m.bias)
        else:
            nn.init.xavier_uniform_(m.weight)
            if m.bias is not None:
                if 'mlp' in n:
                    nn.init.normal_(m.bias, std=1e-6)
                else:
                    nn.init.zeros_(m.bias)
    elif isinstance(m, nn.Conv2d):
        nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
        if m.bias is not None:
            nn.init.zeros_(m.bias)
    elif isinstance(m, (nn.LayerNorm, nn.BatchNorm2d, nn.GroupNorm)):
        nn.init.ones_(m.weight)
        nn.init.zeros_(m.bias)
StarcoderdataPython
89833
<gh_stars>100-1000 """ Testing for TG2 Configuration """ from nose import SkipTest from nose.tools import eq_, raises import sys, os from datetime import datetime from sqlalchemy.orm import scoped_session from sqlalchemy.orm import sessionmaker from sqlalchemy.engine import Engine from ming import Session from ming.orm import ThreadLocalORMSession from tg.configurator.base import ConfigurationComponent, Configurator, BeforeConfigConfigurationAction from tg.configurator.components.app_globals import AppGlobalsConfigurationComponent from tg.configurator.components.auth import SimpleAuthenticationConfigurationComponent from tg.configurator.components.caching import CachingConfigurationComponent from tg.configurator.components.dispatch import DispatchConfigurationComponent from tg.configurator.components.helpers import HelpersConfigurationComponent from tg.configurator.components.i18n import I18NConfigurationComponent from tg.configurator.components.ming import MingConfigurationComponent from tg.configurator.components.paths import PathsConfigurationComponent from tg.configurator.components.registry import RegistryConfigurationComponent from tg.configurator.components.rendering import \ TemplateRenderingConfigurationComponent from tg.configurator.components.session import SessionConfigurationComponent from tg.configurator.components.sqlalchemy import SQLAlchemyConfigurationComponent from tg.configurator.components.transactions import \ TransactionManagerConfigurationComponent from tg.configuration.tgconfig import _init_default_global_config from tg.appwrappers.mingflush import MingApplicationWrapper from tg.util import Bunch from tg.configuration import config from tg.configurator import FullStackApplicationConfigurator from tg.configurator import ApplicationConfigurator from tg.configuration.app_config import AppConfig from tg.configuration.auth import _AuthenticationForgerPlugin from tg.configuration.auth.metadata import _AuthMetadataAuthenticator from 
tg.configuration.utils import coerce_config, coerce_options, TGConfigError from tg.configuration import milestones from tg.support.converters import asint, asbool import tg.i18n from tg import TGController, expose, response, request, abort, MinimalApplicationConfigurator from tests.base import setup_session_dir, teardown_session_dir from webtest import TestApp from tg.renderers.base import RendererFactory from tg.wsgiapp import TGApp from tg._compat import PY3 def setup(): milestones._reset_all() setup_session_dir() def teardown(): milestones._reset_all() teardown_session_dir() def _reset_global_config(): milestones._reset_all() try: config.config_proxy.pop_thread_config() except: pass try: config.config_proxy.pop_process_config() except: pass class PackageWithModel: __name__ = 'tests' __file__ = __file__ def __init__(self): self.model = self.ModelClass() self.model.DBSession = self.model.FakeDBSession() class ModelClass: class FakeDBSession: def remove(self): self.DBSESSION_REMOVED=True def init_model(self, engine): if isinstance(engine, Engine): # SQLA return self.DBSession else: # Ming return dict(ming=True) class lib: class app_globals: class Globals: pass PackageWithModel.__name__ = 'tests' class UncopiableList(list): """ This is to test configuration methods that make a copy of a list to modify it, using this we can check how it has been modified """ def __copy__(self): return self class FakeTransaction: def get(self): return self def begin(self): self.aborted = False self.doomed = False return self def abort(self): self.aborted = True def commit(self): self.aborted = False def _retryable(self, *args): return True note = _retryable def isDoomed(self): return self.doomed def doom(self): self.doomed = True from tg.configuration.auth import TGAuthMetadata class ApplicationAuthMetadata(TGAuthMetadata): def get_user(self, identity, userid): return {'name':'None'} class ApplicationAuthMetadataWithAuthentication(TGAuthMetadata): def authenticate(self, environ, 
identity): return 1 def get_user(self, identity, userid): return {'name':'None'} class AtExitTestException(Exception): pass class RootController(TGController): @expose() def test(self): return 'HI!' class TestPylonsConfigWrapper: def setup(self): _reset_global_config() _init_default_global_config() self.config = config def tearDown(self): _reset_global_config() _init_default_global_config() def test_create(self): pass def test_getitem(self): expected_keys = ['debug', 'package', 'tg.app_globals', 'tg.strict_tmpl_context'] for key in expected_keys: self.config[key] def test_repr(self): _reset_global_config() assert repr(self.config) == '<TGConfig: missing>' _init_default_global_config() assert repr(self.config) == repr(self.config.config_proxy.current_conf()) @raises(KeyError) def test_getitem_bad(self): self.config['no_such_key'] def test_setitem(self): self.config['no_such_key'] = 'something' def test_delattr(self): del self.config.debug eq_(hasattr(self.config, 'debug'), False) self.config.debug = False @raises(AttributeError) def test_delattr_bad(self): del self.config.i_dont_exist def test_keys(self): k = self.config.keys() assert 'tg.app_globals' in k def test_coerce_config(): opts = {'ming.connection.max_pool_size': '5'} conf = coerce_config(opts, 'ming.connection.', {'max_pool_size':asint}) assert conf['max_pool_size'] == 5 assert opts['ming.connection.max_pool_size'] == '5' def test_coerce_options(): opts = {'connection': 'false'} conf = coerce_options(opts, {'connection': asbool}) assert conf['connection'] is False assert opts['connection'] == 'false' class TestConfigurator: def setup(self): _reset_global_config() def teardown(self): _reset_global_config() tg.hooks._clear() # Reset hooks def test_repr_action(self): act = BeforeConfigConfigurationAction() assert repr(act) == "<BeforeConfigConfigurationAction: None>" def test_reqlocal_configuration_dictionary(self): cfg = FullStackApplicationConfigurator() cfg.update_blueprint({'RANDOM_VALUE': 5}) conf = 
cfg.configure({}, {}) assert config['RANDOM_VALUE'] == 5 assert len(config) == len(conf) def test_blueprint_invalid_view(self): cfg = FullStackApplicationConfigurator() try: cfg.get_blueprint_view('this.that.') except ValueError as e: assert str(e) == 'A Blueprint key cannot end with a .' else: assert False, 'Should have raised' def test_invalid_component(self): cfg = FullStackApplicationConfigurator() try: cfg.register(str) except ValueError as e: assert str(e) == 'Configuration component must inherit ConfigurationComponent' else: assert False, 'Should have raised' def test_replace_component(self): cfg = FullStackApplicationConfigurator() class TestComponentFirst(ConfigurationComponent): id = 'TESTCOMPONENT' class TestComponentSecond(ConfigurationComponent): id = 'TESTCOMPONENT2' cfg.register(TestComponentFirst) try: cfg.replace(TestComponentFirst, str) except ValueError as e: assert str(e) == 'Configuration component must inherit ConfigurationComponent' else: assert False, 'Should have raised' cfg.replace('TESTCOMPONENT', TestComponentSecond) comp = cfg.get_component('TESTCOMPONENT') assert isinstance(comp, TestComponentSecond), comp def test_component_without_id(self): cfg = FullStackApplicationConfigurator() class TestComponentFirst(ConfigurationComponent): pass try: cfg.register(TestComponentFirst) except ValueError as e: assert str(e).startswith('ConfigurationComponent must provide an id class attribute') else: assert False, 'Should have raised' try: cfg.replace(TestComponentFirst, TestComponentFirst) except ValueError as e: assert str(e).startswith('ConfigurationComponent must provide an id class attribute') else: assert False, 'Should have raised' def test_retrieve_current_configurator(self): cfg = FullStackApplicationConfigurator() cfg.update_blueprint({'RANDOM_VALUE': 5}) cfg.configure({}, {}) configurator = FullStackApplicationConfigurator.current() assert configurator.get_blueprint_value('RANDOM_VALUE') == 5 def 
test_application_wrapper_replacement(self): class AppWrapperTest(object): def __init__(self, *args, **kwargs): pass def __call__(self, *args, **kw): return tg.Response('AppWrapper #1') class AppWrapperTestReplacement(object): def __init__(self, *args, **kwargs): pass def __call__(self, *args, **kw): return tg.Response('AppWrapper #2') cfg = FullStackApplicationConfigurator() cfg.update_blueprint({'root_controller': Bunch(index=lambda *args, **kwargs: 'HI')}) cfg.register_application_wrapper(AppWrapperTest) app = TestApp(cfg.make_wsgi_app({'debug': True}, {})) assert app.get('/').text == 'AppWrapper #1', app.get('/').text cfg.replace_application_wrapper('AppWrapperTest', AppWrapperTestReplacement) app = TestApp(cfg.make_wsgi_app({}, {})) assert app.get('/').text == 'AppWrapper #2', app.get('/').text def test_sa_auth_requires_app_config(self): configurator = Configurator() configurator.register(SimpleAuthenticationConfigurationComponent) try: configurator.configure({}, {}) except TGConfigError as e: assert str(e) == 'Simple Authentication only works on an ApplicationConfigurator' else: assert False, 'Should have raised' def test_sa_auth_authmetadata_without_authenticate(self): cfg = FullStackApplicationConfigurator() class FakeAuthMetadata(): pass cfg.update_blueprint({ 'root_controller': Bunch(index=lambda *args, **kwargs: 'HI'), 'auth_backend': 'authmetadata', 'sa_auth.authmetadata': FakeAuthMetadata(), 'sa_auth.cookie_secret': 'SECRET!' 
}) cfg.make_wsgi_app({}, {}) def test_caching_required_app_config(self): configurator = Configurator() configurator.register(CachingConfigurationComponent) try: configurator.configure({}, {}) except TGConfigError as e: assert str(e) == 'Caching only works on an ApplicationConfigurator' else: assert False, 'Should have raised' def test_i18n_required_app_config(self): configurator = Configurator() configurator.register(I18NConfigurationComponent) try: configurator.configure({}, {}) except TGConfigError as e: assert str(e) == 'I18N only works on an ApplicationConfigurator' else: assert False, 'Should have raised' def test_ming_required_app_config(self): configurator = Configurator() configurator.register(MingConfigurationComponent) try: configurator.configure({}, {}) except TGConfigError as e: assert str(e).endswith('only works on an ApplicationConfigurator') else: assert False, 'Should have raised' def test_session_required_app_config(self): configurator = Configurator() configurator.register(SessionConfigurationComponent) try: configurator.configure({}, {}) except TGConfigError as e: assert str(e).endswith('only work on an ApplicationConfigurator') else: assert False, 'Should have raised' def test_sqlalchemy_required_app_config(self): configurator = Configurator() configurator.register(SQLAlchemyConfigurationComponent) try: configurator.configure({}, {}) except TGConfigError as e: assert str(e).endswith('only works on an ApplicationConfigurator') else: assert False, 'Should have raised' def test_transaction_required_app_config(self): configurator = Configurator() configurator.register(TransactionManagerConfigurationComponent) try: configurator.configure({}, {}) except TGConfigError as e: assert str(e).endswith('only works on an ApplicationConfigurator') else: assert False, 'Should have raised' def test_dispatch_without_mimetypes(self): # This is exactly like MinimalApplicationConfigurator # but without the mimetypes component. 
apc = ApplicationConfigurator() apc.register(PathsConfigurationComponent, after=False) apc.register(DispatchConfigurationComponent, after=False) apc.register(AppGlobalsConfigurationComponent) apc.register(HelpersConfigurationComponent) apc.register(TemplateRenderingConfigurationComponent) apc.register(RegistryConfigurationComponent, after=True) class MinimalController(TGController): @expose() def index(self): return 'HI' apc.update_blueprint({ 'root_controller': MinimalController() }) app = TestApp(apc.make_wsgi_app({}, {})) assert app.get('/').text == 'HI' def test_app_without_controller(self): cfg = MinimalApplicationConfigurator() app = TestApp(cfg.make_wsgi_app({}, {})) try: app.get('/') except TGConfigError as e: assert str(e) == 'Unable to load controllers, no controllers path configured!' else: assert False, 'Should have raised.' def test_tgapp_caches_controller_classes(self): class RootController(TGController): @expose() def index(self): return 'HI' tgapp = Bunch(app=None) def save_app(app): tgapp.app = app return app cfg = MinimalApplicationConfigurator() app = TestApp(cfg.make_wsgi_app({}, {}, wrap_app=save_app)) tgapp.app.controller_classes['root'] = RootController assert app.get('/').text == 'HI' class TestAppConfig: def __init__(self): self.fake_package = PackageWithModel def setup(self): _reset_global_config() def teardown(self): _reset_global_config() tg.hooks._clear() # Reset hooks def test_get_value(self): conf = AppConfig(minimal=True) conf['existing_value'] = 5 assert conf['existing_value'] == 5 assert conf.get('non_existing_value') == None def test_missing_attribute(self): conf = AppConfig(minimal=True) conf['existing_value'] = 5 assert conf['existing_value'] == 5 assert conf.existing_value == 5 try: conf['missing_value'] except KeyError: pass else: raise RuntimeError('Should have raised KeyError') try: conf.missing_value except AttributeError: pass else: raise RuntimeError('Should have raised AttributeError') def 
test_lang_can_be_changed_by_ini(self): conf = AppConfig(minimal=True) conf.make_wsgi_app(**{'i18n.lang': 'ru'}) assert config['i18n.lang'] == 'ru' def test_create_minimal_app(self): class RootController(TGController): @expose() def test(self): return 'HI!' conf = AppConfig(minimal=True, root_controller=RootController()) app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!' in app.get('/test') def test_create_minimal_app_with_factory(self): class RootController(TGController): @expose() def test(self): return 'HI!' conf = AppConfig(minimal=True, root_controller=RootController()) app_factory = conf.setup_tg_wsgi_app() app = app_factory() app = TestApp(app) assert 'HI!' in app.get('/test') def test_minimal_app_with_sqlalchemy(self): class RootController(TGController): @expose() def test(self): return 'HI!' DBSession = scoped_session(sessionmaker(autoflush=True, autocommit=False)) def init_model(engine): DBSession.configure(bind=engine) conf = AppConfig(minimal=True, root_controller=RootController()) conf['use_sqlalchemy'] = True conf['sqlalchemy.url'] = 'sqlite://' conf['model'] = Bunch(DBSession=DBSession, init_model=init_model) app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!' in app.get('/test') @raises(TGConfigError) def test_sqlalchemy_without_models(self): class RootController(TGController): @expose() def test(self): return 'HI!' conf = AppConfig(minimal=True, root_controller=RootController()) conf['use_sqlalchemy'] = True conf['sqlalchemy.url'] = 'sqlite://' app = conf.make_wsgi_app() def test_minimal_app_with_ming(self): class RootController(TGController): @expose() def test(self): return 'HI!' 
mainsession = Session() DBSession = ThreadLocalORMSession(mainsession) def init_model(engine): mainsession.bind = engine conf = AppConfig(minimal=True, root_controller=RootController()) conf['use_ming'] = True conf['ming.url'] = 'mim:///dbname' conf['model'] = Bunch(init_model=init_model, DBSession=DBSession) app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!' in app.get('/test') @raises(TGConfigError) def test_ming_without_models(self): class RootController(TGController): @expose() def test(self): return 'HI!' DBSession = scoped_session(sessionmaker(autoflush=True, autocommit=False)) def init_model(engine): DBSession.configure(bind=engine) conf = AppConfig(minimal=True, root_controller=RootController()) conf['use_ming'] = True conf['ming.url'] = 'mim://' app = conf.make_wsgi_app() def test_setup_jinja_without_package(self): class RootController(TGController): @expose() def test(self): return 'HI!' conf = AppConfig(minimal=True, root_controller=RootController()) conf.renderers = ['jinja'] app = conf.make_wsgi_app() def test_setup_sqlalchemy(self): class RootController(TGController): @expose() def test(self): return 'HI!' package = PackageWithModel() conf = AppConfig(minimal=True, root_controller=RootController()) conf.package = package conf.model = package.model conf.use_sqlalchemy = True conf['sqlalchemy.url'] = 'sqlite://' app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!' in app.get('/test') assert package.model.DBSession.DBSESSION_REMOVED def test_sqlalchemy_commit_veto(self): class RootController(TGController): @expose() def test(self): return 'HI!' 
@expose() def crash(self): raise Exception('crash') @expose() def forbidden(self): response.status = 403 return 'FORBIDDEN' @expose() def notfound(self): response.status = 404 return 'NOTFOUND' def custom_commit_veto(environ, status, headers): if status.startswith('404'): return True return False fake_transaction = FakeTransaction() import transaction prev_transaction_manager = transaction.manager transaction.manager = fake_transaction package = PackageWithModel() conf = AppConfig(minimal=True, root_controller=RootController()) conf['package'] = package conf['model'] = package.model conf['use_sqlalchemy'] = True conf['tm.enabled'] = True conf['tm.commit_veto'] = custom_commit_veto conf['sqlalchemy.url'] = 'sqlite://' app = conf.make_wsgi_app() app = TestApp(app) assert hasattr(conf, 'use_transaction_manager') is False app.get('/test') assert fake_transaction.aborted == False try: app.get('/crash') except: pass assert fake_transaction.aborted == True app.get('/forbidden', status=403) assert fake_transaction.aborted == False app.get('/notfound', status=404) assert fake_transaction.aborted == True transaction.manager = prev_transaction_manager def test_sqlalchemy_doom(self): fake_transaction = FakeTransaction() import transaction prev_transaction_manager = transaction.manager transaction.manager = fake_transaction class RootController(TGController): @expose() def test(self): fake_transaction.doom() return 'HI!' 
        # (continues test_sqlalchemy_doom from the previous chunk)
        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_sqlalchemy = True
        conf['tm.enabled'] = True
        conf['sqlalchemy.url'] = 'sqlite://'
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert hasattr(conf, 'use_transaction_manager') is False

        app.get('/test')
        assert fake_transaction.aborted == True

        transaction.manager = prev_transaction_manager

    # TransientError is retried; succeeds on the third attempt with tm.attempts=3.
    def test_sqlalchemy_retry(self):
        fake_transaction = FakeTransaction()
        import transaction
        prev_transaction_manager = transaction.manager
        transaction.manager = fake_transaction

        from transaction.interfaces import TransientError

        class RootController(TGController):
            attempts = []

            @expose()
            def test(self):
                self.attempts.append(True)
                if len(self.attempts) == 3:
                    return 'HI!'
                raise TransientError()

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_sqlalchemy = True
        conf['tm.enabled'] = True
        conf['sqlalchemy.url'] = 'sqlite://'
        conf['tm.attempts'] = 3
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert hasattr(conf, 'use_transaction_manager') is False

        resp = app.get('/test')
        assert 'HI' in resp

        transaction.manager = prev_transaction_manager

    def test_setup_sqla_persistance(self):
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['sqlalchemy.url'] = 'sqlite://'
        conf.use_sqlalchemy = True
        conf.package = PackageWithModel()
        conf.make_wsgi_app()

    # Master/slave balanced SQLAlchemy configuration is accepted.
    def test_setup_sqla_balanced(self):
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['sqlalchemy.master.url'] = 'sqlite://'
        conf['sqlalchemy.slaves.slave1.url'] = 'sqlite://'
        conf.use_sqlalchemy = True
        conf.package = PackageWithModel()
        conf.make_wsgi_app()

    # A slave named "master" must be rejected (continues next chunk).
    @raises(TGConfigError)
    def test_setup_sqla_balanced_prevent_slave_named_master(self):
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['sqlalchemy.master.url'] = 'sqlite://'
        # (continues prevent_slave_named_master from the previous chunk)
        conf['sqlalchemy.slaves.master.url'] = 'sqlite://'
        conf.use_sqlalchemy = True
        conf.package = PackageWithModel()
        conf.make_wsgi_app()

    # A balanced setup without any slave must fail.
    @raises(TGConfigError)
    def test_setup_sqla_balanced_no_slaves(self):
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['sqlalchemy.master.url'] = 'sqlite://'
        conf.use_sqlalchemy = True
        conf.package = PackageWithModel()
        conf.make_wsgi_app()

    # Ming persistence: the MingApplicationWrapper flushes sessions on success.
    def test_setup_ming_persistance(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf['ming.url'] = 'mim://'
        conf['ming.db'] = 'inmemdb'
        app = conf.make_wsgi_app()

        # Unwrap the middleware stack down to the TGApp instance.
        tgapp = app.application
        while not isinstance(tgapp, TGApp):
            tgapp = tgapp.app

        # Walk the dispatch wrapper chain looking for the Ming wrapper.
        ming_handler = tgapp.wrapped_dispatch
        while ming_handler != tgapp._dispatch:
            if isinstance(ming_handler, MingApplicationWrapper):
                break
            ming_handler = ming_handler.next_handler
        assert isinstance(ming_handler, MingApplicationWrapper), ming_handler

        class FakeMingSession(object):
            actions = []

            def flush_all(self):
                self.actions.append('FLUSH')

            def close_all(self):
                self.actions.append('CLOSE')

        ming_handler.ThreadLocalODMSession = FakeMingSession()

        app = TestApp(app)
        resp = app.get('/test')
        assert 'HI' in resp
        assert ming_handler.ThreadLocalODMSession.actions == ['FLUSH']

    # On controller failure the Ming session must be closed, not flushed.
    def test_setup_ming_persistance_closes_on_failure(self):
        class RootController(TGController):
            @expose()
            def test(self):
                raise Exception('CRASH!')

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf['ming.url'] = 'mim://'
        conf['ming.db'] = 'inmemdb'
        app = conf.make_wsgi_app()

        tgapp = app.application
        while not isinstance(tgapp, TGApp):
            tgapp = tgapp.app

        ming_handler = tgapp.wrapped_dispatch
        while ming_handler != tgapp._dispatch:
            if isinstance(ming_handler,
                          MingApplicationWrapper):
                break
            ming_handler = ming_handler.next_handler
        assert isinstance(ming_handler, MingApplicationWrapper), ming_handler

        class FakeMingSession(object):
            actions = []

            def flush_all(self):
                self.actions.append('FLUSH')

            def close_all(self):
                self.actions.append('CLOSE')

        ming_handler.ThreadLocalODMSession = FakeMingSession()

        app = TestApp(app)
        try:
            app.get('/test', status=500)
        except:
            assert ming_handler.ThreadLocalODMSession.actions == ['CLOSE']
        else:
            assert False, 'Should have raised exception'

    # With only ming.url given, the MIM datastore name is empty.
    def test_setup_ming_persistance_with_url_alone(self):
        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=None)
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf['ming.url'] = 'mim://inmemdb'
        app = conf.make_wsgi_app()
        assert app is not None

        dstore = config['tg.app_globals'].ming_datastore
        dstore_name = dstore.name
        # Looks like ming has empty dstore.name when using MIM.
        assert dstore_name == '', dstore

    # SQLAlchemy and Ming can coexist; DBSession defaults to the SQLA session.
    def test_setup_sqla_and_ming_both(self):
        package = PackageWithModel()
        base_config = AppConfig(minimal=True, root_controller=None)
        base_config.package = package
        base_config.model = package.model
        base_config.use_ming = True
        base_config['ming.url'] = 'mim://inmemdb'
        base_config.use_sqlalchemy = True
        base_config['sqlalchemy.url'] = 'sqlite://'
        app = base_config.make_wsgi_app()
        assert app is not None

        assert config['MingSession'], config
        assert config['tg.app_globals'].ming_datastore, config['tg.app_globals']
        assert config['SQLASession'], config
        assert config['tg.app_globals'].sa_engine, config['tg.app_globals']
        assert config['DBSession'] is config['SQLASession'], config

    # ming.db overrides the database name taken from the URL.
    def test_setup_ming_persistance_with_url_and_db(self):
        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=None)
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf['ming.url'] = 'mim://inmemdb'
        conf['ming.db'] = 'realinmemdb'
        app = conf.make_wsgi_app()
        assert app is not None

        dstore = \
            config['tg.app_globals'].ming_datastore
        dstore_name = dstore.name
        assert dstore_name == 'realinmemdb', dstore

    # Extra ming.connection.* options are passed through to the datastore.
    def test_setup_ming_persistance_advanced_options(self):
        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=None)
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf['ming.url'] = 'mim://inmemdb'
        conf['ming.connection.read_preference'] = 'PRIMARY'
        app = conf.make_wsgi_app()
        assert app is not None

    # Replica-set URL keeps the full URL and extracts the database name.
    def test_setup_ming_persistance_replica_set(self):
        if sys.version_info[:2] == (2, 6):
            raise SkipTest()

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=None)
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf['ming.url'] = 'mongodb://localhost:27017,localhost:27018/testdb?replicaSet=test'
        conf['ming.db'] = ''
        app = conf.make_wsgi_app()
        assert app is not None

        expected_url = 'mongodb://localhost:27017,localhost:27018/testdb?replicaSet=test'
        expected_db = 'testdb'

        dstore = config['tg.app_globals'].ming_datastore
        assert expected_db == dstore.name, dstore.name
        assert dstore.bind._conn_args[0] == expected_url

    # replicaSet may also be given as a connection option instead of in the URL.
    def test_setup_ming_persistance_replica_set_option(self):
        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=None)
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf['ming.url'] = 'mongodb://localhost:27017,localhost:27018/testdb'
        conf['ming.connection.replicaSet'] = 'test'
        app = conf.make_wsgi_app()
        assert app is not None

        expected_url = 'mongodb://localhost:27017,localhost:27018/testdb'
        expected_db = 'testdb'

        dstore = config['tg.app_globals'].ming_datastore
        assert expected_db == dstore.name, dstore.name
        assert dstore.bind._conn_args[0] == expected_url
        assert 'test' == dstore.bind._conn_kwargs.get('replicaSet'), dstore.bind._conn_kwargs

    # repoze.who-backed SQLAlchemy auth (skipped on Python 3).
    def test_setup_sqla_auth_repozesqla(self):
        if PY3:
            raise SkipTest()

        class RootController(TGController):
            @expose()
            def test(self):
                return str(request.environ)

        package = \
            PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_sqlalchemy = True
        conf.auth_backend = 'sqlalchemy'
        conf['sa_auth'] = {'authmetadata': ApplicationAuthMetadata(),
                           'dbsession': None,
                           'user_class': None,
                           'cookie_secret': '12345'}
        conf['sqlalchemy.url'] = 'sqlite://'
        app = conf.make_wsgi_app()
        app = TestApp(app)

        resp = app.get('/test')
        assert 'repoze.who.plugins' in resp, resp

    # Auth with authentication-capable metadata installs repoze.who plugins.
    def test_setup_sqla_auth(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return str(request.environ)

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_sqlalchemy = True
        conf.auth_backend = 'sqlalchemy'
        conf['sa_auth'] = {'authmetadata': ApplicationAuthMetadataWithAuthentication(),
                           'dbsession': None,
                           'user_class': None,
                           'cookie_secret': '12345'}
        conf['sqlalchemy.url'] = 'sqlite://'
        app = conf.make_wsgi_app()
        app = TestApp(app)

        resp = app.get('/test')
        assert 'repoze.who.plugins' in resp, resp

    # Ming auth backend via tgming (skipped on Python 3).
    def test_setup_ming_auth_tgming(self):
        if PY3:
            raise SkipTest()

        class RootController(TGController):
            @expose()
            def test(self):
                return str(request.environ)

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf.auth_backend = 'ming'
        conf['sa_auth'] = {'authmetadata': ApplicationAuthMetadata(),
                           'cookie_secret': '12345',
                           'user_class': None}
        conf['ming.url'] = 'mim:///testdb'
        app = conf.make_wsgi_app()
        app = TestApp(app)

        resp = app.get('/test')
        assert 'repoze.who.plugins' in resp, resp

    # Ming auth backend with authentication-capable metadata.
    def test_setup_ming_auth(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return str(request.environ)

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_ming = True
        conf.auth_backend = \
            'ming'
        conf['sa_auth'] = {'authmetadata': ApplicationAuthMetadataWithAuthentication(),
                           'cookie_secret': '12345',
                           'user_class': None}
        conf['ming.url'] = 'mim:///testdb'
        app = conf.make_wsgi_app()
        app = TestApp(app)

        resp = app.get('/test')
        assert 'repoze.who.plugins' in resp, resp

    # The authtkt cookie HttpOnly/secure flags follow sa_auth.authtkt.secure.
    def test_setup_authtkt(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return str(request.environ)

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.use_sqlalchemy = True
        conf.auth_backend = 'sqlalchemy'
        conf['sa_auth'] = {'authmetadata': ApplicationAuthMetadataWithAuthentication(),
                           'dbsession': None,
                           'user_class': None,
                           'cookie_secret': '12345',
                           'post_login_url': '/'}
        conf['sqlalchemy.url'] = 'sqlite://'

        secure_app = conf.make_wsgi_app(**{'sa_auth.authtkt.secure': True})
        secure_app = TestApp(secure_app)
        resp = secure_app.post('/login_handler', params={'login': 'l', 'password': 'p'})
        assert 'HttpOnly' in resp.headers["Set-Cookie"], resp.headers

        insecure_app = conf.make_wsgi_app(**{'sa_auth.authtkt.secure': False})
        insecure_app = TestApp(insecure_app)
        resp = insecure_app.post('/login_handler', params={'login': 'l', 'password': 'p'})
        assert 'HttpOnly' not in resp.headers["Set-Cookie"], resp.headers

    # Sessions persist a counter across requests when session.enabled is set.
    def test_sessions_enabled(self):
        class RootController(TGController):
            @expose('json')
            def test(self):
                try:
                    tg.session['counter'] += 1
                except KeyError:
                    tg.session['counter'] = 0
                tg.session.save()
                return dict(counter=tg.session['counter'])

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['session.enabled'] = True
        app = conf.make_wsgi_app()
        app = TestApp(app)

        resp = app.get('/test')
        assert resp.json['counter'] == 0, resp

        resp = app.get('/test')
        assert resp.json['counter'] == 1, resp

    # Cached value stays stable across repeated requests when caching is on.
    def test_caching_enabled(self):
        class RootController(TGController):
            @expose('json')
            def test(self):
                cache = tg.cache.get_cache('test_caching_enabled')
                now = \
                    cache.get_value('test_cache_key', createfunc=datetime.utcnow)
                return dict(now=now)

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['cache.enabled'] = True
        app = conf.make_wsgi_app()
        app = TestApp(app)

        resp = app.get('/test')
        now = resp.json['now']

        for x in range(20):
            resp = app.get('/test')
            assert resp.json['now'] == now, (resp, now)

    # Registering a controller wrapper replaces the default controller caller.
    def test_controler_wrapper_setup(self):
        from tg.configurator.components.dispatch import _call_controller
        orig_caller = _call_controller

        appcfg = AppConfig(minimal=True, root_controller=RootController())
        conf = {}
        dispatch = appcfg._configurator.get_component('dispatch')
        dispatch._controller_wrappers[:] = []
        dispatch._setup_controller_wrappers(conf, None)
        assert conf['controller_caller'] == orig_caller

        def controller_wrapper(caller):
            def call(*args, **kw):
                return caller(*args, **kw)
            return call

        conf = {}
        dispatch = appcfg._configurator.get_component('dispatch')
        dispatch._controller_wrappers[:] = [controller_wrapper]
        dispatch._setup_controller_wrappers(conf, None)
        assert conf['controller_caller'].__name__ == controller_wrapper(orig_caller).__name__

    # A globally registered wrapper is invoked on dispatch.
    def test_global_controller_wrapper(self):
        milestones._reset_all()

        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        wrapper_has_been_visited = []

        def controller_wrapper(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_controller_wrapper(controller_wrapper)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert 'HI!' in app.get('/test')
        assert wrapper_has_been_visited[0] is True

    # All three registered wrappers must run (continues next chunk).
    def test_multiple_global_controller_wrapper(self):
        milestones._reset_all()

        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'
        # (continues test_multiple_global_controller_wrapper)
        wrapper_has_been_visited = []

        def controller_wrapper(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        def controller_wrapper2(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        def controller_wrapper3(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_controller_wrapper(controller_wrapper2)
        conf.register_controller_wrapper(controller_wrapper3)
        conf.register_controller_wrapper(controller_wrapper)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert 'HI!' in app.get('/test')
        assert len(wrapper_has_been_visited) == 3

    # A wrapper bound to a single controller method is invoked for it.
    def test_dedicated_controller_wrapper(self):
        milestones._reset_all()

        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        wrapper_has_been_visited = []

        def controller_wrapper(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_controller_wrapper(controller_wrapper, controller=RootController.test)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert 'HI!' in app.get('/test')
        assert wrapper_has_been_visited[0] is True

    # Legacy variant of the dedicated-wrapper registration.
    def test_dedicated_controller_wrapper_old(self):
        milestones._reset_all()

        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        wrapper_has_been_visited = []

        def controller_wrapper(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_controller_wrapper(controller_wrapper, controller=RootController.test)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)
        assert 'HI!' \
            in app.get('/test')
        assert wrapper_has_been_visited[0] is True

    # Global and dedicated wrappers both run on the same dispatch.
    def test_mixed_controller_wrapper(self):
        milestones._reset_all()

        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        app_wrapper_has_been_visited = []

        def app_controller_wrapper(caller):
            def call(*args, **kw):
                app_wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        wrapper_has_been_visited = []

        def controller_wrapper(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_controller_wrapper(app_controller_wrapper)
        conf.register_controller_wrapper(controller_wrapper, controller=RootController.test)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert 'HI!' in app.get('/test')
        assert wrapper_has_been_visited[0] is True
        assert app_wrapper_has_been_visited[0] is True

    # Wrappers registered after environment setup apply to newly built apps.
    def test_controler_wrapper_after_environment_setup(self):
        milestones._reset_all()

        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        wrapper_has_been_visited = []

        def controller_wrapper(caller):
            def call(*args, **kw):
                wrapper_has_been_visited.append(True)
                return caller(*args, **kw)
            return call

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_controller_wrapper(controller_wrapper)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert 'HI!' in app.get('/test')
        assert wrapper_has_been_visited[0] is True
        assert len(wrapper_has_been_visited) == 1

        # A second registration on the same config doubles the visits.
        conf.register_controller_wrapper(controller_wrapper)
        app2 = conf.make_wsgi_app()
        app2 = TestApp(app2)

        wrapper_has_been_visited[:] = []
        assert 'HI!' in app2.get('/test')
        assert wrapper_has_been_visited[0] is True
        assert len(wrapper_has_been_visited) == 2

    # Application wrappers receive the dispatcher (continues next chunk).
    def test_application_wrapper_setup(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'
        # (continues test_application_wrapper_setup)
        wrapper_has_been_visited = []

        class AppWrapper(object):
            def __init__(self, dispatcher):
                self.dispatcher = dispatcher

            def __call__(self, *args, **kw):
                wrapper_has_been_visited.append(True)
                return self.dispatcher(*args, **kw)

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_wrapper(AppWrapper)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert 'HI!' in app.get('/test')
        assert wrapper_has_been_visited[0] == True

    # register_wrapper honours after=False (first) and after=<class> ordering.
    def test_application_wrapper_ordering_after(self):
        class AppWrapper1:
            pass

        class AppWrapper2:
            pass

        class AppWrapper3:
            pass

        class AppWrapper4:
            pass

        class AppWrapper5:
            pass

        conf = AppConfig(minimal=True)
        conf.register_wrapper(AppWrapper2)
        conf.register_wrapper(AppWrapper4, after=AppWrapper3)
        conf.register_wrapper(AppWrapper3)
        conf.register_wrapper(AppWrapper1, after=False)
        conf.register_wrapper(AppWrapper5, after=AppWrapper3)
        milestones.environment_loaded.reach()

        app_wrappers = list(conf._configurator._application_wrappers.values())
        assert app_wrappers[0] == AppWrapper1
        assert app_wrappers[1] == AppWrapper2
        assert app_wrappers[2] == AppWrapper3
        assert app_wrappers[3] == AppWrapper4
        assert app_wrappers[4] == AppWrapper5

    # Middleware passed via wrap_app is entered on each request.
    def test_wrap_app(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        middleware_has_been_visited = []

        class AppWrapper(object):
            def __init__(self, app):
                self.app = app

            def __call__(self, environ, start_response):
                middleware_has_been_visited.append(True)
                return self.app(environ, start_response)

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app(wrap_app=AppWrapper)
        app = TestApp(app)
        assert 'HI!' \
            in app.get('/test')
        assert middleware_has_been_visited[0] == True

    # Unknown renderer names raise TGConfigError with an explanatory message.
    @raises(TGConfigError)
    def test_unsupported_renderer(self):
        conf = AppConfig(root_controller=RootController())
        conf['renderers'] = ['unknwon']
        try:
            conf.make_wsgi_app()
        except TGConfigError as e:
            assert 'This configuration object does not support the unknwon renderer' in str(e)
            raise

    # A missing cookie secret for the auth backend raises TGConfigError.
    @raises(TGConfigError)
    def test_cookie_secret_required(self):
        conf = AppConfig(root_controller=RootController())
        conf['auth_backend'] = 'sqlalchemy'
        conf['sa_auth'] = {}
        try:
            conf.make_wsgi_app()
        except TGConfigError as e:
            assert str(e).startswith('You must provide a value for authentication cookies secret')
            raise

    # Default authenticators include cookie and sqlauth (skipped on Python 3).
    def test_sqla_auth_middleware(self):
        if PY3:
            raise SkipTest()

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.auth_backend = 'sqlalchemy'
        conf.skip_authentication = True
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'dbsession': None,
                                'user_class': None,
                                'cookie_secret': '12345',
                                'authenticators': UncopiableList([('default', None)])})
        conf.make_wsgi_app()

        authenticators = [x[0] for x in config['sa_auth.authenticators']]
        assert 'cookie' in authenticators
        assert 'sqlauth' in authenticators

    # Field-name translations are forwarded to the sqlauth authenticator.
    def test_sqla_auth_middleware_using_translations(self):
        if PY3:
            raise SkipTest()

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.auth_backend = 'sqlalchemy'
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'dbsession': None,
                                'user_class': None,
                                'translations': {'user_name': 'SomethingElse'},
                                'cookie_secret': '12345',
                                'authenticators': UncopiableList([('default', None)])})
        conf.make_wsgi_app()

        authenticators = [x[0] for x in config['sa_auth.authenticators']]
        assert 'cookie' in authenticators
        assert 'sqlauth' in authenticators

        auth = None
        for authname, authobj in config['sa_auth.authenticators']:
            if authname == 'sqlauth':
                auth = authobj
                break

        assert auth is not None, config['sa_auth.authenticators']
        assert auth.translations['user_name'] == \
            'SomethingElse', auth.translations

    # With a custom authenticators list, the defaults are appended after it.
    def test_sqla_auth_middleware_default_after(self):
        if PY3:
            raise SkipTest()

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.auth_backend = 'sqlalchemy'
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'cookie_secret': '12345',
                                'dbsession': None,
                                'user_class': None,
                                'authenticators': UncopiableList([('superfirst', None),
                                                                  ('default', None)])})
        conf.make_wsgi_app()

        authenticators = [x[0] for x in config['sa_auth.authenticators']]
        assert authenticators[1] == 'superfirst'
        assert 'cookie' in authenticators
        assert 'sqlauth' in authenticators

    # No authenticators key at all: setup must simply not crash.
    def test_sqla_auth_middleware_no_authenticators(self):
        if PY3:
            raise SkipTest()

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.auth_backend = 'sqlalchemy'
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'dbsession': None,
                                'user_class': None,
                                'cookie_secret': '12345'})

        # In this case we can just test it doesn't crash
        # as the sa_auth dict doesn't have an authenticators key to check for
        conf.make_wsgi_app()

    # A fully custom authenticator replaces the defaults entirely.
    def test_sqla_auth_middleware_only_mine(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return str(request.environ)

            @expose()
            def forbidden(self):
                response.status = "401"

        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = package
        conf.model = package.model
        conf.auth_backend = 'sqlalchemy'
        conf.use_sqlalchemy = True
        conf['sqlalchemy.url'] = 'sqlite://'

        alwaysadmin = _AuthenticationForgerPlugin(fake_user_key='FAKE_USER')
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'cookie_secret': '12345',
                                'form_plugin': alwaysadmin,
                                'authenticators': UncopiableList([('alwaysadmin', alwaysadmin)]),
                                'identifiers': [('alwaysadmin', alwaysadmin)],
                                'challengers': []})
        app = conf.make_wsgi_app()

        authenticators = [x[0] for x in config['sa_auth.authenticators']]
        assert authenticators[0] == 'alwaysadmin'
        assert 'sqlauth' not in authenticators
        # (continues test_sqla_auth_middleware_only_mine)
        challengers = [x[1] for x in config['sa_auth.challengers']]
        assert alwaysadmin in challengers

        app = TestApp(app)
        assert 'repoze.who.identity' in app.get('/test', extra_environ={'FAKE_USER': 'admin'})
        assert app.get('/forbidden', status=401)

    # Auth logging can target stderr, stdout, or a file path.
    def test_sqla_auth_logging_stderr(self):
        package = PackageWithModel()
        conf = AppConfig(minimal=True, root_controller=None)
        conf.package = package
        conf.model = package.model
        conf.auth_backend = 'sqlalchemy'
        conf.use_sqlalchemy = True
        conf['sqlalchemy.url'] = 'sqlite://'

        alwaysadmin = _AuthenticationForgerPlugin(fake_user_key='FAKE_USER')
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'cookie_secret': '12345',
                                'form_plugin': alwaysadmin,
                                'log_level': 'DEBUG',
                                'authenticators': UncopiableList([('alwaysadmin', alwaysadmin)]),
                                'identifiers': [('alwaysadmin', alwaysadmin)],
                                'challengers': []})

        conf['sa_auth']['log_file'] = 'stderr'
        app = conf.make_wsgi_app()
        conf['sa_auth']['log_file'] = 'stdout'
        app = conf.make_wsgi_app()

        import tempfile
        f = tempfile.NamedTemporaryFile()
        conf['sa_auth']['log_file'] = f.name
        app = conf.make_wsgi_app()

    # Ming backend installs cookie + mingauth authenticators (Python 2 only).
    def test_ming_auth_middleware(self):
        if PY3:
            raise SkipTest()

        conf = AppConfig(root_controller=RootController(), auth_backend='ming')
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'user_class': None,
                                'cookie_secret': '12345',
                                'authenticators': UncopiableList([('default', None)])})
        conf.make_wsgi_app()

        authenticators = [x[0] for x in config['sa_auth.authenticators']]
        assert 'cookie' in authenticators
        assert 'mingauth' in authenticators

    # With no auth backend the sa_auth options are not installed (KeyError).
    @raises(KeyError)
    def test_sqla_auth_middleware_no_backend(self):
        conf = AppConfig(root_controller=RootController())
        conf.auth_backend = None
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadata(),
                                'cookie_secret': '12345'})
        conf.make_wsgi_app()

        authenticators = [x[0] for x in config['sa_auth.authenticators']]
        assert 'cookie' in authenticators
        assert len(authenticators) == 1

    # Authentication-capable metadata adds the tgappauth authenticator.
    def test_tgauthmetadata_auth_middleware(self):
        conf = \
            AppConfig(root_controller=RootController(), auth_backend='sqlalchemy')
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadataWithAuthentication(),
                                'dbsession': None,
                                'user_class': None,
                                'cookie_secret': '12345',
                                'authenticators': UncopiableList([('default', None)])})
        conf.make_wsgi_app()

        authenticators = [x[0] for x in config['sa_auth.authenticators']]
        assert 'cookie' in authenticators
        assert 'tgappauth' in authenticators

    # Default identifiers include the cookie identifier.
    def test_auth_setup_default_identifier(self):
        conf = AppConfig(root_controller=RootController(), auth_backend='sqlalchemy')
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadataWithAuthentication(),
                                'dbsession': None,
                                'user_class': None,
                                'cookie_secret': '12345',
                                'identifiers': UncopiableList([('default', None)])})
        conf.make_wsgi_app()

        identifiers = [x[0] for x in tg.config['sa_auth.identifiers']]
        assert 'cookie' in identifiers

    # Custom identifiers are kept exactly as provided.
    def test_auth_setup_custom_identifier(self):
        conf = AppConfig(root_controller=RootController(), auth_backend='sqlalchemy')
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadataWithAuthentication(),
                                'dbsession': None,
                                'user_class': None,
                                'cookie_secret': '12345',
                                'identifiers': UncopiableList([('custom', None)])})
        conf.make_wsgi_app()

        identifiers = [x[0] for x in config['sa_auth.identifiers']]
        assert 'custom' in identifiers

    def test_auth_middleware_doesnt_touch_authenticators(self):
        # Checks that the auth middleware process doesn't touch original authenticators
        # list, to prevent regressions on this.
        # (body of test_auth_middleware_doesnt_touch_authenticators)
        conf = AppConfig(root_controller=RootController(), auth_backend='sqlalchemy')
        conf['sa_auth'].update({'authmetadata': ApplicationAuthMetadataWithAuthentication(),
                                'dbsession': None,
                                'user_class': None,
                                'cookie_secret': '12345',
                                'authenticators': [('default', None)]})
        conf.make_wsgi_app()

        authenticators = [x[0] for x in conf['sa_auth.authenticators']]
        assert len(authenticators) == 1

    # Password-based authenticator returns None when metadata rejects.
    def test_tgauthmetadata_loginpwd(self):
        who_authenticator = _AuthMetadataAuthenticator(ApplicationAuthMetadataWithAuthentication(),
                                                       using_password=True)
        assert who_authenticator.authenticate({}, {}) == None

    # Without password checking the authenticator accepts (returns 1).
    def test_tgauthmetadata_nologinpwd(self):
        who_authenticator = _AuthMetadataAuthenticator(ApplicationAuthMetadataWithAuthentication(),
                                                       using_password=False)
        assert who_authenticator.authenticate({}, {}) == 1

    # PYTHONOPTIMIZE disables the error middleware but the app still serves.
    def test_error_middleware_disabled_with_optimize(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = PackageWithModel()

        os.environ['PYTHONOPTIMIZE'] = '2'
        app = conf.make_wsgi_app()
        os.environ.pop('PYTHONOPTIMIZE')

        app = TestApp(app)
        assert 'HI!' in app.get('/test')

    # serve_static wraps the app in a Statics* middleware.
    def test_serve_statics(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = PackageWithModel()
        conf.serve_static = True
        app = conf.make_wsgi_app()
        assert app.__class__.__name__.startswith('Statics')

        app = TestApp(app)
        assert 'HI!' \
            in app.get('/test')

    # Sub-controllers know their mount point even with minimal config.
    def test_mount_point_with_minimal(self):
        class SubController(TGController):
            @expose()
            def test(self):
                return self.mount_point

        class RootController(TGController):
            sub = SubController()

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert '/sub' in app.get('/sub/test')

    # /_test_vars preserves the registry; a later request must trash it.
    def test_application_test_vars(self):
        class RootController(TGController):
            pass

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        assert 'DONE' in app.get('/_test_vars')
        assert request.path == '/_test_vars'

        # This should trash away the preserved registry to avoid
        # leaking memory.
        app.get('/', status=404)

        try:
            request.path
        except TypeError:
            # TypeError means the request has been properly removed
            pass
        else:
            assert False, 'There should have been no requests in place...'

    # A controller returning None triggers "No content returned by controller".
    def test_application_empty_controller(self):
        class RootController(object):
            def __call__(self, environ, start_response):
                return None

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        try:
            r = app.get('/something')
        except Exception as e:
            assert 'No content returned by controller' in str(e)
        else:
            assert False, 'Should have raised "No content returned by controller"'

    # _setup_app_env flags test mode only when paste.testing_variables is set.
    def test_application_test_mode_detection(self):
        class FakeRegistry(object):
            def register(self, *args, **kw):
                pass

        def track_app(app):
            # Save a reference to the plain TGApp before it's wrapped by middlewares.
            track_app.app = app
            return app

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = PackageWithModel()
        conf.make_wsgi_app(wrap_app=track_app)

        testmode, __, __ = track_app.app._setup_app_env({'paste.registry': FakeRegistry()})
        assert testmode is False

        testmode, __, __ = track_app.app._setup_app_env({'paste.registry': FakeRegistry(),
                                                         'paste.testing_variables': {}})
        assert testmode is True

    # A wrapper that drops the controller argument leads to a 404.
    def test_application_no_controller_hijacking(self):
        class RootController(TGController):
            @expose()
            def test(self):
                return 'HI!'

        class AppWrapper(object):
            def __init__(self, dispatcher):
                self.dispatcher = dispatcher

            def __call__(self, controller, environ, start_response):
                return self.dispatcher(None, environ, start_response)

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.register_wrapper(AppWrapper)
        conf.package = PackageWithModel()
        app = conf.make_wsgi_app()
        app = TestApp(app)

        app.get('/test', status=404)

    # A package module without app_globals still builds an app.
    def test_package_no_app_globals(self):
        class RootController(TGController):
            pass

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf.package = sys.modules[__name__]
        app = conf.make_wsgi_app()

    # The custom error document is served for abort(403).
    def test_custom_error_document(self):
        class ErrorController(TGController):
            @expose()
            def document(self, *args, **kw):
                return 'ERROR!!!'

        class RootController(TGController):
            error = ErrorController()

            @expose()
            def test(self):
                abort(403)

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['errorpage.enabled'] = True
        conf['errorpage.handle_exceptions'] = False
        app = conf.make_wsgi_app(full_stack=True)
        app = TestApp(app)

        resp = app.get('/test', status=403)
        assert 'ERROR!!!' in resp, resp

    # The error document also replaces streamed responses (continues next chunk).
    def test_custom_error_document_with_streamed_response(self):
        class ErrorController(TGController):
            @expose()
            def document(self, *args, **kw):
                return 'ERROR!!!'
        # (continues test_custom_error_document_with_streamed_response)
        class RootController(TGController):
            error = ErrorController()

            @expose()
            def test(self):
                response.status_code = 403

                def _output():
                    yield 'Hi'
                    yield 'World'
                return _output()

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['errorpage.enabled'] = True
        conf['errorpage.handle_exceptions'] = False
        app = conf.make_wsgi_app(full_stack=True)
        app = TestApp(app)

        resp = app.get('/test', status=403)
        assert 'ERROR!!!' in resp, resp

    # request.disable_error_pages() lets the original abort detail through.
    def test_error_document_passthrough(self):
        class ErrorController(TGController):
            @expose()
            def document(self, *args, **kw):
                return 'ERROR!!!'

        class RootController(TGController):
            error = ErrorController()

            @expose()
            def test(self):
                request.disable_error_pages()
                abort(403, detail='Custom Detail')

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['errorpage.enabled'] = True
        conf['errorpage.handle_exceptions'] = False
        app = conf.make_wsgi_app(full_stack=True)
        app = TestApp(app)

        resp = app.get('/test', status=403)
        assert 'Custom Detail' in resp, resp

    # Legacy status_code_redirect path still serves the error document.
    def test_custom_old_error_document(self):
        class ErrorController(TGController):
            @expose()
            def document(self, *args, **kw):
                return 'ERROR!!!'

        class RootController(TGController):
            error = ErrorController()

            @expose()
            def test(self):
                abort(403)

        conf = AppConfig(minimal=True, root_controller=RootController())
        conf['errorpage.enabled'] = True
        conf.status_code_redirect = True
        app = conf.make_wsgi_app(full_stack=True)
        app = TestApp(app)

        resp = app.get('/test', status=403)
        assert 'ERROR!!!' in resp, resp

    # Legacy path with a streamed response (continues next chunk).
    def test_custom_old_error_document_with_streamed_response(self):
        class ErrorController(TGController):
            @expose()
            def document(self, *args, **kw):
                return 'ERROR!!!'
class RootController(TGController): error = ErrorController() @expose() def test(self): response.status_code = 403 def _output(): yield 'Hi' yield 'World' return _output() conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = True conf.status_code_redirect = True app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=403) assert 'ERROR!!!' in resp, resp def test_custom_500_document(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' class RootController(TGController): error = ErrorController() @expose() def test(self): abort(500) conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = True conf['debug'] = False conf['errorpage.handle_exceptions'] = False conf['errorpage.status_codes'] += [500] app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=500) assert 'ERROR!!!' in resp, resp def test_custom_500_document_on_crash(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' class RootController(TGController): error = ErrorController() @expose() def test(self): raise Exception('Crash!') conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = True conf['debug'] = False conf['errorpage.handle_exceptions'] = True app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=500) assert 'ERROR!!!' in resp, resp def test_errorpage_reraises_exceptions(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' 
class RootController(TGController): error = ErrorController() @expose() def test(self): raise Exception('Crash!') conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = True conf['debug'] = False conf['errorpage.handle_exceptions'] = False app = conf.make_wsgi_app(full_stack=False) app = TestApp(app) try: resp = app.get('/test', status=500) except Exception as e: assert 'Crash!' in str(e) else: assert False, 'Should have raised Crash! exception' def test_old_custom_500_document(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' class RootController(TGController): error = ErrorController() @expose() def test(self): abort(500) conf = AppConfig(minimal=True, root_controller=RootController()) conf['debug'] = False conf.status_code_redirect = True conf['errorpage.enabled'] = True conf['errorpage.status_codes'] += [500] app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=500) assert 'ERROR!!!' in resp, resp def test_skips_custom_500_document_when_debug(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' class RootController(TGController): error = ErrorController() @expose() def test(self): abort(500) conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = True conf['debug'] = True conf['errorpage.handle_exceptions'] = False app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=500) assert 'ERROR!!!' not in resp, resp def test_skips_old_custom_500_document_when_debug(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' 
class RootController(TGController): error = ErrorController() @expose() def test(self): abort(500) conf = AppConfig(minimal=True, root_controller=RootController()) conf['debug'] = True conf.status_code_redirect = True conf['errorpage.enabled'] = True app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=500) assert 'ERROR!!!' not in resp, resp def test_skips_custom_error_document_when_disabled(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' class RootController(TGController): error = ErrorController() @expose() def test(self): abort(403) conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = False conf['errorpage.status_codes'] = (403, 404) conf['errorpage.handle_exceptions'] = False app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=403) assert 'ERROR!!!' not in resp, resp def test_skips_custom_error_document_when_disabled_and_manually_registered(self): class ErrorController(TGController): @expose() def document(self, *args, **kw): return 'ERROR!!!' class RootController(TGController): error = ErrorController() @expose() def test(self): abort(403) conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = False conf['errorpage.status_codes'] = (403, 404) conf['errorpage.handle_exceptions'] = False app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=403) assert 'ERROR!!!' 
not in resp, resp def test_custom_500_json(self): class ErrorController(TGController): @expose(content_type="text/html") @expose('json', content_type="application/json") def document(self, *args, **kw): return dict(a=5) class RootController(TGController): error = ErrorController() @expose() def test(self): abort(500) conf = AppConfig(minimal=True, root_controller=RootController()) conf['errorpage.enabled'] = True conf['debug'] = False conf['errorpage.handle_exceptions'] = False conf['errorpage.status_codes'] += [500] app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test', status=500, headers={'Accept': 'application/json'}) assert '{"a": 5}' in resp.text, resp assert 'application/json' == resp.content_type def test_errorware_configuration(self): class RootController(TGController): @expose() def test(self, *args, **kwargs): return 'HI' conf = AppConfig(minimal=True, root_controller=RootController()) app = conf.make_wsgi_app(full_stack=True, **{'trace_errors.error_email': '<EMAIL>'}) app = TestApp(app) resp = app.get('/test') assert 'HI' in resp, resp assert config['tg.errorware']['error_email'] == '<EMAIL>' assert config['tg.errorware']['error_subject_prefix'] == 'WebApp Error: ' assert config['tg.errorware']['error_message'] == 'An internal server error occurred' def test_tw2_unsupported_renderer(self): import tw2.core class RootController(TGController): @expose() def test(self, *args, **kwargs): rl = tw2.core.core.request_local() tw2conf = rl['middleware'].config return ','.join(tw2conf.preferred_rendering_engines) conf = AppConfig(minimal=True, root_controller=RootController()) conf.prefer_toscawidgets2 = True conf.renderers = ['json', 'kajiki'] conf.default_renderer = 'json' app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test') assert 'kajiki' in resp, resp def test_tw2_renderers_preference(self): import tw2.core class RootController(TGController): @expose() def test(self, *args, **kwargs): rl = 
tw2.core.core.request_local() tw2conf = rl['middleware'].config return ','.join(tw2conf.preferred_rendering_engines) conf = AppConfig(minimal=True, root_controller=RootController()) conf.prefer_toscawidgets2 = True conf.renderers = ['kajiki'] conf.default_renderer = 'kajiki' app = conf.make_wsgi_app(full_stack=True) app = TestApp(app) resp = app.get('/test') assert 'kajiki' in resp, resp def test_tw2_unsupported(self): import tw2.core class RootController(TGController): @expose() def test(self, *args, **kwargs): rl = tw2.core.core.request_local() tw2conf = rl['middleware'].config return ','.join(tw2conf.preferred_rendering_engines) conf = AppConfig(minimal=True, root_controller=RootController()) conf.prefer_toscawidgets2 = True conf.renderers = ['json'] conf.default_renderer = 'json' try: app = conf.make_wsgi_app(full_stack=True) assert False except TGConfigError as e: assert 'None of the configured rendering engines' in str(e) assert 'is supported by ToscaWidgets2, unable to configure ToscaWidgets.' 
in str(e) def test_render_factory_success(self): class RootController(TGController): @expose() def test(self, *args, **kwargs): return 'HELLO' class FailedFactory(RendererFactory): engines = {'broken': {'content_type': 'text/plain'}} @classmethod def create(cls, config, app_globals): return {'broken': 'BROKEN'} conf = AppConfig(minimal=True, root_controller=RootController()) conf.register_rendering_engine(FailedFactory) conf.renderers = ['json', 'broken'] app = conf.make_wsgi_app(full_stack=True) assert config['renderers'] == ['json', 'broken'] assert config['render_functions']['broken'] == 'BROKEN' def test_render_factory_failure(self): class RootController(TGController): @expose() def test(self, *args, **kwargs): return 'HELLO' class FailedFactory(RendererFactory): engines = {'broken': {'content_type': 'text/plain'}} @classmethod def create(cls, config, app_globals): return None conf = AppConfig(minimal=True, root_controller=RootController()) conf.register_rendering_engine(FailedFactory) conf.renderers = ['json', 'broken'] conf.make_wsgi_app(full_stack=True) assert config['renderers'] == ['json'] def test_make_body_seekable(self): class RootController(TGController): @expose() def test(self, *args, **kwargs): request.body_file.seek(0) return 'HELLO' conf = AppConfig(minimal=True, root_controller=RootController()) conf['make_body_seekable'] = True app = conf.make_wsgi_app(full_stack=False) assert app.application.__class__.__name__ == 'SeekableRequestBodyMiddleware', \ app.application.__class__ app = TestApp(app) assert 'HELLO' in app.get('/test') def test_make_body_seekable_disabled(self): class RootController(TGController): @expose() def test(self, *args, **kwargs): request.body_file.seek(0) return 'HELLO' conf = AppConfig(minimal=True, root_controller=RootController()) conf['make_body_seekable'] = False app = conf.make_wsgi_app(full_stack=False) app = TestApp(app) assert 'HELLO' in app.get('/test') def test_debug_middleware(self): class 
RootController(TGController): @expose() def test(self): raise Exception('Crash!') conf = AppConfig(root_controller=RootController()) conf['errorpage.enabled'] = True app = conf.make_wsgi_app(debug=True, full_stack=True) app = TestApp(app) resp = app.get('/test', status=500, expect_errors=True) assert 'Exception: Crash! // Backlash' in resp, resp def test_make_app_with_custom_appglobals(self): class RootController(TGController): @expose('') def test(self, *args, **kwargs): return tg.app_globals.TEXT class FakeGlobals(Bunch): def __init__(self): super(FakeGlobals, self).__init__() self['TEXT'] = 'HI!' conf = AppConfig(minimal=True, root_controller=RootController()) conf.app_globals = FakeGlobals app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!' in app.get('/test') def test_make_app_with_appglobals_submodule(self): class RootController(TGController): @expose('') def test(self, *args, **kwargs): return tg.app_globals.text conf = AppConfig(minimal=True, root_controller=RootController()) from .fixtures import package_with_helpers_submodule conf['package'] = package_with_helpers_submodule app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!!' in app.get('/test') def test_make_app_with_custom_helpers(self): class RootController(TGController): @expose('') def test(self, *args, **kwargs): return config['helpers'].get_text() class FakeHelpers(object): @classmethod def get_text(cls): return 'HI!' conf = AppConfig(minimal=True, root_controller=RootController()) conf.helpers = FakeHelpers() app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!' in app.get('/test') def test_make_app_with_helpers_submodule(self): class RootController(TGController): @expose('') def test(self, *args, **kwargs): return config['helpers'].get_text() conf = AppConfig(minimal=True, root_controller=RootController()) from .fixtures import package_with_helpers_submodule conf['package'] = package_with_helpers_submodule app = conf.make_wsgi_app() app = TestApp(app) assert 'HI!!' 
in app.get('/test')
StarcoderdataPython
273423
# -*- coding: utf-8 -*-
# twFuncs.py
"""Helpers for fetching Twitter trends and tweets.

Trends can be fetched either through the official API (``get_api_trends``)
or by scraping the twitter.com trends widget without authentication
(``get_web_trends``).
"""

from twitter import Twitter, OAuth
import yweather
import requests
from operator import itemgetter
import re
from bs4 import BeautifulSoup

if __name__ != "__main__":
    from . import config


#--------------------------------------------------------------------------
# Return an authenticated twitter module client
#--------------------------------------------------------------------------
def get_twitter():
    """Return a Twitter API client authenticated with the config credentials."""
    return Twitter(auth=OAuth(
        config.access_key, config.access_secret,
        config.consumer_key, config.consumer_secret))


#--------------------------------------------------------------------------
# Return the WOEID for a location name
#--------------------------------------------------------------------------
def get_woeid(location):
    """Return the Yahoo! WOEID for *location* (e.g. "Korea")."""
    return yweather.Client().fetch_woeid(location)


#--------------------------------------------------------------------------
# Top-10 realtime trends via the Twitter API
#--------------------------------------------------------------------------
def get_api_trends(twitter, woeid):
    """Return up to 10 trend names for *woeid*, ranked by tweet volume
    (then by name length), with '#' stripped and '_' replaced by spaces.

    Returns an empty list if the API call fails.
    """
    # BUG FIX: initialized before the try block so an API failure returns []
    # instead of raising NameError on the final `return finalList`.
    finalList = []
    try:
        places = twitter.trends.place(_id=woeid)

        compactList = []
        for location in places:
            for trend in location["trends"]:
                name = re.sub('#', '', trend["name"])
                name = re.sub('_', ' ', name)
                volume = trend["tweet_volume"]
                # missing volumes come back as null; rank them last
                volume = 0 if volume is None else volume
                compactList.append({"name": name, "volume": volume, "len": len(name)})

        sortedList = sorted(compactList, key=itemgetter("volume", "len"), reverse=True)

        for trend in sortedList[:10]:
            finalList.append(trend["name"])

        # # test code for inspecting trend metadata
        # for location in results:
        #     for trend in location["trends"]:
        #         #print(" - %s" % trend["name"])
        #         print(trend)
    except Exception as e:
        print(e)

    return finalList


#--------------------------------------------------------------------------
# Top-10 trends scraped from the web widget (no-auth GET workaround)
#--------------------------------------------------------------------------
def get_web_trends(woeid):
    """Scrape trend names for *woeid* from the twitter.com trends widget.

    Returns an empty list if the request or parsing fails.
    """
    # BUG FIX: initialized before the try block so a failed request returns []
    # instead of raising NameError.
    trendList = []
    try:
        # BUG FIX: the original ignored the `woeid` argument and always
        # requested the hard-coded id "23424868" (South Korea).
        trendsHtml = requests.get(
            "https://twitter.com/i/trends?id=" + str(woeid)).json()['module_html']

        soup = BeautifulSoup(trendsHtml, 'html.parser')
        tagList = soup.select('.trend-name')

        for tag in tagList:
            trend = re.sub(r"^[#]", "", tag.text)
            trend = re.sub(r"_", " ", trend)
            trendList.append(trend)
    except Exception as e:
        print(e)

    return trendList


#--------------------------------------------------------------------------
# Search for a keyword and return up to cnt tweet texts as a list
#--------------------------------------------------------------------------
def get_tweets(twitter, keyword, cnt):
    """Search *keyword* and return the tweet texts joined into a
    single-element list (empty list on failure)."""
    tweetList = []
    try:
        query = twitter.search.tweets(q=keyword, count=cnt)

        # Tweets are very short, so instead of one list entry per tweet we
        # join them into one string and return a single-element list for
        # consistent downstream handling.
        tweets = ""
        for result in query["statuses"]:
            tweets = tweets + result["text"] + "\n"
        tweetList += [tweets]
    except Exception as e:
        print(e)

    return tweetList


#--------------------------------------------------------------------------
# module test code
#--------------------------------------------------------------------------
if __name__ == "__main__":
    import config

    woeid = get_woeid("Korea"); print(woeid)
    trends = get_web_trends(woeid); print(trends)
    twitter = get_twitter()
    tweets = get_tweets(twitter, trends[1], 100)
    for tweet in tweets:
        print(tweet)


#--------------------------------------------------------------------------
# module test code - getWebTrends()
#--------------------------------------------------------------------------
# if __name__ == "__main__":
#     trends_html = requests.get(
#         "https://twitter.com/i/trends?id=" + "23424868").json()['module_html']
#     soup = BeautifulSoup(trends_html, 'html.parser')
#     tag_list = soup.select('.trend-name')
#     for tag in tag_list:
#         trend = re.sub(r"^[#]", "", tag.text)
#         trend = re.sub(r"_", " ", trend)
#         print(trend)
StarcoderdataPython
5095203
import math


class BooleanModel():
    """Boolean retrieval operators over sorted posting lists of docIDs."""

    @staticmethod
    def and_operation(left_operand, right_operand):
        """Intersect two sorted docID lists, using sqrt(n) skip pointers
        to advance past runs that cannot match."""
        merged = []
        i = 0
        j = 0
        n_left = len(left_operand)
        n_right = len(right_operand)
        # classic IR heuristic: skip stride = sqrt(list length)
        skip_left = int(math.sqrt(n_left))
        skip_right = int(math.sqrt(n_right))

        while i < n_left and j < n_right:
            left_item = left_operand[i]
            right_item = right_operand[j]
            if left_item == right_item:
                # match: keep it and advance both sides
                merged.append(left_item)
                i += 1
                j += 1
            elif left_item > right_item:
                # try to leap the right list forward without overshooting
                if j + skip_right < n_right and right_operand[j + skip_right] <= left_item:
                    j += skip_right
                else:
                    j += 1
            else:
                # try to leap the left list forward without overshooting
                if i + skip_left < n_left and left_operand[i + skip_left] <= right_item:
                    i += skip_left
                else:
                    i += 1
        return merged

    @staticmethod
    def or_operation(left_operand, right_operand):
        """Union of two sorted docID lists (standard two-pointer merge)."""
        union = []
        i = 0
        j = 0

        while i < len(left_operand) or j < len(right_operand):
            if i < len(left_operand) and j < len(right_operand):
                left_item = left_operand[i]
                right_item = right_operand[j]
                if left_item == right_item:
                    # equal: emit once, advance both
                    union.append(left_item)
                    i += 1
                    j += 1
                elif left_item > right_item:
                    union.append(right_item)
                    j += 1
                else:
                    union.append(left_item)
                    i += 1
            elif i >= len(left_operand):
                # left exhausted: drain the right list
                union.append(right_operand[j])
                j += 1
            else:
                # right exhausted: drain the left list
                union.append(left_operand[i])
                i += 1
        return union

    @staticmethod
    def not_operation(right_operand, indexed_docIDs):
        """Complement of *right_operand* within the full sorted docID index."""
        # complement of an empty list is every indexed docID
        if not right_operand:
            return indexed_docIDs

        complement = []
        j = 0  # cursor into right_operand
        for doc_id in indexed_docIDs:
            if doc_id != right_operand[j]:
                complement.append(doc_id)
            elif j + 1 < len(right_operand):
                # matched: move to the next excluded docID
                j += 1
        return complement
StarcoderdataPython
8026020
# Evaluation script: reconstructs multiple images from a single Fourier-feature
# MLP conditioned on per-image learned context vectors, then plots ground
# truth vs. prediction side by side.
import glob
import pickle

import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf

from tf_fourier_features.fourier_features_mlp import FourierFeatureMLP

BATCH_SIZE = 8192   # pixels per prediction batch
IMAGE_SIZE = 800    # images are resized to IMAGE_SIZE x IMAGE_SIZE
IMAGE_EMBED = 8     # dimensionality of the per-image context vector

img_filepath_1 = '../data/blue_flower.jpg'
img_filepath_2 = '../data/fur-style.jpg'
img_filepath_3 = '../data/celtic_spiral_knot.jpg'

images_paths = [img_filepath_1, img_filepath_2, img_filepath_3]
image_ground_truths = []

# Decode each image to float32 RGB and resize with bicubic interpolation.
for img_filepath in images_paths:
    img_raw = tf.io.read_file(img_filepath)
    img_ground_truth = tf.io.decode_image(img_raw, channels=3, dtype=tf.float32)
    img_ground_truth = tf.image.resize(img_ground_truth, [IMAGE_SIZE, IMAGE_SIZE],
                                       method=tf.image.ResizeMethod.BICUBIC)
    image_ground_truths.append(img_ground_truth)

print("Decoded {} images of shape {}".format(len(image_ground_truths),
                                             image_ground_truths[0].shape))

rows, cols, channels = image_ground_truths[0].shape
pixel_count = rows * cols

checkpoint_dir = 'checkpoints/multi_fourier_features/inpainting/'
checkpoint_path = checkpoint_dir + 'model'

# Fail early if no trained weights exist for this run.
if len(glob.glob(checkpoint_path + "*.index")) == 0:
    raise FileNotFoundError("Model checkpoint not found !")

# Load context vectors
# (one learned IMAGE_EMBED-dim vector per training image, saved at train time)
with open(checkpoint_dir + 'image_contexts.pkl', 'rb') as f:
    image_contexts = pickle.load(f)

print("Loaded {} image contexts of size {}".format(len(image_contexts),
                                                   image_contexts[0].shape[1]))


def build_eval_tensors():
    """Build per-image model inputs and the matching ground-truth images.

    Returns:
        image_masks: list of [rows*cols, 2 + IMAGE_EMBED] tensors — normalized
            (x, y) pixel coordinates concatenated with that image's context
            vector broadcast to every pixel.
        eval_images: list of the ground-truth image tensors, in the same order.
    """
    # Dense grid of pixel coordinates, normalized to [0, 1).
    img_mask_x = tf.range(0, rows, dtype=tf.int32)
    img_mask_y = tf.range(0, cols, dtype=tf.int32)
    img_mask_x, img_mask_y = tf.meshgrid(img_mask_x, img_mask_y, indexing='ij')

    img_mask_x = tf.expand_dims(img_mask_x, axis=-1)
    img_mask_y = tf.expand_dims(img_mask_y, axis=-1)

    img_mask_x = tf.cast(img_mask_x, tf.float32) / rows
    img_mask_y = tf.cast(img_mask_y, tf.float32) / cols

    img_mask = tf.concat([img_mask_x, img_mask_y], axis=-1)
    img_mask = tf.reshape(img_mask, [-1, 2])  # flatten grid to [rows*cols, 2]

    image_masks = []
    for ix in range(len(image_ground_truths)):
        img_context = image_contexts[ix]
        # Same context vector attached to every pixel coordinate.
        img_context = tf.broadcast_to(img_context, [img_mask.shape[0], IMAGE_EMBED])

        context_mask = tf.concat([img_mask, img_context], axis=-1)
        image_masks.append(context_mask)

    eval_images = []
    for image in image_ground_truths:
        eval_images.append(image)

    return image_masks, eval_images


image_masks, eval_images = build_eval_tensors()

# One batched, cached, prefetched dataset of coordinates+context per image.
eval_datasets = []
for mask in image_masks:
    eval_dataset = tf.data.Dataset.from_tensor_slices((mask,))
    eval_dataset = eval_dataset.batch(BATCH_SIZE).cache()
    eval_dataset = eval_dataset.prefetch(tf.data.experimental.AUTOTUNE)
    eval_datasets.append(eval_dataset)

# Build model
model = FourierFeatureMLP(units=256, final_units=3, final_activation='sigmoid',
                          num_layers=4,
                          gaussian_projection=256, gaussian_scale=1.0)

# instantiate model (build weights by calling it once on a dummy input)
_ = model(tf.zeros([1, 2 + IMAGE_EMBED]))

# load checkpoint
model.load_weights(checkpoint_path).expect_partial()  # skip optimizer loading
model.summary()

# Predict pixels of the different images
output_images = []
for eval_dataset in eval_datasets:
    predicted_image = model.predict(eval_dataset, batch_size=BATCH_SIZE, verbose=1)
    predicted_image = predicted_image.reshape((rows, cols, channels))  # type: np.ndarray
    predicted_image = predicted_image.clip(0.0, 1.0)
    output_images.append(predicted_image)

# Plot ground truth (left) vs. prediction (right), one row per image.
fig, axes = plt.subplots(len(output_images), 2)
for ix, (ground_truth_img, predicted_img) in enumerate(zip(eval_images, output_images)):
    plt.sca(axes[ix, 0])
    gt_img = ground_truth_img.numpy()
    gt_img = gt_img.clip(0.0, 1.0)
    plt.imshow(gt_img)
    plt.title("Ground Truth Image")

    plt.sca(axes[ix, 1])
    plt.imshow(predicted_img)
    plt.title("Predicted Image")

fig.tight_layout()
plt.show()
StarcoderdataPython
86894
# -*- coding: utf-8 -*- import datetime import tempfile import unittest from pathlib import Path import pytest from _pytest.logging import caplog from dsg_lib.logging_config import config_log import logging def some_func(var1, var2): """ some function to test logging """ if var1 < 1: logging.warning(f"Oh no!") return var1 + var2 # class Test(unittest.TestCase): # `some_func` adds two numbers, and logs a warning if the first is < 1 def test_some_func_logs_warning(caplog): config_log() assert some_func(-1, 3) == 2 assert "Oh no!" in caplog.text def test_exit_log_level(): with pytest.raises(SystemExit) as e: # The command to test config_log(logging_level="bob") # Here's the trick assert e.type == SystemExit # assert e.value.code == 2 def test_exit_log_name(): with pytest.raises(SystemExit) as e: # The command to test config_log(log_name="bob.l") # Here's the trick assert e.type == SystemExit def test_exit_file_name(): log_name = "log" app_name = "123" service_id = "456" config_log( log_name=f"{log_name}.log", app_name="123", service_id="456", append_app_name=True, append_service_id=True, ) log_path = ( Path.cwd().joinpath("log").joinpath(f"{log_name}_{app_name}_{service_id}.log") ) test_name = str(log_path) assert log_path.exists() assert log_path.is_file() assert test_name.endswith(".log") assert "123" in test_name assert "456" in test_name
StarcoderdataPython
1852410
import logging from django.core.management.base import BaseCommand from projects.models import Project logger = logging.getLogger(__name__) # Creates indexes by re-saving all projects class Command(BaseCommand): help = "Index all projects" def handle(self, *args, **options): for project in Project.objects.all(): project.save()
StarcoderdataPython
3307995
"""Reliable data transfer (RDT) over an unreliable network layer.

Packet frames messages as: length (10 ascii digits) | seq num (10 ascii
digits) | md5 checksum (32 hex chars) | payload. RDT implements rdt 1.0
(no reliability) and rdt 3.0 (ACK/NACK + timeouts + duplicate handling).
"""
import network_3_0
import argparse
import time
from time import sleep
import hashlib


class Packet:
    ## the number of bytes used to store packet length
    seq_num_S_length = 10
    length_S_length = 10
    ## length of md5 checksum in hex
    checksum_length = 32

    def __init__(self, seq_num, msg_S):
        # seq_num: int sequence number; msg_S: payload string
        self.seq_num = seq_num
        self.msg_S = msg_S

    @classmethod
    def from_byte_S(self, byte_S):
        """Parse a wire-format string back into a Packet (raises on corruption)."""
        if Packet.corrupt(byte_S):
            raise RuntimeError('Cannot initialize Packet: byte_S is corrupt')
        #extract the fields
        seq_num = int(byte_S[Packet.length_S_length : Packet.length_S_length+Packet.seq_num_S_length])
        msg_S = byte_S[Packet.length_S_length+Packet.seq_num_S_length+Packet.checksum_length :]
        return self(seq_num, msg_S)

    def get_byte_S(self):
        """Serialize this packet to its wire-format string."""
        #convert sequence number of a byte field of seq_num_S_length bytes
        seq_num_S = str(self.seq_num).zfill(self.seq_num_S_length)
        #convert length to a byte field of length_S_length bytes
        length_S = str(self.length_S_length + len(seq_num_S) + self.checksum_length + len(self.msg_S)).zfill(self.length_S_length)
        #compute the checksum
        checksum = hashlib.md5((length_S+seq_num_S+self.msg_S).encode('utf-8'))
        checksum_S = checksum.hexdigest()
        #compile into a string
        return length_S + seq_num_S + checksum_S + self.msg_S

    @staticmethod
    def corrupt(byte_S):
        """Return True if the embedded md5 checksum does not match the fields."""
        #extract the fields
        length_S = byte_S[0:Packet.length_S_length]
        # NOTE(review): these offsets mix seq_num_S_length and length_S_length;
        # they only work because both constants happen to equal 10 — confirm
        # before changing either constant.
        seq_num_S = byte_S[Packet.length_S_length : Packet.seq_num_S_length+Packet.seq_num_S_length]
        checksum_S = byte_S[Packet.seq_num_S_length+Packet.seq_num_S_length : Packet.seq_num_S_length+Packet.length_S_length+Packet.checksum_length]
        msg_S = byte_S[Packet.seq_num_S_length+Packet.seq_num_S_length+Packet.checksum_length :]
        #compute the checksum locally
        checksum = hashlib.md5(str(length_S+seq_num_S+msg_S).encode('utf-8'))
        computed_checksum_S = checksum.hexdigest()
        #and check if the same
        return checksum_S != computed_checksum_S


class RDT:
    ## latest sequence number used in a packet
    seq_num = 1
    ## buffer of bytes read from network
    byte_buffer = ''

    def __init__(self, role_S, server_S, port):
        # role_S: 'client' or 'server'; opens the underlying network layer
        self.network = network_3_0.NetworkLayer(role_S, server_S, port)

    def disconnect(self):
        self.network.disconnect()

    def rdt_1_0_send(self, msg_S):
        # rdt 1.0: fire-and-forget over the (assumed reliable) channel
        p = Packet(self.seq_num, msg_S)
        self.seq_num += 1
        self.network.udt_send(p.get_byte_S())

    def rdt_1_0_receive(self):
        # rdt 1.0: drain the buffer, concatenating every complete packet payload
        ret_S = None
        byte_S = self.network.udt_receive()
        self.byte_buffer += byte_S
        #keep extracting packets - if reordered, could get more than one
        while True:
            #check if we have received enough bytes
            if(len(self.byte_buffer) < Packet.length_S_length):
                return ret_S #not enough bytes to read packet length
            #extract length of packet
            length = int(self.byte_buffer[:Packet.length_S_length])
            if len(self.byte_buffer) < length:
                return ret_S #not enough bytes to read the whole packet
            #create packet from buffer content and add to return string
            p = Packet.from_byte_S(self.byte_buffer[0:length])
            ret_S = p.msg_S if (ret_S is None) else ret_S + p.msg_S
            #remove the packet bytes from the buffer
            self.byte_buffer = self.byte_buffer[length:]
            #if this was the last packet, will return on the next iteration

    def waitForACK(self, p):
        """Block until a valid ACK for packet *p* arrives, resending *p* on
        corruption or on a non-ACK/stale reply."""
        #Wait for an ACK Packet. Basically listening for a packet.
        byte_S = self.network.udt_receive()
        self.byte_buffer += byte_S
        while True:
            #Keep grabbing bytes.
            byte_S = self.network.udt_receive()
            self.byte_buffer += byte_S
            if(len(self.byte_buffer) >= Packet.length_S_length):
                # Check to make sure we have enough bytes for a packet.
                length = int(self.byte_buffer[:Packet.length_S_length])
                if(len(self.byte_buffer) >= length):
                    # Check our bytes are the right length
                    if(Packet.corrupt(self.byte_buffer[0:length])):
                        #Check for corruption
                        self.byte_buffer = self.byte_buffer[length:]
                        self.network.udt_send(p.get_byte_S()) #If not resend.
                    else:
                        receivedPacket = Packet.from_byte_S(self.byte_buffer[0:length])
                        self.byte_buffer = self.byte_buffer[length:]
                        if(receivedPacket.msg_S == 'ACK' and receivedPacket.seq_num >= self.seq_num):
                            #Check if ACK packet.
                            self.seq_num = self.seq_num + 1
                            return
                        else:
                            self.network.udt_send(p.get_byte_S())

    def waitForMore(self, ack):
        """After delivering a packet, linger briefly to re-ACK duplicate
        retransmissions before returning."""
        #Method for making sure there is no resends. Wait for .1 seconds
        end = time.time() + .1
        byte_buffer2 = ''
        while (time.time() < end):
            isDuplicate = False
            bytes2 = self.network.udt_receive()
            byte_buffer2 += bytes2
            if (len(byte_buffer2) < Packet.length_S_length):
                #restarts if not enough bytes
                continue #restart loop
            length = int(byte_buffer2[:Packet.length_S_length])
            if (len(byte_buffer2) < length):
                #Restart if not matching length
                continue #restart
            if (Packet.corrupt(byte_buffer2[0:length])):
                #Is the packet corrupt?
                nack = Packet(self.seq_num, 'NACK') #Create NACK packet.
                self.network.udt_send(nack.get_byte_S()) #Send
                byte_buffer2 = '' #Empty the buffer.
                if (isDuplicate):
                    #Checks for duplicates and adds more time
                    end = end + .1
                continue
            else:
                # Time expired
                p2 = Packet.from_byte_S(byte_buffer2[0:length])
                if (p2.seq_num == self.seq_num - 1):
                    #Check if it was a different packet.
                    isDuplicate = True
                    end = end + .1
                    self.network.udt_send(ack.get_byte_S())
                    #We don't have to wait anymore send ACK.
                    byte_buffer2 = ''
                else:
                    nack = Packet(self.seq_num, 'NACK')
                    self.network.udt_send(nack.get_byte_S())
                    break

    def rdt_3_0_send(self, msg_S):
        """rdt 3.0 sender: send the packet, then retransmit on timeout,
        corruption, or anything other than the expected ACK."""
        p = Packet(self.seq_num, msg_S)
        self.network.udt_send(p.get_byte_S())
        byte_S = self.network.udt_receive()
        self.byte_buffer += byte_S
        while True:
            # Keep checking for packets.
            timeoutTime = time.time() + .05 #Timer for lost packets.
            while(time.time() < timeoutTime):
                byte_S = self.network.udt_receive()
                self.byte_buffer += byte_S
                if(len(self.byte_buffer) >= Packet.length_S_length):
                    length = int(self.byte_buffer[:Packet.length_S_length])
                    if(len(self.byte_buffer) >= length):
                        # Make sure packet is right length.
                        if(Packet.corrupt(self.byte_buffer[0:length])):
                            #Check to make sure not corrupt.
                            self.byte_buffer = self.byte_buffer[length:]
                            break
                        else:
                            receivedPacket = Packet.from_byte_S(self.byte_buffer[0:length])
                            self.byte_buffer = self.byte_buffer[length:]
                            if(receivedPacket.msg_S == 'ACK' and receivedPacket.seq_num >= self.seq_num):
                                #Check if right ACK packet for one we sent.
                                self.seq_num = self.seq_num + 1
                                self.byte_buffer = self.byte_buffer[length:]
                                return
                            else:
                                break #Break out the timer somethings wrong.
            # timer expired or bad reply: retransmit and wait again
            self.network.udt_send(p.get_byte_S())

    def rdt_3_0_receive(self):
        """rdt 3.0 receiver: deliver in-order payloads, ACK good packets,
        NACK corrupt/out-of-order ones, and re-ACK duplicates for a while."""
        ret_S = None
        byte_S = self.network.udt_receive()
        self.byte_buffer += byte_S
        while True:
            #Keep checking for packets.
            if (len(self.byte_buffer) < Packet.length_S_length):
                #Is packet right length
                return ret_S
            length = int(self.byte_buffer[:Packet.length_S_length])
            if (len(self.byte_buffer) < length):
                return ret_S
            if(Packet.corrupt(self.byte_buffer[0:length])):
                # Check for corrupt packets.
                nack = Packet(self.seq_num, 'NACK')
                self.network.udt_send(nack.get_byte_S())
                self.byte_buffer = self.byte_buffer[length:]
            else:
                p = Packet.from_byte_S(self.byte_buffer[0:length])
                if (p.seq_num <= self.seq_num):
                    #Is packet right sequence number.
                    ret_S = p.msg_S if (ret_S is None) else ret_S + p.msg_S
                    self.seq_num = self.seq_num + 1
                    ack = Packet(p.seq_num, 'ACK')
                    self.network.udt_send(ack.get_byte_S())
                    # Linger to absorb duplicate retransmissions of this packet.
                    end = time.time() + .2
                    byte_buffer2 = ''
                    while(time.time() < end):
                        isDuplicate = False
                        bytes2 = self.network.udt_receive()
                        byte_buffer2 += bytes2
                        try:
                            if (len(byte_buffer2) < Packet.length_S_length):
                                continue
                        except ValueError:
                            continue
                        length = int(byte_buffer2[:Packet.length_S_length])
                        if (len(byte_buffer2) < length):
                            continue
                        if(Packet.corrupt(byte_buffer2[0:length])):
                            nack = Packet(self.seq_num, 'NACK')
                            self.network.udt_send(nack.get_byte_S())
                            byte_buffer2 = ''
                            if(isDuplicate):
                                end = end + .2
                            continue
                        else:
                            p2 = Packet.from_byte_S(byte_buffer2[0:length])
                            if (p2.seq_num <= self.seq_num-1):
                                # duplicate of an already-delivered packet: re-ACK
                                isDuplicate = True
                                end = end + .2
                                ack1 = Packet(p2.seq_num, 'ACK')
                                self.network.udt_send(ack1.get_byte_S())
                                byte_buffer2 = ''
                            else:
                                nack = Packet(self.seq_num, 'NACK')
                                self.network.udt_send(nack.get_byte_S())
                                break
                else:
                    # out-of-order packet: reject with NACK
                    nack = Packet(self.seq_num, 'NACK')
                    self.network.udt_send(nack.get_byte_S())
                self.byte_buffer = self.byte_buffer[length:]


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='RDT implementation.')
    parser.add_argument('role', help='Role is either client or server.', choices=['client', 'server'])
    parser.add_argument('server', help='Server.')
    parser.add_argument('port', help='Port.', type=int)
    args = parser.parse_args()

    rdt = RDT(args.role, args.server, args.port)
    if args.role == 'client':
        rdt.rdt_1_0_send('MSG_FROM_CLIENT')
        sleep(2)
        print(rdt.rdt_1_0_receive())
        rdt.disconnect()
    else:
        sleep(1)
        print(rdt.rdt_1_0_receive())
        rdt.rdt_1_0_send('MSG_FROM_SERVER')
        rdt.disconnect()
5184127
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version
2.0 (the "License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.

The code in this file was developed at Harvard University (2018) and
modified at ChemOS Inc. (2019) as stated in the NOTICE file.
'''

__author__ = '<NAME>'

#=========================================================================

import sys
sys.path.append('../../phoenics')

import pickle

from phoenics import Phoenics
from branin import branin as loss

#=========================================================================

class OptimizationManager(object):
    """Drives a Phoenics recommend/evaluate optimization loop."""

    def __init__(self, config_file, loss_function):
        # creates instance of Phoenics optimizer from a JSON config file
        self.phoenics = Phoenics(config_file)
        self.loss_function = loss_function

    def optimize(self, max_iter = 10):
        """Run ``max_iter`` optimization cycles.

        Each cycle asks Phoenics for new parameter sets conditioned on
        all prior observations, evaluates them with the loss function,
        appends the results, and logs progress to disk
        (``observations.pkl`` and ``logfile.dat``).
        """
        observations = []

        for num_iter in range(max_iter):

            # query for new parameters based on prior observations
            params = self.phoenics.recommend(observations = observations)

            # use parameters for evaluation ...
            # ... experimentally or computationally
            for param in params:
                observation = self.loss_function(param)
                observations.append(observation)

            # log observations in a pickle file for future analysis
            # (FIX: context manager closes the handle; the original
            # `pickle.dump(obs, open(...))` leaked the file object)
            with open('observations.pkl', 'wb') as content:
                pickle.dump(observations, content)

            # print observations to file
            with open('logfile.dat', 'a') as logfile:
                for param in params:
                    new_line = ''
                    for param_name in sorted(self.phoenics.config.param_names):
                        for param_value in param[param_name]:
                            # FIX: format the scalar `param_value`; the
                            # original interpolated the whole
                            # `param[param_name]` array, which raises
                            # TypeError for multi-element parameters.
                            new_line += '%.5e\t' % (param_value)
                    for obj_name in sorted(self.phoenics.config.obj_names):
                        new_line += '%.5e\t' % (param[obj_name])
                    logfile.write(new_line + '\n')

#=========================================================================

if __name__ == '__main__':

    # truncate any logfile left over from a previous run
    logfile = open('logfile.dat', 'w')
    logfile.close()

    manager = OptimizationManager('config.json', loss)
    manager.optimize()
StarcoderdataPython
5103111
<reponame>jxgu1016/GCN_PyTorch import torch from torch.autograd import gradcheck from gcn.layers.GConv import GOF_Function def gradchecking(use_cuda=False): print('-'*80) GOF = GOF_Function.apply device = torch.device("cuda" if use_cuda else "cpu") weight = torch.randn(8,8,4,3,3).to(device).double().requires_grad_() gfb = torch.randn(4,3,3).to(device).double() test = gradcheck(GOF, (weight, gfb), eps=1e-6, atol=1e-4, rtol=1e-3, raise_exception=True) print(test) if __name__ == "__main__": gradchecking() if torch.cuda.is_available(): gradchecking(use_cuda=True)
StarcoderdataPython
12805732
<filename>Chap8.py import os import sys import tarfile import time import pyprind import pandas as pd import numpy as np from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_extraction.text import TfidfTransformer import re from nltk.stem.porter import PorterStemmer import nltk from nltk.corpus import stopwords from sklearn.pipeline import Pipeline from sklearn.linear_model import LogisticRegression from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.model_selection import GridSearchCV from sklearn.model_selection import StratifiedKFold from sklearn.model_selection import cross_val_score from sklearn.feature_extraction.text import HashingVectorizer from sklearn.linear_model import SGDClassifier from sklearn.decomposition import LatentDirichletAllocation from distutils.version import LooseVersion as Version from sklearn import __version__ as sklearn_version source = 'http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz' target = 'aclImdb_v1.tar.gz' def reporthook(count, block_size, total_size): global start_time if count == 0: start_time = time.time() return duration = time.time() - start_time progress_size = int(count * block_size) speed = progress_size / (1024.**2 * duration) percent = count * block_size * 100. 
/ total_size sys.stdout.write("\r%d%% | %d MB | %.2f MB/s | %d sec elapsed" % (percent, progress_size / (1024.**2), speed, duration)) sys.stdout.flush() if not os.path.isdir('aclImdb') and not os.path.isfile('aclImdb_v1.tar.gz'): if (sys.version_info < (3, 0)): import urllib urllib.urlretrieve(source, target, reporthook) else: import urllib.request urllib.request.urlretrieve(source, target, reporthook) if not os.path.isdir('aclImdb'): with tarfile.open(target, 'r:gz') as tar: tar.extractall() # ---------------------------------------------------------------------------------------- # 映画レビューデータセットをより便利なフォーマットに変換する basepath = 'aclImdb' """ labels = {'pos': 1, 'neg': 0} pbar = pyprind.ProgBar(50000) df = pd.DataFrame() for s in ('test', 'train'): for l in ('pos', 'neg'): path = os.path.join(basepath, s, l) for file in os.listdir(path): with open(os.path.join(path, file), 'r', encoding='utf-8') as infile: txt = infile.read() df = df.append([[txt, labels[l]]], ignore_index=True) pbar.update() df.columns = ['review', 'sentiment'] np.random.seed(0) df = df.reindex(np.random.permutation(df.index)) df.to_csv('movie_data.csv', index=False, encoding='utf-8') """ # ---------------------------------------------------------------------------------------- # CSVの読み込み df = pd.read_csv('movie_data.csv', encoding='utf-8') #print(df.head()) count = CountVectorizer() docs = np.array([ 'The sun is shining', 'The weather is sweet', 'The sun is shining, the weather is sweet, and one and one is two']) bag = count.fit_transform(docs) #print(count.vocabulary_) #print(bag.toarray()) # ---------------------------------------------------------------------------------------- # TF-IDF tfidf = TfidfTransformer(use_idf=True, norm='l2', smooth_idf=True) np.set_printoptions(precision=2) #print(tfidf.fit_transform(count.fit_transform(docs)).toarray()) # ---------------------------------------------------------------------------------------- # テキストデータの確認 for クレンジング df.loc[0, 'review'][-50:] # 
---------------------------------------------------------------------------------------- # preprocessorの処理 def preprocessor(text): text = re.sub('<[^>]*>', '', text) emoticons = re.findall('(?::|;|=)(?:-)?(?:\)|\(|D|P)', text) text = (re.sub('[\W]+', ' ', text.lower()) + ' '.join(emoticons).replace('-', '')) return text #print(preprocessor(df.loc[0, 'review'][-50:])) #print(preprocessor("</a>This :) is :( a test :-)!")) # ---------------------------------------------------------------------------------------- # トークン化する def tokenizer(text): return text.split() tokenizer('runners like running and thus they run') # ---------------------------------------------------------------------------------------- # Porterステミングアルゴリズム def tokenizer_porter(text): return [porter.stem(word) for word in text.split()] porter = PorterStemmer() tokenizer_porter('runners like running and thus they run') # ---------------------------------------------------------------------------------------- # ストップワードの除去 nltk.download("stopwords") stop = stopwords.words('english') T = [w for w in tokenizer_porter('a runner likes running and runs a lot')[-10:] if w not in stop] # ---------------------------------------------------------------------------------------- # トレーニングデータとテストデータの分割 X_train = df.loc[:25000, 'review'].values y_train = df.loc[:25000, 'sentiment'].values X_test = df.loc[25000:, 'review'].values y_test = df.loc[25000:, 'sentiment'].values """ # ---------------------------------------------------------------------------------------- # 最適パラメータの探索 tfidf = TfidfVectorizer(strip_accents=None, lowercase=False, preprocessor=None) param_grid = [{'vect__ngram_range': [(1, 1)], 'vect__stop_words': [stop, None], 'vect__tokenizer': [tokenizer, tokenizer_porter], 'clf__penalty': ['l1', 'l2'], 'clf__C': [1.0, 10.0, 100.0]}, {'vect__ngram_range': [(1, 1)], 'vect__stop_words': [stop, None], 'vect__tokenizer': [tokenizer, tokenizer_porter], 'vect__use_idf':[False], 'vect__norm':[None], 'clf__penalty': 
['l1', 'l2'], 'clf__C': [1.0, 10.0, 100.0]}, ] lr_tfidf = Pipeline([('vect', tfidf), ('clf', LogisticRegression(random_state=0))]) gs_lr_tfidf = GridSearchCV(lr_tfidf, param_grid, scoring='accuracy', cv=5, verbose=1, n_jobs=-1) gs_lr_tfidf.fit(X_train, y_train) # ---------------------------------------------------------------------------------------- # 最良パラメータの出力と精度 print('Best parameter set: %s ' % gs_lr_tfidf.best_params_) print('CV Accuracy: %.3f' % gs_lr_tfidf.best_score_) clf = gs_lr_tfidf.best_estimator_ print('Test Accuracy: %.3f' % clf.score(X_test, y_test)) """ # ---------------------------------------------------------------------------------------- # トークナイザの定義 def tokenizer(text): text = re.sub('<[^>]*>', '', text) emoticons = re.findall('(?::|;|=)(?:-)?(?:\)|\(|D|P)', text.lower()) text = re.sub('[\W]+', ' ', text.lower()) + ' '.join(emoticons).replace('-', '') tokenized = [w for w in text.split() if w not in stop] return tokenized # ---------------------------------------------------------------------------------------- # ジェネレータ関数stream_docsを定義 def stream_docs(path): with open(path, 'r', encoding='utf-8') as csv: next(csv) # skip header for line in csv: text, label = line[:-3], int(line[-2]) yield text, label next(stream_docs(path='movie_data.csv')) # ---------------------------------------------------------------------------------------- # get_minibatch関数を定義 def get_minibatch(doc_stream, size): docs, y = [], [] try: for _ in range(size): text, label = next(doc_stream) docs.append(text) y.append(label) except StopIteration: return None, None return docs, y # ---------------------------------------------------------------------------------------- # HashingVectorizer vect = HashingVectorizer(decode_error='ignore', n_features=2**21, preprocessor=None, tokenizer=tokenizer) if Version(sklearn_version) < '0.18': clf = SGDClassifier(loss='log', random_state=1, n_iter=1) else: clf = SGDClassifier(loss='log', random_state=1, max_iter=1) doc_stream = 
stream_docs(path='movie_data.csv') # ---------------------------------------------------------------------------------------- # アウトオブコア学習の開始 pbar = pyprind.ProgBar(45) classes = np.array([0, 1]) for _ in range(45): X_train, y_train = get_minibatch(doc_stream, size=1000) if not X_train: break X_train = vect.transform(X_train) clf.partial_fit(X_train, y_train, classes=classes) pbar.update() X_test, y_test = get_minibatch(doc_stream, size=5000) X_test = vect.transform(X_test) print('Accuracy: %.3f' % clf.score(X_test, y_test)) clf = clf.partial_fit(X_test, y_test) # ---------------------------------------------------------------------------------------- # 潜在ディリクレ配分 df = pd.read_csv('movie_data.csv', encoding='utf-8') print(df.head()) count = CountVectorizer(stop_words='english', max_df=.1, max_features=5000) X = count.fit_transform(df['review'].values) lda = LatentDirichletAllocation(n_components=10, random_state=123, learning_method='batch') X_topics = lda.fit_transform(X) print(lda.components_.shape) # ---------------------------------------------------------------------------------------- # 10種類のトピックごとに最も重要な5つの単語を出力してみる n_top_words = 5 feature_names = count.get_feature_names() for topic_idx, topic in enumerate(lda.components_): print("Topic %d:" % (topic_idx + 1)) print(" ".join([feature_names[i] for i in topic.argsort()\ [:-n_top_words - 1:-1]]))
StarcoderdataPython
5153450
<filename>source/brailleDisplayDrivers/freedomScientific.py #brailleDisplayDrivers/freedomScientific.py #A part of NonVisual Desktop Access (NVDA) #This file is covered by the GNU General Public License. #See the file COPYING for more details. #Copyright (C) 2008-2011 <NAME> <<EMAIL>>, <NAME> <<EMAIL>> from ctypes import * from ctypes.wintypes import * from collections import OrderedDict import itertools import hwPortUtils import braille import inputCore from baseObject import ScriptableObject from winUser import WNDCLASSEXW, WNDPROC, LRESULT, HCURSOR from logHandler import log import brailleInput #Try to load the fs braille dll try: fsbLib=windll.fsbrldspapi except: fsbLib=None #Map the needed functions in the fs braille dll if fsbLib: fbOpen=getattr(fsbLib,'_fbOpen@12') fbGetCellCount=getattr(fsbLib,'_fbGetCellCount@4') fbWrite=getattr(fsbLib,'_fbWrite@16') fbClose=getattr(fsbLib,'_fbClose@4') fbConfigure=getattr(fsbLib, '_fbConfigure@8') fbGetDisplayName=getattr(fsbLib, "_fbGetDisplayName@12") fbGetFirmwareVersion=getattr(fsbLib, "_fbGetFirmwareVersion@12") fbBeep=getattr(fsbLib, "_fbBeep@4") FB_INPUT=1 FB_DISCONNECT=2 FB_EXT_KEY=3 LRESULT=c_long HCURSOR=c_long appInstance=windll.kernel32.GetModuleHandleW(None) nvdaFsBrlWm=windll.user32.RegisterWindowMessageW(u"nvdaFsBrlWm") inputType_keys=3 inputType_routing=4 inputType_wizWheel=5 # Names of freedom scientific bluetooth devices bluetoothNames = ( "F14", "Focus 14 BT", "Focus 40 BT", "Focus 80 BT", ) keysPressed=0 extendedKeysPressed=0 @WNDPROC def nvdaFsBrlWndProc(hwnd,msg,wParam,lParam): global keysPressed, extendedKeysPressed keysDown=0 extendedKeysDown=0 if msg==nvdaFsBrlWm and wParam in (FB_INPUT, FB_EXT_KEY): if wParam==FB_INPUT: inputType=lParam&0xff if inputType==inputType_keys: keyBits=lParam>>8 keysDown=keyBits keysPressed |= keyBits elif inputType==inputType_routing: routingIndex=(lParam>>8)&0xff isRoutingPressed=bool((lParam>>16)&0xff) isTopRoutingRow=bool((lParam>>24)&0xff) if isRoutingPressed: 
gesture=RoutingGesture(routingIndex,isTopRoutingRow) try: inputCore.manager.executeGesture(gesture) except inputCore.NoInputGestureAction: pass elif inputType==inputType_wizWheel: numUnits=(lParam>>8)&0x7 isRight=bool((lParam>>12)&1) isDown=bool((lParam>>11)&1) #Right's up and down are rversed, but NVDA does not want this if isRight: isDown=not isDown for unit in xrange(numUnits): gesture=WizWheelGesture(isDown,isRight) try: inputCore.manager.executeGesture(gesture) except inputCore.NoInputGestureAction: pass elif wParam==FB_EXT_KEY: keyBits=lParam>>4 extendedKeysDown=keyBits extendedKeysPressed|=keyBits if keysDown==0 and extendedKeysDown==0 and (keysPressed!=0 or extendedKeysPressed!=0): gesture=KeyGesture(keysPressed,extendedKeysPressed) keysPressed=extendedKeysPressed=0 try: inputCore.manager.executeGesture(gesture) except inputCore.NoInputGestureAction: pass return 0 else: return windll.user32.DefWindowProcW(hwnd,msg,wParam,lParam) nvdaFsBrlWndCls=WNDCLASSEXW() nvdaFsBrlWndCls.cbSize=sizeof(nvdaFsBrlWndCls) nvdaFsBrlWndCls.lpfnWndProc=nvdaFsBrlWndProc nvdaFsBrlWndCls.hInstance=appInstance nvdaFsBrlWndCls.lpszClassName=u"nvdaFsBrlWndCls" class BrailleDisplayDriver(braille.BrailleDisplayDriver,ScriptableObject): name="freedomScientific" # Translators: Names of braille displays. 
description=_("Freedom Scientific Focus/PAC Mate series") @classmethod def check(cls): return bool(fsbLib) @classmethod def getPossiblePorts(cls): ports = OrderedDict([cls.AUTOMATIC_PORT, ("USB", "USB",)]) try: cls._getBluetoothPorts().next() ports["bluetooth"] = "Bluetooth" except StopIteration: pass return ports @classmethod def _getBluetoothPorts(cls): for p in hwPortUtils.listComPorts(): try: btName = p["bluetoothName"] except KeyError: continue if not any(btName == prefix or btName.startswith(prefix + " ") for prefix in bluetoothNames): continue yield p["port"].encode("mbcs") wizWheelActions=[ # Translators: The name of a key on a braille display, that scrolls the display to show previous/next part of a long line. (_("display scroll"),("globalCommands","GlobalCommands","braille_scrollBack"),("globalCommands","GlobalCommands","braille_scrollForward")), # Translators: The name of a key on a braille display, that scrolls the display to show the next/previous line. (_("line scroll"),("globalCommands","GlobalCommands","braille_previousLine"),("globalCommands","GlobalCommands","braille_nextLine")), ] def __init__(self, port="auto"): self.leftWizWheelActionCycle=itertools.cycle(self.wizWheelActions) action=self.leftWizWheelActionCycle.next() self.gestureMap.add("br(freedomScientific):leftWizWheelUp",*action[1]) self.gestureMap.add("br(freedomScientific):leftWizWheelDown",*action[2]) self.rightWizWheelActionCycle=itertools.cycle(self.wizWheelActions) action=self.rightWizWheelActionCycle.next() self.gestureMap.add("br(freedomScientific):rightWizWheelUp",*action[1]) self.gestureMap.add("br(freedomScientific):rightWizWheelDown",*action[2]) super(BrailleDisplayDriver,self).__init__() self._messageWindowClassAtom=windll.user32.RegisterClassExW(byref(nvdaFsBrlWndCls)) self._messageWindow=windll.user32.CreateWindowExW(0,self._messageWindowClassAtom,u"nvdaFsBrlWndCls window",0,0,0,0,0,None,None,appInstance,None) if port == "auto": portsToTry = itertools.chain(["USB"], 
self._getBluetoothPorts()) elif port == "bluetooth": portsToTry = self._getBluetoothPorts() else: # USB portsToTry = ["USB"] fbHandle=-1 for port in portsToTry: fbHandle=fbOpen(port,self._messageWindow,nvdaFsBrlWm) if fbHandle!=-1: break if fbHandle==-1: windll.user32.DestroyWindow(self._messageWindow) windll.user32.UnregisterClassW(self._messageWindowClassAtom,appInstance) raise RuntimeError("No display found") self.fbHandle=fbHandle self._configureDisplay() numCells=self.numCells self.gestureMap.add("br(freedomScientific):topRouting1","globalCommands","GlobalCommands","braille_scrollBack") self.gestureMap.add("br(freedomScientific):topRouting%d"%numCells,"globalCommands","GlobalCommands","braille_scrollForward") def terminate(self): super(BrailleDisplayDriver,self).terminate() fbClose(self.fbHandle) windll.user32.DestroyWindow(self._messageWindow) windll.user32.UnregisterClassW(self._messageWindowClassAtom,appInstance) def _get_numCells(self): return fbGetCellCount(self.fbHandle) def display(self,cells): cells="".join([chr(x) for x in cells]) fbWrite(self.fbHandle,0,len(cells),cells) def _configureDisplay(self): # See what display we are connected to displayName= firmwareVersion="" buf = create_string_buffer(16) if fbGetDisplayName(self.fbHandle, buf, 16): displayName=buf.value if fbGetFirmwareVersion(self.fbHandle, buf, 16): firmwareVersion=buf.value if displayName and firmwareVersion and displayName=="Focus" and ord(firmwareVersion[0])>=ord('3'): # Focus 2 or later. Make sure extended keys support is enabled. log.debug("Activating extended keys on freedom Scientific display. 
Display name: %s, firmware version: %s.", displayName, firmwareVersion) fbConfigure(self.fbHandle, 0x02) def script_toggleLeftWizWheelAction(self,gesture): action=self.leftWizWheelActionCycle.next() self.gestureMap.add("br(freedomScientific):leftWizWheelUp",*action[1],replace=True) self.gestureMap.add("br(freedomScientific):leftWizWheelDown",*action[2],replace=True) braille.handler.message(action[0]) def script_toggleRightWizWheelAction(self,gesture): action=self.rightWizWheelActionCycle.next() self.gestureMap.add("br(freedomScientific):rightWizWheelUp",*action[1],replace=True) self.gestureMap.add("br(freedomScientific):rightWizWheelDown",*action[2],replace=True) braille.handler.message(action[0]) __gestures={ "br(freedomScientific):leftWizWheelPress":"toggleLeftWizWheelAction", "br(freedomScientific):rightWizWheelPress":"toggleRightWizWheelAction", } gestureMap=inputCore.GlobalGestureMap({ "globalCommands.GlobalCommands" : { "braille_routeTo":("br(freedomScientific):routing",), "braille_scrollBack" : ("br(freedomScientific):leftAdvanceBar", "br(freedomScientific]:leftBumperBarUp","br(freedomScientific):rightBumperBarUp",), "braille_scrollForward" : ("br(freedomScientific):rightAdvanceBar","br(freedomScientific):leftBumperBarDown","br(freedomScientific):rightBumperBarDown",), "braille_previousLine" : ("br(freedomScientific):leftRockerBarUp", "br(freedomScientific):rightRockerBarUp",), "braille_nextLine" : ("br(freedomScientific):leftRockerBarDown", "br(freedomScientific):rightRockerBarDown",), "kb:backspace" : ("br(freedomScientific):dot7",), "kb:enter" : ("br(freedomScientific):dot8",), "kb:shift+tab": ("br(freedomScientific):dot1+dot2+brailleSpaceBar",), "kb:tab" : ("br(freedomScientific):dot4+dot5+brailleSpaceBar",), "kb:upArrow" : ("br(freedomScientific):dot1+brailleSpaceBar",), "kb:downArrow" : ("br(freedomScientific):dot4+brailleSpaceBar",), "kb:leftArrow" : ("br(freedomScientific):dot3+brailleSpaceBar",), "kb:rightArrow" : 
("br(freedomScientific):dot6+brailleSpaceBar",), "kb:control+leftArrow" : ("br(freedomScientific):dot2+brailleSpaceBar",), "kb:control+rightArrow" : ("br(freedomScientific):dot5+brailleSpaceBar",), "kb:home" : ("br(freedomScientific):dot1+dot3+brailleSpaceBar",), "kb:control+home" : ("br(freedomScientific):dot1+dot2+dot3+brailleSpaceBar",), "kb:end" : ("br(freedomScientific):dot4+dot6+brailleSpaceBar",), "kb:control+end" : ("br(freedomScientific):dot4+dot5+dot6+brailleSpaceBar",), "kb:alt" : ("br(freedomScientific):dot1+dot3+dot4+brailleSpaceBar",), "kb:alt+tab" : ("br(freedomScientific):dot2+dot3+dot4+dot5+brailleSpaceBar",), "kb:escape" : ("br(freedomScientific):dot1+dot5+brailleSpaceBar",), "kb:windows" : ("br(freedomScientific):dot2+dot4+dot5+dot6+brailleSpaceBar",), "kb:windows+d" : ("br(freedomScientific):dot1+dot2+dot3+dot4+dot5+dot6+brailleSpaceBar",), "reportCurrentLine" : ("br(freedomScientific):dot1+dot4+brailleSpaceBar",), "showGui" :("br(freedomScientific):dot1+dot3+dot4+dot5+brailleSpaceBar",), "braille_toggleTether" : ("br(freedomScientific):leftGDFButton+rightGDFButton",), } }) class InputGesture(braille.BrailleDisplayGesture): source = BrailleDisplayDriver.name class KeyGesture(InputGesture, brailleInput.BrailleInputGesture): keyLabels=[ #Braille keys (byte 1) 'dot1','dot2','dot3','dot4','dot5','dot6','dot7','dot8', #Assorted keys (byte 2) 'leftWizWheelPress','rightWizWheelPress', 'leftShiftKey','rightShiftKey', 'leftAdvanceBar','rightAdvanceBar', None, 'brailleSpaceBar', #GDF keys (byte 3) 'leftGDFButton','rightGDFButton', None, 'leftBumperBarUp','leftBumperBarDown','rightBumperBarUp','rightBumperBarDown', ] extendedKeyLabels = [ # Rocker bar keys. 
"leftRockerBarUp", "leftRockerBarDown", "rightRockerBarUp", "rightRockerBarDown", ] def __init__(self,keyBits, extendedKeyBits): super(KeyGesture,self).__init__() keys=[self.keyLabels[num] for num in xrange(24) if (keyBits>>num)&1] extendedKeys=[self.extendedKeyLabels[num] for num in xrange(4) if (extendedKeyBits>>num)&1] self.id="+".join(set(keys+extendedKeys)) # Don't say is this a dots gesture if some keys either from dots and space are pressed. if not extendedKeyBits and not keyBits & ~(0xff | (1 << 0xf)): self.dots = keyBits & 0xff # Is space? if keyBits & (1 << 0xf): self.space = True class RoutingGesture(InputGesture): def __init__(self,routingIndex,topRow=False): if topRow: self.id="topRouting%d"%(routingIndex+1) else: self.id="routing" self.routingIndex=routingIndex super(RoutingGesture,self).__init__() class WizWheelGesture(InputGesture): def __init__(self,isDown,isRight): which="right" if isRight else "left" direction="Down" if isDown else "Up" self.id="%sWizWheel%s"%(which,direction) super(WizWheelGesture,self).__init__()
StarcoderdataPython
6627252
# -*- coding: utf-8 -*- from setuptools import setup setup( name='CellCounting', version='0.1', author='<NAME>', author_email='<EMAIL>', packages=['cell_counting', 'cell_counting.validation'], install_requires=['numpy', 'scikit-learn', 'scipy', 'keras', 'shapely', 'joblib'] )
StarcoderdataPython
9703085
<reponame>HongminWu/HMM #!/usr/bin/env python import os import pandas as pd import numpy as np from sklearn.externals import joblib from math import ( log, exp ) from matplotlib import pyplot as plt import time import util def assess_deri_threshold_and_decide( threshold_c_value, mean_of_log_curve, std_of_log_curve, np_matrix_traj_by_time, curve_owner, state_no, figure_save_path, score_time_cost_per_point): fig = plt.figure(1) ax = fig.add_subplot(111) np_matrix_trajmeandiff_by_time = np_matrix_traj_by_time-mean_of_log_curve np_matrix_deri_by_time = np_matrix_trajmeandiff_by_time[:, 1:]-np_matrix_trajmeandiff_by_time[:, :-1] from matplotlib.pyplot import cm import numpy as np color=iter(cm.rainbow(np.linspace(0,1,len(np_matrix_deri_by_time)))) # plot log curves of all trials for row_no in range(np_matrix_deri_by_time.shape[0]): c=next(color) trial_name = curve_owner[row_no] ax.plot(np_matrix_deri_by_time[row_no].tolist()[0], linestyle="solid", label=trial_name, color=c) fig.show() title = 'state %s trial deri plot(on average use %ss to compute each log likelihood point)'%(state_no, score_time_cost_per_point) ax.set_title(title) if not os.path.isdir(figure_save_path+'/deri_plot'): os.makedirs(figure_save_path+'/deri_plot') fig.savefig(os.path.join(figure_save_path, 'deri_plot', title+".eps"), format="eps") plt.close(1) return abs(np_matrix_deri_by_time).max() def run(model_save_path, figure_save_path, threshold_c_value, trials_group_by_folder_name): trials_group_by_folder_name = util.make_trials_of_each_state_the_same_length(trials_group_by_folder_name) one_trial_data_group_by_state = trials_group_by_folder_name.itervalues().next() state_amount = len(one_trial_data_group_by_state) threshold_constant = 10 threshold_offset = 10 model_group_by_state = {} for state_no in range(1, state_amount+1): try: model_group_by_state[state_no] = joblib.load(model_save_path+"/model_s%s.pkl"%(state_no,)) except IOError: print 'model of state %s not found'%(state_no,) continue 
expected_log = [] std_of_log = [] deri_threshold = [] for state_no in model_group_by_state: compute_score_time_cost = 0 total_step_times = 0 all_log_curves_of_this_state = [] curve_owner = [] for trial_name in trials_group_by_folder_name: curve_owner.append(trial_name) one_log_curve_of_this_state = [] start_time = time.time() one_log_curve_of_this_state = util.fast_log_curve_calculation( trials_group_by_folder_name[trial_name][state_no], model_group_by_state[state_no] ) compute_score_time_cost += time.time()-start_time total_step_times += len(trials_group_by_folder_name[trial_name][state_no]) all_log_curves_of_this_state.append(one_log_curve_of_this_state) # use np matrix to facilitate the computation of mean curve and std np_matrix_traj_by_time = np.matrix(all_log_curves_of_this_state) mean_of_log_curve = np_matrix_traj_by_time.mean(0) std_of_log_curve = np_matrix_traj_by_time.std(0) score_time_cost_per_point = float(compute_score_time_cost)/total_step_times decided_deri_threshold= assess_deri_threshold_and_decide( threshold_c_value, mean_of_log_curve, std_of_log_curve, np_matrix_traj_by_time, curve_owner, state_no, figure_save_path, score_time_cost_per_point) deri_threshold.append(decided_deri_threshold) if not os.path.isdir(model_save_path): os.makedirs(model_save_path) joblib.dump(deri_threshold, model_save_path+"/deri_threshold.pkl")
StarcoderdataPython
3561197
<filename>investing_algorithm_framework/core/market_services/__init__.py<gh_stars>1-10 from investing_algorithm_framework.core.market_services.ccxt import \ CCXTMarketService from investing_algorithm_framework.core.market_services.market_service \ import MarketService __all__ = [ "MarketService", "CCXTMarketService" ]
StarcoderdataPython
335435
# encoding: utf-8 import re from sqlalchemy.orm import (joinedload, joinedload_all, subqueryload, subqueryload_all) from sqlalchemy.orm.exc import NoResultFound import pyramid.httpexceptions as exc import pokedex.db.tables as t from .. import db from . import caching def ability_list(request): c = request.tmpl_context c.abilities = db.pokedex_session.query(t.Ability) \ .join(t.Ability.names_local) \ .filter(t.Ability.is_main_series) \ .options(joinedload(t.Ability.prose_local)) \ .order_by(t.Ability.generation_id.asc(), t.Ability.names_table.name.asc()) \ .all() return {} def ability_view(request): name = request.matchdict.get('name') c = request.tmpl_context try: # Make sure that any ability we get is from the main series c.ability = (db.get_by_name_query(t.Ability, name) .filter(t.Ability.is_main_series) .one()) except NoResultFound: raise exc.HTTPNotFound ### Prev/next for header c.prev_ability, c.next_ability = db.prev_next( table=t.Ability, current=c.ability, language=c.game_language, filters=[t.Ability.is_main_series], ) caching.cache_content( request=request, key=c.ability.identifier, do_work=_do_ability, ) return {} def _do_ability(request, cache_key): c = request.tmpl_context # Eagerload db.pokedex_session.query(t.Ability) \ .filter_by(id=c.ability.id) \ .options( joinedload(t.Ability.names_local), subqueryload(t.Ability.flavor_text), joinedload(t.Ability.flavor_text, t.AbilityFlavorText.version_group), joinedload(t.Ability.flavor_text, t.AbilityFlavorText.version_group, t.VersionGroup.versions), # Pokémon stuff subqueryload(t.Ability.pokemon), subqueryload(t.Ability.hidden_pokemon), subqueryload(t.Ability.all_pokemon), subqueryload(t.Ability.all_pokemon, t.Pokemon.abilities), subqueryload(t.Ability.all_pokemon, t.Pokemon.species, t.PokemonSpecies.egg_groups), subqueryload(t.Ability.all_pokemon, t.Pokemon.types), subqueryload(t.Ability.all_pokemon, t.Pokemon.stats), joinedload(t.Ability.all_pokemon, t.Pokemon.stats, t.PokemonStat.stat), ) \ .one() 
c.method_labels = { 'Normal': u'May be found normally on Pokémon.', 'Hidden': u'Found on Pokémon from the Dream World and Dream Radar, ' u'as well as a few Pokémon from specific in-game encounters.', } hidden_pokemon = [pokemon for pokemon in c.ability.hidden_pokemon if pokemon not in c.ability.pokemon] c.pokemon = [] if c.ability.pokemon: c.pokemon.append(('Normal', c.ability.pokemon)) if hidden_pokemon: c.pokemon.append(('Hidden', hidden_pokemon)) move_flag = None if c.ability.identifier == u'soundproof': move_flag = 'sound' elif c.ability.identifier == u'iron-fist': move_flag = 'punch' c.moves = [] if move_flag: c.moves = db.pokedex_session.query(t.Move) \ .join(t.MoveFlagMap, t.MoveFlag) \ .filter(t.MoveFlag.identifier == move_flag) \ .join(t.Move.names_local) \ .order_by(t.Move.names_table.name) \ .options( subqueryload('move_effect'), subqueryload('type'), subqueryload('damage_class') ) \ .all()
StarcoderdataPython
6572663
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

"""Unit tests for the detection bounding-box helpers
(_Bbox, AnnotationBbox, DetectionBbox)."""

import pytest

from utils_cv.detection.bbox import DetectionBbox, AnnotationBbox, _Bbox


@pytest.fixture(scope="session")
def basic_bbox() -> "_Bbox":
    # Canonical box reused by most tests: left=0, top=10, right=100, bottom=1000.
    return _Bbox(left=0, top=10, right=100, bottom=1000)


@pytest.fixture(scope="session")
def anno_bbox() -> "AnnotationBbox":
    return AnnotationBbox(left=0, top=10, right=100, bottom=1000, label_idx=0)


@pytest.fixture(scope="session")
def det_bbox() -> "DetectionBbox":
    return DetectionBbox(
        left=0, top=10, right=100, bottom=1000, label_idx=0, score=0.5
    )


def validate_bbox(bbox: _Bbox) -> None:
    # FIX: annotated `-> bool` although the helper only asserts and
    # returns None.
    """Assert that *bbox* carries the canonical fixture coordinates."""
    assert bbox.left == 0
    assert bbox.top == 10
    assert bbox.right == 100
    assert bbox.bottom == 1000


def test__bbox_init(basic_bbox):
    # FIX: was named `text__bbox_init`, so pytest never collected it.
    assert type(basic_bbox) == _Bbox
    validate_bbox(basic_bbox)


def test__bbox_from_array(basic_bbox):
    # test `from_array()` bbox initialization method
    bbox_from_array = _Bbox.from_array([0, 10, 100, 1000])
    validate_bbox(bbox_from_array)

    # test `from_array_xywh()` bbox initialization method
    bbox_from_array_xywh = _Bbox.from_array_xywh([0, 10, 100, 990])
    validate_bbox(bbox_from_array_xywh)


def test__bbox_basic_funcs(basic_bbox):
    # test rect()
    assert basic_bbox.rect() == [0, 10, 100, 1000]
    # test width()
    assert basic_bbox.width() == 100
    # test height()
    assert basic_bbox.height() == 990
    # test surface_area()
    assert basic_bbox.surface_area() == 99000


def test__bbox_overlap(basic_bbox):
    # test bbox that does not overlap
    non_overlapping_bbox = _Bbox(left=200, top=10, right=300, bottom=1000)
    overlap = basic_bbox.get_overlap_bbox(non_overlapping_bbox)
    assert overlap is None

    # test bbox that does overlap
    overlapping_bbox = _Bbox(left=0, top=500, right=100, bottom=2000)
    overlap = basic_bbox.get_overlap_bbox(overlapping_bbox)
    assert overlap == _Bbox(left=0, top=500, right=100, bottom=1000)


def test__bbox_crop(basic_bbox):
    # test valid crop sizes
    cropped_bbox = basic_bbox.crop(max_width=10, max_height=10)
    assert cropped_bbox.width() == 10
    assert cropped_bbox.height() == 10
    assert cropped_bbox.left == 0
    assert cropped_bbox.top == 10
    assert cropped_bbox.right == 10
    assert cropped_bbox.bottom == 20

    # test invalid crop sizes
    # FIX: was `basic_bbox.crap(...)` — the AttributeError satisfied
    # pytest.raises for the wrong reason; call the real method so the
    # oversized-crop validation is actually exercised.
    with pytest.raises(Exception):
        basic_bbox.crop(max_width=101, max_height=10)


def test__bbox_standardization():
    # Constructor must standardize swapped coordinates (left/right, top/bottom).
    non_standard_bbox_0 = _Bbox(left=100, top=1000, right=0, bottom=10)
    validate_bbox(non_standard_bbox_0)


def test__bbox_is_valid(basic_bbox):
    assert basic_bbox.is_valid() is True
    assert _Bbox(left=0, top=0, right=0, bottom=0).is_valid() is False


def test_annotation_bbox_init(anno_bbox):
    validate_bbox(anno_bbox)
    assert type(anno_bbox) == AnnotationBbox


def test_annotation_bbox_from_array(anno_bbox):
    bbox_from_array = AnnotationBbox.from_array(
        [0, 10, 100, 1000], label_idx=0
    )
    validate_bbox(bbox_from_array)
    assert type(bbox_from_array) == AnnotationBbox


def test_detection_bbox_init(det_bbox):
    validate_bbox(det_bbox)
    assert type(det_bbox) == DetectionBbox


def test_detection_bbox_from_array(det_bbox):
    bbox_from_array = DetectionBbox.from_array(
        [0, 10, 100, 1000], label_idx=0, score=0
    )
    # FIX: validate the newly constructed bbox (the original validated
    # the `det_bbox` fixture, leaving `bbox_from_array`'s coordinates
    # unchecked — compare with test_annotation_bbox_from_array).
    validate_bbox(bbox_from_array)
    assert type(bbox_from_array) == DetectionBbox
StarcoderdataPython
9692862
# Units : SI Units

"""Biot-Savart model of a solenoid's magnetic field for levitation studies."""

import numpy as np
import scipy
from scipy.integrate import quad as integrate  # kept for the commented-out quad path

pi = np.pi
mu0 = 4e-7 * pi  # vacuum permeability [T*m/A]


def vec(*args):
    """Pack scalars into an (n, 1) column vector."""
    return np.atleast_2d(args).T


def R(x, y, z):
    # Rotation matrix -- not implemented yet (stub kept from the original).
    np.matrix()


class Pose(object):
    """Position and rotation of a body in the global frame."""

    def __init__(self):
        self.position = vec(0, 0, 0)
        self.rotation = vec(0, 0, 0)


class Magnet(object):
    """Base class for anything that produces a magnetic field."""

    def __init__(self, pose=None):
        if pose:
            self.pose = pose
        else:
            self.pose = Pose()

    def field(self, current, position):
        """Return the B-field at `position`; implemented by subclasses."""
        pass


class Levitron(Magnet):
    """The object to levitate; assumed to be a neodymium cylinder.

    NOTE(review): field() is a placeholder returning a constant unit
    vector -- the permanent-magnet field model was never implemented.
    """

    def __init__(self, pose=None):
        super(Levitron, self).__init__(pose)
        self.x_M = 1  # magnetic susceptibility of the magnet

    def field(self, current, position):
        # Placeholder: permanent-magnet field model not yet defined.
        return vec(0, 0, 1)

    def fieldStrength(self, current, position):
        """Magnetic field strength H = B / x_M."""
        return self.field(current, position) / self.x_M

    def magMoment(self, fieldStrength):
        # NOTE(review): ignores its argument and evaluates at (1, 1),
        # exactly as in the original -- confirm intended semantics.
        return self.x_M * self.fieldStrength(1, 1)


class Solenoid(Magnet):
    """A helical coil with its origin at the center, axis along +z."""

    def __init__(self, radius, length, loops, pose=None):
        super(Solenoid, self).__init__(pose)
        self.radius = radius
        self.length = length
        self.loops = loops
        self.current = 0.0

    def pos(self, t):
        """Parametrized wire position; t in [0, 1] sweeps all loops.

        Returns a (3, N) stack for array `t`, or a (3, 1) column vector
        for a single-element `t`.
        """
        theta = 2 * pi * self.loops * t
        x = self.radius * np.cos(theta)
        y = self.radius * np.sin(theta)
        z = self.length * t
        if np.size(t) > 1:
            return np.vstack((x, y, z))
        return vec(x, y, z)

    def d_pos(self, t):
        """Derivative of pos() with respect to t.

        BUG FIX: d/dt cos(2*pi*L*t) = -2*pi*L*sin(2*pi*L*t); the original
        angular terms were missing the factor self.loops (L).
        """
        w = 2 * pi * self.loops
        dx = -self.radius * w * np.sin(w * t)
        dy = self.radius * w * np.cos(w * t)
        dz = self.length * np.ones(np.shape(t))
        if np.size(t) > 1:
            return np.vstack((dx, dy, dz))
        return vec(dx, dy, dz)

    def set_current(self, current):
        """Set the coil current in amperes."""
        self.current = current

    def field(self, position):
        """Numerically integrate Biot-Savart along the wire at `position`."""
        # The solenoid is defined in global coordinates, so no transform.
        pos_local = position

        def integrand(t):
            dp = np.subtract(pos_local, self.pos(t))   # r - r'
            c = np.cross(self.d_pos(t), dp, axis=0)    # dl x (r - r')
            m = np.linalg.norm(dp, axis=0)
            return c / (m ** 3)

        t = np.linspace(0.0, 1.0, 10000)
        Bi = np.trapz(integrand(t), t, axis=1)
        # BUG FIX: the Biot-Savart prefactor is mu0 / (4*pi); the original
        # `mu0/4 * pi` multiplied by pi instead of dividing by it.
        return mu0 / (4 * pi) * self.current * Bi


class Model(object):
    """Couples a solenoid with the magnet it should levitate."""

    def __init__(self, solenoid, magnet):
        self.solenoid = solenoid
        self.magnet = magnet
        self.reset()

    def reset(self):
        pass


if __name__ == "__main__":
    # Plotting is script-only, so matplotlib is imported lazily here.
    from matplotlib import pyplot as plt

    magnet = Levitron()
    solenoid = Solenoid(1.0, 0.01, 1.0)
    solenoid.set_current(1.0)

    Bs = []
    for i in range(250):
        z = i * 0.01
        B = solenoid.field(vec(0, 0, z))
        Bs.append(B[2])
    plt.plot(Bs)
    plt.show()

    Model(solenoid, magnet)
    # BUG FIX: the original py2 `print m.getHvalue()` was both a syntax
    # error under python3 and a call to a method Model never defined.
StarcoderdataPython
1811886
<filename>wtdepth_bins_distinland_21Nov19.py #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Thu Nov 21 09:08:30 2019 @author: kbefus """ import sys,os import numpy as np import glob import pandas as pd import geopandas as gpd #import dask.array as da import rasterio from rasterio import mask from rasterio.enums import Resampling from rasterio.vrt import WarpedVRT from rasterio.io import MemoryFile from scipy.spatial import cKDTree as KDTree res_dir = r'/mnt/data2/CloudStation' code_dir = os.path.join(res_dir,r'ca_slr/scripts') sys.path.insert(1,code_dir) #%% def xy_from_affine(tform=None,nx=None,ny=None): X,Y = np.meshgrid(np.arange(nx)+0.5,np.arange(ny)+0.5)*tform return X,Y def read_geotiff(in_fname,band=0): with rasterio.open(in_fname) as src: data = src.read()[band] data[data==src.nodata]=np.nan ny,nx = data.shape X,Y = xy_from_affine(src.transform,nx,ny) profile = src.profile return X,Y,data,profile #%% # ----------- Region directory information ----------- research_dir_orig = os.path.join(res_dir,'ca_slr') data_dir_orig = os.path.join(research_dir_orig,'data') research_dir = r'/mnt/762D83B545968C9F' output_dir = os.path.join(research_dir,'data','outputs_fill_gdal_29Oct19') results_dir = os.path.join(research_dir,'results','no_ghb','wt_analysis') if not os.path.isdir(results_dir): os.makedirs(results_dir) model_types = ['model_lmsl_noghb','model_mhhw_noghb'] id_col = 'Id' sealevel_elevs = np.hstack([np.arange(0,2.25,.25),2.5,3.,5.])# m Kh_vals = [0.1,1.,10.] datum_type = 'MHHW' cell_spacing = 10. # meters file_fmt = '{0}_{1}_{2}_Kh{3:3.2f}_slr{4:3.2f}m' head_fmt = '{}_head.tif' wt_fmt = '{}_wtdepth.tif' cell_fmt = '{}_celltypes.tif' marine_value = -500. other_nan_val = -9999. 
active_date = '6Nov19' col_fmt = '{0}_count_sl{1:3.2f}_Kh{2:3.2f}_{3}_{4}' wt_col = 'wtdepth' dx = 20 dist_inland_bins = np.arange(0,1e4+dx,dx) dist_inland_bins = np.hstack([dist_inland_bins,np.inf]) #%% out_hist_data = [] out_hist_cols = [] out_stats = [] for linear_resp_bool in [False,True]: for model_type in model_types: datum_type = model_type.split('_')[1].upper() scenario_type = '_'.join(model_type.split('_')[1:]) wt_dir = os.path.join(output_dir,model_type,'wt') county_dirs = [idir for idir in glob.glob(os.path.join(wt_dir,'*')) if os.path.isdir(idir)] for Kh in Kh_vals: print('------------ Kh = {} ---------------'.format(Kh)) kh_dir = 'Kh{0:3.2f}mday'.format(Kh) kh_dir=kh_dir.replace('.','p') for county_dir in county_dirs: county_name = os.path.basename(county_dir) print('------- {} --------'.format(county_name)) for sl in sealevel_elevs: print('--- SL = {} ----'.format(sl)) # Load water table depth tifs only for modern sl if sl==0.0: tempname = file_fmt.format(county_name,'wt',scenario_type,Kh,sl) tempname = tempname.replace('.','p') wt_fname = os.path.join(county_dir,kh_dir,'{}.tif'.format(tempname)) x,y,wt_sl0,profile = read_geotiff(wt_fname) with np.errstate(invalid='ignore'): wt_sl0[(wt_sl0<0) & (wt_sl0!=marine_value)]=0 # set negative water tables to zero # Assign marine mask marine_mask = wt_sl0 == marine_value wt_sl0[marine_mask] = np.nan # Calculate distance inland raster notnan_or_marine = ~np.isnan(wt_sl0) marine_tree = KDTree(np.c_[x[marine_mask],y[marine_mask]]) dist,marine_inds = marine_tree.query(np.c_[x[notnan_or_marine],y[notnan_or_marine]]) dist_inland_array = np.nan*np.ones_like(wt_sl0) dist_inland_array[notnan_or_marine] = dist.copy() # Load shapefile wt bins cdir_wt = os.path.join(output_dir,model_type,'shp',os.path.basename(county_dir)) if linear_resp_bool and sl==0: # Load original, not lin, modeled output for sl=0 temp_fname = '{0}_{1}_slr{2:3.2f}m_Kh{3:3.2f}mday_emergent'.format(county_name,scenario_type,sl,Kh) temp_fname = 
temp_fname.replace('.','p') shp_name = os.path.join(cdir_wt,kh_dir,'{}.shp'.format(temp_fname)) shp_df = gpd.read_file(shp_name) else: temp_fname = '{0}_{1}_slr{2:3.2f}m_Kh{3:3.2f}mday_emergent'.format(county_name,scenario_type,sl,Kh) if linear_resp_bool: kh_dir2 = '_'.join(['linresponse',kh_dir]) temp_fname = '{}_lin'.format(temp_fname) else: kh_dir2 = kh_dir temp_fname = temp_fname.replace('.','p') shp_name = os.path.join(cdir_wt,kh_dir2,'{}.shp'.format(temp_fname)) shp_df = gpd.read_file(shp_name) unique_types = shp_df[wt_col].unique() with MemoryFile() as memfile: with memfile.open(**profile) as dataset: dataset.write(dist_inland_array[None,:]) for temp_type in unique_types: temp_shp = shp_df[shp_df[wt_col]==temp_type].copy() type_all = [] type_hist_list = np.zeros_like(dist_inland_bins[:-1]) for ifeature in temp_shp.geometry.values: # Sample distance array using the feature mask_dist,tform = mask.mask(dataset,[ifeature],crop=True) mask_dist = mask_dist.squeeze() mask_dist[mask_dist==other_nan_val]=np.nan counts,edges = np.histogram(mask_dist[~np.isnan(mask_dist)],bins=dist_inland_bins) type_hist_list += counts # sum for each feature in bin type_all.extend(mask_dist[~np.isnan(mask_dist)]) if 'bin_left' not in out_hist_cols: left,right = edges[:-1],edges[1:] out_hist_cols.extend(['bin_left','bin_right']) out_hist_data.extend([left,right]) # Store in main list for saving to csv out_hist_data.append(type_hist_list) if linear_resp_bool: d2 = '_'.join([datum_type,'linresp']) else: d2 = datum_type out_hist_cols.append(col_fmt.format(county_name,sl,Kh,d2,temp_type)) # save basic stats on dist data if len(type_all)>0: out_stats.append([county_name,sl,Kh,d2,temp_type, np.nanmedian(type_all),np.nanmean(type_all), np.nanmax(type_all),np.nanmin(type_all), np.nanstd(type_all)]) # Save outputs out_fname = os.path.join(results_dir,'wtdepth_bins_distinland_hists_{}.csv'.format(active_date)) out_df = pd.DataFrame(np.array(out_hist_data).T,columns=out_hist_cols) 
out_df.to_csv(out_fname,index_label='type') out_fname2 = os.path.join(results_dir,'wtdepth_bins_distinland_stats_{}.csv'.format(active_date)) out_cols2 = ['County','Sea_level_m','Kh_mday','Datum_Model', 'WT_bin','Median_dist_m','Mean_dist_m','Max_dist_m', 'Min_dist_m','Std_dist_m'] out_df2 = pd.DataFrame(out_stats,columns=out_cols2) out_df2.to_csv(out_fname2,index=False)
StarcoderdataPython
9613926
<reponame>iwasakishuto/PyVideoEditor<filename>docs/veditor-utils-video_utils-1.py from veditor.utils import show_frames, SampleData fig = show_frames(video=SampleData().VIDEO_PATH, step=300, ncols=2) fig.show()
StarcoderdataPython
82000
import json import os config = {} default_config = {} extra_config = {} def load(): global config load_default() composer = get_value('project-dir')+'composer.json' if not os.path.isfile(composer): raise SystemExit('You have to define a composer.json in your project.') data = load_json(composer) if 'extra' in data: if 'php-code-checker' in data['extra']: config = data['extra']['php-code-checker'] def load_default(): global default_config data = load_json(get_value('checker-dir')+'data/default_configuration.json') default_config = data['extra']['php-code-checker'] def load_json(path): if not os.path.isfile(path): raise SystemExit('The json '+path+' was not found.') with open(path) as data_file: return json.load(data_file) def add(key, value): global extra_config extra_config.update({key: value}) def get_value(key): if key in extra_config: return extra_config[key] elif key in config: return config[key] elif key in default_config: return default_config[key] else: raise SystemExit('The key '+key+' does not exists in the configs (composer.json and default config).')
StarcoderdataPython
235258
<gh_stars>0 import os, sys def main(): if not os.path.exists('examples/thumbs'): os.makedirs('examples/thumbs') generate_thumbnails() else: print('Thumbnails already exist, skipping generation') def generate_thumbnails(): print('Generating thumbnails') if sys.platform == 'linux': command = 'mogrify' else: command = 'magick mogrify' os.chdir('examples') os.system(command + ' -resize 200x158 -extent 200x158 -background transparent -path thumbs *.png') # Do we want animated thumbnails? # os.system(command + ' -resize 200x158 -extent 200x158 -background transparent -path thumbs *.gif') if __name__ == '__main__': main()
StarcoderdataPython
4805776
<gh_stars>0 likes = '0' loves = '853 Yêu thích' def reaction_string_to_number(text: str): multiplier = 1 stringNumber = text.split(' ')[0].replace(',', '').replace('.', '') hasK = stringNumber.find('K') if(hasK != -1): stringNumber = stringNumber.replace('K', '') multiplier = 1000 result = int(stringNumber) * multiplier return result reaction_string_to_number(likes)
StarcoderdataPython
8022397
from typing import Union from django.contrib.postgres.fields import ArrayField from django.db import models from castledice.common.constants import DeckName from .decks import CastleDeck, MarketDeck, VillagerDeck from .exceptions import InvalidDeckTypeError class GameDeck(models.Model): game = models.ForeignKey(to="game.Game", on_delete=models.CASCADE) deck_type = models.CharField(max_length=10, choices=DeckName.django_choices()) draw_pile = ArrayField(models.TextField()) discard_pile = ArrayField(models.TextField()) class Meta: constraints = [ models.UniqueConstraint( fields=("game_id", "deck_type"), name="unique deck per game" ) ] def get_deck(self) -> Union[CastleDeck, MarketDeck, VillagerDeck]: """Convert this GameDeck to a Deck object""" if self.deck_type == DeckName.CASTLE: deck_class = CastleDeck elif self.deck_type == DeckName.MARKET: deck_class = MarketDeck elif self.deck_type == DeckName.VILLAGER: deck_class = VillagerDeck else: raise InvalidDeckTypeError("Unknown deck class for %s" % self.deck_type) return deck_class(draw_pile=self.draw_pile, discard_pile=self.discard_pile)
StarcoderdataPython
8189581
<reponame>HolisticCoders/mop """Minimal observer/publisher implementation for all your GUI needs.""" import traceback from collections import defaultdict _SIGNALS = defaultdict(list) def clear_all_signals(): """Clear all signals. Calling this function will unsubscribe all functions. """ _SIGNALS.clear() def subscribe(name, func): """Subscribe ``func`` to a publisher. :param name: Signal to observe. When ``name`` is called by :func:`publish`, ``func`` will be called. :param func: Function to register to the ``name`` signal. :type name: str :type func: callable """ _SIGNALS[name].append(func) def unsubscribe(name, func): """Unsubscribe ``func`` from a publisher. :param name: Signal to stop to observe. :param func: Function to disconnect from the ``name`` signal. :type name: str :type func: callable """ while func in _SIGNALS[name]: _SIGNALS[name].remove(func) def publish(name, *args, **kwargs): """Emits a signal to all observers subscribed. You can use ``args`` and ``kwargs`` to call subscribers with these arguments. Observers return values are collected and returned in a :class:`weakref.WeakKeyDictionary` keyed by observer. :param name: Name of the signal to emit. Observers subscribed to the same ``name`` will be notified. :type name: str :rtype: weakref.WeakKeyDictionary """ ret = {} for func in _SIGNALS[name]: try: res = func(*args, **kwargs) except Exception: traceback.print_exc() continue ret[func] = res return ret
StarcoderdataPython
107316
"""Class-based and function views for the blog app."""

from blog.models import Post, comment
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
# BUG FIX: DeleteView was used below but never imported.
from django.views.generic import (TemplateView, CreateView, ListView,
                                  DetailView, UpdateView, DeleteView)
# BUG FIX: the @login_required decorator was used but never imported.
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from blog.forms import PostForm, commentForm
from django.urls import reverse_lazy


class AboutView(TemplateView):
    template_name = 'about.html'


class PostListView(ListView):
    model = Post

    def get_queryset(self):
        # `__lte` means "less than or equal to"; the `-` prefix in
        # order_by sorts descending.  BUG FIX: removed a stray closing
        # parenthesis that made this a syntax error.
        # NOTE(review): the filter uses `publish_Date` while order_by uses
        # `publish_date` -- confirm the real field name on the Post model.
        return Post.objects.filter(
            publish_Date__lte=timezone.now()
        ).order_by('-publish_date')


class PostDetailView(DetailView):
    model = Post


# For class-based views LoginRequiredMixin replaces @login_required.
# BUG FIX: the mixin must precede the generic view in the base list so
# that its dispatch() override runs.
class CreatePostView(LoginRequiredMixin, CreateView):
    login_url = '/login/'
    redirect_field_name = 'blog/post_detail.html'
    model = Post
    form_class = PostForm


class UpdatePostView(LoginRequiredMixin, UpdateView):
    login_url = '/login/'
    redirect_field_name = 'blog/post_detail.html'
    model = Post
    form_class = PostForm


class PostDeleteView(LoginRequiredMixin, DeleteView):
    model = Post
    success_url = reverse_lazy('post_list')


class DraftListView(LoginRequiredMixin, ListView):
    login_url = '/login/'
    redirect_field_name = 'blog/post_detail.html'
    model = Post

    def get_queryset(self):
        # Drafts are posts that have no publish date yet.
        return Post.objects.filter(
            publish_Date__isnull=True
        ).order_by('created_date')


#####################################
#####################################


@login_required
def add_comment_to_post(request, pk):
    """Attach a new comment to the post with primary key `pk`."""
    post = get_object_or_404(Post, pk=pk)
    if request.method == 'POST':
        # BUG FIX: was `from = commentForm(...)`, a syntax error.
        form = commentForm(request.POST)
        if form.is_valid():
            new_comment = form.save(commit=False)
            new_comment.post = post
            new_comment.save()
            return redirect('post_detail', pk=post.pk)
    else:
        form = commentForm()
    # BUG FIX: the template path had a stray leading space.
    return render(request, 'blog/comment_form.html', {'form': form})


@login_required
def comment_approve(request, pk):
    """Mark a comment as approved."""
    # BUG FIX: the result was assigned to a local named `comment`, which
    # shadowed the model and raised UnboundLocalError at the lookup.
    target = get_object_or_404(comment, pk=pk)
    target.approve()
    return redirect('post_detail', pk=target.post.pk)


@login_required
def comment_remove(request, pk):
    """Delete a comment and return to its post."""
    target = get_object_or_404(comment, pk=pk)
    post_pk = target.post.pk
    target.delete()
    return redirect('post_detail', pk=post_pk)


@login_required
def post_publish(request, pk):
    """Publish a draft post."""
    post = get_object_or_404(Post, pk=pk)
    post.publish()
    return redirect('post_detail', pk=pk)
StarcoderdataPython
6442346
#!/bin/python3
# author: <NAME>

"""Small file helpers plus FileEx, a context-manager wrapper around a sink.

A sink may be None (no-op), one of the subprocess constants (passed
through untouched), or a filesystem path (opened/closed, readable, and
optionally auto-removed when its content is empty).
"""

import os
import subprocess


def write_file(f, s, mode='w'):
    """Write string `s` to path `f`, creating parent directories as needed."""
    if f:
        # BUG FIX: a bare filename has an empty dirname, and
        # os.makedirs('') raises FileNotFoundError -- guard it.
        parent = os.path.dirname(f)
        if parent:
            os.makedirs(parent, 0o777, True)
        if s is None:
            s = ''
        with open(f, mode) as fp:
            fp.write(s)


def read_file(f, mode='r'):
    """Return the contents of `f`, or '' when the path is falsy or missing."""
    if not f:
        return ''
    if not os.path.exists(f):
        return ''
    with open(f, mode) as fp:
        return fp.read()


def secure_open_file(f, mode='w'):
    """Open `f`, first creating it (and its directories) if it is missing."""
    if not os.path.exists(f):
        write_file(f, '')
    return open(f, mode)


def remove_file(f):
    """Delete `f`; return True on success, False on any failure."""
    try:
        os.unlink(f)
        return True
    except Exception:
        # BUG FIX: narrowed the bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) while keeping the best-effort
        # False-on-failure contract, including for bad argument types.
        return False


class FileEx(object):
    """Context-manager wrapper that adapts its behavior to the sink type."""

    def __init__(self, sink=None, mode='w', autoremove=False):
        self._sink = sink
        self._fp = None
        self._is_opened = False
        # Defaults: pass the sink through untouched.
        self._open_func = lambda f: f
        self._close_func = lambda f, fp: f
        self._read_func = None
        self._remove_func = None
        self._content = ''
        self.autoremove = autoremove
        if self._sink is None:
            # No sink: open/close are no-ops.
            self._open_func = lambda f: None
            self._close_func = lambda f, fp: None
        elif self._sink in (subprocess.PIPE, subprocess.DEVNULL,
                            subprocess.STDOUT):
            # subprocess constants are forwarded as-is.
            self._open_func = lambda f: f
            self._close_func = lambda f, fp: f
        elif isinstance(self._sink, str):
            # Filesystem path: real open/close/read/remove behavior.
            self._open_func = lambda f: secure_open_file(f, mode)
            self._close_func = lambda f, fp: fp.close()
            self._read_func = lambda f: read_file(f)
            self._remove_func = lambda f: remove_file(f)

    def open(self):
        """Open the sink (idempotent); return the file object or sink."""
        if not self._is_opened:
            self._is_opened = True
            self._fp = self._open_func(self._sink)
        return self._fp

    def close(self):
        """Close the sink (idempotent), auto-removing empty files if asked."""
        if self._is_opened:
            self._is_opened = False
            self._close_func(self._sink, self._fp)
            self._fp = None
            if self.autoremove:
                self.remove_empty()

    def read(self):
        """Return (and cache) the sink's content; '' for non-path sinks."""
        if self._content:
            return self._content
        if self._read_func:
            self._content = self._read_func(self._sink)
        return self._content

    def empty(self):
        """True when no content has been read/cached from the sink."""
        return self._content == ''

    # BUG FIX (compat): `emtpy` was a typo; `empty` is the corrected
    # spelling and this alias keeps existing callers working.
    emtpy = empty

    def remove_empty(self):
        """Remove the sink file when its cached content is empty."""
        if self._remove_func and self.empty():
            self._remove_func(self._sink)

    def __enter__(self):
        return self.open()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False

    def __repr__(self):
        return 'FileEx(%s)' % self._sink

    @property
    def path(self):
        """The sink path for filesystem sinks, None otherwise."""
        if isinstance(self._sink, str):
            return self._sink
StarcoderdataPython
3346024
<filename>src/factories/service_factory.py<gh_stars>0 """Service factory module""" import factory from faker import Faker from faker.providers import lorem from src.factories import BaseFactory from src.models.service import Service from app import database as db faker = Faker() faker.add_provider(lorem) class ServiceFactory(BaseFactory): """Service factory""" name = factory.Sequence(lambda o: f'{faker.word().upper()}_SERVICE') is_active = factory.Iterator([False, True]) class Meta: """Service Meta class""" model = Service sqlalchemy_session = db.session
StarcoderdataPython
6557115
from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.decorators import api_view, permission_classes from apps.account.api.serializers import RegistrationSerializer, UserSerializer from rest_framework.authtoken.models import Token ''' def get_random_string(length): return ''.join(random.choice(string.ascii_letters) for i in range(length)) ''' from apps.account.models import User @api_view(['POST']) @permission_classes([]) def registration_view(request): serializer = RegistrationSerializer(data=request.data) data = {} if serializer.is_valid(): user = serializer.save() # user.business_card_url = get_random_string(10) # user.save() data['response'] = "registration_successful" data['email'] = user.email token = Token.objects.get(user=user).key data['token'] = token else: data = serializer.errors return Response({'errors': data}, status=400) return Response(data) @api_view(['PUT']) @permission_classes([]) def update_business_card_visibility(request): calling_user = request.user serializer = UserSerializer(calling_user, data=request.data) if serializer.is_valid(): serializer.save() return Response({ 'response': 'success' }) else: data = serializer.errors return Response({'errors': data}, status=400) @api_view(['GET']) @permission_classes([]) def get_business_card_visibility(request): calling_user = request.user serializer = UserSerializer(calling_user) return Response(serializer.data) @api_view(['GET']) @permission_classes([IsAuthenticated]) def get_info(request): serialised = UserSerializer(request.user) return Response(serialised.data)
StarcoderdataPython
3220484
<reponame>nogproject/nog #!/usr/bin/env python3 # Import only packages that are usually available (preferrably only core # packages), so that the file is self-contained and can, in principle, be used # to resolve dependencies when dependencies such as nogpy are not yet # available. from glob import glob import json import os.path class Resolver: def __init__(self, warn=None): self.registry = {} self.warn = warn or False def resolve(self, name): return self.registry[name]['localpath'] def addPackage(self, abspath): if os.path.isdir(abspath): abspath = abspath + '/nogpackage.json' with open(abspath) as fp: nogpackage = json.load(fp) nogpackage['localpath'] = os.path.dirname(abspath) name = nogpackage['package']['name'] if name in self.registry: if nogpackage['localpath'] == self.registry[name]['localpath']: return if not self.warn: return msg = ('Warning: Duplicate package `{0}` at `{1}`; ' 'previous at `{2}`.') print(msg.format(name, nogpackage['localpath'], self.registry[name]['localpath'])) return self.registry[name] = nogpackage def addWithParents(self, abspath): for p in glob(abspath + '/nogpackage.json'): self.addPackage(p) while abspath != '/': for p in glob(abspath + '/nogpackages/*/nogpackage.json'): self.addPackage(p) (abspath, tail) = os.path.split(abspath) def addWithLocal(self, abspath): for p in glob(abspath + '/nogpackage.json'): self.addPackage(p) self.addLocal(abspath) def addLocal(self, abspath): for p in glob(abspath + '/nogpackages/*/nogpackage.json'): self.addPackage(p)
StarcoderdataPython
247168
import random def Tonelli_Shanks(n, p): """ Находит дискретный корень x^2 = n (mod p) :param n: prime :param p: prime :return: x """ from Helper import EulerCriterion, moduloPow assert(EulerCriterion(n, p)) S = 0 # number of offsets (количество смещений) m = (p - 1) while m % 2 == 0: S += 1 m >>= 1 Q = m while True: z = random.randint(0, p - 1) if not EulerCriterion(z, p): break #for z in range(p): # if not EulerCriterion(z, p): # break #if z == p: # raise Exception("Can't find non-residue") M = S c = moduloPow(z, Q, p) t = moduloPow(n, Q, p) R = moduloPow(n, (Q + 1) // 2, p) while True: if t == 0: return 0 if t == 1: return R tpower = (t ** 2) % p i = 1 while tpower != 1: i += 1 tpower = (tpower ** 2) % p #b = moduloPow(c, 2 ** (M - i - 1), p) b = moduloPow(c, 1 << (M - i), p) M = i c = (b ** 2) % p t = moduloPow(t, b ** 2, p) R = (R * b) % p
StarcoderdataPython
4847296
<reponame>unclechu/py-radio-class # -*- coding: utf-8 -*- from setuptools import setup from test import TestCommand CLASSIFIERS = [ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] setup( name='radio-class', version='0.1.0', description='Event-bus implementation', long_description='Event-bus implementation inspired by backbone.radio', url='https://github.com/unclechu/py-radio-class', author='<NAME>', author_email='<EMAIL>', license='MIT', platforms=['any'], classifiers=CLASSIFIERS, keywords='events bus library radio', packages=['radio'], cmdclass={'test': TestCommand} )
StarcoderdataPython
6401816
<gh_stars>10-100 from machine import Pin, I2C from oled import Write, GFX, SSD1306_I2C from oled.fonts import ubuntu_mono_15, ubuntu_mono_20 scl = Pin(15) sda = Pin(4) i2c = I2C(scl=scl, sda=sda) Pin(16, Pin.OUT, value=1) oled = SSD1306_I2C(128, 64, i2c) gfx = GFX(128, 64, oled.pixel) write15 = Write(oled, ubuntu_mono_15) write20 = Write(oled, ubuntu_mono_20) write15.text("Espresso IDE", 0, 0) write15.text("micropython-oled", 0, 15) gfx.line(0, 32, 127, 32, 1) gfx.line(0, 33, 127, 33, 1) write20.text("1234567890", 0, 35) write15.text("Ubuntu Mono font", 0, 52) oled.show()
StarcoderdataPython
12844198
"""Template filter for rendering Markdown to HTML.""" from django import template from django.utils.safestring import mark_safe from django.template.defaultfilters import stringfilter from markdownx.utils import markdownify register = template.Library() @register.filter @stringfilter def markdown(raw_markdown): """Render Markdown as HTML. Args: raw_markdown (str): Text of raw Markdown. Returns: HTML string of rendered Markdown marked as safe. """ return mark_safe(markdownify(raw_markdown))
StarcoderdataPython
54039
import datetime from .celery import celery from backend.news import hot_topics from backend.cache import sadd from backend.utils import time_now_formatted @celery.task(bind=True) def store_hot_topics(a): sadd(time_now_formatted('PESTO_SYSTEM_HOT_TOPICS'), hot_topics())
StarcoderdataPython
300076
import chainer class PreprocessSVHN(chainer.link.Chain): def __init__(self): super(PreprocessSVHN, self).__init__() def augment(self, x): return x def __call__(self, x): x, t, l = x if isinstance(l, int) or isinstance(l, float): xp = chainer.cuda.get_array_module(x) l = xp.array([l], dtype=xp.float32) return x, t, l
StarcoderdataPython
4840597
from setuptools import setup, find_namespace_packages setup( name="pyskip_blox", version="0.0.1", author="<NAME>, <NAME>", description="A pyskip wrapper library for loading and operating on Minecraft assets", packages=find_namespace_packages(), install_requires=["pyskip>=0.0.1", "nbt", "tqdm", "pillow"], )
StarcoderdataPython
11348813
<reponame>ychen820/microblog # Copyright 2013 Google Inc. All Rights Reserved. """A calliope command that calls a help function.""" from googlecloudsdk.calliope import base from googlecloudsdk.calliope import cli from googlecloudsdk.calliope import exceptions as c_exc from googlecloudsdk.core import log from googlecloudsdk.core import metrics @base.ReleaseTracks(base.ReleaseTrack.GA) class Help(base.Command): """Prints detailed help messages for the specified commands. This command prints a detailed help message for the commands specified after the ``help`' operand. """ @staticmethod def Args(parser): command_arg = parser.add_argument( 'command', nargs='*', help='The commands to get help for.') command_arg.detailed_help = """\ A sequence of group and command names with no flags. """ @c_exc.RaiseToolExceptionInsteadOf(cli.NoHelpFoundError) def Run(self, args): # pylint: disable=protected-access help_func = self.cli._HelpFunc() command_path = [self.cli._TopElement().name] + (args.command or []) metrics.Help('.'.join(command_path), 'help') def RaiseError(): raise c_exc.ToolException( 'Unknown command: {command}'.format(command='.'.join(args.command))) def ShowShortHelp(): """Print short help text.""" segments = [segment.replace('-', '_') for segment in args.command] # pylint: disable=protected-access current_element = self.cli._TopElement() for segment in segments: current_element = current_element.LoadSubElement(segment) if not current_element: RaiseError() log.out.write((current_element.GetShortHelp())) if not help_func: ShowShortHelp() else: try: help_func(command_path) except cli.NoHelpFoundError: ShowShortHelp()
StarcoderdataPython
1702305
<reponame>arifulhaqueuc/python-algorithm-excersice ## Print the items form the following list ## if the items ONLY start with "ba" list_all_1 = [ 'bb1' ,'bb2' ,'bb3' ,'ba4' ] bb_remove = [x for x in list_all_1 if x[:2]=='ba'] for i in ba_remove: print i ##################### ##################### list_all_2 = [ 'b000b1' ,'b000b2' ,'b000b3' ,'b000a4' ,'ba1' ,'bb2' ,'ba3' ,'ba4' ] ## let's print the items if ## "ba" is in a item ba_print = [s for s in list_all_2 if 'ba' in s] for m in ba_print: print m
StarcoderdataPython
3529636
# TODO: add unit tests for utils.py import numpy as np import pandas as pd import pytest from sfrmaker.utils import (assign_layers, width_from_arbolate_sum, arbolate_sum, make_config_summary) def test_assign_layers(shellmound_sfrdata, shellmound_model): reach_data = shellmound_sfrdata.reach_data.copy() reach_data.reset_index(inplace=True, drop=True) botm = shellmound_model.dis.botm.array.copy() idomain = shellmound_model.dis.idomain.array.copy() # test cases # invalid bottom in stack of all inactive celle i, j = reach_data['i'], reach_data['j'] original_botm_at_rno0 = botm[-1, i[0], j[0]] reach_data.loc[0, 'strtop'] = original_botm_at_rno0 - 10 idomain[:, i[0], j[0]] = 0 # active cells on top of inactive cells # lowest active layer should be pushed downward, # not lower (inactive) layer reach_data.loc[1, 'strtop'] = botm[-1, i[1], j[1]] - 10 lowest_active_layer = 9 botm[lowest_active_layer+1:, i[1], j[1]] = botm[-1, i[1], j[1]] idomain[lowest_active_layer+1:, i[1], j[1]] = 0 k, new_botm = assign_layers(reach_data, botm_array=botm, idomain=idomain) reach_data['k'] = k active_reaches = idomain[k, i, j] > 0 shellmound_model.dis.botm = new_botm assert np.all(reach_data.loc[active_reaches].strtop.values > shellmound_model.dis.botm.array[reach_data.k.values, reach_data.i.values, reach_data.j.values][active_reaches]) # model bottom at reach 0 should have been left alone (all inactive cells) assert new_botm[-1, i[0], j[0]] == original_botm_at_rno0 # reach 1 should be in lowest active layer, not lowest layer assert reach_data.loc[1, 'k'] == lowest_active_layer # inactive layers below should all be zero-thickness assert np.allclose(new_botm[lowest_active_layer:, i[1], j[1]], new_botm[-1, i[1], j[1]]) # no idomain supplied k2, new_botm2 = assign_layers(reach_data, botm_array=botm) # for now, return a 2D (model bottom) if no idomain # (for backwards compatibility) assert len(new_botm2.shape) == 2 reach_data['k'] = k2 botm[-1] = new_botm2 assert np.all(reach_data.strtop.values 
> botm[k2, i, j]) @pytest.mark.parametrize('asum,expected', ((1e6, 124.7*.3048),) ) def test_width_from_arbolate_sum_defaults(asum, expected): result = width_from_arbolate_sum(asum) assert np.allclose(result, expected, rtol=1e-4) @pytest.mark.parametrize('asum,a,b,input_units,output_units,expected', ((1e6, 0.1193, 0.5032, 'meters', 'feet', 124.7), (1e3, 0.1193, 0.5032, 'km', 'meters', 124.7*.3048), (1e6, 0.0628, 0.5099, 'meters', 'feet', 72.00), (1e3, 0.0628, 0.5099, 'km', 'meters', 72.00*.3048), (0, 0.0628, 0.5099, 'km', 'meters', 1), ([1e3], 0.0628, 0.5099, 'km', 'meters', [72.00*.3048]), (np.array([1e3]), 0.0628, 0.5099, 'km', 'meters', [72.00*.3048]), (np.array([1e3, 0]), 0.0628, 0.5099, 'km', 'meters', [72.00*.3048, 1])) ) def test_width_from_arbolate_sum(asum, a, b, input_units, output_units, expected): result = width_from_arbolate_sum(asum, a, b, minimum_width=1, input_units=input_units, output_units=output_units) assert np.allclose(result, expected, rtol=1e-4) def test_asum(sfr_test_numbering): rd, sd = sfr_test_numbering graph = dict(zip(sd.nseg, sd.outseg)) lengths = dict(zip(sd.nseg, np.arange(len(sd)))) asum = arbolate_sum(sd.nseg, lengths, graph) assert (asum[1] == 6) & (asum[2] == np.arange(len(sd)).sum()) def test_make_config_summary(): results = make_config_summary()
StarcoderdataPython
8155441
def test_node_version_set_to_12(host):
    """The node binary is installed and reports a v12.x version."""
    assert host.exists("node")
    reported_version = host.run("node --version").stdout
    assert reported_version.startswith('v12')


def test_given_node_packages_are_installed(host):
    """Each expected global npm CLI tool is available on the host."""
    for tool in ('ionic', 'cordova', 'appcenter'):
        assert host.exists(tool)
StarcoderdataPython
12848695
#!/usr/bin/python26
"""Watches a list of directories for file updates.

The classes in this module will watch a list of subdirectories for
file updates. A class is passed in at object initialization time and
is used to create objects as new files are discovered. If a file is
updated then the reload() function on that class will be called. If
the file is removed the class will be deleted. It is important to
verify that __init__, __del__, and reload() are all defined properly.

A simple example of this module use looks like this:
  class watcher(object):
    def __init__(self, filename):
      self._filename = filename
      print 'Init: %s' % filename

    def __del__(self):
      print 'Del: %s' % self._filename

    def reload(self):
      print 'reload: %s' % self._filename

  x = inotify.InotifyWatcher(['/tmp/bar'], watcher)

Only one InotifyWatcher can be registered per process due to the way
that inotify works.

Author: <NAME> (<EMAIL>)
"""

import fcntl
import logging
import os
import signal
import stat

# dnotify event mask: fire on modify/create/delete/rename, and keep the
# watch armed after each event (DN_MULTISHOT) rather than one-shot.
WATCH_MASK = (fcntl.DN_MODIFY | fcntl.DN_CREATE | fcntl.DN_DELETE |
              fcntl.DN_RENAME | fcntl.DN_MULTISHOT)


class WatchClass(object):
    """Interface class to be passed into InotifyWatcher()"""

    def __init__(self, filename):
        pass

    def __del__(self):
        pass

    def reload(self):
        """Called when the file is updated on disk."""
        pass


class InotifyWatcher(object):
    """Watches a list of directories for updates to the files in them.

    This class will watch the directories in watch_directories and will
    automatically make a class of watch_class type when a new one is
    found.

    Args:
      watch_directories: An iterable list of directories to watch for
        files in.
      watch_class: The class that will be used to wrap each file.
      file_pattern: An optional function that filters filenames. The
        basic footprint takes a single parameter (the filename) and
        returns True/False if it should be watched or not. If this
        is not given then all files will be watched.
    """

    def __init__(self, watch_directories, watch_class, file_pattern=None):
        if file_pattern is None:
            file_pattern = (lambda x: True)
        self._watch_directories = watch_directories
        self._watch_class = watch_class
        self._file_pattern = file_pattern
        # dir -> open fd carrying the F_NOTIFY registration
        self._watch_fds = {}
        # file -> [last_seen_mtime, watch_class_instance]
        self._watch_files = {}
        # Both SIGIO (dnotify) and SIGHUP trigger a rescan.
        signal.signal(signal.SIGIO, self._inotify)
        signal.signal(signal.SIGHUP, self._inotify)
        self.rescan()

    def _recurse_directory(self):
        """Recurses through all self._watch_directories finding files.

        Returns:
          A (set_of_directories, set_of_files) tuple of everything that
          is accessible and matches the file pattern.
        """
        all_files = set()
        dirs = set(self._watch_directories)
        all_dirs = set()
        while dirs:
            dir = dirs.pop()
            try:
                files = [os.path.join(dir, f) for f in os.listdir(dir)]
                all_dirs.add(dir)
                all_files.update([f for f in files
                                  if os.path.isfile(f)
                                  and self._file_pattern(f)])
                # Bug fix: the hidden-directory check used to test the
                # first character of the *joined* path (f[0]), which is
                # '/' for absolute watch roots, so hidden directories
                # were never actually skipped. Test the basename instead.
                dirs.update([f for f in files
                             if os.path.isdir(f)
                             and not os.path.basename(f).startswith('.')])
            except (IOError, OSError):
                logging.warning('Unable to access: %s' % dir)
        return (all_dirs, all_files)

    def _register_inotify(self, dir):
        """Registers a watch on the given directory."""
        if dir in self._watch_fds:
            return
        logging.info('Registering a inotify watch on %s' % dir)
        try:
            fd = os.open(dir, os.O_RDONLY)
            fcntl.fcntl(fd, fcntl.F_NOTIFY, WATCH_MASK)
            self._watch_fds[dir] = fd
        except (IOError, OSError) as e:
            # os.open raises OSError; catch it too so a vanished
            # directory can't kill the process.
            logging.error('Unable to register watch on %s: %s' % (dir, e))

    def _unregister_inotify(self, dir):
        """Unregisters the directory for update notification."""
        if dir not in self._watch_fds:
            return
        logging.info('Unregistering a inotify watch on %s' % dir)
        # Bug fix: the old code only deleted the dict entry, leaking the
        # open fd (and keeping the kernel notification armed). Close it.
        fd = self._watch_fds.pop(dir)
        try:
            os.close(fd)
        except OSError:
            pass

    def _inotify(self, signum, frame):
        """Called when either SIGHUP or SIGIO (inotify) is received."""
        logging.info('Received SIGHUP or a file update notification.')
        # Re-arm the handlers, then rescan everything.
        signal.signal(signal.SIGIO, self._inotify)
        signal.signal(signal.SIGHUP, self._inotify)
        self.rescan()

    def _mtime(self, filename):
        """Returns the mtime of the given file (in seconds)."""
        try:
            s = os.stat(filename)
            return s[stat.ST_MTIME]
        except (IOError, OSError):
            # Bug fix (was the FIXME): os.stat raises OSError, not
            # IOError, so a file deleted between the scan and the stat
            # used to crash the signal handler. Treat any stat failure
            # as "no mtime" and return zero.
            return 0

    def files(self):
        """Returns a list of all watch objects we are maintaining.

        This will return a list of all watch_class objects associated
        with files in the list of directories that we are currently
        watching.

        Returns:
          A list of all watch_class objects we are maintaining.
        """
        return [w for _, w in self._watch_files.values()]

    def rescan(self):
        """Rescans all directories looking for files inside.

        This will walk all the directories listed when this class was
        created looking for configuration files. If new config files
        are found then a object will be created using the class passed
        in at init time. If a file that used to exist was deleted then
        the config object for it will also be deleted.
        """
        new_dirs, new_files = self._recurse_directory()
        # Old directories, unregister (and close) watches.
        for dir in set(self._watch_fds).difference(new_dirs):
            self._unregister_inotify(dir)
        # New directories, register watches.
        for dir in new_dirs:
            self._register_inotify(dir)
        # Walk through all files that no longer exist.
        for file in set(self._watch_files).difference(new_files):
            logging.info('File deleted (%s): Removing its object.', file)
            del self._watch_files[file]
        for file in new_files:
            if file not in self._watch_files:
                w = self._watch_class(file)
                # mtime starts as None so the mtime check below fires
                # one reload() right after creation (original behavior).
                self._watch_files[file] = [None, w]
                logging.info('Found new file (%s): Making new object', file)
            t = self._watch_files[file]
            m = self._mtime(file)
            if t and t[0] != m:
                t[0] = m
                t[1].reload()
StarcoderdataPython
5178182
# <filename>hold_grudge/blog/admin.py<gh_stars>1-10
"""Django admin registrations for the blog app (Category, Tag, Post)."""
from django.contrib import admin
from django.utils.html import format_html
from django.urls import reverse

from .models import Post, Category, Tag
from .adminforms import PostAdminForm
from hold_grudge.custom_site import custom_site


@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
    """Admin for Category; the creator is stamped server-side on save."""
    list_display = ('name', 'status', 'is_nav', 'creator', 'create_time')
    fields = ('name', 'status', 'is_nav')

    def post_count(self, obj):
        # Number of posts under this category.
        # NOTE(review): not referenced in list_display, so it never shows
        # in the changelist — presumably meant to be added there; confirm.
        return obj.post_set.count()

    post_count.short_description = "帖子数量"

    def save_model(self, request, obj, form, change):
        # Always record the logged-in user as the creator.
        obj.creator = request.user
        return super(CategoryAdmin, self).save_model(request, obj, form, change)


@admin.register(Tag)
class TagAdmin(admin.ModelAdmin):
    """Admin for Tag; same creator-stamping behavior as CategoryAdmin."""
    list_display = ('name', 'status', 'creator', 'create_time')
    fields = ('name', 'status')

    def save_model(self, request, obj, form, change):
        obj.creator = request.user
        return super(TagAdmin, self).save_model(request, obj, form, change)


class CategoryOwnerFilter(admin.SimpleListFilter):
    """Changelist filter that only offers categories owned by the
    current user."""
    title = "类别"
    parameter_name = "owner_category"

    def lookups(self, request, model_admin):
        # Only the requesting user's own categories appear as choices.
        return Category.objects.filter(creator=request.user).values_list('id', 'name')

    def queryset(self, request, queryset):
        category_id = self.value()
        if category_id:
            return queryset.filter(category_id=category_id)
        return queryset


@admin.register(Post, site=custom_site)
class PostAdmin(admin.ModelAdmin):
    """Admin for Post, registered on the custom admin site.

    Posts are scoped per-user: the changelist and the category filter
    only show the requesting user's own records.
    """
    form = PostAdminForm
    list_display = ('title', 'status', 'creator', 'category', 'operator')
    list_display_links = []

    list_filter = [CategoryOwnerFilter]
    search_fields = ['title', 'category__name']

    actions_on_top = True
    actions_on_bottom = True

    fieldsets = (
        ('基础配置', {
            'description': '基础配置',
            'fields': (
                ('title', 'category'),
                'status',
            )
        }),
        ('内容', {
            'fields': ('desc', 'content')
        }),
        ('额外信息', {
            'classes': ('wide',),
            'fields': ('tag',)
        })
    )

    def operator(self, obj):
        # Renders an "edit" link column pointing at this post's change
        # page on the custom admin site.
        return format_html(
            '<a href="{}">编辑</a>',
            reverse('cus_admin:blog_post_change', args=(obj.id,))
        )
    operator.short_description = '操作'

    def save_model(self, request, obj, form, change):
        obj.creator = request.user
        return super(PostAdmin, self).save_model(request, obj, form, change)

    def get_queryset(self, request):
        # NOTE(review): super(admin.ModelAdmin, self) skips ModelAdmin's
        # own get_queryset and dispatches to its base class — the usual
        # spelling is super(PostAdmin, self). Looks unintentional; confirm.
        qs = super(admin.ModelAdmin, self).get_queryset(request)
        return qs.filter(creator=request.user)

    class Media:
        # Pull Bootstrap into the admin pages from a CDN.
        css = {
            'all': ('https://cdn.bootcss.com/bootstrap/4.0.0-beta.2/css/bootstrap.min.css',),
        }
        js = ('https://cdn.bootcss.com/bootstrap/4.0.0-beta.2/js/bootstrap.bundle.js',)
StarcoderdataPython
9777335
"""
Faça um programa que leia um número real e o imprima.
"""
# Read one real number from stdin and echo it back.
valor = float(input('Digite um numero real: '))
print(valor)
StarcoderdataPython
11382327
"""Driver script: trains a Mnih-2013-style DQN agent on the gridworld
environment."""
from __future__ import division

import dqn
import gym
import numpy as np
import random
# import matplotlib.pyplot as plt
import scipy.misc
import os

from gridworld import gameEnv

# Fully-observable 5x5 gridworld instance.
env = gameEnv(partial=False, size=5)

# debug marker left by the author
print('bal')

# Build the network and run the full training loop (side-effecting;
# blocks until training completes).
testMnih = dqn.QnetworkMnih13()
testMnih.runTraining(env)
StarcoderdataPython
6490137
<filename>torchvex/cam/__init__.py<gh_stars>1-10 from .cam import CAM from .grad_cam import GradCAM
StarcoderdataPython
1673114
# <filename>tests/vcr_support.py
"""Shared VCR.py configuration for the test suite.

Tests import the module-level ``vcr`` instance and use it to record /
replay HTTP interactions as cassettes.
"""
import os

import vcr as vcrpy

# Cassettes live under tests/data/cassettes, next to this file.
test_dir = os.path.dirname(__file__)
test_data_dir = os.path.join(test_dir, "data", "cassettes")

# Record mode is overridable via the VCR_RECORD_MODE environment
# variable; "new_episodes" replays existing cassettes and records
# only interactions that are not on tape yet.
vcr = vcrpy.VCR(
    cassette_library_dir=test_data_dir,
    record_mode=os.environ.get("VCR_RECORD_MODE", "new_episodes"),
)
StarcoderdataPython
6571386
# <reponame>ujjwalsh/cs
#! /usr/bin/env python
"""Thin CloudStack API client: request signing (hmac/sha1), list
pagination, async-job polling, and configuration loading from the
environment and cloudstack.ini files."""

from __future__ import print_function

import base64
import hashlib
import hmac
import os
import re
import sys
import time
from datetime import datetime, timedelta
from fnmatch import fnmatch

try:
    from configparser import ConfigParser
except ImportError:  # python 2
    from ConfigParser import ConfigParser

try:
    from urllib.parse import quote
except ImportError:  # python 2
    from urllib import quote

import pytz
import requests
from requests.structures import CaseInsensitiveDict

PY2 = sys.version_info < (3, 0)

# py2/py3 type aliases used for isinstance checks throughout.
if PY2:
    text_type = unicode  # noqa
    string_type = basestring  # noqa
    integer_types = int, long  # noqa
    binary_type = str
else:
    text_type = str
    string_type = str
    integer_types = int
    binary_type = bytes

# Optional asyncio variant, only importable on python >= 3.5.
if sys.version_info >= (3, 5):
    try:
        from . import AIOCloudStack  # noqa
    except ImportError:
        pass

TIMEOUT = 10
PAGE_SIZE = 500
POLL_INTERVAL = 2.0
EXPIRATION = timedelta(minutes=10)
EXPIRES_FORMAT = "%Y-%m-%dT%H:%M:%S%z"

REQUIRED_CONFIG_KEYS = {"endpoint", "key", "secret", "method", "timeout"}
ALLOWED_CONFIG_KEYS = {"verify", "cert", "retry", "theme", "expiration",
                       "poll_interval", "trace", "dangerous_no_tls_verify",
                       "header_*"}
DEFAULT_CONFIG = {
    "timeout": 10,
    "method": "get",
    "retry": 0,
    "verify": None,
    "cert": None,
    "name": None,
    "expiration": 600,
    "poll_interval": POLL_INTERVAL,
    "trace": None,
    "dangerous_no_tls_verify": False,
}

# CloudStack async job status codes.
PENDING = 0
SUCCESS = 1
FAILURE = 2


def strtobool(val):
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
    'val' is anything else.

    This function has been borrowed from distutils.util module in order
    to avoid pulling a dependency on deprecated module "imp".
    """
    val = val.lower()
    if val in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    else:
        raise ValueError("invalid truth value %r" % (val,))


def check_key(key, allowed):
    """
    Validate that the specified key is allowed according the provided
    list of patterns (exact match first, then fnmatch globs such as
    "header_*").
    """
    if key in allowed:
        return True
    for pattern in allowed:
        if fnmatch(key, pattern):
            return True
    return False


def cs_encode(s):
    """Encode URI component like CloudStack would do before signing.

    java.net.URLEncoder.encode(s).replace('+', '%20')
    """
    if PY2 and isinstance(s, text_type):
        s = s.encode("utf-8")
    return quote(s, safe="*")


def transform(params):
    """
    Transforms an heterogeneous map of params into a CloudStack
    ready mapping of parameter to values.

    It handles lists and dicts.  Mutates `params` in place: None and
    empty containers are dropped, scalars are stringified, flat lists
    become comma-joined strings, and lists of dicts expand into the
    "key[index].name" form the API expects.

    >>> p = {"a": 1, "b": "foo", "c": ["eggs", "spam"], "d": {"key": "value"}}
    >>> transform(p)
    >>> print(p)
    {'a': '1', 'b': 'foo', 'c': 'eggs,spam', 'd[0].key': 'value'}
    """
    for key, value in list(params.items()):
        if value is None:
            params.pop(key)
            continue
        if isinstance(value, (string_type, binary_type)):
            continue
        if isinstance(value, integer_types):
            params[key] = text_type(value)
        elif isinstance(value, (list, tuple, set, dict)):
            if not value:
                params.pop(key)
            else:
                if isinstance(value, dict):
                    value = [value]
                if isinstance(value, set):
                    value = list(value)
                if not isinstance(value[0], dict):
                    params[key] = ",".join(value)
                else:
                    params.pop(key)
                    for index, val in enumerate(value):
                        for name, v in val.items():
                            k = "%s[%d].%s" % (key, index, name)
                            params[k] = text_type(v)
        else:
            raise ValueError(type(value))


class CloudStackException(Exception):
    """Exception nicely wrapping a request response."""

    def __init__(self, *args, **kwargs):
        self.response = kwargs.pop('response')
        super(CloudStackException, self).__init__(*args, **kwargs)


class CloudStackApiException(CloudStackException):
    """A CloudStackException that also carries the API error payload."""

    def __init__(self, *args, **kwargs):
        self.error = kwargs.pop('error')
        super(CloudStackApiException, self).__init__(*args, **kwargs)

    def __str__(self):
        return '{0}, error: {1}'.format(
            super(CloudStackApiException, self).__str__(),
            self.error
        )


class CloudStack(object):
    """Client for one CloudStack endpoint.

    Any attribute access (e.g. client.listVirtualMachines(...)) is
    turned into the API command of the same name via __getattr__.
    """

    def __init__(self, endpoint, key, secret, timeout=10, method='get',
                 verify=None, cert=None, name=None, retry=0,
                 job_timeout=None, poll_interval=POLL_INTERVAL,
                 expiration=timedelta(minutes=10), trace=False,
                 dangerous_no_tls_verify=False, headers=None,
                 session=None, fetch_result=False):
        self.endpoint = endpoint
        self.key = key
        self.secret = secret
        self.timeout = int(timeout)
        self.method = method.lower()
        # explicit `verify` wins; otherwise TLS verification is on unless
        # dangerous_no_tls_verify was requested
        if verify:
            self.verify = verify
        else:
            self.verify = not dangerous_no_tls_verify
        if headers is None:
            headers = {}
        self.headers = headers
        self.session = session if session is not None else requests.Session()
        self.cert = cert
        self.name = name
        self.retry = int(retry)
        self.job_timeout = int(job_timeout) if job_timeout else None
        self.poll_interval = float(poll_interval)
        # accept either a timedelta or a number of seconds
        if not hasattr(expiration, "seconds"):
            expiration = timedelta(seconds=int(expiration))
        self.expiration = expiration
        self.trace = bool(trace)
        self.fetch_result = fetch_result

    def __repr__(self):
        return '<CloudStack: {0}>'.format(self.name or self.endpoint)

    def __getattr__(self, command):
        # Unknown attributes become API calls: client.<command>(**kwargs).
        def handler(**kwargs):
            return self._request(command, **kwargs)
        return handler

    def _prepare_request(self, command, json=True, opcode_name='command',
                         fetch_list=False, **kwargs):
        """Build the (kind, params) pair for a request: merges the API
        key, command, response format, pagination and signature-v3
        expiry into the caller's parameters."""
        params = CaseInsensitiveDict(**kwargs)
        params.update({
            'apiKey': self.key,
            opcode_name: command,
        })
        if json:
            params['response'] = 'json'
        if 'page' in kwargs or fetch_list:
            params.setdefault('pagesize', PAGE_SIZE)
        if 'expires' not in params and self.expiration.total_seconds() >= 0:
            params['signatureVersion'] = '3'
            tz = pytz.utc
            expires = tz.localize(datetime.utcnow() + self.expiration)
            params['expires'] = expires.astimezone(tz).strftime(EXPIRES_FORMAT)
        # GET sends query params; anything else sends a form body.
        kind = 'params' if self.method == 'get' else 'data'
        return kind, dict(params.items())

    def _request(self, command, json=True, opcode_name='command',
                 fetch_list=False, headers=None, **params):
        """Sign and send one API command.

        With fetch_list=True, iterates pages and returns the concatenated
        items; with fetch_result, follows the returned jobid to its
        result.  Retries list/queryAsync commands on connection errors up
        to self.retry times.
        """
        fetch_result = params.pop('fetch_result', self.fetch_result)
        kind, params = self._prepare_request(command, json, opcode_name,
                                             fetch_list, **params)
        if headers is None:
            headers = {}
        headers.update(self.headers)

        done = False
        max_retry = self.retry
        final_data = []
        page = 1
        while not done:
            if fetch_list:
                params['page'] = page

            # Re-sign on every iteration (page number changed).
            transform(params)
            params.pop('signature', None)
            self._sign(params)

            req = requests.Request(self.method, self.endpoint,
                                   headers=headers, **{kind: params})
            prepped = req.prepare()
            if self.trace:
                print(prepped.method, prepped.url, file=sys.stderr)
                if prepped.headers:
                    print(prepped.headers, "\n", file=sys.stderr)
                if prepped.body:
                    print(prepped.body, file=sys.stderr)
                else:
                    print(file=sys.stderr)

            try:
                with self.session as session:
                    response = session.send(prepped,
                                            timeout=self.timeout,
                                            verify=self.verify,
                                            cert=self.cert)
            except requests.exceptions.ConnectionError:
                # Only idempotent list/queryAsync commands are retried.
                max_retry -= 1
                if (
                    max_retry < 0 or
                    not command.startswith(('list', 'queryAsync'))
                ):
                    raise
                continue
            max_retry = self.retry

            if self.trace:
                print(response.status_code, response.reason, file=sys.stderr)
                headersTrace = "\n".join("{}: {}".format(k, v)
                                         for k, v in response.headers.items())
                print(headersTrace, "\n", file=sys.stderr)
                print(response.text, "\n", file=sys.stderr)

            data = self._response_value(response, json)

            if fetch_list:
                # The payload has a 'count' plus exactly one list key.
                try:
                    [key] = [k for k in data.keys() if k != 'count']
                except ValueError:
                    done = True
                else:
                    final_data.extend(data[key])
                    page += 1
                    if len(final_data) >= data.get('count', PAGE_SIZE):
                        done = True
            elif fetch_result and 'jobid' in data:
                final_data = self._jobresult(jobid=data['jobid'],
                                             headers=headers)
                done = True
            else:
                final_data = data
                done = True
        return final_data

    def _response_value(self, response, json=True):
        """Parses the HTTP response as a the cloudstack value.

        It throws an exception if the server didn't answer with a 200.
        """
        if json:
            contentType = response.headers.get("Content-Type", "")
            if not contentType.startswith(("application/json",
                                           "text/javascript")):
                if response.status_code == 200:
                    raise CloudStackException(
                        "JSON (application/json) was expected, got {!r}"
                        .format(contentType),
                        response=response)
                raise CloudStackException(
                    "HTTP {0.status_code} {0.reason}"
                    .format(response),
                    "Make sure endpoint URL {!r} is correct."
                    .format(self.endpoint),
                    response=response)
            try:
                data = response.json()
            except ValueError as e:
                raise CloudStackException(
                    "HTTP {0.status_code} {0.reason}"
                    .format(response),
                    "{0!s}. Malformed JSON document".format(e),
                    response=response)
            # Unwrap the single "<command>response" envelope key.
            [key] = data.keys()
            data = data[key]
        else:
            data = response.text
        if response.status_code != 200:
            raise CloudStackApiException(
                "HTTP {0} response from CloudStack".format(
                    response.status_code),
                error=data,
                response=response)
        return data

    def _jobresult(self, jobid, json=True, headers=None):
        """Poll the async job result.

        To be run via in a Thread, the result is put within
        the result list which is a hack.

        Polls queryAsyncJobResult every poll_interval seconds until the
        job leaves PENDING, the job_timeout expires, or more than 10
        consecutive non-CloudStack errors occur.
        """
        failures = 0

        total_time = self.job_timeout or 2**30
        remaining = timedelta(seconds=total_time)
        endtime = datetime.now() + remaining

        while remaining.total_seconds() > 0:
            timeout = max(min(self.timeout, remaining.total_seconds()), 1)
            try:
                kind, params = self._prepare_request('queryAsyncJobResult',
                                                     jobid=jobid)
                transform(params)
                self._sign(params)
                req = requests.Request(self.method, self.endpoint,
                                       headers=headers, **{kind: params})
                prepped = req.prepare()
                if self.trace:
                    print(prepped.method, prepped.url, file=sys.stderr)
                    if prepped.headers:
                        print(prepped.headers, "\n", file=sys.stderr)
                    if prepped.body:
                        print(prepped.body, file=sys.stderr)
                    else:
                        print(file=sys.stderr)

                with self.session as session:
                    response = session.send(prepped, timeout=timeout,
                                            verify=self.verify,
                                            cert=self.cert)

                j = self._response_value(response, json)
                if self.trace:
                    print(response.status_code, response.reason,
                          file=sys.stderr)
                    headersTrace = "\n".join(
                        "{}: {}".format(k, v)
                        for k, v in response.headers.items())
                    print(headersTrace, "\n", file=sys.stderr)
                    print(response.text, "\n", file=sys.stderr)

                failures = 0
                if j['jobstatus'] != PENDING:
                    if j['jobresultcode'] or j['jobstatus'] != SUCCESS:
                        raise CloudStackApiException("Job failure",
                                                     error=j['jobresult'],
                                                     response=response)
                    if 'jobresult' not in j:
                        raise CloudStackException("Unknown job result",
                                                  response=response)
                    return j['jobresult']

            except CloudStackException:
                raise
            except Exception:
                # transient error: tolerate up to 10 in a row
                failures += 1
                if failures > 10:
                    raise

            time.sleep(self.poll_interval)
            remaining = endtime - datetime.now()

        # NOTE(review): `if response:` uses requests' status-based
        # truthiness, so a final error response is falsy and its status
        # is not rewritten to 408 — presumably `is not None` was meant;
        # confirm before changing.
        if response:
            response.status_code = 408
        raise CloudStackException("Timeout waiting for async job result",
                                  jobid,
                                  response=response)

    def _sign(self, data):
        """
        Compute a signature string according to the CloudStack
        signature method (hmac/sha1).

        Stores the result into data['signature'] (mutates in place).
        """
        # Python2/3 urlencode aren't good enough for this task.
        params = "&".join(
            "=".join((key, cs_encode(value)))
            for key, value in sorted(data.items())
        )
        digest = hmac.new(
            self.secret.encode('utf-8'),
            msg=params.lower().encode('utf-8'),
            digestmod=hashlib.sha1).digest()
        data['signature'] = base64.b64encode(digest).decode('utf-8').strip()


def read_config_from_ini(ini_group=None):
    """Load one [section] of a cloudstack.ini file into a config dict,
    folding header_* keys into a single "headers" mapping."""
    # Config file: $PWD/cloudstack.ini or $HOME/.cloudstack.ini
    # Last read wins in configparser
    paths = [
        os.path.join(os.path.expanduser('~'), '.cloudstack.ini'),
        os.path.join(os.getcwd(), 'cloudstack.ini'),
    ]
    # Look at CLOUDSTACK_CONFIG first if present
    if 'CLOUDSTACK_CONFIG' in os.environ:
        paths.append(os.path.expanduser(os.environ['CLOUDSTACK_CONFIG']))
    if not any([os.path.exists(c) for c in paths]):
        raise SystemExit("Config file not found. Tried {0}".format(
            ", ".join(paths)))
    conf = ConfigParser()
    conf.read(paths)

    if not ini_group:
        ini_group = os.getenv('CLOUDSTACK_REGION', 'cloudstack')

    if not conf.has_section(ini_group):
        return dict(name=None)

    # keep only non-empty values whose key matches the allowed patterns
    ini_config = {
        k: v for k, v in conf.items(ini_group)
        if v and check_key(k, REQUIRED_CONFIG_KEYS.union(ALLOWED_CONFIG_KEYS))
    }

    ini_config["name"] = ini_group

    # Convert individual header_* settings into a single dict
    for k in list(ini_config):
        if k.startswith("header_"):
            ini_config.setdefault("headers", {})
            ini_config["headers"][k[len("header_"):]] = ini_config.pop(k)
    return ini_config


def read_config(ini_group=None):
    """
    Read the configuration from the environment, or config.

    First it try to go for the environment, then it overrides
    those with the cloudstack.ini file.
    """
    env_conf = dict(DEFAULT_CONFIG)
    for key in REQUIRED_CONFIG_KEYS.union(ALLOWED_CONFIG_KEYS):
        env_key = "CLOUDSTACK_{0}".format(key.upper())
        value = os.getenv(env_key)
        if value:
            env_conf[key] = value

    # overrides means we have a .ini to read
    overrides = os.getenv('CLOUDSTACK_OVERRIDES', '').strip()

    if not overrides and set(env_conf).issuperset(REQUIRED_CONFIG_KEYS):
        return env_conf

    ini_conf = read_config_from_ini(ini_group)

    # keys named in CLOUDSTACK_OVERRIDES take the environment value even
    # when the ini file also defines them
    overrides = {s.lower() for s in re.split(r'\W+', overrides)}
    config = dict(dict(env_conf, **ini_conf),
                  **{k: v for k, v in env_conf.items() if k in overrides})

    missings = REQUIRED_CONFIG_KEYS.difference(config)
    if missings:
        raise ValueError("the configuration is missing the following keys: "
                         + ", ".join(missings))

    # convert booleans values.
    bool_keys = ('dangerous_no_tls_verify',)
    for bool_key in bool_keys:
        if isinstance(config[bool_key], string_type):
            try:
                config[bool_key] = strtobool(config[bool_key])
            except ValueError:
                pass
    return config
StarcoderdataPython
154488
#!/usr/bin/env python
"""
This script extracts btsnooz content from bugreports and generates
a valid btsnoop log file which can be viewed using standard tools
like Wireshark.

btsnooz is a custom format designed to be included in bugreports.
It can be described as:

base64 {
  file_header
  deflate {
    repeated {
      record_header
      record_data
    }
  }
}

where the file_header and record_header are modified versions of
the btsnoop headers.
"""

import base64
import fileinput
import struct
import sys
import zlib

# Enumeration of the values the 'type' field can take in a btsnooz
# header. These values come from the Bluetooth stack's internal
# representation of packet types.
TYPE_IN_EVT = 0x10
TYPE_IN_ACL = 0x11
TYPE_IN_SCO = 0x12
TYPE_IN_ISO = 0x17
TYPE_OUT_CMD = 0x20
TYPE_OUT_ACL = 0x21
TYPE_OUT_SCO = 0x22
TYPE_OUT_ISO = 0x2d


def type_to_direction(type):
    """
    Returns the inbound/outbound direction of a packet given its type.
    0 = sent packet
    1 = received packet
    """
    if type in [TYPE_IN_EVT, TYPE_IN_ACL, TYPE_IN_SCO, TYPE_IN_ISO]:
        return 1
    return 0


def type_to_hci(type):
    """
    Returns the HCI type (as a 1-byte value) of a packet given its
    btsnooz type.  Raises RuntimeError for unknown types.
    """
    if type == TYPE_OUT_CMD:
        return b'\x01'
    if type == TYPE_IN_ACL or type == TYPE_OUT_ACL:
        return b'\x02'
    if type == TYPE_IN_SCO or type == TYPE_OUT_SCO:
        return b'\x03'
    if type == TYPE_IN_EVT:
        return b'\x04'
    if type == TYPE_IN_ISO or type == TYPE_OUT_ISO:
        return b'\x05'
    raise RuntimeError("type_to_hci: unknown type (0x{:02x})".format(type))


def decode_snooz(snooz):
    """
    Decodes all known versions of a btsnooz file into a btsnoop file,
    writing the result to the module-level `outf` file object.
    """
    # NOTE: the parameter is named *_us but v1/v2 decoders treat it as
    # milliseconds; the naming is inherited from upstream.
    version, last_timestamp_us = struct.unpack_from('=bQ', snooz)

    if version != 1 and version != 2:
        sys.stderr.write('Unsupported btsnooz version: %s\n' % version)
        # Bug fix: use sys.exit rather than the site-injected exit(),
        # which is not guaranteed to exist (e.g. under python -S).
        sys.exit(1)

    # Oddly, the file header (9 bytes) is not compressed, but the rest is.
    decompressed = zlib.decompress(snooz[9:])

    # Standard btsnoop file header (magic, version 1, HCI UART datalink).
    outf.write(b'btsnoop\x00\x00\x00\x00\x01\x00\x00\x03\xea')

    if version == 1:
        decode_snooz_v1(decompressed, last_timestamp_us)
    elif version == 2:
        decode_snooz_v2(decompressed, last_timestamp_us)


def decode_snooz_v1(decompressed, last_timestamp_ms):
    """
    Decodes btsnooz v1 files into a btsnoop file.
    """
    # An unfortunate consequence of the file format design: we have to do a
    # pass of the entire file to determine the timestamp of the first packet.
    first_timestamp_ms = last_timestamp_ms + 0x00dcddb30f2f8000
    offset = 0
    while offset < len(decompressed):
        length, delta_time_ms, type = struct.unpack_from('=HIb', decompressed, offset)
        offset += 7 + length - 1
        first_timestamp_ms -= delta_time_ms

    # Second pass does the actual writing.
    offset = 0
    while offset < len(decompressed):
        length, delta_time_ms, type = struct.unpack_from('=HIb', decompressed, offset)
        first_timestamp_ms += delta_time_ms
        offset += 7
        # btsnoop record header: original length, included length,
        # flags (direction), cumulative drops, 64-bit timestamp.
        outf.write(struct.pack('>II', length, length))
        outf.write(struct.pack('>II', type_to_direction(type), 0))
        outf.write(struct.pack('>II', (first_timestamp_ms >> 32),
                               (first_timestamp_ms & 0xFFFFFFFF)))
        outf.write(type_to_hci(type))
        outf.write(decompressed[offset:offset + length - 1])
        offset += length - 1


def decode_snooz_v2(decompressed, last_timestamp_ms):
    """
    Decodes btsnooz v2 files into a btsnoop file.

    v2 adds a separate on-wire packet_length (records may be truncated
    in the snooz capture).
    """
    # An unfortunate consequence of the file format design: we have to do a
    # pass of the entire file to determine the timestamp of the first packet.
    first_timestamp_ms = last_timestamp_ms + 0x00dcddb30f2f8000
    offset = 0
    while offset < len(decompressed):
        length, packet_length, delta_time_ms, snooz_type = struct.unpack_from(
            '=HHIb', decompressed, offset)
        offset += 9 + length - 1
        first_timestamp_ms -= delta_time_ms

    # Second pass does the actual writing out to the output file.
    offset = 0
    while offset < len(decompressed):
        length, packet_length, delta_time_ms, snooz_type = struct.unpack_from(
            '=HHIb', decompressed, offset)
        first_timestamp_ms += delta_time_ms
        offset += 9
        outf.write(struct.pack('>II', packet_length, length))
        outf.write(struct.pack('>II', type_to_direction(snooz_type), 0))
        outf.write(struct.pack('>II', first_timestamp_ms >> 32,
                               (first_timestamp_ms & 0xFFFFFFFF)))
        outf.write(type_to_hci(snooz_type))
        outf.write(decompressed[offset:offset + length - 1])
        offset += length - 1


def main():
    """Scan a bugreport for the btsnooz section and decode it into
    the output file given on the command line."""
    if len(sys.argv) != 3:
        sys.stderr.write('Usage: %s [bugreport] [outfile]\n' % sys.argv[0])
        sys.exit(1)

    iterator = fileinput.input(files=sys.argv[1])

    global outf
    outf = open(sys.argv[2], "wb")

    found = False
    base64_string = ""
    for line in iterator:
        if found:
            if line.find('--- END:BTSNOOP_LOG_SUMMARY') != -1:
                decode_snooz(base64.standard_b64decode(base64_string))
                sys.exit(0)
            base64_string += line.strip()
        if line.find('--- BEGIN:BTSNOOP_LOG_SUMMARY') != -1:
            found = True

    if found:
        # Bug fix: previously a bugreport containing the BEGIN marker but
        # no END marker fell out of the loop and exited 0 with an empty /
        # invalid output file. Report the truncation and fail instead.
        sys.stderr.write('btsnooz section is truncated '
                         '(BEGIN marker found, END marker missing).\n')
    else:
        sys.stderr.write('No btsnooz section found in bugreport.\n')
    sys.exit(1)


if __name__ == '__main__':
    main()
StarcoderdataPython
382085
"""Celery task definitions shared with the Flask application."""
from celery import Celery
from flask import current_app

# Module-level Celery application; the tasks below register against it.
celery_app = Celery(__name__)


@celery_app.task
def add(x, y):
    """Addition task.

    :param x: first addend
    :param y: second addend
    :return: the sum, rendered as a string
    """
    return str(x + y)


@celery_app.task
def flask_app_context():
    """Run inside the Flask application context from a Celery worker.

    NOTE(review): ``current_app`` is a proxy that only resolves when an
    app context is already active; presumably the worker is configured
    to push one — confirm against the worker setup.

    :return: string rendering of the Flask app's config
    """
    with current_app.app_context():
        return str(current_app.config)
StarcoderdataPython
6439860
# <reponame>dzshn/python-tetris<filename>examples/cli.py
"""Curses front-end for the `tetris` package: renders the playfield,
queue, hold, score and level, and maps keys to game moves."""
import curses
import time

import tetris
from tetris import MinoType
from tetris import Move


# NOTE: curses.wrapper used as a decorator runs main() immediately at
# import time (it is not deferred behind an __main__ guard).
@curses.wrapper
def main(screen: curses.window) -> None:
    game_start = time.monotonic()
    game = tetris.BaseGame()

    # key code -> game move
    moves: dict[int, Move] = {
        ord("z"): Move.rotate(-1),
        curses.KEY_UP: Move.rotate(+1),
        curses.KEY_LEFT: Move.left(),
        curses.KEY_RIGHT: Move.right(),
        curses.KEY_DOWN: Move.soft_drop(),
        ord(" "): Move.hard_drop(),
        ord("c"): Move.swap(),
    }

    # Set up the 8 basic color pairs on the terminal's default background.
    curses.use_default_colors()
    for i in range(8):
        curses.init_pair(i, i, -1)

    curses_colors = [
        curses.COLOR_BLACK,
        curses.COLOR_BLUE,
        curses.COLOR_CYAN,
        curses.COLOR_GREEN,
        curses.COLOR_MAGENTA,
        curses.COLOR_RED,
        curses.COLOR_WHITE,
        curses.COLOR_YELLOW,
    ]
    black, blue, cyan, green, magenta, red, white, yellow = (
        curses.color_pair(i) for i in curses_colors
    )
    # piece type -> curses attribute used when drawing it
    colors = {
        MinoType.EMPTY: curses.A_NORMAL,
        MinoType.I: cyan,
        MinoType.L: yellow,
        MinoType.J: blue,
        MinoType.S: green,
        MinoType.Z: red,
        MinoType.T: magenta,
        MinoType.O: yellow,
        MinoType.GARBAGE: black,
        MinoType.GHOST: white,
    }

    # Hide the cursor and make getch() non-blocking for the game loop.
    curses.curs_set(0)
    screen.nodelay(True)

    def render() -> None:
        # Redraw the whole frame: board window plus a status side panel,
        # both centered on the screen.
        screen.erase()
        my, mx = screen.getmaxyx()
        board = screen.subwin(22, 22, my // 2 - 11, mx // 2 - 22)
        status = screen.subwin(22, 20, my // 2 - 11, 23 + mx // 2 - 22)
        for x, line in enumerate(game.playfield):
            for y, i in enumerate(line):
                paint = colors[i]
                # each cell is two characters wide
                ch = "[]"
                if i == MinoType.GARBAGE:
                    ch = "X "
                elif i == MinoType.GHOST:
                    ch = "@ "
                elif i == MinoType.EMPTY:
                    ch = " "
                board.addstr(x + 1, y * 2 + 1, ch, paint)

        status.addstr(1, 2, " Queue ", curses.A_STANDOUT)
        for i, piece in enumerate(game.queue):
            status.addstr(2, 4 + i * 3, piece.name, colors[piece])  # type: ignore
            if i < 3:
                status.addstr(2, 5 + i * 3, ",", curses.A_DIM)

        status.addstr(4, 2, " Hold ", curses.A_STANDOUT)
        if game.hold is not None:
            status.addstr(
                5, 4, game.hold.name + " piece", colors[game.hold]  # type: ignore
            )
        else:
            status.addstr(5, 4, ". . .", curses.A_DIM)

        status.addstr(7, 2, " Score ", curses.A_STANDOUT)
        status.addstr(8, 4, format(game.score, ","))

        line_clears = game.scorer.line_clears  # type: ignore
        status.addstr(10, 2, " Level ", curses.A_STANDOUT)
        status.addstr(11, 4, f"{game.level}")
        # progress toward the next level-up (every 10 line clears)
        status.addstr(
            11,
            5 + len(str(game.level)),
            f"[{line_clears}/{(line_clears // 10 + 1) * 10}]",
            curses.A_DIM,
        )

        elapsed = time.monotonic() - game_start
        status.addstr(14, 2, " Elapsed ", curses.A_STANDOUT)
        status.addstr(15, 4, f"{int(elapsed / 60)}:{elapsed % 60:0>6.3f}")

        status.addstr(20, 2, "[h]elp for info", curses.A_DIM)

        if game.lost:
            board.addstr(11, 2, " Game over! ", curses.A_REVERSE | red)
        if game.paused:
            board.addstr(11, 2, " Paused ", curses.A_REVERSE | yellow)

        board.border()
        status.border()

    def render_help() -> None:
        # Modal help overlay; caller blocks on getch() until dismissed.
        my, mx = screen.getmaxyx()
        help_menu = screen.subwin(16, 33, my // 2 - 8, mx // 2 - 17)
        help_menu.erase()
        help_menu.addstr(2, 5, "♥ dzshn/python-tetris")
        help_menu.addstr(4, 4, " Controls ", curses.A_STANDOUT)
        for i, line in enumerate(
            [
                "rotate z / ↑",
                "move ← / →",
                "soft drop ↓",
                "hard drop ␣",
                "swap piece c",
                "pause p",
                "restart r",
                "quit Ctrl-C / q",
            ]
        ):
            help_menu.addstr(i + 6, 6, line)
        help_menu.border()

    try:
        # Main loop: render, advance the game clock, then handle at most
        # one key per frame at ~120 fps.
        while True:
            render()
            game.tick()

            ch = screen.getch()
            if ch == ord("q"):
                break
            elif ch == ord("p"):
                game.pause()
            elif ch == ord("r"):
                game.reset()
                game_start = time.monotonic()
            elif ch == ord("h"):
                # Pause while help is up, then restore the prior state.
                paused = game.paused
                game.pause(state=True)
                screen.nodelay(False)
                render_help()
                screen.getch()
                screen.nodelay(True)
                game.pause(state=paused)
            elif ch in moves:
                game.push(moves[ch])

            time.sleep(1 / 120)

    except KeyboardInterrupt:
        pass
6697557
from terrascript import Terrascript, provider
from terrascript.vsphere.r import vsphere_virtual_machine
from terrascript.vsphere.d import vsphere_datastore
from terrascript.vsphere.d import vsphere_datacenter
from terrascript.vsphere.d import vsphere_resource_pool
from terrascript.vsphere.d import vsphere_network
from terrascript.vsphere.d import vsphere_virtual_machine \
    as data_vsphere_virtual_machine
import logging


class TerrascriptVM:
    """Base wrapper holding a Terrascript document for one virtual machine."""

    def __init__(self, name):
        self.name = name
        self.ts = Terrascript()

    def CreateResourceFile(self, filename):
        # Placeholder; concrete subclasses emit their own resources.
        pass


class TerrascriptVSphereVM(TerrascriptVM):
    """Builds a Terraform (Terrascript) description of a vSphere VM.

    Call the various set*/add* methods to configure the VM, then
    dumpResourceFile() to obtain the rendered Terraform JSON.
    """

    def __init__(self, name):
        TerrascriptVM.__init__(self, name)
        self.guestid = None
        self.cpu = None
        self.memory = None
        self.folder = None
        self.provider = None
        self.datacenter = None
        self.datastore = None
        self.template = None
        self.gateway = None
        # Initialized here so dumpResourceFile() fails with a clear value
        # (None) instead of AttributeError when a setter was never called.
        self.pool = None
        self.domain = None
        self.timezone = None
        self.disks = []
        self.networks = []
        self.interfaces = []
        self.iface_customization = []
        self.dns = []
        self.dns_suffix = []

    def dumpResourceFile(self):
        """Assemble the vsphere_virtual_machine resource and return the
        rendered Terrascript document (JSON string)."""
        linuxOptions = {}
        linuxOptions["host_name"] = self.name
        linuxOptions["domain"] = self.domain
        linuxOptions["time_zone"] = self.timezone

        customize = {}
        customize["linux_options"] = linuxOptions
        customize["network_interface"] = self.iface_customization
        customize["ipv4_gateway"] = self.gateway
        customize["dns_server_list"] = self.dns
        customize["dns_suffix_list"] = self.dns_suffix

        clone = {}
        clone["template_uuid"] = self.template.id
        clone["linked_clone"] = False
        clone["customize"] = customize

        # Bug fix: folder defaults to None, and the old check
        # (self.folder != '') wrongly emitted folder=None in that case.
        # Truthiness covers both None and '' as "no folder".
        if self.folder:
            virtualMachine = vsphere_virtual_machine(
                'vm',
                name=self.name,
                resource_pool_id=self.pool.id,
                datastore_id=self.datastore.id,
                guest_id=self.guestid,
                folder=self.folder,
                num_cpus=str(self.cpu),
                memory=self.memory,
                network_interface=self.interfaces,
                disk=self.disks,
                clone=clone)
        else:
            virtualMachine = vsphere_virtual_machine(
                'vm',
                name=self.name,
                resource_pool_id=self.pool.id,
                datastore_id=self.datastore.id,
                guest_id=self.guestid,
                num_cpus=str(self.cpu),
                memory=self.memory,
                network_interface=self.interfaces,
                disk=self.disks,
                clone=clone)
        self.ts.add(virtualMachine)
        return self.ts.dump()

    def setVSphereConfig(self, host, username, password):
        """Register the vSphere provider (certificate checks disabled)."""
        logger = logging.getLogger()
        logger.debug("Set VSphere provider to {}".format(host))
        self.provider = provider(
            "vsphere",
            user=username,
            password=password,
            vsphere_server=host,
            allow_unverified_ssl=True)
        self.ts.add(self.provider)

    def setDatacenter(self, datacenter):
        """Declare the datacenter data source (must be called first)."""
        logger = logging.getLogger()
        logger.debug("Set VSphere datacenter to {}".format(datacenter))
        self.datacenter = vsphere_datacenter(
            "dc",
            name=datacenter)
        self.ts.add(self.datacenter)

    def setDatastore(self, datastore):
        """Declare the datastore data source; requires setDatacenter()."""
        if not self.datacenter:
            raise Exception("setDatacenter() must be called before setDatastore()")
        else:
            logger = logging.getLogger()
            logger.debug("Set VSphere datastore to {}".format(datastore))
            self.datastore = vsphere_datastore(
                "ds",
                name=datastore,
                datacenter_id=self.datacenter.id)
            self.ts.add(self.datastore)

    def setResourcePool(self, pool):
        """Declare the resource-pool data source; requires setDatacenter()."""
        if not self.datacenter:
            raise Exception("setDatacenter() must be called before setResourcePool()")
        else:
            logger = logging.getLogger()
            logger.debug("Set VSphere Resource Pool to {}".format(pool))
            self.pool = vsphere_resource_pool(
                "pool",
                name=pool,
                datacenter_id=self.datacenter.id)
            self.ts.add(self.pool)

    def setTemplate(self, template):
        """Declare the source-template data source; requires setDatacenter()."""
        if not self.datacenter:
            raise Exception("setDatacenter() must be called before setTemplate()")
        else:
            logger = logging.getLogger()
            logger.debug("Set VSphere template to {}".format(template))
            self.template = data_vsphere_virtual_machine(
                "template",
                name=template,
                datacenter_id=self.datacenter.id)
            self.ts.add(self.template)

    def addDisk(self, size):
        """Append a disk of `size` GB.

        Bug fix: the old code gave every disk after the first
        unit_number 1, which collides as soon as a third disk is added
        (vSphere requires unique unit numbers per controller). Using the
        disk index keeps the first two disks identical to the old
        behavior and makes later ones unique.
        """
        idx = len(self.disks)
        logger = logging.getLogger()
        logger.debug("Add {}GB disk".format(size))
        self.disks.append({
            "label": "disk{}".format(idx + 1),
            "size": size,
            "unit_number": idx})

    def addSuffix(self, dns):
        """Append a DNS search suffix for guest customization."""
        self.dns_suffix.append(dns)

    def addDns(self, dns):
        """Append a DNS server for guest customization."""
        logger = logging.getLogger()
        logger.debug("Add {} to DNS list".format(dns))
        self.dns.append(dns)

    def addNetworkInterface(self, dvp, ipaddr, cidr):
        """Attach a NIC on distributed port group `dvp` with a static IP.

        Requires setDatacenter(); records both the Terraform NIC and the
        matching guest-customization entry.
        """
        if not self.datacenter:
            raise Exception("setDatacenter() must be called before addNetworkInterface()")
        else:
            logger = logging.getLogger()
            logger.debug("Add network card on {} DVP, with {}/{}".format(dvp, ipaddr, cidr))
            vnet = vsphere_network(
                dvp,
                name=dvp,
                datacenter_id=self.datacenter.id)
            self.networks.append(vnet)
            self.ts.add(vnet)
            self.interfaces.append({"network_id": vnet.id})
            self.iface_customization.append({
                "ipv4_address": ipaddr,
                "ipv4_netmask": cidr})
StarcoderdataPython
143999
#!/usr/bin/env python # # Author: <NAME> (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2019 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ all Python Standard Library objects (currently: CH 1-15 @ 2.7) and some other common objects (i.e. numpy.ndarray) """ __all__ = ['registered','failures','succeeds'] # helper imports import warnings; warnings.filterwarnings("ignore", category=DeprecationWarning) import sys PY3 = (hex(sys.hexversion) >= '0x30000f0') if PY3: import queue as Queue import dbm as anydbm else: import Queue import anydbm import sets # deprecated/removed import mutex # removed try: from cStringIO import StringIO # has StringI and StringO types except ImportError: # only has StringIO type if PY3: from io import BytesIO as StringIO else: from StringIO import StringIO import re import array import collections import codecs import struct import datetime import calendar import weakref import pprint import decimal import functools import itertools import operator import tempfile import shelve import zlib import gzip import zipfile import tarfile import xdrlib import csv import hashlib import hmac import os import logging import optparse #import __hello__ import threading import socket import contextlib try: import bz2 import sqlite3 if PY3: import dbm.ndbm as dbm else: import dbm HAS_ALL = True except ImportError: # Ubuntu HAS_ALL = False try: #import curses #from curses import textpad, panel HAS_CURSES = True except ImportError: # Windows HAS_CURSES = False try: import ctypes HAS_CTYPES = True # if using `pypy`, pythonapi is not found IS_PYPY = not hasattr(ctypes, 'pythonapi') except ImportError: # MacPorts HAS_CTYPES = False IS_PYPY = False # helper objects class _class: def _method(self): pass # @classmethod # def _clsmethod(cls): #XXX: test me # pass # 
@staticmethod # def _static(self): #XXX: test me # pass class _class2: def __call__(self): pass _instance2 = _class2() class _newclass(object): def _method(self): pass # @classmethod # def _clsmethod(cls): #XXX: test me # pass # @staticmethod # def _static(self): #XXX: test me # pass class _newclass2(object): __slots__ = ['descriptor'] def _function(x): yield x def _function2(): try: raise except: from sys import exc_info e, er, tb = exc_info() return er, tb if HAS_CTYPES: class _Struct(ctypes.Structure): pass _Struct._fields_ = [("_field", ctypes.c_int),("next", ctypes.POINTER(_Struct))] _filedescrip, _tempfile = tempfile.mkstemp('r') # deleted in cleanup _tmpf = tempfile.TemporaryFile('w') # put the objects in order, if possible try: from collections import OrderedDict as odict except ImportError: try: from ordereddict import OrderedDict as odict except ImportError: odict = dict # objects used by dill for type declaration registered = d = odict() # objects dill fails to pickle failures = x = odict() # all other type objects succeeds = a = odict() # types module (part of CH 8) a['BooleanType'] = bool(1) a['BuiltinFunctionType'] = len a['BuiltinMethodType'] = a['BuiltinFunctionType'] a['BytesType'] = _bytes = codecs.latin_1_encode('\x00')[0] # bytes(1) a['ClassType'] = _class a['ComplexType'] = complex(1) a['DictType'] = _dict = {} a['DictionaryType'] = a['DictType'] a['FloatType'] = float(1) a['FunctionType'] = _function a['InstanceType'] = _instance = _class() a['IntType'] = _int = int(1) a['ListType'] = _list = [] a['NoneType'] = None a['ObjectType'] = object() a['StringType'] = _str = str(1) a['TupleType'] = _tuple = () a['TypeType'] = type if PY3: a['LongType'] = _int a['UnicodeType'] = _str else: a['LongType'] = long(1) a['UnicodeType'] = unicode(1) # built-in constants (CH 4) a['CopyrightType'] = copyright # built-in types (CH 5) a['ClassObjectType'] = _newclass # <type 'type'> a['ClassInstanceType'] = _newclass() # <type 'class'> a['SetType'] = _set = set() 
a['FrozenSetType'] = frozenset() # built-in exceptions (CH 6) a['ExceptionType'] = _exception = _function2()[0] # string services (CH 7) a['SREPatternType'] = _srepattern = re.compile('') # data types (CH 8) a['ArrayType'] = array.array("f") a['DequeType'] = collections.deque([0]) a['DefaultDictType'] = collections.defaultdict(_function, _dict) a['TZInfoType'] = datetime.tzinfo() a['DateTimeType'] = datetime.datetime.today() a['CalendarType'] = calendar.Calendar() if not PY3: a['SetsType'] = sets.Set() a['ImmutableSetType'] = sets.ImmutableSet() a['MutexType'] = mutex.mutex() # numeric and mathematical types (CH 9) a['DecimalType'] = decimal.Decimal(1) a['CountType'] = itertools.count(0) # data compression and archiving (CH 12) a['TarInfoType'] = tarfile.TarInfo() # generic operating system services (CH 15) a['LoggerType'] = logging.getLogger() a['FormatterType'] = logging.Formatter() # pickle ok a['FilterType'] = logging.Filter() # pickle ok a['LogRecordType'] = logging.makeLogRecord(_dict) # pickle ok a['OptionParserType'] = _oparser = optparse.OptionParser() # pickle ok a['OptionGroupType'] = optparse.OptionGroup(_oparser,"foo") # pickle ok a['OptionType'] = optparse.Option('--foo') # pickle ok if HAS_CTYPES: a['CCharType'] = _cchar = ctypes.c_char() a['CWCharType'] = ctypes.c_wchar() # fail == 2.6 a['CByteType'] = ctypes.c_byte() a['CUByteType'] = ctypes.c_ubyte() a['CShortType'] = ctypes.c_short() a['CUShortType'] = ctypes.c_ushort() a['CIntType'] = ctypes.c_int() a['CUIntType'] = ctypes.c_uint() a['CLongType'] = ctypes.c_long() a['CULongType'] = ctypes.c_ulong() a['CLongLongType'] = ctypes.c_longlong() a['CULongLongType'] = ctypes.c_ulonglong() a['CFloatType'] = ctypes.c_float() a['CDoubleType'] = ctypes.c_double() a['CSizeTType'] = ctypes.c_size_t() a['CLibraryLoaderType'] = ctypes.cdll a['StructureType'] = _Struct if not IS_PYPY: a['BigEndianStructureType'] = ctypes.BigEndianStructure() #NOTE: also LittleEndianStructureType and UnionType... 
abstract classes #NOTE: remember for ctypesobj.contents creates a new python object #NOTE: ctypes.c_int._objects is memberdescriptor for object's __dict__ #NOTE: base class of all ctypes data types is non-public _CData try: # python 2.6 import fractions import number import io from io import StringIO as TextIO # built-in functions (CH 2) a['ByteArrayType'] = bytearray([1]) # numeric and mathematical types (CH 9) a['FractionType'] = fractions.Fraction() a['NumberType'] = numbers.Number() # generic operating system services (CH 15) a['IOBaseType'] = io.IOBase() a['RawIOBaseType'] = io.RawIOBase() a['TextIOBaseType'] = io.TextIOBase() a['BufferedIOBaseType'] = io.BufferedIOBase() a['UnicodeIOType'] = TextIO() # the new StringIO a['LoggingAdapterType'] = logging.LoggingAdapter(_logger,_dict) # pickle ok if HAS_CTYPES: a['CBoolType'] = ctypes.c_bool(1) a['CLongDoubleType'] = ctypes.c_longdouble() except ImportError: pass try: # python 2.7 import argparse # data types (CH 8) a['OrderedDictType'] = collections.OrderedDict(_dict) a['CounterType'] = collections.Counter(_dict) if HAS_CTYPES: a['CSSizeTType'] = ctypes.c_ssize_t() # generic operating system services (CH 15) a['NullHandlerType'] = logging.NullHandler() # pickle ok # new 2.7 a['ArgParseFileType'] = argparse.FileType() # pickle ok except (AttributeError, ImportError): pass # -- pickle fails on all below here ----------------------------------------- # types module (part of CH 8) a['CodeType'] = compile('','','exec') a['DictProxyType'] = type.__dict__ a['DictProxyType2'] = _newclass.__dict__ a['EllipsisType'] = Ellipsis a['ClosedFileType'] = open(os.devnull, 'wb', buffering=0).close() a['GetSetDescriptorType'] = array.array.typecode a['LambdaType'] = _lambda = lambda x: lambda y: x #XXX: works when not imported! a['MemberDescriptorType'] = _newclass2.descriptor if not IS_PYPY: a['MemberDescriptorType2'] = datetime.timedelta.days a['MethodType'] = _method = _class()._method #XXX: works when not imported! 
a['ModuleType'] = datetime a['NotImplementedType'] = NotImplemented a['SliceType'] = slice(1) a['UnboundMethodType'] = _class._method #XXX: works when not imported! a['TextWrapperType'] = open(os.devnull, 'r') # same as mode='w','w+','r+' a['BufferedRandomType'] = open(os.devnull, 'r+b') # same as mode='w+b' a['BufferedReaderType'] = open(os.devnull, 'rb') # (default: buffering=-1) a['BufferedWriterType'] = open(os.devnull, 'wb') try: # oddities: deprecated from _pyio import open as _open a['PyTextWrapperType'] = _open(os.devnull, 'r', buffering=-1) a['PyBufferedRandomType'] = _open(os.devnull, 'r+b', buffering=-1) a['PyBufferedReaderType'] = _open(os.devnull, 'rb', buffering=-1) a['PyBufferedWriterType'] = _open(os.devnull, 'wb', buffering=-1) except ImportError: pass # other (concrete) object types if PY3: d['CellType'] = (_lambda)(0).__closure__[0] a['XRangeType'] = _xrange = range(1) else: d['CellType'] = (_lambda)(0).func_closure[0] a['XRangeType'] = _xrange = xrange(1) if not IS_PYPY: d['MethodDescriptorType'] = type.__dict__['mro'] d['WrapperDescriptorType'] = type.__repr__ a['WrapperDescriptorType2'] = type.__dict__['__module__'] d['ClassMethodDescriptorType'] = type.__dict__['__prepare__' if PY3 else 'mro'] # built-in functions (CH 2) if PY3 or IS_PYPY: _methodwrap = (1).__lt__ else: _methodwrap = (1).__cmp__ d['MethodWrapperType'] = _methodwrap a['StaticMethodType'] = staticmethod(_method) a['ClassMethodType'] = classmethod(_method) a['PropertyType'] = property() d['SuperType'] = super(Exception, _exception) # string services (CH 7) if PY3: _in = _bytes else: _in = _str a['InputType'] = _cstrI = StringIO(_in) a['OutputType'] = _cstrO = StringIO() # data types (CH 8) a['WeakKeyDictionaryType'] = weakref.WeakKeyDictionary() a['WeakValueDictionaryType'] = weakref.WeakValueDictionary() a['ReferenceType'] = weakref.ref(_instance) a['DeadReferenceType'] = weakref.ref(_class()) a['ProxyType'] = weakref.proxy(_instance) a['DeadProxyType'] = 
weakref.proxy(_class()) a['CallableProxyType'] = weakref.proxy(_instance2) a['DeadCallableProxyType'] = weakref.proxy(_class2()) a['QueueType'] = Queue.Queue() # numeric and mathematical types (CH 9) d['PartialType'] = functools.partial(int,base=2) if PY3: a['IzipType'] = zip('0','1') else: a['IzipType'] = itertools.izip('0','1') a['ChainType'] = itertools.chain('0','1') d['ItemGetterType'] = operator.itemgetter(0) d['AttrGetterType'] = operator.attrgetter('__repr__') # file and directory access (CH 10) if PY3: _fileW = _cstrO else: _fileW = _tmpf # data persistence (CH 11) if HAS_ALL: a['ConnectionType'] = _conn = sqlite3.connect(':memory:') a['CursorType'] = _conn.cursor() a['ShelveType'] = shelve.Shelf({}) # data compression and archiving (CH 12) if HAS_ALL: if (hex(sys.hexversion) < '0x2070ef0') or PY3: a['BZ2FileType'] = bz2.BZ2File(os.devnull) #FIXME: fail >= 3.3, 2.7.14 a['BZ2CompressorType'] = bz2.BZ2Compressor() a['BZ2DecompressorType'] = bz2.BZ2Decompressor() #a['ZipFileType'] = _zip = zipfile.ZipFile(os.devnull,'w') #FIXME: fail >= 3.2 #_zip.write(_tempfile,'x') [causes annoying warning/error printed on import] #a['ZipInfoType'] = _zip.getinfo('x') a['TarFileType'] = tarfile.open(fileobj=_fileW,mode='w') # file formats (CH 13) a['DialectType'] = csv.get_dialect('excel') a['PackerType'] = xdrlib.Packer() # optional operating system services (CH 16) a['LockType'] = threading.Lock() a['RLockType'] = threading.RLock() # generic operating system services (CH 15) # also closed/open and r/w/etc... a['NamedLoggerType'] = _logger = logging.getLogger(__name__) #FIXME: fail >= 3.2 and <= 2.6 #a['FrozenModuleType'] = __hello__ #FIXME: prints "Hello world..." 
# interprocess communication (CH 17) if PY3: a['SocketType'] = _socket = socket.socket() #FIXME: fail >= 3.3 a['SocketPairType'] = socket.socketpair()[0] #FIXME: fail >= 3.3 else: a['SocketType'] = _socket = socket.socket() a['SocketPairType'] = _socket._sock # python runtime services (CH 27) if PY3: a['GeneratorContextManagerType'] = contextlib.contextmanager(max)([1]) else: a['GeneratorContextManagerType'] = contextlib.GeneratorContextManager(max) try: # ipython __IPYTHON__ is True # is ipython except NameError: # built-in constants (CH 4) a['QuitterType'] = quit d['ExitType'] = a['QuitterType'] try: # numpy #FIXME: slow... 0.05 to 0.1 sec to import numpy from numpy import ufunc as _numpy_ufunc from numpy import array as _numpy_array from numpy import int32 as _numpy_int32 a['NumpyUfuncType'] = _numpy_ufunc a['NumpyArrayType'] = _numpy_array a['NumpyInt32Type'] = _numpy_int32 except ImportError: pass try: # python 2.6 # numeric and mathematical types (CH 9) a['ProductType'] = itertools.product('0','1') # generic operating system services (CH 15) a['FileHandlerType'] = logging.FileHandler(os.devnull) #FIXME: fail >= 3.2 and <= 2.6 a['RotatingFileHandlerType'] = logging.handlers.RotatingFileHandler(os.devnull) a['SocketHandlerType'] = logging.handlers.SocketHandler('localhost',514) a['MemoryHandlerType'] = logging.handlers.MemoryHandler(1) except AttributeError: pass try: # python 2.7 # data types (CH 8) a['WeakSetType'] = weakref.WeakSet() # 2.7 # # generic operating system services (CH 15) [errors when dill is imported] # a['ArgumentParserType'] = _parser = argparse.ArgumentParser('PROG') # a['NamespaceType'] = _parser.parse_args() # pickle ok # a['SubParsersActionType'] = _parser.add_subparsers() # a['MutuallyExclusiveGroupType'] = _parser.add_mutually_exclusive_group() # a['ArgumentGroupType'] = _parser.add_argument_group() except AttributeError: pass # -- dill fails in some versions below here --------------------------------- # types module (part of CH 8) 
a['FileType'] = open(os.devnull, 'rb', buffering=0) # same 'wb','wb+','rb+' # FIXME: FileType fails >= 3.1 # built-in functions (CH 2) a['ListIteratorType'] = iter(_list) # empty vs non-empty FIXME: fail < 3.2 a['TupleIteratorType']= iter(_tuple) # empty vs non-empty FIXME: fail < 3.2 a['XRangeIteratorType'] = iter(_xrange) # empty vs non-empty FIXME: fail < 3.2 # data types (CH 8) a['PrettyPrinterType'] = pprint.PrettyPrinter() #FIXME: fail >= 3.2 and == 2.5 # numeric and mathematical types (CH 9) a['CycleType'] = itertools.cycle('0') #FIXME: fail < 3.2 # file and directory access (CH 10) a['TemporaryFileType'] = _tmpf #FIXME: fail >= 3.2 and == 2.5 # data compression and archiving (CH 12) a['GzipFileType'] = gzip.GzipFile(fileobj=_fileW) #FIXME: fail > 3.2 and <= 2.6 # generic operating system services (CH 15) a['StreamHandlerType'] = logging.StreamHandler() #FIXME: fail >= 3.2 and == 2.5 try: # python 2.6 # numeric and mathematical types (CH 9) a['PermutationsType'] = itertools.permutations('0') #FIXME: fail < 3.2 a['CombinationsType'] = itertools.combinations('0',1) #FIXME: fail < 3.2 except AttributeError: pass try: # python 2.7 # numeric and mathematical types (CH 9) a['RepeatType'] = itertools.repeat(0) #FIXME: fail < 3.2 a['CompressType'] = itertools.compress('0',[1]) #FIXME: fail < 3.2 #XXX: ...and etc except AttributeError: pass # -- dill fails on all below here ------------------------------------------- # types module (part of CH 8) x['GeneratorType'] = _generator = _function(1) #XXX: priority x['FrameType'] = _generator.gi_frame #XXX: inspect.currentframe() x['TracebackType'] = _function2()[1] #(see: inspect.getouterframes,getframeinfo) # other (concrete) object types # (also: Capsule / CObject ?) 
# built-in functions (CH 2) x['SetIteratorType'] = iter(_set) #XXX: empty vs non-empty # built-in types (CH 5) if PY3: x['DictionaryItemIteratorType'] = iter(type.__dict__.items()) x['DictionaryKeyIteratorType'] = iter(type.__dict__.keys()) x['DictionaryValueIteratorType'] = iter(type.__dict__.values()) else: x['DictionaryItemIteratorType'] = type.__dict__.iteritems() x['DictionaryKeyIteratorType'] = type.__dict__.iterkeys() x['DictionaryValueIteratorType'] = type.__dict__.itervalues() # string services (CH 7) x['StructType'] = struct.Struct('c') x['CallableIteratorType'] = _srepattern.finditer('') x['SREMatchType'] = _srepattern.match('') x['SREScannerType'] = _srepattern.scanner('') x['StreamReader'] = codecs.StreamReader(_cstrI) #XXX: ... and etc # python object persistence (CH 11) # x['DbShelveType'] = shelve.open('foo','n')#,protocol=2) #XXX: delete foo if HAS_ALL: x['DbmType'] = dbm.open(_tempfile,'n') # x['DbCursorType'] = _dbcursor = anydbm.open('foo','n') #XXX: delete foo # x['DbType'] = _dbcursor.db # data compression and archiving (CH 12) x['ZlibCompressType'] = zlib.compressobj() x['ZlibDecompressType'] = zlib.decompressobj() # file formats (CH 13) x['CSVReaderType'] = csv.reader(_cstrI) x['CSVWriterType'] = csv.writer(_cstrO) x['CSVDictReaderType'] = csv.DictReader(_cstrI) x['CSVDictWriterType'] = csv.DictWriter(_cstrO,{}) # cryptographic services (CH 14) x['HashType'] = hashlib.md5() if (hex(sys.hexversion) < '0x30800a1'): x['HMACType'] = hmac.new(_in) else: x['HMACType'] = hmac.new(_in, digestmod='md5') # generic operating system services (CH 15) if HAS_CURSES: pass #x['CursesWindowType'] = _curwin = curses.initscr() #FIXME: messes up tty #x['CursesTextPadType'] = textpad.Textbox(_curwin) #x['CursesPanelType'] = panel.new_panel(_curwin) if HAS_CTYPES: x['CCharPType'] = ctypes.c_char_p() x['CWCharPType'] = ctypes.c_wchar_p() x['CVoidPType'] = ctypes.c_void_p() if sys.platform[:3] == 'win': x['CDLLType'] = _cdll = ctypes.cdll.msvcrt else: x['CDLLType'] 
= _cdll = ctypes.CDLL(None) if not IS_PYPY: x['PyDLLType'] = _pydll = ctypes.pythonapi x['FuncPtrType'] = _cdll._FuncPtr() x['CCharArrayType'] = ctypes.create_string_buffer(1) x['CWCharArrayType'] = ctypes.create_unicode_buffer(1) x['CParamType'] = ctypes.byref(_cchar) x['LPCCharType'] = ctypes.pointer(_cchar) x['LPCCharObjType'] = _lpchar = ctypes.POINTER(ctypes.c_char) x['NullPtrType'] = _lpchar() x['NullPyObjectType'] = ctypes.py_object() x['PyObjectType'] = ctypes.py_object(lambda :None) x['FieldType'] = _field = _Struct._field x['CFUNCTYPEType'] = _cfunc = ctypes.CFUNCTYPE(ctypes.c_char) x['CFunctionType'] = _cfunc(str) try: # python 2.6 # numeric and mathematical types (CH 9) x['MethodCallerType'] = operator.methodcaller('mro') # 2.6 except AttributeError: pass try: # python 2.7 # built-in types (CH 5) x['MemoryType'] = memoryview(_in) # 2.7 x['MemoryType2'] = memoryview(bytearray(_in)) # 2.7 if PY3: x['DictItemsType'] = _dict.items() # 2.7 x['DictKeysType'] = _dict.keys() # 2.7 x['DictValuesType'] = _dict.values() # 2.7 else: x['DictItemsType'] = _dict.viewitems() # 2.7 x['DictKeysType'] = _dict.viewkeys() # 2.7 x['DictValuesType'] = _dict.viewvalues() # 2.7 # generic operating system services (CH 15) x['RawTextHelpFormatterType'] = argparse.RawTextHelpFormatter('PROG') x['RawDescriptionHelpFormatterType'] = argparse.RawDescriptionHelpFormatter('PROG') x['ArgDefaultsHelpFormatterType'] = argparse.ArgumentDefaultsHelpFormatter('PROG') except NameError: pass try: # python 2.7 (and not 3.1) x['CmpKeyType'] = _cmpkey = functools.cmp_to_key(_methodwrap) # 2.7, >=3.2 x['CmpKeyObjType'] = _cmpkey('0') #2.7, >=3.2 except AttributeError: pass if PY3: # oddities: removed, etc x['BufferType'] = x['MemoryType'] else: x['BufferType'] = buffer('') # -- cleanup ---------------------------------------------------------------- a.update(d) # registered also succeed if sys.platform[:3] == 'win': os.close(_filedescrip) # required on win32 os.remove(_tempfile) # EOF
StarcoderdataPython
1834561
#### Chain CLG Model ####

import numpy as np
import random


##### Lattice Methods #####

#==============================================================================
# create_clg_lattice(n, L)
# Arguments - n is the number of particles, L is the number of sites.
# Returns a length-L numpy array with n particles randomly placed, at most
# one particle per site.
# Bug fix: n > L used to print a message and silently return an all-zero
# lattice; it now raises ValueError.
#==============================================================================
def create_clg_lattice(n, L):
    lattice = np.zeros(L)
    if n < L:
        # Random distinct sites, one particle each.
        for it in random.sample(range(L), n):
            lattice[it] += 1
    elif n == L:
        # Fully occupied lattice.
        for it in range(L):
            lattice[it] += 1
    else:
        raise ValueError(
            "number of particles can't be bigger than the system's size")
    return lattice


#### Update Methods ####

#==============================================================================
# find_active_sites(lattice)
# A site is active when it is occupied and, on the periodic chain, exactly
# one of its two neighbors is occupied (XOR) — i.e. it has one empty and
# one occupied neighbor. Returns the list of active site indices.
#==============================================================================
def find_active_sites(lattice):
    L = len(lattice)
    active_sites = []
    for site in range(L):
        if lattice[site]:
            if (lattice[(site + 1) % L] == 1.0) ^ (lattice[(site - 1) % L] == 1.0):
                active_sites.append(site)
    return active_sites


#==============================================================================
# find_empty_neighbor(lattice, site)
# Assumes `site` is active (exactly one neighbor occupied). Returns -1 if
# the empty neighbor is on the left, +1 if it is on the right.
#==============================================================================
def find_empty_neighbor(lattice, site):
    L = len(lattice)
    if lattice[(site + 1) % L] == 1.0:
        return -1
    return 1


#==============================================================================
# fix_competition(active_particles, lattice)
# Resolves pairs of active particles two sites apart that compete for the
# same empty site between them: one of each pair is deactivated with equal
# probability. Mutates and returns `active_particles`. The two extra `if`
# blocks handle pairs that straddle the periodic boundary.
#==============================================================================
def fix_competition(active_particles, lattice):
    L_a = len(active_particles)
    L = len(lattice)
    active_particles_to_be_deactivated = set()
    for activity_index, particle_loc in enumerate(active_particles):
        if active_particles[(activity_index + 1) % L_a] == particle_loc + 2:
            if random.uniform(0, 1) < 0.5:
                active_particles_to_be_deactivated.add(particle_loc)
            else:
                active_particles_to_be_deactivated.add(
                    active_particles[(activity_index + 1) % L_a])
    # Wrap-around competition: sites 0 and L-2 share the empty site L-1.
    if (0 in active_particles and (L - 2) in active_particles):
        if (random.uniform(0, 1) < 0.5):
            active_particles_to_be_deactivated.add(0)
        else:
            active_particles_to_be_deactivated.add(L - 2)
    # Wrap-around competition: sites 1 and L-1 share the empty site 0.
    if (1 in active_particles and L - 1 in active_particles):
        if (random.uniform(0, 1) < 0.5):
            active_particles_to_be_deactivated.add(1)
        else:
            active_particles_to_be_deactivated.add(L - 1)
    for particle in active_particles_to_be_deactivated:
        active_particles.remove(particle)
    return active_particles


#==============================================================================
# parallel_update(lattice, timesteps=1, randomize=False)
# Advances the lattice in place for up to `timesteps` steps (stops early
# when no site is active). randomize=True moves a single randomly chosen
# active particle per step; otherwise all non-competing active particles
# move in one sweep.
# NOTE(review): within a sweep, each move sees the lattice as already
# modified by earlier moves in the same sweep (original behavior,
# deliberately preserved).
#==============================================================================
def parallel_update(lattice, timesteps=1, randomize=False):
    L = len(lattice)
    for t in range(timesteps):
        active_sites = find_active_sites(lattice)
        if (len(active_sites) == 0):
            break
        if randomize:
            active_site = random.choice(active_sites)
            lattice[active_site] -= 1
            lattice[(active_site + (find_empty_neighbor(lattice, active_site))) % L] += 1
        else:
            fix_competition(active_sites, lattice)
            for active_site in active_sites:
                # Competition is resolved above -> displace each active particle.
                lattice[active_site] -= 1
                lattice[(active_site + (find_empty_neighbor(lattice, active_site))) % L] += 1


#==============================================================================
# clg_activity(lattice)
# Returns the fraction of sites that are currently active.
#==============================================================================
def clg_activity(lattice):
    return float(len(find_active_sites(lattice))) / len(lattice)
StarcoderdataPython
1675330
# Settings that always override the base settings for test runs.
EMAIL_FROM_ADDRESS = '<EMAIL>'
# MD5 is cryptographically weak but fast; used here only to speed up
# user creation/login in tests, never in production.
PASSWORD_HASHERS = (
    'django.contrib.auth.hashers.MD5PasswordHasher',
)
StarcoderdataPython
8001869
# -*- coding: utf-8 -*- import sys, os, platform, subprocess from .flowpy_switcher.setupfile import setup def getargs(argv): tuples = [] def gen(key): if key.startswith("-"): key = key[1:] if key == 'h': tuples.append((key,'True')) return gen def setarg(value): tuples.append( (key,value) ) return gen return setarg else: return gen op = gen for arg in argv: op = op(arg) return dict(tuples) if __name__ == '__main__': dict_args = getargs(sys.argv[1:]) Arch = platform.architecture()[0] __Platform__ = platform.platform().lower() if 'h' in dict_args: print("enable : python -m flowpy -m enable [-p PATH] \disable : python -m flowpy -m disable [-p PATH]") print("PATH : path of python intepreter.") else: assert 'm' in dict_args, "mode not selected: use command ' -m [enable|disable] ' " pattern_match_test = (dict_args["m"], Arch, __Platform__) try: (mode, arch, platf) = pattern_match_test jd = 'windows' in platf or 'linux' in platf if not jd: raise EnvironmentError("unsupported os") platf = 'windows' if 'windows' in platf else 'linux' def getpypath(): try: search_command = 'where' if platf == 'windows' else 'which' ret = subprocess.Popen([search_command, 'python'],stdout = subprocess.PIPE)\ .stdout\ .readlines()[0]\ .decode('utf8') except IndexError as e: raise BaseException('No python distribution found in PATH.') from e return ret pypath = dict_args['p'] if 'p' in dict_args else getpypath() setup(pypath, arch, platf)(mode) except Exception as e: raise BaseException('unknown platform...') from e
StarcoderdataPython
5111756
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import os

from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall

from rock.rules.rule_parser import RuleParser

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


class RuleManager(object):
    """Load all rule cases from disk and evaluate them periodically.

    `path` is a colon-separated list of directories; every file found
    under them (recursively) is parsed as a JSON case at construction
    time.
    """

    def __init__(self, path):
        LOG.info('Initializing rule manager.')
        self.path = path
        self.cases = []
        self.periodic_task = None
        self._load_all_cases()

    def _load_all_cases(self):
        # Each colon-separated component is an independent search root.
        for path in self.path.split(':'):
            if os.path.exists(path):
                self._get_all_cases_recursively(path)
            else:
                LOG.error("Extension path '%s' doesn't exist!", path)

    def after_start(self):
        """Start the fixed-interval evaluation loop and block on it."""
        self.periodic_task = \
            loopingcall.FixedIntervalLoopingCall(self.calculate_task)
        self.periodic_task.start(interval=CONF.check_cases_interval)
        self.periodic_task.wait()

    def calculate_task(self):
        # Only dict-shaped cases are valid rule definitions.
        for case in self.cases:
            if isinstance(case, dict):
                self._calculate(case)

    def _get_all_cases_recursively(self, path):
        """Walk `path` and append every parseable JSON file to self.cases.

        Unreadable or malformed files are logged and skipped.
        """
        for dir_path, dir_names, file_names in os.walk(path):
            for file_name in file_names:
                # The open() is inside the try so unreadable files are
                # also skipped instead of aborting the whole scan.
                try:
                    with open(os.path.join(dir_path, file_name), 'r') as f:
                        self.cases.append(json.loads(f.read()))
                    LOG.info("Case %s loaded", file_name)
                except Exception as e:
                    # Bug fix: e.message is Python-2-only and raised
                    # AttributeError on Python 3; use lazy logging args.
                    LOG.warning('Load case error, error %s, case_file %s.',
                                e, file_name)

    def _calculate(self, rule_detail):
        LOG.info("Calculating %s", rule_detail)
        parser = RuleParser(rule_detail)
        parser.calculate()
StarcoderdataPython
8042976
# Accessing webcam using mobile device and saving video in hardisk
# importing library
import cv2 as cv

# Mobile camera ip camera (DroidCam-style HTTP stream)
camera = "http://192.168.43.90:4747/video"

cap = cv.VideoCapture(0)  # 0-> Internal webcam, 1-> External webcam
# cap.open() re-opens the capture on the IP-camera URL, replacing the
# index-0 device opened above.
cap.open(camera)
print("Cap is Opened", cap.isOpened())

# For saving the read video
# Video formats - DIVX, XVID, MJPG, X264, WMV1, WMV2
# Recommended - XVID - best quality
# NOTE(review): XVID is conventionally paired with an .avi container;
# writing it into "output.mp4" may fail on some builds -- confirm.
fourcc = cv.VideoWriter_fourcc(*"XVID")  # Codec 4byte(video manager)
fps = 60
res = (16 * 50, 9 * 50)
dest = "output.mp4"
output = cv.VideoWriter(dest, fourcc, fps, res)  # add parameter 0 for gray scale image

while cap.isOpened():
    ret, frame = cap.read()
    if ret == True:
        # frame = cv.resize(frame,(16*50,9*50)) # Resizing frame
        frame = cv.flip(frame, 1)
        grayFrame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)  # Converting frame to gray
        cv.imshow("Frame", frame)
        cv.imshow("Grayscale video", grayFrame)
        output.write(frame)
        if cv.waitKey(1) & 0xFF == ord('q'):
            break

cap.release()
# Fix: the writer must be released so the container is flushed and
# finalized; without this the output file can be truncated/corrupt.
output.release()
cv.destroyAllWindows()
StarcoderdataPython
3309987
from django.db import models
from django.contrib.auth import get_user_model

# Resolve the project's active user model (may be a custom one).
User = get_user_model()


class ContactSend(models.Model):
    """A contact message sent by/attributed to an agent (user)."""

    # Agent the message belongs to; deleting the user cascades to
    # their messages.  Reverse accessor: user.ContactSend.
    name_agent = models.ForeignKey(User, on_delete=models.CASCADE, related_name="ContactSend")
    # NOTE(review): max_length on a TextField is not enforced at the
    # database level by Django (form-level only) -- confirm intent.
    theme = models.TextField(max_length=250)
    message = models.TextField()
StarcoderdataPython
3317407
"""
Prepares a build spec for an image with an Apache Guacamole server and adds startup scripts to insert the correct
users and connections into the guacamole database. This server becomes the entrypoint for all students in the arena.
"""
import random
import string

from netaddr import IPAddress, IPNetwork

from utilities.globals import ds_client, LOG_LEVELS, cloud_log, LogIDs, \
    BuildTypes, guac_db_password
from utilities.infrastructure_as_code.server_spec_to_cloud import ServerSpecToCloud

__author__ = "<NAME>"
__copyright__ = "Copyright 2022, UA Little Rock, Emerging Analytics Center"
__credits__ = ["<NAME>"]
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Production"


class StudentEntrySpecToCloud:
    """Builds and stores the student-entry (Guacamole) server spec for a workout or arena."""

    STUDENT_ENTRY_IMAGE = 'image-labentry'

    def __init__(self, type, build, build_id, competition_guacamole_connections=None,
                 student_entry_network=None, student_entry_ip_address=None):
        """
        @param type: The build type, COMPUTE or ARENA
        @type type: str
        @param build: A datastore entry of the build specification
        @type build: dict
        @param build_id: The key used for storing the build specification
        @type build_id: str
        @param competition_guacamole_connections: The guacamole connections for competition arenas.
        @type competition_guacamole_connections: list
        @param student_entry_network: The name of the network to place the student entry server (ARENA build only)
        @type student_entry_network: str
        @param student_entry_ip_address: The IP address of the guac server, default of None
        @type student_entry_ip_address: str
        """
        self.type = type
        self.build = build
        self.build_id = build_id
        self.competition_guacamole_connections = competition_guacamole_connections
        self.student_entry_ip_address = student_entry_ip_address
        # The student entry network is only specified for the arena build.
        # Otherwise, we pull it from the build spec.
        self.student_entry_network = student_entry_network if student_entry_network \
            else self.build['student_entry']['network']
        self.guac_connections = []
        self.student_credentials = []

    def commit_to_cloud(self):
        """
        Store the student entry server specification for the given workout.

        @return: None
        """
        self._prepare_guac_connections()
        guac_startup_script = self._get_guac_startup_script(self.guac_connections)
        # Use the caller-supplied IP when given; otherwise pick the first free one.
        student_entry_ip = self._get_student_entry_ip_address(self.build, self.student_entry_network) \
            if not self.student_entry_ip_address else self.student_entry_ip_address
        if not student_entry_ip:
            cloud_log(LogIDs.MAIN_APP, "Could not find available IP address for student entry guacamole server",
                      LOG_LEVELS.ERROR)
            raise LookupError
        server_spec = {
            'name': "student-guacamole",
            'image': self.STUDENT_ENTRY_IMAGE,
            'tags': {'items': ['student-entry']},
            'machine_type': 'n1-standard-1',
            'nics': [
                {
                    "network": self.student_entry_network,
                    "subnet": "default",
                    "external_NAT": True,
                    "internal_IP": student_entry_ip
                }
            ],
            'guacamole_startup_script': guac_startup_script
        }
        self._update_build_spec()
        ServerSpecToCloud(server_spec, self.build_id, student_entry=True).commit_to_cloud()

    def _prepare_guac_connections(self):
        """
        Prepare the guacamole connections and the student credentials. This considers the following build types:
        1) Compute Workout with only 1 credential - Older structure predating multiple credentials.
        2) Compute Workout with multiple credentials - The specification has a 'connections' element.
        3) Competition arenas - 1 user per student in the competition with multiple workout IDs.

        @return: None
        """
        raw_connections = None
        if self.type == BuildTypes.ARENA:
            raw_connections = self.competition_guacamole_connections
        else:
            if 'connections' in self.build['student_entry']:
                raw_connections = self.build['student_entry']['connections']
            else:
                # Single-credential legacy path: one connection for the whole workout.
                connection = self._create_guac_connection(self.build_id, self.build['student_entry'], 0)
                self.student_credentials.append({
                    "workout_user": connection['student_user'],
                    "workout_password": connection['student_password']
                })
                self.guac_connections.append(connection)
        # This occurs for 1) competition builds and 2) compute builds with more than one connection
        if raw_connections:
            i = 0
            for entry in raw_connections:
                connection = self._create_guac_connection(self.build_id, entry, i)
                self.student_credentials.append({
                    "workout_id": entry.get('workout_id', None),
                    "workout_user": connection['student_user'],
                    # Fix: the key set by _create_guac_connection is
                    # 'student_password' (the stored value was corrupted).
                    "workout_password": connection['student_password']
                })
                self.guac_connections.append(connection)
                i += 1

    def _create_guac_connection(self, build_id, config, connection_number):
        """
        Creates a guacamole connection ready for inserting into the student entry server configuration.

        @param build_id: ID of the workout being created
        @type build_id: string
        @param config: Specification for a student guacamole connection
        @type config: dict
        @param connection_number: The iterator number for the connection
        @type connection_number: int
        @return: The specification with the workout ID and student credentials added
        @rtype: dict
        """
        student_entry_username = config['username'] if 'username' in config else None
        rdp_domain = config['domain'] if 'domain' in config else None
        security_mode = config['security-mode'] if 'security-mode' in config else 'nla'
        connection = {
            'build_id': build_id,
            'entry_type': config['type'],
            'ip': config['ip'],
            "student_user": f'cybergym{connection_number + 1}',
            "student_password": self._get_random_alphaNumeric_string(),
            "connection_name": f"{build_id}-{connection_number}",
            'connection_user': student_entry_username,
            'connection_password': self._get_safe_password(config['password']),
            'domain': rdp_domain,
            'security-mode': security_mode
        }
        return connection

    def _update_build_spec(self):
        """
        Add credentials for the student entry to the workout, and add a default firewall rule to allow
        access to the student entry server. When the build type is an arena, each workout in the arena
        needs the recently generated credentials added.

        Uses ``self.student_credentials`` (list of dict) and ``self.student_entry_network``.

        @return: Status
        @rtype: bool
        """
        # Build the firewall rule to allow external access to the student entry.
        firewall_rule = {
            'name': 'allow-student-entry',
            'network': self.student_entry_network,
            'target_tags': ['student-entry'],
            'protocol': None,
            'ports': ['tcp/80,8080,443'],
            'source_ranges': ['0.0.0.0/0']
        }
        if self.type == BuildTypes.COMPUTE:
            if len(self.student_credentials) > 1:
                self.build['workout_credentials'] = self.student_credentials
            else:
                self.build['workout_user'] = self.student_credentials[0]['workout_user']
                self.build['workout_password'] = self.student_credentials[0]['workout_password']
            self.build['firewall_rules'].append(firewall_rule)
        elif self.type == BuildTypes.ARENA:
            for credential in self.student_credentials:
                workout_id = credential['workout_id']
                student_user = credential['workout_user']
                student_password = credential['workout_password']
                workout = ds_client.get(ds_client.key('cybergym-workout', workout_id))
                workout['workout_user'] = student_user
                workout['workout_password'] = student_password
                ds_client.put(workout)
            self.build['arena']['firewall_rules'].append(firewall_rule)
        ds_client.put(self.build)
        return True

    @staticmethod
    def _get_random_alphaNumeric_string(stringLength=12):
        """Generate a random shell-safe password of the given length."""
        lettersAndDigits = string.ascii_letters + string.digits
        password = StudentEntrySpecToCloud\
            ._get_safe_password(''.join((random.choice(lettersAndDigits) for i in range(stringLength))))
        return password

    @staticmethod
    def _get_safe_password(password):
        """Escape characters that would break the shell startup script."""
        safe_password = password.replace('$', '\$')
        safe_password = safe_password.replace("'", "\'")
        return safe_password

    @staticmethod
    def _get_guac_startup_script(guac_connections):
        """Render the SQL startup script that seeds users and connections into guacamole_db."""
        startup_script = GuacSQL.guac_startup_begin.format(guacdb_password=guac_db_password)
        for connection in guac_connections:
            guac_user = connection['student_user']
            # Fix: restore the real key names (values were corrupted by
            # anonymization and would raise KeyError at runtime).
            guac_password = connection['student_password']
            connection_name = connection['connection_name']
            startup_script += GuacSQL.guac_startup_user_add.format(user=guac_user, name=guac_user,
                                                                   guac_password=guac_password)
            if connection['entry_type'] == 'vnc':
                startup_script += GuacSQL.guac_startup_vnc.format(ip=connection['ip'],
                                                                  connection=connection_name,
                                                                  vnc_password=connection['connection_password'])
            else:
                startup_script += GuacSQL.guac_startup_rdp.format(ip=connection['ip'],
                                                                  connection=connection_name,
                                                                  rdp_username=connection['connection_user'],
                                                                  rdp_password=connection['connection_password'],
                                                                  security_mode=connection['security-mode'])
                if connection['domain']:
                    startup_script += GuacSQL.guac_startup_rdp_domain.format(domain=connection['domain'])
            startup_script += GuacSQL.guac_startup_join_connection_user
        startup_script += GuacSQL.guac_startup_end
        return startup_script

    @staticmethod
    def _get_student_entry_ip_address(build, network):
        """
        Find the first available IP address to use for the student entry server.

        @param build: Datastore entry for the build
        @param network: The network name for the build
        @return: An available IP address, or False when none is free
        @rtype: str or bool
        """
        build_id = build.key.name
        network = network.replace(f"{build_id}-", '')
        ip_subnet = None
        for network_name in build['networks']:
            if network_name['name'] == network:
                ip_subnet = IPNetwork(network_name['subnets'][0]['ip_subnet'])
        unavailable = []
        for server in build['servers']:
            for n in server['nics']:
                if n['network'] == network:
                    unavailable.append(IPAddress(n['internal_IP']))
        if not ip_subnet:
            return False
        # Skip the network address and gateway (first two addresses).
        i = 0
        for ip_address in ip_subnet:
            if i > 1 and ip_address not in unavailable:
                return str(ip_address)
            i += 1
        return False


class GuacSQL:
    # These constants build the guacamole startup script.
    # Apache Guacamole JDBC-auth SQL docs: https://guacamole.apache.org/doc/gug/jdbc-auth.html
    guac_startup_begin = \
        '#!/bin/bash\n' \
        'mysql -u guacamole_user -p{guacdb_password} -D guacamole_db <<MY_QUERY\n'
    guac_startup_user_add = \
        'SET @salt = UNHEX(SHA2(UUID(), 256));\n' \
        'INSERT INTO guacamole_entity (name, type) VALUES (\'{user}\', \'USER\');\n' \
        'SELECT entity_id INTO @entity_id FROM guacamole_entity WHERE name = \'{user}\';\n' \
        'INSERT INTO guacamole_user (entity_id, password_salt, password_hash, password_date) ' \
        'VALUES (@entity_id, @salt, UNHEX(SHA2(CONCAT(\'{guac_password}\', HEX(@salt)), 256)), \'2020-06-12 00:00:00\');\n'
    guac_startup_vnc = \
        'INSERT INTO guacamole_connection (connection_name, protocol) VALUES (\'{connection}\', \'vnc\');\n' \
        'SELECT connection_id INTO @connection_id FROM guacamole_connection WHERE connection_name = \'{connection}\';\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'hostname\', \'{ip}\');\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'password\', \"{vnc_password}\");\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'port\', \'5901\');\n'
    guac_startup_rdp = \
        'INSERT INTO guacamole_connection (connection_name, protocol) VALUES (\'{connection}\', \'rdp\');\n' \
        'SELECT connection_id INTO @connection_id FROM guacamole_connection WHERE connection_name = \'{connection}\';\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'hostname\', \'{ip}\');\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'password\', \"{rdp_password}\");\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'port\', \'3389\');\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'username\', \'{rdp_username}\');\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'security\', \'{security_mode}\');\n' \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'ignore-cert\', \'true\');\n'
    guac_startup_rdp_domain = \
        'INSERT INTO guacamole_connection_parameter VALUES (@connection_id, \'domain\', \'{domain}\');\n'
    guac_startup_join_connection_user = \
        'INSERT INTO guacamole_connection_permission (entity_id, connection_id, permission) VALUES ' \
        '(@entity_id, @connection_id, \'READ\');\n'
    guac_startup_end = 'MY_QUERY\n'
StarcoderdataPython
3550002
#!/usr/bin/python # encoding: utf-8 #主要是对python中的re的相关操作的封装 import re import nltk class re_wrapper(object): def __init__(self): pass def re_show(self, regexp, string, left='{', right='}'): ''' 把找到的符合regexp的non-overlapping matches标记出来 如: nltk.re_show('[a-zA-Z]+','12fFdsDFDS3rtG4')#12{fFdsDFDS}3{rtG}4 ''' return nltk.re_show(regexp, string, left, right) def findall(self,regexp, string): ''' 如果regexp中不包含小括号,如 re.findall('[a-zA-Z]+','12fFdsDFDS3rtG4')#['fFdsDFDS', 'rtG'] 等价于re.findall('([a-zA-Z]+)','12fFdsDFDS3rtG4')#['fFdsDFDS', 'rtG'] 否则: re.findall('(\d)\s+(\d)','12 3fFdsDFDS3 4rtG4')#[('2', '3'), ('3', '4')] :return: list ''' return re.findall(regexp, string)
StarcoderdataPython
5025234
#!/usr/bin/python # -*- coding: utf-8 -*- import unittest from viacep import ViaCEP class TestCase(unittest.TestCase): def test_localidade(self): d = ViaCEP('78048000') data = d.getDadosCEP() self.assertEqual(data['localidade'], 'Cuiabá') def test_logradouro(self): d = ViaCEP('78048000') data = d.getDadosCEP() self.assertEqual(data['logradouro'], 'Avenida Miguel Sutil') def test_bairro(self): d = ViaCEP('78048000') data = d.getDadosCEP() self.assertEqual(data['bairro'], 'Alvorada') def test_uf(self): d = ViaCEP('78048000') data = d.getDadosCEP() self.assertEqual(data['uf'], 'MT') def test_ibge(self): d = ViaCEP('78048000') data = d.getDadosCEP() self.assertEqual(data['ibge'], '5103403') def test_01311200_ibge(self): d = ViaCEP('01311200') data = d.getDadosCEP() self.assertEqual(data['ibge'], '3550308') """ def test_78048000_json(self): test_78048000 = u"{'cep': '78048-000', 'logradouro': '<NAME>', 'complemento': 'de 5686 a 6588 - lado par', 'bairro': 'Alvorada', 'localidade': 'Cuiabá', 'uf': 'MT', 'unidade': '', 'ibge': '5103403', 'gia': ''}" d = ViaCEP('78048000') data = d.getDadosCEP() self.assertEqual(data, test_78048000) def test_error(self): data_error = '{\'erro\': True}' d = ViaCEP('08048000') data = d.getDadosCEP() print(data) self.assertEqual(data, data_error) """ if __name__ == '__main__': unittest.main()
StarcoderdataPython
3261791
# Generated by Django 2.1.13 on 2019-12-25 04:04 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('nablaforum', '0003_auto_20191025_2140'), ] operations = [ migrations.AddField( model_name='thread', name='created', field=models.DateTimeField(default=django.utils.timezone.now, editable=False), preserve_default=False, ), ]
StarcoderdataPython
6435635
"""Netlist preprocessor: reads a sectioned description file ($parts /
$comps / $nets), expands patterned names via ``exp.expand``, and prints
the resolved netlist (net name followed by component/pin terms)."""
import exp
import sys
import pprint

parts = {}   # Parts database: part name -> {pin name: [pin numbers]}.
comps = {}   # Component instances: comp name -> [part name, value].
nets = {}    # Netlist: net name -> list of connected "comp/pin" terms.

infile = sys.argv[1]


def process_comment(rawlines):
    """Comment sections are ignored."""
    pass


def process_parts(s):
    """Parse a $parts section: unindented lines name parts, indented lines define pins."""
    current_parts = None
    for ln in s:
        if not ln[0].isspace():
            current_parts = exp.expand([ln.split()[0]])
            for part in current_parts:
                parts[part] = {}
        else:
            pindef = ln.split()
            pins = exp.expand([pindef[0]])
            names = exp.expand([pindef[1]]) if len(pindef) == 2 else pins
            if len(pins) == 1:    # One pin with several names.
                pins = pins * len(names)
            if len(names) == 1:   # Several pins with the same name.
                names = names * len(pins)
            if len(names) != len(pins):
                sys.exit("Error for part %s pin %s." % (current_parts, ln))
            for n, p in zip(names, pins):
                for part in current_parts:
                    parts[part][n] = parts[part].get(n, []) + [p]


def process_comps(s):
    """Parse a $comps section: each line is "name part [value]"."""
    for ln in s:
        compdef = ln.split()
        if len(compdef) == 2:
            # Value defaults to the part name.
            compdef += [compdef[1]]
        if len(compdef) != 3:
            sys.exit("Error in component defnition: %s." % ln)
        if compdef[1] not in parts:
            sys.exit("Unknown part %s." % compdef[1])
        for comp in exp.expand([compdef[0]]):
            if comp in comps:
                sys.exit("Error: Duplicate component %s." % comp)
            comps[comp] = compdef[1:]
    return comps


def process_nets(s):
    """Parse a $nets section, resolving pin names to pin numbers."""
    for net in s:
        ns = net.split()
        netname = ns[0]
        raw_terms = []
        for rp in ns[1:]:
            raw_terms += exp.expand([rp])
        terms = []
        for rt in raw_terms:
            comp_name, pin_name = rt.split('/', 1)
            part_name = comps[comp_name][0]
            pin_numbers = parts[part_name][pin_name]
            for pn in pin_numbers:
                terms.append('%s/%s' % (comp_name, pn))
        nets[netname] = terms
    '''
        netnames = expand([netname])
        if len(netnames) == 1:
            # Just expand nodes
            terms = ' '.join(expand(terms))
            print(netname, terms)
        else:
            # Expand netnames & each node in parallel and zip up.
            nodes = [expand([n]) for n in terms]
            for i in range(len(netnames)):
                print(netnames[i], ' '.join([x[i] for x in nodes]))
    '''


def process(section, rawlines):
    """Dispatch the accumulated lines of one section to its handler."""
    if section is None:
        # Fix: before the first "$section" header there is nothing to
        # process -- the original fell through to the else branch and
        # called sys.exit("Unknown section None.") on the very first
        # header line.
        return
    section = section.lower()
    if section == 'comment':
        process_comment(rawlines)
    elif section == 'parts':
        process_parts(rawlines)
    elif section == 'comps':
        process_comps(rawlines)
    elif section == 'nets':
        process_nets(rawlines)
    else:
        sys.exit("Unknown section %s." % section)


section = None
rawlines = []
# Fix: read the input through a context manager so the handle is closed.
with open(infile, 'rt') as fh:
    contents = fh.read()
for ln in contents.splitlines():
    ln = ln.partition('#')[0]        # strip comments
    if not ln or ln.isspace():
        continue
    if ln.lstrip()[0] == '$':
        process(section, rawlines)
        rawlines = []
        section = ln.lstrip()[1:].split()[0]
    else:
        if section is None:
            sys.exit("Missing initial section header.")
        else:
            rawlines.append(ln)
process(section, rawlines)

for n in nets:
    print(n, ' '.join(nets[n]))
StarcoderdataPython
6434629
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import random
from collections import deque, defaultdict
import typing
from rasa_core.domain import Domain
from typing import List, Text, Dict, Optional

from rasa_core.interpreter import RegexInterpreter, NaturalLanguageInterpreter
from rasa_core import utils

if typing.TYPE_CHECKING:
    from rasa_core.training_utils.dsl import StoryStep, Story, \
        TrainingsDataExtractor


class StoryGraph(object):
    """DAG of story steps linked by their start/end checkpoints."""

    def __init__(self, story_steps):
        # type: (List[StoryStep]) -> None
        self.story_steps = story_steps
        # Index steps by id for O(1) lookup.
        self.step_lookup = {s.id: s for s in self.story_steps}
        # Topologically ordered step ids (parents before children).
        self.ordered_ids = StoryGraph.order_steps(story_steps)

    def ordered_steps(self):
        # type: () -> List[StoryStep]
        """Returns the story steps ordered by topological order of the DAG."""

        return [self.get(step_id) for step_id in self.ordered_ids]

    def get(self, step_id):
        # type: (Text) -> Optional[StoryStep]
        """Looks a story step up by its id."""

        return self.step_lookup.get(step_id)

    def build_stories(self, domain, max_number_of_trackers=2000):
        # type: (Domain, Optional[int]) -> List[Story]
        """Build the stories of a graph.

        Walks the steps in topological order, extending every partial
        story (tracker) that reached a step's start checkpoint.
        ``max_number_of_trackers`` bounds the fan-out by subsampling
        (seeded, hence deterministic); pass None to disable.
        """
        from rasa_core.training_utils.dsl import STORY_START, Story

        active_trackers = {STORY_START: [Story()]}
        rand = random.Random(42)
        for step in self.ordered_steps():
            if step.start_checkpoint_name() in active_trackers:
                # these are the trackers that reached this story step
                # and that need to handle all events of the step
                incoming_trackers = active_trackers[step.start_checkpoint_name()]

                # TODO: we can't use tracker filter here to filter for
                #       checkpoint conditions since we don't have trackers.
                #       this code should rather use the code from the dsl.

                if max_number_of_trackers is not None:
                    incoming_trackers = utils.subsample_array(
                            incoming_trackers, max_number_of_trackers, rand)

                events = step.explicit_events(domain)
                # need to copy the tracker as multiple story steps might
                # start with the same checkpoint and all of them
                # will use the same set of incoming trackers

                if events:
                    trackers = [Story(tracker.story_steps + [step])
                                for tracker in incoming_trackers]
                else:
                    trackers = []  # small optimization

                # update our tracker dictionary with the trackers that handled
                # the events of the step and that can now be used for further
                # story steps that start with the checkpoint this step ended on

                if step.end_checkpoint_name() not in active_trackers:
                    active_trackers[step.end_checkpoint_name()] = []
                active_trackers[step.end_checkpoint_name()].extend(trackers)

        # Steps with no end checkpoint (key None) are finished stories.
        return active_trackers[None]

    def as_story_string(self):
        """Concatenate all steps back into story-markdown text."""
        story_content = ""
        for step in self.story_steps:
            story_content += step.as_story_string(flat=False)
        return story_content

    @staticmethod
    def order_steps(story_steps):
        # type: (List[StoryStep]) -> deque
        """Topological sort of the steps returning the ids of the steps."""

        checkpoints = StoryGraph._group_by_start_checkpoint(story_steps)
        # Edge s -> t exists when t starts at the checkpoint s ends on.
        graph = {s.id: [other.id
                        for other in checkpoints[s.end_checkpoint_name()]]
                 for s in story_steps}
        return StoryGraph.topological_sort(graph)

    @staticmethod
    def _group_by_start_checkpoint(story_steps):
        # type: (List[StoryStep]) -> Dict[Text, List[StoryStep]]
        """Returns all the start checkpoint of the steps"""

        checkpoints = defaultdict(list)
        for step in story_steps:
            checkpoints[step.start_checkpoint_name()].append(step)
        return checkpoints

    @staticmethod
    def topological_sort(graph):
        """Creates a topsort of a directed graph. This is an unstable sorting!

        The graph should be represented as a dictionary, e.g.:

        >>> example_graph = {
        ...         "a": ["b", "c", "d"],
        ...         "b": [],
        ...         "c": ["d"],
        ...         "d": [],
        ...         "e": ["f"],
        ...         "f": []}
        >>> StoryGraph.topological_sort(example_graph)
        deque([u'e', u'f', u'a', u'c', u'd', u'b'])
        """
        # Iterative-recursive DFS with GRAY/BLACK coloring; raises on cycles.
        GRAY, BLACK = 0, 1
        ordered = deque()
        unprocessed = set(graph)
        visited_nodes = {}

        def dfs(node):
            visited_nodes[node] = GRAY
            for k in graph.get(node, ()):
                sk = visited_nodes.get(k, None)
                if sk == GRAY:
                    raise ValueError("Cycle found at node: {}".format(sk))
                if sk == BLACK:
                    continue
                unprocessed.discard(k)
                dfs(k)
            ordered.appendleft(node)
            visited_nodes[node] = BLACK

        while unprocessed:
            dfs(unprocessed.pop())
        return ordered
StarcoderdataPython
1781696
<reponame>Arcensoth/pyckaxe from .loot_table import * from .loot_table_serializer import *
StarcoderdataPython
3324884
#!/usr/bin/python3 import json import urllib3 from pathlib import Path import requests urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) pools = requests.get('https://raw.githubusercontent.com/xolentum/mining-pools-json/main/' 'xolentum-mining-pools.json').json()['pools'] with open(Path(__file__).parent / '../data/pools-history-data.json') as f: pools_history = json.load(f) for pool in pools: if len(pools_history[pool['url']]) > 15: pools_history[pool['url']].pop(0) try: requests.get(pool['api'] if not isinstance(pool['api'], list) else pool['api'][0], verify=False, timeout=5) pools_history[pool['url']].append(1) except (requests.Timeout, requests.exceptions.ConnectionError, requests.exceptions.ConnectTimeout): pools_history[pool['url']].append(0) pools_history = json.dumps(pools_history, indent=4) with open(Path(__file__).parent / '../data/pools-history-data.json', 'w') as f: f.write(pools_history)
StarcoderdataPython
11369152
# Read two numbers from the user and print their sum in several formats.
n1 = float(input("Digite um numero A "))
n2 = float(input("Digite um numero B "))
soma = n1 + n2
print(f"A+B = {soma}")
print(f"A soma entre {n1}", f"e {n2}", f"e igual a {soma}")
print(f"A soma entre {n1} e {n2} e igual a {soma}")
StarcoderdataPython
9665357
"""Download one YouTube video's audio track and store it as song.mp3."""
import youtube_dl
import os

# Remove any previous download; if the file is locked (being played on
# Windows), ask the user to stop playback first.
song_there = os.path.isfile("song.mp3")
try:
    if song_there:
        os.remove("song.mp3")
except PermissionError:
    print("Wait for the current playing music to end or use the 'stop' command")

ydl_opts = {
    'format': 'bestaudio/best',
    # Post-processing to real mp3 is disabled (requires ffmpeg); the raw
    # bestaudio stream (usually .m4a) is renamed below instead.
    #'postprocessors': [{
    #    'key': 'FFmpegExtractAudio',
    #    'preferredcodec': 'mp3',
    #    'preferredquality': '192',
    #}],
}
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://youtu.be/ktvTqknDobU"])

# NOTE(review): the file is only renamed to .mp3 -- the contents remain
# m4a-encoded audio; most players cope, but it is not a real mp3.
for file in os.listdir("./"):
    if file.endswith(".m4a"):
        os.rename(file, "song.mp3")
StarcoderdataPython
3550106
# -*- coding: utf-8 -*-
"""Exception hierarchy for the Duobei SDK.

All SDK errors derive from :class:`DuobeiSDKException`, so callers can
catch that single base class.
"""


class DuobeiSDKException(Exception):
    """Base class for all Duobei SDK errors."""


class DuobeiSDKInvalidParamException(DuobeiSDKException):
    """Raised when a caller supplies an invalid parameter."""


class DuobeiSDKServerException(DuobeiSDKException):
    """Raised when the Duobei server reports an error."""
StarcoderdataPython
1882942
"""Example: call asyncio coroutines from synchronous code via greenletio."""
import asyncio
import random

from greenletio import await_


def main():
    # await_ bridges the sync world to the asyncio event loop: each
    # iteration sleeps a random < 1s interval, then prints the index.
    for i in range(10):
        await_(asyncio.sleep(random.random()))
        print(i)


main()
StarcoderdataPython
268541
"""Do the rotation action that some products need."""
import sys
import os
import gzip

BASE = "/mesonet/ldmdata/"


def main(argv):
    """Rotate product files under BASE, keeping the 10 most recent copies.

    Reads the new product from stdin.  ``argv[1]`` is the filename base
    (relative to BASE), ``argv[2]`` the format suffix.  For "tif.Z"
    products the compressed copy is rotated first, then the decompressed
    payload is rotated again as plain "tif".
    """
    data = sys.stdin.buffer.read()
    fnbase = argv[1]
    fmt = argv[2]
    dirname = "%s/%s" % (BASE, os.path.dirname(fnbase))
    # exist_ok avoids the check-then-create race of the old isdir test.
    os.makedirs(dirname, exist_ok=True)
    if fmt == "tif.Z":
        # Shift name9 -> name10, ..., name0 -> name1 before writing name0.
        for i in range(9, -1, -1):
            oldfp = "%s/%s%s.%s" % (BASE, fnbase, i, fmt)
            newfp = "%s/%s%s.%s" % (BASE, fnbase, i + 1, fmt)
            if os.path.isfile(oldfp):
                os.rename(oldfp, newfp)
        with open("%s/%s%s.%s" % (BASE, fnbase, 0, fmt), "wb") as output:
            output.write(data)
        # Fix: close the gzip handle (the old chained open/read leaked it).
        with gzip.open("%s/%s%s.%s" % (BASE, fnbase, 0, fmt), "rb") as fh:
            data = fh.read()
        fmt = "tif"
    # Rotate and write the (possibly decompressed) product.
    for i in range(9, -1, -1):
        oldfp = "%s/%s%s.%s" % (BASE, fnbase, i, fmt)
        newfp = "%s/%s%s.%s" % (BASE, fnbase, i + 1, fmt)
        if os.path.isfile(oldfp):
            os.rename(oldfp, newfp)
    with open("%s/%s%s.%s" % (BASE, fnbase, 0, fmt), "wb") as output:
        output.write(data)


if __name__ == "__main__":
    main(sys.argv)
StarcoderdataPython
11293498
#coding: utf-8 from caty.testutil import TestCase from caty.util.cache import * from functools import partial class MemoizeTest(TestCase): def test_memoized(self): def foo(a, b): return a() + b() def _(d): d['x'] += 1 return d['x'] d1 = {'x': 0} x = partial(_, d1) d2 = {'x': 1} y = partial(_, d2) m = self.assertNotRaises(memoize, foo) v = self.assertNotRaises(m, x, y) self.assertEquals(v, 3) self.assertEquals(d1['x'], 1) self.assertEquals(d2['x'], 2) v = self.assertNotRaises(m, x, y) self.assertEquals(v, 3) self.assertEquals(d1['x'], 1) self.assertEquals(d2['x'], 2) m.clear() v = self.assertNotRaises(m, x, y) self.assertEquals(v, 5) self.assertEquals(d1['x'], 2) self.assertEquals(d2['x'], 3) def test_cannot_memoized(self): def foo(a, b): return (a, b) m = self.assertNotRaises(memoize, foo) self.assertRaises(Exception, m, [], {}) class CacheTest(TestCase): def test_cache(self): cache = Cache(101) for i in range(100): cache.set(i, i**2) for i in range(11, 100): self.assertEquals(cache.get(i), i**2) cache.set(100, 100**2) # 1..11 が削除されているはず for i in range(1, 11): self.assertEquals(cache.get(i), None) self.assertEquals(cache.get(100), 100**2) for i in range(20, 101): self.assertEquals(cache.get(i), i**2) for i in range(101, 111): cache.set(i, i**2) cache.get(i) cache.get(i) # 0, 11..20 が削除されているはず for i in range(10, 20): self.assertEquals(cache.get(i), None)
StarcoderdataPython
11209934
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for is_finite."""
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function


@register_make_test_function()
def make_is_finite_tests(options):
  """Make a set of tests to do is_finite."""

  # One parameter set per input shape to exercise 1-D and 4-D tensors.
  test_parameters = [
      {
          "input_shape": [[100], [3, 15, 14, 3]],
      },
  ]

  def build_graph(parameters):
    """Build the graph for the test case."""
    input_tensor = tf.compat.v1.placeholder(
        dtype=tf.float32, name="input", shape=parameters["input_shape"])
    out = tf.math.is_finite(input_tensor)
    return [input_tensor], [out]

  def build_inputs(parameters, sess, inputs, outputs):
    """Build the inputs for the test case."""
    input_values = create_tensor_data(
        np.float32, parameters["input_shape"], min_value=-10, max_value=10)

    # Inject NaN and Inf value so is_finite has non-trivial work; also the
    # float32 extremes, which ARE finite and must map to True.
    def random_index(shape):
      result = []
      for dim in shape:
        result.append(np.random.randint(low=0, high=dim))
      return tuple(result)

    input_values[random_index(input_values.shape)] = np.Inf
    input_values[random_index(input_values.shape)] = -np.Inf
    input_values[random_index(input_values.shape)] = np.NAN
    input_values[random_index(input_values.shape)] = tf.float32.max
    input_values[random_index(input_values.shape)] = tf.float32.min

    return [input_values], sess.run(
        outputs, feed_dict=dict(zip(inputs, [input_values])))

  make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
StarcoderdataPython
3484914
"""Trigger the 'Opsani Promotion' Harness pipeline via the GraphQL API,
passing cpu/mem variable inputs.  Reads API credentials from config.yaml."""
import requests
import json
import yaml

with open("config.yaml", "r") as ymlfile:
    config = yaml.load(ymlfile, Loader=yaml.FullLoader)

# Fix: this assignment was commented out, but run_query() below uses
# api_url -- every call raised NameError.
api_url = f"https://app.harness.io/gateway/api/graphql?accountId={config['harness']['account_id']}"
headers = {"x-api-key": config["harness"]["api_key"], "Content-Type": "application/json"}
application_name = config["harness"]["application_name"]

# Look up the application id by name.
query = """
query {
  applicationByName(name:"%s"){
    id
  }
}
""" % (application_name)


def run_query(query):
    """POST a GraphQL document to the Harness API and return the JSON body.

    Raises on any non-200 response.
    """
    request = requests.post(api_url, json={'query': query}, headers=headers)
    if request.status_code == 200:
        return request.json()
    else:
        raise Exception("Query failed to run by returning code of {}. {}".format(request.status_code, query))


result = run_query(query)
application_id = result['data']['applicationByName']['id']

# Resolve the target pipeline id within that application.
query = """
query {
  pipelineByName(applicationId:"%s",pipelineName:"Opsani Promotion"){
    id
    name
  }
}
""" % (application_id)

result = run_query(query)
print(result)
pipeline_id = result['data']['pipelineByName']['id']

# Kick off the pipeline with fixed cpu/mem inputs.
mutation = """
mutation {
  startExecution(input: {
    applicationId: "%s"
    entityId: "%s"
    executionType: PIPELINE,
    variableInputs: [
      {
        name: "cpu"
        variableValue: {
          type: NAME
          value: "2048"
        }
      },
      {
        name: "mem"
        variableValue: {
          type: NAME
          value: "4096"
        }
      }
    ]
    notes: "Triggered by Opsani Engine",
  }) {
    clientMutationId
    warningMessage
  }
}
""" % (application_id, pipeline_id)

#request = requests.post(api_url, json={'query': mutation}, headers=headers)
result = run_query(mutation)
print(application_id)
print(pipeline_id)
print(result)
StarcoderdataPython
1817687
<filename>tvizbase/__init__.py __all__ = [ "api", "base58", "broadcast", "key", "operations", "storage", "types", "ws_client", ]
StarcoderdataPython
1908579
# -*- coding: utf-8 -*-
"""
Chromium-based rasterizer module.

Prerequisite:

    sudo apt-get install -y chromedriver chromium

"""
from __future__ import absolute_import, unicode_literals
from selenium import webdriver
from PIL import Image
from io import BytesIO
import json
import logging
import math
import os
import re
import sys
import xml.etree.ElementTree as ET
from psd2svg.rasterizer.base_rasterizer import BaseRasterizer

logger = logging.getLogger(__name__)

# CHROMEDRIVER_PATH = "/usr/lib/chromium-browser/chromedriver"
VIEWPORT_SIZE = (16, 16)  # Default size when nothing is specified.


# https://stackoverflow.com/questions/46656622/
def send(driver, cmd, params={}):
    """Send a raw Chrome DevTools command over the chromedriver session.

    NOTE(review): mutable default ``params={}`` -- harmless here since it
    is never mutated, but worth confirming.
    """
    resource = (
        "/session/%s/chromium/send_command_and_get_result"
        % driver.session_id
    )
    url = driver.command_executor._url + resource
    body = json.dumps({'cmd':cmd, 'params': params})
    response = driver.command_executor._request('POST', url, body)
    if response['status']:
        raise Exception(response.get('value'))
    return response.get('value')


class ChromiumRasterizer(BaseRasterizer):
    """Chromium rasterizer: renders a URL (typically an SVG file) to a
    PIL image via a headless Chrome screenshot."""

    def __init__(self, executable_path="chromedriver", dpi=96.0, **kwargs):
        # Headless, GPU-less Chrome with a transparent default background
        # so SVG transparency survives into the screenshot.
        options = webdriver.ChromeOptions()
        options.add_argument("headless")
        options.add_argument("disable-gpu")
        options.add_argument("disable-infobars")
        options.add_argument("no-sandbox")
        options.add_argument("disable-dev-shm-usage")
        options.add_argument("enable-experimental-web-platform-features")
        options.add_argument("default-background-color FFFFFF00")
        self.driver = webdriver.Chrome(
            executable_path=executable_path,
            options=options)
        self.dpi = dpi
        # Force a fully transparent default background via CDP.
        self.driver.execute_cdp_cmd(
            "Emulation.setDefaultBackgroundColorOverride",
            {'color': {'r': 255, 'g': 255, 'b': 255, 'a': 0}}
        )

    def __del__(self):
        # Best-effort cleanup of the browser process.
        self.driver.quit()

    def rasterize(self, url, size=None):
        """Render ``url`` and return the composited PIL image.

        If ``size`` is omitted, the window is sized to the page's SVG
        root element.  NOTE(review): when the page has no sized <svg>,
        _set_windowsize returns None and the size[0] access below raises
        TypeError -- confirm callers always pass sized SVGs.
        """
        # Bare paths get a file:// scheme prepended.
        if not re.match(r"^(\S+)://.*$", url):
            url = "file://" + os.path.abspath(url)
        if size:
            self.driver.set_window_size(*size)
            self.driver.get(url)
        else:
            self.driver.get(url)
            size = self._set_windowsize()
        rasterized = Image.open(BytesIO(self.driver.get_screenshot_as_png()))
        # High-DPI displays can yield a larger screenshot; downscale.
        if rasterized.width != size[0] or rasterized.height != size[1]:
            logger.info("Resizing captured screenshot from {} to {}".format(
                rasterized.size, size))
            rasterized = rasterized.resize(size, Image.NEAREST)
        return self.composite_background(rasterized)

    def _set_windowsize(self):
        """Resize the browser window to the page's <svg> dimensions.

        Returns the (width, height) applied, or None when no usable svg
        element/attributes are found (window left unchanged).
        """
        svg = self.driver.find_element_by_tag_name("svg")
        if not svg:
            return
        width = svg.get_attribute("width")
        height = svg.get_attribute("height")
        if not width or not height:
            return
        logger.debug("Resizing to {}x{}".format(width, height))
        width = self._get_pixels(width)
        height = self._get_pixels(height)
        if width == 0 or height == 0:
            width, height = VIEWPORT_SIZE
        self.driver.set_window_size(int(width), int(height))
        return width, height

    def _get_pixels(self, value):
        """Parse an SVG length like "120" or "90pt" into pixels (int).

        Only the "pt" unit is converted (via self.dpi); any other unit
        suffix is ignored and the bare number is returned.
        """
        match = re.match(r"(?P<value>\d+)(?P<unit>\D+)?", value)
        value = int(match.group("value"))
        unit = match.group("unit")
        if unit == "pt":
            value = math.ceil(value * self.dpi / 72.0)
        return value
StarcoderdataPython
1976841
"""lg-rez / features / Commandes annexes

Commandes diverses qu'on ne savait pas où ranger

"""

import random
import requests
import datetime

from discord.ext import commands
from akinator.async_aki import Akinator

from lgrez.blocs import tools
from lgrez.bdd import Joueur


# NOTE(review): docstrings below are intentionally kept in French --
# discord.py surfaces command docstrings verbatim as user-facing help text.
class Annexe(commands.Cog):
    """Commandes annexes aux usages divers"""

    @commands.command()
    async def roll(self, ctx, *, XdY):
        """Lance un ou plusieurs dés

        Args:
            XdY: dés à lancer + modifieurs, au format ``XdY + XdY + ...
                + Z - Z ...`` avec X le nombre de dés, Y le nombre de
                faces et Z les modifieurs (constants).

        Examples:
            - ``!roll 1d6``           -> lance un dé à 6 faces
            - ``!roll 1d20 +3``       -> lance un dé à 20 faces, ajoute 3
              au résultat
            - ``!roll 1d20 + 2d6 -8`` -> lance un dé 20 plus deux dés 6,
              enlève 8 au résultat
        """
        # Normalize "1d6 + 5 - 2" into the additive terms ["1d6", "5", "-2"]
        dices = XdY.replace(' ', '').replace('-', '+-').split('+')
        r = ""
        s = 0
        try:
            for dice in dices:
                if 'd' in dice:
                    # A dice term: roll <nb> dice with <faces> faces each.
                    nb, faces = dice.split('d', maxsplit=1)
                    for _ in range(int(nb)):
                        v = random.randrange(int(faces)) + 1
                        s += v
                        r += f" + {v}₍{tools.sub_chiffre(int(faces), True)}₎"
                else:
                    # A constant modifier (possibly negative).
                    v = int(dice)
                    s += v
                    r += f" {'-' if v < 0 else '+'} {abs(v)}"
            r += f" = {tools.emoji_chiffre(s, True)}"
        except Exception:
            # BUG FIX: was an f-string with no placeholders and a typo
            # ("reconu") in the user-facing error message.
            raise commands.UserInputError("pattern non reconnu")
        else:
            # r starts with " + " / " - "; strip the leading separator.
            await tools.send_blocs(ctx, r[3:])

    @commands.command(aliases=["cf", "pf"])
    async def coinflip(self, ctx):
        """Renvoie le résultat d'un tirage à Pile ou Face (aléatoire)

        Pile je gagne, face tu perds.
        """
        await ctx.send(random.choice(["Pile", "Face"]))

    @commands.command(aliases=["pong"])
    async def ping(self, ctx):
        """Envoie un ping au bot

        Pong

        Warning:
            Commande en bêta, non couverte par les tests unitaires.
        """
        ts_rec = datetime.datetime.utcnow()
        # Reception time = delay between message creation and our handling.
        delta_rec = ts_rec - ctx.message.created_at

        # Mirror the invocation: "ping" answers "pong" and vice versa,
        # swapping i/o (case-preserving) via a temporary x/X placeholder.
        pingpong = ctx.invoked_with.replace("i", "x").replace("I", "X")
        pingpong = pingpong.replace("o", "i").replace("O", "I")
        pingpong = pingpong.replace("x", "o").replace("X", "O")

        cont = (
            f" Réception : {delta_rec.total_seconds()*1000:4.0f} ms\n"
            f" Latence :   {ctx.bot.latency*1000:4.0f} ms\n"
        )
        mess = await ctx.send(
            f"!{pingpong}\n" + tools.code_bloc(cont + " (...)")
        )

        ts_ret = datetime.datetime.utcnow()
        # Return-trip time: delay until we learn our reply was created.
        delta_ret = ts_ret - mess.created_at
        # Send time: between receiving the command (processing is nearly
        # instantaneous) and creation of the reply, minus the return trip.
        delta_env = ts_ret - ts_rec - delta_ret
        # Total: from the user's !pong message to our knowing the answer
        # was sent.
        delta_tot = delta_rec + delta_ret + delta_env

        await mess.edit(content=f"!{pingpong}\n" + tools.code_bloc(
            cont
            + f" Envoi :     {delta_env.total_seconds()*1000:4.0f} ms\n"
            f" Retour :    {delta_ret.total_seconds()*1000:4.0f} ms\n"
            f"——————————————————————\n"
            f" Total :     {delta_tot.total_seconds()*1000:4.0f} ms"
        ))

    @commands.command()
    async def akinator(self, ctx):
        """J'ai glissé chef

        Implémentation directe de https://pypi.org/project/akinator.py

        Warning:
            Commande en bêta, non couverte par les tests unitaires.
        """
        # Someday put this in embeds using the https://fr.akinator.com/
        # bundles/elokencesite/images/akitudes_670x1096/<akitude>.png crops,
        # <akitude> in ["defi", "serein", "inspiration_legere",
        # "inspiration_forte", "confiant", "mobile", "leger_decouragement",
        # "vrai_decouragement", "deception", "triomphe"]
        await ctx.send(
            "Vous avez demandé à être mis en relation avec "
            + tools.ital("Akinator : Le Génie du web")
            + ".\nVeuillez patienter..."
        )
        async with ctx.typing():
            # Connect and start a French game session.
            aki = Akinator()
            question = await aki.start_game(language="fr")

        # Renamed from `exit`, which shadowed the builtin.
        stopped = False
        while not stopped and aki.progression <= 80:
            mess = await ctx.send(f"({aki.step + 1}) {question}")
            reponse = await tools.wait_for_react_clic(
                mess, {"👍": "yes", "🤷": "idk", "👎": "no", "⏭️": "stop"}
            )
            if reponse == "stop":
                stopped = True
            else:
                async with ctx.typing():
                    question = await aki.answer(reponse)

        async with ctx.typing():
            await aki.win()

        mess = await ctx.send(
            f"Tu penses à {tools.bold(aki.first_guess['name'])} "
            f"({tools.ital(aki.first_guess['description'])}) !\n"
            f"J'ai bon ?\n{aki.first_guess['absolute_picture_path']}"
        )
        if await tools.yes_no(mess):
            await ctx.send(
                "Yay\nhttps://fr.akinator.com/bundles/elokencesite"
                "/images/akitudes_670x1096/triomphe.png"
            )
        else:
            await ctx.send(
                "Oof\nhttps://fr.akinator.com/bundles/elokencesite"
                "/images/akitudes_670x1096/deception.png"
            )

    @commands.command()
    async def xkcd(self, ctx, N):
        """J'ai aussi glissé chef, mais un peu moins

        Args:
            N: numéro du comic

        Warning:
            Commande en bêta, non couverte par les tests unitaires.
        """
        async with ctx.typing():
            # NOTE(review): blocking requests call inside a coroutine, and no
            # timeout -- consider aiohttp / a timeout if this becomes a problem.
            r = requests.get(f"https://xkcd.com/{N}/info.0.json")

        if not r:
            await ctx.send("Paramètre incorrect ou service non accessible.")
            return

        url = r.json().get("img")
        if not url:
            await ctx.send("Paramètre incorrect ou service non accessible.")
            return

        await ctx.send(url)
StarcoderdataPython
1893956
# -*- coding: utf-8 -*-
import re
from datetime import datetime

from city_scrapers.constants import COMMISSION
from city_scrapers.spider import Spider


class ChiSsa25Spider(Spider):
    """Scrapes commission meetings for Chicago SSA #25 (Little Village)."""

    name = 'chi_ssa_25'
    agency_name = 'Chicago Special Service Area #25 Little Village'
    timezone = 'America/Chicago'
    allowed_domains = ['littlevillagechamber.org']
    start_urls = [
        'http://littlevillagechamber.org/{}-meetings-minutes/'.format(datetime.now().year)
    ]

    def parse(self, response):
        """
        `parse` should always `yield` a dict that follows a modified
        OCD event schema (docs/_docs/05-development.md#event-schema).
        """
        # Meeting rows are the contiguous run of <tr> rows whose cells have
        # no bgcolor attribute; rows before/after that run are headers etc.
        in_meeting_rows = False
        for item in response.css('table tr'):
            is_meeting_row = len(item.css('td:not([bgcolor])')) > 0
            if not in_meeting_rows and is_meeting_row:
                in_meeting_rows = True
            if in_meeting_rows and not is_meeting_row:
                # Past the end of the meeting block; stop scanning.
                break
            elif not is_meeting_row:
                continue
            date_str, time_str = self._parse_date_time_str(item)
            data = {
                '_type': 'event',
                'name': self._parse_name(item),
                'event_description': '',
                'classification': COMMISSION,
                'start': self._parse_start(date_str, time_str),
                'end': self._parse_end(date_str, time_str),
                'all_day': False,
                'location': self._parse_location(item),
                'documents': self._parse_documents(item),
                'sources': [{
                    'url': response.url,
                    'note': ''
                }],
            }
            data['status'] = self._generate_status(data)
            data['id'] = self._generate_id(data)
            yield data

    def _parse_name(self, item):
        """Parse or generate event name from the 5th cell (meeting type)."""
        meeting_type = item.css('td::text').extract()[4]
        return 'Commission: {}'.format(meeting_type)

    def _parse_date_time_str(self, item):
        """Pull out date (cell 2) and time (cell 3) strings."""
        date_str = item.css('td::text').extract()[1]
        time_str = item.css('td::text').extract()[2]
        return date_str, time_str

    def _parse_start(self, date_str, time_str):
        """Parse start date and time from e.g. '01/15/2019', '9:00-10:30 AM'."""
        duration_str, am_pm = time_str.split(' ')
        start_time_str = duration_str.split('-')[0]
        return {
            'date': datetime.strptime(date_str, '%m/%d/%Y').date(),
            'time': datetime.strptime('{} {}'.format(start_time_str, am_pm),
                                      '%I:%M %p').time(),
            'note': '',
        }

    def _parse_end(self, date_str, time_str):
        """Parse end date and time (the part after the '-' in the range)."""
        duration_str, am_pm = time_str.split(' ')
        end_time_str = duration_str.split('-')[-1]
        return {
            'date': datetime.strptime(date_str, '%m/%d/%Y').date(),
            'time': datetime.strptime('{} {}'.format(end_time_str, am_pm),
                                      '%I:%M %p').time(),
            'note': '',
        }

    def _parse_location(self, item):
        """
        Parse or generate location. Latitude and longitude can be
        left blank and will be geocoded later.
        """
        loc_text = item.css('td::text').extract()[3]
        # Everything before the first street number is the venue name.
        # BUG FIX: the original called .group() unconditionally, raising
        # AttributeError whenever the cell contained no digits at all.
        name_match = re.search(r'^[^\d]*(?=\d{2,4})', loc_text)
        loc_name = name_match.group() if name_match else ''
        loc_addr = loc_text[len(loc_name):]
        loc_name = loc_name.rstrip('-–, ')
        if 'Chicago' not in loc_addr:
            loc_addr += ' Chicago, IL'
        return {
            'address': loc_addr,
            'name': loc_name,
            'neighborhood': '',
        }

    def _parse_documents(self, item):
        """Parse or generate documents (minutes link, when present)."""
        minutes_link = item.css('td a::attr(href)').extract_first()
        if minutes_link:
            return [{'url': minutes_link, 'note': 'Minutes'}]
        return []
StarcoderdataPython
294167
from flask import Blueprint, render_template, redirect, url_for, request from models import Post, Category, Blogroll from models import cache from flask.ext.login import current_user, login_required, logout_user from sqlalchemy import desc bp = Blueprint('blog', __name__) # pagination POSTS_PER_PAGE = 5 @bp.route('/') @bp.route('/<int:page>') def index(page=1): posts = Post.query.order_by(desc(Post.post_date)).paginate(page, POSTS_PER_PAGE, False) categories = Category.query.all() blogrolls = Blogroll.query.all() return render_template('index.html', title='Home', posts=posts, categories=categories, blogrolls=blogrolls, user=current_user) @bp.route('/categories/<int:category_id>/posts') def get_post_by_category(category_id): page = int(request.args.get('page')) posts = Post.query.filter_by(category_id=category_id).order_by(desc(Post.post_date)).paginate(page, POSTS_PER_PAGE, False) categories = Category.query.all() blogrolls = Blogroll.query.all() return render_template('category_blog.html', title='Home', posts=posts, categories=categories, blogrolls=blogrolls, user=current_user) @cache.cached(timeout=50) @bp.route('/posts/<int:post_id>', methods=['GET']) @bp.route('/categories/<int:category_id>/posts/<int:post_id>', methods=['GET']) def get_post_by_id(post_id, category_id=None): if category_id is not None: post = Post.query.filter_by(post_id=post_id, category_id=category_id).first() else: post = Post.query.filter_by(post_id=post_id).first() categories = Category.query.all() blogrolls = Blogroll.query.all() return render_template('blog.html', title='Blog', post=post, categories=categories, blogrolls=blogrolls, user=current_user) @bp.route('/logout') @login_required def logout(): logout_user() return redirect(url_for('blog.index')) @bp.route('/about') def about(): return render_template('about.html', title='about', user=current_user)
StarcoderdataPython
342559
<gh_stars>0 from django.db import models from django.contrib.auth.models import User class SavedCityModel(models.Model): user = models.ForeignKey(User, on_delete=models.CASCADE) city = models.TextField() modified = models.DateTimeField(auto_now=True) def __str__(self): return self.city
StarcoderdataPython
8181831
<filename>grr/server/grr_response_server/flow_utils.py #!/usr/bin/env python """Utils for flow related tasks.""" from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import logging import time from grr_response_server import aff4 from grr_response_server import flow from grr_response_server.aff4_objects import aff4_grr # How long to wait, by default, for a flow to finish. DEFAULT_TIMEOUT = 650 def GetUserInfo(knowledge_base, user): # TODO: This docstring cannot be a raw literal because there are # issues with raw unicode literals on Python 2. Once support for Python 2 is # dropped, it can be made raw again. # pylint: disable=g-docstring-has-escape """Get a User protobuf for a specific user. Args: knowledge_base: An rdf_client.KnowledgeBase object. user: Username as string. May contain domain like DOMAIN\\user. Returns: A User rdfvalue or None """ # pylint: enable=g-docstring-has-escape if "\\" in user: domain, user = user.split("\\", 1) users = [ u for u in knowledge_base.users if u.username == user and u.userdomain == domain ] else: users = [u for u in knowledge_base.users if u.username == user] if not users: return else: return users[0] def UpdateVFSFileAndWait(client_id, vfs_file_urn, token=None, timeout=DEFAULT_TIMEOUT): """Waits for a file to be updated on the client. Calls the UpdateVFSFile flow on a urn and waits for both it and the ListDirectory flow it calls to finish. Note that this is needed because any flows UpdateVFSFile calls via VFS Update methods will not become child flows of UpdateVFSFile, and therefore waiting for UpdateVFSFile to complete is not enough. Args: client_id: Which client to run the flow on. vfs_file_urn: Path to VFSFile to update. token: The datastore access token. timeout: How long to wait for a flow to finish, maximum. """ # Wait for the UpdateVFSFile flow. 
update_flow_urn = StartFlowAndWait( client_id, token=token, timeout=timeout, flow_name=aff4_grr.UpdateVFSFile.__name__, vfs_file_urn=vfs_file_urn) update_flow_obj = aff4.FACTORY.Open( update_flow_urn, token=token, aff4_type=flow.GRRFlow) # Get the child flow so we can wait for it too. sub_flow_urn = update_flow_obj.state.get_file_flow_urn # If there was no subflow, no need to wait for it. if not sub_flow_urn: return WaitForFlow(sub_flow_urn, token=token, timeout=timeout) def WaitForFlow(flow_urn, token=None, timeout=DEFAULT_TIMEOUT, max_sleep_time=1, min_sleep_time=0.2, dampening_multiplier=0.9): """Waits for a flow to finish, polling while we wait. Args: flow_urn: The urn of the flow to wait for. token: The datastore access token. timeout: How long to wait before giving up, usually because the client has gone away. max_sleep_time: The initial and longest time to wait in between polls. min_sleep_time: The final and shortest time to wait in between polls. dampening_multiplier: The current sleep time is multiplied by this number on each iteration. Controls how fast the polling reaches its minimum sleep time. You probably want this to be less than 1, unless you want to wait an increasing amount of time in between flows. Raises: IOError: If we time out while waiting for the client. """ start_time = time.time() sleep_time = max_sleep_time while True: # Reopen the AFF4Object to check if its status has changed, and also make # sure it's a flow. with aff4.FACTORY.Open( flow_urn, token=token, aff4_type=flow.GRRFlow) as flow_obj: # Stop if the flow is done or has timed out. if time.time() - start_time > timeout: logging.warn("Timed out after waiting %ss for %s!", timeout, flow_obj) raise IOError("Timed out trying to access client! Is it connected?") if not flow_obj.GetRunner().IsRunning(): break # Decrease the time we sleep each iteration. 
sleep_time = max(sleep_time * dampening_multiplier, min_sleep_time) time.sleep(sleep_time) logging.debug("Waiting for %s, sleeping for %.3fs", flow_obj, sleep_time) def StartFlowAndWait(client_id, token=None, timeout=DEFAULT_TIMEOUT, **flow_args): """Runs a flow and waits for it to finish. Args: client_id: The client id of the client to run on. token: The datastore access token. timeout: How long to wait for a flow to complete, maximum. **flow_args: Pass through to flow. Returns: The urn of the flow that was run. """ flow_urn = flow.StartAFF4Flow( client_id=client_id, token=token, sync=True, **flow_args) WaitForFlow(flow_urn, token=token, timeout=timeout) return flow_urn # TODO(user): Deprecate this function once there is an alternative for # CacheGrep. def InterpolatePath(path, knowledge_base, users=None, path_args=None, depth=0): """Take a string as a path on a client and interpolate with client data. Args: path: A single string/unicode to be interpolated. knowledge_base: An rdf_client.KnowledgeBase object. users: A list of string usernames, or None. path_args: A dict of additional args to use in interpolation. These take precedence over any system provided variables. depth: A counter for recursion depth. Returns: A single string if users is None, otherwise a list of strings. """ sys_formatters = { # TODO(user): Collect this during discovery from the registry. # HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\ # Value: SystemRoot "systemroot": "c:\\Windows" } # Override any system formatters with path_args. if path_args: sys_formatters.update(path_args) if users: results = [] for user in users: # Extract and interpolate user specific formatters. user = GetUserInfo(knowledge_base, user) if user: formatters = dict((x.name, y) for x, y in user.ListSetFields()) formatters.update(sys_formatters) try: results.append(path.format(**formatters)) except KeyError: pass # We may be missing values for some users. 
return results else: try: path = path.format(**sys_formatters) except KeyError: logging.warn("Failed path interpolation on %s", path) return "" if "{" in path and depth < 10: path = InterpolatePath( path, knowledge_base=knowledge_base, users=users, path_args=path_args, depth=depth + 1) return path
StarcoderdataPython
3321791
<gh_stars>0 # from coding problem # Trying brute force on a sudoku board will take a really long time: we will need to try every permutation of the numbers 1-9 for all the non-empty squares. # Let's try using backtracking to solve this problem instead. What we can do is try filling each empty cell one by one, and backtrack once we hit an invalid state. # To do this, we'll need an valid_so_far function that tests the board for its validity by checking all the rows, columns, and squares. Then we'll backtrack as usual: X = None # Placeholder empty value def sudoku(board): if is_complete(board): return board r, c = find_first_empty(board) # set r, c to a val from 1 to 9 for i in range(1, 10): board[r][c] = i if valid_so_far(board): result = sudoku(board) if is_complete(result): return result board[r][c] = X return board def is_complete(board): return all(all(val is not X for val in row) for row in board) def find_first_empty(board): for i, row in enumerate(board): for j, val in enumerate(row): if val == X: return i, j return False def valid_so_far(board): if not rows_valid(board): return False if not cols_valid(board): return False if not blocks_valid(board): return False return True def rows_valid(board): for row in board: if duplicates(row): return False return True def cols_valid(board): for j in range(len(board[0])): if duplicates([board[i][j] for i in range(len(board))]): return False return True def blocks_valid(board): for i in range(0, 9, 3): for j in range(0, 9, 3): block = [] for k in range(3): for l in range(3): block.append(board[i + k][j + l]) if duplicates(block): return False return True def duplicates(arr): c = {} for val in arr: if val in c and val is not X: return True c[val] = True return False
StarcoderdataPython
397373
''' Created on 22.01.2014 @author: CaiusC ''' import re import csv import subprocess import shutil import os import sys class TGF(object): ''' This class contains every Hardware component and is responsible for creating the XPS project. ''' def __init__(self, tgf_file, component_path, base_design): ''' Constructor ''' self.Library = Library(component_path) self.BaseDesign = base_design self.Components = [] print 'Info: Parsing tfl: ', tgf_file with open(tgf_file, "r") as f: for line in f: if ( self.isHardwareComponent(line) ): comp = HardwareComponent(line, self.Library) self.Components.append(comp) print 'Info: Found ', len(self.Components) , ' Hardware components' def printComponents(self): ''' Prints every hardware component which has been found inside of the tgf file. ''' print 'Info: Component count: ', self.getComponentCount() print 'Info: Printing Hardware Components: ' for c in self.Components: c.printStatus() def getUniqueComponentCount(self): ''' Returns the number of distinct hardware components. ''' components = [] for c in self.Components: components.append(c.getName()) unique = set(list) return len(unique) def getUniqueComponents(self): ''' Returns a list of unique hardware components. ''' components = [] for c in self.Components: components.append(c.getName()) unique = set(list) return unique def getComponentCount(self): ''' Returns the number of Hardware Components from the specified tgf file ''' return len(self.Components) def isHardwareComponent(self, line): ''' Returns if the specified line from the tgf file accords to a hardware component ''' words = line.split(' ') for w in words: if not (w.find("hw") == -1): return True return False def createSetupFile(self): ''' Creates the setup_zynq file which is needed by the reconos mhs creator. 
''' path = "project/setup_zynq" # Prepare content base_design = 'base_design=' + self.BaseDesign num_static_hwts = 'num_static_hwts=' + str(self.getComponentCount()) implementations = 'static_hwts="' used_components = set() first = True for c in self.Components: if(c.getType() not in used_components): used_components.add(c.getType()) if c.getImplementation() is None: print 'Error: Could not find ' + c.Id + ' inside of the components.csv' sys.exit(1) if first: implementations = implementations + c.getImplementation() + '#' + str(c.getComponentCount()) first = False else: implementations = implementations + ' ' + c.getImplementation() + '#' + str(c.getComponentCount()) implementations = implementations + '"' # Check preconditions if not (os.path.exists("project")): os.makedirs("project") if os.path.isfile(path): os.remove(path) # Write file with open(path, 'a') as f: f.write(base_design + '\n') f.write(num_static_hwts+ '\n') f.write(implementations + '\n') def linkComponents(self): ''' Every pcore has to be linked into the project folder ''' # Delegating the linking to the hardware components for c in self.Components: c.export(self.Library) def reconosSetup(self): # Executes the reconos_setup script subprocess.call(['reconos/reconos_setup.sh', 'project/setup_zynq']) def directScript(self): ''' Can be used to skip the reconos_setup.sh script. 
But actually it is not fully implemented :/ ''' #mhsaddhwts.py <architecture> <system.mhs> <num_static_hwts> <num_reconf_regions> <hwt0_directory>[#<count>] <hwt1_directory>[#<count>] arch = "zynq" mhs_path = "reconos/basedesign_audio_reconos.mhs" num_static_hwts = str(self.getComponentCount()) num_reconf = 0 implementations = '' used_components = set() for c in self.Components: if(c.getType() not in used_components): used_components.add(c.getType()) if c.getImplementation() is None: print 'Error: Could not find ' + c.Id + ' inside of the components.csv' sys.exit(1) implementations = implementations + c.getImplementation() + '#' + str(c.getComponentCount())+ ' ' implementations = implementations + '"' subprocess.call(['reconos/mhsaddhwts.py', arch, mhs_path, num_static_hwts, num_reconf, implementations]) def getLibrary(self): return self.Library class HardwareComponent(object): ''' Represents a Component which is used by the current Patch ''' # Class variables c_dict = dict() linked_set = set() def __init__(self, line, library): ''' Constructor ''' wordList = re.sub("[^\w]", " ", line).split() self.Id = wordList[0] self.Type = wordList[1] self.Slot = wordList[3] self.IsLinked = False # Every component has to be linked one time into the project folder self.Implementation = library.getImplementation(self.Type) if HardwareComponent.c_dict.has_key(self.Type): count = HardwareComponent.c_dict[self.Type] count = count + 1 HardwareComponent.c_dict.update({self.Type : count}) else: HardwareComponent.c_dict.update({self.Type : 1}) def getComponentCount(self): if HardwareComponent.c_dict.has_key(self.Type): return HardwareComponent.c_dict[self.Type] else: print 'Error: Did not find ', self.Type, ' in dictionary. 
This should not happen bro :(' return 0 def printStatus(self): print 'Id: ', self.Id print 'Type: ', self.Type print 'Implementation', self.Implementation print 'Slot: ', self.Slot print '' def export(self, library): ''' Every distinct hardware pcore has to be linked exactly one time into the project folder. That's why we need the Class variable "linked_set" ''' if self.Type in HardwareComponent.linked_set: return else: HardwareComponent.linked_set.add(self.Type) src = library.getPath(self.Type) dst = os.path.join('project/', self.Implementation) if (os.path.isdir(dst)): os.unlink(dst) os.symlink(src, dst) self.IsLinked = True def getType(self): return self.Type def getImplementation(self): return self.Implementation def getSlot(self): return self.Slot class Library(object): ''' This class contains every available Hardware component. ''' # Initializes the Library by reading the components csv File def __init__(self, component_path): self.ComponentPath = component_path self.Components = [] with open('components.csv', 'rb') as f: reader = csv.reader(f) for row in reader: self.Components.append(LibraryComponent(row[0], row[1], self.ComponentPath)) def getComponentList(self): return self.Components # Returns the path of the given component def getPath(self, abbreviation): for c in self.Components: if(c.getAbbreviation() == abbreviation): return c.getPath() print "Warning: Did not find component: ", c.getAbbreviation return None def getImplementation(self, abb): for c in self.Components: if abb == c.getAbbreviation(): return c.getImplementation() class LibraryComponent(object): ''' This class represents a single Hardware Component ''' def __init__(self, abbreviation, implementation, componentPath): self.Abbreviation = abbreviation self.Implementation = implementation if componentPath is None: componentPath = os.getenv("SOUNDGATES") componentPath = os.path.join(componentPath, 'hardware/hwt/pcores/') self.Path = os.path.join(componentPath, self.getImplementation()) 
self.Count = 0 def getAbbreviation(self): return self.Abbreviation def getImplementation(self): return self.Implementation def getPath(self): return self.Path def increaseCount(self): self.Count = self.Count + 1 def getCount(self): return self.Count
StarcoderdataPython
12827891
# encoding=utf-8 __author__ = 'Jonny' __location__ = '西安' __date__ = '2018-03-25' from scrapy import cmdline cmdline.execute('scrapy crawl douban'.split(''))
StarcoderdataPython
185352
<reponame>eabyshev/appscale<gh_stars>1-10 """ A test script to start Zookeeper. """ import logging import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib")) import monit_interface def run(): """ Starts up cassandra. """ logging.warning("Starting Zookeeper.") monit_interface.start('zookeeper-9999', is_group=False) logging.warning("Done!") if __name__ == '__main__': run()
StarcoderdataPython
3276826
# global import abc # local from ivy_builder.specs.spec import Spec from ivy_builder.specs.spec import locals_to_kwargs from ivy_builder.specs.dataset_dirs import DatasetDirs class DatasetSpec(Spec, abc.ABC): def __init__(self, dirs: DatasetDirs, **kwargs) -> None: """ base class for storing general properties of the dataset which is saved on disk """ kw = locals_to_kwargs(locals()) super().__init__(dirs=dirs, **kwargs) self._kwargs = kw
StarcoderdataPython