text
string
size
int64
token_count
int64
from rkstiff.grids import construct_x_kx_rfft, construct_x_kx_fft from rkstiff.grids import construct_x_Dx_cheb from rkstiff.derivatives import dx_rfft, dx_fft import numpy as np def test_periodic_dx_rfft(): N = 100 a, b = 0, 2*np.pi x,kx = construct_x_kx_rfft(N,a,b) u = np.sin(x) ux_exact = np.cos(x) ux_approx = dx_rfft(kx,u) assert np.allclose(ux_exact,ux_approx) def test_zeroboundaries_dx_rfft(): N = 400 a, b = -30., 30. x,kx = construct_x_kx_rfft(N,a,b) u = 1./np.cosh(x) ux_exact = -np.tanh(x)/np.cosh(x) ux_approx = dx_rfft(kx,u) assert np.allclose(ux_exact,ux_approx) def test_gauss_dx_rfft(): N = 128 a,b = -10,10 x,kx = construct_x_kx_rfft(N,a,b) u = np.exp(-x**2) ux_exact = -2*x*np.exp(-x**2) ux_approx = dx_rfft(kx,u) assert np.allclose(ux_exact,ux_approx) def test_manydx_rfft(): N = 128 a, b = 0, 2*np.pi x,kx = construct_x_kx_rfft(N,a,b) u = np.sin(x) ux_exact = np.sin(x) ux_approx = u.copy() for _ in range(4): ux_approx = dx_rfft(kx,ux_approx) rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact) assert rel_err < 1e-6 ux_approx = u.copy() ux_approx = dx_rfft(kx,ux_approx,8) rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact) assert rel_err < 0.1 def test_manydx_fft(): N = 128 a, b = 0, 2*np.pi x,kx = construct_x_kx_fft(N,a,b) u = np.sin(x) ux_exact = np.sin(x) ux_approx = u.copy() for _ in range(4): ux_approx = dx_fft(kx,ux_approx) rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact) assert rel_err < 1e-6 ux_approx = u.copy() ux_approx = dx_fft(kx,ux_approx,8) rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact) assert rel_err < 0.1 def test_periodic_dx_fft(): N = 100 a, b = 0, 2*np.pi x,kx = construct_x_kx_fft(N,a,b) u = np.sin(x) ux_exact = np.cos(x) ux_approx = dx_fft(kx,u) assert np.allclose(ux_exact,ux_approx) def test_zeroboundaries_dx_fft(): N = 400 a, b = -30., 30. 
x,kx = construct_x_kx_fft(N,a,b) u = 1./np.cosh(x) ux_exact = -np.tanh(x)/np.cosh(x) ux_approx = dx_fft(kx,u) assert np.allclose(ux_exact,ux_approx) def test_gauss_dx_fft(): N = 128 a,b = -10,10 x,kx = construct_x_kx_fft(N,a,b) u = np.exp(-x**2) ux_exact = -2*x*np.exp(-x**2) ux_approx = dx_fft(kx,u) assert np.allclose(ux_exact,ux_approx) def test_exp_trig_x_Dx_cheb(): # standard interval [-1,1] N = 20; a = -1; b = 1 x,Dx = construct_x_Dx_cheb(N,-1,1) u = np.exp(x)*np.sin(5*x) Du_exact = np.exp(x)*(np.sin(5*x)+5*np.cos(5*x)) Du_approx = Dx.dot(u) error = Du_exact - Du_approx assert np.linalg.norm(error)/np.linalg.norm(Du_exact) < 1e-8 # non-standard interval [-3,3] N = 30; a = -3; b = 3 x,Dx = construct_x_Dx_cheb(N,a,b) u = np.exp(x)*np.sin(5*x) Du_exact = np.exp(x)*(np.sin(5*x)+5*np.cos(5*x)) Du_approx = Dx.dot(u) error = Du_exact - Du_approx assert np.linalg.norm(error)/np.linalg.norm(Du_exact) < 1e-7
3,179
1,686
#!/usr/bin/env python import os from jinja2 import Environment, FileSystemLoader PATH = os.path.dirname(os.path.abspath(__file__)) env = Environment(loader=FileSystemLoader(os.path.join(PATH, 'templates'))) mac_addr = "01:23:45:67:89:01" PXE_ROOT_DIR = "/data/tftpboot" pxe_options = { 'os_distribution': 'centos7', 'path_to_vmlinuz': os.path.join(PXE_ROOT_DIR, 'node', mac_addr, 'vmlinuz'), 'path_to_initrd': os.path.join(PXE_ROOT_DIR, 'node', mac_addr, 'initrd.img'), 'path_to_kickstart_cfg': os.path.join(PXE_ROOT_DIR, 'node', mac_addr, 'ks.cfg'), 'pxe_server_ip': '128.0.0.1', 'protocol': 'nfs' } def build_pxe_config(ctxt, template): """Build the PXE boot configuration file. This method builds the PXE boot configuration file by rendering the template with the given parameters. :param pxe_options: A dict of values to set on the configuration file. :param template: The PXE configuration template. :param root_tag: Root tag used in the PXE config file. :param disk_ident_tag: Disk identifier tag used in the PXE config file. :returns: A formatted string with the file content. """ tmpl_path, tmpl_file = os.path.split(template) env = Environment(loader=FileSystemLoader(tmpl_path)) template = env.get_template(tmpl_file) return template.render(ctxt) def get_pxe_mac_path(mac, delimiter=None): """Convert a MAC address into a PXE config file name. :param mac: A MAC address string in the format xx:xx:xx:xx:xx:xx. :param delimiter: The MAC address delimiter. Defaults to dash ('-'). :returns: the path to the config file. 
""" if delimiter is None: delimiter = '-' mac_file_name = mac.replace(':', delimiter).lower() mac_file_name = '01-' + mac_file_name return os.path.join(PXE_ROOT_DIR, 'pxelinux.cfg', mac_file_name) def get_teml_path(): """ """ return os.path.join(PXE_ROOT_DIR, 'template', '01-xx-xx-xx-xx-xx-xx.template') #def render_template(template_filename, context): # return env.get_template(template_filename).render(context) def create_pxe_config_file(pxe_options): # fname = "output.html" cname = get_pxe_mac_path(mac_addr) tname = get_teml_path() context = { 'pxe_opts': pxe_options } with open(cname, 'w') as f: config = build_pxe_config(context, tname) f.write(config) ######################################## if __name__ == "__main__": create_pxe_config_file(pxe_options)
2,511
903
from django.contrib import admin from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin from .models import Student, User admin.site.site_header = 'BIA SCHOOL SYSTEM' class UserAdmin(DjangoUserAdmin): model = User fieldsets = DjangoUserAdmin.fieldsets + ((None, { 'fields': ('role', 'middle_name', 'birth_date')}),) list_display = ('role', 'last_name', 'first_name', 'middle_name', 'birth_date') def save_model(self, request, obj, form, change): if request.user.is_teacher: obj.is_staff = True obj.save() admin.site.register(User, UserAdmin) class StudentUser(UserAdmin): model = Student fieldsets = UserAdmin.fieldsets + ((None, { 'fields': ('entry_year', 'klass')}),) list_display = ('role', 'last_name', 'first_name', 'middle_name', 'birth_date', 'entry_year', 'klass') search_fields = ('last_name', 'first_name', 'middle_name', 'entry_year', 'klass') admin.site.register(Student, StudentUser)
1,101
343
# coding: utf-8 """Converter module.""" import util THEME = 'theme' BACKGROUND = 'background' class ThemeConverter(object): """Object that converts themes using given map file.""" def __init__(self, theme_map, transp_map): """Constructor.""" self.theme_map = theme_map self.transp_map = transp_map def convert(self, source_theme): """Create object that describes desktop theme. Arguments: source_theme - theme object """ target_theme = util.get_empty_theme() for desktop_key, att_key in self.theme_map.items(): if att_key not in source_theme[THEME]: # print('Missing {0} key in source theme'.format(att_key)) continue color = source_theme[THEME][att_key] if desktop_key in self.transp_map: alpha = self.transp_map[desktop_key] color = util.apply_transparency(color, alpha) target_theme[THEME][desktop_key] = color target_theme[BACKGROUND] = source_theme[BACKGROUND] return target_theme
1,111
330
import pickle import platform import os import pytest import localpaths from . import serve from .serve import Config @pytest.mark.skipif(platform.uname()[0] == "Windows", reason="Expected contents are platform-dependent") def test_make_hosts_file_nix(): c = Config(browser_host="foo.bar", alternate_hosts={"alt": "foo2.bar"}) hosts = serve.make_hosts_file(c, "192.168.42.42") lines = hosts.split("\n") assert set(lines) == {"", "192.168.42.42\tfoo.bar", "192.168.42.42\tfoo2.bar", "192.168.42.42\twww.foo.bar", "192.168.42.42\twww.foo2.bar", "192.168.42.42\twww1.foo.bar", "192.168.42.42\twww1.foo2.bar", "192.168.42.42\twww2.foo.bar", "192.168.42.42\twww2.foo2.bar", "192.168.42.42\txn--lve-6lad.foo.bar", "192.168.42.42\txn--lve-6lad.foo2.bar", "192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo.bar", "192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo2.bar"} assert lines[-1] == "" @pytest.mark.skipif(platform.uname()[0] != "Windows", reason="Expected contents are platform-dependent") def test_make_hosts_file_windows(): c = Config(browser_host="foo.bar", alternate_hosts={"alt": "foo2.bar"}) hosts = serve.make_hosts_file(c, "192.168.42.42") lines = hosts.split("\n") assert set(lines) == {"", "0.0.0.0\tnonexistent.foo.bar", "0.0.0.0\tnonexistent.foo2.bar", "192.168.42.42\tfoo.bar", "192.168.42.42\tfoo2.bar", "192.168.42.42\twww.foo.bar", "192.168.42.42\twww.foo2.bar", "192.168.42.42\twww1.foo.bar", "192.168.42.42\twww1.foo2.bar", "192.168.42.42\twww2.foo.bar", "192.168.42.42\twww2.foo2.bar", "192.168.42.42\txn--lve-6lad.foo.bar", "192.168.42.42\txn--lve-6lad.foo2.bar", "192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo.bar", "192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo2.bar"} assert lines[-1] == "" def test_ws_doc_root_default(): c = Config() assert c.ws_doc_root == os.path.join(localpaths.repo_root, "websockets", "handlers") def test_init_ws_doc_root(): c = Config(ws_doc_root="/") assert c.doc_root == localpaths.repo_root # check this hasn't changed assert c._ws_doc_root == "/" assert 
c.ws_doc_root == "/" def test_set_ws_doc_root(): c = Config() c.ws_doc_root = "/" assert c.doc_root == localpaths.repo_root # check this hasn't changed assert c._ws_doc_root == "/" assert c.ws_doc_root == "/" def test_pickle(): # Ensure that the config object can be pickled pickle.dumps(Config())
3,109
1,291
# -*-coding: utf-8 -*- import os import re from sklearn.feature_extraction.text import CountVectorizer import sys import numpy as np from sklearn.model_selection import train_test_split from sklearn.feature_extraction.text import TfidfTransformer import commands import tflearn import pickle max_features=10000 max_document_length=100 min_opcode_count=2 webshell_dir="../Datasets/dataset_webshell/webshell/PHP/" whitefile_dir="../Datasets/dataset_webshell/normal/php/" white_count=0 black_count=0 php_bin="/usr/bin/php" def load_files_re(dir): files_list = [] g = os.walk(dir) for path, d, filelist in g: #print d; for filename in filelist: #print os.path.join(path, filename) if filename.endswith('.php') or filename.endswith('.txt'): fulepath = os.path.join(path, filename) print "Load %s" % fulepath t = load_file(fulepath) print len(t) files_list.append(t) return files_list def load_files_opcode_re(dir): global min_opcode_count files_list = [] g = os.walk(dir) for path, d, filelist in g: #print d; for filename in filelist: #print os.path.join(path, filename) if filename.endswith('.php') : fulepath = os.path.join(path, filename) print "Load %s opcode" % fulepath t = load_file_opcode(fulepath) print len(t) if len(t) > min_opcode_count: files_list.append(t) else: print "Load %s opcode failed" % fulepath #print "Add opcode %s" % t return files_list def load_file(file_path): t="" with open(file_path) as f: for line in f: line=line.strip('\n') t+=line return t def load_file_opcode(file_path): global php_bin t="" cmd=php_bin+" -dvld.active=1 -dvld.execute=0 "+file_path #print "exec "+cmd status,output=commands.getstatusoutput(cmd) t=output #print t tokens=re.findall(r'\s(\b[A-Z_]+\b)\s',output) t=" ".join(tokens) print "opcode count %d" % len(t) return t def load_files(path): files_list=[] for r, d, files in os.walk(path): for file in files: if file.endswith('.php'): file_path=path+file print "Load %s" % file_path t=load_file(file_path) files_list.append(t) return files_list #php 
N-Gram + TF-IDF def get_feature_by_ngram(): global white_count global black_count global max_features print "max_features=%d" % max_features x=[] y=[] webshell_files_list = load_files_re(webshell_dir) y1=[1]*len(webshell_files_list) black_count=len(webshell_files_list) wp_files_list =load_files_re(whitefile_dir) y2=[0]*len(wp_files_list) white_count=len(wp_files_list) x=webshell_files_list+wp_files_list y=y1+y2 CV = CountVectorizer(ngram_range=(2, 2), decode_error="ignore",max_features=max_features, token_pattern = r'\b\w+\b',min_df=1, max_df=1.0) x=CV.fit_transform(x).toarray() transformer = TfidfTransformer(smooth_idf=False) #x_tfidf = transformer.fit_transform(x) #x = x_tfidf.toarray() return x,y #opcode N-Gram def get_feature_by_opcode_ngram(): global white_count global black_count global max_features print "max_features=%d" % max_features x=[] y=[] data_file = "./Model/Data/opcode_ngram_tf.data" if os.path.exists(data_file): f = open(data_file, 'rb') x, y = pickle.loads(f.read()) f.close() return x, y webshell_files_list = load_files_opcode_re(webshell_dir) y1=[1]*len(webshell_files_list) black_count=len(webshell_files_list) wp_files_list =load_files_opcode_re(whitefile_dir) y2=[0]*len(wp_files_list) white_count=len(wp_files_list) x=webshell_files_list+wp_files_list y=y1+y2 CV = CountVectorizer(ngram_range=(2, 4), decode_error="ignore",max_features=max_features, token_pattern = r'\b\w+\b',min_df=1, max_df=1.0) x=CV.fit_transform(x).toarray() transformer = TfidfTransformer(smooth_idf=False) #x_tfidf = transformer.fit_transform(x) #x = x_tfidf.toarray() data = pickle.dumps((x, y)) with open(data_file, 'w') as f: f.write(data) f.close() f.close() return x,y #opcode词汇表 def get_feature_by_opcode_vt(): global white_count global black_count x=[] y=[] data_file = "./Model/Data/opcode_vt.data" if os.path.exists(data_file): f = open(data_file, 'rb') x, y = pickle.loads(f.read()) f.close() else: webshell_files_list = load_files_opcode_re(webshell_dir) 
y1=[1]*len(webshell_files_list) black_count=len(webshell_files_list) wp_files_list =load_files_opcode_re(whitefile_dir) y2=[0]*len(wp_files_list) white_count=len(wp_files_list) x=webshell_files_list+wp_files_list #print x y=y1+y2 vp=tflearn.data_utils.VocabularyProcessor(max_document_length=100, min_frequency=0, vocabulary=None, tokenizer_fn=None) x=vp.fit_transform(x, unused_y=None) x=np.array(list(x)) f = open(data_file, 'wb') data = pickle.dumps((x, y)) f.write(data) f.close() #print x #print y return x,y #php词汇表 def get_feature_by_vt(): global white_count global black_count x=[] y=[] webshell_files_list = load_files_re(webshell_dir) y1=[1]*len(webshell_files_list) black_count=len(webshell_files_list) wp_files_list =load_files_re(whitefile_dir) y2=[0]*len(wp_files_list) white_count=len(wp_files_list) x=webshell_files_list+wp_files_list y=y1+y2 vp=tflearn.data_utils.VocabularyProcessor(max_document_length=100, min_frequency=0, vocabulary=None, tokenizer_fn=None) x=vp.fit_transform(x, unused_y=None) x=np.array(list(x)) return x,y #php序列 def get_feature_by_php(): global white_count global black_count global max_features global webshell_dir global whitefile_dir print "max_features=%d webshell_dir=%s whitefile_dir=%s" % (max_features,webshell_dir,whitefile_dir) x=[] y=[] webshell_files_list = load_files_re(webshell_dir) y1=[1]*len(webshell_files_list) black_count=len(webshell_files_list) wp_files_list =load_files_re(whitefile_dir) y2=[0]*len(wp_files_list) white_count=len(wp_files_list) x=webshell_files_list+wp_files_list #print x y=y1+y2 CV = CountVectorizer(ngram_range=(3000, 3000), decode_error="ignore",max_features=max_features, token_pattern = r'\b\w+\b',min_df=1, max_df=1.0) x=CV.fit_transform(x).toarray() return x,y #opcode序列 def get_feature_by_opcode(): global white_count global black_count global max_features global webshell_dir global whitefile_dir print "max_features=%d webshell_dir=%s whitefile_dir=%s" % (max_features,webshell_dir,whitefile_dir) x=[] y=[] 
data_file = "./Model/Data/opcodelist.data" if os.path.exists(data_file): f = open(data_file, 'rb') x, y = pickle.loads(f.read()) f.close() return x, y webshell_files_list = load_files_opcode_re(webshell_dir) y1=[1]*len(webshell_files_list) black_count=len(webshell_files_list) wp_files_list =load_files_opcode_re(whitefile_dir) y2=[0]*len(wp_files_list) white_count=len(wp_files_list) x=webshell_files_list+wp_files_list #print x y=y1+y2 CV = CountVectorizer(ngram_range=(3000, 3000), decode_error="ignore",max_features=max_features, token_pattern = r'\b\w+\b',min_df=1, max_df=1.0) x=CV.fit_transform(x).toarray() f = open(data_file, 'wb') data = pickle.dumps((x, y)) f.write(data) f.close() return x,y
8,440
3,096
import numpy as np from DREAM.Settings.Equations.EquationException import EquationException from . import DistributionFunction as DistFunc from . DistributionFunction import DistributionFunction from .. TransportSettings import TransportSettings INIT_FORWARD = 1 INIT_XI_NEGATIVE = 2 INIT_XI_POSITIVE = 3 INIT_ISOTROPIC = 4 class RunawayElectronDistribution(DistributionFunction): def __init__(self, settings, fre=[0.0], initr=[0.0], initp=[0.0], initxi=[0.0], initppar=None, initpperp=None, rn0=None, n0=None, rT0=None, T0=None, bc=DistFunc.BC_PHI_CONST, ad_int_r=DistFunc.AD_INTERP_CENTRED, ad_int_p1=DistFunc.AD_INTERP_CENTRED, ad_int_p2=DistFunc.AD_INTERP_CENTRED, ad_jac_r=DistFunc.AD_INTERP_JACOBIAN_LINEAR, ad_jac_p1=DistFunc.AD_INTERP_JACOBIAN_LINEAR, ad_jac_p2=DistFunc.AD_INTERP_JACOBIAN_LINEAR, fluxlimiterdamping=1.0): """ Constructor. """ super().__init__(settings=settings, name='f_re', grid=settings.runawaygrid, f=fre, initr=initr, initp=initp, initxi=initxi, initppar=initppar, initpperp=initpperp, rn0=rn0, n0=n0, rT0=rT0, T0=T0, bc=bc, ad_int_r=ad_int_r, ad_int_p1=ad_int_p1, ad_int_p2=ad_int_p2, fluxlimiterdamping=fluxlimiterdamping) self.inittype = INIT_FORWARD def setInitType(self, inittype): """ Specifies how the runaway electron distribution function f_re should be initialized from the runaway density n_re. :param int inittype: Flag indicating how to initialize f_re. """ self.inittype = int(inittype) def fromdict(self, data): """ Load data for this object from the given dictionary. """ super().fromdict(data) def scal(v): if type(v) == np.ndarray: return v[0] else: return v if 'inittype' in data: self.inittype = int(scal(data['inittype'])) def todict(self): """ Returns a Python dictionary containing all settings of this RunawayElectronDistribution object. """ d = super().todict() d['inittype'] = self.inittype return d
2,252
798
# Copyright (c) 2015 Adi Roiban. # See LICENSE for details. """ Tests for the assertion helpers. """ from __future__ import print_function from __future__ import division from __future__ import absolute_import import os from chevah.compat.exceptions import CompatError from chevah.compat.testing import ChevahTestCase, mk class TestAssertionMixin(ChevahTestCase): """ Test for assertions. ChevahTestCase is inheriting the assertion mixin and we can test it. """ def check_assertWorkingFolderIsClean(self, content): """ Common tests for assertWorkingFolderIsClean. """ with self.assertRaises(AssertionError) as context: self.assertWorkingFolderIsClean() message = context.exception.args[0].decode('utf-8') for member in content: self.assertContains(member, message) # Calling it again will not raise any error since the folder is clean. self.assertWorkingFolderIsClean() def test_assertTempIsClean_clean_temp(self): """ No error is raised if temp folder is clean. """ self.assertTempIsClean() def test_assertTempIsClean_dirty(self): """ If temp is not clean an error is raised and then temp folders is cleaned. """ temp_segments = mk.fs.createFileInTemp() with self.assertRaises(AssertionError) as context: self.assertTempIsClean() message = context.exception.args[0].decode('utf-8') self.assertStartsWith(u'Temporary folder is not clean.', message) self.assertContains(temp_segments[-1], message) self.assertFalse(mk.fs.exists(temp_segments)) def test_assertWorkingFolderIsClean_with_folder(self): """ An error is raised if current working folder contains a temporary folder and folder is cleaned. """ # Our compat filesystem API does not support creating files in # current working directory so we use direct API call to OS. name = mk.string() os.mkdir(mk.fs.getEncodedPath(name)) self.check_assertWorkingFolderIsClean([name]) def test_assertWorkingFolderIsClean_with_file(self): """ An error is raised if current working folder contains a temporary file and file is cleaned. 
""" name = mk.string() open(mk.fs.getEncodedPath(name), 'a').close() self.check_assertWorkingFolderIsClean([name]) def test_assertWorkingFolderIsClean_with_file_and_folder(self): """ An error is raised if current working folder contains a temporary folder and file, and folder and folder is cleaned. """ file_name = mk.string() folder_name = mk.string() open(mk.fs.getEncodedPath(file_name), 'a').close() os.mkdir(mk.fs.getEncodedPath(folder_name)) self.check_assertWorkingFolderIsClean([file_name, folder_name]) def test_assertIsEmpty(self): """ Raise an exception when not empty and otherwise does nothing. """ self.assertIsEmpty(()) self.assertIsEmpty([]) self.assertIsEmpty('') self.assertIsEmpty(set()) with self.assertRaises(AssertionError) as context: self.assertIsEmpty((1, 2)) self.assertEqual( 'Iterable is not empty.\n(1, 2).', context.exception.args[0]) def test_assertCompatError_no_CompatError(self): """ Will show the details if error is not an CompatError. """ exception = self.assertRaises( AssertionError, self.assertCompatError, u'123-id', Exception('generic-error') ) self.assertEqual( "Error generic-error not CompatError but " "<type 'exceptions.Exception'>", exception.args[0], ) def test_assertCompatError_bad_id(self): """ Will show the details if error is not an CompatError. """ exception = self.assertRaises( AssertionError, self.assertCompatError, u'123-id', CompatError(u'456', u'Some details.') ) self.assertEqual( 'Error id for CompatError 456 - Some details. is not 123-id, ' 'but 456.', exception.args[0], ) def test_assertIteratorItemsEqual_no_iterable(self): """ Raise an exception if the actual value is not iterable. """ sut = [1, 3] exception = self.assertRaises( AssertionError, self.assertIteratorItemsEqual, [], sut, ) self.assertEqual( 'Value is not iterable.', exception.args[0], ) def test_assertIteratorItemsEqual_ok(self): """ Is equal even if elements are in a different order. 
""" iterator = iter([2]) value = [1, b'3', u'a', iterator] sut = iter(value) self.assertIteratorItemsEqual([b'3', 1, u'a', iterator], sut) def test_assertIteratorItemsEqual_less(self): """ It fails if the values are not equal. """ value = [1, b'3', u'a'] sut = iter(value) exception = self.assertRaises( AssertionError, self.assertIteratorItemsEqual, [1], sut, ) # The check here is more complicated since the message relies on the # assertEqual implementation. self.assertStartsWith( "Element counts were not equal:", exception.args[0], ) def test_assertEqual_unicode_vs_bytestring_in_list(self): """ Fails with AssertionError when asserting that lists containing a Unicode string vs. a bytestring are equal. """ unicode_list = [u'text'] bytes_list = [b'text'] with self.assertRaises(AssertionError) as context: self.assertEqual(unicode_list, bytes_list) self.assertEqual('First is unicode while second is str for "text".', context.exception.message) def test_assertEqual_unicode_vs_bytestring_in_nested_list(self): """ Fails with AssertionError when asserting that nested lists containing a Unicode string vs. a bytestring are equal. """ unicode_list = [[u'text']] bytes_list = [[b'text']] with self.assertRaises(AssertionError) as context: self.assertEqual(unicode_list, bytes_list) self.assertEqual('First is unicode while second is str for "text".', context.exception.message) def test_assertEqual_unicode_vs_bytestring_in_tuple(self): """ Fails with AssertionError when asserting that tuples containing a Unicode string vs. a bytestring are equal. """ unicode_tuple = (u'text',) bytes_tuple = (b'text',) with self.assertRaises(AssertionError) as context: self.assertEqual(unicode_tuple, bytes_tuple) self.assertEqual('First is unicode while second is str for "text".', context.exception.message) def test_assertEqual_unicode_vs_bytestring_in_set(self): """ Fails with AssertionError when asserting that sets containing a Unicode string vs. a bytestring are equal. 
""" unicode_set = set([u'text']) bytes_set = set([b'text']) with self.assertRaises(AssertionError) as context: self.assertEqual(unicode_set, bytes_set) self.assertEqual('First is unicode while second is str for "text".', context.exception.message) def test_assertEqual_unicode_vs_bytestring_in_dict_keys(self): """ Fails with AssertionError when asserting that lists containing a Unicode string vs. a bytestring are equal. """ unicode_dict = {u'key': 'value'} bytes_dict = {b'key': 'value'} with self.assertRaises(AssertionError) as context: self.assertEqual(unicode_dict, bytes_dict) self.assertEqual('First is unicode while second is str for "key".', context.exception.message) def test_assertEqual_unicode_vs_bytestring_in_dict_values(self): """ Fails with AssertionError when asserting that lists containing a Unicode string vs. a bytestring are equal. """ unicode_dict = {'key': u'value'} bytes_dict = {'key': b'value'} with self.assertRaises(AssertionError) as context: self.assertEqual(unicode_dict, bytes_dict) self.assertEqual('First is unicode while second is str for "value".', context.exception.message)
8,907
2,514
import unittest import sys sys.path.insert(0, '../src/') from conformal_predictors.icp import ConformalPredictor from conformal_predictors.nc_measures import * import conformal_predictors.calibrutils as cu from sklearn.datasets import * import numpy as np from sklearn.svm import SVC from sklearn.ensemble import RandomForestClassifier from sklearn.neighbors import KNeighborsRegressor, KNeighborsClassifier from sklearn.base import clone from sklearn.metrics import classification_report from nonconformist.cp import IcpClassifier from nonconformist.nc import NcFactory, InverseProbabilityErrFunc, MarginErrFunc class TestPValues(unittest.TestCase): def test_iris(self): is_smoothed = False # iris = load_iris() iris = load_breast_cancer() model = KNeighborsClassifier(n_neighbors=11) test_model = clone(model) idx = np.random.permutation(iris.target.size) idx_train, idx_cal, idx_test = idx[:50], idx[50:100], idx[100:] ## Nonconformist nc = NcFactory.create_nc( model, InverseProbabilityErrFunc() # MarginErrFunc() ) icp = IcpClassifier(nc, smoothing=is_smoothed) # Create an inductive conformal classifier # Fit the ICP using the proper training set icp.fit(iris.data[idx_train, :], iris.target[idx_train]) # Calibrate the ICP using the calibration set icp.calibrate(iris.data[idx_cal, :], iris.target[idx_cal]) nonconformist_p_values = icp.predict(iris.data[idx_test, :]) ## Test model y_cal = iris.target[idx_cal] y_test = iris.target[idx_test] test_model.fit(iris.data[idx_train, :], iris.target[idx_train]) y_cal_proba = test_model.predict_proba(iris.data[idx_cal, :]) y_test_proba = test_model.predict_proba(iris.data[idx_test, :]) icp = ConformalPredictor(y_cal_proba, y_cal, y_test_proba, y_test, smoothed=is_smoothed, mondrian=False) icp.fit(negative_logit) # icp.fit(margin_error_func) self.assertEqual(np.round(np.sum(nonconformist_p_values - icp.p_values), 12), 0) def test_breast_cancer(self): pass if __name__ == '__main__': unittest.main()
2,253
805
import os.path
import torchvision.transforms as transforms
from data.base_dataset import BaseDataset, get_transform
from data.image_folder import make_dataset
from PIL import Image
import PIL
from pdb import set_trace as st
import torch
import numpy as np
#from yolo.utils.datasets import pad
#import torchvision.transforms as transforms
from yolo.utils.datasets import pad_to_square, resize, pad_to_square2


class UnalignedDataset(BaseDataset):
    # I/O for hybrid YOLOv3 + CycleGAN! Unsupported for batch data for YOLOv3
    """Unpaired A/B image dataset that also yields YOLO detection targets.

    Image lists come from A_train.txt / B_train.txt under opt.dataroot; label
    files are derived from the image paths by string substitution.
    """

    def initialize(self, opt, normalized_labels = True):
        """Build the A/B path lists, the CycleGAN transform and the label paths.

        normalized_labels: if True, label boxes are assumed normalized to [0,1]
        and are rescaled by the raw image height/width.
        """
        self.opt = opt
        self.root = opt.dataroot
        self.normalized_labels = normalized_labels
        # self.dir_A = os.path.join(opt.dataroot, opt.phase + 'A')
        # self.dir_B = os.path.join(opt.dataroot, opt.phase + 'B')
        self.dir_A = os.path.join(opt.dataroot, 'A_train.txt')  # A.txt contains a list of path/to/img1.jpg
        self.dir_B = os.path.join(opt.dataroot, 'B_train.txt')
        self.A_paths = make_dataset(self.dir_A)
        self.B_paths = make_dataset(self.dir_B)
        self.A_paths = sorted(self.A_paths)
        self.B_paths = sorted(self.B_paths)
        self.A_size = len(self.A_paths)
        self.B_size = len(self.B_paths)
        self.transform = get_transform(opt)  # transform for cyclegan
        # prepare targets for yolo
        # Label path convention: .../images/x.jpg -> .../labels/x.txt
        self.A_label_files = [
            path.replace("images", "labels").replace(".png", ".txt").replace(".jpg", ".txt")
            for path in self.A_paths
        ]
        # self.A_label_files = [
        #     path.replace("images", "labels").replace(".png", ".txt").replace(".jpg", ".txt").replace("rainy/", "").replace("cloudy1000/", "").replace("sunny/", "").replace("night_or_night_and_rainy/", "")
        #     for path in self.A_paths
        # ]
        # B side additionally strips weather-specific directory components.
        self.B_label_files = [
            path.replace("images", "labels").replace(".png", ".txt").replace(".jpg", ".txt").replace("rainy/", "").replace("cloudy1000/", "").replace("sunny/", "").replace("night_or_night_and_rainy/", "")
            for path in self.B_paths
        ]

    def __getitem__(self, index):
        """Return one A/B pair plus their YOLO targets (or None if no label file)."""
        A_path = self.A_paths[index % self.A_size]
        B_path = self.B_paths[index % self.B_size]
        A_path = A_path.strip('\n')
        B_path = B_path.strip('\n')
        #print('A_path = ', A_path)
        A_img = Image.open(A_path).convert('RGB')
        B_img = Image.open(B_path).convert('RGB')
        #img = transforms.ToTensor()(Image.open(img_path).convert('RGB'))
        # Tensor copies are only used to compute padding geometry for labels.
        tmp_A = transforms.ToTensor()(A_img)
        #print('\n**************************************************A_img.shape = ', tmp_A.shape)
        _, h, w = tmp_A.shape
        h_factor, w_factor = (h, w) if self.normalized_labels else (1, 1)
        # Pad to square resolution
        tmp_A, pad = pad_to_square2(tmp_A, 0)
        _, padded_h, padded_w = tmp_A.shape
        tmp_B = transforms.ToTensor()(B_img)
        #print('\n**************************************************A_img.shape = ', tmp_A.shape)
        _, hB, wB = tmp_B.shape
        h_factorB, w_factorB = (hB, wB) if self.normalized_labels else (1, 1)
        # Pad to square resolution
        tmp_B, padB = pad_to_square2(tmp_B, 0)
        _, padded_hB, padded_wB = tmp_B.shape
        # The images actually returned go through the CycleGAN transform.
        A_img = self.transform(A_img)
        B_img = self.transform(B_img)

        # ---------
        #  Label
        # ---------
        def label_path2bboxes(label_path, pad, h_factor, w_factor, padded_h, padded_w):
            # Convert a YOLO label file (class cx cy w h per row) into a
            # (n, 6) tensor [batch_idx, class, cx, cy, w, h] rescaled for the
            # padded square image. Returns None when the file does not exist.
            tmp_targets = None
            if os.path.exists(label_path):
                boxes = torch.from_numpy(np.loadtxt(label_path).reshape(-1, 5))
                # Extract coordinates for unpadded + unscaled image
                x1 = w_factor * (boxes[:, 1] - boxes[:, 3] / 2)
                y1 = h_factor * (boxes[:, 2] - boxes[:, 4] / 2)
                x2 = w_factor * (boxes[:, 1] + boxes[:, 3] / 2)
                y2 = h_factor * (boxes[:, 2] + boxes[:, 4] / 2)
                # Adjust for added padding
                x1 += pad[0]
                y1 += pad[2]
                x2 += pad[1]
                y2 += pad[3]
                # Returns (x, y, w, h) in scale [0, 1]
                boxes[:, 1] = ((x1 + x2) / 2) / padded_w
                boxes[:, 2] = ((y1 + y2) / 2) / padded_h
                boxes[:, 3] *= w_factor / padded_w
                boxes[:, 4] *= h_factor / padded_h
                #print('\nboxes x y w h: ', boxes)
                tmp_targets = torch.zeros((len(boxes), 6))
                tmp_targets[:, 1:] = boxes
            return tmp_targets

        label_path = self.A_label_files[index % len(self.A_paths)].rstrip()
        A_targets = label_path2bboxes(label_path, pad, h_factor, w_factor, padded_h, padded_w)
        label_path_B = self.B_label_files[index % len(self.B_paths)].rstrip()
        B_targets = label_path2bboxes(label_path_B, padB, h_factorB, w_factorB, padded_hB, padded_wB)
        #print('targets = ', targets)
        #targets = generate_YOLO_targets(self.bbox)
        # A_path = A_annotation
        # return {'A': A_img, 'B': B_img,
        #         'A_paths': A_path, 'B_paths': B_path,
        #         'targets': targets}
        return {'A': A_img, 'B': B_img,
                'A_paths': A_path, 'B_paths': B_path,
                'A_targets': A_targets, 'B_targets': B_targets}  # add B_bbox, A_bbox

    def collate_fn(self, batch):
        """Custom collate for DataLoader batches of this dataset.

        NOTE(review): A-targets are concatenated into one tensor but B-targets
        are left as a list (the torch.cat line is commented out and marked
        BUG) — confirm downstream consumers expect this asymmetry.
        """
        # input images will be resized to 416
        # this collate_fn to suport batchSize >= 2
        #print('collate fn: ', zip(*batch))
        tmp = list(batch)
        #print('tmp = ', len(tmp))
        target_As = [data['A_targets'] for data in tmp if data['A_targets'] is not None]
        #print('targets_As = ', target_As)
        # Column 0 of each target row carries the sample index in the batch.
        for i, boxes in enumerate(target_As):
            boxes[:, 0] = i
        target_As = torch.cat(target_As, 0)  # BUG
        #print('target_As: ', target_As.shape)
        #print('target_As cat = ', target_As)
        target_Bs = [data['B_targets'] for data in tmp if data['B_targets'] is not None]
        for i, boxes in enumerate(target_Bs):
            boxes[:, 0] = i
        #print('\ntarget_Bs: ', target_Bs)
        #target_Bs = torch.cat(target_Bs, 0)  # BUG
        As = torch.stack([data['A'] for data in tmp])
        Bs = torch.stack([data['B'] for data in tmp])
        path_As = [data['A_paths'] for data in tmp]
        #path_As = torch.cat(path_As, 0)
        path_Bs = [data['B_paths'] for data in tmp]
        #path_Bs = torch.cat(path_Bs, 0)
        # paths, imgs, targets = list(zip(*batch))
        # # Remove empty placeholder targets
        # targets = [boxes for boxes in targets if boxes is not None]
        # # Add sample index to targets
        # for i, boxes in enumerate(targets):
        #     boxes[:, 0] = i
        # targets = torch.cat(targets, 0)
        # # Selects new image size every tenth batch
        # if self.multiscale and self.batch_count % 10 == 0:
        #     self.img_size = random.choice(range(self.min_size, self.max_size + 1, 32))
        # # Resize images to input shape
        # imgs = torch.stack([resize(img, self.img_size) for img in imgs])
        # self.batch_count += 1
        return {'A': As, 'B': Bs,
                'A_paths': path_As, 'B_paths': path_Bs,
                'A_targets': target_As, 'B_targets': target_Bs}

    def __len__(self):
        # Length is the larger list; the shorter side wraps via modulo.
        return max(self.A_size, self.B_size)

    def name(self):
        return 'UnalignedDataset'
7,561
2,652
"""Interface to Primare amplifiers using Twisted SerialPort. This module allows you to control your Primare I22 and I32 amplifier from the command line using Primare's binary protocol via the RS232 port on the amplifier. """ import logging import click from contextlib import closing from primare_control import PrimareController # from twisted.logger import ( # FilteringLogObserver, # globalLogBeginner, # Logger, # LogLevel, # LogLevelFilterPredicate, # textFileLogObserver # ) # log = Logger() # globalLogBeginner.beginLoggingTo([ # FilteringLogObserver( # textFileLogObserver(sys.stdout), # [LogLevelFilterPredicate(LogLevel.debug)] # ) # ]) # Setup logging so that is available FORMAT = '%(asctime)-15s %(name)s %(levelname)-8s %(message)s' logging.basicConfig(level=logging.DEBUG, format=FORMAT) logger = logging.getLogger(__name__) class DefaultCmdGroup(click.Group): """Custom implementation for handling Primare methods in a unified way.""" def list_commands(self, ctx): """List Primare Control methods.""" rv = [method for method in dir(PrimareController) if not method.startswith('_')] rv.append('interactive') rv.sort() return rv def get_command(self, ctx, name): """Return click command.""" @click.pass_context def subcommand(*args, **kwargs): #logger.debug("subcommand args: {}".format(args)) #logger.debug("subcommand kwargs: {}".format(kwargs)) ctx = args[0] params = ctx.obj['parameters'] ctx.obj['p_ctrl'] = PrimareController(port=params['port'], baudrate=params['baudrate'], source=None, volume=None, debug=params['debug']) with closing(ctx.obj['p_ctrl']): try: if ctx.obj['parameters']['amp_info']: ctx.obj['p_ctrl'].setup() method = getattr(PrimareController, name) if len(kwargs): method(ctx.obj['p_ctrl'], int(kwargs['value'])) else: method(ctx.obj['p_ctrl']) except KeyboardInterrupt: logger.info("User aborted") except TypeError as e: logger.error(e) if name == "interactive": cmd = click.Group.get_command(self, ctx, 'interactive') else: if name in [method for method in 
dir(PrimareController) if not method.startswith('_')]: # attach doc from original callable so it will appear in CLI # output subcommand.__doc__ = getattr(PrimareController, name).__doc__ if getattr(PrimareController, name).__func__.__code__.co_argcount > 1: params_arg = [click.Argument(("value",))] else: params_arg = None cmd = click.Command(name, params=params_arg, callback=subcommand) else: #logger.debug("get_command no_such_cmd") cmd = None return cmd @click.command(cls=DefaultCmdGroup) @click.pass_context @click.option("--amp-info", default=False, is_flag=True, help="Retrieve and print amplifier information") @click.option("--baudrate", default='4800', type=click.Choice(['300', '1200', '2400', '4800', '9600', '19200', '57600', '115200']), help="Serial port baudrate. For I22 it _must_ be 4800.") @click.option("--debug", "-d", default=False, is_flag=True, help="Enable debug output.") @click.option("--port", "-p", default="/dev/ttyUSB0", help="Serial port to use (e.g. 3 for a COM port on Windows, " "/dev/ttyATH0 for Arduino Yun, /dev/ttyACM0 for Serial-over-USB " "on RaspberryPi.") def cli(ctx, amp_info, baudrate, debug, port): """Prototype command.""" try: # on Windows, we need port to be an integer port = int(port) except ValueError: pass ctx.obj = {} ctx.obj['p_ctrl'] = None ctx.obj['parameters'] = { 'amp_info': amp_info, 'baudrate': baudrate, 'debug': debug, 'port': port, } @cli.command() @click.pass_context def interactive(ctx): """Start interactive shell for controlling a Primare amplifier. Press enter (blank line), 'q' or 'quit' to exit. 
For a list of available commands, type 'help' """ method_list = [ (method, getattr(PrimareController, method).__doc__) for method in dir(PrimareController) if not method.startswith('_')] help_string = """To exit, press enter (blank line) or type 'q' or 'quit'.\n Available commands are: {}""".format('\n'.join(" {} {}".format(method.ljust(25), doc.splitlines()[0]) for method, doc in method_list)) try: params = ctx.obj['parameters'] ctx.obj['p_ctrl'] = PrimareController(port=params['port'], baudrate=params['baudrate'], source=None, volume=None, debug=params['debug']) if ctx.obj['parameters']['amp_info']: ctx.obj['p_ctrl'].setup() logger.info(help_string) nb = '' while True: nb = raw_input('Cmd: ').strip() if not nb or nb == 'q' or nb == 'quit': logger.debug("Quit: '{}'".format(nb)) break elif nb.startswith('help'): if len(nb.split()) == 2: help_method = nb.split()[1] matches = [item for item in method_list if item[0].startswith(help_method)] if len(matches): logger.info("\n".join("\n== {}\n{}".format( method.ljust(25), doc_string) for method, doc_string in matches)) else: logger.info( "Help requested on unknown method: {}".format( help_method)) else: logger.info(help_string) else: parsed_cmd = nb.split() command = getattr(ctx.obj['p_ctrl'], parsed_cmd[0], None) if command: try: if len(parsed_cmd) > 1: if parsed_cmd[1].lower() == "true": parsed_cmd[1] = True elif parsed_cmd[1].lower() == "false": parsed_cmd[1] = False elif parsed_cmd[0] == "remote_cmd": pass parsed_cmd[1] = '{}'.format(parsed_cmd[1]) else: parsed_cmd[1] = int(parsed_cmd[1]) command(parsed_cmd[1]) else: command() except TypeError as e: logger.warn("You called a method with an incorrect" + "number of parameters: {}".format(e)) else: logger.info("No such function - try again") except KeyboardInterrupt: logger.info("User aborted") # in a non-main thread: ctx.obj['p_ctrl'].close() del ctx.obj['p_ctrl'] ctx.obj['p_ctrl'] = None if __name__ == '__main__': cli()
8,396
2,220
#!/usr/bin/env python3
"""Copy a CSV file row by row: ``script.py <input.csv> <output.csv>``."""
import csv
import sys


def copy_csv(input_file, output_file):
    """Read *input_file* as comma-delimited CSV and write every row to
    *output_file*.

    Both files are opened with newline='' as the csv module requires, so
    embedded newlines inside quoted fields survive the round trip.
    """
    # One `with` statement for both files; writerows streams the reader
    # directly instead of a manual per-row loop.
    with open(input_file, 'r', newline='') as csv_in_file, \
            open(output_file, 'w', newline='') as csv_out_file:
        filereader = csv.reader(csv_in_file, delimiter=',')
        filewriter = csv.writer(csv_out_file, delimiter=',')
        filewriter.writerows(filereader)


if __name__ == '__main__':
    # Preserve the original CLI contract: argv[1] = input, argv[2] = output.
    copy_csv(sys.argv[1], sys.argv[2])
382
149
class Dog:
    """Toy class whose instances display as the string 'dog'."""

    def __init__(self):
        # The printable identity lives in `val`.
        self.val = 'dog'

    def __repr__(self):
        return str(self.val)


class Cat:
    """Toy class whose instances display as the string 'cat'."""

    def __init__(self):
        # The printable identity lives in `val`.
        self.val = 'cat'

    def __repr__(self):
        return str(self.val)
298
101
import unittest

from provdbconnector.exceptions.database import InvalidOptionsException, AuthException
from provdbconnector import Neo4jAdapter, NEO4J_USER, NEO4J_PASS, NEO4J_HOST, NEO4J_BOLT_PORT
from provdbconnector.prov_db import ProvDb
from provdbconnector.tests import AdapterTestTemplate
from provdbconnector.tests import ProvDbTestTemplate


class Neo4jAdapterTests(AdapterTestTemplate):
    """
    This test extends from AdapterTestTemplate and provide a common set for the neo4j adapter
    """

    def setUp(self):
        """
        Setup the test
        """
        # Connect a fresh adapter and wipe the database so each test starts
        # from an empty graph.
        self.instance = Neo4jAdapter()
        auth_info = {"user_name": NEO4J_USER,
                     "user_password": NEO4J_PASS,
                     "host": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
                     }
        self.instance.connect(auth_info)
        session = self.instance._create_session()
        session.run("MATCH (x) DETACH DELETE x")

    @unittest.skip(
        "Skipped because the server configuration currently is set to 'no password', so the authentication will never fail")
    def test_connect_fails(self):
        """
        Try to connect with the wrong password
        """
        # NOTE(review): connect() is called twice here; presumably only the
        # call inside assertRaises is intended — verify when re-enabling.
        auth_info = {"user_name": NEO4J_USER,
                     "user_password": 'xxxxxx',
                     "host": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
                     }
        self.instance.connect(auth_info)
        with self.assertRaises(AuthException):
            self.instance.connect(auth_info)

    def test_connect_invalid_options(self):
        """
        Try to connect with some invalid arguments
        """
        # Keys "u"/"p"/"h" do not match the expected option names, so the
        # adapter must reject them.
        auth_info = {"u": NEO4J_USER,
                    "p": 'xxxxxx',
                    "h": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
                    }
        with self.assertRaises(InvalidOptionsException):
            self.instance.connect(auth_info)

    def tearDown(self):
        """
        Delete all data on the database
        :return:
        """
        session = self.instance._create_session()
        session.run("MATCH (x) DETACH DELETE x")
        del self.instance


class Neo4jAdapterProvDbTests(ProvDbTestTemplate):
    """
    High level api test for the neo4j adapter
    """

    def setUp(self):
        # Build a ProvDb facade backed by the Neo4j adapter.
        self.auth_info = {"user_name": NEO4J_USER,
                          "user_password": NEO4J_PASS,
                          "host": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
                          }
        self.provapi = ProvDb(api_id=1, adapter=Neo4jAdapter, auth_info=self.auth_info)

    def clear_database(self):
        """
        This function get called before each test starts
        """
        session = self.provapi._adapter._create_session()
        session.run("MATCH (x) DETACH DELETE x")

    def tearDown(self):
        """
        Delete all data in the database
        """
        session = self.provapi._adapter._create_session()
        session.run("MATCH (x) DETACH DELETE x")
        del self.provapi
2,980
895
# -*- coding: utf-8 -*- import psutil # CPU print("CPU: ", psutil.cpu_count()) # CPU逻辑数量 print("CPU: ", psutil.cpu_count(logical=False)) # CPU物理核心 print("CPU: ", psutil.cpu_times()) # 统计CPU的用户/系统/空闲时间 # for x in range(3): # print(psutil.cpu_percent(interval=1, percpu=True)) # 每秒刷新一次CPU使用率 # 内存 print("memory", psutil.virtual_memory()) # 物理内存信息, 以整数字节为单位显示 print("memory", psutil.swap_memory()) # 交换内存信息 # 磁盘 print("disk: ", psutil.disk_partitions()) # 磁盘分区信息 print("disk: ", psutil.disk_usage('/')) # 磁盘使用情况 print("disk: ", psutil.disk_io_counters()) # 磁盘IO # 网络 print("network: ", psutil.net_io_counters()) # 网络读写字节/包的个数 print("network: ", psutil.net_if_addrs()) # 网络接口信息 print("network: ", psutil.net_if_stats()) # 网络接口状态 print("network: ", psutil.net_connections()) # 当前网络连接信息 # 进程 print("process: ", psutil.pids()) # 所有进程ID p = psutil.Process(12052) # 获取指定进程 print("process: ", p.name(), # 进程名称 "\nprocess: ", p.status(), # 进程状态 "\nprocess: ", p.exe(), # 进程exe路径 "\nprocess: ", p.cwd(), # 进程工作目录 "\nprocess: ", p.create_time(), # 进程创建时间 "\nprocess: ", p.cmdline(), # 进程启动的命令行 "\nprocess: ", p.ppid(), # 父进程ID "\nprocess: ", p.parent(), # 父进程 "\nprocess: ", p.children(), # 子进程列表 "\nprocess: ", p.username(), # 进程用户名 "\nprocess: ", p.cpu_times(), # 进程使用的CPU时间 "\nprocess: ", p.memory_info(), # 进程使用的内存 "\nprocess: ", p.num_threads(), # 进程的线程数量 "\nprocess: ", p.threads(), # 所有线程信息 "\nprocess: ", p.environ(), # 进程环境变量 "\nprocess: ", p.open_files(), # 进程打开的文件 "\nprocess: ", p.connections() # 进程相关网络连接 ) # p.terminate() # 结束进程 psutil.test() # test()函数可模拟出ps命令的效果 # ### psutil # - Cross-platform lib for process and system monitoring in Python. # - Home Page: https://github.com/giampaolo/psutil # - Documentation: http://psutil.readthedocs.io/en/latest/
1,956
1,013
import json

from NewDeclarationInQueue.formular_converter import FormularConverter
from NewDeclarationInQueue.preprocess_one_step import PreprocessOneStep
from NewDeclarationInQueue.preprocess_two_steps import PreProcessTwoSteps
from NewDeclarationInQueue.processfiles.customprocess.search_text_line_parameter import SearchTextLineParameter
from NewDeclarationInQueue.processfiles.customprocess.table_config_detail import TableConfigDetail
from NewDeclarationInQueue.processfiles.customprocess.text_with_special_ch import TextWithSpecialCharacters
from NewDeclarationInQueue.processfiles.ocr_worker import OcrWorker
from NewDeclarationInQueue.processfiles.process_messages import ProcessMessages


def process_only_second_steps(input_file_path: str):
    """Run only the second (custom-model) OCR step on the given input file."""
    second_step = PreprocessOneStep()
    #second_step.process_step_two(input_file_path)
    second_step.process_custom_model_step_two(input_file_path)


def get_input(input_file: str):
    """Load and return the JSON document stored at *input_file*."""
    node = []
    with open(input_file) as json_data:
        node = json.load(json_data)
        # NOTE(review): explicit close() is redundant inside `with`.
        json_data.close()
    return node


def process_two_steps(sfile: str):
    """Drive the full two-step OCR pipeline for the job described in *sfile*.

    *sfile* is a JSON file describing the document to process; progress is
    accumulated in a ProcessMessages collector.
    """
    str_msg_id = 'abc'  # placeholder message id — presumably replaced by a queue id in production; verify
    dict_input = get_input(sfile)
    two_steps = PreProcessTwoSteps()
    process_messages = ProcessMessages('OCR Process', str_msg_id)
    one_step = PreprocessOneStep()
    ocr_constants = one_step.get_env()
    ocr_file, process_messages = two_steps.get_file_info(dict_input, process_messages)
    formular_converter = FormularConverter()
    # ocr_formular is computed but only used by the commented-out legacy path.
    ocr_formular = formular_converter.get_formular_info(ocr_constants, ocr_file)
    #process_messages_json = two_steps.process_document(ocr_file, ocr_constants, ocr_formular, process_messages)
    process_messages = two_steps.process_document_with_custom_model(ocr_file, ocr_constants, process_messages)
    #two_steps.save_in_output_queue(process_messages_json)


# Module-level invocation: runs the pipeline on import/execution.
#process_only_second_steps(r"test_url.json")
process_two_steps(r"test_url.json")
1,971
616
"""Extend event column in account history Revision ID: 18632a2d5fc Revises: 3e19c50e864 Create Date: 2015-06-05 17:49:12.757269 """ # revision identifiers, used by Alembic. revision = '18632a2d5fc' down_revision = '3e19c50e864' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql def upgrade(engine_name): globals()["upgrade_%s" % engine_name]() def downgrade(engine_name): globals()["downgrade_%s" % engine_name]() def upgrade_account(): ### commands auto generated by Alembic - please adjust! ### op.alter_column('tariff_history', 'event', existing_type=mysql.VARCHAR(length=8), type_=sa.String(length=16), existing_nullable=True) ### end Alembic commands ### def downgrade_account(): ### commands auto generated by Alembic - please adjust! ### op.alter_column('tariff_history', 'event', existing_type=sa.String(length=16), type_=mysql.VARCHAR(length=8), existing_nullable=True) ### end Alembic commands ### def upgrade_fitter(): pass def downgrade_fitter(): pass
1,187
431
import random
import datetime

from django.db import models
from django.contrib.auth.models import (
    BaseUserManager, AbstractBaseUser
)
from django.utils import timezone

# Closed set of security questions; the first element is the stored key.
SECURTYQUESTION = (
    ('1', "What city were you born in?"),
    ('2', "What is your mother's maiden name?"),
    ('3', "What street did you grow up on?"),
    ('4', "What is the title of your favorite book?"),
    ('5', "What is your favorite vacation spot?"),
    ('6', "What is your pet's name?"),
)


class UserManager(BaseUserManager):
    """Manager providing the custom creation API for :class:`User`."""

    def create_user(self, email, first, last, principal, username, dob,
                    securtyq, securtya, avatarname, password=None):
        """
        Creates and saves a User with the given email, date of birth and password.
        """
        if not email:
            raise ValueError('Users must have an email address')
        if not principal:
            raise ValueError('Users must have a principal id')
        if not username:
            raise ValueError('Users must have a username')

        user = self.model(
            email=UserManager.normalize_email(email),
            firstname=first,
            lastname=last,
            principal_id=principal,
            username=username,
            securtyq=securtyq,
            securtya=securtya,
            dob=dob,
            avatarname=avatarname
        )
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, email, firstname, lastname,
                         principal_id, password):
        """
        Creates and saves a superuser with the given parameters
        """
        user = self.model(
            username=username,
            email=UserManager.normalize_email(email),
            firstname=firstname,
            lastname=lastname,
            principal_id=principal_id
        )
        user.set_password(password)
        user.is_admin = True
        user.is_superuser = True
        user.is_staff = True
        user.save(using=self._db)
        return user


class User(AbstractBaseUser):
    """Custom user keyed on email, with security-question profile fields."""

    email = models.EmailField(
        verbose_name='email address',
        max_length=255,
        unique=True,
        db_index=True,
    )
    username = models.CharField(max_length=255, unique=True)
    dob = models.DateField(default=datetime.date.today)
    firstname = models.CharField(max_length=64)
    lastname = models.CharField(max_length=64)
    principal_id = models.CharField(max_length=36)
    scope_id = models.CharField(
        max_length=36,
        default='00000000-0000-0000-0000-000000000000'
    )
    securtyq = models.CharField(
        max_length=255, choices=SECURTYQUESTION, default='1')
    securtya = models.CharField(max_length=255, default='india')
    user_level = models.IntegerField(default=0)
    is_active = models.BooleanField(default=True)
    is_admin = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)
    avatarname = models.CharField(max_length=250, blank=True, null=True)
    date_joined = models.DateTimeField(default=timezone.now)
    is_staff = models.BooleanField(default=False)

    objects = UserManager()

    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['username', 'firstname', 'lastname', 'principal_id']

    def get_full_name(self):
        # FIX: previously missing `return`, so this always yielded None.
        return self.email

    def get_short_name(self):
        # FIX: previously missing `return`, so this always yielded None.
        return self.email

    def __unicode__(self):
        return '%s ** %s %s' % (self.email, self.firstname, self.lastname)

    def has_perms(self, perm, obj=None):
        "Does the user have a specific permission?"
        # Simplest possible answer: Yes, always
        return True

    def has_perm(self, perm, obj=None):
        "Does the user have a specific permission?"
        # Simplest possible answer: Yes, always
        return True

    def has_module_perms(self, app_label):
        "Does the user have permissions to view the app `app_label`?"
        # Simplest possible answer: Yes, always
        return True

    def get_firstname_lastname(self):
        return '%s %s' % (self.firstname, self.lastname)

    # @property
    # def is_staff(self):
    #     "Is the user a member of staff?"
    #     # Simplest possible answer: All admins are staff
    #     return self.is_admin


class TempUserManager(models.Manager):
    """Manager for pending (not yet activated) registrations."""

    def create_temp_user(self, email, firstname, lastname, key, username,
                         dob, securtyq, securtya, password=None):
        if not email:
            raise ValueError('Users must have an email address')
        temp_user = self.model(
            email=UserManager.normalize_email(email),
            username=username,
            firstname=firstname,
            lastname=lastname,
            securtyq=securtyq,
            securtya=securtya,
            activation_key=key,
            password=password
        )
        temp_user.save(using=self._db)
        return temp_user


class TempUser(models.Model):
    """Registration awaiting email activation; mirrors the User fields."""

    username = models.CharField(max_length=255, unique=True)
    email = models.EmailField(max_length=95, unique=True)
    firstname = models.CharField(max_length=32)
    lastname = models.CharField(max_length=32)
    # NOTE(review): password is stored as plain CharField here (pre-hash);
    # it is only hashed when the real User is created.
    password = models.CharField(max_length=20)
    dob = models.DateField()
    securtyq = models.CharField(max_length=255, choices=SECURTYQUESTION)
    securtya = models.CharField(max_length=255)
    created = models.DateTimeField(auto_now=True)
    activation_key = models.CharField(max_length=64)
    avatarname = models.CharField(max_length=250, blank=True, null=True)
    accounttype = models.CharField(
        max_length=64, blank=True, default='basic membership')

    objects = TempUserManager()

    def __unicode__(self):
        return '%s ** %s %s ** %s' % (self.email, self.firstname,
                                      self.lastname, self.created)


class ChangeEmailManager(models.Manager):
    """Manager for pending email-change confirmations."""

    def create_temp_email(self, email, key):
        if not email:
            raise ValueError('Users must have an email address')
        temp_email = self.model(
            # FIX: models.Manager has no normalize_email; the previous
            # ChangeEmailManager.normalize_email(...) raised AttributeError.
            email=BaseUserManager.normalize_email(email),
            activation_key=key
        )
        temp_email.save(using=self._db)
        return temp_email


class ChangeEmail(models.Model):
    """Pending email change keyed by activation token."""

    email = models.EmailField(max_length=95, unique=True)
    created = models.DateTimeField(auto_now=True)
    activation_key = models.CharField(max_length=64)

    # FIX (consistency): wire the custom manager like TempUser and
    # ChangePassword do, so create_temp_email is reachable via .objects.
    objects = ChangeEmailManager()


class ChangePasswordManager(models.Manager):
    """Manager for pending password-change confirmations."""

    def create_confirmation(self, password, key, cuser_id):
        temp_password = self.model(
            password=password,
            activation_key=key,
            user_id=cuser_id,
        )
        temp_password.save(using=self._db)
        return temp_password


class ChangePassword(models.Model):
    """Pending password change keyed by activation token."""

    password = models.CharField(max_length=20)
    created = models.DateTimeField(auto_now=True)
    activation_key = models.CharField(max_length=64)
    user_id = models.CharField(max_length=50, blank=True, null=True)

    objects = ChangePasswordManager()


class SyncUser(models.Model):
    """Lightweight mirror of an externally synchronized user account."""

    email = models.EmailField(
        verbose_name='email address',
        max_length=255,
        unique=True,
        db_index=True,
    )
    firstname = models.CharField(max_length=64)
    lastname = models.CharField(max_length=64)
    principal_id = models.CharField(max_length=36)
    scope_id = models.CharField(
        max_length=36,
        default='00000000-0000-0000-0000-000000000000'
    )
    user_level = models.IntegerField(default=0)

    def __unicode__(self):
        return '%s ** %s %s' % (self.firstname, self.lastname, self.email)
8,018
2,470
from emoji import emojize
from data import all_emoji

from aiogram.types import InlineKeyboardMarkup
from aiogram.types import InlineKeyboardButton
from aiogram.utils.callback_data import CallbackData

# Callback-data factory shared by the "confirm close" keyboard.
cb_confirm_close = CallbackData('cb_cc', 'type_btn')


def create_kb_confirm_close():
    """Build the inline keyboard shown when closing a support request.

    One button per row: confirm, add a message, return to the request,
    or go back to the main menu.
    """
    emo_snail = all_emoji['back__main_menu']
    rows = [
        ('подтверждаю!', 'confirm'),
        ('добавить сообщение', 'add_message'),
        ('вернуться к заявке', 'back_to_request'),
        (f'назад {emo_snail} главное меню', 'back__main_menu'),
    ]
    keyboard = InlineKeyboardMarkup()
    for caption, type_btn in rows:
        keyboard.add(
            InlineKeyboardButton(
                text=caption,
                callback_data=cb_confirm_close.new(type_btn=type_btn)
            )
        )
    return keyboard


def create_kb_confirm_cancel_request():
    """Build the inline keyboard shown when cancelling a support request.

    Unlike create_kb_confirm_close, these buttons carry plain string
    callback_data rather than values from the CallbackData factory.
    """
    emo_snail = all_emoji['back__main_menu']
    rows = [
        ('отменить заявку', 'cancel'),
        ('вернуться к заявке', 'back_to_request'),
        (f'назад {emo_snail} главное меню', 'back__main_menu'),
    ]
    keyboard = InlineKeyboardMarkup()
    for caption, data in rows:
        keyboard.add(InlineKeyboardButton(text=caption, callback_data=data))
    return keyboard
1,729
542
'''
This file contains utility of AStarSearch.
Thanks to Binyu Wang for providing the codes.
'''
from random import randint

import numpy as np


class SearchEntry():
    """One node in the A* open/closed lists for the plain 2-D search."""

    def __init__(self, x, y, g_cost, f_cost=0, pre_entry=None):
        self.x = x
        self.y = y
        # cost to move from the start entry to this entry
        self.g_cost = g_cost
        self.f_cost = f_cost
        self.pre_entry = pre_entry

    def getPos(self):
        return (self.x, self.y)


def AStarSearch(img, source, dest):
    """4-connected A* on grid `img` from `source` to `dest`.

    Cells whose value is 1 (wall) or 3 (robot) are blocked.
    Returns the path as a list of [x, y] including both endpoints, or
    [source] when no path exists (the historical failure contract).
    """
    def getNewPosition(img, location, offset):
        x, y = (location.x + offset[0], location.y + offset[1])
        # Reject out-of-bounds cells and obstacles (1 = wall, 3 = robot).
        if x < 0 or x >= img.shape[0] or y < 0 or y >= img.shape[1] \
                or img[x, y] == 1 or img[x, y] == 3:
            return None
        return (x, y)

    def getPositions(img, location):
        # use four ways or eight ways to move
        offsets = [(-1, 0), (0, -1), (1, 0), (0, 1)]
        # offsets = [(-1,0), (0, -1), (1, 0), (0, 1), (-1,-1), (1, -1), (-1, 1), (1, 1)]
        poslist = []
        for offset in offsets:
            pos = getNewPosition(img, location, offset)
            if pos is not None:
                poslist.append(pos)
        return poslist

    # improve the heuristic distance more precisely in future
    def calHeuristic(pos, dest):
        # Manhattan distance: admissible for 4-way movement.
        return abs(dest.x - pos[0]) + abs(dest.y - pos[1])

    def getMoveCost(location, pos):
        # Diagonal moves cost 1.4; unreachable with the 4-way offsets above
        # but kept for the 8-way variant.
        if location.x != pos[0] and location.y != pos[1]:
            return 1.4
        else:
            return 1

    # check if the position is in list
    def isInList(entries, pos):
        # FIX: parameter renamed from `list` to avoid shadowing the builtin.
        if pos in entries:
            return entries[pos]
        return None

    # add available adjacent positions
    def addAdjacentPositions(img, location, dest, openlist, closedlist):
        poslist = getPositions(img, location)
        for pos in poslist:
            # if position is already in closedlist, do nothing
            if isInList(closedlist, pos) is None:
                findEntry = isInList(openlist, pos)
                h_cost = calHeuristic(pos, dest)
                g_cost = location.g_cost + getMoveCost(location, pos)
                if findEntry is None:
                    # if position is not in openlist, add it to openlist
                    openlist[pos] = SearchEntry(pos[0], pos[1], g_cost,
                                                g_cost + h_cost, location)
                elif findEntry.g_cost > g_cost:
                    # if position is in openlist and cost is larger than
                    # current one, then update cost and previous position
                    findEntry.g_cost = g_cost
                    findEntry.f_cost = g_cost + h_cost
                    findEntry.pre_entry = location

    # find a least cost position in openlist, return None if openlist is empty
    def getFastPosition(openlist):
        fast = None
        for entry in openlist.values():
            if fast is None:
                fast = entry
            elif fast.f_cost > entry.f_cost:
                fast = entry
        return fast

    all_path = []
    openlist = {}
    closedlist = {}
    location = SearchEntry(source[0], source[1], 0.0)
    dest = SearchEntry(dest[0], dest[1], 0.0)
    openlist[source] = location
    while True:
        location = getFastPosition(openlist)
        if location is None:
            # not found valid path
            # print("can't find valid path")
            return ([source])
        if location.x == dest.x and location.y == dest.y:
            break
        closedlist[location.getPos()] = location
        openlist.pop(location.getPos())
        addAdjacentPositions(img, location, dest, openlist, closedlist)
    # Walk the pre_entry chain back to the start, then reverse.
    while location is not None:
        all_path.append([location.x, location.y])
        # img[location.x][location.y] = 2
        location = location.pre_entry
    return all_path[::-1]


def hca(img, all_start, all_end, steps=100):
    """Hierarchical cooperative A*: plan time-indexed paths for many robots.

    Robots (value 3) are cleared from the grid, then planned one by one with
    AStarTime against a (steps, H, W) reservation volume; each planned path
    is written back as obstacles for the robots that follow.
    Returns a list of [x, y, t] paths, one per start/end pair.
    """
    all_path = []
    robot_loc = np.where(img == 3)
    # Remove robot markers so they do not block their own planning.
    for i in range(img.shape[0]):
        for j in range(img.shape[1]):
            if img[i, j] == 3:
                img[i, j] = 0
    res_imgs = np.expand_dims(img, axis=0).repeat(steps, axis=0)
    # Robots occupy their initial cells at t = 0.
    for i in range(len(robot_loc[0])):
        res_imgs[0, robot_loc[0][i], robot_loc[1][i]] = 3
    for i in range(len(all_start)):
        robot_path = AStarTime(res_imgs, (all_start[i][0], all_start[i][1]),
                               (all_end[i][0], all_end[i][1]))
        # print(i)
        if len(robot_path) == 1:
            # No path found: the robot waits in place for every time step.
            new_path = []
            for j in range(steps - 1):
                res_imgs[j, all_start[i][0], all_start[i][1]] = 3
                new_path.append([all_start[i][0], all_start[i][1], j])
            all_path.append(new_path)
            continue
        else:
            # Reserve the planned cells in the time volume.
            for loc in robot_path:
                res_imgs[loc[2], loc[0], loc[1]] = 3
            all_path.append(robot_path)
    return all_path


class SearchEntryTime():
    """One node in the time-expanded A* lists; z is the time step."""

    def __init__(self, x, y, z, g_cost, f_cost=0, pre_entry=None):
        self.x = x
        self.y = y
        self.z = z
        # cost to move from the start entry to this entry
        self.g_cost = g_cost
        self.f_cost = f_cost
        self.pre_entry = pre_entry

    def getPos(self):
        return (self.x, self.y, self.z)


def AStarTime(imgs, source, dest, total_steps=80):
    """A* over a time-expanded grid `imgs` of shape (T, H, W).

    Neighbours are checked against the slice at the NEXT time step so that
    moving robots (value 3) are avoided. Returns [x, y, t] triples, or
    [source] when no path is found within `total_steps`.
    """
    def getNewPosition(img, location, offset, step=0):
        x, y = (location.x + offset[0], location.y + offset[1])
        if x < 0 or x >= img.shape[0] or y < 0 or y >= img.shape[1] \
                or img[x, y] == 1 or img[x, y] == 3:
            return None
        return (x, y, step)

    def getPositions(img, location, step=0):
        # use four ways or eight ways to move
        offsets = [(-1, 0), (0, -1), (1, 0), (0, 1)]
        # offsets = [(-1,0), (0, -1), (1, 0), (0, 1), (-1,-1), (1, -1), (-1, 1), (1, 1)]
        poslist = []
        for offset in offsets:
            pos = getNewPosition(img, location, offset, step)
            if pos is not None:
                poslist.append(pos)
        return poslist

    # improve the heuristic distance more precisely in future
    def calHeuristic(pos, dest):
        return abs(dest.x - pos[0]) + abs(dest.y - pos[1])

    def getMoveCost(location, pos):
        if location.x != pos[0] and location.y != pos[1]:
            return 1.4
        else:
            return 1

    # check if the position is in list
    def isInList(entries, pos):
        # FIX: parameter renamed from `list` to avoid shadowing the builtin.
        if pos in entries:
            return entries[pos]
        return None

    # add available adjacent positions
    def addAdjacentPositions(imgs, location, dest, openlist, closedlist, steps):
        # Collision check happens on the slice for the next time step.
        img = imgs[int(steps + 1), :, :]
        poslist = getPositions(img, location, steps)
        for pos in poslist:
            # if position is already in closedlist, do nothing
            if isInList(closedlist, pos) is None:
                findEntry = isInList(openlist, pos)
                h_cost = calHeuristic(pos, dest)
                g_cost = location.g_cost + getMoveCost(location, pos)
                if findEntry is None:
                    # if position is not in openlist, add it to openlist
                    steps = int(g_cost)
                    openlist[(pos[0], pos[1], steps)] = SearchEntryTime(
                        pos[0], pos[1], steps, g_cost, g_cost + h_cost, location)
                elif findEntry.g_cost > g_cost:
                    # if position is in openlist and cost is larger than
                    # current one, then update cost and previous position
                    findEntry.g_cost = g_cost
                    findEntry.f_cost = g_cost + h_cost
                    findEntry.z = int(g_cost)
                    findEntry.pre_entry = location

    # find a least cost position in openlist, return None if openlist is empty
    def getFastPosition(openlist):
        fast = None
        for entry in openlist.values():
            if fast is None:
                fast = entry
            elif fast.f_cost > entry.f_cost:
                fast = entry
        return fast

    all_path = []
    openlist = {}
    closedlist = {}
    location = SearchEntryTime(source[0], source[1], 0, 0.0)
    dest = SearchEntryTime(dest[0], dest[1], 0, 0.0)
    openlist[(source[0], source[1], 0)] = location
    steps = 0
    while steps < total_steps:
        location = getFastPosition(openlist)
        if location is None:
            # not found valid path
            # print("can't find valid path")
            return ([source])
        if location.x == dest.x and location.y == dest.y:
            break
        closedlist[location.getPos()] = location
        openlist.pop(location.getPos())
        steps = int(location.g_cost)
        addAdjacentPositions(imgs, location, dest, openlist, closedlist, steps)
    while location is not None:
        all_path.append([location.x, location.y, location.z])
        # img[location.x][location.y] = 2
        location = location.pre_entry
    return all_path[::-1]

# img = np.zeros((20,20))
# source = (0,0)
# dest = (img.shape[0]-1, img.shape[1]-1)
# path = AStarSearch(img, source, dest)
2,899
import xlrd
from app.services.extension import task_server, sqlalchemy as db
from app.models.core.user import User
from app.application import initialize_app

# Prefer the production config when available; fall back to local dev config.
try:
    from app.config.production import ProductionConfig as config_object
except ImportError:
    from app.config.local import LocalConfig as config_object


@task_server.task()
def upload_users(file_object):
    """Bulk-import users from an Excel workbook (asynchronous task).

    Reads the first worksheet of `file_object`, skips the header row and
    creates or updates one user per remaining row.

    Args:
        file_object: value accepted by xlrd.open_workbook.
            # NOTE(review): xlrd.open_workbook's first positional arg is a
            # filename; pass file_contents=... for raw bytes -- confirm callers.

    Returns:
        The string "OK." on completion.
    """
    workbook = xlrd.open_workbook(file_object)
    worksheet = workbook.sheet_by_index(0)
    offset = 0
    rows = []
    for i, row in enumerate(range(worksheet.nrows)):
        if i <= offset:
            # (Optionally) skip headers
            continue
        r = []
        for j, col in enumerate(range(worksheet.ncols)):
            r.append(worksheet.cell_value(i, j))
        rows.append(r)

    # Expected column order (per the dict below):
    # initial_name, first_name, last_name, username, email, password, active
    users = []
    for i, row in enumerate(rows):
        users.append({
            'initial_name': row[0],
            'first_name': row[1],
            'last_name': row[2],
            'username': row[3],
            'email': row[4],
            'password': row[5],
            'active': row[6]
        })

    # create_or_update needs an application context (DB session etc.).
    app = initialize_app(config_object)
    with app.test_request_context():
        user_object = User()
        user_object.create_or_update(users)
    return "OK."
1,270
385
from praw import Reddit
import random


class Savenger:
    """Deletes all of a Reddit user's submissions and comments in a chosen
    subreddit, printing Avengers-themed progress messages."""

    # Hero names used purely for flavour in the progress output.
    AVENGERS = ["Iron Man", "Doctor Strange", "Star-Lord", "Black Widow",
                "Thor", "Spider-Man", "Captain America", "Wanda Maximoff",
                "Bucky Barnes", "Loki", "Hulk", "Black Panther", "Vision",
                "Gamora", "Drax", "Nebula", "Sam Wilson", "Mantis", "Okoye",
                "Shuri", "Groot", "Rocket", "Heimdall"]

    def __init__(self):
        # Store the Reddit class (not an instance) so authenticate() can build it.
        self.Reddit = Reddit

    def get_superhero(self):
        """Return a random hero name for the log messages."""
        return random.choice(self.AVENGERS)

    def authenticate(self, username, password, client_id, client_secret, user_agent):
        """Log in via PRAW script-app credentials; exits the process on failure."""
        print("Authenticating...")
        try:
            self.reddit = self.Reddit(user_agent=user_agent,
                                      client_id=client_id,
                                      client_secret=client_secret,
                                      username=username,
                                      password=password)
            self.user = self.reddit.user.me()
            print(f"Authenticated as {self.user}")
            return self.reddit
        except Exception as e:
            # NOTE(review): any failure terminates the whole process.
            print(e)
            exit()

    def save(self, subreddit):
        """Delete every one of the user's submissions and comments in `subreddit`."""
        try:
            print("Savengers are on the way, stay hold.")
            subreddit = self.reddit.subreddit(subreddit)
            print(f"{self.get_superhero()} finding every threatening submission made in {subreddit}")
            subreddit_submissions = self.get_user_subreddit_submissions(subreddit)
            self.delete_submissions(subreddit_submissions)
            print(f"{self.get_superhero()} saved your from dying by the submission's author")
            print(f"{self.get_superhero()} finding every forbidding comment made in {subreddit}")
            subreddit_comments = self.get_user_subreddit_comments(subreddit)
            self.delete_comments(subreddit_comments)
            print("Savengers have saved you!")
            print("Go visit https://www.reddit.com/r/savengers/ to have a chat with the fellow superheroes")
            return True
        except Exception as e:
            # NOTE(review): any failure terminates the whole process.
            print(e)
            exit()

    def get_user_subreddit_comments(self, subreddit):
        """Return the user's comments that belong to `subreddit`."""
        subreddit_comments = []
        for comment in self.user.comments.new(limit=None):
            if comment.subreddit == subreddit:
                if comment.body:
                    print(f"{self.get_superhero()} found a comment with the body: {comment.body}")
                    subreddit_comments.append(comment)
        return subreddit_comments

    def get_user_subreddit_submissions(self, subreddit):
        """Return the user's submissions that belong to `subreddit`."""
        subreddit_submissions = []
        for submission in self.user.submissions.new(limit=None):
            if submission.subreddit == subreddit:
                if submission.title:
                    print(f"{self.get_superhero()} found a submission with the title: {submission.title}")
                    subreddit_submissions.append(submission)
        return subreddit_submissions

    def delete_comments(self, subreddit_comments):
        """Delete each comment in `subreddit_comments`; always returns True."""
        for subreddit_comment in subreddit_comments:
            print(f"{self.get_superhero()} successfully eliminated the threatening comment!")
            subreddit_comment.delete()
        return True

    def delete_submissions(self, subreddit_submissions):
        """Delete each submission in `subreddit_submissions`; always returns True."""
        for subreddit_submission in subreddit_submissions:
            print(f"{self.get_superhero()} successfully eliminated the forbidding post!")
            subreddit_submission.delete()
        return True
3,444
1,000
from .utils.distance import distance
from .classification import MDM
import numpy
from sklearn.base import BaseEstimator, TransformerMixin


##########################################################
class ElectrodeSelection(BaseEstimator, TransformerMixin):
    """Greedy backward electrode (channel) selection.

    Starting from all channels, repeatedly removes the channel whose removal
    keeps the summed pairwise distance between the per-class mean covariance
    matrices largest, until only `nelec` channels remain.

    Parameters
    ----------
    nelec : int
        Number of channels to keep.
    metric : str
        Metric passed to MDM for estimating the class mean covariances.
    """

    def __init__(self, nelec=16, metric='riemann'):
        self.nelec = nelec
        self.metric = metric
        self.subelec = -1  # selected channel indices, set by fit()
        self.dist = []     # distance recorded at each removal step

    def fit(self, X, y=None):
        """Select channels from a set of covariance matrices.

        Parameters
        ----------
        X : ndarray, shape (n_trials, n_channels, n_channels)
            Covariance matrices.
        y : ndarray, shape (n_trials,)
            Class labels.

        Returns
        -------
        self
        """
        mdm = MDM(metric=self.metric)
        mdm.fit(X, y)
        self.covmeans = mdm.covmeans

        Ne, _ = self.covmeans[0].shape

        # Bug fix: range() is an immutable sequence in Python 3 and has no
        # pop(); use a mutable list of channel indices instead.
        self.subelec = list(range(Ne))
        while len(self.subelec) > self.nelec:
            di = numpy.zeros((len(self.subelec), 1))
            for idx in range(len(self.subelec)):
                # Candidate subset with channel `idx` removed.
                sub = self.subelec[:]
                sub.pop(idx)
                di[idx] = 0
                # Sum of pairwise distances between class means restricted
                # to the candidate subset.
                for i in range(len(self.covmeans)):
                    for j in range(i + 1, len(self.covmeans)):
                        di[idx] += distance(
                            self.covmeans[i][:, sub][sub, :],
                            self.covmeans[j][:, sub][sub, :])
            # Drop the channel whose removal hurts separability least.
            torm = di.argmax()
            self.dist.append(di.max())
            self.subelec.pop(torm)
        return self

    def transform(self, X):
        """Return X restricted to the selected channels (both axes)."""
        return X[:, self.subelec, :][:, :, self.subelec]
1,493
460
from django.conf.urls import url, include
from django.contrib.auth import views as auth
from user.forms import NewAccountForm
from user import views

app_name = 'user'

# NOTE(review): the function-based auth views used below (auth.login,
# auth.logout, auth.password_change, ...) were removed in Django 2.1 in
# favour of class-based views -- this module requires an older Django;
# confirm the pinned version.
urlpatterns = [
    # auth
    url(r'^create/$', views.UserCreate.as_view(), name='create'),
    url(r'^login/$', auth.login,
        {'template_name':'user/login.html'}, name='login'),
    url(r'^logout/$', auth.logout,
        {'template_name':'user/logout.html'}, name='logout'),
    url(r'^password_change/$', auth.password_change,
        {'template_name':'user/password_change_form.html',
         'post_change_redirect':'user:password_change_done'},
        name='password_change'),
    url(r'^password_change/done/$', auth.password_change_done,
        {'template_name':'user/password_change_done.html'},
        name='password_change_done'),
    url(r'^password_reset/$', auth.password_reset,
        {'post_reset_redirect': 'user:password_reset_done',
         'template_name': 'user/password_reset_form.html',
         'email_template_name': 'user/password_reset_email.html',
         'subject_template_name': 'user/password_reset_subject.txt'},
        name='password_reset'),
    url(r'^password_reset/done/$', auth.password_reset_done,
        {'template_name': 'user/password_reset_done.html'},
        name='password_reset_done'),
    # uidb64/token pattern matches the links generated in the reset e-mail.
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        auth.password_reset_confirm,
        {'post_reset_redirect':'user:password_reset_complete',
         'template_name': "user/password_reset_confirm.html"},
        name='password_reset_confirm'),
    url(r'^reset/done/$', auth.password_reset_complete,
        {'template_name': 'user/password_reset_complete.html'},
        name='password_reset_complete'),
    # profile
    url(r'^basic/$', views.BasicInfo.as_view(), name="basic"),
]
1,897
622
#!/usr/bin/env python3 import subprocess import re import argparse def get_arguments(): parser = argparse.ArgumentParser() parser.add_argument("-i", "--interface", dest="interface", help="interface to change mac address") parser.add_argument("-m", "--mac", dest="new_mac", help="value of new mac address") options = parser.parse_args() if not options.interface: parser.error("Please enter interface, use --help for more information") elif not options.new_mac: parser.error( "Please enter new MAC address use --help for more information") return options def change_mac(interface, new_mac): print(f"[+] Changing mac address for {interface} to {new_mac}") subprocess.call(["sudo", "ifconfig", interface, "down"]) subprocess.call(["sudo", "ifconfig", interface, "hw", "ether", new_mac]) subprocess.call(["sudo", "ifconfig", interface, "up"]) def get_current_mac(interface): ifconfig_result = str(subprocess.check_output( ["sudo", "ifconfig", interface])) search_result = re.search( r"\w\w:\w\w:\w\w:\w\w:\w\w:\w\w", ifconfig_result) if search_result: return search_result.group(0) else: print("[-] Could not read mac address") if __name__ == "__main__": options = get_arguments() current_mac = get_current_mac(options.interface) print(f"Current Mac:{current_mac}") change_mac(options.interface, options.new_mac) current_mac = get_current_mac(options.interface) if current_mac == options.new_mac: print(f"[+] MAC address was successfully changed to {current_mac}") else: print("[-] MAC address did not change")
1,728
521
import tensorflow as tf
import numpy as np
import time
import utils

path = r'data/'
x, y = utils.reload_data(path)
# One input channel per 1-D sample of length x[0].shape[0].
inp_shape = (x[0].shape[0],1)
x = np.array(x).reshape(-1, 1000, 1)# change 1000 to your sample lenght if you changed frame (= CHUNK ) or RESOLUTION

# prepared for testing and evaluating. try other combinations of architecture
dense_layers = [1]
conv_sizes = [64]
conv_layers = [2]
dense_layer_sizes = [256]
kernel = 10
pool_size = 4
_batchs = 5
_epochs = 10

# Grid search over all hyper-parameter combinations (currently a single one).
for dense_layer in dense_layers:
    for conv_layer in conv_layers:
        for dense_size in dense_layer_sizes:
            for conv_size in conv_sizes:
                # Run name encodes the hyper-parameters; the final field is a
                # timestamp that keeps TensorBoard run directories unique.
                NAME = '{}-conv_layers-{}-dense_layers-{}-conv_size-{}-dense_size-{}-kernel-{}'.format(conv_layer,dense_layer,conv_size, dense_size,kernel, int(time.time()))
                model = tf.keras.Sequential()
                # First conv block carries the input shape.
                model.add(tf.keras.layers.Conv1D(conv_size, kernel, activation='relu', input_shape = inp_shape))
                model.add(tf.keras.layers.MaxPooling1D(pool_size))
                for i in range(conv_layer-1):
                    model.add(tf.keras.layers.Conv1D(conv_size, kernel, activation='relu'))
                    model.add(tf.keras.layers.MaxPooling1D(pool_size))
                model.add(tf.keras.layers.Flatten())
                for _ in range(dense_layer):
                    model.add(tf.keras.layers.Dense(dense_size, activation='relu'))
                # Single sigmoid output: binary classification.
                model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
                model.compile(loss = 'binary_crossentropy', optimizer='adam', metrics=['accuracy'])
                tensorboard = tf.keras.callbacks.TensorBoard(log_dir='model_evaluate/{}'.format(NAME))
                print(NAME)
                model.fit(x,y, batch_size = _batchs, epochs=_epochs, validation_split = 0.2, callbacks=[tensorboard])
                model.save('trained_models/{}.h5'.format(NAME))
1,916
657
# Generated by Django 3.1.6 on 2021-02-12 07:47
# Auto-generated initial migration for the ice_creams app -- do not hand-edit;
# create a follow-up migration for schema changes instead.

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Flavor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
                ('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
                ('is_active', models.BooleanField(default=True)),
                ('datetime_deleted', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(max_length=128)),
                ('description', models.TextField(blank=True, default='')),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_flavor_created_set', to=settings.AUTH_USER_MODEL)),
                ('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_flavor_deleted_set', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_flavor_modified_set', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Topping',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
                ('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
                ('is_active', models.BooleanField(default=True)),
                ('datetime_deleted', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(max_length=128)),
                ('description', models.TextField(blank=True, default='')),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_topping_created_set', to=settings.AUTH_USER_MODEL)),
                ('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_topping_deleted_set', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_topping_modified_set', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='IceCreamServing',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
                ('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
                ('is_active', models.BooleanField(default=True)),
                ('datetime_deleted', models.DateTimeField(blank=True, null=True)),
                ('name', models.CharField(blank=True, default='', max_length=128)),
                ('description', models.TextField(blank=True, default='')),
                ('category', models.IntegerField(choices=[(1, 'In Cone'), (2, '500ml Tub'), (3, '1L Tub'), (4, '2L Tub')])),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecreamserving_created_set', to=settings.AUTH_USER_MODEL)),
                ('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecreamserving_deleted_set', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecreamserving_modified_set', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='IceCream',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
                ('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
                ('is_active', models.BooleanField(default=True)),
                ('datetime_deleted', models.DateTimeField(blank=True, null=True)),
                ('order', models.IntegerField()),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecream_created_set', to=settings.AUTH_USER_MODEL)),
                ('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecream_deleted_set', to=settings.AUTH_USER_MODEL)),
                ('flavor', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ice_creams.flavor')),
                ('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecream_modified_set', to=settings.AUTH_USER_MODEL)),
                ('serving', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ice_creams.icecreamserving')),
                # NOTE(review): `toppings` is a ForeignKey (one topping per ice
                # cream) despite the plural name -- confirm a ManyToManyField
                # was not intended in the model.
                ('toppings', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ice_creams.topping')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
6,163
1,868
import logging

from common.api_handlers import handle_request
from common.packet import Packet
from common.response import Response
from common.transport.protocol import TCPProtocol
from game.models.world import WORLD
from game.session import GameSession
from game.states import Connected

LOG = logging.getLogger(f"l2py.{__name__}")


class Lineage2GameProtocol(TCPProtocol):
    """TCP protocol handler for the Lineage 2 game server port.

    Each connection gets a GameSession; incoming packets are dispatched
    through handle_request and the resulting response (plus any follow-up
    packets produced by its after-actions) is written back.
    """

    session_cls = GameSession

    def connection_made(self, transport):
        # New client connected: log the peer and move the session into the
        # Connected state.
        super().connection_made(transport)
        LOG.info(
            "New connection from %s:%s",
            *self.transport.peer,
        )
        self.session.set_state(Connected)

    @TCPProtocol.make_async
    async def data_received(self, data: bytes):
        """Decode an inbound packet, dispatch it, and send the response."""
        request = self.transport.read(data)
        response = await handle_request(request)
        if response:
            LOG.debug(
                "Sending packet to %s:%s",
                *self.transport.peer,
            )
            self.transport.write(response)
            # Run deferred actions; any that yield a Packet is sent as a
            # follow-up response on the same session.
            for action in response.actions_after:
                action_result = await action
                if isinstance(action_result, Packet):
                    self.transport.write(Response(action_result, self.session))

    def connection_lost(self, exc) -> None:
        # Always log the character out, whether the close was clean or not.
        self.session.logout_character()
1,306
351
import fileinput

# "day6.txt"
# Each blank-line-separated paragraph is one group; each word is one
# person's set of answered questions.
raw = ''.join(fileinput.input())
groups = [chunk.split() for chunk in raw.split('\n\n')]

# part 1: questions answered by ANYONE in each group
anyone_total = 0
for group in groups:
    answered = set()
    for person in group:
        answered.update(person)
    anyone_total += len(answered)
print(anyone_total)

# part 2: questions answered by EVERYONE in each group
everyone_total = 0
for group in groups:
    common = set(group[0])
    for person in group[1:]:
        common &= set(person)
    everyone_total += len(common)
print(everyone_total)
292
113
'''Consider the problem of computing the absolute value of a real number.
The absolute value of a real number x is f(x) = x if x >= 0, or f(x) = -x
if x < 0. This program reads a floating-point number x, computes and
prints its absolute value.'''


def absolute_value(x):
    """Return |x|.

    Improvement over the original (x**2)**(1/2): abs() is exact and cannot
    overflow -- squaring first turns large magnitudes (e.g. 1e200) into inf
    and introduces rounding error.
    """
    return abs(x)


if __name__ == "__main__":
    # Read one float from stdin and print it with its absolute value.
    x = float(input())
    y = absolute_value(x)
    print("|{:.2f}| = {:.2f}".format(x, y))
370
144
import torch
import warnings


def newton_raphson(fn, x0, linsolver = "lu", rtol = 1e-6, atol = 1e-10, miter = 100):
    """
    Solve a nonlinear system with Newton's method. Return the
    solution and the last Jacobian.

    Note: x0 is updated in place (``x -= ...``).

    Args:
        fn: function that returns the residual and Jacobian
        x0: starting point
        linsolver (optional): method used for the linear solve ("lu" or "diag")
        rtol (optional): nonlinear relative tolerance
        atol (optional): nonlinear absolute tolerance
        miter (optional): maximum number of nonlinear iterations
    """
    x = x0
    R, J = fn(x)

    nR = torch.norm(R, dim = -1)
    nR0 = nR
    i = 0

    while (i < miter) and torch.any(nR > atol) and torch.any(nR / nR0 > rtol):
        # Bug fix: the chosen linear solver was previously ignored -- the
        # call always fell back to the default "lu".
        x -= solve_linear_system(J, R, method = linsolver)
        R, J = fn(x)
        nR = torch.norm(R, dim = -1)
        i += 1

    if i == miter:
        warnings.warn("Implicit solve did not succeed. Results may be inaccurate...")

    return x, J


def solve_linear_system(A, b, method = "lu"):
    """
    Solve or iterate on a linear system of equations.

    Args:
        A: block matrix
        b: block RHS
        method (optional): "diag" divides by the diagonal of A (exact only
            for diagonal A); "lu" performs a dense solve.

    Raises:
        ValueError: for an unknown method name.
    """
    if method == "diag":
        return b / torch.diagonal(A, dim1=-2, dim2=-1)
    elif method == "lu":
        return torch.linalg.solve(A, b)
    else:
        raise ValueError("Unknown solver method!")
1,394
492
# Time: O(n)
# Space: O(1)


class Solution(object):
    def numSub(self, s):
        """
        Count (mod 1e9+7) the substrings of s consisting only of '1's.

        A run of k consecutive '1's contributes k*(k+1)/2 substrings, which
        equals the sum 1 + 2 + ... + k accumulated one character at a time.

        :type s: str
        :rtype: int
        """
        MOD = 10**9 + 7
        total = 0
        run_length = 0
        for ch in s:
            if ch == '1':
                run_length += 1
            else:
                run_length = 0
            total = (total + run_length) % MOD
        return total
323
117
import logging
import datetime
import asyncio

from edgefarm_application.base.application_module import application_module_network_nats
from edgefarm_application.base.avro import schemaless_decode
from run_task import run_task
from state_tracker import StateTracker
from schema_loader import schema_load

_logger = logging.getLogger(__name__)

# NATS subject on which trains publish their seat-reservation status.
_state_report_subject = "public.seatres.status"


class SeatResTrainMonitor:
    """Tracks the seat-reservation state of a single train.

    Combines the state reported by the train with an online/offline watchdog:
    a train that has not reported for more than 10 seconds is marked OFFLINE.
    """

    def __init__(self, train_id, q):
        self.train_id = train_id
        self.edge_report_ts = None  # timestamp of the last report from this train
        # this is the combined state from the train and the train online state
        self.state = StateTracker(
            "TrainSeatRes",
            {
                "UNKNOWN": "unknown",
                "OFFLINE": "offline",
                "ONLINE-UNKNOWN": "online, unclear state",
                "ONLINE-NOK": "online, but not ok",
                "ONLINE-OK": "online, ok",
            },
        )
        # this is just the online state of the train
        self.state_online = StateTracker(
            "Train-Online-Monitor",
            {
                "UNKNOWN": "train state unknown",
                "OFFLINE": "train is offline",
                "ONLINE": "train is online",
            },
        )
        self._q = q  # event queue shared with the collector
        self._task = asyncio.create_task(run_task(_logger, q, self._watchdog))

    async def start(self):
        """Initialise both trackers to UNKNOWN and emit the first event."""
        self.state.update("UNKNOWN")
        await self.state_online.update_and_send_event("UNKNOWN", self._send_event)

    def stop(self):
        """Cancel the background watchdog task."""
        self._task.cancel()

    async def update_edge_state(self, state):
        """Record a status report from the train.

        `state` encoding: -1 = unknown, 0 = not ok, 1 = ok.
        """
        self.edge_report_ts = datetime.datetime.now()
        # NOTE(review): `up_state` stays unbound for values outside {-1,0,1}
        # and would raise UnboundLocalError below -- confirm producers never
        # send other values.
        if state == -1:
            up_state = "ONLINE-UNKNOWN"
        elif state == 0:
            up_state = "ONLINE-NOK"
        elif state == 1:
            up_state = "ONLINE-OK"
        self.state.update(up_state)
        await self.state_online.update_and_send_event("ONLINE", self._send_event)

    async def _watchdog(self):
        # Mark the train OFFLINE when no report arrived for >10 seconds;
        # polls once per second.
        while True:
            now = datetime.datetime.now()
            if self.edge_report_ts is not None:
                if (now - self.edge_report_ts).total_seconds() > 10:
                    self.state.update("OFFLINE")
                    await self.state_online.update_and_send_event(
                        "OFFLINE", self._send_event
                    )
            await asyncio.sleep(1)

    async def _send_event(self, data):
        # Tag every outgoing event with this monitor's train id.
        data["train_id"] = self.train_id
        await self._q.put(data)


class TrainStatusCollector:
    """
    Collect seat reservation system status of all trains.
    The individual trains report their SeatRes state via Nats subject
    'public.seatres.status' to this module.
    """

    def __init__(self, q):
        self._nc = application_module_network_nats()
        self._q = q
        self._state_report_codec = schema_load(__file__, "system_status")
        self._trains = {}  # train_id -> SeatResTrainMonitor

    async def start(self):
        """Subscribe to the train status subject."""
        self._state_report_subscription_id = await self._nc.subscribe(
            _state_report_subject, cb=self._state_report_handler
        )

    async def stop(self):
        """Unsubscribe and stop all per-train monitors."""
        await self._nc.unsubscribe(self._state_report_subscription_id)
        for v in self._trains.values():
            v.stop()

    async def add_train(self, train_id):
        """Return the monitor for `train_id`, creating and starting one if new."""
        if train_id not in self._trains.keys():
            v = SeatResTrainMonitor(train_id, self._q)
            self._trains[train_id] = v
            await v.start()
        else:
            v = self._trains[train_id]
        return v

    def trains(self):
        """All known per-train monitors."""
        return self._trains.values()

    async def _state_report_handler(self, nats_msg):
        """
        Called when a NATS message is received on _state_report_subject
        """
        reply_subject = nats_msg.reply
        msg = schemaless_decode(nats_msg.data, self._state_report_codec)
        _logger.debug(f"state report received msg {msg}")
        train_id = msg["data"]["trainId"]
        try:
            v = self._trains[train_id]
            await self._update_edge_state(v, msg)
        except KeyError:
            # First report from this train: register it on the fly.
            _logger.info(f"received state report from new train {train_id}")
            v = await self.add_train(train_id)
            await self._update_edge_state(v, msg)
        # Acknowledge the report with an empty reply.
        await self._nc.publish(reply_subject, b"")

    async def _update_edge_state(self, v, msg):
        # Forward the numeric status field; log (don't raise) if it is missing.
        try:
            await v.update_edge_state(msg["data"]["status"])
        except KeyError:
            _logger.error(f"couldn't find [data][status] in {msg}")
4,531
1,367
# Demo of the termcolor package: coloured terminal output.
from termcolor import colored, cprint
import sys

# Build a coloured string, then print it normally.
text = colored('Hello, World!', 'red', attrs=['reverse', 'blink'])
print(text)
# Print directly with foreground green on a red background.
cprint('Hello, World!', 'green', 'on_red')

# Print 0-9 on one line in magenta.
for i in range(10):
    cprint(i, 'magenta', end=' ')

# Bold red text, explicitly routed to stdout.
cprint("Attention!",'red', attrs=['bold'], file=sys.stdout)
302
113
# Submit the training script as an AzureML experiment run on a compute cluster.
import sys
from azureml.core import Workspace, Experiment, Environment, ScriptRunConfig
from azureml.core.compute import ComputeTarget, AmlCompute
from azureml.core.compute_target import ComputeTargetException
from shutil import copy

ws = Workspace.from_config()

# Choose a name for your CPU cluster
# cpu_cluster_name = "cpucluster"
# NOTE(review): despite the variable name, this currently targets "gpucompute".
cpu_cluster_name = "gpucompute"
experiment_name = "main"
src_dir = "model"
script = "train.py"  # NOTE(review): unused -- ScriptRunConfig below hardcodes "./training/train.py"; confirm which is intended

# Verify that cluster does not exist already
try:
    cpu_cluster = ComputeTarget(workspace=ws, name=cpu_cluster_name)
    print('Found existing cluster, use it.')
except ComputeTargetException:
    # No cluster with this name yet: provision one and wait for it.
    compute_config = AmlCompute.provisioning_configuration(vm_size='Standard_DS12_v2',
                                                           max_nodes=4)
    cpu_cluster = ComputeTarget.create(ws, cpu_cluster_name, compute_config)
    cpu_cluster.wait_for_completion(show_output=True)

experiment = Experiment(workspace=ws, name=experiment_name)

# Ship the workspace config alongside the training source directory.
copy('./config.json', 'model/config.json')

myenv = Environment.from_pip_requirements(name="myenv", file_path="requirements.txt")
myenv.environment_variables['PYTHONPATH'] = './model'
myenv.environment_variables['RUNINAZURE'] = 'true'

config = ScriptRunConfig(source_directory=src_dir,
                         script="./training/train.py",
                         arguments=sys.argv[1:] if len(sys.argv) > 1 else None,
                         compute_target=cpu_cluster_name,
                         environment=myenv)

run = experiment.submit(config)
aml_url = run.get_portal_url()
print(aml_url)
1,597
476
import cpuinfo


def pytest_benchmark_update_json(config, benchmarks, output_json):
    """Calculate compression/decompression speed and add as extra_info.

    For every benchmark that recorded a `data_size` (bytes processed per
    round), stores bytes-per-second under extra_info["rate"], based on the
    mean round time.
    """
    for benchmark in output_json["benchmarks"]:
        extra_info = benchmark["extra_info"]
        if "data_size" in extra_info:
            # Direct indexing: the membership test above guarantees the key
            # exists, so the previous `.get("data_size", 0.0)` default was
            # dead code.
            # NOTE(review): assumes stats["mean"] > 0 -- a zero mean would
            # raise ZeroDivisionError; confirm upstream guarantees this.
            extra_info["rate"] = extra_info["data_size"] / benchmark["stats"]["mean"]


def pytest_benchmark_update_machine_info(config, machine_info):
    """Record a human-readable CPU brand and clock speed in machine_info.

    Falls back to "<count> core(s) <arch> CPU" when the CPU brand string is
    unavailable (e.g. on some ARM platforms).
    """
    cpu_info = cpuinfo.get_cpu_info()
    brand = cpu_info.get("brand_raw", None)
    if brand is None:
        brand = "{} core(s) {} CPU ".format(cpu_info.get("count", "unknown"),
                                            cpu_info.get("arch", "unknown"))
    machine_info["cpu"]["brand"] = brand
    machine_info["cpu"]["hz_actual_friendly"] = cpu_info.get("hz_actual_friendly", "unknown")
818
264
import time
import gcld3

# Language detector over at most the first 1000 bytes of the input text.
detector = gcld3.NNetLanguageIdentifier(min_num_bytes=0, max_num_bytes=1000)

# text = "This text is written in English"
text = "薄雾"

# Re-detect the same text forever (simple behaviour/throughput probe);
# runs until interrupted.
while True:
    result = detector.FindLanguage(text=text)
    print(text, result.probability, result.language)
    time.sleep(0.01)
331
116
'''
Next Permutation (LeetCode 31):
Rearrange the given sequence of numbers into the lexicographically next
greater permutation. If no greater permutation exists, rearrange into the
smallest one (ascending order). Must be done in place using only constant
extra space.

Examples:
    [1,2,3] -> [1,3,2]
    [3,2,1] -> [1,2,3]
    [1,1,5] -> [1,5,1]
    [1]     -> [1]

Constraints: 1 <= len(nums) <= 100, 0 <= nums[i] <= 100
'''
# Bug fix: `List` was used in the annotation below without being imported,
# so importing this module raised NameError.
from typing import List


class Solution:
    def nextPermutation(self, nums: List[int]) -> None:
        """
        Do not return anything, modify nums in-place instead.
        """
        n = len(nums)
        if n <= 1:
            return nums
        # Scan from the right for the first adjacent pair with
        # nums[i - 1] < nums[i] (the "pivot" is at i - 1).
        i = n - 1
        while i > 0:
            if nums[i] > nums[i - 1]:
                # Scan the suffix [i, n-1] from the right for the first
                # element larger than the pivot, swap them, then sort the
                # suffix ascending (it is descending, so reversing suffices).
                j = n - 1
                while j >= i:
                    if nums[j] > nums[i - 1]:
                        self.exchangeVal(nums, i - 1, j)
                        self.reverseArr(nums, i, n - 1)
                        return
                    j -= 1
            i -= 1
        # Entirely non-increasing: this is the largest permutation, so wrap
        # around to the smallest by reversing the whole array.
        self.reverseArr(nums, 0, n - 1)

    def exchangeVal(self, arr, left, right):
        # Swap two elements in place.
        arr[left], arr[right] = arr[right], arr[left]

    def reverseArr(self, arr, begin, end):
        # Reverse arr[begin:end + 1] in place.
        while begin < end:
            self.exchangeVal(arr, begin, end)
            begin += 1
            end -= 1


if __name__ == '__main__':
    points = [1, 2, 3]
    ins = Solution()
    ins.nextPermutation(points)
    print(points)
1,593
819
# script to generate the overview and individual html report website.
import os
import numpy


def main(result_folder, name, header_comp):
    """Write a per-scan `results.html` into `result_folder`.

    Reads the `report_*` file in the folder (whitespace-separated key/value
    pairs), prepends the shared menu HTML and emits pSignal/pNoise/bSNR
    values, a DICOM header comparison table and links to pre-rendered images.
    Does nothing when `results.html` already exists.

    Args:
        result_folder: folder containing the report file; output goes here too.
        name: scan name, used in headings and image file names.
        header_comp: iterable of (field, reference, actual) triples, or falsy
            when there is nothing to compare.
    """
    menu_html_file_path = '/home/brain/qa/html/menu_html.html'
    menu_html_file = open(menu_html_file_path, 'r')
    menu_html = menu_html_file.readlines()
    menu_html_file.close()
    result_folder_list = os.listdir(result_folder)
    # Only generate once per folder.
    if not 'results.html' in result_folder_list:
        report_file_name = ''
        for i in result_folder_list:
            if i.startswith('report_'):
                report_file_name = i
        report_file = open(result_folder +'/'+ report_file_name,'r')
        html_file = open(result_folder +'/results.html','w')
        #read reportfile
        # Flatten the report into a word list; values are read positionally
        # (indices 1, 3, 5 below).
        report_file_list = []
        for line in report_file:
            for word in line.split():
                report_file_list.append(word)
        html_file.writelines(menu_html)
        html_file.write('\t\t<h1 style="margin-top:80px;">Result structural-data '+name+'</h1>\n')
        html_file.write('\t\t<table>\n\t\t\t<tr bgcolor=#f6f6f6><td><b>pSignal</b></td><td>'+report_file_list[1]+'</td></tr>\n')
        html_file.write('\t\t\t<tr bgcolor=#ffffff><td><b>pNoise</b></td><td>'+report_file_list[3]+'</td></tr>\n')
        html_file.write('\t\t\t<tr bgcolor=#f6f6f6><td><b>bSNR</b></td><td>'+report_file_list[5]+'</td></tr>\n\t\t</table>\n')
        html_file.write('\t\t<h2>Header comparison </h1>\n')
        if not header_comp:
            html_file.write('\t\t<p>No differences between headers or no DICOM file to compare available.</p>\n')
        else:
            html_file.write('\t\t<table>\n\t\t\t<th><td colspan="3"><b>DICOM header comparison</b></td></th>\n')
            html_file.write('\t\t\t<tr><td><b>Field name</b></td><td><b>Reference value</b></td><td><b>Value in data</b></td></tr>\n')
            for k in header_comp:
                try:
                    html_file.write('\t\t\t<tr><td><i>'+str(k[0])+'</i></td><td>'+str(k[1])+'</td><td>'+str(k[2])+'</td></tr>\n')
                except:
                    # NOTE(review): Python-2 remnant -- concatenating str with
                    # the bytes from .encode('utf-8') raises TypeError on
                    # Python 3, so this fallback cannot succeed there.
                    html_file.write('\t\t\t<tr><td><i>'+str(k[0])+'</i></td><td>'+k[1].encode('utf-8')+'</td><td>'+k[2].encode('utf-8')+'</td></tr>\n')
            html_file.write('\t\t</table>\n')
        # Image links; the PNG files are produced elsewhere in the pipeline.
        html_file.write('\t\t<h2>Histogram of pNoise</h2>\n\t\t<p><img src="histogram_'+name+'.png" alt="histogram_of_noise"</p>\n')
        html_file.write('\t\t<h2>Histogram of pNoise (intensity > 30 )</h2>\n\t\t<p><img src="histogram_upper_values_'+name+'.png" alt="histogram_of_noise"</p>\n')
        html_file.write('\t\t<h2>Background mask slice 0</h2>\n\t\t<p><img src="slice0.png" alt="slice0"</p>\n')
        html_file.write('\t\t<h2>Background mask slice 25%</h2>\n\t\t<p><img src="slice25p.png" alt="slice25p"</p>\n')
        html_file.write('\t\t<h2>Background mask slice 50%</h2>\n\t\t<p><img src="slice50p.png" alt="slice50p"</p>\n')
        html_file.write('\t\t<h2>Background mask slice 75%</h2>\n\t\t<p><img src="slice75p.png" alt="slice75p"</p>\n')
        html_file.write('\t\t<h2>Background mask last slice </h2>\n\t\t<p><img src="sliceend.png" alt="sliceend"</p>\n')
        html_file.write('\t</body>\n')
        html_file.write('</html>')
        html_file.close()


def generate_overview_html(result_folder,human_structural_settings):
    """Write `overview.html` linking all per-scan result pages.

    Scan results three directory levels deep under `result_folder` are
    listed; a scan whose pSignal/pNoise/bSNR falls outside the accepted
    range gets a warning icon. Ranges come either from the data itself
    (mean +/- multiplier*std over all scans, "automatic" mode) or from
    fixed values in `human_structural_settings`.

    Args:
        result_folder: root folder of the structural results tree.
        human_structural_settings: indexable settings; index 8 selects
            automatic mode (non-zero) and index 9 the std multiplier;
            indices 1-6 hold fixed mean/range pairs for signal, noise, SNR.
    """
    menu_html_file_path = '/home/brain/qa/html/menu_html.html'
    menu_html_file = open(menu_html_file_path, 'r')
    menu_html = menu_html_file.readlines()
    menu_html_file.close()
    result_file = open(result_folder+'overview.html','w')
    result_file.writelines(menu_html)
    result_file.write('\t\t<h1 style="margin-top:80px;">Structural Results Overview</h1>\n')
    result_file.write('\t\t<h2>Primitive mean intensity of brainmask (pSignal)</h2>\n\t\t<p><img src="pMean.png" alt="Mean intensity of brainmask"></p>\n')
    result_file.write('\t\t<h2>standard deviation of background (pNoise)</h2>\n\t\t<p><img src="pNoise.png" alt="Std of background"></p>\n')
    result_file.write('\t\t<h2>Signal to noise ratio (bSNR)</h2>\n\t\t<p><img src="bSNR.png" alt="Signal to noise ratio"></p>\n')
    if human_structural_settings[8] == 0:
        automatic_flag = False
    else:
        automatic_flag = True
    std_automatic_multiplier = human_structural_settings[9]
    if automatic_flag:
        # Automatic thresholds: gather every report's values first.
        os.chdir('/home/brain/qa/html/results/structural/')
        result_folder_list = os.listdir(result_folder)
        names = []
        mean = []
        noise = []
        snr = []
        # Tree layout: <scanner>/<date>/<scan>/report_* (three levels deep).
        for i in result_folder_list:
            if os.path.isdir(result_folder+i):
                sub_result_folder_list = os.listdir(result_folder+i)
                for j in sub_result_folder_list:
                    sub_sub_result_folder_list = os.listdir(result_folder+i+'/'+j)
                    for k in sub_sub_result_folder_list:
                        sub_sub_sub_result_folder_list = os.listdir(result_folder+i+'/'+j+'/'+k)
                        for l in sub_sub_sub_result_folder_list:
                            if l.startswith('report'):
                                names.append(i+'_'+j+'\n'+k)
                                report_file = open(result_folder+i+'/'+j+'/'+k+'/'+l,'r')
                                for data in report_file:
                                    values = data.split()
                                    if values[0].startswith('Mean_'):
                                        mean.append(float(values[1]))
                                    if values[0].startswith('Std_'):
                                        noise.append(float(values[1]))
                                    if values[0].startswith('SNR'):
                                        snr.append(float(values[1]))
                                report_file.close()
        # Accepted band: population mean +/- multiplier * std.
        auto_mean_mean = numpy.mean(mean)
        auto_mean_std = numpy.std(mean) * float(std_automatic_multiplier)
        auto_noise_mean = numpy.mean(noise)
        auto_noise_std = numpy.std(noise) * float(std_automatic_multiplier)
        auto_snr_mean = numpy.mean(snr)
        auto_snr_std = numpy.std(snr) * float(std_automatic_multiplier)
        plus_settings_mean = auto_mean_mean + auto_mean_std
        minus_settings_mean = auto_mean_mean - auto_mean_std
        plus_settings_noise = auto_noise_mean+ auto_noise_std
        minus_settings_noise = auto_noise_mean - auto_noise_std
        plus_settings_snr = auto_snr_mean + auto_snr_std
        minus_settings_snr = auto_snr_mean - auto_snr_std
    else:
        # Fixed thresholds straight from the settings.
        mean_mean = float(human_structural_settings[1])
        range_mean = float(human_structural_settings[2])
        noise_mean = float(human_structural_settings[3])
        range_noise = float(human_structural_settings[4])
        snr_mean = float(human_structural_settings[5])
        range_snr = float(human_structural_settings[6])
        plus_settings_mean = mean_mean + range_mean
        minus_settings_mean = mean_mean - range_mean
        plus_settings_noise = noise_mean+ range_noise
        minus_settings_noise = noise_mean - range_noise
        plus_settings_snr = snr_mean + range_snr
        minus_settings_snr = snr_mean - range_snr
    # Second pass: emit one linked list entry per scan, flagging outliers.
    result_folder_list = os.listdir(result_folder)
    for i in result_folder_list:
        if os.path.isdir(result_folder+i):
            result_file.write('\t\t<h2>'+i+'</h2>\n')
            result_sub_folder_list = os.listdir(result_folder+i)
            result_sub_folder_list .sort()
            for j in result_sub_folder_list:
                result_file.write('\t\t<h3>'+j+'</h3>\n')
                result_sub_sub_folder_list = os.listdir(result_folder+i+'/'+j)
                result_sub_sub_folder_list .sort()
                result_file.write('\t\t<ul>\n')
                for k in result_sub_sub_folder_list:
                    sub_sub_sub_result_folder_list = os.listdir(result_folder+i+'/'+j+'/'+k)
                    mean = 0
                    noise = 0
                    snr = 0
                    for l in sub_sub_sub_result_folder_list:
                        if l.startswith('report'):
                            report_file = open(result_folder+i+'/'+j+'/'+k+'/'+l,'r')
                            for data in report_file:
                                values = data.split()
                                if values[0].startswith('Mean_'):
                                    mean = float(values[1])
                                if values[0].startswith('Std_'):
                                    noise = float(values[1])
                                if values[0].startswith('SNR'):
                                    snr = float(values[1])
                            report_file.close()
                    # Warning icon when any metric leaves its accepted band.
                    if(mean > plus_settings_mean) or (mean < minus_settings_mean) or (noise > plus_settings_noise) or (noise < minus_settings_noise) or (snr > plus_settings_snr) or (snr < minus_settings_snr):
                        result_file.write('\t\t\t<li><img src="/warning.png"><a href="/results/structural/'+i+'/'+j+'/'+k+'/results.html">'+k+'</a></li>\n')
                    else:
                        result_file.write('\t\t\t<li><a href="/results/structural/'+i+'/'+j+'/'+k+'/results.html">'+k+'</a></li>\n')
                result_file.write('\t\t</ul>\n')
    result_file.write('\t</body>\n</html>')
    result_file.close()
7,906
3,561
class tabla_de_sesgos:
    """Table of biases: holds a bias value and its classification."""

    def __init__(self):
        self.sesgo = None           # bias value (unset until marked)
        self.clase_de_sesgo = None  # bias category (unset until classified)


class reproductor:
    """Media player: knows how to load a medium."""

    def __init__(self):
        pass

    def cargar_medio(self):
        """Load the medium to play. Stub: returns nothing yet."""
        pass


class registro_de_tiempos:
    """Time log: associates a medium with a time and a position."""

    def __init__(self):
        self.medio = None    # the medium the timestamps refer to
        self.tiempo = None   # time marker
        self.espacio = None  # spatial/position marker


class medio:
    """A medium: any combination of audio, video, text and image."""

    def __init__(self):
        self.audio = None
        self.video = None
        self.texto = None
        self.imagen = None


# NOTE: the base classes above must be defined before this subclass.  The
# original file declared `marcador` first, which raised
# NameError: name 'reproductor' is not defined as soon as the module was
# imported; reordering the definitions fixes that without changing any API.
class marcador(reproductor, tabla_de_sesgos, registro_de_tiempos, medio):
    """Marker: a player that can tag times, biases and a general idea."""

    def __init__(self):
        pass

    def cargar_medio(self):
        """Load the medium. Stub: returns nothing yet."""
        pass

    def marcar_tiempos(self):
        """Mark timestamps on the medium. Stub: returns nothing yet."""
        pass

    def marcar_sesgo(self):
        """Mark a bias occurrence. Stub: returns nothing yet."""
        pass

    def indicar_idea_general(self):
        """Indicate the general idea of the medium. Stub: returns nothing yet."""
        pass
786
358
"""Generates the supported SOP Classes.""" from collections import namedtuple import inspect import logging import sys from pydicom.uid import UID from pynetdicom3.service_class import ( VerificationServiceClass, StorageServiceClass, QueryRetrieveServiceClass, BasicWorklistManagementServiceClass, ) LOGGER = logging.getLogger('pynetdicom3.sop') def uid_to_service_class(uid): """Return the ServiceClass object corresponding to `uid`. Parameters ---------- uid : pydicom.uid.UID The SOP Class UID to find the corresponding Service Class. Returns ------- service_class.ServiceClass The Service Class corresponding to the SOP Class UID. Raises ------ NotImplementedError If the Service Class corresponding to the SOP Class `uid` hasn't been implemented. """ if uid in _VERIFICATION_CLASSES.values(): return VerificationServiceClass elif uid in _STORAGE_CLASSES.values(): return StorageServiceClass elif uid in _QR_CLASSES.values(): return QueryRetrieveServiceClass elif uid in _BASIC_WORKLIST_CLASSES.values(): return BasicWorklistManagementServiceClass else: raise NotImplementedError( "The Service Class for the SOP Class with UID '{}' has not " "been implemented".format(uid) ) SOPClass = namedtuple("SOPClass", ['uid', 'UID', 'service_class']) def _generate_sop_classes(sop_class_dict): """Generate the SOP Classes.""" for name in sop_class_dict: globals()[name] = SOPClass( UID(sop_class_dict[name]), UID(sop_class_dict[name]), uid_to_service_class(sop_class_dict[name]) ) # Generate the various SOP classes _VERIFICATION_CLASSES = { 'VerificationSOPClass' : '1.2.840.10008.1.1', } # pylint: disable=line-too-long _STORAGE_CLASSES = { 'ComputedRadiographyImageStorage' : '1.2.840.10008.5.1.4.1.1.1', 'DigitalXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.1.1', 'DigitalXRayImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.1.1.1.1', 'DigitalMammographyXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.1.2', 'DigitalMammographyXRayImageProcessingStorage' : 
'1.2.840.10008.5.1.4.1.1.1.2.1', 'DigitalIntraOralXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.1.3', 'DigitalIntraOralXRayImageProcessingStorage' : '1.2.840.10008.5.1.1.4.1.1.3.1', 'CTImageStorage' : '1.2.840.10008.5.1.4.1.1.2', 'EnhancedCTImageStorage' : '1.2.840.10008.5.1.4.1.1.2.1', 'LegacyConvertedEnhancedCTImageStorage' : '1.2.840.10008.5.1.4.1.1.2.2', 'UltrasoundMultiframeImageStorage' : '1.2.840.10008.5.1.4.1.1.3.1', 'MRImageStorage' : '1.2.840.10008.5.1.4.1.1.4', 'EnhancedMRImageStorage' : '1.2.840.10008.5.1.4.1.1.4.1', 'MRSpectroscopyStorage' : '1.2.840.10008.5.1.4.1.1.4.2', 'EnhancedMRColorImageStorage' : '1.2.840.10008.5.1.4.1.1.4.3', 'LegacyConvertedEnhancedMRImageStorage' : '1.2.840.10008.5.1.4.1.1.4.4', 'UltrasoundImageStorage' : '1.2.840.10008.5.1.4.1.1.6.1', 'EnhancedUSVolumeStorage' : '1.2.840.10008.5.1.4.1.1.6.2', 'SecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7', 'MultiframeSingleBitSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.1', 'MultiframeGrayscaleByteSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.2', 'MultiframeGrayscaleWordSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.3', 'MultiframeTrueColorSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.4', 'TwelveLeadECGWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.1.1', 'GeneralECGWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.1.2', 'AmbulatoryECGWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.1.3', 'HemodynamicWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.2.1', 'CardiacElectrophysiologyWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.3.1', 'BasicVoiceAudioWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.4.1', 'GeneralAudioWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.4.2', 'ArterialPulseWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.5.1', 'RespiratoryWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.6.1', 'GrayscaleSoftcopyPresentationStateStorage' : '1.2.840.10008.5.1.4.1.1.11.1', 'ColorSoftcopyPresentationStateStorage' : 
'1.2.840.10008.5.1.4.1.1.11.2', 'PseudocolorSoftcopyPresentationStageStorage' : '1.2.840.10008.5.1.4.1.1.11.3', 'BlendingSoftcopyPresentationStateStorage' : '1.2.840.10008.5.1.4.1.1.11.4', 'XAXRFGrayscaleSoftcopyPresentationStateStorage' : '1.2.840.10008.5.1.4.1.1.11.5', 'XRayAngiographicImageStorage' : '1.2.840.10008.5.1.4.1.1.12.1', 'EnhancedXAImageStorage' : '1.2.840.10008.5.1.4.1.1.12.1.1', 'XRayRadiofluoroscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.12.2', 'EnhancedXRFImageStorage' : '1.2.840.10008.5.1.4.1.1.12.2.1', 'XRay3DAngiographicImageStorage' : '1.2.840.10008.5.1.4.1.1.13.1.1', 'XRay3DCraniofacialImageStorage' : '1.2.840.10008.5.1.4.1.1.13.1.2', 'BreastTomosynthesisImageStorage' : '1.2.840.10008.5.1.4.1.1.13.1.3', 'BreastProjectionXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.13.1.4', 'BreastProjectionXRayImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.13.1.5', 'IntravascularOpticalCoherenceTomographyImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.14.1', 'IntravascularOpticalCoherenceTomographyImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.14.2', 'NuclearMedicineImageStorage' : '1.2.840.10008.5.1.4.1.1.20', 'ParametricMapStorage' : '1.2.840.10008.5.1.4.1.1.30', 'RawDataStorage' : '1.2.840.10008.5.1.4.1.1.66', 'SpatialRegistrationStorage' : '1.2.840.10008.5.1.4.1.1.66.1', 'SpatialFiducialsStorage' : '1.2.840.10008.5.1.4.1.1.66.2', 'DeformableSpatialRegistrationStorage' : '1.2.840.10008.5.1.4.1.1.66.3', 'SegmentationStorage' : '1.2.840.10008.5.1.4.1.1.66.4', 'SurfaceSegmentationStorage' : '1.2.840.10008.5.1.4.1.1.66.5', 'RealWorldValueMappingStorage' : '1.2.840.10008.5.1.4.1.1.67', 'SurfaceScanMeshStorage' : '1.2.840.10008.5.1.4.1.1.68.1', 'SurfaceScanPointCloudStorage' : '1.2.840.10008.5.1.4.1.1.68.2', 'VLEndoscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.1', 'VideoEndoscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.1.1', 'VLMicroscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.2', 'VideoMicroscopicImageStorage' : 
'1.2.840.10008.5.1.4.1.1.77.1.2.1', 'VLSlideCoordinatesMicroscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.3', 'VLPhotographicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.4', 'VideoPhotographicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.4.1', 'OphthalmicPhotography8BitImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.1', 'OphthalmicPhotography16BitImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.2', 'StereometricRelationshipStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.3', 'OpthalmicTomographyImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.4', 'WideFieldOpthalmicPhotographyStereographicProjectionImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.5', 'WideFieldOpthalmicPhotography3DCoordinatesImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.6', 'VLWholeSlideMicroscopyImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.6', 'LensometryMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.1', 'AutorefractionMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.2', 'KeratometryMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.3', 'SubjectiveRefractionMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.4', 'VisualAcuityMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.5', 'SpectaclePrescriptionReportStorage' : '1.2.840.10008.5.1.4.1.1.78.6', 'OpthalmicAxialMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.7', 'IntraocularLensCalculationsStorage' : '1.2.840.10008.5.1.4.1.1.78.8', 'MacularGridThicknessAndVolumeReport' : '1.2.840.10008.5.1.4.1.1.79.1', 'OpthalmicVisualFieldStaticPerimetryMeasurementsStorag' : '1.2.840.10008.5.1.4.1.1.80.1', 'OpthalmicThicknessMapStorage' : '1.2.840.10008.5.1.4.1.1.81.1', 'CornealTopographyMapStorage' : '1.2.840.10008.5.1.4.1.1.82.1', 'BasicTextSRStorage' : '1.2.840.10008.5.1.4.1.1.88.11', 'EnhancedSRStorage' : '1.2.840.10008.5.1.4.1.1.88.22', 'ComprehensiveSRStorage' : '1.2.840.10008.5.1.4.1.1.88.33', 'Comprehenseice3DSRStorage' : '1.2.840.10008.5.1.4.1.1.88.34', 'ExtensibleSRStorage' : '1.2.840.10008.5.1.4.1.1.88.35', 'ProcedureSRStorage' : 
'1.2.840.10008.5.1.4.1.1.88.40', 'MammographyCADSRStorage' : '1.2.840.10008.5.1.4.1.1.88.50', 'KeyObjectSelectionStorage' : '1.2.840.10008.5.1.4.1.1.88.59', 'ChestCADSRStorage' : '1.2.840.10008.5.1.4.1.1.88.65', 'XRayRadiationDoseSRStorage' : '1.2.840.10008.5.1.4.1.1.88.67', 'RadiopharmaceuticalRadiationDoseSRStorage' : '1.2.840.10008.5.1.4.1.1.88.68', 'ColonCADSRStorage' : '1.2.840.10008.5.1.4.1.1.88.69', 'ImplantationPlanSRDocumentStorage' : '1.2.840.10008.5.1.4.1.1.88.70', 'EncapsulatedPDFStorage' : '1.2.840.10008.5.1.4.1.1.104.1', 'EncapsulatedCDAStorage' : '1.2.840.10008.5.1.4.1.1.104.2', 'PositronEmissionTomographyImageStorage' : '1.2.840.10008.5.1.4.1.1.128', 'EnhancedPETImageStorage' : '1.2.840.10008.5.1.4.1.1.130', 'LegacyConvertedEnhancedPETImageStorage' : '1.2.840.10008.5.1.4.1.1.128.1', 'BasicStructuredDisplayStorage' : '1.2.840.10008.5.1.4.1.1.131', 'RTImageStorage' : '1.2.840.10008.5.1.4.1.1.481.1', 'RTDoseStorage' : '1.2.840.10008.5.1.4.1.1.481.2', 'RTStructureSetStorage' : '1.2.840.10008.5.1.4.1.1.481.3', 'RTBeamsTreatmentRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.4', 'RTPlanStorage' : '1.2.840.10008.5.1.4.1.1.481.5', 'RTBrachyTreatmentRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.6', 'RTTreatmentSummaryRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.7', 'RTIonPlanStorage' : '1.2.840.10008.5.1.4.1.1.481.8', 'RTIonBeamsTreatmentRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.9', 'RTBeamsDeliveryInstructionStorage' : '1.2.840.10008.5.1.4.34.7', 'GenericImplantTemplateStorage' : '1.2.840.10008.5.1.4.43.1', 'ImplantAssemblyTemplateStorage' : '1.2.840.10008.5.1.4.44.1', 'ImplantTemplateGroupStorage' : '1.2.840.10008.5.1.4.45.1' } _QR_CLASSES = { 'PatientRootQueryRetrieveInformationModelFind' : '1.2.840.10008.5.1.4.1.2.1.1', 'PatientRootQueryRetrieveInformationModelMove' : '1.2.840.10008.5.1.4.1.2.1.2', 'PatientRootQueryRetrieveInformationModelGet' : '1.2.840.10008.5.1.4.1.2.1.3', 'StudyRootQueryRetrieveInformationModelFind' : 
'1.2.840.10008.5.1.4.1.2.2.1', 'StudyRootQueryRetrieveInformationModelMove' : '1.2.840.10008.5.1.4.1.2.2.2', 'StudyRootQueryRetrieveInformationModelGet' : '1.2.840.10008.5.1.4.1.2.2.3', 'PatientStudyOnlyQueryRetrieveInformationModelFind' : '1.2.840.10008.5.1.4.1.2.3.1', 'PatientStudyOnlyQueryRetrieveInformationModelMove' : '1.2.840.10008.5.1.4.1.2.3.2', 'PatientStudyOnlyQueryRetrieveInformationModelGet' : '1.2.840.10008.5.1.4.1.2.3.3', } _BASIC_WORKLIST_CLASSES = { 'ModalityWorklistInformationFind' : '1.2.840.10008.5.1.4.31', } # pylint: enable=line-too-long _generate_sop_classes(_VERIFICATION_CLASSES) _generate_sop_classes(_STORAGE_CLASSES) _generate_sop_classes(_QR_CLASSES) _generate_sop_classes(_BASIC_WORKLIST_CLASSES) def uid_to_sop_class(uid): """Given a `uid` return the corresponding SOPClass. Parameters ---------- uid : pydicom.uid.UID Returns ------- sop_class.SOPClass subclass The SOP class corresponding to `uid`. Raises ------ NotImplementedError If the SOP Class corresponding to the given UID has not been implemented. """ # Get a list of all the class members of the current module members = inspect.getmembers( sys.modules[__name__], lambda mbr: isinstance(mbr, tuple) ) for obj in members: if hasattr(obj[1], 'uid') and obj[1].uid == uid: return obj[1] raise NotImplementedError("The SOP Class for UID '{}' has not been " \ "implemented".format(uid))
12,226
6,520
import os
from datetime import datetime

import numpy as np
import tensorflow as tf
from tensorflow.python.training import moving_averages

TF_DTYPE = tf.float64  # float precision for all variables and ops
MOMENTUM = 0.99        # decay for the batch-norm moving statistics
EPSILON = 1e-6         # numerical-stability constant for batch norm
DELTA_CLIP = 50.0      # NOTE(review): unused in this class -- presumably a loss-clipping bound used elsewhere; confirm


class FeedForwardModel():
    """
    Abstract class for creating neural networks.
    Offers functions to build or clone individual layers of complete networks.

    NOTE(review): `_batch_norm` reads `self._is_training` (a boolean
    placeholder/tensor), which is never assigned in this class -- a concrete
    subclass must define it before any network is built; confirm.
    """

    def __init__(self, bsde, run_name):
        """
        Args:
            bsde: problem definition; only `bsde.dim` is read here.
            run_name (str): suffix for the TensorBoard log directory.
        """
        self._bsde = bsde
        # ops for statistics update of batch normalization; collected by
        # _batch_norm and expected to be run alongside the train op
        self._extra_train_ops = []
        # timestamped log dir, e.g. <tensorboard_dir><run_name>_2020_01_31_12_00_00
        self.tb_dir = tf.app.flags.FLAGS.tensorboard_dir + run_name + "_" + datetime.now(
        ).strftime('%Y_%m_%d_%H_%M_%S')
        # os.mkdir raises if the directory already exists (same-second runs)
        os.mkdir(self.tb_dir)

    def _clone_subnetwork(self, input_, timestep, layer_count, weights):
        """ Clone a neural network, using the same weights as the source networks.

        Args:
            input_ (Tensor): Input of the neural network that will be build
            timestep (float): Time index, used for tensor names
            layer_count (int): number of layers in the neural network that
                should be cloned
            weights (np.array(size=[num_timesteps, layer_count])): weight
                database to copy from

        Returns:
            Tensor: Output of the last layer of the neural network
        """
        with tf.variable_scope(str(timestep)):
            # normalize the raw input exactly like the source network did
            hiddens = self._batch_norm(input_, name='path_input_norm')
            for i in range(1, layer_count - 1):
                hiddens = self._copy_batch_layer(hiddens, 'layer_{}'.format(i),
                                                 i, timestep, weights)
            # final layer has no ReLU, mirroring _subnetwork
            output = self._copy_batch_layer(hiddens, 'final_layer',
                                            layer_count - 1, timestep, weights)
        return output

    def _subnetwork(self, input_, timestep, num_hiddens):
        """ Generate a neural network

        Args:
            input_ (Tensor): Input of the neural network that will be build
            timestep (float): Time index, used for tensor name
            num_hiddens (np.array(size=[layer_count])): Specifies the number
                of additional dimensions for each layer of the neural network.

        Returns:
            Tensor: Output of the last layer of the neural network
            list[tf.Variable]: the Matrix weights of every dense layer, in
                order, so they can later be cloned via _clone_subnetwork
        """
        matrix_weights = []
        with tf.variable_scope(str(timestep)):
            # input norm
            hiddens = self._batch_norm(input_, name='path_input_norm')
            for i in range(1, len(num_hiddens) - 1):
                # each hidden layer is widened by the problem dimension
                hiddens, weight = self._dense_batch_layer(
                    hiddens,
                    num_hiddens[i] + self._bsde.dim,
                    activation_fn=tf.nn.relu,
                    layer_name='layer_{}'.format(i),
                )
                matrix_weights.append(weight)
            # last layer without relu
            output, weight = self._dense_batch_layer(
                hiddens,
                num_hiddens[-1] + self._bsde.dim,
                activation_fn=None,
                layer_name='final_layer',
            )
            matrix_weights.append(weight)
        return output, matrix_weights

    def _dense_batch_layer(self, input_, output_size, activation_fn=None,
                           stddev=5.0, layer_name="linear"):
        """ Generate one fully connected layer

        Args:
            input_ (Tensor): Input of layer
            output_size (int): Number of outputs this layer should have

        KwArgs:
            activation_fn (Function): activation function for the neurons in
                this layer. Will usually be ReLU, but can be left blank for
                the last layer.
            stddev (float): stddev to use for the initial distribution of
                weights in this layer
            layer_name (string): tensorflow name used for the variables in
                this layer

        Returns:
            Tensor: Output of the layer
            tf.Variable: Reference to the used Matrix weight
        """
        with tf.variable_scope(layer_name):
            shape = input_.get_shape().as_list()
            # Xavier-like scaling: stddev shrinks with fan-in + fan-out
            weight = tf.get_variable(
                'Matrix', [shape[1], output_size], TF_DTYPE,
                tf.random_normal_initializer(
                    stddev=stddev / np.sqrt(shape[1] + output_size)))
            # matrix weight (no bias term -- batch norm's beta plays that role)
            hiddens = tf.matmul(input_, weight)
            # batch norm
            hiddens_bn = self._batch_norm(hiddens)
        if activation_fn:
            return activation_fn(hiddens_bn), weight
        return hiddens_bn, weight

    def _copy_batch_layer(self, input_, layer_name, layer, timestep, weights):
        """ Copy one fully connected layer, reusing the weights of the
        previous layer

        Args:
            input_ (Tensor): Input of layer
            layer_name (string): tensorflow name used for the variables in
                this layer
            layer (int): index of the layer in the current timestep
            timestep (int): index of the current timestep
            weights (np.array(size=[num_timesteps, layer_count])): weight
                database to copy from

        Returns:
            Tensor: Output of the layer
        """
        with tf.variable_scope(layer_name):
            # init matrix weight with matrix weights from primal stage.
            # BUG FIX: the original called tf.Variable(init, 'Matrix'),
            # passing 'Matrix' as the positional `trainable` argument (a
            # truthy string) and leaving the variable unnamed; the intent
            # was clearly the variable *name*.
            weight = tf.Variable(weights[timestep - 1][layer - 1],
                                 name='Matrix')
            hiddens = tf.matmul(input_, weight)
            hiddens_bn = self._batch_norm(hiddens)
        return hiddens_bn

    def _batch_norm(self, input_, name='batch_norm'):
        """ Batch normalize the data

        Args:
            input_ (Tensor): Input of layer

        KwArgs:
            name (string): Used as tensorflow name

        Returns:
            Tensor: Output of the layer

        See https://arxiv.org/pdf/1502.03167v3.pdf p.3
        """
        with tf.variable_scope(name):
            params_shape = [input_.get_shape()[-1]]
            # learnable shift (beta) and scale (gamma)
            beta = tf.get_variable(
                'beta', params_shape, TF_DTYPE,
                initializer=tf.random_normal_initializer(
                    0.0, stddev=0.1, dtype=TF_DTYPE))
            gamma = tf.get_variable(
                'gamma', params_shape, TF_DTYPE,
                initializer=tf.random_uniform_initializer(
                    0.1, 0.5, dtype=TF_DTYPE))
            # non-trainable running statistics used at inference time
            moving_mean = tf.get_variable(
                'moving_mean', params_shape, TF_DTYPE,
                initializer=tf.constant_initializer(0.0, TF_DTYPE),
                trainable=False)
            moving_variance = tf.get_variable(
                'moving_variance', params_shape, TF_DTYPE,
                initializer=tf.constant_initializer(1.0, TF_DTYPE),
                trainable=False)
            # These ops will only be performed when training
            mean, variance = tf.nn.moments(input_, [0], name='moments')
            self._extra_train_ops.append(
                moving_averages.assign_moving_average(moving_mean, mean,
                                                      MOMENTUM))
            self._extra_train_ops.append(
                moving_averages.assign_moving_average(moving_variance,
                                                      variance, MOMENTUM))
            # batch statistics while training, running statistics otherwise
            mean, variance = tf.cond(self._is_training,
                                     lambda: (mean, variance),
                                     lambda: (moving_mean, moving_variance))
            hiddens_bn = tf.nn.batch_normalization(input_, mean, variance,
                                                   beta, gamma, EPSILON)
            hiddens_bn.set_shape(input_.get_shape())
            return hiddens_bn
7,885
2,140
from .dataset import load_pr, load_1dof, load_mvc, load_ndof __all__ = ["load_pr", "load_1dof", "load_mvc", "load_ndof"]
122
53
""" Fibonacci sequence using python generators Written by: Ian Doarn """ def fib(): # Generator that yields fibonacci numbers a, b = 0, 1 while True: # First iteration: yield a # yield 0 to start with and then a, b = b, a + b # a will now be 1, and b will also be 1, (0 + 1) if __name__ == '__main__': # Maximum fib numbers to print max_i = 20 for i, fib_n in enumerate(fib()): #Print each yielded fib number print('{i:3}: {f:3}'.format(i=i, f=fib_n)) # Break when we hit max_i value if i == max_i: break
619
218
# Glossary of basic programming terms, printed one entry per iteration.
# Each definition ends in '\n' so that print() (which appends its own
# newline) leaves a blank separator line between entries.
#
# FIX: the original keys 'intger' and 'concatinate' were misspelled and
# printed verbatim to the user; 'parse' was missing the trailing '\n',
# breaking the blank-line separation for the last entry.
glossary = {
    'integer': 'is colloquially defined as a number that can be written without a fractional component.\n',
    'iterate': 'is the repetition of a process in order to generate a sequence of outcomes.\n',
    'indentation': 'is an empty space at the beginning of a line that groups particular blocks of code.\n',
    'concatenate': 'is the operation of joining character strings end-to-end.\n',
    'boolean': 'is a logical data type that can have only the values True or False.\n',
    'loop': 'for loop iterates over an object until that object is complete.\n',
    'tuple': 'is an immutable data structure that store an ordered sequence of values.\n',
    'dictionary': 'is an unordered and mutable Python container that stores mappings of unique keys to values.\n',
    'parse': 'is a command for dividing the given program code into a small piece of code for analyzing the correct syntax.\n',
}

for term, definition in glossary.items():
    print(f'{term.title()}: {definition}')
974
264
import ast
import argparse
import json
import os
import pprint

import astor
import tqdm
import _jsonnet

from seq2struct import datasets
from seq2struct import grammars
from seq2struct.utils import registry
from third_party.spider import evaluation


def main():
    """Round-trip the Spider training set through the grammar
    (parse -> unparse) and score the reconstructed SQL with the official
    Spider evaluator.

    Writes two tab-separated files into --output:
      gold.txt      -- one "<gold query>TAB<db_id>" line per example
      predicted.txt -- the grammar-reconstructed SQL, one per example
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', required=True)
    # optional jsonnet top-level arguments, passed through as a string
    parser.add_argument('--config-args')
    parser.add_argument('--output', required=True)
    args = parser.parse_args()

    # The config file is a jsonnet template; evaluate it (with top-level
    # args when given) and parse the resulting JSON.
    if args.config_args:
        config = json.loads(_jsonnet.evaluate_file(args.config, tla_codes={'args': args.config_args}))
    else:
        config = json.loads(_jsonnet.evaluate_file(args.config))

    os.makedirs(args.output, exist_ok=True)
    # NOTE(review): these handles are never closed/flushed explicitly;
    # consider `with` blocks so rows aren't lost on a crash.
    gold = open(os.path.join(args.output, 'gold.txt'), 'w')
    predicted = open(os.path.join(args.output, 'predicted.txt'), 'w')

    # Dataset and grammar are built from the config via the project registry.
    train_data = registry.construct('dataset', config['data']['train'])
    grammar = registry.construct('grammar', config['model']['decoder_preproc']['grammar'])
    # Official Spider evaluator in 'match' (exact set match) mode.
    evaluator = evaluation.Evaluator(
        'data/spider-20190205/database',
        evaluation.build_foreign_key_map_from_json('data/spider-20190205/tables.json'),
        'match')

    for i, item in enumerate(tqdm.tqdm(train_data, dynamic_ncols=True)):
        # Parse the gold code into the grammar's AST, then unparse it back
        # to SQL; a faithful grammar should round-trip to an equivalent query.
        parsed = grammar.parse(item.code, 'train')
        sql = grammar.unparse(parsed, item)
        # Tabs are the field separator in gold.txt, so strip them from queries.
        evaluator.evaluate_one(
            item.schema.db_id, item.orig['query'].replace('\t', ' '), sql)
        gold.write('{}\t{}\n'.format(item.orig['query'].replace('\t', ' '), item.schema.db_id))
        predicted.write('{}\n'.format(sql))


if __name__ == '__main__':
    main()
1,711
555
from tune.constants import TUNE_STOPPER_DEFAULT_CHECK_INTERVAL
from typing import Any, Callable, Optional
from tune._utils import run_monitored_process
from tune.concepts.flow import Trial, TrialReport


class NonIterativeObjectiveFunc:
    """A single-shot (non-iterative) objective: one trial in, one report out."""

    def generate_sort_metric(self, value: float) -> float:  # pragma: no cover
        """Map the raw metric onto the value used for ranking; identity by default."""
        return value

    def run(self, trial: Trial) -> TrialReport:  # pragma: no cover
        """Evaluate a single trial. Subclasses must implement this."""
        raise NotImplementedError

    def safe_run(self, trial: Trial) -> TrialReport:
        """Evaluate the trial and attach the derived sort metric to its report."""
        raw_report = self.run(trial)
        sort_metric = self.generate_sort_metric(raw_report.metric)
        return raw_report.with_sort_metric(sort_metric)


class NonIterativeObjectiveLocalOptimizer:
    """Runs a non-iterative objective on the local machine."""

    @property
    def distributable(self) -> bool:
        """Whether this optimizer may be shipped to remote workers."""
        return True

    def run(self, func: NonIterativeObjectiveFunc, trial: Trial) -> TrialReport:
        """Evaluate `trial` with `func` in the current process."""
        # TODO: how to utilize execution_engine?
        return func.safe_run(trial)

    def run_monitored_process(
        self,
        func: NonIterativeObjectiveFunc,
        trial: Trial,
        stop_checker: Callable[[], bool],
        interval: Any = TUNE_STOPPER_DEFAULT_CHECK_INTERVAL,
    ) -> TrialReport:
        """Evaluate the trial in a child process, polling `stop_checker`
        every `interval` so the run can be aborted early."""
        return run_monitored_process(
            self.run,
            [func, trial],
            {},
            stop_checker=stop_checker,
            interval=interval,
        )


def validate_noniterative_objective(
    func: NonIterativeObjectiveFunc,
    trial: Trial,
    validator: Callable[[TrialReport], None],
    optimizer: Optional[NonIterativeObjectiveLocalOptimizer] = None,
) -> None:
    """Run `func` once under monitoring and hand the report to `validator`.

    `validator` should raise (e.g. assert) when the report is unacceptable.
    """
    chosen = NonIterativeObjectiveLocalOptimizer() if optimizer is None else optimizer
    report = chosen.run_monitored_process(func, trial, lambda: False, "1sec")
    validator(report)
1,632
511
a1 = 'mary'
b1 = 'army'

a2 = 'mary'
b2 = 'mark'


def is_anagram(a, b):
    """
    Return True if words a and b are anagrams, False otherwise.

    Two words are anagrams when they contain exactly the same letters,
    possibly in a different order.
    """
    # Sorting both words yields identical sequences iff they share the
    # same multiset of characters.  (Also fixes the original docstring's
    # "Flase" typo and collapses the verbose if/else returning literals.)
    return sorted(a) == sorted(b)


# Python 3 print function -- the original used Python 2 print statements,
# which are a syntax error on any modern interpreter.
print(is_anagram(a1, b1))
print(is_anagram(a2, b2))
371
157
from sqlalchemy import (
    Column,
    ForeignKey,
    Integer,
    Text,
)
from sqlalchemy.orm import relationship

from .meta import Base


class Notification(Base):
    """A notification subscription belonging to a single user.

    Holds one serialized subscription payload per user (one-to-one via
    the ``user`` relationship).
    """
    __tablename__ = 'notification'

    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Owning user; every notification row must reference a user.
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    # Serialized subscription payload (presumably Web-Push JSON -- confirm
    # against the client code); NULL until the client registers one.
    subscription = Column(Text, nullable=True, default=None)

    # One-to-one link back to User (uselist=False); requires a matching
    # 'notification' attribute on the User model (back_populates).
    user = relationship('User', back_populates='notification', uselist=False)
459
142
# -*- coding: utf-8 -*- """ model helper ~~~~~~~~~~~~ :Created: 2016-8-5 :Copyright: (c) 2016<smileboywtu@gmail.com> """ from customer_exceptions import OffsetOutOfRangeException class ListModelHelper(object): """get the object list""" @classmethod def list(cls, index=0, limit=8, sort=None, order='asc'): """get the list of the model object :param condition: filter condition :param index: page index :param limit: page entry number :param sort: sort condition :param order: asc or desc :return: object list """ if not sort: sort = 'id' order_by = '-' + sort if order != 'asc' else sort offset = index * limit # check the offset total = cls.objects.count() if offset > total: raise OffsetOutOfRangeException() return { 'total': total, 'datalist': cls.objects.order_by(order_by)\ [offset:offset + limit] } class ViewModelHelper(object): """get a single instance""" @classmethod def view(cls, pk): """ get a specific objects :param pk: primary key :return: """ return cls.objects.get(id=pk) class GenericModelHelper(ListModelHelper, ViewModelHelper): pass
1,340
405
from django import template

register = template.Library()

from .. import utils


@register.filter
def admin2_urlname(view, action):
    """
    Converts the view and the specified action into a valid namespaced
    URLConf name.
    """
    return utils.admin2_urlname(view, action)


@register.filter
def model_app_label(obj):
    """
    Returns the app label of a model instance or class.
    """
    return utils.model_app_label(obj)


@register.filter
def model_verbose_name(obj):
    """
    Returns the verbose name of a model instance or class.
    """
    return utils.model_verbose_name(obj)


@register.filter
def model_verbose_name_plural(obj):
    """
    Returns the pluralized verbose name of a model instance or class.
    """
    return utils.model_verbose_name_plural(obj)


@register.filter
def formset_visible_fieldlist(formset):
    """
    Returns the labels of a formset's visible fields as an array.

    NOTE(review): assumes the formset has at least one form; an empty
    formset would raise IndexError -- confirm callers guarantee this.
    """
    return [f.label for f in formset.forms[0].visible_fields()]


@register.filter
def for_admin(permissions, admin):
    """
    Only useful in the permission handling. This filter binds a new admin
    to the permission handler to allow checking views of an arbitrary
    admin.
    """
    # some permission check has failed earlier, so we don't bother trying to
    # bind a new admin to it.
    if permissions == '':
        return permissions
    return permissions.bind_admin(admin)


@register.filter
def for_view(permissions, view):
    """
    Only useful in the permission handling. This filter binds a new view
    to the permission handler to check for view names that are not known
    during template compile time.
    """
    # some permission check has failed earlier, so we don't bother trying to
    # bind a new admin to it.
    if permissions == '':
        return permissions
    return permissions.bind_view(view)


@register.filter
def for_object(permissions, obj):
    """
    Only useful in the permission handling. This filter binds a new object
    to the permission handler to check for object-level permissions.
    """
    # some permission check has failed earlier, so we don't bother trying to
    # bind a new object to it.
    if permissions == '':
        return permissions
    return permissions.bind_object(obj)


@register.simple_tag
def get_attr(record, attribute_name):
    """
    Allows dynamic fetching of model attributes in templates.
    """
    if attribute_name == "__str__":
        # BUG FIX: the original called record.__unicode__(), which does not
        # exist on Python 3 models and raised AttributeError; str() covers
        # both Python 2 (__unicode__ via __str__ fallbacks) and Python 3.
        return str(record)
    attribute = getattr(record, attribute_name)
    # call zero-argument callables (methods/properties exposed as callables)
    if callable(attribute):
        return attribute()
    return attribute
2,603
719
""" Tests for edges.py """ import unittest import pandas as pd from biothings_explorer.user_query_dispatcher import SingleEdgeQueryDispatcher from biothings_explorer.filters.edges import filter_node_degree class TestFilterEdges(unittest.TestCase): # test for count values def test_count_values(self): counts = [10, 20, 40, 50, 100] seqd = SingleEdgeQueryDispatcher(input_cls='Gene', output_cls='ChemicalSubstance', input_id='NCBIGene', values='1017') seqd.query() for count in counts: newG = filter_node_degree(seqd.G, count) self.assertEqual(len(newG.nodes), count+1) # edge case test if count > num nodes, then returns num_nodes results def test_num_nodes(self): count = 1000 seqd = SingleEdgeQueryDispatcher(input_cls='Gene', output_cls='ChemicalSubstance', input_id='NCBIGene', values='1017') seqd.query() newG = filter_node_degree(seqd.G, count) self.assertEqual(len(newG.nodes), len(seqd.G.nodes)) # test for correct ordering of ranks def test_ranks(self): seqd = SingleEdgeQueryDispatcher(input_cls='Disease', input_id='MONDO', output_cls='PhenotypicFeature', pred='related_to', values='MONDO:0010997') seqd.query() newG = filter_node_degree(seqd.G) for i1,node1 in enumerate(newG.nodes): if node1 == 'MONDO:MONDO:0010997': continue for i2,node2 in enumerate(newG.nodes): if node2 == 'MONDO:MONDO:0010997': continue if newG.degree(node1) > newG.degree(node2): self.assertLess(newG.nodes.data()[node1]['rank'], newG.nodes.data()[node2]['rank']) elif newG.degree(node1) < newG.degree(node2): self.assertGreater(newG.nodes.data()[node1]['rank'], newG.nodes.data()[node2]['rank']) if __name__ == '__main__': unittest.main()
2,323
697
""" Retrieves data as json files from fantasy.premierleague.com """ import json import requests LAST_SEASON_DATA_FILENAME = "data/player_data_20_21.json" DATA_URL = "https://fantasy.premierleague.com/api/bootstrap-static/" DATA_FILENAME = "data/player_data_21_22.json" FIXTURES_URL = "https://fantasy.premierleague.com/api/fixtures/" FIXTURES_FILENAME = "data/fixtures_data_21_22.json" # Download all player data and write file def get_player_data(use_last_season): if use_last_season: return LAST_SEASON_DATA_FILENAME r = requests.get(DATA_URL) json_response = r.json() with open(DATA_FILENAME, 'w') as out_file: json.dump(json_response, out_file) return DATA_FILENAME # Download all fixtures data and write file def get_fixtures_data(): r = requests.get(FIXTURES_URL) json_response = r.json() with open(FIXTURES_FILENAME, 'w') as out_file: json.dump(json_response, out_file) return FIXTURES_FILENAME
974
370
from beamngpy import BeamNGpy, Vehicle, Scenario, ScenarioObject
from beamngpy import setup_logging, Config
from beamngpy.sensors import Camera, GForces, Lidar, Electrics, Damage, Timer
import beamngpy
import time, random

# globals
default_model = 'pickup'
default_scenario = 'west_coast_usa'  # 'cliff' # smallgrid
dt = 20


def spawn_point(scenario_locale):
    """Return the spawn pose dict ('pos', 'rot', 'rot_quat') for a scenario.

    BUGFIX: the original compared strings with `is`, which tests object
    identity and only works by accident of CPython string interning; `==`
    is the correct comparison and is used here.
    """
    if scenario_locale == 'cliff':
        #return {'pos':(-124.806, 142.554, 465.489), 'rot':None, 'rot_quat':(0, 0, 0.3826834, 0.9238795)}
        return {'pos': (-124.806, 190.554, 465.489), 'rot': None, 'rot_quat': (0, 0, 0.3826834, 0.9238795)}
    elif scenario_locale == 'west_coast_usa':
        #return {'pos':(-717.121, 101, 118.675), 'rot':None, 'rot_quat':(0, 0, 0.3826834, 0.9238795)}
        return {'pos': (-717.121, 101, 118.675), 'rot': None, 'rot_quat': (0, 0, 0.918812, -0.394696)}
        #906, 118.78 rot:
    elif scenario_locale == 'smallgrid':
        return {'pos': (0.0, 0.0, 0.0), 'rot': None, 'rot_quat': (0, 0, 0.3826834, 0.9238795)}


def setup_sensors(vehicle):
    """Attach front/back cameras, g-force, electrics, damage and timer
    sensors to *vehicle* and return it."""
    # Set up sensors
    pos = (-0.3, 1, 1.0)
    direction = (0, 1, 0)
    fov = 120
    resolution = (512, 512)
    front_camera = Camera(pos, direction, fov, resolution,
                          colour=True, depth=True, annotation=True)
    pos = (0.0, 3, 1.0)
    direction = (0, -1, 0)
    fov = 90
    resolution = (512, 512)
    back_camera = Camera(pos, direction, fov, resolution,
                         colour=True, depth=True, annotation=True)
    gforces = GForces()
    electrics = Electrics()
    damage = Damage()
    damage.encode_vehicle_request()
    lidar = Lidar(visualized=False)
    timer = Timer()
    # Attach them
    vehicle.attach_sensor('front_cam', front_camera)
    vehicle.attach_sensor('back_cam', back_camera)
    vehicle.attach_sensor('gforces', gforces)
    vehicle.attach_sensor('electrics', electrics)
    vehicle.attach_sensor('damage', damage)
    vehicle.attach_sensor('timer', timer)
    return vehicle


def compare_damage(d1, d2):
    """Print every damage-sensor key that changed between two sensor polls.

    Returns None; output goes to stdout only.
    """
    for key in d1['damage']:
        if d1['damage'][key] != d2['damage'][key]:
            print("d1['damage'][{}] == {}; d2['damage'][{}] == {}".format(key, d1['damage'][key], key, d2['damage'][key]))
            try:
                # handle specific keys
                if key == 'deform_group_damage' or key == 'part_damage':
                    for k in d1['damage'][key].keys():
                        print("\td1['damage'][{}][{}] == {}; d2['damage'][{}][{}] == {}".format(key, k, d1['damage'][key][k], key, k, d2['damage'][key][k]))
                else:
                    if d1['damage'][key] < d2['damage'][key]:
                        print("\td2[damage][{}] is greater".format(key))
                    else:
                        print("\td1[damage][{}] is greater".format(key))
            # BUGFIX: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit; best-effort skip is preserved.
            except Exception:
                continue
    print()
    return


def backup(cum_list, sec):
    """Return the recovery-point name roughly *sec* seconds back in time.

    Assumes 5 recovery points are recorded per second (dt = sec * 5);
    the index is clamped to the bounds of *cum_list*.
    """
    #return "1_24"
    dt = sec * 5.0
    index = len(cum_list) - int(dt)
    if index < 0:
        index = 0
    elif index >= len(cum_list):
        index = len(cum_list) - 1
    print("cum_list={}".format(cum_list))
    print("index={}".format(index))
    #try:
    return cum_list[index]
    #except:
    #return "0_0"


def main():
    """Drive randomly, record recovery points, and periodically roll the
    vehicle back while diffing the damage sensor between segments."""
    global default_model, default_scenario
    beamng = BeamNGpy('localhost', 64256, home='C:/Users/merie/Documents/BeamNG.research.v1.7.0.1')
    #scenario = Scenario('smallgrid', 'spawn_objects_example')
    scenario = Scenario(default_scenario, 'research_test', description='Random driving for research')
    vehicle = Vehicle('ego_vehicle', model=default_model, licence='PYTHON')
    vehicle = setup_sensors(vehicle)
    spawn = spawn_point(default_scenario)
    scenario.add_vehicle(vehicle, pos=spawn['pos'], rot=spawn['rot'], rot_quat=spawn['rot_quat'])
    scenario.make(beamng)
    bng = beamng.open()
    bng.load_scenario(scenario)
    bng.start_scenario()
    vehicle.update_vehicle()
    d1 = bng.poll_sensors(vehicle)
    cum_list = []
    bound = 0.0
    for i in range(3):
        for _ in range(45):
            bound = bound + 0.0  # 0.1
            # vehicle.save()
            vehicle.update_vehicle()
            d2 = bng.poll_sensors(vehicle)
            throttle = 1.0
            #throttle = random.uniform(0.0, 1.0)
            steering = random.uniform(-1 * bound, bound)
            brake = 0.0  #random.choice([0, 0, 0, 1])
            vehicle.control(throttle=throttle, steering=steering, brake=brake)
            pointName = "{}_{}".format(i, _)
            cum_list.append(pointName)
            vehicle.saveRecoveryPoint(pointName)
            bng.step(20)
        print("SEGMENT #{}: COMPARE DAMAGE".format(i))
        # NOTE(review): compare_damage only prints and returns None.
        damage_diff = compare_damage(d1, d2)
        d1 = d2
        # "Back up" 1 second -- load vehicle at that time in that position.
        backup_pointName = backup(cum_list, 0.001)
        print('recovering to {}'.format(pointName))
        loadfile = vehicle.loadRecoveryPoint(backup_pointName)
        print('loadfile is {}'.format(loadfile))
        bng.pause()
        vehicle.update_vehicle()
        vehicle.load(loadfile)
        #vehicle.load("vehicles/pickup/vehicle.save.json")
        bng.resume()
        #vehicle.startRecovering()
        #time.sleep(1.5)
        #vehicle.stopRecovering()
        vehicle.update_vehicle()
        bng.pause()
        time.sleep(2)
        # vehicle.load("vehicles/pickup/vehicle.save.json")
        bng.resume()
    bng.close()


if __name__ == "__main__":
    main()
5,587
2,127
#!/usr/bin/env python
from __future__ import print_function
import os, optparse, glob
import depotcache, acf
from ui import ui_tty as ui
import hashlib
import sys

g_indent = ' '

# Background colour used when highlighting a failure (key False).
colours = {
    False: 'back_red black',
    True: ''
}

# Marker type: a depot list whose true length is unknown (no ManagedDepots key).
class UnknownLen(list): pass

def depot_summary_ok(mounted):
    """A game is considered OK if at least one depotcache is mounted."""
    if len(mounted) > 0:
        return True
    return False

def str_depot_summary(mounted, managed):
    """Build the 'n/m depotcaches mounted' summary line."""
    if isinstance(managed, UnknownLen):
        l = ui._ctext('back_yellow black', '?')
    else:
        l = str(len(managed))
    ret = '%i/%s depotcaches mounted' % (len(mounted), l)
    if len(mounted) == 0:
        ret += ' - Not released on this platform yet?'
    return ret

def manifest_filename(depot, timestamp):
    return '%s_%s.manifest' % (depot, timestamp)

def manifest_path(library_root, filename):
    return os.path.join(library_root, 'depotcache/%s' % filename)

def find_library_root(acf_filename):
    # The library root is the parent of the directory holding the acf file.
    return os.path.relpath(os.path.realpath(os.path.join(
        os.path.curdir, os.path.dirname(acf_filename), '..')))

def find_steam_path_from_registry(opts, reg):
    """Read the Steam install path from the Windows registry (32-bit view)."""
    if opts.verbose:
        ui._print("Looking for steam path from registry...")
    key = reg.OpenKey(reg.HKEY_CURRENT_USER, 'Software\\Valve\\Steam', 0,
            reg.KEY_READ | reg.KEY_WOW64_32KEY)
    return reg.QueryValueEx(key, 'SteamPath')[0]

def cygwin_path(path):
    """Translate a Windows path to its cygwin equivalent via cygpath.exe."""
    import subprocess
    return subprocess.check_output(['cygpath.exe', '-u', path]).strip()

def guess_steam_path_win(opts, translate = lambda x: x):
    """Probe the usual Windows install locations; exit if none exists."""
    for path in [ r'c:\program files (x86)\steam', r'c:\program files\steam', r'c:\steam' ]:
        if opts.verbose:
            ui._print("Searching '%s'..." % translate(path))
        if os.path.isdir(translate(path)):
            return path
    ui._cprint('red', 'Unable to find Steam root - rerun with --steam-root=')
    sys.exit(1)

def find_steam_root(opts, acf_filename = None):
    """Locate the Steam root directory, per-platform; exits on failure."""
    if acf_filename is not None:
        # If this library has a depotcache, assume it is also the steam root
        # XXX: This could be tricked if someone has created or copied a
        #      depotcache folder into the library, or if several steam
        #      installations are sharing libraries. In these cases the user
        #      will just have to specify --steam-root= to override it.
        library_root = find_library_root(acf_filename)
        if os.path.isdir(os.path.join(library_root, 'depotcache')):
            return library_root
    path = None
    if sys.platform.startswith('linux'):
        path = os.path.expanduser('~/.steam/root')
    elif sys.platform == 'cygwin':
        try:
            import cygwinreg
        except ImportError:
            if opts.verbose:
                ui._print('python-cygwinreg not installed, searching common Steam paths...')
        else:
            if not hasattr(cygwinreg, 'KEY_WOW64_32KEY'):
                cygwinreg.KEY_WOW64_32KEY = 512
            path = cygwin_path(find_steam_path_from_registry(opts, cygwinreg))
        path = path or guess_steam_path_win(opts, cygwin_path)
    elif sys.platform == 'win32':
        import _winreg
        path = find_steam_path_from_registry(opts, _winreg)
        path = path or guess_steam_path_win(opts)
    if path:
        return path
    ui._cprint('red', 'Unable to find Steam root - rerun with --steam-root=')
    sys.exit(1)

class FilenameSet(set):
    # It may be more efficient to convert the paths to a tree structure,
    # but for the moment this is easier.
    def add(self, element):
        """
        Override add method to ensure all directory components are also
        added to the set individually.
        """
        set.add(self, element)
        dirname = os.path.dirname(element)
        if dirname != '':
            self.add(dirname)

def verify_file_hash(filename, depot_hash, indent, opts):
    """Verify *filename* against its manifest chunks; True if it checks out."""
    if depot_hash.filetype == 'directory':
        return os.path.isdir(filename)
    s = hashlib.sha1()
    f = open(filename, 'rb')
    bad_found = False
    off = 0
    for chunk in sorted(depot_hash):
        assert(chunk.off == off)
        buf = f.read(chunk.len)
        off += chunk.len
        s.update(buf)
        sha = hashlib.sha1(buf).hexdigest()
        if sha != chunk.sha:
            # With a single --verify we only need a yes/no answer; a second
            # --verify reports every corrupt chunk individually.
            if opts.verify == 1:
                return False
            if not bad_found:
                ui._cprint('red', ' (BAD CHECKSUM)')
                bad_found = True
            ui._print(indent, end='')
            ui._cprint('red', '%.10i:%.10i found %s expected %s' % \
                (chunk.off, chunk.off+chunk.len, sha, chunk.sha))
    assert(off == depot_hash.filesize)
    if bad_found:
        ui._print(indent, end='')
    eof_garbage = False
    while True:
        # Anything beyond the manifest's filesize is unexpected trailing data.
        buf = f.read(1024*1024)
        if buf == '':
            break
        if not eof_garbage:
            ui._cprint('red', ' (Garbage found at end of file!)', end='')
            eof_garbage = True
        s.update(buf)
    if bad_found:
        return False
    return s.hexdigest() == depot_hash.sha

def verify_manifest_files_exist(manifest_path, game_path, indent, opts):
    """Check every file listed in a manifest; returns (ok, FilenameSet)."""
    def verify_hash():
        if (opts.verify or opts.delete_bad) and not verify_file_hash(filename, depot_hash, indent+g_indent, opts):
            ui._cprint('red', ' (BAD CHECKSUM)', end='')
            return True
    def check_filesize():
        if depot_hash.filetype == 'directory':
            return True
        return filesize == depot_hash.filesize
    def warn_filesize():
        if not check_filesize():
            ui._cprint('red', ' (Filesize != %i, %+i)' % \
                (depot_hash.filesize, filesize - depot_hash.filesize))
            return True
    ok = True
    filenames = FilenameSet()
    for (orig_filename, depot_hash) in depotcache.decode_depotcache(manifest_path):
        filename = os.path.join(game_path, orig_filename.replace('\\', os.path.sep))
        (found, correct, filename, pretty) = insensitive_path(filename, opts)
        filenames.add(filename)
        if opts.file_filter is not None and orig_filename not in opts.file_filter:
            continue
        if found:
            filesize = os.stat(filename).st_size
        corrupt = False
        if not correct:
            ui._print(indent, end='')
            ui._print(pretty, end='')
            if found:
                corrupt = warn_filesize()
                sys.stdout.flush()
                corrupt = corrupt or verify_hash()
                if corrupt and opts.delete_bad:
                    ui._cprint('red', ' (DELETED)')
                    os.remove(filename)
                else:
                    ui._print(' (CASE MISMATCH, ', end='')
                    if not opts.rename:
                        ui._print('rerun with -r to fix)')
                    else:
                        ui._print('renamed)')
            else:
                ok = False
                ui._print(' (FILE MISSING)')
        elif opts.verbose > 2 or opts.verify or opts.delete_bad or not check_filesize():
            ui._print(indent + filename, end='')
            corrupt = warn_filesize()
            sys.stdout.flush()
            corrupt = corrupt or verify_hash()
            if corrupt and opts.delete_bad:
                ui._cprint('red', ' (DELETED)', end='')
                os.remove(filename)
            ui._print()
    return (ok, filenames)

def check_depots_exist(mounted_depots, managed_depots, library_root, indent, opts):
    """Confirm every mounted depot's manifest file exists on disk."""
    ok = True
    num_mounted = 0
    for depot in managed_depots:
        if depot in mounted_depots:
            num_mounted += 1
            manifest = manifest_filename(depot, mounted_depots[depot])
            path = manifest_path(library_root, manifest)
            if not os.path.exists(path):
                ui._cprint('red', '%s%s NOT FOUND!' % (indent, manifest), end='')
                ui._print(' (Verify the game cache and try again)')
                ok = False
        elif opts.verbose > 1:
            ui._print('%s%s (not mounted)' % (indent, depot))
    assert(num_mounted == len(mounted_depots))
    return ok

def check_all_depot_files_exist(mounted_depots, library_root, game_path, indent, opts):
    """Run verify_manifest_files_exist over every (filtered) mounted depot."""
    ok = True
    filenames = set()
    for depot in mounted_depots:
        manifest = manifest_filename(depot, mounted_depots[depot])
        if opts.depot_filter is not None and \
            depot not in opts.depot_filter and \
            manifest not in opts.depot_filter:
            continue
        path = manifest_path(library_root, manifest)
        if opts.verbose:
            ui._print('%s%s' % (indent, manifest))
        (all_files_exist, manifest_filenames) = \
            verify_manifest_files_exist(path, game_path, indent + g_indent, opts)
        filenames.update(manifest_filenames)
        ok = ok and all_files_exist
    return (ok, filenames)

def mkdir_recursive(path):
    if os.path.isdir(path):
        return
    dirname = pretty_dirname = os.path.dirname(path)
    mkdir_recursive(dirname)
    os.mkdir(path)

def insensitive_path(path, opts):
    """Resolve *path* case-insensitively.

    Returns (found, correct_case, actual_path, pretty_path) where
    pretty_path highlights the characters that differ in case. With
    opts.rename set, mismatched entries are renamed to the expected case.
    """
    if os.path.exists(path):
        return (True, True, path, path)
    basename = os.path.basename(path)
    dirname = pretty_dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        (found, correct, dirname, pretty_dirname) = insensitive_path(dirname, opts)
        if not found:
            return (False, False, dirname, os.path.join(pretty_dirname, basename))
    pretty_basename = ''
    for entry in os.listdir(dirname):
        if entry.lower() == basename.lower():
            for i in range(len(entry)):
                if entry[i] != basename[i]:
                    pretty_basename += ui._ctext('back_yellow black', entry[i])
                else:
                    pretty_basename += entry[i]
            if opts.rename:
                os.rename(os.path.join(dirname, entry), os.path.join(dirname, basename))
                return (True, False, os.path.join(dirname, basename), os.path.join(pretty_dirname, pretty_basename))
            return (True, False, os.path.join(dirname, entry), os.path.join(pretty_dirname, pretty_basename))
    return (False, False, path, ui._ctext('back_red black', path))

def find_extra_files(game_path, known_filenames, indent, opts):
    """Walk the game directory reporting (and optionally moving/deleting)
    files that no manifest tracks."""
    known_filenames_l = set(map(str.lower, known_filenames))
    if opts.move:
        dest_root = os.path.realpath(os.path.join(game_path, '..'))
        dest_root = os.path.join(dest_root, os.path.basename(game_path) + '~EXTRANEOUS')
    # Walk bottom-up when deleting/moving so directories empty out first.
    for (root, dirs, files) in os.walk(game_path, topdown = not (opts.delete or opts.move)):
        for fname in dirs + files:
            path = os.path.join(root, fname)
            if path in known_filenames:
                continue
            ui._print(indent, end='')
            extra='\n'
            if opts.move:
                if fname in dirs:
                    try:
                        os.rmdir(path)
                        extra = ' (REMOVED)\n'
                    except OSError as e:
                        extra = ' %s\n' % str(e)
                else:
                    dest = os.path.join(dest_root, os.path.relpath(path, game_path))
                    try:
                        mkdir_recursive(os.path.dirname(dest))
                        os.rename(path, dest)
                        extra = '\n%s --> %s\n' % (indent, os.path.relpath(dest))
                    except OSError as e:
                        extra = ' %s\n' % str(e)
            elif opts.delete:
                extra = ' (DELETED)\n'
                if fname in dirs:
                    os.rmdir(path)
                else:
                    os.remove(path)
            if path.lower() in known_filenames_l:
                ui._cprint('back_blue yellow', path, end=' (DUPLICATE WITH DIFFERING CASE)%s' % extra)
            else:
                ui._cprint('back_blue yellow', path, end=extra)

def find_game_path(app_state, library_root, acf_filename, opts):
    """Derive the on-disk game directory from the acf app state, or None."""
    # XXX TODO: acf games can be installed in other libraries, I need to
    # try it to find if that would change this logic.
    #
    # NOTE: There is also a UserConfig.appinstalldir, however it may be
    # unreliable if the acf has been copied from another location and the
    # game has not yet been launched.
    install_dir = app_state['installdir']
    if install_dir == '':
        ui._cprint('yellow', g_indent + 'WARNING: Blank installdir in %s, trying UserConfig.appinstalldir...' % acf_filename)
        install_dir = os.path.basename(app_state['UserConfig']['appinstalldir'])
    # Occasionally the install_dir is the full path in the Windows format.
    # This seems to happen sometimes when moving games from one install to
    # another. AFAICT the full path is never used - the acf file must be in
    # the same steam library as the install regardless, so discard the rest
    # of the path.
    install_dir = install_dir.split('\\')[-1]
    (found, correct, game_path, pretty) = insensitive_path(os.path.join(library_root, 'SteamApps/common/%s' % install_dir), opts)
    if found:
        # TODO: Warn if a second directory exists with the same name
        #       but differing case, since that may confuse Steam or the game
        pass
    else:
        ui._print(g_indent, end='')
        ui._cprint(colours[False], 'Missing game directory', end=': ')
        ui._print(pretty)
        return None
    if not correct:
        ui._print(g_indent, end='')
        ui._cprint('back_yellow black', 'WARNING: Case Mismatch', end='')
        if not opts.rename:
            ui._print(' (rerun with -r to fix)', end='')
        ui._print(': ', end='')
        ui._print(pretty)
    return game_path

def get_installed_depots(app_state):
    installed_depots = app_state['InstalledDepots']
    return {k: v['manifest'] for k,v in installed_depots.items()}

def get_mounted_depots(app_state):
    try:
        mounted_depots = app_state['MountedDepots']
    except KeyError:
        # NOTE: Windows acf files seem to use 'ActiveDepots' instead of
        #       'MountedDepots'. Not sure why the difference.
        # XXX: Double check 'ActiveDepots' is the right key on
        #      my Windows box
        try:
            return app_state['ActiveDepots']
        except KeyError:
            # Seems some acf files no longer have either Mounted or Active Depots section
            return get_installed_depots(app_state)
    assert('ActiveDepots' not in app_state)
    return mounted_depots

def check_acf(acf_filename, opts):
    """Validate one appmanifest acf file end-to-end and print the verdict."""
    app_state = acf.parse_acf(acf_filename)['AppState']
    # Key capitalisation varies between acf versions.
    if 'appID' in app_state:
        app_id = app_state['appID']
    else:
        app_id = app_state['appid']
    try:
        name = app_state['UserConfig']['name']
    except:
        name = app_state['name']
    ui._print('%s (%s):' % (name, app_id))
    library_root = find_library_root(acf_filename)
    game_path = find_game_path(app_state, library_root, acf_filename, opts)
    if game_path is None:
        return
    mounted_depots = get_mounted_depots(app_state)
    try:
        managed_depots = app_state['ManagedDepots'].split(',')
    except KeyError:
        #ui._cprint('back_yellow black', 'WARNING: No ManagedDepots, using MountedDepots instead!')
        managed_depots = UnknownLen(mounted_depots.keys())
    ok = depot_summary_ok(mounted_depots)
    colour = colours[ok]
    if opts.verbose or not ok:
        ui._print(g_indent, end='')
        ui._cprint(colour, str_depot_summary(mounted_depots, managed_depots))
    if not ok:
        if opts.uninstall:
            # Renaming the acf to acf~ makes Steam treat the game as
            # uninstalled without touching the game files themselves.
            ui._print(g_indent, end='')
            path = os.path.join(os.path.curdir, acf_filename)
            os.rename(path, path + '~')
            ui._cprint('back_yellow black', 'UNINSTALLED!')
        return
    ok = check_depots_exist(mounted_depots, managed_depots, opts.steam_root, g_indent*2, opts)
    if not ok:
        return
    (ok, filenames) = check_all_depot_files_exist(mounted_depots, opts.steam_root, game_path, g_indent*2, opts)
    if opts.extra or opts.delete or opts.move:
        if opts.verbose:
            # So they don't appear to be under a manifest heading
            ui._print(g_indent*2 + 'Untracked files:')
        find_extra_files(game_path, filenames, g_indent*3, opts)
    if not ok:
        return
    ui._cprint('green', 'OK')

def main():
    parser = optparse.OptionParser()
    # NOTE(review): "pasesd" typo in the help string below is preserved
    # verbatim here; it is user-visible text and should be fixed separately.
    parser.add_option('-v', '--verbose', action='count',
            help='Print out info about things that pasesd. Use multiple times for more info.')
    parser.add_option('-r', '--rename', action='store_true',
            help='Rename files & directories to correct case mismatches')
    parser.add_option('-e', '--extra', '--extraneous', action='store_true',
            help='List any files in the game directory that are not tracked by any manifest files. Extraneous files are highlighted in ' + \
            ui._ctext('back_blue yellow', 'blue'))
    parser.add_option('--verify', action='count',
            help='Validate files integrity (Note: may show false positives if a file is in multiple depots). Specify twice to identify corrupt chunks.')
    parser.add_option('--file-filter', action='append',
            help='Specify file to check. Useful with --verify on large games when the bad files are already known. Can be specified multiple times.')
    parser.add_option('--depot-filter', action='append',
            help='Specify which mounted depots to process. Can be specified multiple times.')
    # '-d': Interractively delete (implies -e) files that not listed in the manifest file
    parser.add_option('-D', '--delete', action='store_true',
            help='Delete any extraneous files, without asking for confirmation (implies -e). CAUTION: Some games may store legitimate files in their directory that are not tracked by Steam which this option will delete. BE CAREFUL WITH THIS OPTION!')
    parser.add_option('--delete-bad', action='store_true',
            help='Delete any files with bad checksums, without asking for confirmation (implies --verify). CAUTION: Some games may store legitimate configuration files in their directory which this option may delete, potentially losing settings. BE CAREFUL WITH THIS OPTION!')
    parser.add_option('-M', '--move', action='store_true',
            help="Move any extraneous files to SteamApps/common/game~EXTRANEOUS (implies -e). rsync may be used to merge them back into the game directory later.")
    parser.add_option('-U', '--uninstall', action='store_true',
            help="Mark games with bad acf files (Currently that means 0 depotcaches mounted, but that definition may change in the future) as uninstalled. This WILL NOT DELETE THE GAME - it is intended to quickly remove bad acf files that may be interfering with launching or updating particular games. These games will need to be manually re-installed in Steam. (NOTE: Restart Steam afterwards)")
    parser.add_option('--steam-root',
            help="Specify where Steam is installed. This is usually detected automatically based on the acf path, but it may be necessary to specify it if working with games installed in an alternate steam library and this script can't find the game's manifest files.")
    # TODO:
    # '--verify': Mark game as needs verification on next launch (XXX: What option is that in the .acf? XXX: happens if Steam is running at the time?)
    # Also, when I can do this it might be an idea for some of the above rename/delete options to imply this.
    (opts, args) = parser.parse_args()
    # TODO: If directory specified, interactively ask which game to check by name (maybe change default to do this to)
    if opts.file_filter is not None:
        # Manifests store paths with Windows separators.
        opts.file_filter = [ x.replace('/', '\\') for x in opts.file_filter ]
    if len(args) == 0:
        if opts.steam_root is None:
            opts.steam_root = find_steam_root(opts)
        args = glob.glob(os.path.join(opts.steam_root, 'SteamApps/appmanifest_*.acf'))
    elif opts.steam_root is None:
        opts.steam_root = find_steam_root(opts, args[0])
    else:
        opts.steam_root = os.path.expanduser(opts.steam_root)
    if opts.verbose:
        ui._print("Using Steam root: '%s'" % opts.steam_root)
    for filename in args:
        check_acf(filename, opts)
        ui._print()

if __name__ == '__main__':
    main()

# vi:noet:ts=8:sw=8
17,827
6,722
# Continuously print readings from an HC-SR04-style ultrasonic sensor.
from gpiozero import DistanceSensor
from time import sleep

# BCM pin numbering: echo on GPIO23, trigger on GPIO22.
sensor = DistanceSensor(echo=23, trigger=22)

while True:
    # NOTE(review): the *100 presumably converts metres to centimetres
    # (gpiozero reports distance in metres) -- confirm against gpiozero docs.
    print('Distance: ', sensor.distance * 100)
    sleep(1)
178
63
import pickle


def remove_duplicate_from_list(data):
    """ remove duplications from specific list

    any data can be contained in the data. if the data is hashable, you
    can implement this function easily like below.

        data = list(set(data))

    but if the data is unhashable, you have to implement in other ways.
    This function uses pickle.dumps to convert any data to binary.
    Binary data is hashable, so after that, we can deduplicate as with
    hashable data.

    BUGFIX: the original used list(set(...)), so the output order depended
    on the hash seed and varied between runs. dict.fromkeys deduplicates
    while preserving first-seen input order, making the result
    deterministic (same elements as before).

    NOTE: two objects that compare equal can still pickle to different
    bytes (e.g. dicts with different insertion order), in which case both
    are kept -- this matches the original behaviour.

    Arguments:
        data {list(any)} -- list that contains any type of data

    Returns:
        {list(any)} -- list without duplications, in first-seen order
    """
    pickled_data = [pickle.dumps(d) for d in data]
    # dict.fromkeys keeps one entry per distinct byte string, in order.
    unique_pickled = dict.fromkeys(pickled_data)
    return [pickle.loads(d) for d in unique_pickled]


if __name__ == "__main__":
    data = [1, 2, 2, 3, 2, 2, 2, 6]
    print(remove_duplicate_from_list(data))
    data = ["hoge", 1, "hdf", 3.4, "hoge", 2, 2, 2]
    print(remove_duplicate_from_list(data))
1,070
341
# -*- coding: utf-8 -*-
#
# Copyright (C) 2022 NYU Libraries.
#
# ultraviolet-cli is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.

"""Invenio module for custom UltraViolet commands."""

import click
import glob
import json
import os
import requests
import sys

from jsonschema import Draft4Validator
from time import sleep
from urllib3.exceptions import InsecureRequestWarning

from .. import config, utils

# Suppress InsecureRequestWarning warnings from urllib3.
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)


def create_record_draft(metadata, api, token):
    """POST *metadata* to the records API as a new draft; return its JSON.

    Exits the process (SystemExit) if the API is unreachable.
    """
    # Throttle so bulk ingests don't hammer the API.
    sleep(1)
    try:
        # Probe the API first to give a friendly error if it is down.
        r = requests.get(api, timeout=5, verify=False)
        r.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f'Couldn\'t connect to api at {api}. Is the application running?')
        raise SystemExit(e)
    headers = {
        'content-type': 'application/json',
        'authorization': f'Bearer {token}'
    }
    response = requests.post(url=api, data=json.dumps(metadata), headers=headers, verify=False)
    response.raise_for_status()
    return response.json()


def delete_record_draft(pid, api, token):
    """DELETE the draft with persistent id *pid*; return the Response.

    Returns None if the delete request itself raised.
    NOTE(review): bare except + message typo ("delet") below -- candidates
    for a separate code fix.
    """
    sleep(1)
    url = '/'.join((api.strip('/'), pid, 'draft'))
    try:
        r = requests.get(api, timeout=5, verify=False)
        r.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f'Couldn\'t connect to api at {api}. Is the application running?')
        raise SystemExit(e)
    headers = {
        'authorization': f'Bearer {token}'
    }
    try:
        response = requests.delete(url=url, headers=headers, verify=False)
        return(response)
    except:
        print(f'Unable to delet draft with pid {pid}')


def publish_record(record_metadata, access_token):
    """Publish a previously created draft via its 'publish' link."""
    sleep(1)
    url = record_metadata['links']['publish']
    headers = {
        'authorization': f'Bearer {access_token}'
    }
    response = requests.post(url=url, headers=headers, verify=False)
    return response.json()


@click.group()
def fixtures():
    """ An entry point for fixtures subcommands, e.g., ingest, purge """
    pass


@fixtures.command()
@click.option('-a', '--api', required=True, type=str, default=config.DEFAULT_RECORDS_API_URL,
              help=f'Invenio REST API base URL. Default={config.DEFAULT_RECORDS_API_URL}')
@click.option('-d', '--dir', required=True, type=click.Path(exists=True), default=config.DEFAULT_FIXTURES_DIR,
              help=f'Path to directory of fixtures. Default={config.DEFAULT_FIXTURES_DIR}')
@click.option('-o', '--output', required=True, type=str, default=config.DEFAULT_FIXTURES_OUTFILE,
              help=f'Where new fixture pid mappings will be written')
@click.option('-t', '--token', help='REST API token')
def ingest(api, dir, output, token):
    """ Post local dir of UV fixture draft records via REST API. """
    click.secho('REST API: ', nl=False, bold=True, fg='green')
    click.secho(api)
    click.secho('Fixtures directory: ', nl=False, bold=True, fg='green')
    click.secho(dir)
    if token is None:
        token = utils.token_from_user(email=config.DEFAULT_FIXTURES_USER, name='default-su-token')
    click.secho('Auth Token: ', nl=False, bold=True, fg='green')
    click.secho(token)
    records = glob.glob(f'{dir}/**/*.json', recursive=True)
    click.secho(f'\nFound {len(records)} records', nl=True, bold=True, fg='blue')
    # Merge into any existing pid mapping so reruns accumulate.
    results = json.loads(open(output).read()) if os.path.exists(output) else {}
    for file in records:
        click.secho(f'Posting record from {file}', nl=True, fg='blue')
        # NOTE(review): `dict` shadows the builtin here.
        dict = json.loads(open(file).read())
        draft = create_record_draft(dict, api, token)
        # The fixture's directory name is its UltraViolet identifier.
        uv_id = os.path.dirname(file).split('/')[-1]
        results[draft['id']] = uv_id
        os.makedirs(os.path.dirname(output), exist_ok=True)
        with open(output, "w") as f:
            json.dump(results, f)
        # record = publish_record(draft, token)


@fixtures.command()
@click.option('-a', '--api', required=True, type=str, default=config.DEFAULT_RECORDS_API_URL,
              help=f'Invenio REST API base URL. Default={config.DEFAULT_RECORDS_API_URL}')
@click.option('-d', '--dir', required=True, type=click.Path(exists=True), default=config.DEFAULT_FIXTURES_DIR,
              help=f'Path to directory of fixtures. Default={config.DEFAULT_FIXTURES_DIR}')
@click.option('-o', '--output', required=True, type=str, default=config.DEFAULT_FIXTURES_OUTFILE,
              help=f'Where new fixture pid mappings will be written')
@click.option('-t', '--token', help='REST API token')
def purge(api, dir, output, token):
    """ Delete all UV fixture draft records via REST API. """
    click.secho('REST API: ', nl=False, bold=True, fg='green')
    click.secho(api)
    if token is None:
        token = utils.token_from_user(email=config.DEFAULT_FIXTURES_USER, name='default-su-token')
    click.secho('Auth Token: ', nl=False, bold=True, fg='green')
    click.secho(token)
    results = json.loads(open(output).read()) if os.path.exists(output) else {}
    # Iterate over a copy so entries can be removed as drafts are deleted.
    for pid, uv_id in results.copy().items():
        res = delete_record_draft(pid, api, token)
        if res.ok:
            # NOTE(review): "Delecting" typo in user-visible message.
            click.secho(f'Delecting draft record {uv_id} aka {pid}', nl=True, bold=True, fg='blue')
            results.pop(pid)
    os.makedirs(os.path.dirname(output), exist_ok=True)
    with open(output, "w") as f:
        json.dump(results, f)


@fixtures.command()
@click.option('-d', '--dir', required=True, type=click.Path(exists=True), default=config.DEFAULT_FIXTURES_DIR,
              help=f'Path to directory of fixtures. Default={config.DEFAULT_FIXTURES_DIR}')
@click.option('-s', '--schema-file', required=True, type=click.Path(exists=True), default=config.DEFAULT_SCHEMA_PATH,
              help=f'Path to json schema. Default={config.DEFAULT_SCHEMA_PATH}')
def validate(dir, schema_file):
    """ Validate local dir of fixture records against JSON schema. """
    click.secho('Fixtures directory: ', nl=False, bold=True, fg='green')
    click.secho(dir)
    click.secho('JSON Schema: ', nl=False, bold=True, fg='green')
    click.secho(schema_file)
    records = glob.glob(f'{dir}/**/*.json', recursive=True)
    click.secho(f'\nFound {len(records)} records', nl=True, bold=True, fg='blue')
    schema = json.loads(open(schema_file).read())
    # Make sure the schema itself is a valid Draft 4 schema first.
    Draft4Validator.check_schema(schema)
    validator = Draft4Validator(schema, format_checker=None)
    for file in records:
        dict = json.loads(open(file).read())
        try:
            validator.validate(dict)
            click.secho(f'{file} passes', nl=True, fg='blue')
        except BaseException as error:
            click.secho(f'{file} fails', nl=True, fg='red')
            print('An exception occurred: {}'.format(error))
7,226
2,364
from __future__ import annotations

from enum import Enum, auto


class Paradigm(Enum):
    """Conjugation paradigm of a verb.

    Member order (and therefore auto() values) is part of the public
    contract and is kept exactly as declared.
    """

    NONE = auto()
    KFULIM = auto()
    KFULIM_2 = auto()   # used only for HUFAL
    NO_PREFIX = auto()  # used for words like 'hUnDA!s', 'hU_wA!n', 'nI_sa!H', 'nI_qa!H'
    PE_ALEF = auto()    # used only for PAAL
    PAAL_1 = auto()
    PAAL_2 = auto()
    PAAL_3 = auto()  # some of the verbs that start with "[QRhj]"
    PAAL_4 = auto()
    PAAL_5 = auto()  # some of the verbs that end with "a!Q"

    def is_kfulim(self) -> bool:
        """Whether this is one of the two kfulim variants."""
        return self in {Paradigm.KFULIM, Paradigm.KFULIM_2}

    def is_paal(self) -> bool:
        """Whether this is PE_ALEF or any of the PAAL_* paradigms."""
        paal_variants = {
            Paradigm.PE_ALEF,
            Paradigm.PAAL_1,
            Paradigm.PAAL_2,
            Paradigm.PAAL_3,
            Paradigm.PAAL_4,
            Paradigm.PAAL_5,
        }
        return self in paal_variants


class Binyan(Enum):
    """The seven Hebrew verb binyanim."""

    PAAL = auto()
    PIEL = auto()
    PUAL = auto()
    NIFAL = auto()
    HIFIL = auto()
    HUFAL = auto()
    HITPAEL = auto()


class Pronoun(Enum):
    """Personal pronouns used for conjugation tables."""

    ANI = auto()
    ATA = auto()
    AT = auto()
    HU = auto()
    HI = auto()
    ANACNU = auto()
    ATEM = auto()
    ATEN = auto()
    HEM = auto()
    HEN = auto()


# TODO: rename
class Present(Enum):
    """Gender/number agreement forms of the present tense."""

    MALE_SINGULAR = auto()
    MALE_PLURAL = auto()
    FEMALE_SINGULAR = auto()
    FEMALE_PLURAL = auto()
1,320
551
class SettingsFactoryDoesNotExist(Exception):
    """Raised when the named settings factory cannot be located."""
    pass


class InvalidSettingsFactory(Exception):
    """Raised when an object designated as a settings factory is not valid as one."""
    pass


class NoMatchingSettings(Exception):
    """Raised when a suitable settings class cannot be found."""
    pass


class InvalidCondition(Exception):
    """Raised when a settings-selection condition is invalid."""
    pass
266
70
import gevent
import docker
import os
from function_info import parse
from port_controller import PortController
from function import Function
import random

repack_clean_interval = 5.000  # repack and clean every 5 seconds
dispatch_interval = 0.005  # 200 qps at most


# the class for scheduling functions' inter-operations
class FunctionManager:
    """Owns one Function wrapper per configured serverless function and
    drives their periodic repack/clean and request-dispatch loops on
    gevent timers."""

    def __init__(self, config_path, min_port):
        # Parse the function definitions and reserve a 5000-port window
        # for the functions' containers.
        self.function_info = parse(config_path)
        self.port_controller = PortController(min_port, min_port + 4999)
        self.client = docker.from_env()
        self.functions = {
            x.function_name: Function(self.client, x, self.port_controller)
            for x in self.function_info
        }
        self.init()

    def init(self):
        """Remove leftover containers and start the two periodic loops."""
        print("Clearing previous containers.")
        # Force-remove every container labelled 'workflow' from a prior run.
        os.system('docker rm -f $(docker ps -aq --filter label=workflow)')
        gevent.spawn_later(repack_clean_interval, self._clean_loop)
        gevent.spawn_later(dispatch_interval, self._dispatch_loop)

    def _clean_loop(self):
        # Re-arm the timer first so the loop keeps a fixed period even if
        # the per-function work below is slow.
        gevent.spawn_later(repack_clean_interval, self._clean_loop)
        for function in self.functions.values():
            gevent.spawn(function.repack_and_clean)

    def _dispatch_loop(self):
        # Same re-arm-first pattern as _clean_loop, at dispatch frequency.
        gevent.spawn_later(dispatch_interval, self._dispatch_loop)
        for function in self.functions.values():
            gevent.spawn(function.dispatch_request)

    def run(self, function_name, request_id, runtime, input, output, to, keys):
        """Forward one request to the named function.

        Raises if *function_name* is unknown; otherwise returns whatever
        Function.send_request returns.
        """
        # print('run', function_name, request_id, runtime, input, output, to, keys)
        if function_name not in self.functions:
            raise Exception("No such function!")
        return self.functions[function_name].send_request(request_id, runtime, input, output, to, keys)
1,834
560
import sets
import scan_set
import os

# Directory holding one '<set>.txt' id file per already-scanned set.
path = 'ids/'
setlist = os.listdir(path)


def getall(set):
    """Scan one set and persist its ids via scan_set.

    NOTE(review): the parameter name shadows the builtin ``set``; it is kept
    unchanged to preserve the call interface for existing callers.
    """
    scanned_ids = scan_set.scan_set(set)
    scan_set.write_ids(set, scanned_ids)


# Fetch every configured set that does not already have an ids file on disk.
# (Python 2 print statements converted to print() calls; output unchanged.)
for set_name in sets.set_info:
    if set_name + '.txt' not in setlist:
        print("Getting " + set_name)
        getall(set_name)

print("\n\nCompletely Finished........")
333
128
# ------------------------------------------------------------------------------------------------ # Copyright (c) 2018 Microsoft Corporation # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and # associated documentation files (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, merge, publish, distribute, # sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or # substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT # NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
# ------------------------------------------------------------------------------------------------ import os import numpy as np import matplotlib.pyplot as plt import malmoenv import argparse from pathlib import Path import time from PIL import Image from stable_baselines3.common import results_plotter from stable_baselines3.common.monitor import Monitor from stable_baselines3.common.results_plotter import load_results, ts2xy, plot_results from stable_baselines3.common.noise import NormalActionNoise from stable_baselines3.common.callbacks import BaseCallback from stable_baselines3.common.env_checker import check_env from stable_baselines3 import PPO class SaveOnBestTrainingRewardCallback(BaseCallback): """ Callback for saving a model (the check is done every ``check_freq`` steps) based on the training reward (in practice, we recommend using ``EvalCallback``). :param check_freq: :param log_dir: Path to the folder where the model will be saved. It must contains the file created by the ``Monitor`` wrapper. :param verbose: Verbosity level. 
""" def __init__(self, check_freq: int, log_dir: str, verbose: int = 1): super(SaveOnBestTrainingRewardCallback, self).__init__(verbose) self.check_freq = check_freq self.log_dir = log_dir self.save_path = os.path.join(log_dir, 'best_model') self.best_mean_reward = -np.inf # def _init_callback(self) -> None: # # # Create folder if needed # # if self.save_path is not None: # # os.makedirs(self.save_path, exist_ok=True) def _on_step(self) -> bool: if self.n_calls % self.check_freq == 0: # Retrieve training reward x, y = ts2xy(load_results(self.log_dir), 'timesteps') if len(x) > 0: # Mean training reward over the last 100 episodes mean_reward = np.mean(y[-100:]) if self.verbose > 0: print(f"Num timesteps: {self.num_timesteps}") print(f"Best mean reward: {self.best_mean_reward:.2f} - Last mean reward per episode: {mean_reward:.2f}") # New best model, you could save the agent here if mean_reward > self.best_mean_reward: self.best_mean_reward = mean_reward # Example for saving best model if self.verbose > 0: print(f"Saving new best model to {self.save_path}") self.model.save(self.save_path) return True log_dir = "tmp/" os.makedirs(log_dir, exist_ok=True) if __name__ == '__main__': parser = argparse.ArgumentParser(description='malmovnv test') parser.add_argument('--mission', type=str, default='missions/jumping.xml', help='the mission xml') parser.add_argument('--port', type=int, default=9000, help='the mission server port') parser.add_argument('--server', type=str, default='127.0.0.1', help='the mission server DNS or IP address') parser.add_argument('--port2', type=int, default=None, help="(Multi-agent) role N's mission port. 
Defaults to server port.") parser.add_argument('--server2', type=str, default=None, help="(Multi-agent) role N's server DNS or IP") parser.add_argument('--episodes', type=int, default=100, help='the number of resets to perform - default is 1') parser.add_argument('--episode', type=int, default=0, help='the start episode - default is 0') parser.add_argument('--role', type=int, default=0, help='the agent role - defaults to 0') parser.add_argument('--episodemaxsteps', type=int, default=100, help='max number of steps per episode') parser.add_argument('--saveimagesteps', type=int, default=0, help='save an image every N steps') parser.add_argument('--resync', type=int, default=0, help='exit and re-sync every N resets' ' - default is 0 meaning never.') parser.add_argument('--experimentUniqueId', type=str, default='test1', help="the experiment's unique id.") args = parser.parse_args() if args.server2 is None: args.server2 = args.server xml = Path(args.mission).read_text() env = malmoenv.make() env.init(xml, args.port, server=args.server, server2=args.server2, port2=args.port2, role=args.role, exp_uid=args.experimentUniqueId, episode=args.episode, resync=args.resync) env = Monitor(env, log_dir) # print("checking env") check_env(env, True) s = SaveOnBestTrainingRewardCallback(2000, log_dir) # print("checked env") model = PPO("MlpPolicy", env, verbose=1, tensorboard_log="./ppo_test_tensorboard/") #model.load("tmp/best_model.zip") model.learn(total_timesteps=100000, callback=s, reset_num_timesteps=False) # print("trained and saved model") # for i in range(args.episodes): # print("reset " + str(i)) # obs = env.reset() # steps = 0 # done = False # while not done and (args.episodemaxsteps <= 0 or steps < args.episodemaxsteps): # # h, w, d = env.observation_space.shape # # print(done) # action, _states = model.predict(obs, deterministic=True) # # action = env.action_space.sample() # obs, reward, done, info = env.step(action) # steps += 1 # # print("reward: " + str(reward)) # # 
print(obs) # time.sleep(.05) env.close()
6,717
2,103
#!/usr/bin/env python3 # Software License Agreement (BSD License) # # Copyright (c) 2019, UFACTORY, Inc. # All rights reserved. # # Author: Vinman <vinman.wen@ufactory.cc> <vinman.cub@gmail.com> """ Example: Get GPIO Digital """ import os import sys import time sys.path.append(os.path.join(os.path.dirname(__file__), '../../..')) from xarm.wrapper import XArmAPI from configparser import ConfigParser parser = ConfigParser() parser.read('../robot.conf') try: ip = parser.get('xArm', 'ip') except: ip = input('Please input the xArm ip address[192.168.1.194]:') if not ip: ip = '192.168.1.194' arm = XArmAPI(ip) time.sleep(0.5) if arm.warn_code != 0: arm.clean_warn() if arm.error_code != 0: arm.clean_error() last_digitals = [-1, -1] while arm.connected and arm.error_code != 19 and arm.error_code != 28: code, digitals = arm.get_tgpio_digital() if code == 0: if digitals[0] == 1 and digitals[0] != last_digitals[0]: print('IO0 input high level') if digitals[1] == 1 and digitals[1] != last_digitals[1]: print('IO1 input high level') last_digitals = digitals time.sleep(0.1)
1,172
455
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** from . import _utilities import typing # Export this package's modules as members: from .cached_image import * from .container import * from .container_file import * from .network import * from .profile import * from .provider import * from .publish_image import * from .snapshot import * from .storage_pool import * from .volume import * from .volume_container_attach import * from ._inputs import * from . import outputs # Make subpackages available: if typing.TYPE_CHECKING: import pulumi_lxd.config as config else: config = _utilities.lazy_import('pulumi_lxd.config') _utilities.register( resource_modules=""" [ { "pkg": "lxd", "mod": "index/profile", "fqn": "pulumi_lxd", "classes": { "lxd:index/profile:Profile": "Profile" } }, { "pkg": "lxd", "mod": "index/storagePool", "fqn": "pulumi_lxd", "classes": { "lxd:index/storagePool:StoragePool": "StoragePool" } }, { "pkg": "lxd", "mod": "index/volumeContainerAttach", "fqn": "pulumi_lxd", "classes": { "lxd:index/volumeContainerAttach:VolumeContainerAttach": "VolumeContainerAttach" } }, { "pkg": "lxd", "mod": "index/cachedImage", "fqn": "pulumi_lxd", "classes": { "lxd:index/cachedImage:CachedImage": "CachedImage" } }, { "pkg": "lxd", "mod": "index/container", "fqn": "pulumi_lxd", "classes": { "lxd:index/container:Container": "Container" } }, { "pkg": "lxd", "mod": "index/network", "fqn": "pulumi_lxd", "classes": { "lxd:index/network:Network": "Network" } }, { "pkg": "lxd", "mod": "index/volume", "fqn": "pulumi_lxd", "classes": { "lxd:index/volume:Volume": "Volume" } }, { "pkg": "lxd", "mod": "index/containerFile", "fqn": "pulumi_lxd", "classes": { "lxd:index/containerFile:ContainerFile": "ContainerFile" } }, { "pkg": "lxd", "mod": "index/publishImage", "fqn": "pulumi_lxd", "classes": { "lxd:index/publishImage:PublishImage": "PublishImage" } }, { 
"pkg": "lxd", "mod": "index/snapshot", "fqn": "pulumi_lxd", "classes": { "lxd:index/snapshot:Snapshot": "Snapshot" } } ] """, resource_packages=""" [ { "pkg": "lxd", "token": "pulumi:providers:lxd", "fqn": "pulumi_lxd", "class": "Provider" } ] """ )
2,429
1,005
from collections import defaultdict


class Solution:
    def groupAnagrams(self, strs):
        """Group words that are anagrams of each other.

        Uses the sorted character sequence of each word as a canonical key
        and collects words directly under it (the original stored indices and
        re-resolved them in a second pass; also shadowed the builtins ``map``
        and ``l``).  Group order follows first occurrence, and words inside a
        group keep input order, so output is identical to the original.

        :param strs: list of strings
        :return: list of lists, one per anagram group ([] for empty input)
        """
        groups = defaultdict(list)
        for word in strs:
            groups[''.join(sorted(word))].append(word)
        return list(groups.values())


strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
sol = Solution()
print(sol.groupAnagrams(strs))
544
184
#!/usr/bin/env python3
import os
import time
import sys

# GPIO is optional so the script can be developed off-device.
gpio = None
try:
    import RPi.GPIO
    gpio = RPi.GPIO
except:
    print('RPi library not found. We\'re probably on a dev machine. Moving on...')

import lvconfig
import litrpc


# This could be more efficient, we're making a lot more requests than we need to.
def check_deposit(cointype):
    """Return the total balance (channel + txo, in sat) for one coin type.

    Uses the module-global ``conn`` set up in main().
    """
    bals = conn.balance()['Balances']
    sum = 0  # NOTE(review): shadows the builtin sum()
    for b in bals:
        if b['CoinType'] == int(cointype):
            # I'm not sure how this works, can it return dupes?
            sum += b['ChanTotal'] + b['TxoTotal']
    return sum


def main(cfg):
    """Poll lit balances and pulse the trigger pin once per purchased unit."""
    if cfg['trigger_pin_num'] == -1:
        print('You need to configure me first. Come back later.')
        sys.exit(1)

    # Find important commonly-used variables.
    trigger_pin = cfg['trigger_pin_num']
    sleep_time = cfg['pin_high_time']
    deposit_delay = cfg['deposit_delay_time']

    # Set up the GPIO pins.
    if gpio is not None:
        gpio.setmode(gpio.BOARD)
        gpio.setwarnings(False)
        gpio.setup(trigger_pin, gpio.OUT)

    # Set up the connection and connect.
    print('Connecting to lit at', cfg['lit_ip'], 'on port', cfg['lit_port'])
    global conn
    conn = litrpc.LitClient(cfg['lit_ip'], cfg['lit_port'])
    print('Set up client.')

    # Then just enter the main loop.
    print('Waiting for payment...')
    # -1 marks "no baseline yet"; the first poll only records the balance.
    last_bal = {}
    for ty in cfg['coin_type_ids']:
        last_bal[ty] = -1
    while True:
        # First figure out how much might have been sent to us.
        to_insert = 0
        for ty in cfg['coin_type_ids']:
            bal = check_deposit(ty)
            if last_bal[ty] != -1:
                diff = bal - last_bal[ty]
                if diff <= 0:
                    # when we withdraw it would break everything
                    # NOTE(review): this continue also skips the last_bal
                    # update below, keeping the pre-withdrawal baseline --
                    # presumably intentional; confirm.
                    continue
                unit_cost = cfg['unit_costs'][ty]
                units = int(diff // unit_cost)
                extra = diff - units * unit_cost  # remainder below one unit is not credited
                to_insert += units
                print('Balance for', ty, 'is now', bal, ', got a spend of', diff, 'sat worth', units, 'units with an extra', extra, 'sat left over')
            last_bal[ty] = bal
        # Then send that many quarters.
        if to_insert != 0:
            print('Total to insert:', to_insert)
            if gpio is not None:
                for i in range(to_insert):
                    # Just turn it on, wait a bit, and turn it off.
                    gpio.output(trigger_pin, gpio.HIGH)
                    time.sleep(sleep_time)
                    gpio.output(trigger_pin, gpio.LOW)
                    time.sleep(deposit_delay)
                print('Done')
            else:
                print('Not running on RPi, doing nothing!')
        else:
            print('No payment')
        time.sleep(cfg['poll_rate'])


if __name__ == '__main__':
    main(lvconfig.load_config())
2,429
1,002
#*** 文字列 *** #Pythonで文字列を作るには, ' (シングルクォーテーション)もしくは, " (ダブルクォーテーション)で囲む. print('some eggs') print("some eggs") print('some eggs\nsome eggs') #a == b は aとb同値であればTrue, そうでなければFalseを返す演算子です print('some eggs' == "some eggs") #True #'...' の中で ' ,または "..." の中で " を使う場合には, #各記号のまえに \ (バックスラッシュ) を入力する. print("I don't Know him") #I don't know him print('"Python"') #"Python" print("I don\'t know him") #I don't know him print("\"Python\"") #"Python" #\nは改行文字を表す. \nは一文字として扱われる. print("一行目\n二行目") #一行目 #二行目 #\nを改行文字としてではなく, #バックスラッシュ+nという文字列として使いたい場合は\\nと入力するか, #引用符の前にrをつけます. print("一行目\\n二行目") #一行目\n二行目 print(r"一行目\n二行目") #一行目\n二行目 #複数行の文字列を作りたいときは, #三連引用符("""...""" または ''' ... ''')を利用する. #改行は自動で含まれますが, 行末に \ を付けることで改行を無視することができる print("""改行あり 改行\ なし""") #改行あり #改行なし #文字列にも演算子がある. #+は文字列を連結して1つの文字列を作る. print("a lot of" + " eggs") #a lot of eggs #* は文字列を繰り返します print("Python" * 3) #PythonPythonPython #文字列も変数に代入して使うことができる. first_name = "太郎" last_name = "ドワンゴ" print(first_name + last_name) #太郎ドワンゴ #*** インデックス, スライス *** #文字列は添字表記, #(インデックス表記, つまり, ある文字列の何文字目かを指定)することができる. #最初の文字は 0番目になる. word = "Python" print(word) #Python #インデックスの指定には[]を使用します. #1文字目(0番目) print(word[0]) #P #5文字目(4番目) print(word[4]) #o #添字には負の数を指定するこもでき, #その場合は右から数えます. ただし, 0と-0は等しいので, 負の添字は-1から始まることに注意する. #最終文字 print(word[-1]) #n #後ろから2文字目 print(word[-2]) #o #まとめると正のインデックスと負のインデックスの関係は以下のようになる. # P y t h o n #正のインデックス 0 1 2 3 4 5 #負のインデックス -0 -5 -4 -3 -2 -1 #上の例にもあるように, oという文字は正のインデックスでは4番目, 負のインデックスでは, -2番目になる. #文字列のi番目からj番目までというように一部を切り出して取得することもできます. これをスライスと呼ぶ. #以下の例では, 0番目から1番目の文字列を取り出します. #終端の添字は1つ大きいことに注意する. #つまり開始番目の文字は含まれ, 終了番目は文字は列に含まれない(終了番目の1つ前まで取り出される) print(word[0:2]) #Py #開始番目を省略すると0とみなされます. #先頭から2番目まで print(word[:3]) #Pyt #逆に終了添字を省略すると文字列の最後までとみなされます. #3番目から最後まで print(word[3:]) #hon print(word[:3] + word[3:]) #Python #文字列の長さより大きい番目を指定した場合は範囲外エラーが発生します. #print(word[42]) #文字列の長さの取得にはlen()関数を使います. print("length:", len(word)) #length: 6 #ただし, スライスを利用した場合はエラーは発生せず適切に処理されます. 
print(word[4:42]) #on #またPythonの文字列は後から変更することができません. #word[0] = "J" #文字列の変更をするためには, 例えば修正したい文字列を再定義し直します. #1文字目をJにして, 以降はword[1:]を使う word = "J" + word[1:] print(word) #Jython #*** Format *** #ここでは文字列を特定のフォーマットで出力する方法を見ていきます. #print()関数を使って文字と数値などを同時に表示したいということがあると思います. #Pythonではそのような場合, #フォーマット済み文字列リテラル(f-string)というのを利用する. #名前が大げさですが, これは文字列を作るときに接頭辞としてfまたはFを付けることで生成される文字列である. #これらの文字列には波括弧{}を使って変数や式を埋め込むことができる. #下の例では{word}の部分を変数wordの内容で置き換える. word = "Python" print(f"Hello {word}") #Hello Python #{}の中では変数だけでなくPythonの気泡をそのまま使うことができます. print(f"length: {len(word)}") #length: 6 print(f"slice: {word[:2]}") #slice: Py #また数値であれば小数点以下の表示する桁数, 桁を揃えるために0や空白で埋める, #配置を中央に揃えるなど様々なフォーマットの文字列を作成することができる. pi = 3.14159265359 #そのまま表示 print(f"πの値は{pi}です") #πの値は3.14159265359です #小数点以下2桁まで print(f"πの値は{pi:.2f}です") #πの値は3.14です #最大10桁で不足分は空白で埋める print(f"πの値は{pi:10.2f}です") #πの値は 3.14です #最大5桁で不足分は0で埋める print(f"πの値は{pi:05.2f}です") #πの値は03.14です #右寄せ 空白埋め print(f"'{word:>10s}'") #' Python' #中央揃え print(f"'{word:^10s}'") #' Python '
3,070
2,467
import pytest

from fluentql import GenericSQLDialect, Q
from fluentql.types import Table

test_table = Table("test_table")


@pytest.fixture
def dialect_cls():
    """Dialect class used to compile the queries under test."""
    return GenericSQLDialect


# (query, expected compiled SQL) pairs for delete statements.
_DELETE_CASES = [
    (Q.delete().from_(test_table), "delete from test_table;"),
    (
        Q.delete().from_(test_table).where(test_table["col1"] > 100),
        "delete from test_table where col1 > 100;",
    ),
]


@pytest.mark.parametrize("q, expected", _DELETE_CASES)
def test_delete_query(q, expected, dialect_cls):
    """Each delete query compiles to exactly the expected SQL string."""
    assert q.compile(dialect_cls) == expected
570
198
"""Demo: parse a sentence with NLTK's bottom-up left-corner chart parser."""
from nltk.grammar import CFG
from nltk.parse.chart import ChartParser, BU_LC_STRATEGY

# Toy grammar: a sentence is <proper noun> 'is' followed by either a
# location phrase (T3) or 'the capital' plus a location phrase (T2 T3).
grammar = CFG.fromstring("""
S -> T1 T4
T1 -> NNP VBZ
T2 -> DT NN
T3 -> IN NNP
T4 -> T3 | T2 T3
NNP -> 'Tajmahal' | 'Agra' | 'Bangalore' | 'Karnataka'
VBZ -> 'is'
IN -> 'in' | 'of'
DT -> 'the'
NN -> 'capital'
""")

# trace=True prints every edge as it is added to the chart.
cp = ChartParser(grammar, BU_LC_STRATEGY, trace=True)

sentence = "Bangalore is the capital of Karnataka"
tokens = sentence.split()

# Build the full chart, then enumerate complete parses from the start symbol.
chart = cp.chart_parse(tokens)
parses = list(chart.parses(grammar.start()))

print("Total Edges :", len(chart.edges()))
for tree in parses:
    print(tree)
    tree.draw()  # opens a Tk window per parse tree
597
262
"""Thin launcher that forwards this process's arguments to Sphinx's CLI."""
from sphinx import cmdline
import sys

# NOTE(review): sphinx.cmdline was removed in newer Sphinx releases in favour
# of sphinx.cmd.build, and whether main() expects sys.argv or sys.argv[1:]
# depends on the Sphinx version -- confirm against the pinned version.
cmdline.main(sys.argv)
62
23
import logging from .store.user import User from .errors import SlackInactiveDispatcher, SlackNoThread logger = logging.getLogger(__name__) class SlackWrapper: """ A class to compose all available functionality of the slack plugin. An instance is offered to all incoming message of all the plugins to allow cross service messages """ def __init__(self, http_client, users, channels, groups, messages, threads, bot, dispatcher): self._http_client = http_client self._threads = threads self._dispatcher = dispatcher self.messages = messages self.users = users self.channels = channels self.groups = groups self.bot = bot async def send(self, *messages): """ Send the messages provided and update their timestamp :param messages: Messages to send """ for message in messages: message.frm = self.bot if self.bot.type == 'rtm' and isinstance(message.to, User): await self.users.ensure_dm(message.to) if message.response_url: # Message with a response url are response to actions or slash # commands data = message.serialize(type_='response') await self._http_client.response( data=data, url=message.response_url ) elif isinstance(message.to, User) and self.bot.type == 'rtm': data = message.serialize(type_='send', to=self.bot.type) message.raw = await self._http_client.message_send( data=data, token='bot' ) elif isinstance(message.to, User) and self.bot.type == 'event': data = message.serialize(type_='send', to=self.bot.type) message.raw = await self._http_client.message_send(data=data) else: data = message.serialize(type_='send', to=self.bot.type) message.raw = await self._http_client.message_send(data=data) async def update(self, *messages): """ Update the messages provided and update their timestamp :param messages: Messages to update """ for message in messages: if isinstance(message.to, User): await self.users.ensure_dm(message.to) message.frm = self.bot message.subtype = 'message_changed' message.raw = await self._http_client.message_update( message=message) message.ts = 
message.raw.get('ts') # await self._save_outgoing_message(message) async def delete(self, *messages): """ Delete the messages provided :param messages: Messages to delete """ for message in messages: message.timestamp = await self._http_client.message_delete(message) async def add_reaction(self, message, reaction): """ Add a reaction to a message :Example: >>> chat.add_reaction(Message, 'thumbsup') Add the thumbup and robotface reaction to the message :param messages: List of message and reaction to add """ await self._http_client.add_reaction(message, reaction) async def delete_reaction(self, message, reaction): """ Delete reactions from messages :Example: >>> chat.delete_reaction(Message, 'thumbsup') Delete the thumbup and robotface reaction from the message :param messages: List of message and reaction to delete """ await self._http_client.delete_reaction(message, reaction) async def get_reactions(self, message): """ Query the reactions of messages :param messages: Messages to query reaction from :return: dictionary of reactions by message :rtype: dict """ reactions = await self._http_client.get_reaction(message) for reaction in reactions: reaction['users'] = [ self.users.get(id_=user_id) for user_id in reaction.get('users', list()) ] message.reactions = reactions return reactions def add_action(self, id_, func, public=False): if 'action' in self._dispatcher: self._dispatcher['action'].register(id_, func, public=public) else: raise SlackInactiveDispatcher def add_event(self, event, func): if 'event' in self._dispatcher: self._dispatcher['event'].register(event, func) else: raise SlackInactiveDispatcher def add_command(self, command, func): if 'command' in self._dispatcher: self._dispatcher['command'].register(command, func) else: raise SlackInactiveDispatcher def add_message(self, match, func, flags=0, mention=False, admin=False, channel_id='*'): if 'action' in self._dispatcher: self._dispatcher['message'].register(match, func, flags, mention, admin, channel_id) 
else: raise SlackInactiveDispatcher def add_thread(self, message, func, user_id='all'): if message.thread or message.timestamp: self._threads[message.thread or message.timestamp][user_id] = func else: raise SlackNoThread()
5,526
1,415
import warnings
warnings.filterwarnings('ignore')  # ignore warnings to print values properly
import logging
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn import metrics
from datetime import datetime
from plotter import Plotter


class Classifier:
    """Trains one of a fixed set of classifiers on a repository's release data
    and prints/logs its scores plus a decision-boundary plot.
    """

    # constructor
    def __init__(self, conn, repo_user, repo_name):
        """conn: open DB-API connection; repo_user/repo_name identify the repo."""
        self.conn = conn
        self.repository_id = self._get_repository_id(repo_user, repo_name)
        # classifier key -> [plot output path, display name, estimator instance]
        self.dic_classifier = {
            'decisiontree': ["../output/decisiontreeplot.png", "Decision Tree",
                             DecisionTreeClassifier(criterion="entropy", max_depth=3)],
            'naivebayes': ["../output/nbplot.png", "Naive Bayes", GaussianNB()],
            'knn': ["../output/knnplot.png", "K-Nearest Neighbors (3)",
                    KNeighborsClassifier(n_neighbors=3)]
        }
        logging.basicConfig(filename="../output/returninfo.log", level=logging.INFO)

    def _get_repository_id(self, repo_user, repo_name):
        """Return the Id of repository '<user>/<name>', or 0 if not found."""
        cursor_conn = self.conn.cursor()
        sql = "SELECT Id FROM Repositories WHERE Name = ?"
        cursor_conn.execute(sql, ["{}/{}".format(repo_user, repo_name)])
        cursor_fetch = cursor_conn.fetchone()
        return cursor_fetch[0] if cursor_fetch else 0

    def _print_scores(self, classifier, X, y, test_size):
        """Fit on a train split and print+log accuracy/F1/precision/recall."""
        # Split dataset into training set and test set
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size, random_state=1)
        # Train Decision Tree Classifer
        classifier.fit(X_train, y_train)
        # Predict the response for test dataset
        y_pred = classifier.predict(X_test)
        print(" Accuracy:", metrics.accuracy_score(y_test, y_pred))
        logging.info(" Accuracy: {}".format(metrics.accuracy_score(y_test, y_pred)))
        print(" F1-Score:", metrics.f1_score(y_test, y_pred))
        logging.info(" F1-Score: {}".format(metrics.f1_score(y_test, y_pred)))
        print(" Precision:", metrics.precision_score(y_test, y_pred))
        logging.info(" Precision: {}".format(metrics.precision_score(y_test, y_pred)))
        print(" Recall:", metrics.recall_score(y_test, y_pred))
        logging.info(" Recall: {}".format(metrics.recall_score(y_test, y_pred)))
        #print(" Confusion Matrix:", metrics.confusion_matrix(y_test, y_pred))

    def classify(self, classifier_key):
        """Run the classifier named by classifier_key end to end.

        Prints an error (and logs it) for unsupported keys instead of raising.
        """
        if classifier_key in self.dic_classifier:
            dic_item = self.dic_classifier[classifier_key]
            classifier_path_plot_file = dic_item[0]
            classifier_name = dic_item[1]
            classifier_obj = dic_item[2]
            print("repository_id = '{}'".format(self.repository_id))
            #Get X, y arrays for classification, normalized data
            sql = "SELECT AuthorInfluencer, ClosedIssues, ClosedPullRequests, ClosedIssuesInfluencer, ClosedPullRequestsInfluencer, PrereleaseClass FROM ReleasesData WHERE IdRepository = ?;"
            # BUG FIX: params must be a sequence of parameter values.  The old
            # params=str(self.repository_id) passed a string, which the DB-API
            # treats as a sequence of single characters -- it only worked by
            # accident for single-digit ids.
            dataset = pd.read_sql_query(sql, self.conn, params=[self.repository_id])
            X = dataset[['ClosedIssuesInfluencer', 'ClosedPullRequestsInfluencer']]
            y = dataset['PrereleaseClass']  # contains the values from the "Class" column
            self._print_scores(classifier_obj, X, y, test_size=0.2)
            plotter = Plotter(classifier_name, classifier_obj, X, y)
            plotter.plot(classifier_path_plot_file)
            print("File '{}' plotted from current data and classifier '{}'".format(classifier_path_plot_file, classifier_name))
            logging.info("File '{}' plotted from current data and classifier '{}'".format(classifier_path_plot_file, classifier_name))
        else:
            print("{} :: classifier_key{} not found. Supported ones are: 'decisiontree', 'naivebayes', 'knn'".format(datetime.today().strftime('%Y-%m-%d-%H:%M:%S'), classifier_key))
            logging.info("{} :: classifier_key{} not found. Supported ones are: 'decisiontree', 'naivebayes', 'knn'".format(datetime.today().strftime('%Y-%m-%d-%H:%M:%S'), classifier_key))
4,321
1,371
from typing import Dict, List

# RGBA (0-255) entries keyed by MTBS severity class code; class 0 is fully
# transparent.
_severity_rgba: Dict[int, List[int]] = {
    0: [0, 0, 0, 0],
    1: [0, 100, 0, 255],
    2: [127, 255, 212, 255],
    3: [255, 255, 0, 255],
    4: [255, 0, 0, 255],
    5: [127, 255, 0, 255],
    6: [255, 255, 255, 255],
}

mtbs_colormaps: Dict[str, Dict[int, List[int]]] = {
    "mtbs-severity": _severity_rgba,
}
326
199
import sys

from app import app, socketio

if __name__ == "__main__":
    # Port may be supplied as the first CLI argument; default to 5000.
    port = int(sys.argv[1]) if len(sys.argv) > 1 else 5000
    socketio.run(app, host="0.0.0.0", port=port)
183
86
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Split force-history output into component and total force files.

Reads 'forces.dat', echoes every 13-column row to 'forces.cmp', and writes a
4-column total (first column kept as-is, plus factor-scaled pressure+viscous
sums) to 'forces.tot'.
"""
import sys
import numpy as np
import pylab as pl
from txtTools import openIOFile

# =*=*=*=* FUNCTION DEFINITIONS *=*=*=*=*=*=*=*=*=*=*=*


def isolateValues(line, stripChars):
    """Split *line* on whitespace, strip each char in *stripChars* from every
    token, and return the tokens converted to floats."""
    tokens = line.split()
    # BUG FIX: the original used xrange(), which is a NameError under the
    # Python 3 interpreter this script's shebang selects.
    for i in range(len(tokens)):
        for sc in stripChars:
            tokens[i] = tokens[i].strip(sc)
    return [float(s) for s in tokens]


# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*

# Optional CLI scale factor applied to the summed force components.
try:
    factor = sys.argv[1]
except IndexError:  # narrowed from a bare except; only "no argument" is expected
    factor = 1.
factor = float(factor)

f = openIOFile('forces.dat', 'r')
oc = openIOFile('forces.cmp', 'w')
ot = openIOFile('forces.tot', 'w')

lines = f.readlines()
spr = ['(', ')']
Fx = np.zeros(4, float)

for l in lines[1:]:  # skip the header line
    x = np.array(isolateValues(l, spr))
    if len(x) == 13:  # only full data rows; column layout assumed -- TODO confirm
        x.tofile(oc, sep=" \t")
        oc.write("\n")
        Fx[0] = x[0]
        for i in range(1, len(Fx)):  # was xrange (see above)
            Fx[i] = factor * (x[i] + x[i + 3])  # Pressure + Viscous
        Fx.tofile(ot, sep=" \t")
        ot.write("\n")

f.close()
oc.close()
ot.close()
1,004
487
import unittest

from fluentcheck import Is
from fluentcheck.exceptions import CheckError


# noinspection PyStatementEffect
class TestIsBasicChecks(unittest.TestCase):
    """Covers the `none` / `not_none` assertions on Is."""

    def test_is_none_pass(self):
        result = Is(None).none
        self.assertIsInstance(result, Is)

    def test_is_none_fail(self):
        with self.assertRaises(CheckError):
            Is("I am not none").none

    def test_is_not_none_pass(self):
        result = Is("I am not none").not_none
        self.assertIsInstance(result, Is)

    def test_is_not_none_fail(self):
        with self.assertRaises(CheckError):
            Is(None).not_none
580
189
# Generated by Django 2.0.2 on 2018-02-18 17:06 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('calculator', '0002_calculation_add_occurrences'), ] operations = [ migrations.AddField( model_name='calculation', name='last_occurrence', field=models.DateTimeField(auto_now=True), ), ]
414
138
from django.db import models
from django.core.exceptions import ValidationError
from rest_framework.exceptions import ValidationError as DRFValidationError

from api.models import TimestampedModel


class Interest(TimestampedModel):
    """A user interest, stored lowercased and unique by name."""

    # Always lowercased in save(), so uniqueness is effectively
    # case-insensitive.
    name = models.CharField(max_length=255, unique=True)

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Normalize before validating so differently-cased duplicates collide.
        self.name = self.name.lower()
        try:
            self.validate_unique()
        except ValidationError:
            # Re-raise as a DRF error so API callers get a 400 with this
            # message instead of a 500.
            raise DRFValidationError({"detail": "Name must be unique."})
        return super(Interest, self).save(*args, **kwargs)
627
173
"""Box for specifying initial guess for the fitting algorithm.""" from typing import Callable, List, Optional import numpy as np import toga from eddington import EddingtonException from toga.style import Pack from eddington_gui.boxes.line_box import LineBox from eddington_gui.consts import SMALL_INPUT_WIDTH class InitialGuessBox(LineBox): """Visual box for specifying initial guess.""" main_label: toga.Label initial_guess_labels: List[toga.Label] = [] initial_guess_inputs: List[toga.TextInput] = [] __n: int = 0 __a0: Optional[np.ndarray] = None __on_initial_guess_change: Optional[Callable[[], None]] = None def __init__(self, on_initial_guess_change): """Initial box.""" super().__init__() self.on_initial_guess_change = on_initial_guess_change self.main_label = toga.Label(text="Initial Guess:") self.add(self.main_label) @property def n(self): # pylint: disable=invalid-name """Getter of the expected number of parameters.""" return self.__n @n.setter def n(self, n): # pylint: disable=invalid-name """Setter of the expected number of parameters.""" self.reset_initial_guess() old_n = 0 if self.__n is None else self.__n self.__n = n if self.n > len(self.initial_guess_inputs): for i in range(len(self.initial_guess_inputs), self.n): self.initial_guess_labels.append(toga.Label(f"a[{i}]:")) self.initial_guess_inputs.append( toga.TextInput( style=Pack(width=SMALL_INPUT_WIDTH), on_change=lambda widget: self.reset_initial_guess(), ) ) if old_n < self.n: for i in range(old_n, self.n): self.add(self.initial_guess_labels[i], self.initial_guess_inputs[i]) if self.n < old_n: for i in range(self.n, old_n): self.remove(self.initial_guess_labels[i], self.initial_guess_inputs[i]) @property def a0(self): # pylint: disable=invalid-name """Getter of the initial guess.""" if self.__a0 is None: self.__calculate_a0() return self.__a0 @a0.setter def a0(self, a0): # pylint: disable=invalid-name """ Setter of the initial guess. 
Whenever a new initial guess is set, run handlers to update dependant components. """ self.__a0 = a0 if self.on_initial_guess_change is not None: self.on_initial_guess_change() @property def on_initial_guess_change(self): """on_initial_guess_change getter.""" return self.__on_initial_guess_change @on_initial_guess_change.setter def on_initial_guess_change(self, on_initial_guess_change): """on_initial_guess_change setter.""" self.__on_initial_guess_change = on_initial_guess_change def reset_initial_guess(self): """Reset the initial guess.""" self.a0 = None # pylint: disable=invalid-name def __calculate_a0(self): if self.n is None: return try: a0_values = [ self.initial_guess_inputs[i].value.strip() for i in range(self.n) ] if all([value == "" for value in a0_values]): return self.a0 = np.array(list(map(float, a0_values))) except ValueError as exc: raise EddingtonException( "Unable to parse initial guess. " "Initial guess should be written as floats." ) from exc
3,652
1,138
""" examples.select =============== An example that demonstrates the Select child class. """ from cues.cues import Select def main(): name = 'programming_language' message = 'Which of these is your favorite programming language?' options = ['Python', 'JavaScript', 'C++', 'C#'] cue = Select(name, message, options) answer = cue.send() print(answer) if __name__ == '__main__': main()
418
130
import json
import unittest
from functools import partial

import ckan.model as model
import responses
from ckanext.satreasury.plugin import SATreasuryDatasetPlugin
from mock import MagicMock, Mock, PropertyMock, patch

TRAVIS_ENDPOINT = "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal"
TRAVIS_COMMIT_MESSAGE = 'Rebuild with new/modified dataset'
TRAVIS_WEB_URL = "https://travis-ci.org/vulekamali/static-budget-portal/builds/"


class TestNotifyMethod(unittest.TestCase):
    """Tests for SATreasuryDatasetPlugin.notify and its Travis build triggering."""

    @responses.activate
    def setUp(self):
        self.entity = Mock(spec=model.Package)
        self.entity.owner_org = PropertyMock(return_value=True)
        self.plugin = SATreasuryDatasetPlugin()
        flash_success_patch = patch(
            'ckanext.satreasury.plugin.ckan_helpers.flash_success')
        self.flash_success_mock = flash_success_patch.start()
        flash_error_patch = patch(
            'ckanext.satreasury.plugin.ckan_helpers.flash_error')
        self.flash_error_mock = flash_error_patch.start()
        self.addCleanup(flash_success_patch.stop)
        # Fix: this patch was started but never registered for cleanup, so
        # flash_error stayed patched after the test case finished and leaked
        # into other test modules.
        self.addCleanup(flash_error_patch.stop)

    @patch(
        'ckanext.satreasury.plugin.travis.build_trigger_enabled',
        return_value=True)
    def test_notify_already_building(self, build_trigger_enabled_mock):
        """A build with our commit message already exists: no new build is triggered."""
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.GET,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
                json={
                    'builds': [
                        {
                            'id': 535878234,
                            'commit': {
                                'message': TRAVIS_COMMIT_MESSAGE
                            },
                        }]},
                status=200,
                content_type='application/json')
            self.plugin.notify(self.entity, None)
            message = "vulekamali will be updated in less than an hour. <a href='https://travis-ci.org/vulekamali/static-budget-portal/builds/535878234' >Check progress of the update process.</a>"
            self.flash_success_mock.assert_called_with(
                message, allow_html=True)

    @patch(
        'ckanext.satreasury.plugin.travis.build_trigger_enabled',
        return_value=True)
    def test_notify_build_triggered(self, build_trigger_enabled_mock):
        """No pending build: a build request is POSTed and its build id is reported."""
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.GET,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
                json={
                    'builds': []},
                status=200,
                content_type='application/json')
            rsps.add(
                responses.POST,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests",
                json={
                    'request': {
                        'id': 12345}},
                status=200,
                content_type='application/json')
            rsps.add(
                responses.GET,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/request/12345",
                json={
                    'builds': [
                        {
                            'commit': {
                                'message': TRAVIS_COMMIT_MESSAGE},
                            'id': 535878234,
                        }]},
                status=200,
                content_type='application/json')
            self.plugin.notify(self.entity, None)
            message = "vulekamali will be updated in less than an hour. <a href='https://travis-ci.org/vulekamali/static-budget-portal/builds/535878234' >Check progress of the update process.</a>"
            self.flash_success_mock.assert_called_with(
                message, allow_html=True)

    @patch(
        'ckanext.satreasury.plugin.travis.build_trigger_enabled',
        return_value=True)
    def test_notify_build_request_but_no_build(self, build_trigger_enabled_mock):
        """Request accepted but no build attached yet: link points at the builds page."""
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.GET,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
                json={
                    'builds': []},
                status=200,
                content_type='application/json')
            rsps.add(
                responses.POST,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests",
                json={
                    'request': {
                        'id': 12345}},
                status=200,
                content_type='application/json')
            rsps.add(
                responses.GET,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/request/12345",
                json={
                    'builds': []},
                status=200,
                content_type='application/json')
            self.plugin.notify(self.entity, None)
            message = "vulekamali will be updated in less than an hour. <a href='https://travis-ci.org/vulekamali/static-budget-portal/builds/' >Check progress of the update process.</a>"
            self.flash_success_mock.assert_called_with(
                message, allow_html=True)

    @patch(
        'ckanext.satreasury.plugin.travis.build_trigger_enabled',
        return_value=True)
    def test_notify_build_trigger_errored(self, build_trigger_enabled_mock):
        """Travis returns a 500 on the build request: the error is flashed to the user."""
        with responses.RequestsMock() as rsps:
            rsps.add(
                responses.GET,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
                json={
                    'builds': []},
                status=200,
                content_type='application/json')
            rsps.add(
                responses.POST,
                "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests",
                json={
                    'request': {
                        'id': 12345}},
                status=500,
                content_type='application/json')
            self.plugin.notify(self.entity, None)
            message = 'An error occurred when updating the static site data. Technical details: 500 Server Error: Internal Server Error for url: https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests'
            self.flash_error_mock.assert_called_with(message)

    @patch(
        'ckanext.satreasury.plugin.travis.build_trigger_enabled',
        return_value=False)
    def test_notify_build_not_enabled(self, build_trigger_enabled_mock):
        """When build triggering is disabled, notify is a no-op and must not raise."""
        self.plugin.notify(self.entity, None)
        self.assertTrue(True)
6,685
1,998
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Routines to evaluate the system.
"""
import logging
from collections import Counter

from . import db

logger = logging.getLogger(__name__)

def get_exhaustive_samples(corpus_tag):
    """
    Use the document_sample table to get which documents have been
    exhaustively sampled.

    Returns a list of ((subject_id, relation, object_id), 1.0) pairs for
    every confidently labelled (weight > 0.5) positive relation found in
    the sampled documents carrying :param:`corpus_tag`.
    """
    # Fixed two bugs in the original query: a stray comma after the FROM
    # table ("FROM evaluation_relation e, JOIN ..." is invalid SQL), and the
    # placeholder %(corpus_tag)s which did not match the keyword argument
    # (`tag`) actually passed to db.select.
    rows = db.select("""
        SELECT e.doc_id, e.subject_id, e.object_id, e.relation, e.weight
        FROM evaluation_relation e
        JOIN document_sample s ON (e.doc_id = s.doc_id)
        JOIN document_tag t ON (e.doc_id = t.doc_id AND t.tag = %(tag)s)
        WHERE e.weight > 0.5 AND e.relation <> 'no_relation'
        """, tag=corpus_tag)
    return [((row.subject_id, row.relation, row.object_id), 1.0)
            for row in rows]

def get_submission_samples(corpus_tag, scheme, submission_id):
    """
    Get the selectively sampled relations for one submission.

    Returns a list of ((subject_id, predicted_relation, object_id), score)
    pairs where score is 1.0 when the prediction matches the gold relation
    and 0.0 otherwise.
    """
    rows = db.select("""
        SELECT r.doc_id, r.subject_id, r.object_id,
               r.relation AS predicted_relation,
               e.relation AS gold_relation,
               b.params
        FROM submission_relation r, submission s, evaluation_relation e, evaluation_batch b
        WHERE e.question_batch_id = b.id
          AND r.doc_id = e.doc_id AND r.subject_id = e.subject_id AND r.object_id = e.object_id
          AND r.submission_id = s.id
          AND b.corpus_tag = %(tag)s AND b.batch_type = 'selective_relations'
          AND b.params ~ %(scheme)s AND b.params ~ %(submission_f)s
          AND r.submission_id = %(submission_id)s
        """,
        tag=corpus_tag,
        scheme='"method":"{}"'.format(scheme),
        submission_id=submission_id,
        submission_f='"submission_id":{}'.format(submission_id)
    )
    # TODO: ^^ is a hack to get the right rows from the database. we
    # should probably do differently.
    return [((row.subject_id, row.predicted_relation, row.object_id),
             1.0 if row.predicted_relation == row.gold_relation else 0.0)
            for row in rows]
2,011
656
#! /usr/bin/env python
import os
import sys

from .api import install as install_mmd


def model_data_dir(name, datarootdir=None):
    """Get a model's data dir.

    Parameters
    ----------
    name : str
        The name of the model.
    datarootdir : str, optional
        Root of the shared-data tree; defaults to ``<sys.prefix>/share``.

    Returns
    -------
    str
        The absolute path to the data directory for the model.
    """
    datarootdir = datarootdir or os.path.join(sys.prefix, "share")
    return os.path.join(datarootdir, "csdms", name)


def get_cmdclass(paths, cmdclass=None):
    """Build a ``cmdclass`` dict whose install/develop commands also install
    model metadata.

    Parameters
    ----------
    paths : iterable of (name, path)
        Metadata entries; only the portion of *name* after the last ``:``
        is used as the on-disk model name.
    cmdclass : dict, optional
        Existing command classes to extend.  The input dict is copied, not
        mutated.

    Returns
    -------
    dict
        Command classes with ``install`` (and ``develop``, when setuptools
        is available) wrapped to copy metadata into ``<prefix>/share/csdms``.
    """
    cmdclass = {} if cmdclass is None else cmdclass.copy()

    # Fix: ``develop`` is a setuptools-only command — plain distutils has no
    # ``distutils.command.develop`` module, so the old fallback branch always
    # raised ImportError.  Import from setuptools when we can, and simply
    # omit the develop wrapper otherwise.
    try:
        from setuptools.command.develop import develop as _develop
        from setuptools.command.install import install as _install
    except ImportError:
        from distutils.command.install import install as _install

        _develop = None

    sharedir = os.path.join(sys.prefix, "share")

    def _install_metadata(develop):
        # Copy every metadata file into the shared csdms directory.
        for name, path in paths:
            name = name.split(":")[-1]
            install_mmd(
                os.path.abspath(path),
                os.path.join(sharedir, "csdms", name),
                silent=False,
                clobber=True,
                develop=develop,
            )

    class install(_install):
        def run(self):
            _install.run(self)
            _install_metadata(False)

    cmdclass["install"] = install

    if _develop is not None:

        class develop(_develop):
            def run(self):
                _develop.run(self)
                _install_metadata(True)

        cmdclass["develop"] = develop

    return cmdclass


def get_entry_points(components, entry_points=None):
    """Collect ``pymt.plugins`` entry points for *components*.

    Parameters
    ----------
    components : iterable of (entry_point, anything)
        Only the first element of each pair is used.
    entry_points : dict, optional
        Existing entry points to extend.  For consistency with
        ``get_cmdclass`` the input dict (and its plugin list) is copied
        rather than mutated.

    Returns
    -------
    dict
        Entry points including the accumulated ``pymt.plugins`` list.
    """
    entry_points = {} if entry_points is None else entry_points.copy()
    pymt_plugins = list(entry_points.get("pymt.plugins", []))
    for entry_point, _ in components:
        pymt_plugins.append(entry_point)
    if len(pymt_plugins) > 0:
        entry_points["pymt.plugins"] = pymt_plugins
    return entry_points
2,212
656
#!/usr/bin/python3
# Reads Webmin server definition files (*.serv) from /etc/webmin/servers,
# keeps only the 11-line ones, greps host/user/pass out of each and loads
# them into the MariaDB table `hms`.`tests` (table is emptied first).
# NOTE(review): shell commands are built by interpolating filenames into a
# shell=True string (injection risk if a filename is hostile), and the DB
# connection uses root with an empty password — both worth revisiting.
import os
import subprocess
import re
import pymysql
from datetime import datetime

strPath = r"/etc/webmin/servers";# file dir
files = os.listdir(strPath)
lists = [];# file lists
host = [];
user = [];
pwd = [];
val = 0;# extractServer use
test = "";# grep host
test1 = "";# grep user
test2 = "";# grep pass
test3 = "";# Text = remove
test5 = "";# Text /n remove
test7 = "";# Text1 ' remove
test9 = "";# Text1 /n remove
#retry = "";# fail use filename show : no use
cnt1 = 0;# array file wc total count
filelenlist = [];# files wc total list
filelentotallist = ""; #files wc total list make word and reset
finallist = []; # after less 11 rows romeve then finally list
lenlist = [];
fcnt = [];# length 11 less count list
frows = 0;# length 11 less count
hs = "";# host
us = "";# user
ps = "";# pass
rows = 0;# file wc -l
row = 0;# file wc -l
count = 0;# total file count for 11 less count
servers = "";
#total = [];# value total : no use

##########################################################################################
# FUNCTION
##########################################################################################
def extractServer(server):
    """Return the filename up to its first '.', stripped of list punctuation."""
    val = server.index('.')
    result = server[:val]
    return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')

def extractText1(text1):
    """Extract the digits from a value (e.g. the count from `wc -l` output)."""
    #result = re.findall(r'^=[0-9]+(?:\.[0-9]+)', text)
    result = re.findall(r'\d+',str(text1))
    return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')

#def extractFile(file):
#    result = re.search(r'.*[.].*$', file)
#    return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')

def extractIp(ip):
    """Extract a dotted-quad IPv4 address from a grep'd 'host=' line."""
    result = re.findall(r'[0-9]+(?:\.[0-9]+){3}', ip)
    return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')

#regex1 = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')
def extractText(text):
    """Return the value between the first '=' and the first newline of *text*."""
    #result = re.findall(r'^=[0-9]+(?:\.[0-9]+)', text)
    test3 = text.index('=')
    test5 = text.index('\n')
    result = text[test3+1:test5]
    return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')

print("files = %s" % files)
# Only Webmin server definition files are of interest.
servs = [file for file in files if file.endswith(".serv")]
cnt = 0;
now1 = datetime.now()
# Stamp rows with today's date as YYYYMMDD.
now = now1.strftime("%Y")+now1.strftime("%m")+now1.strftime("%d")
print("now = %s" %now);
print("servs = %s" % servs);
print("servs len = %s" % len(servs));
db = pymysql.connect(host='172.20.0.3', port=3306, user='root', passwd='', db='hms',charset='utf8',autocommit=True)
cursor = db.cursor()

##########################################################################################
# SERVER LIST PASING & MARIADB INSERT
##########################################################################################
# First pass: record each file's line count and collect files that are NOT
# exactly 11 lines long into fcnt (they are removed before the insert pass).
for serve in servs:
    print("===================================================");
    print("start row 11 less count check servs = %s : " % servs);
    print("start row 11 less count check serve = %s : " % serve);
    print("===================================================");
    print("now count = %s :" % count);
    lenlist.append(serve)
    print("all lenlist count = %s :" % lenlist);
    # NOTE(review): filename interpolated into a shell string, see header.
    cnt2 = subprocess.check_output('cat /etc/webmin/servers/%s | wc -l' % lenlist[count],shell=True)
    cnt1 = extractText1(cnt2)
    filelenlist.append(cnt1)
    print("now filelenlist = %s :" % filelenlist[count]);
    #print("filelenlist.split() = %s : " % " ".join(filelenlist[count]));
    #for y in range(filelenlist):
    ##filelenlist[count]
    ##for fll in filelenlist:
    print("filelenlist[%d] = %s :" % (count, filelenlist));
    ## print("len(filelenlist) = %s :" % len(filelenlist));
    #print("now fll = %s :" % fll);
    #fl = fll.split(",")
    filelentotallist = filelenlist[count]
    print("now filelentotallist = %s :" % filelentotallist);
    if filelentotallist == '11':
        if count < len(servs):
            #count = count + 1;
            print("11 length ! pass ~~");
    else:
        # Not 11 lines: remember the file so it can be dropped later.
        fcnt.append(serve)
        print(" no 11 length find ~~~ add value in fcnt + 1 = %s :" % count);
        if count < len(servs):
            #count = count + 1;
            filelentotallist = "";
    count = count + 1;
    print("===================================================");
    print("end row count = %s :" % count);
    print("fcnt = %s :" % fcnt);
    print("===================================================");

frows = len(fcnt)
print("frows = %s:" % frows);

##########################################################################################
# frows : less 11 rows -> craete new array and input filename and remove it
##########################################################################################
for removes in fcnt:
    servs.remove(removes)
    print(" alter remove less 11 rows servs = %s :" % servs);

# Second pass: wipe the table and insert one row per remaining server file.
try:
    with cursor:
        sql_d = "DELETE FROM tests"
        cursor.execute(sql_d)
        db.commit()
        for serv in servs:
            lists.append(serv)
            print("-----------------------------------------------------");
            print("lists[cnt] = %s cnt = %d : " % (lists[cnt], cnt));
            rows = subprocess.check_output('cat /etc/webmin/servers/%s | wc -l' % lists[cnt],shell=True)
            row = extractText1(rows)
            print("-----------------------------------------------------");
            print("row = %s cnt = %d : " % (row, cnt));
            print("-----------------------------------------------------");
            servers = extractServer(serv)
            #total.append(servers)
            print("fname = %s" % servers);
            # Re-check the length; files were already filtered above, so this
            # is normally always true here.
            if row == "11":
                test = subprocess.check_output('cat /etc/webmin/servers/%s | grep host' % lists[cnt],shell=True)
                test1 = subprocess.check_output('cat /etc/webmin/servers/%s | grep user' % lists[cnt],shell=True)
                test2 = subprocess.check_output('cat /etc/webmin/servers/%s | grep pass' % lists[cnt],shell=True)
                hs = extractIp(test.decode('utf-8'))
                host.append(hs)
                print("host =%s" % host[cnt]);
                print("host[%d] =%s" % (cnt,host[cnt]));
                #total.append(hs)
                us = extractText(test1.decode('utf-8'))
                user.append(us)
                print("user =%s" % user[cnt]);
                print("user[%d] =%s" % (cnt,user[cnt]));
                #total.append(us)
                ps = extractText(test2.decode('utf-8'))
                pwd.append(ps)
                print("pwd =%s" %pwd[cnt]);
                print("pwd[%d] =%s" % (cnt,pwd[cnt]));
                #total.append(ps)
                #cursor.execute("INSERT INTO tests(fname,host,user,pwd,inputdt) VALUES (%s,%s,%s,%s,%s)" % (servers,host[cnt],user[cnt],pwd[cnt],now))
                sql = "INSERT INTO `tests` (`fname`,`host`,`user`,`pwd`,`inputdt`) VALUES (%s,%s,%s,%s,%s)"
                #for i in servs:
                cursor.execute(sql, (servers,host[cnt],user[cnt],pwd[cnt],now))
                data = cursor.fetchall()
                db.commit()
                # NOTE(review): cnt is only advanced while cnt < len(servs);
                # the guard looks redundant but is kept as-is.
                if cnt < len(servs):
                    cnt = cnt+1;
                else:
                    #print("cnt = %d:" % cnt);
                    #retry = servs[cnt]
                    #print("retry = %s : " % retry);
                    #if cnt < len(servs)-1:
                    #    cnt = cnt;
                    #    print("cnt = %d , cnt < len(servs):" % cnt);
                    #    print("lists[cnt] = %s cnt = %d : " % (lists[cnt], cnt));
                    #    continue
                    pass
                    #else:
                    #    cnt = cnt;
                    #    print("cnt = %d , cnt = len(servs): " % cnt);
                    #    print("lists[cnt] = %s cnt = %d : " % (lists[cnt], cnt));
                    #    continue
                    #    pass
finally:
    db.close()

print("servs = %s" % servs)
print("The currnt directory is: %s" % strPath)
8,060
2,582
"""Tree level width module.""" from collections import deque def tree_level_width(tree): """Return a list containing the width of each level of the specified tree.""" result = [] count = 0 queue = deque([tree.root, "s"]) while len(queue) > 0: node = queue.popleft() if node == "s": if(count == 0): break else: result.append(count) count = 0 queue.append("s") else: count += 1 queue.extend(node.children) return result
579
159
import os
import unittest

import clib
from clib.utils import Box


class BboxTest(unittest.TestCase):
    """Unit tests for clib.utils.Box corner arithmetic and cropping."""

    def setUp(self):
        # Box(50, 50, 40, 60): presumably (center_x, center_y, width, height)
        # — the expected corners (30, 20)-(70, 80) below are consistent with
        # that reading, but confirm against clib.utils.Box.
        self.bbox = Box(50, 50, 40, 60)

    def test_vi_bbox(self):
        # Integer corner accessors return tuples ...
        self.assertEqual(self.bbox.int_left_top(), (30, 20))
        self.assertEqual(self.bbox.int_right_bottom(), (70, 80))
        # ... while the float accessors return lists.
        self.assertEqual(self.bbox.left_top(), [30.0, 20.0])
        self.assertEqual(self.bbox.right_bottom(), [70.0, 80.0])
        # Cropping to a 5x5 region clamps the bottom-right corner in place.
        self.bbox.crop_region(5, 5)
        self.assertEqual(self.bbox.right_bottom(), [5.0, 5.0])


if __name__ == '__main__':
    unittest.main()
592
250
# coding: utf-8 """ IriusRisk API Products API # noqa: E501 OpenAPI spec version: 1 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from iriusrisk_python_client_lib.api_client import ApiClient class UsersApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def groups_group_users_delete(self, api_token, group, unassing_users_group_request_body, **kwargs): # noqa: E501 """Unassign a list of users from a group # noqa: E501 Unassign a list of users from a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign users from a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_delete(api_token, group, unassing_users_group_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :param UnassingUsersGroupRequestBody unassing_users_group_request_body: JSON object that contains information to unassign users from group (required) :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.groups_group_users_delete_with_http_info(api_token, group, unassing_users_group_request_body, **kwargs) # noqa: E501 else: (data) = self.groups_group_users_delete_with_http_info(api_token, group, unassing_users_group_request_body, **kwargs) # noqa: E501 return data def groups_group_users_delete_with_http_info(self, api_token, group, unassing_users_group_request_body, **kwargs): # noqa: E501 """Unassign a list of users from a group # noqa: E501 Unassign a list of users from a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign users from a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_delete_with_http_info(api_token, group, unassing_users_group_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :param UnassingUsersGroupRequestBody unassing_users_group_request_body: JSON object that contains information to unassign users from group (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['api_token', 'group', 'unassing_users_group_request_body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method groups_group_users_delete" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_delete`") # noqa: E501 # verify the required parameter 'group' is set if ('group' not in params or params['group'] is None): raise ValueError("Missing the required parameter `group` when calling `groups_group_users_delete`") # noqa: E501 # verify the required parameter 'unassing_users_group_request_body' is set if ('unassing_users_group_request_body' not in params or params['unassing_users_group_request_body'] is None): raise ValueError("Missing the required parameter `unassing_users_group_request_body` when calling `groups_group_users_delete`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in params: path_params['group'] = params['group'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None if 'unassing_users_group_request_body' in params: body_params = params['unassing_users_group_request_body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return 
self.api_client.call_api( '/groups/{group}/users', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def groups_group_users_get(self, api_token, group, **kwargs): # noqa: E501 """List users from a group # noqa: E501 List users who belongs to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to list users of a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_get(api_token, group, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :return: list[User] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.groups_group_users_get_with_http_info(api_token, group, **kwargs) # noqa: E501 else: (data) = self.groups_group_users_get_with_http_info(api_token, group, **kwargs) # noqa: E501 return data def groups_group_users_get_with_http_info(self, api_token, group, **kwargs): # noqa: E501 """List users from a group # noqa: E501 List users who belongs to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. 
With this permission you will be able to list users of a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_get_with_http_info(api_token, group, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :return: list[User] If the method is called asynchronously, returns the request thread. """ all_params = ['api_token', 'group'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method groups_group_users_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_get`") # noqa: E501 # verify the required parameter 'group' is set if ('group' not in params or params['group'] is None): raise ValueError("Missing the required parameter `group` when calling `groups_group_users_get`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in params: path_params['group'] = params['group'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # 
noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/groups/{group}/users', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[User]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def groups_group_users_put(self, api_token, group, assign_user_group_request_body, **kwargs): # noqa: E501 """Assigns users to a group # noqa: E501 Assigns users to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to assign users to a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_put(api_token, group, assign_user_group_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :param AssignUserGroupRequestBody assign_user_group_request_body: JSON object that contains information to assign users to group (required) :return: InlineResponse201 If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.groups_group_users_put_with_http_info(api_token, group, assign_user_group_request_body, **kwargs) # noqa: E501 else: (data) = self.groups_group_users_put_with_http_info(api_token, group, assign_user_group_request_body, **kwargs) # noqa: E501 return data def groups_group_users_put_with_http_info(self, api_token, group, assign_user_group_request_body, **kwargs): # noqa: E501 """Assigns users to a group # noqa: E501 Assigns users to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to assign users to a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_put_with_http_info(api_token, group, assign_user_group_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :param AssignUserGroupRequestBody assign_user_group_request_body: JSON object that contains information to assign users to group (required) :return: InlineResponse201 If the method is called asynchronously, returns the request thread. 
""" all_params = ['api_token', 'group', 'assign_user_group_request_body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method groups_group_users_put" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_put`") # noqa: E501 # verify the required parameter 'group' is set if ('group' not in params or params['group'] is None): raise ValueError("Missing the required parameter `group` when calling `groups_group_users_put`") # noqa: E501 # verify the required parameter 'assign_user_group_request_body' is set if ('assign_user_group_request_body' not in params or params['assign_user_group_request_body'] is None): raise ValueError("Missing the required parameter `assign_user_group_request_body` when calling `groups_group_users_put`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in params: path_params['group'] = params['group'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None if 'assign_user_group_request_body' in params: body_params = params['assign_user_group_request_body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( 
'/groups/{group}/users', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='InlineResponse201', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def groups_group_users_user_delete(self, api_token, group, user, **kwargs): # noqa: E501 """Removes a user from a group # noqa: E501 Unassign a user from a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign user from a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_user_delete(api_token, group, user, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :param str user: user to be removed from group (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.groups_group_users_user_delete_with_http_info(api_token, group, user, **kwargs) # noqa: E501 else: (data) = self.groups_group_users_user_delete_with_http_info(api_token, group, user, **kwargs) # noqa: E501 return data def groups_group_users_user_delete_with_http_info(self, api_token, group, user, **kwargs): # noqa: E501 """Removes a user from a group # noqa: E501 Unassign a user from a group. 
Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign user from a group, **if you belong to this group**. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.groups_group_users_user_delete_with_http_info(api_token, group, user, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str group: name of the group (required) :param str user: user to be removed from group (required) :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['api_token', 'group', 'user'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method groups_group_users_user_delete" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_user_delete`") # noqa: E501 # verify the required parameter 'group' is set if ('group' not in params or params['group'] is None): raise ValueError("Missing the required parameter `group` when calling `groups_group_users_user_delete`") # noqa: E501 # verify the required parameter 'user' is set if ('user' not in params or params['user'] is None): raise ValueError("Missing the required parameter `user` when calling `groups_group_users_user_delete`") # noqa: E501 collection_formats = {} path_params = {} if 'group' in params: path_params['group'] = 
params['group'] # noqa: E501 if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/groups/{group}/users/{user}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def products_ref_users_delete(self, api_token, ref, unassign_users_product_request_body, **kwargs): # noqa: E501 """Unassigns a list of users from a product. # noqa: E501 Unassign a list of users from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_delete(api_token, ref, unassign_users_product_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference for product (required) :param UnassignUsersProductRequestBody unassign_users_product_request_body: JSON object that contains information to unassign users from product (required) :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.products_ref_users_delete_with_http_info(api_token, ref, unassign_users_product_request_body, **kwargs) # noqa: E501 else: (data) = self.products_ref_users_delete_with_http_info(api_token, ref, unassign_users_product_request_body, **kwargs) # noqa: E501 return data def products_ref_users_delete_with_http_info(self, api_token, ref, unassign_users_product_request_body, **kwargs): # noqa: E501 """Unassigns a list of users from a product. # noqa: E501 Unassign a list of users from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_delete_with_http_info(api_token, ref, unassign_users_product_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference for product (required) :param UnassignUsersProductRequestBody unassign_users_product_request_body: JSON object that contains information to unassign users from product (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['api_token', 'ref', 'unassign_users_product_request_body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method products_ref_users_delete" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_delete`") # noqa: E501 # verify the required parameter 'ref' is set if ('ref' not in params or params['ref'] is None): raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_delete`") # noqa: E501 # verify the required parameter 'unassign_users_product_request_body' is set if ('unassign_users_product_request_body' not in params or params['unassign_users_product_request_body'] is None): raise ValueError("Missing the required parameter `unassign_users_product_request_body` when calling `products_ref_users_delete`") # noqa: E501 collection_formats = {} path_params = {} if 'ref' in params: path_params['ref'] = params['ref'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None if 'unassign_users_product_request_body' in params: body_params = params['unassign_users_product_request_body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return 
self.api_client.call_api( '/products/{ref}/users', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def products_ref_users_get(self, api_token, ref, **kwargs): # noqa: E501 """List all users assigned to a product # noqa: E501 List all users assigned to a product. Conditions to be able to perform the action: - No permissions are required to perform this action. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_get(api_token, ref, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference to product (required) :return: list[str] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.products_ref_users_get_with_http_info(api_token, ref, **kwargs) # noqa: E501 else: (data) = self.products_ref_users_get_with_http_info(api_token, ref, **kwargs) # noqa: E501 return data def products_ref_users_get_with_http_info(self, api_token, ref, **kwargs): # noqa: E501 """List all users assigned to a product # noqa: E501 List all users assigned to a product. Conditions to be able to perform the action: - No permissions are required to perform this action. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_get_with_http_info(api_token, ref, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference to product (required) :return: list[str] If the method is called asynchronously, returns the request thread. """ all_params = ['api_token', 'ref'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method products_ref_users_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_get`") # noqa: E501 # verify the required parameter 'ref' is set if ('ref' not in params or params['ref'] is None): raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_get`") # noqa: E501 collection_formats = {} path_params = {} if 'ref' in params: path_params['ref'] = params['ref'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/products/{ref}/users', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[str]', # noqa: E501 auth_settings=auth_settings, 
async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def products_ref_users_put(self, api_token, ref, assign_users_product_request_body, **kwargs): # noqa: E501 """Assigns users to a product. # noqa: E501 Assigns users to a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_put(api_token, ref, assign_users_product_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference for product (required) :param AssignUsersProductRequestBody assign_users_product_request_body: JSON data that contains the information to assign users to product (required) :return: ProductShortUsers If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.products_ref_users_put_with_http_info(api_token, ref, assign_users_product_request_body, **kwargs) # noqa: E501 else: (data) = self.products_ref_users_put_with_http_info(api_token, ref, assign_users_product_request_body, **kwargs) # noqa: E501 return data def products_ref_users_put_with_http_info(self, api_token, ref, assign_users_product_request_body, **kwargs): # noqa: E501 """Assigns users to a product. # noqa: E501 Assigns users to a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_put_with_http_info(api_token, ref, assign_users_product_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference for product (required) :param AssignUsersProductRequestBody assign_users_product_request_body: JSON data that contains the information to assign users to product (required) :return: ProductShortUsers If the method is called asynchronously, returns the request thread. """ all_params = ['api_token', 'ref', 'assign_users_product_request_body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method products_ref_users_put" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_put`") # noqa: E501 # verify the required parameter 'ref' is set if ('ref' not in params or params['ref'] is None): raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_put`") # noqa: E501 # verify the required parameter 'assign_users_product_request_body' is set if ('assign_users_product_request_body' not in params or params['assign_users_product_request_body'] is None): raise ValueError("Missing the required parameter `assign_users_product_request_body` when calling `products_ref_users_put`") # noqa: E501 collection_formats = {} path_params = {} if 'ref' in params: path_params['ref'] = params['ref'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: 
header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None if 'assign_users_product_request_body' in params: body_params = params['assign_users_product_request_body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/products/{ref}/users', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ProductShortUsers', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def products_ref_users_user_delete(self, api_token, ref, user, **kwargs): # noqa: E501 """Unassigns a user from a product # noqa: E501 Unassigns a user from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_user_delete(api_token, ref, user, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference for product (required) :param str user: Username of the user who will be unassigned from the product (required) :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.products_ref_users_user_delete_with_http_info(api_token, ref, user, **kwargs) # noqa: E501 else: (data) = self.products_ref_users_user_delete_with_http_info(api_token, ref, user, **kwargs) # noqa: E501 return data def products_ref_users_user_delete_with_http_info(self, api_token, ref, user, **kwargs): # noqa: E501 """Unassigns a user from a product # noqa: E501 Unassigns a user from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.products_ref_users_user_delete_with_http_info(api_token, ref, user, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str ref: Reference for product (required) :param str user: Username of the user who will be unassigned from the product (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['api_token', 'ref', 'user'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method products_ref_users_user_delete" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_user_delete`") # noqa: E501 # verify the required parameter 'ref' is set if ('ref' not in params or params['ref'] is None): raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_user_delete`") # noqa: E501 # verify the required parameter 'user' is set if ('user' not in params or params['user'] is None): raise ValueError("Missing the required parameter `user` when calling `products_ref_users_user_delete`") # noqa: E501 collection_formats = {} path_params = {} if 'ref' in params: path_params['ref'] = params['ref'] # noqa: E501 if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/products/{ref}/users/{user}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, 
response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def users_get(self, api_token, **kwargs): # noqa: E501 """List of all Users. # noqa: E501 Returns a list of all the users of the system. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_get(api_token, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :return: list[User] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.users_get_with_http_info(api_token, **kwargs) # noqa: E501 else: (data) = self.users_get_with_http_info(api_token, **kwargs) # noqa: E501 return data def users_get_with_http_info(self, api_token, **kwargs): # noqa: E501 """List of all Users. # noqa: E501 Returns a list of all the users of the system. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_get_with_http_info(api_token, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :return: list[User] If the method is called asynchronously, returns the request thread. 
""" all_params = ['api_token'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method users_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `users_get`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/users', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[User]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def users_post(self, api_token, create_user_request_body, **kwargs): # noqa: E501 """Creates a new user # noqa: E501 Creates a new user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_post(api_token, create_user_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param CreateUserRequestBody create_user_request_body: JSON data that contains information to creates new user (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.users_post_with_http_info(api_token, create_user_request_body, **kwargs) # noqa: E501 else: (data) = self.users_post_with_http_info(api_token, create_user_request_body, **kwargs) # noqa: E501 return data def users_post_with_http_info(self, api_token, create_user_request_body, **kwargs): # noqa: E501 """Creates a new user # noqa: E501 Creates a new user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_post_with_http_info(api_token, create_user_request_body, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param CreateUserRequestBody create_user_request_body: JSON data that contains information to creates new user (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['api_token', 'create_user_request_body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method users_post" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `users_post`") # noqa: E501 # verify the required parameter 'create_user_request_body' is set if ('create_user_request_body' not in params or params['create_user_request_body'] is None): raise ValueError("Missing the required parameter `create_user_request_body` when calling `users_post`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None if 'create_user_request_body' in params: body_params = params['create_user_request_body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/users', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), 
_request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def users_username_delete(self, api_token, username, **kwargs): # noqa: E501 """Deletes a user # noqa: E501 Deletes a user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. Having this permission you can delete users who belongs to some of your user groups. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_username_delete(api_token, username, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str username: User's username (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.users_username_delete_with_http_info(api_token, username, **kwargs) # noqa: E501 else: (data) = self.users_username_delete_with_http_info(api_token, username, **kwargs) # noqa: E501 return data def users_username_delete_with_http_info(self, api_token, username, **kwargs): # noqa: E501 """Deletes a user # noqa: E501 Deletes a user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. Having this permission you can delete users who belongs to some of your user groups. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_username_delete_with_http_info(api_token, username, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str username: User's username (required) :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['api_token', 'username'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method users_username_delete" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `users_username_delete`") # noqa: E501 # verify the required parameter 'username' is set if ('username' not in params or params['username'] is None): raise ValueError("Missing the required parameter `username` when calling `users_username_delete`") # noqa: E501 collection_formats = {} path_params = {} if 'username' in params: path_params['username'] = params['username'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/users/{username}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, 
async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def users_username_get(self, api_token, username, **kwargs): # noqa: E501 """Get all the information of a user # noqa: E501 Get all the relevant information of a user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_username_get(api_token, username, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str username: User's username (required) :return: UserDetailed If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.users_username_get_with_http_info(api_token, username, **kwargs) # noqa: E501 else: (data) = self.users_username_get_with_http_info(api_token, username, **kwargs) # noqa: E501 return data def users_username_get_with_http_info(self, api_token, username, **kwargs): # noqa: E501 """Get all the information of a user # noqa: E501 Get all the relevant information of a user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.users_username_get_with_http_info(api_token, username, async_req=True) >>> result = thread.get() :param async_req bool :param str api_token: Authentication token (required) :param str username: User's username (required) :return: UserDetailed If the method is called asynchronously, returns the request thread. 
""" all_params = ['api_token', 'username'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method users_username_get" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'api_token' is set if ('api_token' not in params or params['api_token'] is None): raise ValueError("Missing the required parameter `api_token` when calling `users_username_get`") # noqa: E501 # verify the required parameter 'username' is set if ('username' not in params or params['username'] is None): raise ValueError("Missing the required parameter `username` when calling `users_username_get`") # noqa: E501 collection_formats = {} path_params = {} if 'username' in params: path_params['username'] = params['username'] # noqa: E501 query_params = [] header_params = {} if 'api_token' in params: header_params['api-token'] = params['api_token'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/users/{username}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='UserDetailed', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) 
def users_username_token_post(self, api_token, username, **kwargs):  # noqa: E501
    """Generates a user API token  # noqa: E501

    Generates a new user API token. If the user already has a generated API token, generates a new one. Conditions to be able to perform the action:  - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. Having this permission you can generate a user API token to users who belongs to some of your user groups.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
    >>> thread = api.users_username_token_post(api_token, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_token: Authentication token (required)
    :param str username: User's username (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin public wrapper: force "data only" responses, then delegate to the
    # *_with_http_info variant, which performs the actual HTTP call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: the delegate returns a thread-like handle.
        return self.users_username_token_post_with_http_info(api_token, username, **kwargs)  # noqa: E501
    else:
        (data) = self.users_username_token_post_with_http_info(api_token, username, **kwargs)  # noqa: E501
        return data

def users_username_token_post_with_http_info(self, api_token, username, **kwargs):  # noqa: E501
    """Generates a user API token  # noqa: E501

    Generates a new user API token. If the user already has a generated API token, generates a new one. Conditions to be able to perform the action:  - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. Having this permission you can generate a user API token to users who belongs to some of your user groups.  # noqa: E501
    This method makes a synchronous HTTP request by default.
    To make an asynchronous HTTP request, please pass async_req=True
    >>> thread = api.users_username_token_post_with_http_info(api_token, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_token: Authentication token (required)
    :param str username: User's username (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['api_token', 'username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: ``locals()`` snapshots self/api_token/username/kwargs here; the
    # extra kwargs are validated against the whitelist and merged into the
    # same dict, so the order of these statements matters.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_username_token_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_token' is set
    if ('api_token' not in params or
            params['api_token'] is None):
        raise ValueError("Missing the required parameter `api_token` when calling `users_username_token_post`")  # noqa: E501
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `users_username_token_post`")  # noqa: E501

    collection_formats = {}

    # The username is interpolated into the request path.
    path_params = {}
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501

    query_params = []

    # The API token travels in the ``api-token`` request header.
    header_params = {}
    if 'api_token' in params:
        header_params['api-token'] = params['api_token']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}/token', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
67,167
19,031
# This file is placed in the Public Domain.


import queue
import threading

from .dpt import Dispatcher
from .obj import Object
from .thr import launch
from .utl import get_exception


class Restart(Exception):
    """Raised by an event handler to request a restart of the loop."""


class Stop(Exception):
    """Raised by an event handler to stop the loop."""


class Loop(Object):
    """Threaded event loop: pulls events off a queue and dispatches them.

    ``start()`` launches :meth:`loop` in its own thread; ``stop()`` sets the
    stop flag and enqueues a ``None`` sentinel to unblock the queue read.
    """

    def __init__(self):
        super().__init__()
        self.queue = queue.Queue()
        self.speed = "normal"
        self.stopped = threading.Event()

    def do(self, e):
        # Hand the event to the dispatcher; subclasses may override.
        Dispatcher.dispatch(self, e)

    def error(self, txt):
        # Error hook; default implementation deliberately ignores the text.
        pass

    def loop(self):
        """Consume and dispatch events until stopped.

        ``Restart`` triggers a stop/start cycle, ``Stop`` exits, and any
        other exception is reported via :meth:`error` without killing the
        loop.
        """
        dorestart = False
        self.stopped.clear()
        # was ``isSet()`` -- deprecated alias of ``is_set()`` since 3.10
        while not self.stopped.is_set():
            e = self.queue.get()
            if e is None:
                # ``stop()`` enqueues None purely to unblock ``get()``;
                # re-check the stop flag instead of dispatching the sentinel
                # (previously it was dispatched and landed in ``error()``).
                continue
            try:
                self.do(e)
            except Restart:
                dorestart = True
                break
            except Stop:
                break
            except Exception:
                self.error(get_exception())
        if dorestart:
            self.restart()

    def restart(self):
        """Stop, then start a fresh loop thread."""
        self.stop()
        self.start()

    def put(self, e):
        # Non-blocking enqueue (Queue is unbounded, so this cannot fail).
        self.queue.put_nowait(e)

    def start(self):
        """Launch :meth:`loop` in a background thread; returns self."""
        launch(self.loop)
        return self

    def stop(self):
        """Signal the loop to stop and wake it if blocked on the queue."""
        self.stopped.set()
        self.queue.put(None)
1,246
368
import json
import logging
from io import BytesIO
from typing import List
from typing import Optional

import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from telegram import InputMediaPhoto


def __convert_plot_to_telegram_photo(plot) -> InputMediaPhoto:
    """
    Render the given matplotlib artist's figure into an in-memory PNG and
    wrap the raw bytes as a telegram.InputMediaPhoto.  The axes are cleared
    afterwards so shared pyplot state does not leak into the next plot.
    """
    with BytesIO() as buffer:
        plot.figure.savefig(buffer)
        plot.clear()
        photo = InputMediaPhoto(buffer.getvalue())
    return photo


def _unpack_telegram_document(update) -> dict:
    """
    This function retrieves JSON representation of a chat history
    from given telegram.Update
    """
    # Download the attached document fully into memory, then parse as JSON.
    document = update.message.document.get_file()
    chat_file = BytesIO(document.download_as_bytearray())
    chat_json = json.load(chat_file)
    return chat_json


def _form_data_frame_from_json(chat_json) -> Optional[pd.DataFrame]:
    """
    Build a DataFrame from an exported chat JSON.  Returns None (and logs an
    error) when the expected top-level "messages" key is missing.
    """
    try:
        messages_df = pd.DataFrame(
            chat_json['messages'],
            columns=['id', 'type', 'date', 'from', 'text', 'media_type'])
    except KeyError as e:
        logging.getLogger().error(
            msg=f'Unable to form DataFrame from json. '
                f'Key "messages" not found. {e}'
        )
        return
    else:
        # Index by message id; parse ISO date strings into datetimes.
        messages_df.set_index('id', inplace=True)
        messages_df['date'] = pd.to_datetime(messages_df['date'])
        return messages_df


def _make_barplot(messages_df: pd.DataFrame) -> InputMediaPhoto:
    """
    Bar plot of the number of messages per calendar month.

    :param messages_df: DataFrame with user messaging history
    :return: telegram.InputMediaPhoto
    """
    # Count messages grouped by month ('M' period) over the whole history.
    messages_per_month = messages_df['date'] \
        .groupby(messages_df['date'].dt.to_period('M')) \
        .agg('count')
    plot = sns.barplot(
        x=messages_per_month.index,
        y=messages_per_month.values,
        color=(0.44, 0.35, 0.95)
    )
    plt.xticks(rotation=45)
    plt.title('All time history')
    return __convert_plot_to_telegram_photo(plot)


def _make_kde_plot(messages_df: pd.DataFrame) -> InputMediaPhoto:
    """Kernel-density plot of message dates, one curve per sender."""
    plot = sns.kdeplot(
        x=messages_df['date'],
        hue=messages_df['from'],
        shade=True
    )
    plt.title('Activity by user')
    plt.xticks(rotation=45)
    plt.xlabel('')
    return __convert_plot_to_telegram_photo(plot)


def _make_media_distribution_bar_plot(messages_df: pd.DataFrame) -> Optional[InputMediaPhoto]:
    """
    Stacked bar plot of media message counts per (sender, media type).
    Returns None when the chat contains no media messages at all.
    """
    logging.getLogger().info('Enter media dist function')
    # value_counts drops NaN media_type rows, so text-only chats yield empty.
    media_dist_df = messages_df[['from', 'media_type']].value_counts()
    if media_dist_df.empty:
        return
    media_dist_plot = media_dist_df.unstack().plot(
        kind='bar',
        stacked=True,
        ylabel='Media messages',
        xlabel='User'
    )
    plt.xticks(rotation=0)
    plt.title('Distribution of media messages')
    return __convert_plot_to_telegram_photo(media_dist_plot)


def _make_weekday_distribution_bar_plot(messages_df: pd.DataFrame) -> InputMediaPhoto:
    """Grouped bar plot of message counts per weekday, split by sender."""
    # dt.weekday: 0 = Monday ... 6 = Sunday (matches the tick labels below).
    dist_by_day_of_week = messages_df['from']\
        .groupby(messages_df['date'].dt.weekday)\
        .agg('value_counts')
    plot = dist_by_day_of_week.unstack().plot(kind='bar')
    plt.xlabel('')
    plt.ylabel('Messages')
    plt.xticks(
        list(range(7)),
        ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'],
        rotation=0
    )
    return __convert_plot_to_telegram_photo(plot)


def make_plots(messages_df: pd.DataFrame) -> List[InputMediaPhoto]:
    """
    Produce the full set of chat-statistics plots as telegram photos,
    omitting plots that could not be built (e.g. no media messages).
    """
    sns.set_theme(context='paper')
    photo_list = [
        _make_barplot(messages_df),
        _make_media_distribution_bar_plot(messages_df),
        _make_kde_plot(messages_df),
        _make_weekday_distribution_bar_plot(messages_df),
    ]
    return [p for p in photo_list if p is not None]
3,619
1,187
# coding=UTF-8
# ex:ts=4:sw=4:et=on
# Copyright (c) 2013, Mathijs Dumon
# All rights reserved.
# Complete license can be found in the LICENSE file.

from io import StringIO

from scipy.optimize import fmin_l_bfgs_b

from .exceptions import wrap_exceptions


def setup_project(projectf):
    """Deserialize a project from its JSON string ``projectf``.

    Clears the Project object pool first so stale objects from a previous
    project cannot leak into the freshly parsed one.
    """
    # Imports are local to keep module import cheap and to defer pyxrd setup.
    from pyxrd.file_parsers.json_parser import JSONParser
    from pyxrd.project.models import Project
    type(Project).object_pool.clear()
    f = StringIO(projectf)
    project = JSONParser.parse(f)
    f.close()
    return project


@wrap_exceptions
def run_refinement(projectf, mixture_index):
    """
    Runs a refinement setup for
     - projectf: project data (JSON string)
     - mixture_index: what mixture in the project to use

    Returns (best_solution_as_list, best_residual).
    """
    if projectf is not None:
        from pyxrd.data import settings
        settings.initialize()

        # Retrieve project and mixture:
        project = setup_project(projectf)
        # Drop the (potentially large) serialized form before refining.
        del projectf
        import gc
        gc.collect()

        mixture = project.mixtures[mixture_index]
        mixture.refinement.update_refinement_treestore()
        refiner = mixture.refinement.get_refiner()
        refiner.refine()

        return list(refiner.history.best_solution), refiner.history.best_residual


@wrap_exceptions
def improve_solution(projectf, mixture_index, solution, residual, l_bfgs_b_kwargs=None):
    """Polish an existing refinement ``solution`` with L-BFGS-B.

    Parameters
    ----------
    projectf : str or None
        Serialized project data; when None, (solution, residual) is returned
        unchanged.
    mixture_index : int
        Index of the mixture inside the project.
    solution, residual :
        Starting point and its residual.
    l_bfgs_b_kwargs : dict, optional
        Extra keyword arguments forwarded to scipy's ``fmin_l_bfgs_b``.
        Fixed: previously this was a mutable default (``={}``), a dict shared
        by every call to this function.

    Returns
    -------
    (new_solution, new_residual)
    """
    if l_bfgs_b_kwargs is None:
        l_bfgs_b_kwargs = {}
    if projectf is not None:
        from pyxrd.data import settings
        settings.initialize()

        # Retrieve project and mixture:
        project = setup_project(projectf)
        del projectf

        mixture = project.mixtures[mixture_index]
        with mixture.data_changed.ignore():

            # Setup context again:
            # NOTE(review): run_refinement calls
            # mixture.refinement.update_refinement_treestore() while this
            # calls mixture.update_refinement_treestore() -- confirm both
            # spellings are equivalent in the pyxrd API.
            mixture.update_refinement_treestore()
            refiner = mixture.refinement.get_refiner()

            # Refine solution
            vals = fmin_l_bfgs_b(
                refiner.get_residual,
                solution,
                approx_grad=True,
                bounds=refiner.ranges,
                **l_bfgs_b_kwargs
            )
            new_solution, new_residual = tuple(vals[0:2])

        # Return result
        return new_solution, new_residual
    else:
        return solution, residual
2,215
670
import functools


def enforceType(func):
    """Decorator enforcing annotated parameter and return types at call time.

    Every positional argument must match its annotation (``ValueError``
    otherwise); a return annotation is mandatory (``TypeError`` otherwise)
    and the returned value must match it (``SyntaxWarning`` raised if not).
    Only positional arguments are supported -- keyword arguments are not
    forwarded.  The wrapper exposes ``has_been_called`` for introspection.

    Fixes over the original:
     * ``ReturnValue == None`` -> ``is None`` (identity test for None);
     * the blanket ``except Exception as e: raise Exception(e)`` around the
       wrapped call was removed -- it destroyed the original exception type
       and traceback without adding anything.
    """
    @functools.wraps(func)
    def wrapper(*args):
        wrapper.has_been_called = True
        annotations = func.__annotations__
        # Parameter annotations in declaration order, return excluded.
        expected = [annotations[name] for name in annotations if name != 'return']
        if len(args) != len(expected):
            raise TypeError("Missing required positional arguments and/or annotations.")
        for position in range(len(expected)):
            if not isinstance(args[position], expected[position]):
                raise ValueError(f"Invalid literal for {expected[position]}: {args[position]}")
        try:
            return_annotation = annotations['return']
        except KeyError:
            raise TypeError("Missing required return value annotation.")
        result = func(*args)
        # ``-> None`` annotates with the value None; isinstance needs NoneType.
        return_annotation = type(return_annotation) if return_annotation is None else return_annotation
        if not isinstance(result, return_annotation):
            raise SyntaxWarning(f"Expected function to return {return_annotation}. Got {type(result)} instead.")
        return result
    wrapper.has_been_called = False
    return wrapper
931
371
# encoding: UTF-8

from __future__ import print_function

import sys
try:
    reload(sys)  # Python 2: re-import sys to regain setdefaultencoding
    sys.setdefaultencoding('utf8')
except NameError:
    pass  # Python 3: reload() is gone and the default encoding is UTF-8

import multiprocessing
from time import sleep
from datetime import datetime, time

from cyvn.trader.vtEvent import EVENT_LOG, EVENT_RECORDER_DAY,EVENT_ERROR
from cyvn.trader.eventEngine import EventEngine2, Event
from cyvn.trader.vtEngine import MainEngine, LogEngine
from cyvn.trader.gateway.CtpGateway import ctpGateway
from cyvn.trader.app import dataRecorder


#----------------------------------------------------------------------
def processErrorEvent(event):
    """
    Handle error events.
    After every login the counterparty re-pushes all errors generated that
    day, so these messages are not suitable for writing into the log file.
    """
    error = event.dict_['data']
    print(u'错误代码:%s,错误信息:%s' %(error.errorID, error.errorMsg))

#----------------------------------------------------------------------
def runChildProcess():
    """Entry point of the child (market-data recorder) process."""
    print('-'*20)

    # Create the log engine
    le = LogEngine()
    le.setLogLevel(le.LEVEL_INFO)
    le.addConsoleHandler()
    le.info(u'启动行情记录运行子进程')

    ee = EventEngine2()
    le.info(u'事件引擎创建成功')

    me = MainEngine(ee)
    me.addGateway('CTP')
    me.addApp(dataRecorder)
    le.info(u'主引擎创建成功')

    ee.register(EVENT_LOG, le.processLogEvent)
    ee.register(EVENT_ERROR, processErrorEvent)
    le.info(u'注册日志事件监听')

    me.connect('CTP')
    le.info(u'连接CTP接口')

    # Once per day, shortly after 15:05, fire the end-of-day recorder event.
    has_recorder_day = False
    while True:
        sleep(1)
        if has_recorder_day == False:
            time_now = datetime.now()
            if time_now.time().hour == 15 and time_now.time().minute > 5:
                event1 = Event(type_=EVENT_RECORDER_DAY)
                ee.put(event1)
                has_recorder_day = True

#----------------------------------------------------------------------
def runParentProcess():
    """Entry point of the parent (watchdog) process."""
    # Create the log engine
    le = LogEngine()
    le.setLogLevel(le.LEVEL_INFO)
    le.addConsoleHandler()
    le.info(u'启动行情记录守护父进程')

    DAY_START = time(8, 57)      # day session start and stop times
    DAY_END = time(15, 18)
    NIGHT_START = time(20, 57)   # night session start and stop times
    NIGHT_END = time(2, 33)

    p = None        # handle of the child process

    while True:
        currentTime = datetime.now().time()
        recording = False

        # Determine which trading window we are currently in
        if ((currentTime >= DAY_START and currentTime <= DAY_END) or
            (currentTime >= NIGHT_START) or
            (currentTime <= NIGHT_END)):
            recording = True

        # Filter out weekend windows: all of Saturday, Friday's night
        # session spill-over, and Sunday before the day session
        if ((datetime.today().weekday() == 6) or
            (datetime.today().weekday() == 5 and currentTime > NIGHT_END) or
            (datetime.today().weekday() == 0 and currentTime < DAY_START)):
            recording = False

        # Inside a recording window: make sure the child process is running
        if recording and p is None:
            le.info(u'启动子进程')
            p = multiprocessing.Process(target=runChildProcess)
            p.start()
            le.info(u'子进程启动成功')

        # Outside a recording window: shut the child process down
        if not recording and p is not None:
            le.info(u'关闭子进程')
            p.terminate()
            p.join()
            p = None
            le.info(u'子进程关闭成功')

        sleep(5)


if __name__ == '__main__':
    #runChildProcess()
    runParentProcess()
3,239
1,298
# Python3
#
# Read two integers from stdin and print their least common multiple.


def euclid_gcd(a, b):
    """Return the greatest common divisor of ``a`` and ``b``.

    Classic recursive Euclidean algorithm; ``euclid_gcd(x, 0) == x``.
    The first recursive step automatically puts the arguments in order,
    so callers need not compare ``a`` and ``b`` beforehand (the original
    ``if a > b`` dispatch was redundant).
    """
    if b == 0:
        return a
    # gcd(a, b) == gcd(b, a mod b)
    return euclid_gcd(b, a % b)


def lcm(a, b):
    """Return the least common multiple of ``a`` and ``b``."""
    return a * b // euclid_gcd(a, b)


if __name__ == "__main__":
    # Entry-point guard added so importing this module no longer blocks
    # on stdin; running it as a script behaves exactly as before.
    a, b = [int(i) for i in input().split()]
    print(lcm(a, b))
232
123
import os
current_dir = os.path.dirname(os.path.realpath(__file__))

import gym
from gym.envs.registration import registry, make, spec


def register(id, *args, **kvargs):
    """Register an environment id with gym, silently skipping ids that the
    registry already knows about (avoids duplicate-registration errors on
    repeated imports)."""
    if id not in registry.env_specs:
        return gym.envs.registration.register(id, *args, **kvargs)


# Map each custom environment id to the entry point implementing it.
_MOCAP_ENVS = {
    "RandomWalkEnv-v0": "environments.mocap_envs:RandomWalkEnv",
    "TargetEnv-v0": "environments.mocap_envs:TargetEnv",
    "JoystickEnv-v0": "environments.mocap_envs:JoystickEnv",
    "PathFollowEnv-v0": "environments.mocap_envs:PathFollowEnv",
    "HumanMazeEnv-v0": "environments.mocap_envs:HumanMazeEnv",
}

for _env_id, _entry_point in _MOCAP_ENVS.items():
    register(id=_env_id, entry_point=_entry_point)
711
264
import datetime

from django.test import TestCase

from libya_elections.utils import at_noon


class ScheduleTest(TestCase):
    def test_at_noon(self):
        """at_noon pins the time-of-day of its argument to exactly 12:00:00.000000."""
        source = datetime.datetime(1970, 2, 3, 4, 5, 6, 7)
        noon = at_noon(source)
        checks = (
            (12, noon.hour),
            (0, noon.minute),
            (0, noon.second),
            (0, noon.microsecond),
        )
        for expected, actual in checks:
            self.assertEqual(expected, actual)
471
163
from django.conf.urls import url

from website.apps.statistics.views import statistics

# Route the app root ("") to the statistics view; reversible as "statistics".
urlpatterns = [
    url(r'^$', statistics, name="statistics"),
]
153
48
from skimage import measure
import pydicom
from pydicom.dataset import Dataset, FileDataset
from pydicom.sequence import Sequence
import os
import numpy as np
import SimpleITK as sITK
import time
import glob
import sitk_ct_io as imio
from skimage.draw import polygon

# for debugging
# import matplotlib.pyplot as plt
# import matplotlib.image as mpimg


def read_rtss_to_sitk(rtss_file, image_dir, return_names=True, return_image=False):
    # modified code from xuefeng
    # http://aapmchallenges.cloudapp.net/forums/3/2/
    #
    # The image directory is required to set the spacing on the label map
    """Rasterize a DICOM RT structure set into a SimpleITK label volume.

    Returns, depending on the flags: the label volume alone, plus the list
    of structure names, plus the CT image itself.
    NOTE(review): with return_names=False and return_image=True the image is
    NOT returned (the first branch wins) -- confirm callers never use that
    combination.
    """
    # read the rtss
    contours, label_names = read_contours(pydicom.read_file(rtss_file))
    # read the ct
    # NOTE(review): ``dcms`` is overwritten on every walked subdirectory, so
    # only the last subdirectory containing *.dcm files is actually used.
    dcms = []
    for subdir, dirs, files in os.walk(image_dir):
        dcms = glob.glob(os.path.join(subdir, "*.dcm"))
    slices = [pydicom.read_file(dcm) for dcm in dcms]
    # sort slices inferior -> superior by their z position
    slices.sort(key=lambda x: float(x.ImagePositionPatient[2]))
    image = np.stack([s.pixel_array for s in slices], axis=-1)
    # convert to mask
    atlas_labels = get_mask(contours, slices, image)
    atlas_image = imio.read_sitk_image_from_dicom(image_dir)
    atlas_labels.SetOrigin(atlas_image.GetOrigin())
    atlas_labels.SetSpacing(atlas_image.GetSpacing())
    if not return_names:
        return atlas_labels
    elif not return_image:
        return atlas_labels, label_names
    else:
        return atlas_labels, label_names, atlas_image


def write_rtss_from_sitk(labels, label_names, ct_directory, output_filename):
    # labels is a sITK image volume with integer labels for the objects
    # assumes 0 for background and consequtive label numbers starting from 1
    # corresponding to the label_names
    # the ct_directory is required to correctly link the UIDs
    #
    # NOTE(review): slice files are located as ``ct_directory + entry.name``,
    # so ct_directory must end with a path separator -- confirm callers.

    # load ct to get slice UIDs, z-slices and anything else we might need
    slice_info = {}
    series_info = {}
    z_values = []
    first_slice = True
    spacing = [0, 0]
    origin = [0, 0]
    with os.scandir(ct_directory) as it:
        for entry in it:
            if not entry.name.startswith('.') and entry.is_file():
                slice_file = ct_directory + entry.name
                dicom_info = pydicom.read_file(slice_file)
                # key by the stringified float z position; looked up again below
                slice_info[str(float(dicom_info.SliceLocation))] = dicom_info.SOPInstanceUID
                z_values.append(float(dicom_info.SliceLocation))
                if first_slice:
                    # get generic information
                    series_info['SOPClassUID'] = dicom_info.SOPClassUID
                    series_info['FrameOfReferenceUID'] = dicom_info.FrameOfReferenceUID
                    series_info['StudyInstanceUID'] = dicom_info.StudyInstanceUID
                    series_info['SeriesInstanceUID'] = dicom_info.SeriesInstanceUID
                    series_info['PatientName'] = dicom_info.PatientName
                    series_info['PatientID'] = dicom_info.PatientID
                    series_info['PatientBirthDate'] = dicom_info.PatientBirthDate
                    series_info['PatientSex'] = dicom_info.PatientSex
                    spacing[0] = float(dicom_info.PixelSpacing[0])
                    spacing[1] = float(dicom_info.PixelSpacing[1])
                    origin[0] = float(dicom_info.ImagePositionPatient[0])
                    origin[1] = float(dicom_info.ImagePositionPatient[1])
                    # Assuming axial for now
                    first_slice = False
    z_values = np.sort(z_values)

    current_time = time.localtime()
    modification_time = time.strftime("%H%M%S", current_time)
    modification_time_long = modification_time + '.123456'  # madeup
    modification_date = time.strftime("%Y%m%d", current_time)

    # File meta header for an RT Structure Set Storage object.
    file_meta = Dataset()
    file_meta.FileMetaInformationGroupLength = 192
    file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.481.3'
    file_meta.MediaStorageSOPInstanceUID = "1.2.826.0.1.3680043.2.1125." \
        + modification_time + ".3" + modification_date
    file_meta.ImplementationClassUID = "1.2.3.771212.061203.1"
    file_meta.TransferSyntaxUID = '1.2.840.10008.1.2'
    pydicom.dataset.validate_file_meta(file_meta, True)

    ds = FileDataset(output_filename, {}, file_meta=file_meta, preamble=b"\0" * 128)

    # Add the data elements
    ds.PatientName = series_info['PatientName']
    ds.PatientID = series_info['PatientID']
    ds.PatientBirthDate = series_info['PatientBirthDate']
    ds.PatientSex = series_info['PatientSex']

    # Set the transfer syntax
    ds.is_little_endian = True
    ds.is_implicit_VR = True

    # Set lots of tags
    ds.ContentDate = modification_date
    ds.SpecificCharacterSet = 'ISO_IR 100'  # probably not true TODO Check
    ds.InstanceCreationDate = modification_date
    ds.InstanceCreationTime = modification_time_long
    ds.StudyDate = modification_date
    ds.SeriesDate = modification_date
    ds.ContentTime = modification_time
    ds.StudyTime = modification_time_long
    ds.SeriesTime = modification_time_long
    ds.AccessionNumber = ''
    ds.SOPClassUID = '1.2.840.10008.5.1.4.1.1.481.3'  # RT Structure Set Stroage
    ds.SOPInstanceUID = "1.2.826.0.1.3680043.2.1125." + modification_time + ".3" + modification_date
    ds.Modality = "RTSTRUCT"
    ds.Manufacturer = "Python software"
    ds.ManufacturersModelName = 'sitk_rtss_io.py'
    ds.ReferringPhysiciansName = ''
    ds.StudyDescription = ""
    ds.SeriesDescription = "RTSS from SimpleITK data"
    ds.StudyInstanceUID = series_info['StudyInstanceUID']
    ds.SeriesInstanceUID = "1.2.826.0.1.3680043.2.1471." \
        + modification_time + ".4" + modification_date
    ds.StructureSetLabel = "RTSTRUCT"
    ds.StructureSetName = ''
    ds.StructureSetDate = modification_time
    ds.StructureSetTime = modification_time

    # Reference every CT slice of the series from the structure set.
    contour_sequence = Sequence()
    for slice_z in z_values:
        contour_data = Dataset()
        contour_data.ReferencedSOPClassUID = series_info['SOPClassUID']
        # NOTE(review): relies on str(numpy float) matching the str(float)
        # keys built above -- holds for current numpy reprs.
        contour_data.ReferencedSOPInstanceUID = slice_info[str(slice_z)]
        contour_sequence.append(contour_data)
    referenced_series = Dataset()
    referenced_series.SeriesInstanceUID = series_info['SeriesInstanceUID']
    referenced_series.ContourImageSequence = contour_sequence
    referenced_study = Dataset()
    referenced_study.ReferencedSOPClassUID = '1.2.840.10008.3.1.2.3.2'
    referenced_study.ReferencedSOPInstanceUID = series_info['StudyInstanceUID']
    referenced_study.RTReferencedSeriesSequence = Sequence([referenced_series])
    frame_of_ref_data = Dataset()
    frame_of_ref_data.FrameOfReferenceUID = series_info['FrameOfReferenceUID']
    frame_of_ref_data.RTReferencedStudySequence = Sequence([referenced_study])
    ds.ReferencedFrameOfReferenceSequence = Sequence([frame_of_ref_data])

    # One ROI entry + observation per structure, numbered from 1.
    roi_sequence = Sequence()
    roi_observations = Sequence()
    for label_number in range(0, len(label_names)):
        roi_data = Dataset()
        roi_obs = Dataset()
        roi_data.ROINumber = label_number + 1
        roi_obs.ObservationNumber = label_number + 1
        roi_obs.ReferencedROINumber = label_number + 1
        roi_data.ReferencedFrameOfReferenceUID = series_info['FrameOfReferenceUID']
        roi_data.ROIName = label_names[label_number]
        roi_data.ROIObservationDescription = ''
        roi_data.ROIGenerationAlgorithm = 'Atlas-based'
        roi_data.ROIGenerationMethod = 'Python'
        roi_obs.RTROIInterpretedType = ''
        roi_obs.ROIInterpreter = ''
        roi_sequence.append(roi_data)
        roi_observations.append(roi_obs)
    ds.StructureSetROISequence = roi_sequence
    ds.RTROIObservationsSequence = roi_observations

    # as if that wasn't bad enough, now we have to add the contours!
    label_data = sITK.GetArrayFromImage(labels)
    roi_contour_sequence = Sequence()
    for label_number in range(0, len(label_names)):
        roi_contour_data = Dataset()
        roi_contour_data.ROIDisplayColor = '255\\0\\0'
        roi_contour_data.ReferencedROINumber = label_number + 1
        contour_sequence = Sequence()
        # convert labels to polygons
        contour_number = 0
        # FIX: the original iterated range(0, GetSize()[2] - 1), which is an
        # off-by-one that silently dropped all contours on the last slice.
        for slice_number in range(labels.GetSize()[2]):
            slice_data = label_data[slice_number, :, :]
            slice_for_label = np.where(slice_data != label_number + 1, 0, slice_data)
            if np.any(np.isin(slice_for_label, label_number + 1)):
                contours = measure.find_contours(slice_for_label, (float(label_number + 1) / 2.0))
                for contour in contours:
                    # Convert to real world and add z_position
                    # plt.imshow(slice_data)
                    # plt.plot(contour[:, 1], contour[:, 0], color='#ff0000')
                    contour_as_string = ''
                    is_first_point = True
                    # The closing point (== first point) is dropped because
                    # the geometric type below is CLOSED_PLANAR.
                    for point in contour[:-1]:
                        real_contour = [point[1] * spacing[0] + origin[0],
                                        point[0] * spacing[1] + origin[1],
                                        z_values[slice_number]]
                        if not is_first_point:
                            contour_as_string = contour_as_string + '\\'
                        else:
                            is_first_point = False
                        contour_as_string = contour_as_string + str(real_contour[0]) + '\\'
                        contour_as_string = contour_as_string + str(real_contour[1]) + '\\'
                        contour_as_string = contour_as_string + str(real_contour[2])
                    contour_number = contour_number + 1
                    contour_data = Dataset()
                    contour_data.ContourGeometricType = 'CLOSED_PLANAR'
                    # FIX: must match the number of points actually written
                    # (len(contour) - 1, the duplicate closing point is dropped);
                    # the original wrote len(contour).
                    contour_data.NumberOfContourPoints = str(len(contour) - 1)
                    contour_data.ContourNumber = str(contour_number)
                    image_data = Dataset()
                    image_data.ReferencedSOPClassUID = series_info['SOPClassUID']
                    image_data.ReferencedSOPInstanceUID = slice_info[str(z_values[slice_number])]
                    contour_data.ContourImageSequence = Sequence([image_data])
                    contour_data.ContourData = contour_as_string
                    contour_sequence.append(contour_data)
        roi_contour_data.ContourSequence = contour_sequence
        roi_contour_sequence.append(roi_contour_data)
    ds.ROIContourSequence = roi_contour_sequence
    ds.ApprovalStatus = 'UNAPPROVED'

    ds.save_as(output_filename)
    return


def read_contours(structure_file):
    # code from xuefeng
    # http://aapmchallenges.cloudapp.net/forums/3/2/
    """Extract (contours, names) from a parsed RTSTRUCT dataset.

    Each contour dict carries color, ROI number, name, and the raw
    ContourData point lists.
    """
    contours = []
    contour_names = []
    for i in range(len(structure_file.ROIContourSequence)):
        contour = {'color': structure_file.ROIContourSequence[i].ROIDisplayColor,
                   'number': structure_file.ROIContourSequence[i].ReferencedROINumber,
                   'name': structure_file.StructureSetROISequence[i].ROIName}
        assert contour['number'] == structure_file.StructureSetROISequence[i].ROINumber
        contour['contours'] = [s.ContourData for s in structure_file.ROIContourSequence[i].ContourSequence]
        contours.append(contour)
        contour_names.append(contour['name'])
    return contours, contour_names


def get_mask(contours, slices, image):
    # code from xuefeng
    # http://aapmchallenges.cloudapp.net/forums/3/2/
    """Rasterize RTSTRUCT contours onto the CT grid as a uint8 label volume."""
    z = [s.ImagePositionPatient[2] for s in slices]
    pos_r = slices[0].ImagePositionPatient[1]
    spacing_r = slices[0].PixelSpacing[1]
    pos_c = slices[0].ImagePositionPatient[0]
    spacing_c = slices[0].PixelSpacing[0]
    im_dims = image.shape
    # image is (rows, cols, slices); label volume is (slices, rows, cols)
    label = np.zeros([im_dims[2], im_dims[1], im_dims[0]], dtype=np.uint8)
    z_index = 0
    for con in contours:
        num = int(con['number'])
        for c in con['contours']:
            nodes = np.array(c).reshape((-1, 3))
            # all points of a planar contour must share one z value
            assert np.amax(np.abs(np.diff(nodes[:, 2]))) == 0
            zNew = [round(elem, 1) for elem in z]
            try:
                z_index = z.index(nodes[0, 2])
            except ValueError:
                try:
                    # retry with 0.1 mm tolerance on the slice position
                    z_index = zNew.index(round(nodes[0, 2], 1))
                except ValueError:
                    # NOTE(review): on a miss the previous z_index is reused
                    # (stale slice) -- confirm this is the intended fallback.
                    print('Slice not found for ' + con['name'] + ' at z = ' + str(nodes[0, 2]))
            rows = (nodes[:, 1] - pos_r) / spacing_r
            cols = (nodes[:, 0] - pos_c) / spacing_c
            rr, cc = polygon(rows, cols)
            label[z_index, rr, cc] = num
    return sITK.GetImageFromArray(label)
12,779
4,394
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 20 14:03:18 2020

@author: Nicolai
"""

import sys
sys.path.append("../differential_evolution")
from JADE import JADE

import numpy as np
import scipy.optimize  # FIX: ``import scipy`` alone does not load the optimize subpackage
import scipy as sc
import testFunctions as tf


def downhillsimplex(population, function, minError, maxFeval):
    '''
    Wraps scipy's Nelder-Mead downhill simplex (``fmin``) behind the same
    interface as JADE: a random member of the population is used as the
    starting point, and the iterate history is repackaged into the usual
    (popDynamic, FEDynamic, FDynamic, CRDynamic) tuple.  FDynamic and
    CRDynamic have no meaning for the simplex and are returned empty.

    Parameters
    ----------
    population: numpy array
        2D numpy array where lines are candidates and colums is the dimension
    function: function
        fitness function that is optimised
    minError: float
        stopping condition on function value
        (NOTE(review): passed to fmin as ``ftol``, which is a convergence
        tolerance rather than a target error -- confirm this is intended)
    maxFeval: int
        stopping condition on max number of function evaluation

    Returns
    -------
    history: tuple
        tupel[0] - popDynamic
        tupel[1] - FEDynamic
        tupel[2] - FDynamic (always empty)
        tupel[3] - CRDynamic (always empty)

    Examples
    --------
    >>> import numpy as np
    >>> def sphere(x):
            return np.dot(x,x)
    >>> minError = -1*np.inf
    >>> maxFeval = 10**3
    >>> population = 100*np.random.rand(50,2)
    >>> (popDynamic, FEDynamic, FDynamic, CRDynamic) = downhillsimplex(
            population, sphere, minError, maxFeval)
    '''
    psize, dim = population.shape
    # a random member of the population serves as the simplex start point
    startSolution = population[np.random.randint(0, high=psize)]
    _, _, _, _, _, allvecs = sc.optimize.fmin(function, startSolution,
                                              ftol=minError,
                                              maxfun=maxFeval,
                                              full_output=True, retall=True)
    FDynamic = []
    CRDynamic = []
    popDynamic = []
    FEDynamic = []
    for x in allvecs:
        popDynamic.append(np.array([x]))
        # FIX: record the fitness of each iterate; the original appended
        # function(allvecs[-1]) every time, producing a constant trace.
        FEDynamic.append(function(x))
    return (popDynamic, FEDynamic, FDynamic, CRDynamic)


if __name__ == "__main__":
    import matplotlib.pyplot as plt
    population = 100*np.random.rand(4,2)
    minError = 10**-200
    maxFeval = 10**3
    (popDynamic, FEDynamic, FDynamic, CRDynamic) = downhillsimplex(population, \
        tf.sphere, minError, maxFeval)
    plt.semilogy(FEDynamic)
2,364
781
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import random
import pandas as pd
import os


class InstagramBot:

    def __init__(self, username, password, function, url, num_people=1):
        '''
        Bot that comment on photos on Instagram

        Args:
            username:string: username to an Instagram account
            password:string: password to an Instagram account
            function:string: 'comment' if only comment or 'get_comments' to get comments (scrapper)
            url:string/list: unique url if 'comment', list of url if 'get_comments'
            num_people(optional):int: number of people to tag, valid only if 'comment'

        Attributes:
            username:string: username given
            password:string: password given
            base_url:string: instagram website (https://www.instagram.com)
            driver:selenium.webdriver.Chrome: driver that performs actions in the browser
        '''
        self.username = username
        self.password = password
        self.function = function
        self.url = url
        self.num_people = num_people
        self.driver = webdriver.Chrome(executable_path = "INPUT CHROME DRIVER PATH HERE")

    def login(self):
        '''
        Logs into the Instagram account with the given username and password

        Args:
            None
        '''
        driver = self.driver
        driver.get("https://www.instagram.com")
        time.sleep(3)
        user_box = driver.find_element_by_xpath("//input[@name = 'username']")
        user_box.click()
        user_box.clear()
        user_box.send_keys(self.username)
        password_box = driver.find_element_by_xpath("//input[@name = 'password']")
        password_box.click()
        password_box.clear()
        password_box.send_keys(self.password)
        password_box.send_keys(Keys.RETURN)
        time.sleep(3)
        # Dismiss the "save login info" dialog.
        not_now_login = driver.find_element_by_xpath("//button[contains(text(), 'Agora não')]") # If your language is not portuguese, change the "Agora não" to "Not now" or similar
        not_now_login.click()
        time.sleep(1)
        self.run_bot()

    @staticmethod
    def human_type(phrase, input_comment):
        '''
        Type letter by letter, with random intervals of time in between

        Args:
            phrase:string: text that will be written by the function
            input_comment:selenium.webdriver.Chrome.find_element_by_xpath: path to the comment box
        '''
        for letter in phrase:
            input_comment.send_keys(letter)
            time.sleep(random.randint(1, 5)/30)
        input_comment.send_keys(" ")

    def comment_on_post(self, num_people):
        '''
        Comment on the choosen URL post, choosing random strings on the people
        list, the number of times that were specified on "num_people"

        Args:
            num_people(optional):int: number of people to pick from the people list
        '''
        i = 0  # Counter
        driver = self.driver
        driver.get(self.url)
        time.sleep(3)
        people = [
            "@person1",
            "@person2",
            "@person3"
        ]
        while True:
            try:
                driver.find_element_by_class_name("Ypffh").click()
                commentary_box = driver.find_element_by_class_name("Ypffh")
                time.sleep(random.randint(1, 10)/40)
                cache = []
                # BUG FIX: cap the number of unique picks at len(people);
                # asking for more distinct people than exist would make the
                # redraw loop below spin forever.
                for num in range(min(num_people, len(people))):
                    person = random.choice(people)
                    # Redraw until we get someone not already tagged.
                    while person in cache:
                        person = random.choice(people)
                    cache.append(person)
                    self.human_type(person, commentary_box)
                    time.sleep(random.randint(1, 4)/4)
                time.sleep(random.randint(1, 4))
                publish = driver.find_element_by_xpath("//button[contains(text(), 'Publicar')]") # If your language is not portuguese, change the "Publicar" to "Publish" or similar
                publish.click()
                i += 1
                print("You published ", i, " commentaries")
                time.sleep(random.randint(45, 90))
                # Longer cooldown every 100 comments to look less bot-like.
                if i % 100 == 0:
                    time.sleep(60*5)
            except Exception as e:
                print(e)
                time.sleep(5)

    def scroll(self):
        """
        Scroll screen to show all comments

        Args:
            None
        """
        # Keep clicking the "load more comments" plus sign until it disappears;
        # the resulting NoSuchElementException ends the loop.
        try:
            driver = self.driver
            while True:
                # Click on "plus" sign
                print("Carregando mais comentários...")
                driver.find_element_by_xpath("//button[contains(@class, 'dCJp8')]").click()
                # Wait to load page
                time.sleep(1)
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; keep the best-effort behaviour but
        # only for ordinary exceptions.
        except Exception:
            pass

    def get_comments(self):
        """
        Get all the comments from Instagram URLs

        Args:
            None
        """
        try:
            # Getting all the comments from the post
            all_comments = []
            for url in self.url:
                driver = self.driver
                driver.get(url)
                time.sleep(3)
                # Scroll to load all the comments
                self.scroll()
                comment = driver.find_elements_by_class_name('gElp9 ')
                for c in comment:
                    container = c.find_element_by_class_name('C4VMK')
                    # The second span inside the container holds the text.
                    content = container.find_elements_by_xpath('(.//span)')[1].text
                    content = content.replace('\n', ' ').strip().rstrip()
                    print(content)
                    all_comments.append(content)
                time.sleep(3)
            # Exporting comments to csv
            df = pd.DataFrame({"comments" : all_comments})
            # Check if file already exists and export to csv
            i = 0
            exists = True
            while exists:
                filename = f'comments{i}.csv'
                filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), filename)
                if os.path.isfile(filepath):
                    i += 1
                else:
                    df.to_csv(filename, sep=';', index=False)
                    exists = False
        except Exception as e:
            print(e)
            time.sleep(5)

    def run_bot(self):
        # Dispatch on the mode chosen at construction time.
        if self.function == 'comment':
            self.comment_on_post(self.num_people)
            print('chegou')
        elif self.function == 'get_comments':
            self.get_comments()
7,030
1,850