prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
# coding: utf-8 from __future__ import print_function, absolute_import, division, unicode_literals import sys from .compat import no_limit_int # NOQA if False: # MYPY from typing import Text, Any, Dict, List # NOQA __all__ = ["ScalarFloat", "ExponentialFloat", "ExponentialCapsFloat"] class ScalarFloat(floa...
p', None) # type: ignore e_width = kw.pop('e_width', None) # type: ignore e_sign = kw.pop('e_sign', None) # type: ignore underscore = kw.pop('underscore', Non
e) # type: ignore v = float.__new__(cls, *args, **kw) # type: ignore v._width = width v._prec = prec v._m_sign = m_sign v._m_lead0 = m_lead0 v._exp = exp v._e_width = e_width v._e_sign = e_sign v._underscore = underscore return v ...
~~~~~~~~~~~~~~~~~~~ Lexers for semantic web and RDF query languages and markup. :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re from pygments.lexer import RegexLexer, bygroups, default fro
m pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \ Whitespace, Name, Literal, Comment, Text __all__ = [
'SparqlLexer', 'TurtleLexer'] class SparqlLexer(RegexLexer): """ Lexer for `SPARQL <http://www.w3.org/TR/rdf-sparql-query/>`_ query language. .. versionadded:: 2.0 """ name = 'SPARQL' aliases = ['sparql'] filenames = ['*.rq', '*.sparql'] mimetypes = ['application/sparql-query'] #...
from __future__ import print_function # Time: O(n) # Space: O(1) # # Given a sorted linked list, delete all nodes that have duplicate numbers, # leaving only distinct numbers from the original list. # # For example, # Given 1->2->3->3->4->4->5, return 1->2
->5. # Given 1->1->1->2->3, return 2->3. # # Definition for singly-linked list. class ListNode: def __init__(self, x): self.val = x self.next = None def __repr__(self): if self is None: return "Nil" else: return "{} -> {}".format(self.val, repr(self.next...
stNode :rtype: ListNode """ dummy = ListNode(0) pre, cur = dummy, head while cur: if cur.next and cur.next.val == cur.val: val = cur.val; while cur and cur.val == val: cur = cur.next pre.next = cur ...
f conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of ...
init__(self) ... self.add_markup("bold", "b", "<strong>{string}</strong>") You can also change the delimiters for the markup, add new regular expressions, d
elete markups, etc. Have a look at the class methods. """ name = "wiki" def __init__(self, start="&lt;", end="&gt;", close="/"): """Service constructor.""" Service.__init__(self) self.markup_delimiter_start = start self.markup_delimiter_end = end self.marku...
OR PERFORMANCE OF THIS SOFTWARE. """ from libnl.attr import nla_policy, NLA_U16, NLA_U32, NLA_U64, NLA_U8 from libnl.misc import c_int8, c_uint8, SIZEOF_S8, SIZEOF_U8 from libnl.nl80211 import nl80211 from libnl.nl80211.iw_util import ampdu_space, get_ht_capability, get_ht_mcs, get_ssid WLAN_CAPABILITY_ESS = 1 << 0 ...
ree/scan.c?id=v3.17#n323. Positional arguments: data -- bytearray data to read. Returns: String. """ if data[0] == 0x00: return '<no flags>' if data[0] & 0x01: return 'NonERP_Present' if data[0] & 0x02: return 'Use_Protection' if data[0] & 0x04: retu...
def get_cipher(data): """http://git.kernel.org/cgit/linux/kernel/git/jberg/iw.git/tree/scan.c?id=v3.17#n336. Positional arguments: data -- bytearray data to read. Returns: WiFi stream cipher used by the access point (string). """ legend = {0: 'Use group cipher suite', 1: 'WEP-40', 2: 'T...
# # Project: MXCuBE # https://github.com/mxcube # # This file is part of MXCuBE software. # # MXCuBE is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your...
rick the handles LIMS (ISPyB) login, ie ProposalBrick. The signal is emitted when a user was succesfully logged in. """ logged_in = True self.ispyb_lo
gged_in = logged_in if HWR.beamline.session is not None: HWR.beamline.session.set_user_group("") self.setEnabled(logged_in) self.task_tool_box_widget.ispyb_logged_in(logged_in) def property_changed(self, property_name, old_value, new_value): if property_name == "useOsc...
# URL routes for the customer views: the list page at the app root and a
# detail page keyed by the customer's numeric primary key.
from django.conf.urls import patterns, url

from . import views

# NOTE(review): patterns() is deprecated since Django 1.8 and removed in 1.10;
# the modern equivalent is a plain list of url() entries — confirm this
# project's Django version before migrating.
urlpatterns = patterns(
    '',
    url(r'^$', views.customer_list, name='customers'),
    url(r'^(?P<pk>[0-9]+)/$', views.customer_details, name='customer-details')
)
#!/usr/bin/python import numpy as np #a = np.linspace(0.,10.,100) #b = np.sqrt(a) import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import csv def import_text(filename, separator): for line in csv.reader(open(filename), delimiter=separator, skipinitialspace...
ht*0.9]) #ax.legend(loc='upper center', bbox_to_anchor=(0.5, 1.05), ncol = 6, fancybox = True, shadow = True, prop={'size':9}, ) #ax.legend(loc='upper center', ncol = 3, fancybox = True, shadow = True, prop={'size':9},
) #ax.legend(loc='upper left', ncol = 1, fancybox = True, shadow = True, prop={'size':9}, ) ax.legend(loc='upper center', ncol = 4, bbox_to_anchor=(0.5,-0.1), fancybox = True, shadow = True, prop={'size':9}, ) plt.show() fig.savefig('cudaMemcpy_vs_d2d_offset1.pdf')
# A Gui interface allowing the binary illiterate to figure out the ip address the Arduino has been assigned. import os import re from PySide.QtCore import QFile, QMetaObject, QSignalMapper, Slot, QRegExp from PySide.QtGui import QDialog, QPushButton, QRegExpValidator from PySide.QtUiTools import QUiLoader class IPHel...
) self.uiFourthTetTXT.setValidator(validator) #
build a map of the buttons self.buttons = [None]*16 self.signalMapper = QSignalMapper(self) self.signalMapper.mapped.connect(self.tetMap) for button in self.findChildren(QPushButton): match = re.findall(r'^uiTrellis(\d{,2})BTN$', button.objectName()) if match: i = int(match[0]) self.buttons[i] = b...
, 3: 'nzbs', 4: 'eztv', 5: 'nzbmatrix', 6: 'tvnzb', 7: 'ezrss', 8: 'thepiratebay', 9: 'kat'} def execute(self): self.connection.action("ALTER TABLE history RENAME TO history_old;") self.c...
DTV) else: newAction = common.Quality.compositeStatus(newStatus, common.Quality.UNKNOWN) self.connection.action("UPDATE history SET action = ? WHERE date = ? AND showid = ?", [newAction,
curUpdate["date"], curUpdate["showid"]]) self.connection.action("CREATE TABLE db_version (db_version INTEGER);") self.connection.action("INSERT INTO db_version (db_version) VALUES (?)", [1]) class DropOldHistoryTable(NewQualitySettings): def test(self): return self.checkDBVersion() >= 2 ...
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,...
[self.batch_size, self.input_dim]) for idx, batch in enumerate(scores_, 1): # All scores that are padded should be zero np.testing.assert_array_equal(batch[idx:], np.zeros_like(batch[idx:])) # Scores should sum to 1 scores_sum = np.sum(scores_, axis=1) np.testin...
AttentionLayerDot class""" def _create_layer(self): return AttentionLayerDot( params={"num_units": self.attention_dim}, mode=tf.contrib.learn.ModeKeys.TRAIN) def test_layer(self): self._test_layer() class AttentionLayerBahdanauTest(AttentionLayerTest): """Tests the AttentionLayerBahda...
# -*- coding: utf-8 -*- from converters.circle import circle from converters.currency import currency from converters.electric import electric from converters.force import force from converters.pressure import pressure from converters.speed import speed fr
om converters.temperature import temperature class UnitsManager(object): ''' Class responsible to manage the unit converters of this application. ''' _units = [ circle, currency,
electric, force, pressure, speed, temperature, ] def __iter__(self): return (x for x in self._units) def register(self, converter): """ Method that receives a new converter and adds it to this manager. Useful to add custom new methods...
# Copyright 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A very very simple mock object harness.""" from types import ModuleType DONT_CARE = '' class MockFunctionCall(object): def __init__(self, name): se...
d(call) call.WithArgs(*args) return call def _install_hook(self, func_name): def handler(*args, **_): got_call = MockFunctionCall( func_name).WithArgs(*args).WillReturn(DONT_CARE) if self._trace.next_call_index >= len(self._trace.expected_calls): raise Exception( 'Ca...
self._trace.next_call_index] expected_call.VerifyEquals(got_call) self._trace.next_call_index += 1 for h in expected_call.when_called_handlers: h(*args) return expected_call.return_value handler.is_hook = True setattr(self, func_name, handler) class MockTimer(object): """...
it: Chunked Corpus Reader # # Copyright (C) 2001-2015 NLTK Project # Author: Steven Bird <stevenbird1@gmail.com> # Edward Loper <edloper@gmail.com> # URL: <http://nltk.org/> # For license information, see LICENSE.TXT """ A reader for corpora that contain chunked (and optionally tagged) documents. """ import o...
(if the corpus has tags) or word strings (if the cor
pus has no tags). :rtype: list(Tree) """ return concat([ChunkedCorpusView(f, enc, 1, 1, 0, 1, *self._cv_args) for (f, enc) in self.abspaths(fileids, True)]) def chunked_paras(self, fileids=None): """ :return: the given file(s) as a list of ...
import datetime from dateutil import parser from .numbers import is_number from .strings import STRING_TYPES DATE_TYPES = (datetime.date, datetime.datetime) def parse_dates(d, default='today'): """ Parses one or more dates from d """ if default == 'today': default = datetime.datetime.today() ...
return default elif isinstance(d, DATE_TYPES): return d elif is_number(d): # Treat as milliseconds since 1970 d = d if isinstance(d, float) else float(d) return datetime.datetime.utcfromtimestamp(d) elif not isinstance(d, STRING_TYPES): if hasattr(d, '__ite...
teutil.parser < version 2.5 return default else: try: return parser.parse(d) except (AttributeError, ValueError): return default
#!/usr/bin/python import argparse import requests,json from requests.auth import HTTPBasicAuth from subprocess import call import time import sys import os from vas_config_sw1 import * DEFAULT_PORT='8181' USERNAME='admin' PASSWORD='admin' OPER_OVSDB_TOPO='/restconf/operational/network-topology:network-topology/topo...
WORD)) if debug == True: print r.text r.raise_for_status() # Main definition - constants # ======================= # MENUS FUNCTIONS # ======================= # Main menu # ======================= # MAIN PROGRAM # ======================= # Main Program NODE_ID_OVSDB = '' SUBNET_2_...
36.1":"vswitch-1"} PORTIDX_OF_LSW = {"vswitch-1":1, "vswitch-2":1} def rpc_create_logic_switch_uri(): return "/restconf/operations/fabric-service:create-logical-switch" def rpc_create_logic_switch_data(name): return { "input" : { "fabric-id": "fabric:1", "name":name } ...
from examples.isomorph import (
    get_all_canonicals,
    get_canonical,
    get_translation_dict,
)
from pokertools import cards_from_str as flop


def test_isomorph():
    # Total number of strategically distinct (canonical) flops.
    assert len(get_all_canonicals()) == 1755

    # An unpaired rainbow flop maps onto its canonical representative,
    # and the translation dict gives the suit substitution that gets there.
    assert get_canonical(flop('6s 8d 7c')) == flop('6c 7d 8h')
    assert get_translation_dict(flop('6s 8d 7c')) == {
        'c': 'd', 'd': 'h', 'h': 's', 's': 'c'}

    # Same checks for a paired, two-suited flop.
    assert get_canonical(flop('Qs Qd 4d')) == flop('4c Qc Qd')
    assert get_translation_dict(flop('Qs Qd 4d')) == {
        'c': 'h', 'd': 'c', 'h': 's', 's': 'd'}
# Licensed under the Apache License, Version 2.0 (the "License"); you # may not use this file except in compliance with the License. You may # obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under t...
yvals("a:b/c:d", '/', ':')) def test_extract_domain_or_image_args(self): args1 = ['--something', '-d', 'domain', 'somethingelse'] args2 = ['-b', '--something', '-a', 'image', 'somethingelse'] args3 = ['-b', '-c', '--something'] self.assertEqual(['-d', 'domain'], ...
ract_domain_or_image_args(args1)) self.assertEqual(['-a', 'image'], utils.extract_domain_or_image_args(args2)) self.assertRaises(ValueError, utils.extract_domain_or_image_args, args3) def test_extract_image_args_from_disks(self): args1 = ['...
"""Support for HomematicIP Cloud weather devices.""" import logging from homematicip.aio.device import ( AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro) from homematicip.aio.home import AsyncHome from homeassistant.components.weather import WeatherEntity from homeassistant.config_entries impor...
perature @property def temperature_unit(self) -> str: """Return the unit of measurement.""" return TEMP_CELSIUS @property def humidity(self) -> int: """Return the humidity.""" return self._device.humidity @property def wind_speed(self) -> float: """Retu...
: """Return the attribution.""" return "Powered by Homematic IP" @property def condition(self) -> str: """Return the current condition.""" if hasattr(self._device, "raining") and self._device.raining: return 'rainy' if self._device.storm: return '...
from bears.yml.RA
MLLintBear import RAMLLintBear from tests.LocalBearTestHelper import verify_local_bear good_file = """ #%RAML 0.8 title: World Music API baseUri: http://example.api.com/{version} version: v1 """ bad_file = """#%RAML 0.8 title: Failing RAML version: 1 baseUri: http://example.com /resource: description: hello ...
les=(bad_file,), tempfile_kwargs={"suffix": ".raml"})
import sys import six import logging import ooxml from ooxml import parse, serialize, importer logging.basicConfig(filename='ooxml.log', level=logging.INFO) if len(sys.argv) > 1: file_name = sys.argv[1] dfile = ooxml.read_from_file(file_name) six.print_("\n-[HTML]-----------------------------\n") ...
(dfile.document.used_styles)
six.print_("\n-[USED FONT SIZES]------------------\n") six.print_(dfile.document.used_font_size)
'one do I use?') else: # all good - the files are the same # we can update our local sync info self.state.markObjectAsSynced(f.path, localFileIn...
send2
trash(localPath) os.rename(tmpFile, localPath) localMD = self.localStore.get_last_modified_date(localPath) self.state.markObjectAsSynced(f.path, f.hash, localMD) self.outputQueue.put(self._get_working_message()) ...
output diff_output = "" identical_hosts = [hosts[0]] for (host, diff) in diffs: if diff: diff_output += "=" * 70 + "\n\n%s\n%s\n\n" % (host, diff) else: identical_hosts.append(host) output = OUTPUT_TEMPLATE.render( ...
saveas = SaveDialog(self.view, defaultDir=self._save_path, message="Save csv as...").get_choice() if saveas: merged_scans = MergedNessusReport(self.files) if not saveas.endswith(".csv"): saveas = saveas+".csv" sorted_tree_items = self.sorted_tree...
s+merged_scans.others) with open(saveas, "wb") as f: csv_writer = csv.writer(f) csv_writer.writerow(["PID","Severity","Hosts","Output","Diffs"]) for item in sorted_tree_items: csv_writer.writerow([ item.pid, ...
# -*- coding: utf-8 -*- import re import logging from completor.utils import check_subseq from .utils import parse_uri word_pat = re.compile(r'([\d\w]+)', re.U) word_ends = re.compile(r'[\d\w]+$', re.U) logger = logging.getLogger("completor") # [ # [{ # u'range': { #
u'start': {u'line': 273, u'character': 5}, # u'end': {u'line': 273, u'character': 12} # }, # u'uri': u'file:///home/linuxbrew/.linuxbrew/Cellar/go/1.12.4/libexec/src/fmt/print.go' # noqa # }] # ] def gen_jump_list(ft, name, data): res = [] if not data: return res items =...
1', '!') start = item['range']['start'] res.append({ 'filename': uri, 'lnum': start['line'] + 1, 'col': start['character'] + 1, 'name': name, }) return res # [ # [ # { # u'newText': u'', # u'range': { # ...
"""Core Foundation / Core Graphics type aliases and constants for ctypes."""
from ctypes import POINTER, c_long, c_uint32, c_void_p

# Basic CF scalar types; CF objects are opaque, so they are all void pointers.
CFIndex = c_long
CFStringEncoding = c_uint32
CFString = c_void_p
CFArray = c_void_p
CFDictionary = c_void_p
CFError = c_void_p
CFType = c_void_p
CFAllocatorRef = c_void_p

# "Ref" types are pointers to the opaque object types above.
CFStringRef = POINTER(CFString)
CFArrayRef = POINTER(CFArray)
CFDictionaryRef = POINTER(CFDictionary)
CFErrorRef = POINTER(CFError)
CFTypeRef = POINTER(CFType)

# UTF-8 encoding constant as declared in CFString.h.
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)

# CGWindowList option flags and the null window id (CGWindow.h).
kCGWindowListOptionAll = 0
kCGWindowListOptionOnScreenOnly = 1 << 0
kCGNullWindowID = 0
#!/usr/bin/env python from numpy import array, dtype, int32 traindat = '../data/fm_train_real.dat' testdat = '../data/fm_test_real.dat' label_traindat = '../data/label_train_multiclass.dat' # set both input attributes as continuous i.e. 2 feattypes = array([2, 2],dtype=int32) parameter_list = [[traindat,testdat,labe...
import RealFeatures, MulticlassLabels, CSVFile, CHAIDTree except ImportError: print("Could not import Shogun modules") return # wrap featu
res and labels into Shogun objects feats_train=RealFeatures(CSVFile(train)) feats_test=RealFeatures(CSVFile(test)) train_labels=MulticlassLabels(CSVFile(labels)) # CHAID Tree formation with nominal dependent variable c=CHAIDTree(0,feattypes,10) c.set_labels(train_labels) c.train(feats_train) # Classify test d...
# For each query (A, B, C, D): simulate the value A modulo B, repeatedly
# adding D while the residue stays <= C.  Print "Yes" when a residue repeats
# (a cycle that never exceeds C), "No" when the residue overshoots C.
for _ in range(int(input())):
    a, b, c, d = map(int, input().split())

    # Quick rejections / acceptances before simulating.
    if a < b or c + d < b:
        print("No")
        continue
    if c >= b - 1:
        print("Yes")
        continue

    trail = []      # values visited after each +d step (kept in the output)
    seen = set()    # residues mod b already encountered
    cur = a
    while True:
        cur %= b
        if cur in seen:
            # Residue cycle found without ever exceeding c.
            print("Yes", trail)
            break
        seen.add(cur)
        if cur > c:
            # Overshot the allowed bound; no further step possible.
            print("No", trail)
            break
        cur += d
        trail.append(cur)
import pytest import pwny target_little_endian = pwny.Target(arch=pwny.Target.Arch.unknown, endian=pwny.Target.Endian.little) target_big_endian = pwny.Target(arch=pwny.Target.Arch.unknown, endian=pwny.Target.Endian.big) def test_pack(): assert pwny.pack('I', 0x41424344) == b'DCBA' def test_pack_format_with_e...
yield check_short_form_pack_endian, 'P', 4294901244, b'\xff\xfe\xfd\xfc', pwny.Target.Endian.big def test_pointer_unpack(): yield check_short_form_unpack, 'u', -66052, b'\xfc\xfd\xfe\xff' yield check_short_form_unpack_endian, 'u', -66052, b'\xfc\xfd\xfe\xff', pwny.Target.Endian.little yield check_short...
dian, 'u', -66052, b'\xff\xfe\xfd\xfc', pwny.Target.Endian.big yield check_short_form_unpack, 'U', 4294901244, b'\xfc\xfd\xfe\xff' yield check_short_form_unpack_endian, 'U', 4294901244, b'\xfc\xfd\xfe\xff', pwny.Target.Endian.little yield check_short_form_unpack_endian, 'U', 4294901244, b'\xff\xfe\xfd\xfc'...
from django.conf.urls import include, url from django.views.generic import TemplateView from kuma.attachments.feeds import AttachmentsFeed from kuma.attachments.views import edit_attachment from . import feeds, views from .constants import DOCUMENT_PATH_RE # These patterns inherit (?P<document_path>[^\$]+). documen...
e='wiki.revision'), url(r'^\$history$', views.list.revisions, name='wiki.document_revisions'), url(r'^\$edit$', views.edit.edit, name='wiki.edit'), url(r'^\$files$', edit_attachm
ent, name='attachments.edit_attachment'), url(r'^\$edit/(?P<revision_id>\d+)$', views.edit.edit, name='wiki.new_revision_based_on'), url(r'^\$compare$', views.revision.compare, name='wiki.compare_revisions'), url(r'^\$children$', views.document.children, ...
#!/usr/bin/env python from datetime import timedelta import numpy as np from opendrift.readers import reader_basemap_landmask from opendrift.readers import reader_netCDF_CF_generic from opendrift.models.oceandrift import OceanDrift o = OceanDrift(loglevel=0) # Set loglevel to 0 for debug information reader_norkys...
np.meshgrid(lons, lats) lons = lons.ravel() lats = lats.ravel() # Seed oil elements on a grid at regular time interval start_time = reader_norkyst.start_time time_step = timedelta(hours=6) num_steps = 10 for i in range(num_steps+1): o.seed_elements(lons, lats, radius=0, number=100, time=start_...
p=900) # Print and plot results print(o) o.animation()
# -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2014 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # pu...
reference field is hidden. +-----------------+-----------------+------------------------------
+ | Information | Invoice field | Instead of (in base modules) | +=================+=================+==============================+ | Invoice number | Invoice number | Invoice number | +-----------------+-----------------+------------------------------+ | Description | Na...
': ['glXGetVisualFromFBConfig'], 'arguments': 'Display* dpy, GLXFBConfig config', }, { 'return_type': 'GLXWindow', 'names': ['glXCreateWindow'], 'arguments': 'Display* dpy, GLXFBConfig config, Window win, const int* attribList', }, { 'return_type': 'void', 'names': ['glXDestroyWindow'], 'arguments': 'Di...
ualSGIX'], 'arguments': 'Display* dpy, XVisualInfo* visualInfo', }, { 'return_type': 'GLXContext', 'names': ['glXCreateContextAttribsARB'], 'arguments': 'Display* dpy, GLXFBConfig config, GLXContext share_context, int direct, ' 'const int* attrib_list', }, ] FUNCTION_SETS = [ [GL_FUNCTIONS, 'gl', [...
esa/MesaLib/include/GL/glext.h', '../../third_party/khronos/GLES2/gl2ext.h'], []], [OSMESA_FUNCTIONS, 'osmesa', [], []], [EGL_FUNCTIONS, 'egl', ['../../third_party/khronos/EGL/eglext.h'], [ 'EGL_ANGLE_d3d_share_handle_client_buffer', 'EGL_ANGLE_surface_d3d_texture_2d_share_handle', ], ], ...
import sys

import numpy as np


def check_symmetric(a, tol=1e-8):
    """Return True if matrix *a* equals its transpose within tolerance *tol*."""
    return np.allclose(a, a.T, atol=tol)


# Each stdin line is a matrix literal such as "1 2; 2 1" (np.matrix parses
# this string form); report whether it is symmetric.
for line in sys.stdin:
    matrix = np.matrix(line)
    print("Symmetric" if check_symmetric(matrix) else "Not symmetric")
from diofant import (Derivative, Function, Integral, bell, besselj, cos, exp,
                     legendre, oo, symbols)
from diofant.printing.conventions import requires_partial, split_super_sub

__all__ = ()


def test_super_sub():
    # split_super_sub breaks a symbol name into (base, superscripts, subscripts).
    assert split_super_sub('beta_13_2') == ('beta', [], ['13', '2'])
    # Degenerate case: the empty name has no base and no scripts.
    assert split_super_sub('') == ('', [], [])
def test_requires_partial(): x, y, z, t, nu = symbols('x y z t nu') n = symbols('n', integer=True) f = x * y assert requires_partial(Derivative(f, x)) is True assert requires_partial(Derivative(f, y)) is True # integrating out one of the variables assert requires_partial(Derivative(Integra...
#--------------------------------------------------------------------------- # Introdução a Programação de Computadores - IPC # Universidade do Estado do Amazonas - UEA # Prof. Jucimar Jr # Gabriel de Queiroz Sousa 1715310044 # Lucas Gabriel Silveira Duarte 1715310053 # Matheus de Olive...
avra leet admite muitas variações, como l33t ou 1337. # O uso do leet reflete uma subcultura relacionada ao mundo dos jogos de computador e internet, # sendo muito usada para confundir os iniciantes e afirmar-se como parte de um grupo. Pesquise # sobre as principais formas de traduzir as letras. Depois, faça um prog...
), ('e', '3'), ('s', '5'), ('g', '6'), ('r', '12'), ('t', '7'), ('q', '9')) sring = input("Informe palavra = ") nova = sring print("Inicialmente: ", sring) for antigo, novo in leet: nova = nova.replace(antigo, novo) print("Finalmente = ", nova)
alityChanges] = self._empty_quality_changes self._containers[_ContainerIndexes.Quality] = self._empty_quality self._containers[_ContainerIndexes.Material] = self._empty_material self._containers[_ContainerIndexes.Variant] = self._empty_variant self.containersChanged.connect(self._onCont...
erInterface) -> None: self.replaceContainer(_ContainerIndexes.Definition, new_definition) def
getDefinition(self) -> "DefinitionContainer": return cast(DefinitionContainer, self._containers[_ContainerIndexes.Definition]) definition = pyqtProperty(QObject, fget = getDefinition, fset = setDefinition, notify = pyqtContainersChanged) @override(ContainerStack) def getBottom(self) -> "Definitio...
import nose import angr import logging l = logging.getLogger("angr_tests.path_groups") import os location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests')) addresses_fauxware = { 'armel': 0x8524, 'armhf': 0x104c9, # addr+1 to force thumb #'i386': 0x8048524, # comm...
tep the real path until the real authentication paths return to the same place pg5 = pg4.explore(find=main_addr, num_find=2, stash='auth').unstash_all(from_stash='found', to_stash='auth') nose.tools.assert_equal(len(pg5.active), 0) nose.tools.assert_equal(len(pg5.backdoor), 1) nose.tools.assert_
equal(len(pg5.auth), 2) # now unstash everything pg6 = pg5.unstash_all(from_stash='backdoor').unstash_all(from_stash='auth') nose.tools.assert_equal(len(pg6.active), 3) nose.tools.assert_equal(len(pg6.backdoor), 0) nose.tools.assert_equal(len(pg6.auth), 0) nose.tools.assert_equal(len(set(pg6.m...
#!/usr/bin/env python3 import os # makedirs import sys # argv, exit import csv # DictReader def cutoffdict(cdict): rdict = dict() for key in cdict.keys(): candi = cdict[key] top = max(candi, key = candi.get) if candi[top] > (sum(candi.values())*0.5): rdict[key] = top ...
ndir(src_path) for entry in sit: if not entry.name.startsw
ith('.') and entry.is_file(): cip = entry.path groupbyprefix(cip) if __name__ == '__main__': sys.exit(main(sys.argv))
# -*- coding: utf-8 -*-
"""The EWF image path specification implementation."""

from dfvfs.lib import definitions
from dfvfs.path import factory
from dfvfs.path import path_spec


class EWFPathSpec(path_spec.PathSpec):
  """EWF image path specification."""

  TYPE_INDICATOR = definitions.TYPE_INDICATOR_EWF

  def __init__(self, parent=None, **kwargs):
    """Initializes a path specification.

    Note that the EWF path specification must have a parent.

    Args:
      parent (Optional[PathSpec]): parent path specification.

    Raises:
      ValueError: when parent is not set.
    """
    if not parent:
      raise ValueError('Missing parent value.')

    super(EWFPathSpec, self).__init__(parent=parent, **kwargs)


# Make the specification discoverable through the path-spec factory.
factory.Factory.RegisterPathSpec(EWFPathSpec)
# -*- coding: utf-8 -*-
from collections import OrderedDict

from django import forms
from django.utils.translation import ugettext_lazy as _

from envelope.forms import ContactForm


class ContactForm(ContactForm):
    """Contact form extending envelope's with phone and country fields.

    Deliberately shadows the imported envelope ContactForm name.
    """

    template_name = "envelope/contact_email.txt"
    html_template_name = "envelope/contact_email.html"

    phone = forms.CharField(label='Teléfono', required=False)
    country = forms.CharField(label='País', required=False)

    def __init__(self, *args, **kwargs):
        super(ContactForm, self).__init__(*args, **kwargs)
        # The sender's e-mail address is optional on this form.
        self.fields['email'].required = False


# Fix the order in which the fields are rendered.
_FIELD_ORDER = ['sender', 'subject', 'email', 'phone', 'country', 'message']
ContactForm.base_fields = OrderedDict(
    (name, ContactForm.base_fields[name]) for name in _FIELD_ORDER
)
from dj
ango import template register = template.Library() @register.assignment_tag(takes_context=True) def has_bookmark_permission(context, action): """Checks if the current user can bookmark the action item. Returns a boolean. Syntax:: {% has_bookmark_permission action %} """ request = contex...
if action.target.approval_required and not request.user.can_access_all_projects: has_permission = False if not has_permission: return False return True @register.assignment_tag(takes_context=True) def get_existing_bookmark(context, action): request = context['request'] if not reque...
s import stdout from math import sqrt try: import numpy except ImportError: pass from nltk.cluster.api import ClusterI from nltk.compat import python_2_unicode_compatible class VectorSpaceClusterer(ClusterI): """ Abstract clusterer which takes tokens and maps them into a vector space. Optionally ...
ions=None): """ :param normalise: should vectors be normalised to length 1 :type normalise: boolean :param svd_dimensions: number of dimensions to use in reducing vector dimensionsionality with SVD :type svd_dimensions: int ...
cluster(self, vectors, assign_clusters=False, trace=False): assert len(vectors) > 0 # normalise the vectors if self._should_normalise: vectors = list(map(self._normalise, vectors)) # use SVD to reduce the dimensionality if self._svd_dimensions and self._svd_dimensio...
#!/usr/bin/env python # REQUIRES both rst2pdf and wikir project from google code. import sys import subprocess sys.path.insert(0, '../../rson/py2x') from rson import loads from simplejson import dumps subprocess.call('../../rst2pdf/bin/rst2pdf manual.txt -e preprocess -e dotted_toc -o manual.pdf'.split()) lines = i...
tuff = 'page:: space:: footer:: ##Page## contents::'.split() result = [] for line in lines: for check in badstuff: if check in line: break else: result.append(line) result.
append('') result = '\n'.join(result) from wikir import publish_string result = publish_string(result) f = open('manual.wiki', 'wb') f.write(result) f.close()
# -*- coding: utf-8 -*- from requests import (get, post, delete) from .base import Base class System(Base): def __init__(self, host, secret, endpoint='/plugins/restapi/v1/system/properties'): """ :param host: Scheme://Host/ for API requests :param secret: Shared secret key for API request...
, key): """ Retrieve system property :param key: The name of system property """ endpoint = '/'.join([self.endpoint, key]) return self.
_submit_request(get, endpoint) def update_prop(self, key, value): """ Create or update a system property :param key: The name of system property :param value: The value of system property """ payload = { '@key': key, '@value': value, ...
# -*- coding: utf-8 -*- from __future__ import absolute_import import mock from django.contrib.auth.models import User from sentry.constants import MEMBER_USER from sentry.models import Project from sentry.web.helpers import get_project_list from tests.base import TestCase class GetProjectListTEst(TestCase): d...
_exclude_public_projects_without_access(self): project_list = get
_project_list(self.user, MEMBER_USER) self.assertEquals(len(project_list), 0) @mock.patch('sentry.models.Team.objects.get_for_user') def test_does_include_private_projects_without_access(self, get_for_user): get_for_user.return_value = {self.project2.team.id: self.project2.team} project...
#!/usr/bin/python # Written by Stjepan Horvat # ( zvanstefan@gmail.com ) # by the exercises from David Lucal Burge - Perfect Pitch Ear Traning Supercourse # Thanks to Wojciech M. Zabolotny ( wzab@ise.pw.edu.pl ) for snd-virmidi example # ( wzab@ise.pw.edu.pl ) import random import time import sys import re fname="/de...
notes) if nameNote(noteOne) != nameNote(noteTwo): break while True: noteThree = random.choice(notes) if name
Note(noteOne) != nameNote(noteTwo): break if nameNote(noteOne) != nameNote(noteThree): break match = False while not match: done = False playNote(noteOne, noteTwo, noteThree) while not done: n = input("? ") if n == "1": playNote(noteOne, noteTw...
#!/usr/bin/env python import datetime as dt import numpy as np import matplotlib.pyplot as plt #import matplotlib.dates as mdates #import matplotlib.cbook as cbook #from matplotlib import pyplot as plt from matplotlib.dates import date2num from statsmodels.distributions.empirical_distribution import ECDF from colle...
ype != "Proxy": s.type = "Other services" services = [s.type for s in g.services.values()] totalServices = len(services) services = Counter(services).items() sercices = services.sort(key=itemgetter(1), reverse=True) servicesNumber = len(services) types = [typ for (typ, value) in services] values = [float(value)...
) ax = fig.add_subplot(111) rects = ax.bar(ind, values, width, color='black') ax.set_xlim(-width, len(ind) + width) ax.set_ylim(0, 0.7) # ax.set_ylim(0,45) ax.set_ylabel('Frequency') #ax.set_xlabel('Service Type') ax.set_title(' Service Categories Frequency') xTickMarks = [str(i) for i in types] ax.set_xticks(ind + wid...
# -*- coding: utf-8 -*- import gensim, logging class SemanticVector: model = '' def __init__(self, structure): self.structure = stru
cture def model_word2vec(self, min_count=15, window=15, size=100): print 'preparing sentences list' sentences = self.structure.prepare_list_of_words_in_sentences() print 'start modeling' self.model = gensim.models.Word2Vec(sentences, size=size, window=window, min_count=min_count, w...
self.model = gensim.models.Word2Vec.load(name)
# 类结构的堆排序 class DLinkHeap(object): def __init__(self, list=None, N = 0): self.dList = list self.lengthSize = N # 插入数据 def insert_heap(self, data): self.dList.append(data) self.lengthSize += 1 # 初始化堆结构 def init_heap(self): n = self.lengthS...
: if self.dList[tmp_pos] > self.dList[2*t+2]: tmp_pos = 2*t+2 if t != tmp_pos: self.swap(tmp_pos, t) t = tmp_pos else: break # 向
上调整节点 def sift_up(self, size): n = size i = n - 1 flag = 0 while i > 0 and flag == 0: parent_i = int(i/2) if self.dList[i] < self.dList[parent_i]: self.swap(i, parent_i) i = parent_i else: fl...
from . imp
ort slide_channel_technology_category from . import slide_channel_technology from
. import slide_channel
#!/usr/bin/python3 __author__ = 'ivan.shynkarenka' import argp
arse from TTWebClient.TickTraderWebClient import TickTraderWebClient def main(): parser = argparse.ArgumentParser(description='TickTrader Web API sample') parser.add_argument('web_api_address', help='TickTrader Web API address') args = parser.parse_args() # Create instance of the TickTrader Web API c...
for c in currencies: print('Currency: {0}'.format(c['Name'])) currency = client.get_public_currency(currencies[0]['Name']) print("{0} currency precision: {1}".format(currency[0]['Name'], currency[0]['Precision'])) if __name__ == '__main__': main()
from django.contrib import admin

from rango.models import Category, Page


class PageAdmin(admin.ModelAdmin):
    """Admin configuration for Page: show title, owning category and URL
    as columns in the admin change-list."""

    list_display = ('title', 'category', 'url')


# Register the rango models with the admin site (Category uses the
# default ModelAdmin; Page uses the customised one above).
admin.site.register(Category)
admin.site.register(Page, PageAdmin)
#!/usr/bin/env python # coding: utf-8 # # rede_gephi_com_ipca_csv # In[6]: ano_eleicao = '2014' rede =f'rede{ano_eleicao}' csv_dir = f'/home/neilor/{rede}' # In[7]: dbschema = f'rede{ano_eleicao}' table_edges = f"{dbschema}.gephi_edges_com_ipca_2018" table_nodes = f"{dbschema}.gephi_nodes_com_ipca_2018" table...
mod_tse as mtse # In[9]: import os home = os.environ["HOME"] local_dir = f'{home}/temp' # In[10]: mtse.execute_query(f"update {table_municipios} set rede= 'N';") #
## REDE BRASIL # In[11]: def salva_rede_brasil(csv_dir,rede): rede_dir_BR = f'{csv_dir}/{rede}_Brasil' os.makedirs(rede_dir_BR) edges_csv_query=f"""copy ( select * from {table_edges} ) TO '{rede_dir_BR}/{rede}_Brasil_ed...
import pytest from django.db import connection, IntegrityError from .models import MyTree def flush_constraints(): # the default db setup is to have constraints DEFERRED. # So IntegrityErrors only happen when the transaction commits. # Django's testcase thing does eventually flush the constraints but to...
# To test this integrity error, have to update table without calling save() # (because save() changes `ltree` to match `parent_id`) MyTree.objects.filter(pk=desc3.pk).update(parent=root1) flush_constraints() def test_parent_i
s_descendant_errors(db): root1 = MyTree.objects.create(label='root1') child2 = MyTree.objects.create(label='child2', parent=root1) desc3 = MyTree.objects.create(label='desc3', parent=child2) child2.parent = desc3 with pytest.raises(IntegrityError): child2.save() flush_constraints()
"""Unit test for the SNES nonlinear solver""" # Copyright (C) 2012 Patrick E. Farrell # # This file is par
t of DOLFIN. # # DOLFIN is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DOLFIN is distributed in the hope that it will be us...
icense for more details. # # You should have received a copy of the GNU Lesser General Public License # along with DOLFIN. If not, see <http://www.gnu.org/licenses/>. # # First added: 2012-10-17 # Last changed: 2012-10-26 """Solve the Yamabe PDE which arises in the differential geometry of general relativity. http://...
# Copyright (c) 2016-2017 Dustin Doloff # Licensed under Apache License v2.0 import argparse import difflib import hashlib import os import subprocess import zipfile # Resets color formatting COLOR_END = '\33[0m' # Modifies characters or color COLOR_BOLD = '\33[1m' COLOR_DISABLED = '\33[02m' # Mostly just means darke...
info, s)) for s in info.__sl
ots__) summary += ') ' + os.linesep assert summary is not None, 'Unable to summarize %s' % file return summary def main(): args = parse_args() files = args.files assert len(files) >= 2, 'There must be at least two files to compare' files_hashes = set() max_file_size = 0...
#!/usr/bin/env python3
# Copyright 2015 Dietrich Epp.
# This file is part of SGGL. SGGL is licensed under the terms of the
# 2-clause BSD license. For more information, see LICENSE.txt.
"""Thin launcher: delegate straight to the glgen package entry point."""
from glgen import __main__ as _glgen_main

_glgen_main.main()
# Copyright 2015 Google Inc. All Rights Reserved. """Command for setting target pools of instance group manager.""" from googlecloudsdk.calliope import arg_parsers from googlecloudsdk.calliope import exceptions from googlecloudsdk.compute.lib import base_classes from googlecloudsdk.compute.lib import utils class SetT...
ToRegionName(ref.zone) if args.clear_target_pools: pool_refs = [] else: pool_refs = self.CreateRegionalReferences( args.target_pools, region, resource_type='targetPools') pools = [pool_ref.SelfLink() for pool_ref in pool_refs] request = ( self.messages.ComputeInstanceGroupM...
(), instanceGroupManagersSetTargetPoolsRequest=( self.messages.InstanceGroupManagersSetTargetPoolsRequest( targetPools=pools, ) ), project=self.project, zone=ref.zone,) ) return [request] SetTargetPools.detaile...
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import
models, migrations import django.contrib.gis.db.models.fields class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Incident', fields=[ ('id', models.AutoField(ve
rbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=150)), ('description', models.TextField(max_length=1000)), ('severity', models.CharField(default='ME', max_length=2, choices=[('UR', 'Urgent'), ('HI', 'High'), ('...
which fit the specified height. :rtype: list """ items_sub = [] fm = QFontMetrics(self.items_font) for i in items: sz = self.items_size(items_sub) if sz.height() > height: break items_sub.append(i) return items_sub ...
ayout requires the unicode character of the line separator multiline_items = '\u2028'
.join(multiline_items) self.draw_text( painter, multiline_items, self.items_font,
#!/usr/bin/python # -*- coding: utf-8 -*- """ ========================================================= SVM-SVC (Support Vector Classification) ========================================================= The classification application of the SVM is used below. The `Iris <http://en.wikipedia.org/wiki/Iris_flower_data_set...
, h), np.arange(y_min, y_max, h)) Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]) # Put the result into a color plot Z = Z.re
shape(xx.shape) pl.figure(1, figsize=(4, 3)) pl.pcolormesh(xx, yy, Z, cmap=pl.cm.Paired) # Plot also the training points pl.scatter(X[:, 0], X[:, 1], c=Y, cmap=pl.cm.Paired) pl.xlabel('Sepal length') pl.ylabel('Sepal width') pl.xlim(xx.min(), xx.max()) pl.ylim(yy.min(), yy.max()) pl.xticks(()) pl.yticks(()) pl.show(...
r.db import manage from pulp.server.db.migrate import models from pulp.server.db.model import MigrationTracker import pulp.plugins.types.database as types_db import migration_packages.a import migration_packages.b import migration_packages.duplicate_versions import migration_packages.platform import migration_packages....
n_packages.raise_exception, migration_packages.version_gap, migration_packages.version_zero, migration_packages.z, ] if name == models.MIGRATIONS_ENTRY_POINT: return [FakeEntryPoint(package) for package in test_packages] return [] # Mock 1.0.0 has a built in mock_open, and...
ta to just be a str. # http://www.voidspace.org.uk/python/mock/0.8/examples.html?highlight=open#mocking-open if inPy3k: file_spec = [ '_CHUNK_SIZE', '__enter__', '__eq__', '__exit__', '__format__', '__ge__', '__gt__', '__hash__', '__iter__', '__le__', '__lt__', '__ne__', '__next__', '__repr_...
from flask import Flask from flask import request from flask import jsonify from flask import abort import time app = Flask(__name__) @app.route('/api/1', defaults={'path': ''}, methods=['GET', 'POST']) @app.route('/api/1/<path:path>', methods=['GET', 'POST']) def api1(path): time.sleep(20) return jsonify({ ...
id a bad request') @app.route('/api/3', defaults={'path': ''}, methods=['GET', 'POST']) @app.route('/api/3/<path:path>', methods=['GET', 'POST']) def api3(path): userId = request.args.get('userId') return jsonify({ 'userinfo': { 'userId': userId } }) @app.route('/usercenter/us...
GET', 'POST']) def api4(): return jsonify({ 'userinfo': { 'username': 'zhouyang' } }) if __name__ == '__main__': app.run(port=1330, host='0.0.0.0')
""" Simple utility code for animations. """ # Author: Prabhu Ramachandran <prabhu at aerodotiitbdotacdotin> # Copyright (c) 2009, Enthought, Inc. # License: BSD Style. import types from functools import wraps try: from decorator import decorator HAS_DECORATOR = True except ImportError: HAS_DECORATOR = Fals...
... >>> anim = anim() >>> t = Animator(500, anim.next) >>> t.edit_traits() This makes it very easy to animate your visualizations and control it from a simple UI. **Notes** If you want to modify the data plotted by an `mlab` function call,...
#################################### # Traits. start = Button('Start Animation') stop = Button('Stop Animation') delay = Range(10, 100000, 500, desc='frequency with which timer is called') # The internal timer we manage. timer = Instance(Timer) ##########################...
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'NewMangaDialog.ui' # # Created: Wed Jul 24 19:06:21 2013 # by: PyQt4 UI code generator 4.10.2 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except...
.Cancel|QtGui.QDialogButtonBox.Ok) self.b
uttonBox.setObjectName(_fromUtf8("buttonBox")) self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 1) self.retranslateUi(NewMangaDialog) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), NewMangaDialog.accept) QtCore.QObject.connect(self.buttonBox, QtCore.SIG...
"""engine.SCons.Platform.darwin Platform-specific initialization for Mac OS X systems. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Platform.Platform() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008...
tribute, sublicense, and/or sell copies of the
Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, ...
# -*- coding: utf-8 -*- # This file is part of https://github.com/26fe/jsonstat.py # Copyright (C) 2016-2021 gf <gf@26fe.com> # See LICENSE file # stdlib import time import os import hashlib # packages import requests # jsonstat from jsonstat.exceptions import JsonStatException class Downloader: """Helper clas...
ne is infinity, 0 for not to store """ if cache_dir is not None: self.__cache_dir = os.path.abspath(cache_dir) else: self.__cache_dir = None self.__time_to_live = time_to_live self.__session = requests.session() def cache_dir(self): return s...
ternet. Store the downloaded content into <cache_dir>/file. If <cache_dir>/file exists, it returns content from disk :param url: page to be downloaded :param filename: filename where to store the content of url, None if we want not store :param time_to_live: how many seconds to...
# Use default debug configuration or local configuration
# Prefer a developer's local override module when it exists; otherwise
# fall back to the checked-in defaults. EAFP: a missing config_local
# simply raises ImportError, which selects the default module.
try:
    from .config_local import *
except ImportError:
    from .config_default import *
#------------------------------------------------------------------------------- # Name: ModSlaveSettingsRTU # Purpose: # # Author: ElBar # # Created: 17/04/2012 # Copyright: (c) ElBar 2012 # Licence: <your licence> #---------------------------------
---------------------------------------------- #!/usr/bin/env python from PyQt4 import QtGui,QtCore from Ui_settingsModbusRTU import Ui_SettingsModbusRTU import Utils #add logging capability import logging #---
---------------------------------------------------------------------------- class ModSlaveSettingsRTUWindow(QtGui.QDialog): """ Class wrapper for RTU settings ui """ def __init__(self): super(ModSlaveSettingsRTUWindow,self).__init__() #init value self.rtu_port = 1 self.baud_rat...
for views.py """ from base_handler import base_handler import traceback import app.model from flask import g, render_template class single_access_handler(base_handler): def __init__(self): """ Manages all the operations that are involved with a single port association with EPGs (for virt...
0)") return # Creates switch profiles, interface profiles, policy groups and static bindings to associate a port # to an EPG try: port_id = form_values['sel_create_single_access_port'].split('[')[-1][:-1].replace('/','-') switch_id = form_values['sel_cr
eate_single_access_leaf'].split('/')[-1] if form_values['create_port_access_type'] == 'single_vlan': network_o = app.model.network.select().where(app.model.network.epg_dn == form_values['sel_create_single_access_network']) ...
datetime.now() t0 = datetime.datetime.now() ideal_path = 'ideal_coordinates.pickle' #if no paths have been done before, start afresh! if not os.path.exists(ideal_path): M = 1e5 many_points = ps.points_in_shape(shape_path, M) coords = cluster_points(many_points,N) #else import already processed coord...
(coords) #find index of array in nested array to remove! #del coords[itemindex] #print(counter2) #-----------------------------------------------
----------------------- #or random selection of paths?! #---------------------------------------------------------------------- #remove a random set of paths associated with a single one of the N coordinates rand_int = random.randint(0,len(paths)-1) old_path = paths[ran...
" Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com> This file is part of RockStor. RockStor is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any late...
return Response() def _validate_netmask(self, request): netmask = request.data.get('netmask', None) e_msg = ('Provided netmask value(%s) is invalid. You can provide it ' 'in a IP address format(eg: 255.255.255.0) or number of ' 'bits(eg: 24)' % netmask) ...
0 try: bits = int(netmask) except ValueError: #assume ip address format was provided bits = sum([bin(int(x)).count('1') for x in '255.255.255'.split('.')]) if (bits < 1 or bits > 32): e_msg = ('Provided netmask value(%s) is invalid. Number of ' ...
from unittest import TestCase from rfxcom.protocol.temperature import Temperature from rfxcom.exceptions import (InvalidPacketLength, UnknownPacketSubtype, UnknownPacketType) class TemperatureTestCase(TestCase): def setUp(self): self.data = bytearray(b'\x08\x50\x02\x11\x...
b_type(self): self.data[2] = 0xEE self.assertFalse(self.parser.can_handle(self.data)) with self.assertRaises(UnknownPacketSubtype): self.parser.validate_packet(self.data) def test_log_name(self): self.asse
rtEquals(self.parser.log.name, 'rfxcom.protocol.Temperature')
#! /usr/bin/env python3
# Filter stdin to stdout, suppressing class definitions: every line from
# one starting with "class" through the matching line starting with "};"
# (inclusive) is dropped; all other lines are echoed unchanged.
# NOTE(review): the "};" terminator suggests the input is C/C++-style
# source, not Python — confirm with the producing tool.
import sys

in_class = False  # True while inside a class body that is being skipped
for l in sys.stdin:
    if l.startswith("class"):
        in_class = True
    if in_class:
        # The closing "};" line ends the skipped region but is itself
        # suppressed too (continue fires before any printing).
        if l.startswith("};"):
            in_class = False
        continue
    else:
        # Lines outside any class are passed through verbatim; the input
        # line already carries its newline, hence end=''.
        print(l, end='')
: try: ret = "1" ret += sessionString.split()[1][-2:] # last 2 digits of year tempMap = (("fall", "9"), ("winter", "1"), ("spring", "5")) for season in tempMap: if season[0] in sessionString.lower(): ret += season[1] ...
processClass(self, lec, index, webData): """we process a typica
l lecture or tutorial row""" attr1 = ["classNumber", "compSec", "campusLocation"] for i in xrange(len(attr1)): setattr(lec, attr1[i], webData[index+i].strip()) index += 6 attr2 = ["enrlCap", "enrlTotal", "waitCap", "waitTotal"] for i in xrange(len(attr2)): ...
# coding=utf-8 ''' cron trigger @author: Huiyugeng ''' import datetime import trigger class CronTrigger(trigger.Trigger): def __init__(self, cron): trigger.Trigger.__init__(self, 0, 1); self.cron = cron def _is_match(self): parser = CronParser(self.cron) ...
s = value.split(',') else: range_items.append(value) for range_item in range_items: temp_result = [] interval = 1 if '/' in range_item: temp = range_item.split('/') range_item = temp[0] interva...
interval = 1 if '*' in range_item: temp_result.extend(self._add_to_set(min_val, max_val)) elif '-' in range_item: item = range_item.split('-') temp_result.extend(self._add_to_set(int(item[0]), int(item[1]))) ...
't be properly parsed elif buf[0] == 128: ssl2 = True # in SSLv2 we need to read 2 bytes in total to know the size of # header, we already read 1 result = None for result in self._sockRecvAll(1): if result in (0, 1): ...
it is multiple of block size""" currentLength = len(data) blockLength = self.blockSize paddingLength = blockLength - 1 - (currentLength % blockLength) paddingBytes = bytearray([paddingLength] * (paddingLength+1)) data += paddingBytes return data def calculateMAC(se...
Type, data): """Calculate the SSL/TLS version of a MAC""" mac.update(compatHMAC(seqnumBytes)) mac.update(compatHMAC(bytearray([contentType]))) assert self.version in ((3, 0), (3, 1), (3, 2), (3, 3)) if self.version != (3, 0): mac.update(compatHMAC(bytearray([self.vers...
from django.contrib import admin from django.contrib.admin.filters import RelatedFieldListFilter from .models import ClientLog, Client, Feedback def client_id(obj): return obj.client.externid class AliveClientsRelatedFieldListFilter(RelatedFieldListFilter): def __init__(self, field, request, *args, **kwargs):...
(ClientLog, ClientLogAdmin) class ClientAdmin(admin.ModelAdmin): list_display = ("status", "externid", "ip", "updated", "created", "useragent") list_filter = ("status", "useragent", "failures", "complets") ordering = ("status", "-updated", "-created", ) search_fields = ("ip", "useragent", "externid", )...
FeedbackAdmin(admin.ModelAdmin): list_display = ("id", "useremail", "ip", "created") ordering = ("-id",) admin.site.register(Feedback, FeedbackAdmin)
from sqlalchemy import Column, String, BigInteger from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import create_engine import time BaseModel = declarative_base() class Video(BaseModel): __tablename__ = 'video' id = Column(BigInteger, primary_key=True, autoincrement=True) name = ...
, nullable=False) # 来源平台 video_category = Column(String(10), nullable=False) # 视频大分类:电视剧、电影、综艺 series_region = Column(String(20)) # 电视剧地区分类:全部热播、内地、网剧、韩剧、美剧 movie_region = Column(String(20)) # 电影地区分类:全部热播、院线、内地、香港、美国 veriety_region = Column(String(20)) # 综艺分类:热门 created_at = Column(BigInteger,...
ne('mysql+pymysql://root:123456@localhost:3306/videoSpider?charset=utf8mb4') BaseModel.metadata.create_all(engine) """ data = { 'name' : name.get_text(), 'image' : 'http:' + image.get('r-lazyload'), 'desc' : ' '.join(desc.get_text().strip().split()), 'play...
#!/usr/bin/env python #coding=utf-8 import sys sys.path.append("..") import urllib import myjson from datetime import datetime, date, timedelta import time from define import * from data_interface.stock_dataset import stock_dataset class turtle(object): """ turtle model """ def get_mean(self, data, ...
YS"] = str(BUY_DAYS) result["SELL_DAYS"] = str(SELL_DAYS) if result["close_price"] > result["max
_date"][0]: result["choise"] = 4 result["info"] = "buy" elif result["close_price"] < result["min_date"][0]: result["choise"] = 0 result["info"] = "sell all" elif result["close_price"] < result["max_date"][0] and result["close_price"] > result["min_date"][0...
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from pants.backend.python.target_types import PythonLibrary, PythonTests
from pants.engine.target import BoolField


class SkipIsortField(BoolField):
    """Marker field letting a target opt out of isort formatting."""

    alias = "skip_isort"
    default = False
    help = "If true, don't run isort on this target's code."


def rules():
    """Attach the `skip_isort` field to every supported target type."""
    supported_targets = (PythonLibrary, PythonTests)
    return [tgt.register_plugin_field(SkipIsortField) for tgt in supported_targets]
import requests import csv
from configparser import ConfigParser config = ConfigParse
r() config.read("config.cfg") token = config.get("auth", "token") domain = config.get("instance", "domain") headers = {"Authorization" : "Bearer %s" % token} source_course_id = 311693 csv_file = "" payload = {'migration_type': 'course_copy_importer', 'settings[source_course_id]': source_course_id} with open(csv_file, '...
from django_nose.tools import assert_equal from pontoon.base.tests import TestCase from pontoon.base.utils import NewlineEscapePlaceable, mark_placeables class PlaceablesTests(TestCase): def test_newline_escape_placeable(self): """Test detecting newline escape sequences""" placeable = NewlineEsca...
mark_placeables(u'\\nA string'), u'<mark class="placeable" title="Escaped newline">\\n</mark>A string' ) assert_equal( mark_placeables(u'
A\\nstring'), u'A<mark class="placeable" title="Escaped newline">\\n</mark>string' ) assert_equal( mark_placeables(u'A string'), u'A string' ) assert_equal( mark_placeables(u'A\nstring'), u'A\nstring' )
# Copyright (C) 2009 - TODAY Renato Lima - Akretion # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html from odoo import api, fields, models class AccountTax(models.Model): _inherit = 'account.tax' fiscal_tax_ids = fields.Many2many( comodel_name='l10n_br_fiscal.tax', relation='l...
icmssn_range=None ): """ Returns all information required to apply taxes (in self + their children in case of a tax goup). We consider the sequence of the parent for group of taxes. Eg. considering letters as taxes and alphabetic order as sequenc...
[G, B([A, D, F]), E, C] will be computed as [A, D, F, C, E, G] RETURN: { 'total_excluded': 0.0, # Total without taxes 'total_included': 0.0, # Total with taxes 'taxes': [{ # One dict for each tax in self # and their...
ict['txIn_txin_script_len'] .append(self.txin_script_len) txDict['txIn_scriptSig'] = txDict.get('txIn_scriptSig', []) txDict['txIn_scriptSig'].append(get_hexstring(self.scriptSig)) txDict['txIn_sequence_no'] = txDict.get('txIn_sequence_no', []) txDict['txIn_sequence_no'].append(self.sequen...
nsactions = [] #print 'List of transactions' for i in range(0, self.transaction_cnt): tx = Transaction() tx.parse(bf) self.transactions.append(tx) def printBlock(se
lf): print 'magic_no:\t0x%8x' % self.magic_no print 'size: \t%u bytes' % self.blocksize print 'Block header:\t%s' % self.blockheader print 'Transactions: \t%d' % self.transaction_cnt for i in range(0, self.transaction_cnt): print '='*50 print ' TX NUMBE...
import json import logging import webapp2 from datetime import datetime from google.appengine.ext import ndb from controllers.api.api_base_controller import ApiBaseController from database.event_query import EventListQuery from helpers.award_helper import AwardHelper from helpers.district_helper import DistrictHelp...
= 60 * 60 def __init__(self, *args, **kw): super(ApiEventAwardsController, self).__init__(*args, **kw) self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.event_key) def _track_call(self, event_key): self._track_call_defer('event/award
s', event_key) def _render(self,event_key): self._set_event(event_key) award_dicts = [ModelToDict.awardConverter(award) for award in AwardHelper.organizeAwards(self.event.awards)] return json.dumps(award_dicts, ensure_ascii=True) class ApiEventDistrictPointsController(ApiEventController)...
""" Check the measured process sizes. If we are on a platform which supports multiple measuring facilities (e.g. Linux), check if the reported sizes match. This should help to protect against scaling errors (e.g. Byte vs KiB) or using the wrong value for a different measure (e.g. resident in physical memory vs virtual...
info = process._ProcessMemoryInfoPS() procinfo = process._ProcessMemoryInfoProc() self._match_sizes(psinfo, procinfo) def test_ps_vs_getrusage(self): '''Test process sizes match: ps util vs getrusage ''' psinfo = process._ProcessMemoryInfoPS() try:
resinfo = process._ProcessMemoryInfoResource() except AttributeError: pass else: self._match_sizes(psinfo, resinfo, ignore=['rss']) if psinfo.available and resinfo.available: self.assertTrue(resinfo.rss >= psinfo.rss) def test_proc_vs_getrusag...
"""Collection of fixtures and functions for the HomeKit tests."""
from unittest.mock import patch


def patch_debounce():
    """Return a patch replacing the debounce decorator with a pass-through.

    The replacement decorator simply forwards all positional and keyword
    arguments to the wrapped function, so tests run without debouncing.
    """

    def _passthrough(func):
        # Same contract as the real decorator, minus the debouncing.
        def _wrapped(*args, **kwargs):
            return func(*args, **kwargs)

        return _wrapped

    return patch(
        "homeassistant.components.homekit.accessories.debounce",
        _passthrough,
    )
# python-jinjatools # # Various tools for Jinja2, # including new filters and tests based on python-moretools, # a JinjaLoader class for Django, # and a simple JinjaBuilder class for SCons. # # Copyright (C) 2011-2015 Stefan Zimmermann <zimmermann.code@gmail.com> # # python-jinjatools is free software: you can redistri...
WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with python-jinjatools. If not, see <http://www.gnu.org/...
(jinja2.Environment): def __init__(self, filters={}, tests={}, globals={}, **kwargs): jinja2.Environment.__init__(self, **kwargs) morefilters = __import__('jinjatools.filters').filters.filters for name, func in chain(morefilters.items(), filters.items()): self.filters[name] = fu...
sessions" description: - allows the addition, modification and deletion of sessions in a consul cluster. These sessions can then be used in conjunction with key value pairs to implement distributed locks. In depth documentation for working with sessions can be found here http://www.consul.io/docs/internals/se...
sessions=sessions_list) elif state == 'node': node = module.params.get('node') if not node: module.fail_json( msg="nod
e name is required to retrieve sessions for node") sessions = consul_client.session.node(node, dc=datacenter) module.exit_json(changed=True, node=node, sessions=sessions) elif state == 'info': session_id = module.param...
"""Tests for asyncio/sslproto.py.""" try: import ssl except ImportError: ssl = None import trollius as asyncio from trollius import ConnectionResetError from trollius import sslproto from trollius import test_utils from trollius.test_utils import mock from trollius.test_utils import unittest @unittest.skipI...
ts(test_utils.TestCase): def setUp(self): self.loop = asyncio.new_event_loop() self.set_event_loop(self.loop) def ssl_protocol(self, waiter=None): sslcontext = test_utils.dummy_ssl_context() app_proto = asyncio.Protocol() proto = sslproto.SSLProtocol(self.loop, app_prot...
app_transport.close) return proto def connection_made(self, ssl_proto, do_handshake=None): transport = mock.Mock() sslpipe = mock.Mock() sslpipe.shutdown.return_value = b'' if do_handshake: sslpipe.do_handshake.side_effect = do_handshake else: ...
import tensorflow as tf
from tensorflow.contrib import slim as slim
from avb.ops import *
import math


def encoder(x, config, is_training=True):
    """Fully-connected encoder head producing IAF-style latent parameters.

    :param x: input batch; rescaled below from [0, 1] to [-1, 1]
        (assumes inputs lie in [0, 1] — TODO confirm with callers).
    :param config: dict providing 'df_dim', 'z_dim' and 'iaf_a_dim'.
    :param is_training: accepted for interface parity with other encoders;
        not used in this implementation.
    :return: tuple (zmean, log_zstd, a) — latent mean, log std-dev and the
        IAF context vector, each a linear head on a shared 2-layer trunk.
    """
    df_dim = config['df_dim']  # NOTE(review): read but never used below
    z_dim = config['z_dim']
    a_dim = config['iaf_a_dim']

    # Center x at 0
    x = 2*x - 1

    # Shared trunk: flatten spatial dims, then two softplus FC layers.
    net = flatten_spatial(x)
    net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus, scope="fc_0")
    net = slim.fully_connected(net, 300, activation_fn=tf.nn.softplus, scope="fc_1")

    # Three linear (no activation) output heads on the shared features.
    zmean = slim.fully_connected(net, z_dim, activation_fn=None)
    log_zstd = slim.fully_connected(net, z_dim, activation_fn=None)
    a = slim.fully_connected(net, a_dim, activation_fn=None)

    return zmean, log_zstd, a
""" 2015 gupon.jp Connector for C4D Python Generator """ import c4d, math, itertools, random from c4d.modules import mograph as mo #userdata id ID_SPLINE_TYPE = 2 ID_SPLINE_CLOSED = 4 ID_SPLINE_INTERPOLATION = 5 ID_SPLINE_SUB = 6 ID_SPLINE_ANGLE = 8 ID_SPLINE_MAXIMUMLENGTH = 9 ID_USE_SCREEN_DIST = 10 ID_USE_MAXS...
b = comb[1].world addP = True if useMaxSeg: if maxSegNum: acnt = 0 bcnt = 0 for p in obj.GetAllPoints(): if p == a: acnt += 1 if p == b: bcnt += 1 if acnt >= maxSegNum or bcnt >= maxSegNum: addP = False break else: addP = False if addP: obj.SetPoi...
) obj = c4d.BaseObject(c4d.Ospline) targetListData = op[c4d.ID_USERDATA, 3] numTargets = targetListData.GetObjectCount() if numTargets < 1: return obj targetList = [] for i in range(numTargets): targetList.append(targetListData.ObjectFromIndex(doc, i)) pointGroups = GetPointsFromObjects(targetList) ...
import pandas as pd import numpy as np from dateutil.relativedelta import relativedelta #### Utilities def get_first_visit_date(data_patient): ''' Determines the first visit for a given patient''' #IDEA Could be parallelized in Dask data_patient['first_visit_date'] = min(data_patient.visit_date) retur...
patient.next_visit_date)) late_time = reference_date - date_last_appointment if late_time.days > grace_period: status = 'LTFU' date_out = date_last_appointment if late_time.days <= grace_period: status = 'Followed' if (data_patient.reasonDescEn.iloc[0] is not np.nan) & (pd.to_dat...
etime(data_patient.discDate.iloc[0]) return pd.DataFrame([{'status': status, 'late_time': late_time, 'last_appointment': date_last_appointment, 'date_out':date_out , 'first_visit_date':data_patient.first_visit_da...
#!/usr/bin/env python import numpy as np import matplotlib.pyplot as plt import latex_plot_inits parameter_list = [[20, 5, 1., 1000, 1, None, 5], [100, 5, 1., 1000, 1, None, 10]] def classifier_perceptron_graphical(n=100, distance=5, learn_rate=1., max_iter=1000, num_threads=1, seed=None, nperceptrons=5): from shog...
converged dummy = BinaryLabels() dummy.io.set_loglevel(MSG_INFO) np.random.seed(seed) # Produce some (probably) linearly separable traini
ng data by hand # Two Gaussians at a far enough distance X = np.array(np.random.randn(_DIM,n))+distance Y = np.array(np.random.randn(_DIM,n)) label_train_twoclass = np.hstack((np.ones(n), -np.ones(n))) fm_train_real = np.hstack((X,Y)) feats_train = RealFeatures(fm_train_real) labels = BinaryLabels(label_train_t...
from typing import Optional from thinc.api import Model from .stop_words import STOP_WORDS from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS from .lex_attrs import LEX_ATTRS from .lemmatizer import RussianLemmatizer from ...language import Language class RussianDefaults(Language.Defaults): tokenizer_excepti...
) def make_lemmatizer( nlp: Language,
model: Optional[Model], name: str, mode: str, overwrite: bool, ): return RussianLemmatizer(nlp.vocab, model, name, mode=mode, overwrite=overwrite) __all__ = ["Russian"]
if (not found_next_field): print ("Skipping hit at " + hex(hit) + " - cannot find PHONE2 field") continue # can't find next field so skip this hit funi.seek(fb.tell()) phonestring2 = read_nullterm_unistring(funi) if (phonestring2 == ""): ...
print ("Skipping hit at " + hex(hit) + " - cannot find Received text field") continue # can't find next field so skip this hit
string_offset = fb.tell() funi.seek(string_offset) unistring = read_nullterm_unistring(funi) # print "Text (" + hex(string_offset).rstrip("L") +"): " + unistring timeval = 0 if (nums_listed == 0): # Original method: Manual adjustment of FILETIM...
"""Support for tracking consumption over given periods of time.""" from datetime import timedelta import logging from croniter import croniter import voluptuous as vol from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import CONF_NAME from homeassistant.helpers import discov...
"Invalid tariff: %s (possible tariffs: %s)", tariff, ", ".join(self._tariffs), ) return self._current_tariff = tariff self.async_write_ha_state() async def async_next_tariff(s
elf): """Offset current index.""" current_index = self._tariffs.index(self._current_tariff) new_index = (current_index + 1) % len(self._tariffs) self._current_tariff = self._tariffs[new_index] self.async_write_ha_state()
'like'] = self.like js['family'] = self.family js['packager'] = self.packager js['start_system'] = self.start_system js['has_os_release'] = self.has_os_release js['fallback_detection'] = self.fallback_detection return js class PackageInfo(object): """ Basic info...
lf.name js['version'] = str(self.version) js['arch'] = self.arch js['repo'] = self.repo if self.size is not None: js['size'] = self.size if
self.section is not None: js['section'] = self.section return js @classmethod def from_json(cls, js): """ Converts json dict to the object :param js: :return: """ obj = cls(name=js['name'], version=js['version'], arch=js['arch'], repo=js['re...
#!/Users/wuga/Documents/website/wuga/env/bin/python2.7 # # The Python Imaging Library # $Id$ # from __future__ import print_function import sys if sys.version_info[0] > 2: import tkinter else: import Tkinter as tkinter from PIL import Image, ImageTk # # an image viewer class UI(tkinter.Label): def _...
image self.image = ImageTk.BitmapImage(im, foreground="white") tkinter.Label.__init__(self, master, image=self.image, bd=0, bg="black") else:
# photo image self.image = ImageTk.PhotoImage(im) tkinter.Label.__init__(self, master, image=self.image, bd=0) # # script interface if __name__ == "__main__": if not sys.argv[1:]: print("Syntax: python viewer.py imagefile") sys.exit(1) filename = sys.argv[1] root...