Dataset schema (each row below is one source file; ranges are the min and max over the corpus):

    content            string, lengths 1 to 1.05M characters
    input_ids          list, lengths 1 to 883k ids
    ratio_char_token   float64, 1 to 22.9
    token_count        int64, 1 to 883k
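The derived columns follow directly from content: input_ids is the tokenization of the file, token_count is len(input_ids), and ratio_char_token is len(content) / token_count, i.e. characters per token (for the first row below, 403 tokens at a ratio of 2.826303 implies a file of about 1,139 characters). Below is a minimal sketch of how one row could be rebuilt. It assumes a GPT-2-style BPE tokenizer, which is consistent with ids such as 198 ("\n") and 6738 ("from") in the dumps below but is not confirmed by the dump itself; make_row is a hypothetical helper, not part of the dataset.

    # Sketch only: assumes the GPT-2 BPE tokenizer (unverified; the ids in
    # this dump, e.g. 198 == "\n" and 6738 == "from", match its vocabulary).
    from transformers import GPT2TokenizerFast

    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")

    def make_row(content: str) -> dict:
        """Rebuild one dataset row from a raw source file."""
        input_ids = tokenizer(content)["input_ids"]
        return {
            "content": content,
            "input_ids": input_ids,
            "token_count": len(input_ids),
            # characters per token: higher means the text tokenizes cheaply
            "ratio_char_token": len(content) / len(input_ids),
        }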
""" Bifurcation point classes. Each class locates and processes bifurcation points. * _BranchPointFold is a version based on BranchPoint location algorithms * BranchPoint: Branch process is broken (can't find alternate branch -- see MATCONT notes) Drew LaMar, March 2006 """ from __future__ import absolute_import, print_function from .misc import * from PyDSTool.common import args from .TestFunc import DiscreteMap, FixedPointMap from numpy import Inf, NaN, isfinite, r_, c_, sign, mod, \ subtract, divide, transpose, eye, real, imag, \ conjugate, average from scipy import optimize, linalg from numpy import dot as matrixmultiply from numpy import array, float, complex, int, float64, complex64, int32, \ zeros, divide, subtract, reshape, argsort, nonzero ##### _classes = ['BifPoint', 'BPoint', 'BranchPoint', 'FoldPoint', 'HopfPoint', 'BTPoint', 'ZHPoint', 'CPPoint', 'BranchPointFold', '_BranchPointFold', 'DHPoint', 'GHPoint', 'LPCPoint', 'PDPoint', 'NSPoint', 'SPoint'] __all__ = _classes ##### # Codimension-2 bifurcations # Discrete maps
[ 37811, 347, 361, 333, 30907, 966, 6097, 13, 220, 5501, 1398, 1179, 689, 290, 7767, 275, 361, 333, 30907, 2173, 13, 628, 220, 220, 220, 1635, 4808, 33, 25642, 12727, 37, 727, 318, 257, 2196, 1912, 319, 20551, 12727, 4067, 16113, 198, ...
2.826303
403
"""Urls for the marion application""" from django.urls import include, path from rest_framework import routers from .. import views router = routers.DefaultRouter() router.register(r"requests", views.DocumentRequestViewSet) urlpatterns = [ path("", include(router.urls)), ]
[ 37811, 16692, 7278, 329, 262, 1667, 295, 3586, 37811, 198, 198, 6738, 42625, 14208, 13, 6371, 82, 1330, 2291, 11, 3108, 198, 198, 6738, 1334, 62, 30604, 1330, 41144, 198, 198, 6738, 11485, 1330, 5009, 198, 198, 472, 353, 796, 41144, 1...
3.179775
89
""" PyXLL-Jupyter This package integrated Jupyter notebooks into Microsoft Excel. To install it, first install PyXLL (see https://www.pyxll.com). Briefly, to install PyXLL do the following:: pip install pyxll pyxll install Once PyXLL is installed then installing this package will add a button to the PyXLL ribbon toolbar that will start a Jupyter notebook browser as a custom task pane in Excel. To install this package use:: pip install pyxll_jupyter """ from setuptools import setup, find_packages from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name="pyxll_jupyter", description="Adds Jupyter notebooks to Microsoft Excel using PyXLL.", long_description=long_description, long_description_content_type='text/markdown', version="0.1.11", packages=find_packages(), include_package_data=True, package_data={ "pyxll_jupyter": [ "pyxll_jupyter/resources/ribbon.xml", "pyxll_jupyter/resources/jupyter.png", ] }, project_urls={ "Source": "https://github.com/pyxll/pyxll-jupyter", "Tracker": "https://github.com/pyxll/pyxll-jupyter/issues", }, classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: Microsoft :: Windows" ], entry_points={ "pyxll": [ "modules = pyxll_jupyter.pyxll:modules", "ribbon = pyxll_jupyter.pyxll:ribbon" ] }, install_requires=[ "pyxll >= 5.0.0", "jupyter >= 1.0.0", "PySide2" ] )
[ 37811, 198, 20519, 55, 3069, 12, 41, 929, 88, 353, 198, 198, 1212, 5301, 11521, 449, 929, 88, 353, 43935, 656, 5413, 24134, 13, 198, 198, 2514, 2721, 340, 11, 717, 2721, 9485, 55, 3069, 357, 3826, 3740, 1378, 2503, 13, 9078, 87, 2...
2.361905
735
from typing import List

from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.http import JsonResponse
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views import generic, View

from board.forms import SignUpForm
from .const import BOARD_VIEW_COLUMN_COUNT
from .models import Board, Priority, Membership, Contribution
from .models import Task


class SignUp(generic.CreateView):
    form_class = SignUpForm
    success_url = reverse_lazy('login')
    template_name = 'signup.html'


class CreateBoard(View):
    pass  # body not included in this sample


class CreateTask(View):
    pass  # body not included in this sample


class CreateBoardMembership(View):
    pass  # body not included in this sample


def parse_priority(value: str):
    choices = Priority.choices
    for i in range(0, len(choices)):
        if value == choices[i][1].lower():
            return choices[i][0]
[ 6738, 19720, 1330, 7343, 198, 198, 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 13, 12501, 273, 2024, 1330, 17594, 62, 35827, 198, 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 13, 27530, 1330, 11787, 198, 6738, 42625, 14208, 13, 4023,...
3.071942
278
# -*- coding: utf-8 -*-

# Learn more: https://github.com/kennethreitz/setup.py

from setuptools import setup, find_packages
import os

with open('README.md') as f:
    readme = f.read()

with open('LICENSE') as f:
    license = f.read()

setup(
    name='lazmond3-pylib-instagram-type',
    version='1.0.8',
    description='update from 1.0.8: hasattr: 1.0.7: medias , str get multiple + init.py',
    long_description=readme,
    author='lazmond3',
    author_email='moikilo00@gmail.com',
    url='https://github.com/lazmond3/pylib-instagram-type.git',
    install_requires=["lazmond3-pylib-debug"],
    license=license,
    packages=find_packages(exclude=('tests', 'docs')),
    test_suite='tests'
)
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 2, 14365, 517, 25, 3740, 1378, 12567, 13, 785, 14, 74, 1697, 2788, 260, 4224, 14, 40406, 13, 9078, 198, 198, 6738, 900, 37623, 10141, 1330, 9058, 11, 1064, 62, ...
2.388514
296
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-15 22:49
from __future__ import unicode_literals

from django.db import migrations
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 2980, 515, 416, 37770, 352, 13, 1157, 13, 1433, 319, 13130, 12, 486, 12, 1314, 2534, 25, 2920, 198, 6738, 11593, 37443, 834, 1330, 28000, 1098, 62, 17201, 874, 198,...
2.709091
55
#!/usr/bin/env python3
#
# Copyright (c) 2019 Miklos Vajna and contributors.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""The test_webframe module covers the webframe module."""

from typing import List
from typing import TYPE_CHECKING
from typing import Tuple
from typing import cast
import configparser
import datetime
import os
import unittest
import unittest.mock
import time

# pylint: disable=unused-import
import yattag

import webframe

if TYPE_CHECKING:
    # pylint: disable=no-name-in-module,import-error,unused-import
    from wsgiref.types import StartResponse  # noqa: F401

if __name__ == '__main__':
    unittest.main()
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 2, 198, 2, 15069, 357, 66, 8, 13130, 17722, 33280, 39838, 2616, 290, 20420, 13, 198, 2, 5765, 286, 428, 2723, 2438, 318, 21825, 416, 257, 347, 10305, 12, 7635, 5964, 326, 460, ...
3.12
225
from __future__ import unicode_literals
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
import requests
from youtube_search import YoutubeSearch
import youtube_dl
import eyed3.id3
import eyed3
import lyricsgenius
import telepot

spotifyy = spotipy.Spotify(
    client_credentials_manager=SpotifyClientCredentials(
        client_id='a145db3dcd564b9592dacf10649e4ed5',
        client_secret='389614e1ec874f17b8c99511c7baa2f6'))
genius = lyricsgenius.Genius('biZZReO7F98mji5oz3cE0FiIG73Hh07qoXSIzYSGNN3GBsnY-eUrPAVSdJk_0_de')
token = 'token bot'
bot = telepot.Bot(token)
[ 6738, 11593, 37443, 834, 1330, 28000, 1098, 62, 17201, 874, 198, 11748, 4136, 541, 88, 198, 6738, 4136, 541, 88, 13, 12162, 1071, 17, 1330, 26778, 11792, 34, 445, 14817, 198, 11748, 7007, 198, 6738, 35116, 62, 12947, 1330, 27431, 18243,...
2.216028
287
#------------------------------------------------------------------------------
# Copyright (c) 2018-2019, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
#------------------------------------------------------------------------------
"""Test the typed dictionary.

"""
import sys

import pytest

from atom.api import Atom, Dict, Int, atomdict

MEMBERS = ['untyped', 'keytyped', 'valuetyped', 'fullytyped',
           'untyped_default', 'keytyped_default', 'valuetyped_default',
           'fullytyped_default']


def test_setitem(atom_dict):
    """Test setting items.

    """
    atom_dict.untyped[''] = 1
    assert atom_dict.untyped[''] == 1
    atom_dict.keytyped[1] = ''
    assert atom_dict.keytyped[1] == ''
    with pytest.raises(TypeError):
        atom_dict.keytyped[''] = 1
    atom_dict.valuetyped[1] = 1
    assert atom_dict.valuetyped[1] == 1
    with pytest.raises(TypeError):
        atom_dict.valuetyped[''] = ''
    atom_dict.fullytyped[1] = 1
    assert atom_dict.fullytyped[1] == 1
    with pytest.raises(TypeError):
        atom_dict.fullytyped[''] = 1
    with pytest.raises(TypeError):
        atom_dict.fullytyped[1] = ''


def test_setdefault(atom_dict):
    """Test using setdefault.

    """
    assert atom_dict.untyped.setdefault('', 1) == 1
    assert atom_dict.untyped.setdefault('', 2) == 1
    assert atom_dict.untyped[''] == 1
    assert atom_dict.keytyped.setdefault(1, '') == ''
    assert atom_dict.keytyped[1] == ''
    with pytest.raises(TypeError):
        atom_dict.keytyped.setdefault('', 1)
    assert atom_dict.valuetyped.setdefault(1, 1) == 1
    assert atom_dict.valuetyped.setdefault(1, '') == 1
    assert atom_dict.valuetyped[1] == 1
    with pytest.raises(TypeError):
        atom_dict.valuetyped.setdefault(2, '')
    assert atom_dict.fullytyped.setdefault(1, 1) == 1
    assert atom_dict.fullytyped.setdefault(1, '') == 1
    assert atom_dict.fullytyped[1] == 1
    with pytest.raises(TypeError):
        atom_dict.fullytyped.setdefault('', 1)
    with pytest.raises(TypeError):
        atom_dict.fullytyped.setdefault(2, '')


def test_update(atom_dict):
    """Test update a dict.

    """
    atom_dict.untyped.update({'': 1})
    assert atom_dict.untyped[''] == 1
    atom_dict.untyped.update([('1', 1)])
    assert atom_dict.untyped['1'] == 1
    atom_dict.keytyped.update({1: 1})
    assert atom_dict.keytyped[1] == 1
    atom_dict.keytyped.update([(2, 1)])
    assert atom_dict.keytyped[1] == 1
    with pytest.raises(TypeError):
        atom_dict.keytyped.update({'': 1})
    atom_dict.valuetyped.update({1: 1})
    assert atom_dict.valuetyped[1] == 1
    atom_dict.valuetyped.update([(2, 1)])
    assert atom_dict.valuetyped[1] == 1
    with pytest.raises(TypeError):
        atom_dict.valuetyped.update({'': ''})
    atom_dict.fullytyped.update({1: 1})
    assert atom_dict.fullytyped[1] == 1
    atom_dict.fullytyped.update([(2, 1)])
    assert atom_dict.fullytyped[1] == 1
    with pytest.raises(TypeError):
        atom_dict.fullytyped.update({'': 1})
    with pytest.raises(TypeError):
        atom_dict.fullytyped.update({'': ''})
[ 2, 10097, 26171, 198, 2, 15069, 357, 66, 8, 2864, 12, 23344, 11, 399, 14913, 291, 7712, 4816, 13, 198, 2, 198, 2, 4307, 6169, 739, 262, 2846, 286, 262, 40499, 347, 10305, 13789, 13, 198, 2, 198, 2, 383, 1336, 5964, 318, 287, 262...
2.425037
1,334
from datetime import datetime
[ 6738, 4818, 8079, 1330, 4818, 8079, 628 ]
4.428571
7
from django.contrib import admin

from bible.models import Bible, VerseOfTheDay
[ 6738, 42625, 14208, 13, 3642, 822, 1330, 13169, 198, 6738, 41169, 13, 27530, 1330, 9111, 11, 38641, 5189, 464, 12393, 628, 198 ]
3.681818
22
""" Our own implementation of an abstract syntax tree (AST). The convert function recursively converts a Python AST (from the module `ast`) to our own AST (of the class `Node`). """ import ast from logging import debug from typy.builtin import data_types from typy.exceptions import NotYetSupported, NoSuchAttribute, NotIterable from typy import types def _assign(target, value, type_map): value_type = value.check() if isinstance(target, Name): target_type = target.check() type_map.add_variable(target_type.id, value_type) elif isinstance(target, Attribute): target_type, attr = target.check() target_type.set_attribute(attr, value_type) else: raise NotYetSupported('assignment to', target) def convert(type_map, node): class_name = node.__class__.__name__ try: # Try to convert to a node class_ = globals()[class_name] return class_(type_map, node) except KeyError: try: # Try to convert to a builtin type class_ = getattr(data_types, class_name) return class_() except AttributeError: raise NotYetSupported('node', node)
[ 37811, 198, 5122, 898, 7822, 286, 281, 12531, 15582, 5509, 357, 11262, 737, 198, 198, 464, 10385, 2163, 664, 1834, 2280, 26161, 257, 11361, 29273, 357, 6738, 262, 8265, 4600, 459, 63, 8, 198, 1462, 674, 898, 29273, 357, 1659, 262, 139...
2.589744
468
""" Testing: - uploading over existing files - using deleted credentials - using expired credentials """ import io import minio from minio import Minio import pytest from minio.credentials import AssumeRoleProvider, Credentials from entityservice.object_store import connect_to_object_store, connect_to_upload_object_store from entityservice.settings import Config restricted_upload_policy = """{ "Version": "2012-10-17", "Statement": [ { "Action": [ "s3:PutObject" ], "Effect": "Allow", "Resource": [ "arn:aws:s3:::uploads/2020/*" ], "Sid": "Upload-access-to-specific-bucket-only" } ] } """
[ 37811, 198, 44154, 25, 198, 532, 33794, 625, 4683, 3696, 198, 532, 1262, 13140, 18031, 198, 532, 1262, 21350, 18031, 198, 198, 37811, 198, 11748, 33245, 198, 198, 11748, 949, 952, 198, 6738, 949, 952, 1330, 1855, 952, 198, 11748, 12972,...
2.689516
248
# Copyright 2020 Soil, Inc.
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Implementation of SQLAlchemy backend."""

import collections
import copy
import datetime
import functools
import inspect
import sys
import threading

from oslo_db.sqlalchemy import session as db_session
from oslo_log import log as logging

import soil.conf
from soil.i18n import _


CONF = soil.conf.CONF

LOG = logging.getLogger(__name__)

_LOCK = threading.Lock()
_FACADE = None
[ 2, 15069, 12131, 1406, 346, 11, 3457, 13, 198, 2, 15069, 357, 66, 8, 2813, 1395, 13, 27061, 11, 257, 1597, 4326, 286, 21698, 3457, 13, 198, 2, 15069, 3050, 1578, 1829, 5070, 355, 7997, 416, 262, 198, 2, 22998, 286, 262, 2351, 1578...
3.440341
352
import calendar
import datetime
from unittest import TestCase

import pytz
from dateutil.parser import parse as date_parse

from tests import BaseTestCase
from redash import models, redis_connection
from redash.models import db, types
from redash.utils import gen_query_hash, utcnow


def _set_up_dashboard_test(d):
    d.g1 = d.factory.create_group(name='First', permissions=['create', 'view'])
    d.g2 = d.factory.create_group(name='Second', permissions=['create', 'view'])
    d.ds1 = d.factory.create_data_source()
    d.ds2 = d.factory.create_data_source()
    db.session.flush()
    d.u1 = d.factory.create_user(group_ids=[d.g1.id])
    d.u2 = d.factory.create_user(group_ids=[d.g2.id])

    db.session.add_all([
        models.DataSourceGroup(group=d.g1, data_source=d.ds1),
        models.DataSourceGroup(group=d.g2, data_source=d.ds2)
    ])

    d.q1 = d.factory.create_query(data_source=d.ds1)
    d.q2 = d.factory.create_query(data_source=d.ds2)
    d.v1 = d.factory.create_visualization(query_rel=d.q1)
    d.v2 = d.factory.create_visualization(query_rel=d.q2)
    d.w1 = d.factory.create_widget(visualization=d.v1)
    d.w2 = d.factory.create_widget(visualization=d.v2)
    d.w3 = d.factory.create_widget(visualization=d.v2, dashboard=d.w2.dashboard)
    d.w4 = d.factory.create_widget(visualization=d.v2)
    d.w5 = d.factory.create_widget(visualization=d.v1, dashboard=d.w4.dashboard)
    d.w1.dashboard.is_draft = False
    d.w2.dashboard.is_draft = False
    d.w4.dashboard.is_draft = False
[ 11748, 11845, 198, 11748, 4818, 8079, 198, 6738, 555, 715, 395, 1330, 6208, 20448, 198, 198, 11748, 12972, 22877, 198, 6738, 3128, 22602, 13, 48610, 1330, 21136, 355, 3128, 62, 29572, 198, 6738, 5254, 1330, 7308, 14402, 20448, 198, 198, ...
2.315549
656
'''
Fibonacci Sequence
'''

import HeaderOfFiles  # presumably provides fibonacciSeq


while True:
    try:
        f = int(input("Enter a number for Fibonacci: "))
        break
    except ValueError:
        print("Give me a number please!")

fibonacciSeq(f)
[ 7061, 6, 198, 37, 571, 261, 44456, 45835, 198, 7061, 6, 198, 198, 11748, 48900, 5189, 25876, 198, 220, 220, 220, 220, 220, 220, 220, 220, 198, 220, 220, 220, 220, 198, 198, 4514, 6407, 25, 198, 220, 220, 220, 1949, 25, 198, 220, ...
2.073394
109
# Copyright 2021 MosaicML. All Rights Reserved.

from composer.algorithms.mixup.mixup import MixUp as MixUp
from composer.algorithms.mixup.mixup import MixUpHparams as MixUpHparams
from composer.algorithms.mixup.mixup import mixup_batch as mixup_batch

_name = 'MixUp'
_class_name = 'MixUp'
_functional = 'mixup_batch'
_tldr = 'Blends pairs of examples and labels'
_attribution = '(Zhang et al, 2017)'
_link = 'https://arxiv.org/abs/1710.09412'
_method_card = ''
[ 2, 15069, 33448, 5826, 18452, 5805, 13, 1439, 6923, 33876, 13, 198, 198, 6738, 26777, 13, 282, 7727, 907, 13, 19816, 929, 13, 19816, 929, 1330, 15561, 4933, 355, 15561, 4933, 198, 6738, 26777, 13, 282, 7727, 907, 13, 19816, 929, 13, ...
2.840491
163
from __future__ import absolute_import, print_function

import unittest
import os

import tensorflow as tf
from tensorflow.keras import regularizers

from niftynet.network.simple_gan import SimpleGAN
from tests.niftynet_testcase import NiftyNetTestCase

if __name__ == "__main__":
    tf.test.main()
[ 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 11, 3601, 62, 8818, 198, 198, 11748, 555, 715, 395, 198, 198, 11748, 28686, 198, 11748, 11192, 273, 11125, 355, 48700, 198, 6738, 11192, 273, 11125, 13, 6122, 292, 1330, 3218, 11341, 198, ...
3.212766
94
import time

import thingspeak_wrapper as tsw

# Initiate the class ThingWrapper with (CHANNEL_ID, WRITE_API__KEY, READ_API_KEY)
# if is a public channel just pass the CHANNEL_ID argument, api_key defaults are None
my_channel = tsw.wrapper.ThingWrapper(501309, '6TQDNWJQ44FA0GAQ', '10EVD2N6YIHI5O7Z')

# all set of functions are:
# my_channel.sender()
# my_channel.multiple_sender()
# my_channel.get_json_feeds()
# my_channel.get_json_feeds_from()
# my_channel.get_xml_feeds()
# my_channel.get_xml_feeds_from()
# my_channel.get_csv_feeds()
# my_channel.get_csv_feeds_from()

# ---------------------------
# Now you can use all the possible functions

# Send a value to a single field
my_channel.sender(1, 4)
# this delay is due to limitation of thingspeak free account which allow you to post data every 15 sec minimum
time.sleep(15)

# ---------------------------
# Send data to multiple field
# It take 2 input as lists ([..], [..])
# Create lists of fields and values
fields = [1, 2, 3]
values = [22.0, 1029, 700]
# pass them to the function
my_channel.multiple_sender(fields, values)

# ---------------------------
# Get data functions returns data as json, xml, csv
# optionally csv can be returned as Pandas Data frame
# pass arguments to the function (field, data_quantity)
# default values are ( fields='feeds', results_quantity=None)
# you will get all fields and all values (max 8000)
json_field1 = my_channel.get_json_feeds(1, 300)
print(json_field1)

# get xml data pass same values as previous function
xml_field1 = my_channel.get_xml_feeds(1, 300)
print(xml_field1)

# get csv data
# this function requires to specify (field, pandas_format=True, result_quantity=None)
# defaults are (fields='feeds', pandas_format=True, result_quantity=None)
csv_field1 = my_channel.get_csv_feeds(1, pandas_format=True, results_quantity=300)
print(csv_field1)

# data without pandas_format
csv_no_pandas = my_channel.get_csv_feeds(1, pandas_format=False, results_quantity=300)
print(csv_no_pandas)

# there is the possibility to request data from and to specific dates
# set date and time as strings YYYY-MM-DD HH:NN:SS
start_date, start_time = '2018-05-21', '12:00:00'
stop_date, stop_time = '2018-05-21', '23:59:59'
# pass values to the function
# defaults are (start_date, start_time, stop_date=None, stop_time=None, fields='feeds')
values_from_date = my_channel.get_json_feeds_from(start_date, start_time, stop_date, stop_time, 1)
print(values_from_date)
[ 11748, 640, 198, 198, 11748, 1243, 36729, 62, 48553, 355, 256, 2032, 198, 198, 2, 16204, 378, 262, 1398, 21561, 36918, 2848, 351, 357, 3398, 22846, 3698, 62, 2389, 11, 44423, 62, 17614, 834, 20373, 11, 20832, 62, 17614, 62, 20373, 8, ...
2.754615
921
#
# Copyright (c) 2019, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings

import matplotlib.pyplot as plt
import neptune
import numpy as np
import skopt.plots as sk_plots
from skopt.utils import dump

from neptunecontrib.monitoring.utils import axes2fig, expect_not_a_run


def log_results(results, experiment=None, log_plots=True, log_pickle=True):
    """Logs runs results and parameters to neptune.

    Logs all hyperparameter optimization results to Neptune. Those include
    best score ('best_score' metric), best parameters ('best_parameters' property),
    convergence plot ('diagnostics' log), evaluations plot ('diagnostics' log),
    and objective plot ('diagnostics' log).

    Args:
        results('scipy.optimize.OptimizeResult'): Results object that is typically an output
            | of the function like `skopt.forest_minimize(...)`
        experiment(`neptune.experiments.Experiment`): Neptune experiment. Default is None.
        log_plots: ('bool'): If True skopt plots will be logged to Neptune.
        log_pickle: ('bool'): if True pickled skopt results object will be logged to Neptune.

    Examples:
        Run skopt training::

            ...
            results = skopt.forest_minimize(objective, space,
                                            base_estimator='ET', n_calls=100, n_random_starts=10)

        Initialize Neptune::

            import neptune

            neptune.init(api_token='ANONYMOUS',
                         project_qualified_name='shared/showroom')
            neptune.create_experiment(name='optuna sweep')

        Send best parameters to Neptune::

            import neptunecontrib.monitoring.skopt as sk_utils

            sk_utils.log_results(results)

    You can explore an example experiment in Neptune:
    https://ui.neptune.ai/o/shared/org/showroom/e/SHOW-1065/logs
    """
    _exp = experiment if experiment else neptune

    expect_not_a_run(_exp)

    _log_best_score(results, _exp)
    _log_best_parameters(results, _exp)

    if log_plots:
        _log_plot_convergence(results, _exp)
        _log_plot_evaluations(results, _exp)
        _log_plot_regret(results, _exp)
        _log_plot_objective(results, _exp)

    if log_pickle:
        _log_results_object(results, _exp)
[ 2, 198, 2, 15069, 357, 66, 8, 13130, 11, 26461, 23500, 1338, 13, 1976, 267, 13, 78, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, ...
2.665714
1,050
#!/usr/bin/python
#
# This might be horrible code...
# ...but it works
# Feel free to re-write in a better way
# And if you want to - send it to us, we'll update ;)
# maltego@paterva.com (2010/10/18)
#

import sys
from xml.dom import minidom
[ 2, 48443, 14629, 14, 8800, 14, 29412, 220, 198, 2, 198, 2, 770, 1244, 307, 12361, 2438, 986, 198, 2, 2644, 4360, 340, 2499, 198, 2, 18571, 1479, 284, 302, 12, 13564, 287, 257, 1365, 835, 198, 2, 843, 611, 345, 765, 284, 532, 375...
2.697917
96
# Generated by Django 2.1.5 on 2019-02-12 21:18

import django.db.models.deletion
from django.db import migrations, models
[ 2, 2980, 515, 416, 37770, 362, 13, 16, 13, 20, 319, 13130, 12, 2999, 12, 1065, 2310, 25, 1507, 198, 198, 11748, 42625, 14208, 13, 9945, 13, 27530, 13, 2934, 1616, 295, 198, 6738, 42625, 14208, 13, 9945, 1330, 15720, 602, 11, 4981, ...
2.818182
44
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.

"""IIIF API for Invenio."""

IIIF_API_PREFIX = '/iiif/'
"""URL prefix to IIIF API."""

IIIF_UI_URL = '/api{}'.format(IIIF_API_PREFIX)
"""URL to IIIF API endpoint (allow hostname)."""

IIIF_PREVIEWER_PARAMS = {
    'size': '750,'
}
"""Parameters for IIIF image previewer extension."""

IIIF_PREVIEW_TEMPLATE = 'invenio_iiif/preview.html'
"""Template for IIIF image preview."""

IIIF_API_DECORATOR_HANDLER = 'invenio_iiif.handlers:protect_api'
"""Image opener handler decorator."""

IIIF_IMAGE_OPENER_HANDLER = 'invenio_iiif.handlers:image_opener'
"""Image opener handler function."""
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 198, 2, 770, 2393, 318, 636, 286, 554, 574, 952, 13, 198, 2, 15069, 357, 34, 8, 2864, 327, 28778, 13, 198, 2, 198, 2, 554, 574, 952, 318, 1479, 3788, 26, 345,...
2.665574
305
#!/user/bin/env/python
""" pub_ingest.py -- Read a bibtex file and make VIVO RDF

    The following objects will be made as needed:
    -- publisher
    -- journal
    -- information resource
    -- timestamp for the information resource
    -- people
    -- authorships
    -- concepts

    The resulting ADD and SUB RDF file can then be read into VIVO

    To Do
    -- Complete refactor as an update process.  Create reusable parts so
       that a publication can be created from bibtex, doi or pmid
    -- Improve DateTimeValue accuracy.  Currently all publications are
       entered as yearMonth precision.  Sometimes we have more information,
       sometimes we have less.  We should use the information as presented
       by the publisher, not overstate (yearMonth when there is only year)
       and not understate (yearMonth when we know the day).
    -- Reuse date objects -- only create dates when the appropriate date
       entity is not already in VIVO
    -- Update for VIVO-ISF
    -- Update or vivofoundation and vivopubs
"""

__author__ = "Michael Conlon"
__copyright__ = "Copyright 2014, University of Florida"
__license__ = "BSD 3-Clause license"
__version__ = "1.3"

import sys
from datetime import datetime, date
from pybtex.database.input import bibtex
import tempita
import vivotools

MAX_AUTHORS = 50

publisher_report = {}
journal_report = {}
title_report = {}
author_report = {}
disambiguation_report = {}
dictionaries = []
journal_dictionary = {}
publisher_dictionary = {}
title_dictionary = {}


def open_files(bibtex_file_name):
    """
    Give the name of the bibtex file to be used as input, generate the
    file names for rdf, rpt and lst.  Return the open file handles
    """
    base = bibtex_file_name[:bibtex_file_name.find('.')]
    rpt_file = open(base+'.rpt', 'w')
    lst_file = open(base+'.lst', 'w')
    rdf_file = open(base+'.rdf', 'w')
    return [rdf_file, rpt_file, lst_file]


def update_disambiguation_report(authors, publication_uri):
    """
    Given the authors structure and the publication_uri, add to the
    report if any of the authors need to be disambiguated
    """
    for value in authors.values():
        if value[8] == "Disambig":
            if publication_uri in disambiguation_report:
                result = disambiguation_report[publication_uri]
                result[len(result.keys())+1] = value
                disambiguation_report[publication_uri] = result
            else:
                disambiguation_report[publication_uri] = {1: value}
    return

# start here.  Create a parser for bibtex and use it to read the file of
# bibtex entries.  open the output files

print datetime.now(), "Read the BibTex"
bibtex_file_name = sys.argv[1]
[rdf_file, rpt_file, lst_file] = open_files(bibtex_file_name)
parser = bibtex.Parser()
bib_data = parser.parse_file(bibtex_file_name)
bib_sorted = sorted(bib_data.entries.items(),
                    key=lambda x: x[1].fields['title'])
print >>rdf_file, "<!--", len(bib_data.entries.keys()),\
    "publications to be processed -->"
print datetime.now(), len(bib_data.entries.keys()),\
    "publications to be processed."

# make dictionaries for people, papers, publishers, journals, concepts

print datetime.now(), "Creating the dictionaries"
print datetime.now(), "Publishers"
publisher_dictionary = vivotools.make_publisher_dictionary()
print datetime.now(), "Journals"
journal_dictionary = vivotools.make_journal_dictionary()
print datetime.now(), "People"
dictionaries = make_people_dictionaries()
print datetime.now(), "Titles"
title_dictionary = vivotools.make_title_dictionary()
print datetime.now(), "Concepts"
vivotools.make_concept_dictionary()

# process the papers

print >>rdf_file, vivotools.rdf_header()
for key, value in bib_sorted:
    try:
        title = value.fields['title'].title() + " "
    except:
        title_report["No title"] = ["No Title", None, 1]
        print >>rdf_file, "<!-- No title found. No RDF necessary -->"
        continue
    title = abbrev_to_words(title)
    title = title[0:-1]
    if title in title_report:
        print >>rdf_file, "<!-- Title", title,\
            "handled previously. No RDF necessary -->"
        title_report[title][2] = title_report[title][2] + 1
        continue
    else:
        print >>rdf_file, "<!-- Begin RDF for " + title + " -->"
        print datetime.now(), "<!-- Begin RDF for " + title + " -->"
        document = {}
        document['title'] = title
        title_report[title] = ["Start", None, 1]
        [found, uri] = vivotools.find_title(title, title_dictionary)
        if not found:
            title_report[title][0] = "Create"  # Create

            # Authors

            [author_rdf, authors] = make_author_rdf(value)
            document['authors'] = make_document_authors(authors)
            if count_uf_authors(authors) == 0:
                print >>rdf_file, "<!-- End RDF. No UF authors for " +\
                    title + " No RDF necessary -->"
                title_report[title][0] = "No UF Auth"
                continue
            update_author_report(authors)

            # Datetime

            [datetime_rdf, datetime_uri] = make_datetime_rdf(value, title)

            # Publisher

            [journal_create, journal_name, journal_uri] =\
                make_journal_uri(value)
            [publisher_create, publisher, publisher_uri, publisher_rdf] =\
                make_publisher_rdf(value)

            # Journal

            [journal_rdf, journal_uri] = make_journal_rdf(value,
                journal_create, journal_name, journal_uri)

            # Publisher/Journal bi-directional links

            publisher_journal_rdf = ""
            if journal_uri != "" and publisher_uri != "" and\
               (journal_create or publisher_create):
                publisher_journal_rdf = \
                    make_publisher_journal_rdf(publisher_uri, journal_uri)

            # Authorships

            publication_uri = vivotools.get_vivo_uri()
            title_report[title][1] = publication_uri
            [authorship_rdf, authorship_uris] = make_authorship_rdf(authors,
                publication_uri)

            # AuthorInAuthorships

            author_in_authorship_rdf = make_author_in_authorship_rdf(authors,
                authorship_uris)

            # Journal/Publication bi-directional links

            if journal_uri != "" and publication_uri != "":
                journal_publication_rdf = \
                    make_journal_publication_rdf(journal_uri, publication_uri)

            # PubMed values

            pubmed_rdf = ""
            if 'doi' in value.fields:
                [pubmed_rdf, sub] = vivotools.update_pubmed(publication_uri,
                    value.fields['doi'])
                if sub != "":
                    raise Exception("Non empty subtraction RDF"+\
                        "for Update PubMed")

            # Publication

            publication_rdf = make_publication_rdf(value,
                title, publication_uri, datetime_uri, authorship_uris)
            print >>rdf_file, datetime_rdf, publisher_rdf, journal_rdf,\
                publisher_journal_rdf, author_rdf, authorship_rdf,\
                author_in_authorship_rdf, journal_publication_rdf,\
                publication_rdf, pubmed_rdf
            print >>rdf_file, "<!-- End RDF for " + title + " -->"
            print >>lst_file, vivotools.string_from_document(document),\
                'VIVO uri', publication_uri, '\n'
            update_disambiguation_report(authors, publication_uri)
        else:
            title_report[title][0] = "Found"
            title_report[title][1] = uri
            print >>rdf_file, "<!-- Found: " + title +\
                " No RDF necessary -->"
print >>rdf_file, vivotools.rdf_footer()

#
# Reports
#

print >>rpt_file, """
Publisher Report

Lists the publishers that appear in the bibtex file in alphabetical order.
For each publisher, show the improved name, the number of papers in journals
of this publisher, the action to be taken for the publisher and the VIVO
URI -- the URI is the new URI to be created if Action is Create, otherwise
it is the URI of the found publisher in VIVO.

Publisher                                Papers Action VIVO URI
---------------------------------------------------------------------------------"""
publisher_count = 0
actions = {}
for publisher in sorted(publisher_report.keys()):
    publisher_count = publisher_count + 1
    [create, uri, count] = publisher_report[publisher]
    if create:
        result = "Create"
    else:
        result = "Found "
    actions[result] = actions.get(result, 0) + 1
    print >>rpt_file, "{0:40}".format(publisher[0:40]), "{0:>3}".format(count), result, uri
print >>rpt_file, ""
print >>rpt_file, "Publisher count by action"
print >>rpt_file, ""
for action in sorted(actions):
    print >>rpt_file, action, actions[action]
print >>rpt_file, publisher_count, "publisher(s)"

print >>rpt_file, """
Journal Report

Lists the journals that appear in the bibtex file in alphabetical order.
For each journal, show the improved name, the number of papers to be linked
to the journal, the action to be taken for the journal and the VIVO URI --
the URI is the new URI to be created if Action is Create, otherwise it is
the URI of the found journal in VIVO.

Journal                                  Papers Action VIVO URI
---------------------------------------------------------------------------------"""
journal_count = 0
actions = {}
for journal in sorted(journal_report.keys()):
    journal_count = journal_count + 1
    [create, uri, count] = journal_report[journal]
    if create:
        result = "Create"
    else:
        result = "Found "
    actions[result] = actions.get(result, 0) + 1
    print >>rpt_file, "{0:40}".format(journal[0:40]), "{0:>3}".format(count), result, uri
print >>rpt_file, ""
print >>rpt_file, "Journal count by action"
print >>rpt_file, ""
for action in sorted(actions):
    print >>rpt_file, action, actions[action]
print >>rpt_file, journal_count, "journal(s)"

print >>rpt_file, """
Title Report

Lists the titles that appear in the bibtex file in alphabetical order.  For
each title, show the action to be taken, the number of times the title
appears in the bibtex, the improved title and the VIVO URI of the
publication -- the URI is the new URI to be created if action is Create,
otherwise it is the URI of the found publication in VIVO.

Action     #  Title and VIVO URI
---------------------------------------------------------------------------------"""
title_count = 0
actions = {}
for title in sorted(title_report.keys()):
    title_count = title_count + 1
    [action, uri, count] = title_report[title]
    actions[action] = actions.get(action, 0) + 1
    print >>rpt_file, "{0:>10}".format(action), title, uri
print >>rpt_file, ""
print >>rpt_file, "Title count by action"
print >>rpt_file, ""
for action in sorted(actions):
    print >>rpt_file, action, actions[action]
print >>rpt_file, title_count, "title(s)"

print >>rpt_file, """
Author Report

For each author found in the bibtex file, show the author's name followed
by the number of papers for the author in the bibtex to be entered, followed
by a pair of results for each time the author appears on a paper in the
bibtex.  The result pair contains an action and a URI.  The action is
"non UF" if a non-UF author stub will be created, the URI is the URI of
the new author stub.

Action "Make UF" if a new UF author stub will be created with the URI of
the new author stub.  "Found UF" indicates the author was found at the URI.
"Disambig" if multiple UF people were found with the given name.  The URI
is the URI of one of the found people.  Follow-up is needed to determine if
correct and reassign author if not correct.

Author                    Action URI                                           Action URI
----------------------------------------------------------------------------------------------"""
author_count = 0
actions = {}
for author in sorted(author_report.keys()):
    author_count = author_count + 1
    results = ""
    papers = len(author_report[author])
    action = author_report[author][1][8]  # 1st report, 8th value is action
    actions[action] = actions.get(action, 0) + 1
    for key in author_report[author].keys():
        value = author_report[author][key]
        results = results + value[8] + " " + "{0:45}".format(value[9])
    print >>rpt_file, "{0:25}".format(author), "{0:>3}".format(papers), results
print >>rpt_file, ""
print >>rpt_file, "Author count by action"
print >>rpt_file, ""
for action in sorted(actions):
    print >>rpt_file, action, actions[action]
print >>rpt_file, author_count, "authors(s)"

print >>rpt_file, """
Disambiguation Report

For each publication with one or more authors to disambiguate, list the
paper, and then the authors in question with each of the possible URIs to
be disambiguated, show the URI of the paper, and then for each author that
needs to be disambiguated on the paper, show the last name, first name and
middle initial and all the URIs in VIVO for UF persons with the same names.
"""
for uri in disambiguation_report.keys():
    print >>rpt_file, "The publication at", uri, "has one or more authors in question"
    for key, value in disambiguation_report[uri].items():
        uris = value[9].split(";")
        print >>rpt_file, "    ", value[4], value[5], value[6], ":"
        for u in uris:
            person = vivotools.get_person(u)
            if 'last_name' not in person:
                person['last_name'] = "No last name"
            if 'middle_name' not in person:
                person['middle_name'] = "No middle name"
            if 'first_name' not in person:
                person['first_name'] = "No first name"
            if 'home_department_name' not in person:
                person['home_department_name'] = "No home department"
            npubs = len(person['authorship_uris'])
            print >>rpt_file, "        ", u, person['last_name'], \
                person['first_name'], person['middle_name'], \
                person['home_department_name'], "Number of pubs = ", npubs
    print >>rpt_file
    print >>rpt_file

#
# Close the files, we're done
#

rpt_file.close()
rdf_file.close()
lst_file.close()
[ 2, 48443, 7220, 14, 8800, 14, 24330, 14, 29412, 198, 37811, 198, 220, 220, 220, 2240, 62, 278, 395, 13, 9078, 1377, 4149, 257, 275, 571, 16886, 2393, 290, 787, 569, 3824, 46, 371, 8068, 628, 220, 220, 220, 383, 1708, 5563, 481, 30...
2.542837
5,626
#!/usr/bin/env python

'''Generally useful bits and bobs.'''

import queue                    # For PrintThread and exe_run
from time import sleep, time, gmtime, strftime   # For lock timeout, exe_run timeout and logging
from multiprocessing import RLock
from copy import copy
import threading                # For PrintThread
import sys
import os                       # For ChangeDir, has_admin
import stat                     # To help deltree out
from collections import deque   # For storing a window of debug
from telnetlib import Telnet    # For talking to JLink server
import socket
import shutil                   # To delete a directory tree
import signal                   # For CTRL_C_EVENT
import subprocess
import platform                 # Figure out current OS
import re                       # Regular Expression
import serial                   # Pyserial (make sure to do pip install pyserial)
import psutil                   # For killing things (make sure to do pip install psutil)
import requests                 # For HTTP comms with a KMTronic box (do pip install requests)
import u_settings

# Since this function is used by the global variables below it needs
# to be placed here.
def is_linux():
    '''Returns True when system is Linux'''
    return platform.system() == 'Linux'

# Since this function is used by the global variables below it needs
# to be placed here.
def pick_by_os(linux=None, other=None):
    '''
    This is a convenience function for selecting a value based on platform.
    As an example the line below will print out "Linux" when running on a
    Linux platform and "Not Linux" when running on some other platform:
        print( u_utils.pick_by_os(linux="Linux", other="Not Linux") )
    '''
    if is_linux():
        return linux
    return other

# The port that this agent service runs on
# Deliberately NOT a setting, we need to be sure
# everyone uses the same value
AGENT_SERVICE_PORT = 17003

# The maximum number of characters that an agent will
# use from controller_name when constructing a directory
# name for a ubxlib branch to be checked out into
AGENT_WORKING_SUBDIR_CONTROLLER_NAME_MAX_LENGTH = 4

# How long to wait for an install lock in seconds
INSTALL_LOCK_WAIT_SECONDS = u_settings.INSTALL_LOCK_WAIT_SECONDS #(60 * 60)

# The URL for Unity, the unit test framework
UNITY_URL = u_settings.UNITY_URL #"https://github.com/ThrowTheSwitch/Unity"

# The sub-directory that Unity is usually put in
# (off the working directory)
UNITY_SUBDIR = u_settings.UNITY_SUBDIR #"Unity"

# The path to DevCon, a Windows tool that allows
# USB devices to be reset, amongst other things
DEVCON_PATH = u_settings.DEVCON_PATH #"devcon.exe"

# The path to jlink.exe (or just the name 'cos it's on the path)
JLINK_PATH = u_settings.JLINK_PATH #"jlink.exe"

# The port number for SWO trace capture out of JLink
JLINK_SWO_PORT = u_settings.JLINK_SWO_PORT #19021

# The port number for GDB control of ST-LINK GDB server
STLINK_GDB_PORT = u_settings.STLINK_GDB_PORT #61200

# The port number for SWO trace capture out of ST-LINK GDB server
STLINK_SWO_PORT = u_settings.STLINK_SWO_PORT #61300

# The format string passed to strftime()
# for logging prints
TIME_FORMAT = u_settings.TIME_FORMAT #"%Y-%m-%d_%H:%M:%S"

# The default guard time waiting for a platform lock in seconds
PLATFORM_LOCK_GUARD_TIME_SECONDS = u_settings.PLATFORM_LOCK_GUARD_TIME_SECONDS #60 * 60

# The default guard time for downloading to a target in seconds
DOWNLOAD_GUARD_TIME_SECONDS = u_settings.DOWNLOAD_GUARD_TIME_SECONDS #60

# The default guard time for running tests in seconds
RUN_GUARD_TIME_SECONDS = u_settings.RUN_GUARD_TIME_SECONDS #60 * 60

# The default inactivity timer for running tests in seconds
RUN_INACTIVITY_TIME_SECONDS = u_settings.RUN_INACTIVITY_TIME_SECONDS #60 * 5

# The name of the #define that forms the filter string
# for which tests to run
FILTER_MACRO_NAME = u_settings.FILTER_MACRO_NAME #"U_CFG_APP_FILTER"

# The name of the environment variable that indicates we're running under automation
ENV_UBXLIB_AUTO = "U_UBXLIB_AUTO"

# The time for which to wait for something from the
# queue in exe_run().  If this is too short, in a
# multiprocessing world or on a slow machine, it is
# possible to miss things as the task putting things
# on the queue may be blocked from doing so until
# we've decided the queue has been completely emptied
# and moved on
EXE_RUN_QUEUE_WAIT_SECONDS = u_settings.EXE_RUN_QUEUE_WAIT_SECONDS #1

# The number of seconds a USB cutter and the bit positions of
# a KMTronic box are switched off for
HW_RESET_DURATION_SECONDS = u_settings.HW_RESET_DURATION_SECONDS # e.g. 5

# Executable file extension. This will be "" for Linux
# and ".exe" for Windows
EXE_EXT = pick_by_os(linux="", other=".exe")

def keep_going(flag, printer=None, prompt=None):
    '''Check a keep_going flag'''
    do_not_stop = True
    if flag is not None and not flag.is_set():
        do_not_stop = False
        if printer and prompt:
            printer.string("{}aborting as requested.".format(prompt))
    return do_not_stop

# subprocess arguments behaves a little differently on Linux and Windows
# depending if a shell is used or not, which can be read here:
# https://stackoverflow.com/a/15109975
# This function will compensate for these deviations
def subprocess_osify(cmd, shell=True):
    ''' expects an array of strings being [command, param, ...] '''
    if is_linux() and shell:
        line = ''
        for item in cmd:
            # Put everything in a single string and quote args containing spaces
            if ' ' in item:
                line += '\"{}\" '.format(item)
            else:
                line += '{} '.format(item)
        cmd = line
    return cmd

def split_command_line_args(cmd_line):
    ''' Will split a command line string into a list of arguments.
        Quoted arguments will be preserved as one argument '''
    return [p for p in re.split("( |\\\".*?\\\"|'.*?')", cmd_line) if p.strip()]

def get_actual_path(path):
    '''Given a drive number return real path if it is a subst'''
    actual_path = path
    if is_linux():
        return actual_path

    if os.name == 'nt':
        # Get a list of substs
        text = subprocess.check_output("subst",
                                       stderr=subprocess.STDOUT,
                                       shell=True)  # Jenkins hangs without this
        for line in text.splitlines():
            # Lines should look like this:
            # Z:\: => C:\projects\ubxlib_priv
            # So, in this example, if we were given z:\blah
            # then the actual path should be C:\projects\ubxlib_priv\blah
            text = line.decode()
            bits = text.rsplit(": => ")
            if (len(bits) > 1) and (len(path) > 1) and \
               (bits[0].lower()[0:2] == path[0:2].lower()):
                actual_path = bits[1] + path[2:]
                break

    return actual_path

def get_instance_text(instance):
    '''Return the instance as a text string'''
    instance_text = ""
    for idx, item in enumerate(instance):
        if idx == 0:
            instance_text += str(item)
        else:
            instance_text += "." + str(item)
    return instance_text

# Get a list of instances as a text string separated
# by spaces.
def get_instances_text(instances):
    '''Return the instances as a text string'''
    instances_text = ""
    for instance in instances:
        if instance:
            instances_text += " {}".format(get_instance_text(instance))
    return instances_text

def remove_readonly(func, path, exec_info):
    '''Help deltree out'''
    del exec_info
    os.chmod(path, stat.S_IWRITE)
    func(path)

def deltree(directory, printer, prompt):
    '''Remove an entire directory tree'''
    tries = 3
    success = False

    if os.path.isdir(directory):
        # Retry this as sometimes Windows complains
        # that the directory is not empty when it
        # it really should be, some sort of internal
        # Windows race condition
        while not success and (tries > 0):
            try:
                # Need the onerror bit on Winders, see
                # this Stack Overflow post:
                # https://stackoverflow.com/questions/1889597/deleting-directory-in-python
                shutil.rmtree(directory, onerror=remove_readonly)
                success = True
            except OSError as ex:
                if printer and prompt:
                    printer.string("{}ERROR unable to delete \"{}\" {}: \"{}\"".
                                   format(prompt, directory,
                                          ex.errno, ex.strerror))
                sleep(1)
                tries -= 1
    else:
        success = True

    return success

# Some list types aren't quite list types: for instance,
# the lists returned by RPyC look like lists but they
# aren't of type list and so "in", for instance, will fail.
# This converts an instance list (i.e. a list-like object
# containing items that are each another list-like object)
# into a plain-old two-level list.
def copy_two_level_list(instances_in):
    '''Convert instances_in into a true list'''
    instances_out = []
    if instances_in:
        for item1 in instances_in:
            instances_out1 = []
            for item2 in item1:
                instances_out1.append(item2)
            instances_out.append(copy(instances_out1))
    return instances_out

# Check if admin privileges are available, from:
# https://stackoverflow.com/questions/2946746/python-checking-if-a-user-has-administrator-privileges
def has_admin():
    '''Check for administrator privileges'''
    admin = False

    if os.name == 'nt':
        try:
            # only Windows users with admin privileges can read the C:\windows\temp
            if os.listdir(os.sep.join([os.environ.get("SystemRoot", "C:\\windows"),
                                       "temp"])):
                admin = True
        except PermissionError:
            pass
    else:
        # Pylint will complain about the following line but
        # that's OK, it is only executed if we're NOT on Windows
        # and there the geteuid() method will exist
        if "SUDO_USER" in os.environ and os.geteuid() == 0:
            admin = True

    return admin

# Reset a USB port with the given Device Description
def usb_reset(device_description, printer, prompt):
    ''' Reset a device'''
    instance_id = None
    found = False
    success = False

    try:
        # Run devcon and parse the output to find the given device
        printer.string("{}running {} to look for \"{}\"...".   \
                       format(prompt, DEVCON_PATH,
                              device_description))
        cmd = [DEVCON_PATH, "hwids", "=ports"]
        text = subprocess.check_output(subprocess_osify(cmd),
                                       stderr=subprocess.STDOUT,
                                       shell=True)  # Jenkins hangs without this
        for line in text.splitlines():
            # The format of a devcon entry is this:
            #
            # USB\VID_1366&PID_1015&MI_00\6&38E81674&0&0000
            #     Name: JLink CDC UART Port (COM45)
            #     Hardware IDs:
            #         USB\VID_1366&PID_1015&REV_0100&MI_00
            #         USB\VID_1366&PID_1015&MI_00
            #     Compatible IDs:
            #         USB\Class_02&SubClass_02&Prot_00
            #         USB\Class_02&SubClass_02
            #         USB\Class_02
            #
            # Grab what we hope is the instance ID
            line = line.decode()
            if line.startswith("USB"):
                instance_id = line
            else:
                # If the next line is the Name we want then we're done
                if instance_id and ("Name: " + device_description in line):
                    found = True
                    printer.string("{}\"{}\" found with instance ID \"{}\"".
                                   format(prompt, device_description,
                                          instance_id))
                    break
                instance_id = None
        if found:
            # Now run devcon to reset the device
            printer.string("{}running {} to reset device \"{}\"...".   \
                           format(prompt, DEVCON_PATH, instance_id))
            cmd = [DEVCON_PATH, "restart", "@" + instance_id]
            text = subprocess.check_output(subprocess_osify(cmd),
                                           stderr=subprocess.STDOUT,
                                           shell=False)  # Has to be False or devcon won't work
            for line in text.splitlines():
                printer.string("{}{}".format(prompt, line.decode()))
            success = True
        else:
            printer.string("{}device with description \"{}\" not found.".   \
                           format(prompt, device_description))
    except subprocess.CalledProcessError:
        printer.string("{} unable to find and reset device.".format(prompt))

    return success

# Open the required serial port.
def open_serial(serial_name, speed, printer, prompt):
    '''Open serial port'''
    serial_handle = None
    text = "{}: trying to open \"{}\" as a serial port...".    \
           format(prompt, serial_name)
    try:
        return_value = serial.Serial(serial_name, speed, timeout=0.05)
        serial_handle = return_value
        printer.string("{} opened.".format(text))
    except (ValueError, serial.SerialException) as ex:
        printer.string("{}{} while accessing port {}: {}.".
                       format(prompt, type(ex).__name__,
                              serial_handle.name, str(ex)))
    return serial_handle

def open_telnet(port_number, printer, prompt):
    '''Open telnet port on localhost'''
    telnet_handle = None
    text = "{}trying to open \"{}\" as a telnet port on localhost...". \
           format(prompt, port_number)
    try:
        telnet_handle = Telnet("localhost", int(port_number), timeout=5)
        if telnet_handle is not None:
            printer.string("{} opened.".format(text))
        else:
            printer.string("{} failed.".format(text))
    except (socket.error, socket.timeout, ValueError) as ex:
        printer.string("{}{} failed to open telnet {}: {}.".
                       format(prompt, type(ex).__name__,
                              port_number, str(ex)))
    return telnet_handle

def install_lock_acquire(install_lock, printer, prompt, keep_going_flag=None):
    '''Attempt to acquire install lock'''
    timeout_seconds = INSTALL_LOCK_WAIT_SECONDS
    success = False

    if install_lock:
        printer.string("{}waiting for install lock...".format(prompt))
        while not install_lock.acquire(False) and (timeout_seconds > 0) and \
              keep_going(keep_going_flag, printer, prompt):
            sleep(1)
            timeout_seconds -= 1

        if timeout_seconds > 0:
            printer.string("{}got install lock.".format(prompt))
            success = True
        else:
            printer.string("{}failed to aquire install lock.".format(prompt))
    else:
        printer.string("{}warning, there is no install lock.".format(prompt))

    return success

def install_lock_release(install_lock, printer, prompt):
    '''Release install lock'''
    if install_lock:
        install_lock.release()
    printer.string("{}install lock released.".format(prompt))

def fetch_repo(url, directory, branch, printer, prompt,
               submodule_init=True, force=False):
    '''Fetch a repo: directory can be relative or absolute, branch can be a hash'''
    got_code = False
    success = False
    dir_text = directory
    if dir_text == ".":
        dir_text = "this directory"

    if printer and prompt:
        printer.string("{}in directory {}, fetching"
                       " {} to {}.".format(prompt, os.getcwd(),
                                           url, dir_text))
    if not branch:
        branch = "master"
    if os.path.isdir(directory):
        # Update existing code
        with ChangeDir(directory):
            if printer and prompt:
                printer.string("{}updating code in {}...".
                               format(prompt, dir_text))
            target = branch
            if branch.startswith("#"):
                # Actually been given a branch, lose the
                # preceding #
                target = branch[1:len(branch)]
            # Try this once and, if it fails and force is set,
            # do a git reset --hard and try again
            tries = 1
            if force:
                tries += 1
            while tries > 0:
                try:
                    call_list = []
                    call_list.append("git")
                    call_list.append("fetch")
                    call_list.append("origin")
                    call_list.append(target)
                    if printer and prompt:
                        text = ""
                        for item in call_list:
                            if text:
                                text += " "
                            text += item
                        printer.string("{}in {} calling {}...".
                                       format(prompt, os.getcwd(), text))
                    # Try to pull the code
                    text = subprocess.check_output(subprocess_osify(call_list),
                                                   stderr=subprocess.STDOUT,
                                                   shell=True)  # Jenkins hangs without this
                    for line in text.splitlines():
                        if printer and prompt:
                            printer.string("{}{}".format(prompt, line.decode()))
                    got_code = True
                except subprocess.CalledProcessError as error:
                    if printer and prompt:
                        printer.string("{}git returned error {}: \"{}\"".
                                       format(prompt, error.returncode,
                                              error.output))
                if got_code:
                    tries = 0
                else:
                    if force:
                        # git reset --hard
                        printer.string("{}in directory {} calling git reset --hard...".   \
                                       format(prompt, os.getcwd()))
                        try:
                            text = subprocess.check_output(subprocess_osify(["git", "reset",
                                                                             "--hard"]),
                                                           stderr=subprocess.STDOUT,
                                                           shell=True)  # Jenkins hangs without this
                            for line in text.splitlines():
                                if printer and prompt:
                                    printer.string("{}{}".format(prompt, line.decode()))
                        except subprocess.CalledProcessError as error:
                            if printer and prompt:
                                printer.string("{}git returned error {}: \"{}\"".
                                               format(prompt, error.returncode,
                                                      error.output))
                        force = False
                    tries -= 1

        if not got_code:
            # If we still haven't got the code, delete the
            # directory for a true clean start
            deltree(directory, printer, prompt)

    if not os.path.isdir(directory):
        # Clone the repo
        if printer and prompt:
            printer.string("{}cloning from {} into {}...".
                           format(prompt, url, dir_text))
        try:
            text = subprocess.check_output(subprocess_osify(["git", "clone", "-q",
                                                             url, directory]),
                                           stderr=subprocess.STDOUT,
                                           shell=True)  # Jenkins hangs without this
            for line in text.splitlines():
                if printer and prompt:
                    printer.string("{}{}".format(prompt, line.decode()))
            got_code = True
        except subprocess.CalledProcessError as error:
            if printer and prompt:
                printer.string("{}git returned error {}: \"{}\"".
                               format(prompt, error.returncode,
                                      error.output))

    if got_code and os.path.isdir(directory):
        # Check out the correct branch and recurse submodules
        with ChangeDir(directory):
            target = "origin/" + branch
            if branch.startswith("#"):
                # Actually been given a branch, so lose the
                # "origin/" and the preceding #
                target = branch[1:len(branch)]
            if printer and prompt:
                printer.string("{}checking out {}...".
                               format(prompt, target))
            try:
                call_list = ["git", "-c", "advice.detachedHead=false",
                             "checkout", "--no-progress"]
                if submodule_init:
                    call_list.append("--recurse-submodules")
                    printer.string("{}also recursing sub-modules (can take some time" \
                                   " and gives no feedback).".format(prompt))
                call_list.append(target)
                if printer and prompt:
                    text = ""
                    for item in call_list:
                        if text:
                            text += " "
                        text += item
                    printer.string("{}in {} calling {}...".
                                   format(prompt, os.getcwd(), text))
                text = subprocess.check_output(subprocess_osify(call_list),
                                               stderr=subprocess.STDOUT,
                                               shell=True)  # Jenkins hangs without this
                for line in text.splitlines():
                    if printer and prompt:
                        printer.string("{}{}".format(prompt, line.decode()))
                success = True
            except subprocess.CalledProcessError as error:
                if printer and prompt:
                    printer.string("{}git returned error {}: \"{}\"".
                                   format(prompt, error.returncode,
                                          error.output))

    return success

def exe_where(exe_name, help_text, printer, prompt):
    '''Find an executable using where.exe or which on linux'''
    success = False

    try:
        printer.string("{}looking for \"{}\"...".          \
                       format(prompt, exe_name))
        # See here:
        # https://stackoverflow.com/questions/14928860/passing-double-quote-shell-commands-in-python-to-subprocess-popen
        # ...for why the construction "".join() is necessary when
        # passing things which might have spaces in them.
        # It is the only thing that works.
        if is_linux():
            cmd = ["which {}".format(exe_name.replace(":", "/"))]
            printer.string("{}detected linux, calling \"{}\"...".format(prompt, cmd))
        else:
            cmd = ["where", "".join(exe_name)]
            printer.string("{}detected nonlinux, calling \"{}\"...".format(prompt, cmd))
        text = subprocess.check_output(cmd,
                                       stderr=subprocess.STDOUT,
                                       shell=True)  # Jenkins hangs without this
        for line in text.splitlines():
            printer.string("{}{} found in {}".format(prompt, exe_name,
                                                     line.decode()))
        success = True
    except subprocess.CalledProcessError:
        if help_text:
            printer.string("{}ERROR {} not found: {}".  \
                           format(prompt, exe_name, help_text))
        else:
            printer.string("{}ERROR {} not found".      \
                           format(prompt, exe_name))

    return success

def exe_version(exe_name, version_switch, printer, prompt):
    '''Print the version of a given executable'''
    success = False

    if not version_switch:
        version_switch = "--version"
    try:
        text = subprocess.check_output(subprocess_osify(["".join(exe_name), version_switch]),
                                       stderr=subprocess.STDOUT,
                                       shell=True)  # Jenkins hangs without this
        for line in text.splitlines():
            printer.string("{}{}".format(prompt, line.decode()))
        success = True
    except subprocess.CalledProcessError:
        printer.string("{}ERROR {} either not found or didn't like {}". \
                       format(prompt, exe_name, version_switch))

    return success

def exe_terminate(process_pid):
    '''Jonathan's killer'''
    process = psutil.Process(process_pid)
    for proc in process.children(recursive=True):
        proc.terminate()
    process.terminate()

def read_from_process_and_queue(process, read_queue):
    '''Read from a process, non-blocking'''
    while process.poll() is None:
        string = process.stdout.readline().decode()
        if string and string != "":
            read_queue.put(string)
        else:
            sleep(0.1)

def queue_get_no_exception(the_queue, block=True, timeout=None):
    '''A version of queue.get() that doesn't throw an Empty exception'''
    thing = None

    try:
        thing = the_queue.get(block=block, timeout=timeout)
    except queue.Empty:
        pass

    return thing

def capture_env_var(line, env, printer, prompt):
    '''A bit of exe_run that needs to be called from two places'''
    # Find a KEY=VALUE bit in the line,
    # parse it out and put it in the dictionary
    # we were given
    pair = line.split('=', 1)
    if len(pair) == 2:
        env[pair[0]] = pair[1].rstrip()
    else:
        printer.string("{}WARNING: not an environment variable: \"{}\"".
                       format(prompt, line))

# Note: if returned_env is given then "set"
# will be executed after the exe and the environment
# variables will be returned in it.  The down-side
# of this is that the return value of the exe is,
# of course, lost.
def exe_run(call_list, guard_time_seconds=None, printer=None, prompt=None,
            shell_cmd=False, set_env=None, returned_env=None,
            bash_cmd=False, keep_going_flag=None):
    '''Call an executable, printing out what it does'''
    success = False
    start_time = time()
    flibbling = False
    kill_time = None
    read_time = start_time

    if returned_env is not None:
        # The caller wants the environment after the
        # command has run, so, from this post:
        # https://stackoverflow.com/questions/1214496/how-to-get-environment-from-a-subprocess
        # append a tag that we can detect
        # to the command and then call set,
        # from which we can parse the environment
        call_list.append("&&")
        call_list.append("echo")
        call_list.append("flibble")
        call_list.append("&&")
        if is_linux():
            call_list.append("env")
            bash_cmd = True
        else:
            call_list.append("set")
        # I've seen output from set get lost,
        # possibly because the process ending
        # is asynchronous with stdout,
        # so add a delay here as well
        call_list.append("&&")
        call_list.append("sleep")
        call_list.append("2")

    try:
        popen_keywords = {
            'stdout': subprocess.PIPE,
            'stderr': subprocess.STDOUT,
            'shell': shell_cmd,
            'env': set_env,
            'executable': "bin/bash" if bash_cmd else None
        }
        # Call the thang
        # Note: used to have bufsize=1 here but it turns out
        # that is ignored 'cos the output is considered
        # binary.  Seems to work in any case, I guess
        # Winders, at least, is in any case line-buffered.
        process = subprocess.Popen(subprocess_osify(call_list, shell=shell_cmd),
                                   **popen_keywords)
        if printer:
            printer.string("{}{}, pid {} started with guard time {} second(s)". \
                           format(prompt, call_list[0], process.pid,
                                  guard_time_seconds))
        # This is over complex but, unfortunately, necessary.
        # At least one thing that we try to run, nrfjprog, can
        # crash silently: just hangs and sends no output.  However
        # it also doesn't flush and close stdout and so read(1)
        # will hang, meaning we can't read its output as a means
        # to check that it has hung.
        # So, here we poll for the return value, which is normally
        # how things will end, and we start another thread which
        # reads from the process's stdout.  If the thread sees
        # nothing for guard_time_seconds then we terminate the
        # process.
        read_queue = queue.Queue()
        read_thread = threading.Thread(target=read_from_process_and_queue,
                                       args=(process, read_queue))
        read_thread.start()
        while process.poll() is None:
            if keep_going_flag is None or keep_going(keep_going_flag, printer, prompt):
                if guard_time_seconds and (kill_time is None) and   \
                   ((time() - start_time > guard_time_seconds) or
                    (time() - read_time > guard_time_seconds)):
                    kill_time = time()
                    if printer:
                        printer.string("{}guard time of {} second(s)." \
                                       " expired, stopping {}...".
                                       format(prompt, guard_time_seconds,
                                              call_list[0]))
                    exe_terminate(process.pid)
            else:
                exe_terminate(process.pid)
            line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
            read_time = time()
            while line is not None:
                line = line.rstrip()
                if flibbling:
                    capture_env_var(line, returned_env, printer, prompt)
                else:
                    if returned_env is not None and "flibble" in line:
                        flibbling = True
                    else:
                        printer.string("{}{}".format(prompt, line))
                line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
                read_time = time()
            sleep(0.1)
        # Can't join() read_thread here as it might have
        # blocked on a read() (if nrfjprog has anything to
        # do with it).  It will be tidied up when this process
        # exits.

        # There may still be stuff on the queue, read it out here
        line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
        while line is not None:
            line = line.rstrip()
            if flibbling:
                capture_env_var(line, returned_env, printer, prompt)
            else:
                if returned_env is not None and "flibble" in line:
                    flibbling = True
                else:
                    printer.string("{}{}".format(prompt, line))
            line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
        # There may still be stuff in the buffer after
        # the application has finished running so flush that
        # out here
        line = process.stdout.readline().decode()
        while line:
            line = line.rstrip()
            if flibbling:
                capture_env_var(line, returned_env, printer, prompt)
            else:
                if returned_env is not None and "flibble" in line:
                    flibbling = True
                else:
                    printer.string("{}{}".format(prompt, line))
            line = process.stdout.readline().decode()
        if (process.poll() == 0) and kill_time is None:
            success = True
        if printer:
            printer.string("{}{}, pid {} ended with return value {}.".    \
                           format(prompt, call_list[0],
                                  process.pid, process.poll()))
    except ValueError as ex:
        if printer:
            printer.string("{}failed: {} while trying to execute {}.". \
                           format(prompt, type(ex).__name__, str(ex)))
    except KeyboardInterrupt as ex:
        process.kill()
        raise KeyboardInterrupt from ex

    return success

def set_process_prio_high():
    '''Set the priority of the current process to high'''
    if is_linux():
        print("Setting process priority currently not supported for Linux")
        # It should be possible to set prio with:
        # psutil.Process().nice(-10)
        # However we get "[Errno 13] Permission denied" even when run as root
    else:
        psutil.Process().nice(psutil.HIGH_PRIORITY_CLASS)

def set_process_prio_normal():
    '''Set the priority of the current process to normal'''
    if is_linux():
        print("Setting process priority currently not supported for Linux")
        # It should be possible to set prio with:
        # psutil.Process().nice(0)
        # However we get "[Errno 13] Permission denied" even when run as root
    else:
        psutil.Process().nice(psutil.NORMAL_PRIORITY_CLASS)

# Simple SWO decoder: only handles single bytes of application
# data at a time, i.e. what ITM_SendChar() sends.
# This stolen from here:
# https://stackoverflow.com/questions/431684/how-do-i-change-the-working-directory-in-python
def wait_for_completion(_list, purpose, guard_time_seconds,
                        printer, prompt, keep_going_flag):
    '''Wait for a completion list to empty'''
    completed = False

    if len(_list) > 0:
        timeout_seconds = guard_time_seconds
        printer.string("{}waiting up to {} second(s)"   \
                       " for {} completion...".
                       format(prompt, guard_time_seconds, purpose))
        count = 0
        while (len(_list) > 0) and                 \
              ((guard_time_seconds == 0) or (timeout_seconds > 0)) and \
              keep_going(keep_going_flag, printer, prompt):
            sleep(1)
            timeout_seconds -= 1
            count += 1
            if count == 30:
                list_text = ""
                for item in _list:
                    if list_text:
                        list_text += ", "
                    list_text += str(item)
                printer.string("{}still waiting {} second(s)" \
                               " for {} to complete (waiting" \
                               " for {}).".
\ format(prompt, timeout_seconds, purpose, list_text)) count = 0 if len(_list) == 0: completed = True printer.string("{}{} completed.".format(prompt, purpose)) return completed def reset_nrf_target(connection, printer, prompt): '''Reset a Nordic NRFxxx target''' call_list = [] printer.string("{}resetting target...".format(prompt)) # Assemble the call list call_list.append("nrfjprog") call_list.append("--reset") if connection and "debugger" in connection and connection["debugger"]: call_list.append("-s") call_list.append(connection["debugger"]) # Print what we're gonna do tmp = "" for item in call_list: tmp += " " + item printer.string("{}in directory {} calling{}". \ format(prompt, os.getcwd(), tmp)) # Call it return exe_run(call_list, 60, printer, prompt) def usb_cutter_reset(usb_cutter_id_strs, printer, prompt): '''Cut and then un-cut USB cables using Cleware USB cutters''' # First switch the USB cutters off action = "1" count = 0 call_list_root = ["usbswitchcmd"] call_list_root.append("-s") call_list_root.append("-n") while count < 2: for usb_cutter_id_str in usb_cutter_id_strs: call_list = call_list_root.copy() call_list.append(usb_cutter_id_str) call_list.append(action) # Print what we're gonna do tmp = "" for item in call_list: tmp += " " + item if printer: printer.string("{}in directory {} calling{}". \ format(prompt, os.getcwd(), tmp)) # Set shell to keep Jenkins happy exe_run(call_list, 0, printer, prompt, shell_cmd=True) # Wait 5ish seconds if printer: printer.string("{}waiting {} second(s)...". \ format(prompt, HW_RESET_DURATION_SECONDS)) sleep(HW_RESET_DURATION_SECONDS) # "0" to switch the USB cutters on again action = "0" count += 1 def kmtronic_reset(ip_address, hex_bitmap, printer, prompt): '''Cut and then un-cut power using a KMTronic box''' # KMTronic is a web relay box which will be controlling # power to, for instance, EVKs The last byte of the URL # is a hex bitmap of the outputs where 0 sets off and 1 # sets on # Take only the last two digits of the hex bitmap hex_bitmap_len = len(hex_bitmap) hex_bitmap = hex_bitmap[hex_bitmap_len - 2:hex_bitmap_len] kmtronic_off = "http://" + ip_address + "FFE0" + hex_bitmap kmtronic_on = "http://" + ip_address + "FFE0" + "{0:x}".format(int(hex_bitmap, 16) ^ 0xFF) try: # First switch the given bit positions off if printer: printer.string("{}sending {}". \ format(prompt, kmtronic_off)) response = requests.get(kmtronic_off) # Wait 5ish seconds if printer: printer.string("{}...received response {}, waiting {} second(s)...". \ format(prompt, response.status_code, HW_RESET_DURATION_SECONDS)) sleep(HW_RESET_DURATION_SECONDS) # Switch the given bit positions on if printer: printer.string("{}sending {}".format(prompt, kmtronic_on)) response = requests.get(kmtronic_on) if printer: printer.string("{}...received response {}.". \ format(prompt, response.status_code)) except requests.ConnectionError: if printer: printer.string("{}unable to connect to KMTronic box at {}.". \ format(prompt, ip_address)) # Look for a single line anywhere in message # beginning with "test: ". This must be followed by # "x.y.z a.b.c m.n.o" (i.e. instance IDs space separated) # and then an optional "blah" filter string, or just "*" # and an optional "blah" filter string or "None". # Valid examples are: # # test: 1 # test: 1 3 7 # test: 1.0.3 3 7.0 # test: 1 2 example # test: 1.1 8 portInit # test: * # test: * port # test: none # # Filter strings must NOT begin with a digit. # There cannot be more than one * or a * with any other instance. 
# There can only be one filter string. # Only whitespace is expected after this on the line. # Anything else is ignored. # Populates instances with the "0 4.5 13.5.1" bit as instance # entries [[0], [4, 5], [13, 5, 1]] and returns the filter # string, if any. def commit_message_parse(message, instances, printer=None, prompt=None): '''Find stuff in a commit message''' instances_all = False instances_local = [] filter_string_local = None found = False if message: # Search through message for a line beginning # with "test:" if printer: printer.string("{}### parsing message to see if it contains a test directive...". \ format(prompt)) lines = message.split("\\n") for idx1, line in enumerate(lines): if printer: printer.string("{}text line {}: \"{}\"".format(prompt, idx1 + 1, line)) if line.lower().startswith("test:"): found = True instances_all = False # Pick through what follows parts = line[5:].split() for part in parts: if instances_all and (part[0].isdigit() or part == "*" or part.lower() == "none"): # If we've had a "*" and this is another one # or it begins with a digit then this is # obviously not a "test:" line, # leave the loop and try again. instances_local = [] filter_string_local = None if printer: printer.string("{}...badly formed test directive, ignoring.". \ format(prompt)) found = False break if filter_string_local: # If we've had a filter string then nothing # must follow so this is not a "test:" line, # leave the loop and try again. instances_local = [] filter_string_local = None if printer: printer.string("{}...extraneous characters after test directive," \ " ignoring.".format(prompt)) found = False break if part[0].isdigit(): # If this part begins with a digit it could # be an instance containing numbers instance = [] bad = False for item in part.split("."): try: instance.append(int(item)) except ValueError: # Some rubbish, not a test line so # leave the loop and try the next # line bad = True break if bad: instances_local = [] filter_string_local = None if printer: printer.string("{}...badly formed test directive, ignoring.". \ format(prompt)) found = False break if instance: instances_local.append(instance[:]) elif part == "*": if instances_local: # If we've already had any instances # this is obviously not a test line, # leave the loop and try again instances_local = [] filter_string_local = None if printer: printer.string("{}...badly formed test directive, ignoring.". \ format(prompt)) found = False break # If we haven't had any instances and # this is a * then it means "all" instances_local.append(part) instances_all = True elif part.lower() == "none": if instances_local: # If we've already had any instances # this is obviously not a test line, # leave the loop and try again if printer: printer.string("{}...badly formed test directive, ignoring.". \ format(prompt)) found = False instances_local = [] filter_string_local = None break elif instances_local and not part == "*": # If we've had an instance and this # is not a "*" then this must be a # filter string filter_string_local = part else: # Found some rubbish, not a "test:" # line after all, leave the loop # and try the next line instances_local = [] filter_string_local = None if printer: printer.string("{}...badly formed test directive, ignoring.". 
\ format(prompt)) found = False break if found: text = "found test directive with" if instances_local: text += " instance(s)" + get_instances_text(instances_local) if filter_string_local: text += " and filter \"" + filter_string_local + "\"" else: text += " instances \"None\"" if printer: printer.string("{}{}.".format(prompt, text)) break if printer: printer.string("{}no test directive found".format(prompt)) if found and instances_local: instances.extend(instances_local[:]) return found, filter_string_local
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 198, 7061, 6, 37058, 4465, 10340, 290, 275, 8158, 2637, 7061, 198, 198, 11748, 16834, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, ...
2.041316
22,679
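The `exe_run` helper in the script above deliberately avoids blocking reads: tools such as nrfjprog can hang without flushing stdout, so a separate thread pumps the child's output into a `queue.Queue` and the main loop polls that queue, enforcing `guard_time_seconds` if nothing arrives. A minimal, self-contained sketch of that watchdog pattern (the function names and timeout values here are illustrative, not taken from the script):

import queue
import subprocess
import threading
import time

def _pump(proc, q):
    # Reader thread: forward child stdout lines into the queue so the
    # parent never blocks on readline() against a hung process.
    while proc.poll() is None:
        line = proc.stdout.readline()
        if line:
            q.put(line.decode().rstrip())

def run_with_guard(cmd, guard_seconds):
    # Run cmd, terminating it if no output arrives within guard_seconds.
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    q = queue.Queue()
    threading.Thread(target=_pump, args=(proc, q), daemon=True).start()
    last_read = time.time()
    while proc.poll() is None:
        try:
            print(q.get(timeout=0.5))
            last_read = time.time()
        except queue.Empty:
            if time.time() - last_read > guard_seconds:
                proc.terminate()  # guard time expired: assume the child hung
    return proc.poll()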
import numpy as np
import astropy.modeling.blackbody as bb
import astropy.constants as const
from astropy.io import fits
from scipy.interpolate import interp2d


def get_a(P, M_star, M_p):
    """
    Use Kepler's third law to derive the star-planet separation.
    """
    return (P ** 2 * const.G.value * (M_star + M_p) / (4 * np.pi ** 2)) ** (1/3)
[ 11748, 299, 32152, 355, 45941, 198, 11748, 6468, 28338, 13, 4666, 10809, 13, 13424, 2618, 355, 275, 65, 220, 198, 11748, 6468, 28338, 13, 9979, 1187, 355, 1500, 198, 6738, 6468, 28338, 13, 952, 1330, 11414, 198, 6738, 629, 541, 88, 13...
2.648438
128
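As a quick sanity check of `get_a` above: Earth's orbital period and the Sun's mass should recover roughly one astronomical unit, since `const.G.value` puts everything in SI units. A usage sketch (the constant values come from astropy and are not part of the original file):

import astropy.constants as const

P_earth = 365.25 * 86400.0  # orbital period in seconds
a = get_a(P_earth, const.M_sun.value, const.M_earth.value)
print(a / const.au.value)   # ~1.0, i.e. about one astronomical unit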
import torch.nn.utils.prune as prune
import torch
from src.vanilla_pytorch.utils import count_rem_weights
from src.vanilla_pytorch.models.linearnets import LeNet, init_weights
from src.vanilla_pytorch.models.resnets import Resnets


def get_masks(model, prune_amts=None):
    """
    Prune the lowest p% of weights by magnitude, per layer.

    :param model: model to prune
    :param prune_amts: dictionary of prune rates per layer type (defaults to 0.2, as per the paper)
    :return: the created masks; the model has served its purpose
    """
    # TODO: Adjust pruning with output layer
    if prune_amts is None:  # i.e. dict is empty, use the default prune rate of 0.2
        prune_amts = {"linear": 0.2, "conv": 0.2, "last": 0.2}

    for i, (name, module) in enumerate(model.named_modules()):
        # prune 20% of connections in all 2D-conv layers
        if isinstance(module, torch.nn.Conv2d):
            module = prune.l1_unstructured(module, name='weight', amount=prune_amts['conv'])
        # prune 20% of connections in all linear layers
        elif isinstance(module, torch.nn.Linear):
            module = prune.l1_unstructured(module, name='weight', amount=prune_amts['linear'])

    masks = list(model.named_buffers())
    remove_pruning(model)
    return masks


if __name__ == '__main__':
    net = Resnets(in_channels=3)
    net.apply(init_weights)
    prune_rate = 0.8
    prune_custom = {"linear": 0.2, "conv": 0.2, "last": 0.1}
    for i in range(3):
        masks = get_masks(net, prune_amts=prune_custom)
        print(f"Count zero : {count_rem_weights(net)}")
[ 11748, 28034, 13, 20471, 13, 26791, 13, 1050, 1726, 355, 778, 1726, 198, 11748, 28034, 198, 6738, 12351, 13, 10438, 5049, 62, 9078, 13165, 354, 13, 26791, 1330, 954, 62, 2787, 62, 43775, 198, 6738, 12351, 13, 10438, 5049, 62, 9078, 13...
2.486529
631
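To see what `get_masks` above actually collects: `prune.l1_unstructured` registers a `weight_mask` buffer on each pruned module, and `model.named_buffers()` is what exposes those masks. A small demonstration on a throwaway layer (the layer shape and prune amount are illustrative):

import torch
import torch.nn.utils.prune as prune

layer = torch.nn.Linear(4, 2)
prune.l1_unstructured(layer, name='weight', amount=0.5)  # zero the 50% smallest weights by |value|

# The mask is a 0/1 tensor with the same shape as the weight.
for name, buf in layer.named_buffers():
    print(name, tuple(buf.shape), int(buf.sum()))  # weight_mask (2, 4) 4

prune.remove(layer, 'weight')  # make pruning permanent and drop the mask buffer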
""" read_instance_BH-cyclic.py """ ''' [seed graph] V_C : "V_C" E_C : "E_C" [core specification] ell_LB : "\ell_{\rm LB}" ell_UB : "\ell_{\rm UB}" cs_LB : "\textsc{cs}_{\rm LB}" cs_UB : "\textsc{cs}_{\rm UB}" ''' import sys # prepare a set of chemical rooted tree if __name__=="__main__": V_C, E_C, \ E_ge_two, E_ge_one, E_zero_one, E_equal_one, \ I_ge_two, I_ge_one, I_zero_one, I_equal_one, \ ell_LB, ell_UB, n_LB_int, n_UB_int, \ n_LB, n_star, rho, \ ch_LB, ch_UB, bl_LB, bl_UB, \ Lambda, Lambda_dg_int, Gamma_int_ac, Gamma_int, \ Lambda_star, na_LB, na_UB, Lambda_int, \ na_LB_int, na_UB_int, ns_LB_int, ns_UB_int, \ ac_LB_int, ac_UB_int, ec_LB_int, ec_UB_int, \ bd2_LB, bd2_UB, bd3_LB, bd3_UB, dg_LB, dg_UB = read_seed_graph(sys.argv[1]) set_F, psi_epsilon, Code_F, n_psi, deg_r, \ beta_r, atom_r, ht, Lambda_ex = prepare_fringe_trees(sys.argv[2]) # print(V_C) # print(E_C) # print(E_ge_two) # print(E_ge_one) # print(E_zero_one) # print(E_equal_one) # print(ell_LB) # print(ell_UB) # print(bl_UB) for psi in (set_F + [psi_epsilon]): print(str(Code_F[psi]) + " " + str(n_psi[Code_F[psi]]) + " " + \ str(ht[Code_F[psi]]) + " " + str(atom_r[Code_F[psi]]) + " " + \ str(deg_r[Code_F[psi]]) + " " + str(beta_r[Code_F[psi]])) # print(Lambda_ex) # set_F_v = {v : set_F for v in V_C} # set_F_E = set_F # n_C = max(psi.numVertex - 1 for v in V_C for psi in set_F_v[v]) # n_T = max(psi.numVertex - 1 for psi in set_F_E) # n_F = max(psi.numVertex - 1 for psi in set_F_E) # print(str(n_C) + " " + str(n_T) + " " + str(n_F)) MAX_VAL = 4 val = {"C": 4, "O": 2, "N": 3} n_H = dict() na_alpha_ex = {ele : {i + 1 : 0} for i in range(len(set_F)) for ele in Lambda_ex} for i, psi in enumerate(set_F): n_H_tmp = {d : 0 for d in range(MAX_VAL)} na_ex_tmp = {ele : 0 for ele in Lambda_ex} for u, (ele, dep) in enumerate(psi.vertex[1:]): beta_tmp = 0 na_ex_tmp[ele] += 1 for v in psi.adj[u + 1]: beta_tmp += psi.beta[u + 1][v] d_tmp = val[ele] - beta_tmp n_H_tmp[d_tmp] += 1 for ele, d in na_alpha_ex.items(): d[i + 1] = na_ex_tmp[ele] n_H[i + 1] = n_H_tmp print(n_H) print(na_alpha_ex)
[ 37811, 198, 961, 62, 39098, 62, 33, 39, 12, 15539, 291, 13, 9078, 198, 37811, 198, 198, 7061, 6, 198, 58, 28826, 4823, 60, 198, 220, 220, 569, 62, 34, 220, 220, 220, 220, 220, 220, 220, 1058, 366, 53, 62, 34, 1, 198, 220, 220,...
1.748765
1,417
from .system import *
from .colours import *
[ 6738, 764, 10057, 1330, 1635, 198, 6738, 764, 4033, 4662, 1330, 1635, 198, 220, 220, 220, 220, 220, 220, 220, 220, 198 ]
2.454545
22
# Lint as: python3
# Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ServerState save."""

import functools
import os

import attr
import tensorflow as tf
import tensorflow_federated as tff

from tensorflow_federated.python.examples.mnist import models
from tensorflow_federated.python.research.utils import checkpoint_utils


if __name__ == '__main__':
  tf.compat.v1.enable_v2_behavior()
  tf.test.main()
[ 2, 406, 600, 355, 25, 21015, 18, 198, 2, 15069, 13130, 11, 383, 309, 22854, 37535, 35089, 515, 46665, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, ...
3.419014
284
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import peewee
from flask import current_app, abort
from flask.ext.login import AnonymousUserMixin, UserMixin
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from peewee import Model, IntegerField, CharField, PrimaryKeyField

from website.app import db_wrapper, login_manager
from website.http.main_exception import MainException
from werkzeug.security import check_password_hash, generate_password_hash

"""
"""

login_manager.anonymous_user = AnonymousUser
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 11748, 613, 413, 1453, 198, 6738, 42903, 1330, 1459, 62, 1324, 11, 397, 419, 198, 6738, 42903, 13, 2302, 13, 38235, 1...
3.375796
157
# application
import application
[ 2, 3586, 198, 11748, 3586 ]
6.4
5
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# (c) 2012 Michal Kalewski  <mkalewski at cs.put.poznan.pl>
#
# This file is a part of the Simple Network Simulator (sim2net) project.
# USE, MODIFICATION, COPYING AND DISTRIBUTION OF THIS SOFTWARE IS SUBJECT TO
# THE TERMS AND CONDITIONS OF THE MIT LICENSE.  YOU SHOULD HAVE RECEIVED A COPY
# OF THE MIT LICENSE ALONG WITH THIS SOFTWARE; IF NOT, YOU CAN DOWNLOAD A COPY
# FROM HTTP://WWW.OPENSOURCE.ORG/.
#
# For bug reports, feature and support requests please visit
# <https://github.com/mkalewski/sim2net/issues>.

"""
Provides an implementation of a constant node speed.  In this case a speed of
a node is constant at a given value.
"""

from math import fabs

from sim2net.speed._speed import Speed
from sim2net.utility.validation import check_argument_type


__docformat__ = 'reStructuredText'
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 628, 198, 2, 357, 66, 8, 2321, 2843, 282, 12612, 46151, 220, 1279, 28015, 282, 46151, 379, 50115, 13, 1996, 13, 7501, 89, ...
3.136531
271
from nexula.nexula_utility.utility_import_var import import_class
[ 6738, 497, 87, 4712, 13, 12413, 4712, 62, 315, 879, 13, 315, 879, 62, 11748, 62, 7785, 1330, 1330, 62, 4871, 628 ]
3.045455
22
import os
from configparser import ConfigParser

cfg = ConfigParser()
#PATH_CUR = os.getcwd() + '/pynori'
PATH_CUR = os.path.dirname(__file__)
cfg.read(PATH_CUR+'/config.ini')

# PREPROCESSING
ENG_LOWER = cfg.getboolean('PREPROCESSING', 'ENG_LOWER')
[ 11748, 28686, 198, 6738, 4566, 48610, 1330, 17056, 46677, 198, 198, 37581, 796, 17056, 46677, 3419, 198, 2, 34219, 62, 34, 4261, 796, 28686, 13, 1136, 66, 16993, 3419, 1343, 31051, 79, 2047, 10145, 6, 198, 34219, 62, 34, 4261, 796, 28...
2.432692
104
#
# THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS
# FOR A PARTICULAR PURPOSE. THIS CODE AND INFORMATION ARE NOT SUPPORTED BY XEBIALABS.
#

import com.xhaus.jyson.JysonCodec as json

if not servicenowServer:
    raise Exception("ServiceNow server ID must be provided")

if not username:
    username = servicenowServer["username"]
if not password:
    password = servicenowServer["password"]

servicenowUrl = servicenowServer['url']
credentials = CredentialsFallback(servicenowServer, username, password).getCredentials()

content = None
RESPONSE_OK_STATUS = 200

print "Sending content %s" % content

servicenowAPIUrl = servicenowUrl + '/api/now/v1/table/%s?sysparm_display_value=true&sysparm_limit=1000&sysparm_query=%s' % (tableName, query)
servicenowResponse = XLRequest(servicenowAPIUrl, 'GET', content, credentials['username'], credentials['password'], 'application/json').send()

if servicenowResponse.status == RESPONSE_OK_STATUS:
    json_data = json.loads(servicenowResponse.read())
    rows = {}
    for item in json_data['result']:
        row = item['number']
        rows[row] = get_row_data(item)
    data = rows
else:
    error = json.loads(servicenowResponse.read())
    if 'Invalid table' in error['error']['message']:
        print "Invalid Table Name"
        data = {"Invalid table name"}
        servicenowResponse.errorDump()
    else:
        print "Failed to run query in Service Now"
        servicenowResponse.errorDump()
        sys.exit(1)
[ 2, 198, 2, 12680, 42714, 5357, 38044, 15986, 36592, 2389, 1961, 366, 1921, 3180, 1, 42881, 34764, 56, 3963, 15529, 509, 12115, 11, 412, 10554, 1137, 7788, 32761, 1961, 6375, 198, 2, 8959, 49094, 11, 47783, 2751, 21728, 5626, 40880, 5390...
2.805217
575
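For reference, outside the XL Release runtime (where `XLRequest` and `servicenowServer` are provided), the same ServiceNow Table API query can be issued with plain `requests`. A sketch with placeholder instance, table, and credential values:

import requests

url = "https://example.service-now.com/api/now/v1/table/incident"
params = {
    "sysparm_display_value": "true",
    "sysparm_limit": 1000,
    "sysparm_query": "active=true",
}
resp = requests.get(url, params=params, auth=("user", "password"))
if resp.status_code == 200:
    for item in resp.json()["result"]:
        print(item["number"])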
# Generated by Django 2.2.10 on 2020-03-31 15:33

from django.db import migrations
[ 2, 2980, 515, 416, 37770, 362, 13, 17, 13, 940, 319, 12131, 12, 3070, 12, 3132, 1315, 25, 2091, 198, 198, 6738, 42625, 14208, 13, 9945, 1330, 15720, 602, 628 ]
2.8
30
""" The Depth First Search (DFS) The goal of a dfs is to search as deeply as possible, connecting as many nodes in the graph as possible and branching where necessary. Think of the BFS that builds a search tree one level at a time, whereas the DFS creates a search tree by exploring one branch of the tree as deeply as possible. As with bfs the dfs makes use of `predecessor` links to construct the tree. In addition, the dfs will make use of two additional instance variables in the Vertex class, `discovery` and `finish_time`. predecessor : same as bfs discovery : tracks the number of steps in the algorithm before a vertex is first encountered; finish_time : is the number of steps before a vertex is colored black """ from datastruct.graph import Vertex, Graph
[ 37811, 198, 464, 36350, 3274, 11140, 357, 8068, 50, 8, 628, 220, 220, 220, 383, 3061, 286, 257, 288, 9501, 318, 284, 2989, 355, 7744, 355, 1744, 11, 14320, 355, 867, 13760, 287, 262, 4823, 355, 1744, 290, 198, 220, 220, 220, 49526, ...
3.551724
232
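The docstring above describes the bookkeeping but not the traversal itself; a minimal sketch of DFS with `discovery` and `finish_time` counters over a plain adjacency dict follows (this illustrates the scheme described, not the actual datastruct.graph implementation):

def dfs(graph):
    # graph: dict mapping vertex -> list of neighbours
    time = 0
    discovery, finish_time, predecessor = {}, {}, {}

    def visit(u):
        nonlocal time
        time += 1
        discovery[u] = time          # step at which u is first encountered
        for v in graph[u]:
            if v not in discovery:   # unvisited: follow this branch as deeply as possible
                predecessor[v] = u
                visit(v)
        time += 1
        finish_time[u] = time        # step at which u is colored black

    for u in graph:                  # restart in case the graph is disconnected
        if u not in discovery:
            visit(u)
    return discovery, finish_time, predecessor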
def sysrc(value):
    """Call sysrc.

    CLI Example:

    .. code-block:: bash

        salt '*' freebsd_common.sysrc sshd_enable=YES
        salt '*' freebsd_common.sysrc static_routes
    """
    return __salt__['cmd.run_all']("sysrc %s" % value)
[ 4299, 827, 10677, 7, 8367, 2599, 198, 220, 220, 220, 37227, 14134, 827, 10677, 13, 198, 220, 220, 220, 43749, 17934, 25, 628, 220, 220, 220, 11485, 2438, 12, 9967, 3712, 27334, 628, 220, 220, 220, 220, 220, 220, 220, 8268, 705, 9, ...
2.212389
113
from .rest import RestClient
[ 6738, 764, 2118, 1330, 8324, 11792, 628 ]
4.285714
7
# coding=utf-8 # # The MIT License (MIT) # # Copyright (c) 2016-2018 yutiansut/QUANTAXIS # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import QUANTAXIS as QA import random """ """ B = QA.QA_BacktestBroker() AC = QA.QA_Account() """ # AC.reset_assets(assets) # Order=AC.send_order(code='000001',amount=1000,time='2018-03-21',towards=QA.ORDER_DIRECTION.BUY,price=0,order_model=QA.ORDER_MODEL.MARKET,amount_model=QA.AMOUNT_MODEL.BY_AMOUNT) # dealmes=B.receive_order(QA.QA_Event(order=Order,market_data=data)) # AC.receive_deal(dealmes) # risk=QA.QA_Risk(AC) """ AC.reset_assets(20000000) # simple_backtest(AC, QA.QA_fetch_stock_block_adv( ).code[0:10], '2017-01-01', '2018-01-31') print(AC.message) AC.save() risk = QA.QA_Risk(AC) print(risk.message) risk.save()
[ 2, 19617, 28, 40477, 12, 23, 198, 2, 198, 2, 383, 17168, 13789, 357, 36393, 8, 198, 2, 198, 2, 15069, 357, 66, 8, 1584, 12, 7908, 331, 315, 1547, 315, 14, 10917, 1565, 5603, 55, 1797, 198, 2, 198, 2, 2448, 3411, 318, 29376, 75...
2.886914
619
""" field.py Class instance used for modifying field via Display window. """ # Load the needed packages from functools import partial from ..core import Variable, Component, QtGui, QtCore
[ 37811, 198, 3245, 13, 9078, 198, 198, 9487, 4554, 973, 329, 30620, 2214, 2884, 16531, 4324, 13, 198, 37811, 198, 198, 2, 8778, 262, 2622, 10392, 198, 6738, 1257, 310, 10141, 1330, 13027, 198, 198, 6738, 11485, 7295, 1330, 35748, 11, 3...
3.84
50
from bson.errors import InvalidId
from django.core.exceptions import ValidationError
from django.utils.encoding import smart_str
from mongoengine import dereference
from mongoengine.base.document import BaseDocument
from mongoengine.document import Document
from rest_framework import serializers
from mongoengine.fields import ObjectId

import sys

if sys.version_info[0] >= 3:
[ 6738, 275, 1559, 13, 48277, 1330, 17665, 7390, 198, 6738, 42625, 14208, 13, 7295, 13, 1069, 11755, 1330, 3254, 24765, 12331, 198, 6738, 42625, 14208, 13, 26791, 13, 12685, 7656, 1330, 4451, 62, 2536, 198, 6738, 285, 25162, 18392, 1330, ...
3.754902
102
import pytest

from fuzz_lightyear.datastore import _ALL_POST_FUZZ_HOOKS_BY_OPERATION
from fuzz_lightyear.datastore import _ALL_POST_FUZZ_HOOKS_BY_TAG
from fuzz_lightyear.datastore import _RERUN_POST_FUZZ_HOOKS_BY_OPERATION
from fuzz_lightyear.datastore import _RERUN_POST_FUZZ_HOOKS_BY_TAG
from fuzz_lightyear.datastore import get_excluded_operations
from fuzz_lightyear.datastore import get_included_tags
from fuzz_lightyear.datastore import get_non_vulnerable_operations
from fuzz_lightyear.datastore import get_user_defined_mapping
from fuzz_lightyear.plugins import get_enabled_plugins
from fuzz_lightyear.request import get_victim_session_factory
from fuzz_lightyear.supplements.abstraction import get_abstraction
[ 11748, 12972, 9288, 198, 198, 6738, 26080, 62, 2971, 1941, 13, 19608, 459, 382, 1330, 4808, 7036, 62, 32782, 62, 38989, 30148, 62, 39, 15308, 50, 62, 17513, 62, 31054, 6234, 198, 6738, 26080, 62, 2971, 1941, 13, 19608, 459, 382, 1330,...
2.995851
241
"""Module for a Data Vault field.""" from typing import Optional from . import ( FIELD_PREFIX, FIELD_SUFFIX, METADATA_FIELDS, TABLE_PREFIXES, UNKNOWN, FieldDataType, FieldRole, TableType, )
[ 37811, 26796, 329, 257, 6060, 23450, 2214, 526, 15931, 198, 6738, 19720, 1330, 32233, 198, 198, 6738, 764, 1330, 357, 198, 220, 220, 220, 18930, 24639, 62, 47, 31688, 10426, 11, 198, 220, 220, 220, 18930, 24639, 62, 12564, 5777, 10426, ...
2.309278
97
"""DEEPSCORESV2 Provides access to the DEEPSCORESV2 database with a COCO-like interface. The only changes made compared to the coco.py file are the class labels. Author: Lukas Tuggener <tugg@zhaw.ch> Yvan Satyawan <y_satyawan@hotmail.com> Created on: November 23, 2019 """ from .coco import * import os import json from obb_anns import OBBAnns
[ 37811, 35, 6500, 3705, 44879, 1546, 53, 17, 198, 198, 15946, 1460, 1895, 284, 262, 5550, 36, 3705, 44879, 1546, 53, 17, 6831, 351, 257, 327, 4503, 46, 12, 2339, 7071, 13, 383, 198, 8807, 2458, 925, 3688, 284, 262, 8954, 78, 13, 90...
2.686567
134
#!/usr/bin/env python

import unittest
from xml.dom.minidom import parseString
import xml.etree.ElementTree as ET
from decimal import Decimal

from gomatic import GoCdConfigurator, FetchArtifactDir, RakeTask, ExecTask, ScriptExecutorTask, FetchArtifactTask, \
    FetchArtifactFile, Tab, GitMaterial, PipelineMaterial, Pipeline
from gomatic.fake import FakeHostRestClient, empty_config_xml, config, empty_config
from gomatic.gocd.pipelines import DEFAULT_LABEL_TEMPLATE
from gomatic.gocd.artifacts import Artifact
from gomatic.xml_operations import prettify


def simplified(s):
    return s.strip().replace("\t", "").replace("\n", "").replace("\\", "").replace(" ", "")


def sneakily_converted_to_xml(pipeline):
    if pipeline.is_template:
        return ET.tostring(pipeline.element)
    else:
        return ET.tostring(pipeline.parent.element)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 198, 11748, 555, 715, 395, 198, 6738, 35555, 13, 3438, 13, 1084, 312, 296, 1330, 21136, 10100, 198, 11748, 35555, 13, 316, 631, 13, 20180, 27660, 355, 12152, 198, 198, 6738, 32465, 133...
2.896321
299
import logging

from qrutilities.imageutils import ImageUtils
from PyQt4.QtGui import QColor

logger = logging.getLogger('console')
[ 11748, 18931, 198, 6738, 10662, 81, 315, 2410, 13, 9060, 26791, 1330, 7412, 18274, 4487, 198, 6738, 9485, 48, 83, 19, 13, 48, 83, 8205, 72, 1330, 1195, 10258, 198, 6404, 1362, 796, 18931, 13, 1136, 11187, 1362, 10786, 41947, 11537 ]
3.146341
41
from django.conf.urls import url

from . import views

app_name = "messages"

urlpatterns = [
    url(r'^$', views.InboxListView.as_view(), name='inbox'),
    url(r'^sent/$', views.SentMessagesListView.as_view(), name='sent'),
    url(r'^compose/$', views.MessagesCreateView.as_view(), name='compose'),
    # url(r'^compose-all/$', views.SendToAll.as_view(), name='compose_to_all'),
    url(r'^(?P<pk>\d+)/$', views.MessageDetailView.as_view(), name='message_detail'),
    url(r'^calendar/$', views.CalendarView.as_view(), name='calendar'),
]
[ 6738, 42625, 14208, 13, 10414, 13, 6371, 82, 1330, 19016, 201, 198, 201, 198, 6738, 764, 1330, 5009, 201, 198, 201, 198, 1324, 62, 3672, 796, 366, 37348, 1095, 1, 201, 198, 201, 198, 6371, 33279, 82, 796, 685, 201, 198, 201, 198, ...
2.162455
277
import datetime
import json

import dateutil.parser
from django.contrib.auth import get_user_model
from django.test import Client, TestCase
from django.utils import timezone

from apps.devicelocation.models import DeviceLocation
from apps.physicaldevice.models import Device
from apps.property.models import GenericProperty
from apps.report.models import GeneratedUserReport
from apps.sqsworker.exceptions import WorkerActionHardError
from apps.stream.models import StreamId, StreamVariable
from apps.streamdata.models import StreamData
from apps.streamevent.models import StreamEventData
from apps.streamfilter.models import *
from apps.streamnote.models import StreamNote
from apps.utils.data_mask.mask_utils import get_data_mask_event, set_data_mask
from apps.utils.gid.convert import *
from apps.utils.test_util import TestMixin

from ..models import *
from ..worker.archive_device_data import ArchiveDeviceDataAction

user_model = get_user_model()
[ 11748, 4818, 8079, 198, 11748, 33918, 198, 198, 11748, 3128, 22602, 13, 48610, 198, 198, 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 1330, 651, 62, 7220, 62, 19849, 198, 6738, 42625, 14208, 13, 9288, 1330, 20985, 11, 6208, 20448, 198,...
3.719844
257
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from nova.policies import base RULE_AOO = base.RULE_ADMIN_OR_OWNER SERVERS = 'os_compute_api:servers:%s' NETWORK_ATTACH_EXTERNAL = 'network:attach_external_network' ZERO_DISK_FLAVOR = SERVERS % 'create:zero_disk_flavor' REQUESTED_DESTINATION = 'compute:servers:create:requested_destination' CROSS_CELL_RESIZE = 'compute:servers:resize:cross_cell' rules = [ policy.DocumentedRuleDefault( SERVERS % 'index', RULE_AOO, "List all servers", [ { 'method': 'GET', 'path': '/servers' } ]), policy.DocumentedRuleDefault( SERVERS % 'detail', RULE_AOO, "List all servers with detailed information", [ { 'method': 'GET', 'path': '/servers/detail' } ]), policy.DocumentedRuleDefault( SERVERS % 'index:get_all_tenants', base.RULE_ADMIN_API, "List all servers for all projects", [ { 'method': 'GET', 'path': '/servers' } ]), policy.DocumentedRuleDefault( SERVERS % 'detail:get_all_tenants', base.RULE_ADMIN_API, "List all servers with detailed information for all projects", [ { 'method': 'GET', 'path': '/servers/detail' } ]), policy.DocumentedRuleDefault( SERVERS % 'allow_all_filters', base.RULE_ADMIN_API, "Allow all filters when listing servers", [ { 'method': 'GET', 'path': '/servers' }, { 'method': 'GET', 'path': '/servers/detail' } ]), policy.DocumentedRuleDefault( SERVERS % 'show', RULE_AOO, "Show a server", [ { 'method': 'GET', 'path': '/servers/{server_id}' } ]), # the details in host_status are pretty sensitive, only admins # should do that by default. policy.DocumentedRuleDefault( SERVERS % 'show:host_status', base.RULE_ADMIN_API, """ Show a server with additional host status information. This means host_status will be shown irrespective of status value. If showing only host_status UNKNOWN is desired, use the ``os_compute_api:servers:show:host_status:unknown-only`` policy rule. Microvision 2.75 added the ``host_status`` attribute in the ``PUT /servers/{server_id}`` and ``POST /servers/{server_id}/action (rebuild)`` API responses which are also controlled by this policy rule, like the ``GET /servers*`` APIs. """, [ { 'method': 'GET', 'path': '/servers/{server_id}' }, { 'method': 'GET', 'path': '/servers/detail' }, { 'method': 'PUT', 'path': '/servers/{server_id}' }, { 'method': 'POST', 'path': '/servers/{server_id}/action (rebuild)' } ]), policy.DocumentedRuleDefault( SERVERS % 'show:host_status:unknown-only', base.RULE_ADMIN_API, """ Show a server with additional host status information, only if host status is UNKNOWN. This policy rule will only be enforced when the ``os_compute_api:servers:show:host_status`` policy rule does not pass for the request. An example policy configuration could be where the ``os_compute_api:servers:show:host_status`` rule is set to allow admin-only and the ``os_compute_api:servers:show:host_status:unknown-only`` rule is set to allow everyone. 
""", [ { 'method': 'GET', 'path': '/servers/{server_id}' }, { 'method': 'GET', 'path': '/servers/detail' } ]), policy.DocumentedRuleDefault( SERVERS % 'create', RULE_AOO, "Create a server", [ { 'method': 'POST', 'path': '/servers' } ]), policy.DocumentedRuleDefault( SERVERS % 'create:forced_host', base.RULE_ADMIN_API, """ Create a server on the specified host and/or node. In this case, the server is forced to launch on the specified host and/or node by bypassing the scheduler filters unlike the ``compute:servers:create:requested_destination`` rule. """, [ { 'method': 'POST', 'path': '/servers' } ]), policy.DocumentedRuleDefault( REQUESTED_DESTINATION, base.RULE_ADMIN_API, """ Create a server on the requested compute service host and/or hypervisor_hostname. In this case, the requested host and/or hypervisor_hostname is validated by the scheduler filters unlike the ``os_compute_api:servers:create:forced_host`` rule. """, [ { 'method': 'POST', 'path': '/servers' } ]), policy.DocumentedRuleDefault( SERVERS % 'create:attach_volume', RULE_AOO, "Create a server with the requested volume attached to it", [ { 'method': 'POST', 'path': '/servers' } ]), policy.DocumentedRuleDefault( SERVERS % 'create:attach_network', RULE_AOO, "Create a server with the requested network attached to it", [ { 'method': 'POST', 'path': '/servers' } ]), policy.DocumentedRuleDefault( SERVERS % 'create:trusted_certs', RULE_AOO, "Create a server with trusted image certificate IDs", [ { 'method': 'POST', 'path': '/servers' } ]), policy.DocumentedRuleDefault( ZERO_DISK_FLAVOR, base.RULE_ADMIN_API, """ This rule controls the compute API validation behavior of creating a server with a flavor that has 0 disk, indicating the server should be volume-backed. For a flavor with disk=0, the root disk will be set to exactly the size of the image used to deploy the instance. However, in this case the filter_scheduler cannot select the compute host based on the virtual image size. Therefore, 0 should only be used for volume booted instances or for testing purposes. WARNING: It is a potential security exposure to enable this policy rule if users can upload their own images since repeated attempts to create a disk=0 flavor instance with a large image can exhaust the local disk of the compute (or shared storage cluster). See bug https://bugs.launchpad.net/nova/+bug/1739646 for details. """, [ { 'method': 'POST', 'path': '/servers' } ]), policy.DocumentedRuleDefault( NETWORK_ATTACH_EXTERNAL, 'is_admin:True', "Attach an unshared external network to a server", [ # Create a server with a requested network or port. { 'method': 'POST', 'path': '/servers' }, # Attach a network or port to an existing server. 
{ 'method': 'POST', 'path': '/servers/{server_id}/os-interface' } ]), policy.DocumentedRuleDefault( SERVERS % 'delete', RULE_AOO, "Delete a server", [ { 'method': 'DELETE', 'path': '/servers/{server_id}' } ]), policy.DocumentedRuleDefault( SERVERS % 'update', RULE_AOO, "Update a server", [ { 'method': 'PUT', 'path': '/servers/{server_id}' } ]), policy.DocumentedRuleDefault( SERVERS % 'confirm_resize', RULE_AOO, "Confirm a server resize", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (confirmResize)' } ]), policy.DocumentedRuleDefault( SERVERS % 'revert_resize', RULE_AOO, "Revert a server resize", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (revertResize)' } ]), policy.DocumentedRuleDefault( SERVERS % 'reboot', RULE_AOO, "Reboot a server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (reboot)' } ]), policy.DocumentedRuleDefault( SERVERS % 'resize', RULE_AOO, "Resize a server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (resize)' } ]), policy.DocumentedRuleDefault( CROSS_CELL_RESIZE, base.RULE_NOBODY, "Resize a server across cells. By default, this is disabled for all " "users and recommended to be tested in a deployment for admin users " "before opening it up to non-admin users. Resizing within a cell is " "the default preferred behavior even if this is enabled. ", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (resize)' } ]), policy.DocumentedRuleDefault( SERVERS % 'rebuild', RULE_AOO, "Rebuild a server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (rebuild)' } ]), policy.DocumentedRuleDefault( SERVERS % 'rebuild:trusted_certs', RULE_AOO, "Rebuild a server with trusted image certificate IDs", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (rebuild)' } ]), policy.DocumentedRuleDefault( SERVERS % 'create_image', RULE_AOO, "Create an image from a server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (createImage)' } ]), policy.DocumentedRuleDefault( SERVERS % 'create_image:allow_volume_backed', RULE_AOO, "Create an image from a volume backed server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (createImage)' } ]), policy.DocumentedRuleDefault( SERVERS % 'start', RULE_AOO, "Start a server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-start)' } ]), policy.DocumentedRuleDefault( SERVERS % 'stop', RULE_AOO, "Stop a server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (os-stop)' } ]), policy.DocumentedRuleDefault( SERVERS % 'trigger_crash_dump', RULE_AOO, "Trigger crash dump in a server", [ { 'method': 'POST', 'path': '/servers/{server_id}/action (trigger_crash_dump)' } ]), ]
[ 2, 220, 220, 220, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 345, 743, 198, 2, 220, 220, 220, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 921, 743, 7330, 198, 2, 220, 220, ...
1.983881
6,142
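These `DocumentedRuleDefault` objects only take effect once registered with an oslo.policy `Enforcer`, which nova does at API startup. A rough standalone sketch of that flow (the target and credential values are hypothetical, and it assumes nova's `base` rules such as `admin_or_owner` are registered alongside this module's `rules` list):

from oslo_config import cfg
from oslo_policy import policy as oslo_policy

enforcer = oslo_policy.Enforcer(cfg.CONF)
enforcer.register_defaults(rules)  # the `rules` list defined in this module

# Would these credentials be allowed to show a server in their own project?
creds = {'project_id': 'p1', 'roles': ['member']}
target = {'project_id': 'p1'}
allowed = enforcer.authorize('os_compute_api:servers:show', target, creds, do_raise=False)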
#!/usr/bin/env python
"""
#
# set-env - a small python program to setup the configuration environment for data-push.py
# data-push.py contains the python program to push attribute values to vROps
# Author Sajal Debnath <sdebnath@vmware.com>
#
"""

# Importing the required modules
import json
import base64
import os, sys

# Getting the absolute path from where the script is being run
path = get_script_path()

# Getting the path where env.json file should be kept
fullpath = path + "/" + "env.json"

# Getting the inputs from user
final_data = get_the_inputs()

# Saving the data to env.json file
with open(fullpath, 'w') as outfile:
    json.dump(final_data, outfile, sort_keys=True, indent=2, separators=(',', ':'), ensure_ascii=False)
[ 2, 5145, 14, 14629, 14, 8800, 21015, 201, 198, 201, 198, 37811, 201, 198, 2, 201, 198, 2, 900, 12, 24330, 532, 257, 1402, 21015, 1430, 284, 9058, 262, 8398, 2858, 329, 1366, 12, 14689, 13, 9078, 201, 198, 2, 1366, 12, 14689, 13, ...
2.903571
280
import unittest

# Definition for a binary tree node.


if __name__ == "__main__":
    unittest.main()
[ 11748, 555, 715, 395, 198, 2, 30396, 329, 257, 13934, 5509, 10139, 13, 628, 220, 220, 220, 220, 220, 220, 220, 220, 198, 198, 361, 11593, 3672, 834, 6624, 366, 834, 12417, 834, 1298, 198, 220, 220, 220, 555, 715, 395, 13, 12417, 3...
2.346939
49
TOKEN = "1876415562:AAEsX_c9k3Fot2IT0BYRqkCCQ5vFEHQDLDQ"

CHAT_ID = [957539786]  # e.g. [1234567, 2233445, 3466123...]
[ 10468, 43959, 796, 366, 23451, 2414, 1314, 43918, 25, 3838, 23041, 55, 62, 66, 24, 74, 18, 37, 313, 17, 2043, 15, 17513, 49, 80, 74, 4093, 48, 20, 85, 15112, 41275, 35, 11163, 48, 1, 198, 198, 31542, 62, 2389, 796, 685, 3865, 24...
1.761194
67
"""entry point""" from . import main start = main.app.launch
[ 37811, 13000, 966, 37811, 198, 198, 6738, 764, 1330, 1388, 198, 198, 9688, 796, 1388, 13, 1324, 13, 35681, 198 ]
3.15
20
from tests import run_main_and_assert

FAST_LOCAL_TEST_ARGS = "--exp-name local_test --datasets mnist" \
                       " --network LeNet --num-tasks 5 --seed 1 --batch-size 32" \
                       " --nepochs 2 --num-workers 0 --stop-at-task 3"
[ 6738, 5254, 1330, 1057, 62, 12417, 62, 392, 62, 30493, 198, 198, 37, 11262, 62, 29701, 1847, 62, 51, 6465, 62, 1503, 14313, 796, 366, 438, 11201, 12, 3672, 1957, 62, 9288, 1377, 19608, 292, 1039, 285, 77, 396, 1, 3467, 198, 220, 2...
2.015504
129
# Author: Anuj Sharma (@optider)
# GitHub Profile: https://github.com/Optider/
# Problem Link: https://leetcode.com/problems/contains-duplicate/
[ 2, 5231, 273, 25, 1052, 23577, 40196, 4275, 8738, 1304, 8, 198, 2, 38994, 13118, 25, 3740, 1378, 12567, 13, 785, 14, 27871, 1304, 14, 198, 2, 20647, 7502, 25, 3740, 1378, 293, 316, 8189, 13, 785, 14, 1676, 22143, 14, 3642, 1299, 1...
2.959184
49
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import logging
import optparse
import os
import sys
import tempfile
import zipfile

from util import build_utils


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 198, 2, 15069, 2211, 383, 18255, 1505, 46665, 13, 1439, 2489, 10395, 13, 198, 2, 5765, 286, 428, 2723, 2438, 318, 21825, 416, 257, 347, 10305, 12, 7635, 5964, 326, 460, 307, 198, ...
3.218487
119
from django.shortcuts import render from django.http import HttpResponse from django.contrib.auth.hashers import check_password, make_password from django.views import View from utils.response_code import RET, error_map from rest_framework.views import APIView from rest_framework.response import Response from apps.serializers import * from datetime import datetime # Create your views here. # # import json # def reg(request): password = make_password('123') admin = Sadmin(username='admin', password=password, is_admin=True) admin.save() return HttpResponse('ok') # # # # # # # # # # # # # # def addCate(request): # cate = Cate.objects.filter(pid=0).all() id=request.GET.get('id') try: # one_cate=Cate.objects.get(id=id) print(one_cate) except: id="" return render(request, "admin/add_cate.html", locals()) # # # # from day01.settings import UPLOADFILES import os # # # # def deleteCate(request): id=request.GET.get('id') Cate.objects.get(id=id).delete() return render(request, "admin/cate_list.html") # # def deleteTag(request): id=request.GET.get('id') Cate.objects.get(id=id).delete() return render(request, "admin/tag_list.html") # # def deleteGoods(request): id=request.GET.get('id') Goods.objects.get(id=id).delete() return render(request, "admin/goods_list.html") # # def deleteNews(request): id=request.GET.get('id') News.objects.get(id=id).delete() return render(request,"admin/news_list.html") # #
[ 6738, 42625, 14208, 13, 19509, 23779, 1330, 8543, 198, 6738, 42625, 14208, 13, 4023, 1330, 367, 29281, 31077, 198, 198, 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 13, 10134, 7084, 1330, 2198, 62, 28712, 11, 787, 62, 28712, 198, 6738,...
2.400881
681
import numpy as np

from .covariance_target import CovarianceTarget
[ 11748, 299, 32152, 355, 45941, 26, 198, 198, 6738, 764, 66, 709, 2743, 590, 62, 16793, 1330, 39751, 2743, 590, 21745, 26, 198 ]
3.043478
23
{ 'target_defaults': { 'win_delay_load_hook': 'false', 'conditions': [ ['OS=="win"', { 'msvs_disabled_warnings': [ 4530, # C++ exception handler used, but unwind semantics are not enabled 4506, # no definition for inline function ], }], ], }, 'targets': [ { 'target_name': 'fs_admin', 'defines': [ "NAPI_VERSION=<(napi_build_version)", ], 'cflags!': [ '-fno-exceptions' ], 'cflags_cc!': [ '-fno-exceptions' ], 'xcode_settings': { 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES', 'CLANG_CXX_LIBRARY': 'libc++', 'MACOSX_DEPLOYMENT_TARGET': '10.7', }, 'msvs_settings': { 'VCCLCompilerTool': { 'ExceptionHandling': 1 }, }, 'sources': [ 'src/main.cc', ], 'include_dirs': [ '<!(node -p "require(\'node-addon-api\').include_dir")', ], 'conditions': [ ['OS=="win"', { 'sources': [ 'src/fs-admin-win.cc', ], 'libraries': [ '-lole32.lib', '-lshell32.lib', ], }], ['OS=="mac"', { 'sources': [ 'src/fs-admin-darwin.cc', ], 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/Security.framework', ], }], ['OS=="linux"', { 'sources': [ 'src/fs-admin-linux.cc', ], }], ], } ] }
[ 90, 198, 220, 705, 16793, 62, 12286, 82, 10354, 1391, 198, 220, 220, 220, 705, 5404, 62, 40850, 62, 2220, 62, 25480, 10354, 705, 9562, 3256, 198, 220, 220, 220, 705, 17561, 1756, 10354, 685, 198, 220, 220, 220, 220, 220, 37250, 2640...
1.757683
846
"""Django settings for botwtracker project. Copyright (c) 2017, Evan Moritz. botw-tracker is an open source software project released under the MIT License. See the accompanying LICENSE file for terms. """ import os from .config_local import * # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATA_DIR = os.path.join(BASE_DIR, '..', 'data') # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'quests.apps.QuestsConfig', 'user.apps.UserConfig', ] if USE_SIGNUP: INSTALLED_APPS.append('signup.apps.SignupConfig') MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'botwtracker.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ os.path.join(BASE_DIR, 'templates') ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'botwtracker.wsgi.application' # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(DATA_DIR, 'sqlite3.db'), } } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = [ os.path.join(BASE_DIR, "..", "static") ]
[ 37811, 35, 73, 14208, 6460, 329, 10214, 86, 2213, 10735, 1628, 13, 198, 198, 15269, 357, 66, 8, 2177, 11, 21523, 3461, 4224, 13, 198, 198, 13645, 86, 12, 2213, 10735, 318, 281, 1280, 2723, 3788, 1628, 2716, 739, 262, 17168, 13789, 1...
2.339214
1,247
from flask import Blueprint, request, jsonify

from app.domains.users.actions import get_all_users, insert_user, get_user_by_id, update_user, delete_user

app_users = Blueprint('app.users', __name__)
[ 6738, 42903, 1330, 39932, 11, 2581, 11, 33918, 1958, 198, 6738, 598, 13, 3438, 1299, 13, 18417, 13, 4658, 1330, 651, 62, 439, 62, 18417, 11, 7550, 62, 7220, 11, 651, 62, 7220, 62, 1525, 62, 312, 11, 4296, 62, 7220, 11, 12233, 62, ...
3.203125
64
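The blueprint above only becomes reachable once routes are attached to it and it is registered on the app. A minimal sketch of one such route (the URL, status code, and `to_dict()` serialization are assumptions; the real handlers live in the rest of this module):

@app_users.route('/users', methods=['GET'])
def get_users():
    # get_all_users() is imported above from app.domains.users.actions
    return jsonify([user.to_dict() for user in get_all_users()]), 200

# Elsewhere, an app factory would hook it up with:
# app.register_blueprint(app_users, url_prefix='/api')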
import legacy_code.tf_cnn_siamese.configurations as conf
import tensorflow as tf
import numpy as np


def construct_cnn(x, conv_weights, conv_biases, fc_weights, fc_biases,
                  dropout=False):
  """
  constructs the convolution graph for one image
  :param x: input node
  :param conv_weights: convolution weights
  :param conv_biases: relu biases for each convolution
  :param fc_weights: fully connected weights, only one set should be used here
  :param fc_biases: fully connected biases, only one set should be used here
  :param dropout: whether to add a dropout layer for the fully connected layer
  :return: output node
  """
  k = conf.NUM_POOL
  for i in range(conf.NUM_CONVS):
    x = tf.nn.conv2d(x, conv_weights[i], strides=[1, 1, 1, 1], padding='SAME',
                     data_format=conf.DATA_FORMAT)
    x = tf.nn.relu(tf.nn.bias_add(x, conv_biases[i],
                                  data_format=conf.DATA_FORMAT))
    if k > 0:
      x = tf.nn.max_pool(x, ksize=conf.POOL_KDIM, strides=conf.POOL_KDIM,
                         padding='VALID', data_format=conf.DATA_FORMAT)
      k -= 1
  # Reshape the feature map cuboids into vectors for fc layers
  features_shape = x.get_shape().as_list()
  n = features_shape[0]
  m = features_shape[1] * features_shape[2] * features_shape[3]
  features = tf.reshape(x, [n, m])
  # last fc_weights determine output dimensions
  fc = tf.nn.sigmoid(tf.matmul(features, fc_weights[0]) + fc_biases[0])
  # for actual training
  if dropout:
    fc = tf.nn.dropout(fc, conf.DROP_RATE)
  return fc


def construct_logits_model(x_1, x_2, conv_weights, conv_biases, fc_weights,
                           fc_biases, dropout=False):
  """
  constructs the logit node before the final sigmoid activation
  :param x_1: input image node 1
  :param x_2: input image node 2
  :param conv_weights: nodes for convolution weights
  :param conv_biases: nodes for convolution relu biases
  :param fc_weights: nodes for fully connected weights
  :param fc_biases: nodes for fully connected biases
  :param dropout: whether to include dropout layers
  :return: logit node
  """
  with tf.name_scope("twin_1"):
    twin_1 = construct_cnn(x_1, conv_weights, conv_biases,
                           fc_weights, fc_biases, dropout)
  with tf.name_scope("twin_2"):
    twin_2 = construct_cnn(x_2, conv_weights, conv_biases,
                           fc_weights, fc_biases, dropout)
  # logits on squared difference
  sq_diff = tf.squared_difference(twin_1, twin_2)
  logits = tf.matmul(sq_diff, fc_weights[1]) + fc_biases[1]
  return logits


def construct_full_model(x_1, x_2, conv_weights, conv_biases, fc_weights,
                         fc_biases):
  """
  constructs the graph for the neural network without loss node or optimizer
  :param x_1: input image node 1
  :param x_2: input image node 2
  :param conv_weights: nodes for convolution weights
  :param conv_biases: nodes for convolution relu biases
  :param fc_weights: nodes for fully connected weights
  :param fc_biases: nodes for fully connected biases
  :return: sigmoid output node
  """
  logits = construct_logits_model(x_1, x_2, conv_weights, conv_biases,
                                  fc_weights, fc_biases, dropout=False)
  return tf.nn.sigmoid(logits)


def construct_loss_optimizer(x_1, x_2, labels, conv_weights, conv_biases,
                             fc_weights, fc_biases, dropout=False,
                             lagrange=False):
  """
  constructs the neural network graph with the loss and optimizer node
  :param x_1: input image node 1
  :param x_2: input image node 2
  :param labels: expected output
  :param conv_weights: nodes for convolution weights
  :param conv_biases: nodes for convolution relu biases
  :param fc_weights: nodes for fully connected weights
  :param fc_biases: nodes for fully connected biases
  :param dropout: whether to use dropout
  :param lagrange: whether to apply constraints
  :return: the node for the optimizer as well as the loss
  """
  logits = construct_logits_model(x_1, x_2, conv_weights, conv_biases,
                                  fc_weights, fc_biases, dropout)
  # cross entropy loss on sigmoids of joined output and labels
  loss_vec = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels,
                                                     logits=logits)
  loss = tf.reduce_mean(loss_vec)
  if lagrange:
    # constraints on sigmoid layers
    regularizers = (tf.nn.l2_loss(fc_weights[0]) + tf.nn.l2_loss(fc_biases[0]) +
                    tf.nn.l2_loss(fc_weights[1]) + tf.nn.l2_loss(fc_biases[1]))
    loss += conf.LAMBDA * regularizers

  # setting up the optimization
  batch = tf.Variable(0, dtype=conf.DTYPE)
  # vanilla momentum optimizer
  # accumulation = momentum * accumulation + gradient
  # every epoch: variable -= learning_rate * accumulation
  # batch_total = labels.shape[0]
  # learning_rate = tf.train.exponential_decay(
  #     conf.BASE_LEARNING_RATE,
  #     batch * conf.BATCH_SIZE,  # Current index into the dataset.
  #     batch_total,
  #     conf.DECAY_RATE,  # Decay rate.
  #     staircase=True)
  # trainer = tf.train.MomentumOptimizer(learning_rate, conf.MOMENTUM)\
  #           .minimize(loss, global_step=batch)

  # adaptive momentum estimation optimizer
  # default params: learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-08
  trainer = tf.train.AdamOptimizer().minimize(loss, global_step=batch)
  return trainer, loss


def construct_joined_model(twin_1, twin_2, fc_weights, fc_biases):
  """
  constructs joined model for two sets of extracted features
  :param twin_1: features node extracted from first image
  :param twin_2: features node extracted from second image
  :param fc_weights: nodes for fully connected weights
  :param fc_biases: nodes for fully connected biases
  :return: logit node
  """
  # logits on squared difference
  sq_diff = tf.squared_difference(twin_1, twin_2)
  logits = tf.matmul(sq_diff, fc_weights[1]) + fc_biases[1]
  return tf.nn.sigmoid(logits)


def initialize_weights():
  """
  initializes the variable tensors to be trained in the neural network,
  decides network dimensions
  :return: nodes for the variables
  """
  # twin network convolution and pooling variables
  conv_weights = []
  conv_biases = []
  fc_weights = []
  fc_biases = []
  for i in range(conf.NUM_CONVS):
    if i == 0:
      inp = conf.NUM_CHANNELS
    else:
      inp = conf.NUM_FILTERS[i - 1]
    out = conf.NUM_FILTERS[i]
    conv_dim = [conf.FILTER_LEN, conf.FILTER_LEN, inp, out]
    weight_name = "twin_conv" + str(i + 1) + "_weights"
    bias_name = "twin_conv" + str(i + 1) + "_biases"
    conv_weights.append(tf.Variable(tf.truncated_normal(conv_dim, stddev=0.1,
                                                        seed=conf.SEED,
                                                        dtype=conf.DTYPE),
                                    name=weight_name))
    conv_biases.append(tf.Variable(tf.zeros([out], dtype=conf.DTYPE),
                                   name=bias_name))
  # twin network fully connected variables
  inp = conf.FEATURE_MAP_SIZE
  out = conf.NUM_FC_NEURONS
  fc_weights.append(tf.Variable(tf.truncated_normal([inp, out], stddev=0.1,
                                                    seed=conf.SEED,
                                                    dtype=conf.DTYPE),
                                name="twin_fc_weights"))
  fc_biases.append(tf.Variable(tf.constant(0.1, shape=[out],
                                           dtype=conf.DTYPE),
                               name="twin_fc_biases"))
  # joined network fully connected variables
  inp = conf.NUM_FC_NEURONS
  out = 1
  fc_weights.append(tf.Variable(tf.truncated_normal([inp, out], stddev=0.1,
                                                    seed=conf.SEED,
                                                    dtype=conf.DTYPE),
                                name="joined_fc_weights"))
  fc_biases.append(tf.Variable(tf.constant(0.1, shape=[out],
                                           dtype=conf.DTYPE),
                               name="joined_fc_biases"))
  return conv_weights, conv_biases, fc_weights, fc_biases


def num_params():
  """
  calculates the number of parameters in the model
  :return: m, number of parameters
  """
  m = 0
  for i in range(conf.NUM_CONVS):
    if i == 0:
      inp = conf.NUM_CHANNELS
    else:
      inp = conf.NUM_FILTERS[i - 1]
    out = conf.NUM_FILTERS[i]
    conv_dim = [conf.FILTER_LEN, conf.FILTER_LEN, inp, out]
    m += np.prod(conv_dim) + np.prod(out)
  inp = conf.FEATURE_MAP_SIZE
  out = conf.NUM_FC_NEURONS
  m += inp * out + out
  inp = conf.NUM_FC_NEURONS
  out = 1
  m += inp * out + out
  return m


if __name__ == "__main__":
  print("Number of Parameters: " + str(num_params()))
[ 11748, 10655, 62, 8189, 13, 27110, 62, 66, 20471, 62, 13396, 1047, 68, 13, 11250, 20074, 355, 1013, 198, 11748, 11192, 273, 11125, 355, 48700, 198, 11748, 299, 32152, 355, 45941, 628, 198, 4299, 5678, 62, 66, 20471, 7, 87, 11, 3063, ...
2.365379
3,599
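The two numbers that follow each sample appear to be its characters-per-token ratio and its token count, and the truncated id lists look like GPT-2 BPE ids (198 for a newline, 220 for a space). A minimal sketch of how those fields could be reproduced; the tokenizer choice is an inference from the visible ids, not something the dump states:

from transformers import GPT2TokenizerFast

tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")

def row_stats(content: str):
    # input_ids comparable to the truncated lists shown after each sample
    input_ids = tokenizer(content)["input_ids"]
    token_count = len(input_ids)
    ratio_char_token = len(content) / token_count  # e.g. ~2.37 for the sample above
    return input_ids, ratio_char_token, token_count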
import sys
import logging
import unittest

from testfixtures import LogCapture
from twisted.python.failure import Failure

from scrapy.utils.log import (failure_to_exc_info, TopLevelFormatter,
                              LogCounterHandler, StreamLogger)
from scrapy.utils.test import get_crawler
from scrapy.extensions import telnet
[ 11748, 25064, 198, 11748, 18931, 198, 11748, 555, 715, 395, 198, 198, 6738, 1332, 69, 25506, 1330, 5972, 49630, 198, 6738, 19074, 13, 29412, 13, 32165, 495, 1330, 25743, 198, 198, 6738, 15881, 88, 13, 26791, 13, 6404, 1330, 357, 32165, ...
2.872881
118
import heapq
from typing import List
from definitions import RoomPosition, Position
import random
import sys
[ 11748, 24575, 80, 198, 6738, 19720, 1330, 7343, 198, 6738, 17336, 1330, 10096, 26545, 11, 23158, 198, 11748, 4738, 198, 11748, 25064, 628, 628 ]
4.666667
24
from pypeflow.common import *
from pypeflow.data import PypeLocalFile, makePypeLocalFile, fn
from pypeflow.task import PypeTask, PypeThreadTaskBase, PypeTaskBase
from pypeflow.controller import PypeWorkflow, PypeThreadWorkflow
from falcon_kit.FastaReader import FastaReader
import subprocess, shlex
import os, re

cigar_re = r"(\d+)([MIDNSHP=X])"

if __name__ == "__main__":
    import argparse
    import re
    parser = argparse.ArgumentParser(description='phasing variants and reads from a bam file')
    # we can run this in parallel mode in the future
    #parser.add_argument('--n_core', type=int, default=4,
    #                    help='number of processes used for generating consensus')
    parser.add_argument('--bam', type=str, help='path to sorted bam file', required=True)
    parser.add_argument('--fasta', type=str, help='path to the fasta file that contains the contig', required=True)
    parser.add_argument('--ctg_id', type=str, help='contig identifier in the bam file', required=True)
    parser.add_argument('--base_dir', type=str, default="./",
                        help='the output base_dir, default to current working directory')

    args = parser.parse_args()

    bam_fn = args.bam
    fasta_fn = args.fasta
    ctg_id = args.ctg_id
    base_dir = args.base_dir

    ref_seq = ""
    for r in FastaReader(fasta_fn):
        rid = r.name.split()[0]
        if rid != ctg_id:
            continue
        ref_seq = r.sequence.upper()

    PypeThreadWorkflow.setNumThreadAllowed(1, 1)
    wf = PypeThreadWorkflow()

    bam_file = makePypeLocalFile(bam_fn)
    vmap_file = makePypeLocalFile(os.path.join(base_dir, ctg_id, "variant_map"))
    vpos_file = makePypeLocalFile(os.path.join(base_dir, ctg_id, "variant_pos"))
    q_id_map_file = makePypeLocalFile(os.path.join(base_dir, ctg_id, "q_id_map"))
    parameters = {}
    parameters["ctg_id"] = ctg_id
    parameters["ref_seq"] = ref_seq
    parameters["base_dir"] = base_dir

    make_het_call_task = PypeTask(inputs={"bam_file": bam_file},
                                  outputs={"vmap_file": vmap_file,
                                           "vpos_file": vpos_file,
                                           "q_id_map_file": q_id_map_file},
                                  parameters=parameters,
                                  TaskType=PypeThreadTaskBase,
                                  URL="task://localhost/het_call")(make_het_call)

    wf.addTasks([make_het_call_task])

    atable_file = makePypeLocalFile(os.path.join(base_dir, ctg_id, "atable"))
    parameters = {}
    parameters["ctg_id"] = ctg_id
    parameters["base_dir"] = base_dir
    generate_association_table_task = PypeTask(inputs={"vmap_file": vmap_file},
                                               outputs={"atable_file": atable_file},
                                               parameters=parameters,
                                               TaskType=PypeThreadTaskBase,
                                               URL="task://localhost/g_atable")(generate_association_table)

    wf.addTasks([generate_association_table_task])

    phased_variant_file = makePypeLocalFile(os.path.join(base_dir, ctg_id, "phased_variants"))
    get_phased_blocks_task = PypeTask(inputs={"vmap_file": vmap_file,
                                              "atable_file": atable_file},
                                      outputs={"phased_variant_file": phased_variant_file},
                                      TaskType=PypeThreadTaskBase,
                                      URL="task://localhost/get_phased_blocks")(get_phased_blocks)
    wf.addTasks([get_phased_blocks_task])

    phased_read_file = makePypeLocalFile(os.path.join(base_dir, ctg_id, "phased_reads"))
    get_phased_reads_task = PypeTask(inputs={"vmap_file": vmap_file,
                                             "q_id_map_file": q_id_map_file,
                                             "phased_variant_file": phased_variant_file},
                                     outputs={"phased_read_file": phased_read_file},
                                     parameters={"ctg_id": ctg_id},
                                     TaskType=PypeThreadTaskBase,
                                     URL="task://localhost/get_phased_reads")(get_phased_reads)
    wf.addTasks([get_phased_reads_task])

    wf.refreshTargets()

    #with open("fc_phasing_wf.dot", "w") as f:
    #    print >>f, wf.graphvizDot
[ 6738, 279, 2981, 11125, 13, 11321, 1330, 1635, 220, 198, 6738, 279, 2981, 11125, 13, 7890, 1330, 350, 2981, 14565, 8979, 11, 787, 47, 2981, 14565, 8979, 11, 24714, 198, 6738, 279, 2981, 11125, 13, 35943, 1330, 350, 2981, 25714, 11, 35...
2.078177
2,085
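The PypeTask(...)(make_het_call) calls above use a decorator-factory style: PypeTask(...) returns a callable that wraps a plain function into a task node (the task bodies themselves, make_het_call and friends, are not part of this sample). A generic Python illustration of the pattern, not the actual pypeflow implementation:

def Task(**config):
    """Hypothetical stand-in: returns a wrapper that attaches task metadata."""
    def wrap(fn):
        fn.task_config = config  # a real framework would build a graph node here
        return fn
    return wrap

def make_het_call(self):
    pass  # placeholder body; the real function is not included in the sample

make_het_call_task = Task(URL="task://localhost/het_call")(make_het_call)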
from keras.datasets import mnist
from keras.layers.merge import _Merge
from keras.layers import Input, Dense, Reshape, Flatten, Dropout
from keras.layers import BatchNormalization, Activation, ZeroPadding2D
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.convolutional import UpSampling2D, Conv2D
from keras.models import Sequential, Model
from keras.optimizers import RMSprop
from functools import partial

from augmentation.discriminators import wasserstein_discriminator
from augmentation.generators import wasserstein_generator

import keras.backend as K
import matplotlib.pyplot as plt

import sys

import numpy as np
[ 6738, 41927, 292, 13, 19608, 292, 1039, 1330, 285, 77, 396, 198, 6738, 41927, 292, 13, 75, 6962, 13, 647, 469, 1330, 4808, 13102, 469, 198, 6738, 41927, 292, 13, 75, 6962, 1330, 23412, 11, 360, 1072, 11, 1874, 71, 1758, 11, 1610, ...
3.245
200
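In WGAN-GP training code, the private _Merge layer imported above is usually subclassed to interpolate between real and fake batches for the gradient penalty. A common sketch of that use, assumed here since the training loop is not part of the sample (the batch size of 32 is an arbitrary choice):

from keras.layers.merge import _Merge
import keras.backend as K

class RandomWeightedAverage(_Merge):
    """Returns a random point on the line between each real/fake sample pair."""
    def _merge_function(self, inputs):
        alpha = K.random_uniform((32, 1, 1, 1))  # assumed batch size
        return (alpha * inputs[0]) + ((1 - alpha) * inputs[1])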
from Core.IFactory import IFactory
from Regs.Block_C import RC480
[ 6738, 7231, 13, 5064, 9548, 1330, 16876, 9548, 198, 6738, 3310, 82, 13, 12235, 62, 34, 1330, 13987, 22148, 628 ]
3.35
20
###############################################################################
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
###############################################################################
import os
import onnx
from distutils.version import StrictVersion

# Rather than using ONNX protobuf definition throughout our codebase, we import ONNX protobuf definition here so that
# we can conduct quick fixes by overwriting ONNX functions without changing any lines elsewhere.
from onnx import onnx_pb as onnx_proto
from onnx import helper

_check_onnx_version()

is_tf_keras = False
if os.environ.get('TF_KERAS', '0') != '0':
    is_tf_keras = True

if is_tf_keras:
    from tensorflow.python import keras
else:
    try:
        import keras
    except ImportError:
        is_tf_keras = True
        from tensorflow.python import keras
[ 29113, 29113, 7804, 4242, 21017, 198, 2, 15069, 357, 66, 8, 5413, 10501, 13, 1439, 2489, 10395, 13, 198, 2, 49962, 739, 262, 17168, 13789, 13, 4091, 13789, 13, 14116, 287, 262, 1628, 6808, 329, 198, 2, 5964, 1321, 13, 198, 29113, 29...
3.439286
280
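_check_onnx_version() is called but never defined in the sample. A plausible stand-in consistent with the StrictVersion import, assuming it only enforces a minimum ONNX version (the threshold is invented):

import onnx
from distutils.version import StrictVersion

def _check_onnx_version():
    # hypothetical minimum; the real project may pin a different version
    if StrictVersion(onnx.__version__) < StrictVersion('1.2.3'):
        raise ImportError("onnx >= 1.2.3 is required, found %s" % onnx.__version__)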
'''Test package.'''

import xroms
from glob import glob
import os


def test_open_netcdf():
    '''Test xroms.open_netcdf().'''

    base = os.path.join(xroms.__path__[0], '..', 'tests', 'input')
    files = glob('%s/ocean_his_000?.nc' % base)
    ds = xroms.open_netcdf(files)
    assert ds


def test_open_zarr():
    '''Test xroms.open_zarr().'''

    base = os.path.join(xroms.__path__[0], '..', 'tests', 'input')
    files = glob('%s/ocean_his_000?' % base)
    ds = xroms.open_zarr(files, chunks={'ocean_time': 2})
    assert ds
[ 7061, 6, 14402, 5301, 2637, 7061, 198, 198, 11748, 2124, 398, 82, 198, 6738, 15095, 1330, 15095, 198, 11748, 28686, 628, 198, 4299, 1332, 62, 9654, 62, 3262, 66, 7568, 33529, 198, 220, 220, 220, 705, 7061, 14402, 2124, 398, 82, 13, ...
2.036765
272
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^$', views.home, name='home'),
    url(r'^piechart/', views.demo_piechart, name='demo_piechart'),
    url(r'^linechart/', views.demo_linechart, name='demo_linechart'),
    url(r'^linechart_without_date/', views.demo_linechart_without_date, name='demo_linechart_without_date'),
    url(r'^linewithfocuschart/', views.demo_linewithfocuschart, name='demo_linewithfocuschart'),
    url(r'^multibarchart/', views.demo_multibarchart, name='demo_multibarchart'),
    url(r'^stackedareachart/', views.demo_stackedareachart, name='demo_stackedareachart'),
    url(r'^multibarhorizontalchart/', views.demo_multibarhorizontalchart, name='demo_multibarhorizontalchart'),
    url(r'^lineplusbarchart/', views.demo_lineplusbarchart, name='demo_lineplusbarchart'),
    url(r'^cumulativelinechart/', views.demo_cumulativelinechart, name='demo_cumulativelinechart'),
    url(r'^discretebarchart/', views.demo_discretebarchart, name='demo_discretebarchart'),
    url(r'^discretebarchart_with_date/', views.demo_discretebarchart_with_date, name='demo_discretebarchart_date'),
    url(r'^scatterchart/', views.demo_scatterchart, name='demo_scatterchart'),
    url(r'^linechart_with_ampm/', views.demo_linechart_with_ampm, name='demo_linechart_with_ampm'),
    # url(r'^demoproject/', include('demoproject.foo.urls')),
]
[ 6738, 42625, 14208, 13, 10414, 13, 6371, 82, 1330, 19016, 198, 6738, 764, 1330, 5009, 628, 198, 6371, 33279, 82, 796, 685, 198, 220, 220, 220, 19016, 7, 81, 6, 61, 3, 3256, 5009, 13, 11195, 11, 1438, 11639, 11195, 33809, 198, 220, ...
2.470483
559
import time
import mido

from pinaps.piNapsController import PiNapsController
from NeuroParser import NeuroParser

"""
Equation of motion used to modify vibrato.
"""
CTRL_LFO_PITCH = 26
CTRL_LFO_RATE = 29
MIDI_MESSAGE_PERIOD = 1

vibratoPos = 0
vibratoVel = 0
vibratoAcc = 4

if __name__ == '__main__':
    main()
[ 11748, 640, 201, 198, 11748, 3095, 78, 201, 198, 201, 198, 6738, 6757, 1686, 13, 14415, 45, 1686, 22130, 1330, 13993, 45, 1686, 22130, 201, 198, 6738, 13782, 46677, 1330, 13782, 46677, 201, 198, 201, 198, 37811, 201, 198, 197, 23588, ...
2.317241
145
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 31 11:47:47 2019

@author: yanyanyu
"""

"""
Tab1-plot1: candlestick
"""
import json
import datetime
import pandas as pd
from math import pi
from random import choice
from pytz import timezone
from bokeh.plotting import figure, show
from bokeh.palettes import all_palettes, Set3
from bokeh.models import ColumnDataSource, Select, HoverTool, LinearAxis, LabelSet, Range1d, PreText, Div
from warehouse import CassandraStorage
from util.util import pandas_factory, symbol_list, splitTextToTriplet, prev_weekday
from util.config import path, timeZone
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 198, 41972, 319, 3300, 5979, 3261, 1367, 25, 2857, 25, 2857, 13130, 198, 198, 31, 9800, 25, 331, 1092, 109...
2.966184
207
# -*- coding:utf-8 -*-
import tensorflow as tf
[ 2, 532, 9, 12, 19617, 25, 40477, 12, 23, 532, 9, 12, 198, 11748, 11192, 273, 11125, 355, 48700, 628, 628, 628 ]
2.363636
22
import cv2
import mediapipe as mp
from time import sleep
import numpy as np
import autopy
import pynput

wCam, hCam = 1280, 720
wScr, hScr = autopy.screen.size()

cap = cv2.VideoCapture(0)
cap.set(3, wCam)
cap.set(4, hCam)

mp_drawing = mp.solutions.drawing_utils
mp_hands = mp.solutions.hands
mouse = pynput.mouse.Controller()

with mp_hands.Hands(
        min_detection_confidence=0.8,
        min_tracking_confidence=0.5) as hands:
    while cap.isOpened():
        success, image = cap.read()
        if not success:
            print("Ignoring empty camera frame.")
            # If loading a video, use 'break' instead of 'continue'.
            continue

        # Flip the image horizontally for a later selfie-view display, and convert
        # the BGR image to RGB.
        image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
        # To improve performance, optionally mark the image as not writeable to
        # pass by reference.
        image.flags.writeable = False
        results = hands.process(image)

        # Draw the hand annotations on the image.
        image.flags.writeable = True
        image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
        if results.multi_hand_landmarks:
            for hand_landmarks in results.multi_hand_landmarks:
                mp_drawing.draw_landmarks(
                    image, hand_landmarks, mp_hands.HAND_CONNECTIONS)
                #cx, cy = int(hand_landmarks.landmark[8].x*wCam), int(hand_landmarks.landmark[8].y*hCam)
                targetX, targetY = int(hand_landmarks.landmark[8].x*wScr), int(hand_landmarks.landmark[8].y*hScr)
                mouse.position = (targetX, targetY)

                xy_dis_8_12, z_dis_8_12 = findNodeDistance(hCam, wCam, hand_landmarks.landmark, 8, 12)
                xy_dis_12_16, z_dis_12_16 = findNodeDistance(hCam, wCam, hand_landmarks.landmark, 12, 16)

                if xy_dis_8_12 < 40 and z_dis_8_12 < 20:
                    mouse.click(pynput.mouse.Button.left)
                    sleep(0.3)
                if xy_dis_12_16 < 40 and z_dis_12_16 < 20:
                    mouse.click(pynput.mouse.Button.left, 2)
                    sleep(0.3)

        cv2.imshow('MediaPipe Hands', image)
        if cv2.waitKey(5) & 0xFF == 27:
            break
cap.release()
[ 11748, 269, 85, 17, 198, 11748, 16957, 499, 3757, 355, 29034, 198, 6738, 640, 1330, 3993, 198, 11748, 299, 32152, 355, 45941, 198, 11748, 1960, 11081, 198, 11748, 279, 2047, 1996, 198, 198, 86, 21701, 11, 289, 21701, 796, 37674, 11, 2...
2.021645
1,155
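findNodeDistance drives the click gestures above but is not defined in the sample. A plausible helper matching the call sites, assuming normalized MediaPipe landmarks scaled to pixel space (the scaling of the relative z value is a guess):

import numpy as np

def findNodeDistance(hCam, wCam, landmarks, i, j):
    dx = (landmarks[i].x - landmarks[j].x) * wCam   # normalized x -> pixels
    dy = (landmarks[i].y - landmarks[j].y) * hCam   # normalized y -> pixels
    xy_dis = np.hypot(dx, dy)
    z_dis = abs(landmarks[i].z - landmarks[j].z) * wCam  # relative depth proxy
    return xy_dis, z_dis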
import glob
import logging
import os
import shutil
import sys

"""script to divide a folder with generated/training data into a train and val folder
    - val folder contains 500 samples if not changed in source code
    - DOES NOT work if images are structured in subfolders, see below
    - if there is no dir in the given folder -> split this folder
    - if there are dir/s in the folder -> perform split on each folder
    - split on sorted list -> repeated runs should give the same result
"""

if __name__ == '__main__':
    main(sys.argv)
[ 11748, 15095, 198, 11748, 18931, 198, 11748, 28686, 198, 11748, 4423, 346, 198, 11748, 25064, 198, 198, 37811, 12048, 284, 14083, 257, 9483, 351, 7560, 14, 34409, 1366, 656, 257, 4512, 290, 1188, 9483, 198, 220, 220, 220, 532, 1188, 948...
3.564935
154
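main itself is not included in the sample; a minimal sketch of the behavior the docstring describes (flat folder, sorted file list, 500 validation samples), with invented function and directory names:

import glob
import os
import shutil

VAL_SIZE = 500  # per the docstring, unless changed in source code

def split_folder(src, train_dir, val_dir, val_size=VAL_SIZE):
    files = sorted(glob.glob(os.path.join(src, "*")))  # sorted -> repeatable splits
    os.makedirs(train_dir, exist_ok=True)
    os.makedirs(val_dir, exist_ok=True)
    for i, path in enumerate(files):
        dest = val_dir if i < val_size else train_dir
        shutil.move(path, os.path.join(dest, os.path.basename(path)))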
import threading
from queue import Queue
from multiprocessing.pool import ApplyResult

import tabular_logger as tlogger


    def close(self):
        self.available_workers.put(None)
        self.input_queue.put(None)
        self.done_buffer.put(None)

class AsyncTaskHub(object):
[ 198, 11748, 4704, 278, 198, 6738, 16834, 1330, 4670, 518, 198, 6738, 18540, 305, 919, 278, 13, 7742, 1330, 27967, 23004, 198, 198, 11748, 7400, 934, 62, 6404, 1362, 355, 256, 6404, 1362, 628, 198, 220, 220, 220, 825, 1969, 7, 944, 2...
2.638889
108
import os

registry = {}
[ 198, 11748, 28686, 198, 198, 2301, 4592, 796, 23884, 198, 220, 220, 628, 628, 220, 220, 220, 220, 198 ]
1.947368
19
"""Django Admin Panels for App""" from django.contrib import admin from mailer import models
[ 37811, 35, 73, 14208, 32053, 5961, 1424, 329, 2034, 37811, 198, 6738, 42625, 14208, 13, 3642, 822, 1330, 13169, 198, 198, 6738, 6920, 263, 1330, 4981, 198 ]
3.481481
27
DEBUG = True
USE_TZ = True

SECRET_KEY = "dummy"

DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sites",
    "rest_framework",
    "django_filters",
    "belt",
    "tests.app",
]

SITE_ID = 1

ROOT_URLCONF = "tests.app.urls"

MIDDLEWARE = ()

REST_FRAMEWORK = {
    "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",)
}
[ 30531, 796, 6407, 198, 19108, 62, 51, 57, 796, 6407, 198, 198, 23683, 26087, 62, 20373, 796, 366, 67, 13513, 1, 198, 198, 35, 1404, 6242, 1921, 1546, 796, 19779, 12286, 1298, 19779, 26808, 8881, 1298, 366, 28241, 14208, 13, 9945, 13, ...
2.157895
228
import sys
import json
import transformation

if __name__ == "__main__":

    fin_u1 = int(sys.argv[1])
    x_cells = int(sys.argv[2])
    y_cells = int(sys.argv[3])

    assert (x_cells % 2) == 0

    gate_u = 2
    if fin_u1 % 2 != 0:
        fin_u = fin_u1 + 1
    else:
        fin_u = fin_u1

    uc = UnitCell()

    for (x, y) in ((x, y) for x in range(x_cells) for y in range(y_cells)):
        uc.unit(x, y)

    uc.computeBbox()

    with open("./mydesign_dr_globalrouting.json", "wt") as fp:
        data = {'bbox': uc.bbox.toList(),
                'globalRoutes': [],
                'globalRouteGrid': [],
                'terminals': uc.terminals}
        fp.write(json.dumps(data, indent=2) + '\n')
[ 11748, 25064, 198, 11748, 33918, 198, 11748, 13389, 198, 220, 220, 220, 220, 220, 220, 220, 220, 198, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 198, 220, 220, 220, 220...
1.853846
390
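UnitCell is not defined in this fragment (it presumably lives in the imported transformation module or elsewhere in the project). A skeleton showing only the interface the script exercises; all internals are placeholders, not the real class:

class UnitCell:
    def __init__(self):
        self.terminals = []   # filled in as cells are placed
        self.bbox = None      # set by computeBbox()

    def unit(self, x, y):
        pass  # place one unit cell at grid position (x, y); geometry omitted

    def computeBbox(self):
        pass  # compute the bounding box over all placed cells; logic omitted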
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
import os
import time
import csv
from webdriver_manager.chrome import ChromeDriverManager
import math

from basic_function import basic_login, find_element
[ 6738, 384, 11925, 1505, 1330, 3992, 26230, 198, 6738, 384, 11925, 1505, 13, 11321, 13, 1069, 11755, 1330, 1400, 16678, 20180, 16922, 198, 6738, 384, 11925, 1505, 13, 12384, 26230, 13, 46659, 13, 25811, 1330, 18634, 198, 6738, 384, 11925, ...
3.898876
89
from pydantic import BaseSettings


settings = Settings()
[ 6738, 279, 5173, 5109, 1330, 7308, 26232, 628, 198, 198, 33692, 796, 16163, 3419, 198 ]
3.933333
15
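Settings is instantiated but never defined in this fragment. The usual pydantic pattern is a BaseSettings subclass whose fields are read from the environment; the field names below are invented for illustration:

from pydantic import BaseSettings

class Settings(BaseSettings):
    app_name: str = "demo"   # hypothetical fields
    debug: bool = False

settings = Settings()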
#!/usr/bin/env python

''' This module provides configuration options for OS project. No more magic numbers! '''

BLOCK_SIZE = 16   # words
WORD_SIZE = 4     # bytes

# length of RS in blocks
RESTRICTED_LENGTH = 1

# length of DS in blocks
DS_LENGTH = 6

# timer value
TIMER_VALUE = 10

# buffer size
BUFFER_SIZE = 16

# number of blocks in HD
HD_BLOCKS_SIZE = 500

# default priorities
ROOT_PRIORITY = 40
VM_PRIORITY = 50
LOADER_PRIORITY = 60
INTERRUPT_PRIORITY = 70
PRINT_PRIORITY = 70

# Process states
RUNNING_STATE = 'running'
READY_STATE = 'ready'
BLOCKED_STATE = 'blocked'

# Page tables
PAGE_TABLE_STARTING_BLOCK = 0
PAGE_TABLE_ENDING_BLOCK = 14

# Shared memory
SH_MEMEORY_STARTING_BLOCK = 15
SH_MEMORY_ENDING_BLOCK = 31

# blocks dedicated for user tasks are from
USER_STARTING_BLOCK = 32
USER_ENDING_BLOCK = 255
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 198, 7061, 6, 770, 8265, 3769, 8398, 3689, 329, 7294, 1628, 13, 1400, 517, 5536, 3146, 0, 705, 7061, 198, 198, 9148, 11290, 62, 33489, 796, 1467, 220, 220, 1303, 2456, 198, 54, 12532...
2.641935
310
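A quick arithmetic check of what these constants imply (not part of the original module): each block is 16 words of 4 bytes, and user memory spans blocks 32 through 255 inclusive.

BYTES_PER_BLOCK = 16 * 4                      # BLOCK_SIZE * WORD_SIZE = 64 bytes
USER_BLOCKS = 255 - 32 + 1                    # USER_ENDING_BLOCK - USER_STARTING_BLOCK + 1 = 224
USER_BYTES = USER_BLOCKS * BYTES_PER_BLOCK    # 224 * 64 = 14336 bytes for user tasks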
#!/usr/bin/env python

import numpy as np
from roboticstoolbox.robot.ERobot import ERobot
from math import pi


if __name__ == "__main__":   # pragma nocover

    robot = Puma560()
    print(robot)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 198, 11748, 299, 32152, 355, 45941, 198, 6738, 25810, 301, 970, 3524, 13, 305, 13645, 13, 1137, 672, 313, 1330, 13793, 672, 313, 198, 6738, 10688, 1330, 31028, 628, 198, 198, 361, 1159...
2.571429
77
#!/usr/bin/env python

# Copyright (c) 2016 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0

"""Define the classes required to fully cover odl."""

import errno
import logging
import os
import unittest

from keystoneauth1.exceptions import auth_plugins
import mock
from robot.errors import DataError, RobotError
from robot.result import model
from robot.utils.robottime import timestamp_to_secs
import six
from six.moves import urllib

from functest.core import testcase
from functest.opnfv_tests.sdn.odl import odl

__author__ = "Cedric Ollivier <cedric.ollivier@orange.com>"


    def test_set_vars_auth1(self):
        self._test_set_vars(
            "@{AUTH1} foo bar",
            "@{AUTH1} foo bar")


if __name__ == "__main__":
    logging.disable(logging.CRITICAL)
    unittest.main(verbosity=2)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 198, 2, 15069, 357, 66, 8, 1584, 11942, 290, 1854, 13, 198, 2, 198, 2, 1439, 2489, 10395, 13, 770, 1430, 290, 262, 19249, 5696, 198, 2, 389, 925, 1695, 739, 262, 2846, 286, 262, ...
2.742188
384
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ntptrace - trace peers of an NTP server

Usage: ntptrace [-n | --numeric] [-m number | --max-hosts=number]
       [-r hostname | --host=hostname] [--help | --more-help]
       hostname

See the manual page for details.
"""
# SPDX-License-Identifier: BSD-2-Clause
from __future__ import print_function

import getopt
import re
import subprocess
import sys

try:
    import ntp.util
except ImportError as e:
    sys.stderr.write(
        "ntptrace: can't find Python NTP library.\n")
    sys.stderr.write("%s\n" % e)
    sys.exit(1)

usage = r"""ntptrace - trace peers of an NTP server
USAGE: ntptrace [-<flag> [<val>] | --<name>[{=| }<val>]]... [host]

    -n, --numeric                Print IP addresses instead of hostnames
    -m, --max-hosts=num          Maximum number of peers to trace
    -r, --host=str               Single remote host
    -?, --help                   Display usage information and exit
        --more-help              Pass the extended usage text through a pager

Options are specified by doubled hyphens and their name or by a single
hyphen and the flag character.""" + "\n"

try:
    (options, arguments) = getopt.getopt(
        sys.argv[1:], "m:nr:?",
        ["help", "host=", "max-hosts=", "more-help", "numeric"])
except getopt.GetoptError as err:
    sys.stderr.write(str(err) + "\n")
    raise SystemExit(1)

numeric = False
maxhosts = 99
host = '127.0.0.1'

for (switch, val) in options:
    if switch == "-m" or switch == "--max-hosts":
        errmsg = "Error: -m parameter '%s' not a number\n"
        maxhosts = ntp.util.safeargcast(val, int, errmsg, usage)
    elif switch == "-n" or switch == "--numeric":
        numeric = True
    elif switch == "-r" or switch == "--host":
        host = val
    elif switch == "-?" or switch == "--help" or switch == "--more-help":
        print(usage, file=sys.stderr)
        raise SystemExit(0)

if len(arguments):
    host = arguments[0]

hostcount = 0

while True:
    hostcount += 1

    info = get_info(host)

    if info is None:
        break

    if not numeric:
        host = ntp.util.canonicalize_dns(host)

    print("%s: stratum %d, offset %f, synch distance %f" %
          (host, int(info['stratum']), info['offset'], info['syncdistance']),
          end='')
    if int(info['stratum']) == 1:
        print(", refid '%s'" % info['refid'], end='')
    print()

    if (int(info['stratum']) == 0 or int(info['stratum']) == 1 or
            int(info['stratum']) == 16):
        break

    if re.search(r'^127\.127\.\d{1,3}\.\d{1,3}$', info['refid']):
        break

    if hostcount == maxhosts:
        break

    next_host = get_next_host(info['peer'], host)

    if next_host is None:
        break

    if re.search(r'^127\.127\.\d{1,3}\.\d{1,3}$', next_host):
        break

    host = next_host
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 198, 429, 457, 16740, 532, 12854, 14495, 286, 281, 399, 7250, 4382, 198, 198, 28350, 25, 299, 83, 457, 16740, ...
2.23035
1,285
import struct

import msgpack

from lbrynet.wallet.transaction import Transaction, Output
from torba.server.hash import hash_to_hex_str
from torba.server.block_processor import BlockProcessor

from lbrynet.schema.claim import Claim
from lbrynet.wallet.server.model import ClaimInfo
[ 11748, 2878, 198, 198, 11748, 31456, 8002, 198, 198, 6738, 18360, 563, 3262, 13, 44623, 13, 7645, 2673, 1330, 45389, 11, 25235, 198, 6738, 7332, 7012, 13, 15388, 13, 17831, 1330, 12234, 62, 1462, 62, 33095, 62, 2536, 198, 198, 6738, 7...
3.506173
81
from ProjectEulerCommons.Base import *

numbers_list = """37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690""".splitlines()

Answer(
    str(sum([int(line) for line in numbers_list]))[0:10]
)

"""
------------------------------------------------
ProjectEuler.Problem.013.py
The Answer is: 5537376230
Time Elasped: 0.005984783172607422sec
------------------------------------------------
"""
[ 6738, 4935, 36, 18173, 6935, 684, 13, 14881, 1330, 1635, 198, 198, 77, 17024, 62, 4868, 796, 37227, 2718, 15982, 2078, 2425, 29626, 2999, 40403, 4089, 44673, 2079, 6469, 1238, 5999, 2425, 3829, 1731, 2996, 8784, 27277, 1821, 9031, 198, ...
2.293771
2,376
"""Endpoint for the manipulation of datasets """ import hashlib from flask import Response from flask_restx import Namespace, Resource, abort from app.common import client from app.common import datasets as datasets_fcts from app.common import path api = Namespace("datasets", description="Datasets related endpoints") def add_openaire_links(datasets): for dataset in datasets: shared_id = dataset.get("shared_id") if not shared_id: dataset["openaireLink"] = "https://enermaps.openaire.eu/" else: shared_id_hash = hashlib.md5(shared_id.encode()) # nosec dataset["openaireLink"] = ( "https://enermaps.openaire.eu/search/dataset?datasetId=enermaps____::{}" .format(shared_id_hash.hexdigest()) )
[ 37811, 12915, 4122, 329, 262, 17512, 286, 40522, 198, 37811, 198, 198, 11748, 12234, 8019, 198, 198, 6738, 42903, 1330, 18261, 198, 6738, 42903, 62, 2118, 87, 1330, 28531, 10223, 11, 20857, 11, 15614, 198, 198, 6738, 598, 13, 11321, 133...
2.510769
325
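For illustration, what add_openaire_links (defined in the sample above) produces for one record; the shared_id value here is made up:

import hashlib

records = [{"shared_id": "example-dataset"}]
add_openaire_links(records)
# records[0]["openaireLink"] ==
#   "https://enermaps.openaire.eu/search/dataset?datasetId=enermaps____::"
#   + hashlib.md5(b"example-dataset").hexdigest()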
from .knn import KNNClassifier

__all__ = ['KNNClassifier']
[ 6738, 764, 15418, 77, 1330, 509, 6144, 9487, 7483, 198, 198, 834, 439, 834, 796, 37250, 42, 6144, 9487, 7483, 20520 ]
2.809524
21
import unicodedata

from django.forms import fields
[ 11748, 28000, 9043, 1045, 198, 198, 6738, 42625, 14208, 13, 23914, 1330, 7032, 628 ]
3.785714
14
import sys
import time
import uasyncio as asyncio

from ahttpserver import sendfile, Server

app = Server()


# @app.route("GET", "/")  # if uncommented raises route already declared exception
# async def also_root(reader, writer, request):
#     return


async def hello():
    """ For demo purposes show system is still alive """
    count = 0
    while True:
        print("hello", count)
        count += 1
        await asyncio.sleep(60)


if __name__ == "__main__":
    try:
        set_global_exception_handler()
        asyncio.create_task(hello())
        asyncio.run(app.start())  # must be last, does not return
    except KeyboardInterrupt:
        pass
    finally:
        asyncio.run(app.stop())
        asyncio.new_event_loop()
[ 11748, 25064, 201, 198, 11748, 640, 201, 198, 11748, 334, 292, 13361, 952, 355, 30351, 952, 201, 198, 201, 198, 6738, 257, 5450, 18497, 1330, 3758, 7753, 11, 9652, 201, 198, 201, 198, 1324, 796, 9652, 3419, 201, 198, 201, 198, 201, ...
2.355224
335
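set_global_exception_handler() is not defined in the sample. The conventional MicroPython/uasyncio version looks like the sketch below; this is the common pattern, not necessarily the author's exact code:

import sys
import uasyncio as asyncio

def set_global_exception_handler():
    def handle_exception(loop, context):
        # called for any exception that escapes a task: report and stop hard
        sys.print_exception(context["exception"])  # MicroPython-specific helper
        sys.exit()
    loop = asyncio.get_event_loop()
    loop.set_exception_handler(handle_exception)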
import torch
import torch.nn as nn
import torch.nn.functional as F


# Defines the submodule with skip connection.
# X -------------------identity---------------------- X
#   |-- downsampling -- |submodule| -- upsampling --|
[ 11748, 28034, 201, 198, 11748, 28034, 13, 20471, 355, 299, 77, 201, 198, 11748, 28034, 13, 20471, 13, 45124, 355, 376, 201, 198, 201, 198, 201, 198, 201, 198, 2, 2896, 1127, 262, 850, 21412, 351, 14267, 4637, 13, 201, 198, 2, 1395, ...
3.263889
72
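The comment describes the U-Net-style skip submodule used by pix2pix-like generators: an identity path concatenated with a downsample/submodule/upsample path. A minimal module implementing that shape, with layer choices assumed rather than taken from the original file:

import torch
import torch.nn as nn
import torch.nn.functional as F

class SkipBlock(nn.Module):
    """forward(x) = cat(x, up(submodule(down(x)))) along the channel dim."""
    def __init__(self, channels, submodule):
        super().__init__()
        self.down = nn.Conv2d(channels, channels * 2, kernel_size=4, stride=2, padding=1)
        self.submodule = submodule
        self.up = nn.ConvTranspose2d(channels * 2, channels, kernel_size=4, stride=2, padding=1)

    def forward(self, x):
        y = self.up(self.submodule(F.leaky_relu(self.down(x), 0.2)))
        return torch.cat([x, y], dim=1)  # skip connection: identity plus processed path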
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Path hack
import os
import sys
sys.path.insert(0, os.path.abspath('..'))

try:
    import unittest2 as unittest
except ImportError:
    import unittest

from kgtool.core import *  # noqa


if __name__ == '__main__':
    unittest.main()
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 10644, 8156, 198, 11748, 28686, 198, 11748, 25064, 198, 17597, 13, 6978, 13, 28463, 7, 15, 11, 28686, 13, 6978, 13...
2.368852
122
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division

import scriptcontext as sc

import compas_rhino

from compas_ags.rhino import SettingsForm
from compas_ags.rhino import FormObject
from compas_ags.rhino import ForceObject

__commandname__ = "AGS_toolbar_display"


# ==============================================================================
# Main
# ==============================================================================

if __name__ == '__main__':
    RunCommand(True)
[ 6738, 11593, 37443, 834, 1330, 3601, 62, 8818, 198, 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 198, 6738, 11593, 37443, 834, 1330, 7297, 198, 198, 11748, 4226, 22866, 355, 629, 198, 198, 11748, 552, 292, 62, 17179, 2879, 198, 6738,...
4.283465
127
from typing import (
    Any,
    cast,
    List,
    Optional,
    Type
)

import lpp.ast as ast
from lpp.builtins import BUILTINS
from lpp.object import (
    Boolean,
    Builtin,
    Environment,
    Error,
    Function,
    Integer,
    Null,
    Object,
    ObjectType,
    String,
    Return
)

TRUE = Boolean(True)
FALSE = Boolean(False)
NULL = Null()

_NOT_A_FUNCTION = 'No es una funcion: {}'
_TYPE_MISMATCH = 'Discrepancia de tipos: {} {} {}'
_UNKNOWN_PREFIX_OPERATOR = 'Operador desconocido: {}{}'
_UNKNOWN_INFIX_OPERATOR = 'Operador desconocido: {} {} {}'
_UNKNOWN_IDENTIFIER = 'Identificador no encontrado: {}'
[ 6738, 19720, 1330, 357, 198, 220, 220, 220, 4377, 11, 198, 220, 220, 220, 3350, 11, 198, 220, 220, 220, 7343, 11, 198, 220, 220, 220, 32233, 11, 198, 220, 220, 220, 5994, 198, 8, 198, 198, 11748, 300, 381, 13, 459, 355, 6468, 19...
2.345865
266
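The Spanish templates translate to 'not a function', 'type mismatch', 'unknown operator' and 'identifier not found'; together with the TRUE/FALSE/NULL singletons they are consumed by the evaluator's dispatch. A sketch of two typical cases, assumed rather than copied from the module (it relies on the lpp imports shown in the sample):

def _to_boolean_object(value: bool) -> Boolean:
    return TRUE if value else FALSE  # reuse singletons instead of allocating

def _evaluate_identifier(name: str, env: Environment) -> Object:
    try:
        return env[name]
    except KeyError:
        return BUILTINS.get(name, Error(_UNKNOWN_IDENTIFIER.format(name)))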
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import os
import re

import configparser
import pytest
from assertpy import assert_that

import tests.pcluster.config.utils as utils
from pcluster.config.cfn_param_types import CfnParam, CfnSection
from pcluster.config.mappings import ALLOWED_VALUES, FSX
from pcluster.config.validators import (
    DCV_MESSAGES,
    EBS_VOLUME_TYPE_TO_VOLUME_SIZE_BOUNDS,
    FSX_MESSAGES,
    FSX_SUPPORTED_ARCHITECTURES_OSES,
    LOGFILE_LOGGER,
    architecture_os_validator,
    check_usage_class,
    cluster_type_validator,
    compute_resource_validator,
    disable_hyperthreading_architecture_validator,
    efa_gdr_validator,
    efa_os_arch_validator,
    fsx_ignored_parameters_validator,
    instances_architecture_compatibility_validator,
    intel_hpc_architecture_validator,
    queue_compute_type_validator,
    queue_validator,
    region_validator,
    s3_bucket_region_validator,
    settings_validator,
)
from pcluster.constants import FSX_HDD_THROUGHPUT, FSX_SSD_THROUGHPUT
from tests.common import MockedBoto3Request
from tests.pcluster.config.defaults import DefaultDict


def test_ec2_key_pair_validator(mocker, boto3_stubber):
    describe_key_pairs_response = {
        "KeyPairs": [
            {"KeyFingerprint": "12:bf:7c:56:6c:dd:4f:8c:24:45:75:f1:1b:16:54:89:82:09:a4:26", "KeyName": "key1"}
        ]
    }
    mocked_requests = [
        MockedBoto3Request(
            method="describe_key_pairs", response=describe_key_pairs_response, expected_params={"KeyNames": ["key1"]}
        )
    ]
    boto3_stubber("ec2", mocked_requests)

    # TODO test with invalid key
    config_parser_dict = {"cluster default": {"key_name": "key1"}}
    utils.assert_param_validator(mocker, config_parser_dict)


def test_ec2_volume_validator(mocker, boto3_stubber):
    describe_volumes_response = {
        "Volumes": [
            {
                "AvailabilityZone": "us-east-1a",
                "Attachments": [
                    {
                        "AttachTime": "2013-12-18T22:35:00.000Z",
                        "InstanceId": "i-1234567890abcdef0",
                        "VolumeId": "vol-12345678",
                        "State": "attached",
                        "DeleteOnTermination": True,
                        "Device": "/dev/sda1",
                    }
                ],
                "Encrypted": False,
                "VolumeType": "gp2",
                "VolumeId": "vol-049df61146c4d7901",
                "State": "available",  # TODO add test with "in-use"
                "SnapshotId": "snap-1234567890abcdef0",
                "CreateTime": "2013-12-18T22:35:00.084Z",
                "Size": 8,
            }
        ]
    }
    mocked_requests = [
        MockedBoto3Request(
            method="describe_volumes",
            response=describe_volumes_response,
            expected_params={"VolumeIds": ["vol-12345678"]},
        )
    ]
    boto3_stubber("ec2", mocked_requests)

    # TODO test with invalid key
    config_parser_dict = {
        "cluster default": {"ebs_settings": "default"},
        "ebs default": {"shared_dir": "test", "ebs_volume_id": "vol-12345678"},
    }
    utils.assert_param_validator(mocker, config_parser_dict)


def test_ec2_vpc_id_validator(mocker, boto3_stubber):
    mocked_requests = []

    # mock describe_vpc boto3 call
    describe_vpc_response = {
        "Vpcs": [
            {
                "VpcId": "vpc-12345678",
                "InstanceTenancy": "default",
                "Tags": [{"Value": "Default VPC", "Key": "Name"}],
                "State": "available",
                "DhcpOptionsId": "dopt-4ef69c2a",
                "CidrBlock": "172.31.0.0/16",
                "IsDefault": True,
            }
        ]
    }
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_vpcs", response=describe_vpc_response, expected_params={"VpcIds": ["vpc-12345678"]}
        )
    )

    # mock describe_vpc_attribute boto3 call
    describe_vpc_attribute_response = {
        "VpcId": "vpc-12345678",
        "EnableDnsSupport": {"Value": True},
        "EnableDnsHostnames": {"Value": True},
    }
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_vpc_attribute",
            response=describe_vpc_attribute_response,
            expected_params={"VpcId": "vpc-12345678", "Attribute": "enableDnsSupport"},
        )
    )
    mocked_requests.append(
        MockedBoto3Request(
            method="describe_vpc_attribute",
            response=describe_vpc_attribute_response,
            expected_params={"VpcId": "vpc-12345678", "Attribute": "enableDnsHostnames"},
        )
    )
    boto3_stubber("ec2", mocked_requests)

    # TODO mock and test invalid vpc-id
    for vpc_id, expected_message in [("vpc-12345678", None)]:
        config_parser_dict = {"cluster default": {"vpc_settings": "default"}, "vpc default": {"vpc_id": vpc_id}}
        utils.assert_param_validator(mocker, config_parser_dict, expected_message)


def _kms_key_stubber(mocker, boto3_stubber, kms_key_id, expected_message, num_calls):
    describe_key_response = {
        "KeyMetadata": {
            "AWSAccountId": "1234567890",
            "Arn": "arn:aws:kms:us-east-1:1234567890:key/{0}".format(kms_key_id),
            "CreationDate": datetime.datetime(2019, 1, 10, 11, 25, 59, 128000),
            "Description": "",
            "Enabled": True,
            "KeyId": kms_key_id,
            "KeyManager": "CUSTOMER",
            "KeyState": "Enabled",
            "KeyUsage": "ENCRYPT_DECRYPT",
            "Origin": "AWS_KMS",
        }
    }
    mocked_requests = [
        MockedBoto3Request(
            method="describe_key",
            response=expected_message if expected_message else describe_key_response,
            expected_params={"KeyId": kms_key_id},
            generate_error=True if expected_message else False,
        )
    ] * num_calls
    boto3_stubber("kms", mocked_requests)


def _head_bucket_stubber(mocker, boto3_stubber, bucket, num_calls):
    head_bucket_response = {
        "ResponseMetadata": {
            "AcceptRanges": "bytes",
            "ContentType": "text/html",
            "LastModified": "Thu, 16 Apr 2015 18:19:14 GMT",
            "ContentLength": 77,
            "VersionId": "null",
            "ETag": '"30a6ec7e1a9ad79c203d05a589c8b400"',
            "Metadata": {},
        }
    }
    mocked_requests = [
        MockedBoto3Request(method="head_bucket", response=head_bucket_response, expected_params=bucket)
    ] * num_calls
    boto3_stubber("s3", mocked_requests)
    mocker.patch("pcluster.config.validators.urllib.request.urlopen")


#########
#
# architecture validator tests
#
# Two things make it difficult to test validators that key on architecture in the same way:
# 1) architecture is a derived parameter and cannot be configured directly via the config file
# 2) many validators key on the architecture, which makes it impossible to test some combinations of
#    parameters for validators that run later than others, because those run earlier will have
#    already raised exceptions.
#
# Thus, the following code mocks the pcluster_config object passed to the validator functions
# and calls those functions directly (as opposed to patching functions and instantiating a config
# as would be done when running `pcluster create/update`).
#
#########


def get_default_pcluster_sections_dict():
    """Return a dict similar in structure to that of a cluster config file."""
    default_pcluster_sections_dict = {}
    for section_default_dict in DefaultDict:
        if section_default_dict.name == "pcluster":  # Get rid of the extra layer in this case
            default_pcluster_sections_dict["cluster"] = section_default_dict.value.get("cluster")
        else:
            default_pcluster_sections_dict[section_default_dict.name] = section_default_dict.value
    return default_pcluster_sections_dict


def make_pcluster_config_mock(mocker, config_dict):
    """Mock the calls that are made on a pcluster_config by validator functions."""
    cluster_config_dict = get_default_pcluster_sections_dict()
    for section_key in config_dict:
        cluster_config_dict = utils.merge_dicts(cluster_config_dict.get(section_key), config_dict.get(section_key))

    section_to_mocks = {}
    for section_key, section_dict in config_dict.items():
        section_mock = mocker.MagicMock()
        section_mock.get_param_value.side_effect = lambda param: section_dict.get(param)
        section_to_mocks[section_key] = section_mock

    pcluster_config_mock = mocker.MagicMock()
    pcluster_config_mock.get_section.side_effect = lambda section: section_to_mocks.get(section)

    return pcluster_config_mock


def run_architecture_validator_test(
    mocker,
    config,
    constrained_param_section,
    constrained_param_name,
    param_name,
    param_val,
    validator,
    expected_warnings,
    expected_errors,
):
    """Run a test for a validator that's concerned with the architecture param."""
    mocked_pcluster_config = make_pcluster_config_mock(mocker, config)
    errors, warnings = validator(param_name, param_val, mocked_pcluster_config)

    mocked_pcluster_config.get_section.assert_called_once_with(constrained_param_section)
    mocked_pcluster_config.get_section.side_effect(constrained_param_section).get_param_value.assert_called_with(
        constrained_param_name
    )
    assert_that(len(warnings)).is_equal_to(len(expected_warnings))
    for warnings, expected_warnings in zip(warnings, expected_warnings):
        assert_that(warnings).matches(re.escape(expected_warnings))
    assert_that(len(errors)).is_equal_to(len(expected_errors))
    for errors, expected_errors in zip(errors, expected_errors):
        assert_that(errors).matches(re.escape(expected_errors))


#########
#
# ignored FSx params validator test
#
# Testing a validator that requires the fsx_fs_id parameter to be specified requires a lot of
# boto3 stubbing due to the complexity contained in the fsx_id_validator.
#
# Thus, the following code mocks the pcluster_config object passed to the validator functions
# and calls the validator directly.
#
#########


def test_ebs_allowed_values_all_have_volume_size_bounds():
    """Ensure that all known EBS volume types are accounted for by the volume size validator."""
    allowed_values_all_have_volume_size_bounds = set(ALLOWED_VALUES["volume_types"]) <= set(
        EBS_VOLUME_TYPE_TO_VOLUME_SIZE_BOUNDS.keys()
    )
    assert_that(allowed_values_all_have_volume_size_bounds).is_true()
[ 2, 15069, 13130, 6186, 13, 785, 11, 3457, 13, 393, 663, 29116, 13, 1439, 6923, 33876, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 11074, 921, 743, 407, 779, 428, 2393, 2845, 28...
2.330352
4,807
from flask import Flask
from flask import request
from flask import Response

from resources import resourcePing, resourceResolution
from message_protocol.resolution_input import parseResolutionInput

import json

app = Flask(__name__)
[ 6738, 42903, 1330, 46947, 198, 6738, 42903, 1330, 2581, 198, 6738, 42903, 1330, 18261, 198, 6738, 4133, 1330, 8271, 49806, 11, 8271, 4965, 2122, 198, 6738, 3275, 62, 11235, 4668, 13, 29268, 62, 15414, 1330, 21136, 4965, 2122, 20560, 198, ...
4.415094
53