content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
# -*- coding: utf-8 -*-
"""
@inproceedings{DBLP:conf/nips/SnellSZ17,
author = {Jake Snell and
Kevin Swersky and
Richard S. Zemel},
title = {Prototypical Networks for Few-shot Learning},
booktitle = {Advances in Neural Information Processing Systems 30: Annual Conference
on Neural Information Processing Systems 2017, December 4-9, 2017,
Long Beach, CA, {USA}},
pages = {4077--4087},
year = {2017},
url = {https://proceedings.neurips.cc/paper/2017/hash/cb8da6767461f2812ae4290eac7cbc42-Abstract.html}
}
https://arxiv.org/abs/1703.05175
Adapted from https://github.com/orobix/Prototypical-Networks-for-Few-shot-Learning-PyTorch.
"""
import torch
import torch.nn.functional as F
from torch import nn
from core.utils import accuracy
from .metric_model import MetricModel
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
31,
259,
1676,
2707,
654,
90,
35,
9148,
47,
25,
10414,
14,
77,
2419,
14,
50,
10076,
50,
57,
1558,
11,
198,
220,
1772,
220,
220,
220,
796,
1391,
43930,
... | 2.460227 | 352 |
import discord, os, platform, asyncio, csv
from discord import channel
from discord.ext import commands
import re
import core.config
from apps.verizon.utils import verizon_csv, OUT_REPORTS
| [
11748,
36446,
11,
28686,
11,
3859,
11,
30351,
952,
11,
269,
21370,
198,
6738,
36446,
1330,
6518,
198,
6738,
36446,
13,
2302,
1330,
9729,
198,
11748,
302,
198,
198,
11748,
4755,
13,
11250,
198,
6738,
6725,
13,
332,
8637,
13,
26791,
133... | 3.653846 | 52 |
"""Provides an interface for determining how Python objects are serialized and de-serialized."""
import threading
from amfast import AmFastError
class ClassDefError(AmFastError):
"""ClassDef related errors."""
pass
class ClassDef(object):
"""Defines how objects of a given class are serialized and de-serialized.
This class can be sub-classed to provide custom serialization.
attributes
===========
* class_ - class, the class object mapped to this definition
* alias - string, the AMF alias name of the mapped class
* static_attrs - tuple or list, a tuple of static attribute names,
all values must be strings or unicode.
* amf3 - bool, if True, this object will be encoded in AMF3.
* encode_types - dict, keys = attribute names, values = callables.
Callables must accept a single parameter
(the object being encoded) and return a new object.
* decode_types - dict, keys = attribute names, values = callables.
Callables must accept a single parameter
(the object being decoded) and return a new object.
"""
CLASS_DEF = True
def __init__(self, class_, alias=None, static_attrs=None,
amf3=None, encode_types=None, decode_types=None, _built_in=False):
"""arguments
=============
* class_ - class, the class being mapped.
* alias - string, specifies the amf class alias. Default = module.class
* static_attrs - tuple or list, a tuple of static attribute strings. Default = empty tuple
* amf3 - bool, if True, this object will be encoded in AMF3. Default = True
* encode_types - dict, keys = attribute names, values = callables. Default = None
* decode_types - dict, keys = attribute names, values = callables. Default = None
"""
self.class_ = class_
self._built_in = _built_in
if alias is None:
if hasattr(class_, ALIAS):
alias = getattr(class_, ALIAS)
else:
alias = '.'.join((class_.__module__, class_.__name__))
self.alias = alias
if static_attrs is None:
if hasattr(class_, STATIC_ATTRS):
static_attrs = self.static_attrs = getattr(class_, STATIC_ATTRS)
else:
static_attrs = ()
self.static_attrs = static_attrs
if amf3 is None:
if hasattr(class_, AMF3):
amf3 = getattr(class_, AMF3)
else:
amf3 = True
self.amf3 = amf3
self.encode_types = encode_types
self.decode_types = decode_types
def getStaticAttrVals(self, obj):
"""Returns a list of values of attributes defined in self.static_attrs
If this method is overridden to provide custom behavior, please note:
Returned values MUST BE IN THE SAME ORDER AS THEY APPEAR IN self.static_attrs.
arguments
==========
* obj - object, the object to get attribute values from.
"""
return [getattr(obj, attr, None) for attr in self.static_attrs]
def getInstance(self):
"""Returns an instance of the mapped class to be used
when an object of this type is deserialized.
"""
return self.class_.__new__(self.class_)
def applyAttrVals(self, obj, vals):
"""Set decoded attribute values on the object.
arguments
==========
* obj - object, the object to set the attribute values on.
* vals - dict, keys == attribute name, values == attribute values.
"""
[setattr(obj, key, val) for key, val in vals.iteritems()]
class DynamicClassDef(ClassDef):
"""A ClassDef with dynamic attributes."""
DYNAMIC_CLASS_DEF = True
def getDynamicAttrVals(self, obj, include_private=False):
"""Returns a dict where keys are attribute names and values are attribute values.
arguments
==========
obj - object, the object to get attributes for.
include_private - bool, if False do not include attributes with
names starting with '_'. Default = False.
"""
if self.include_private is None:
ip = include_private
else:
ip = self.include_private
return get_dynamic_attr_vals(obj, self.static_attrs, ip);
class ExternClassDef(ClassDef):
"""A ClassDef where the byte string encoding/decoding is customized.
The Actionscript version of the class must implement IExternalizeable.
"""
EXTERNALIZABLE_CLASS_DEF = True
def writeExternal(self, obj, context):
"""
This method must be overridden in a sub-class.
arguments
==========
* obj - object, The object that is being encoded.
* context - amfast.decoder.EncoderContext, holds encoding related properties.
"""
raise ClassDefError("This method must be implemented by a sub-class.")
def readExternal(self, obj, context):
"""
This method must be overridden in a sub-class.
arguments
==========
* obj - object, The object that the byte string is being applied to.
* context - amfast.decoder.DecoderContext, holds decoding related properties.
"""
raise ClassDefError("This method must be implemented by a sub-class.")
class _ProxyClassDef(ExternClassDef):
"""A special class used internally to encode/decode Proxied objects."""
PROXY_CLASS_DEF = True
PROXY_ALIAS = 'proxy'
class _ProxyObject(object):
"""Empty class used for mapping."""
pass
class _ArrayCollectionClassDef(_ProxyClassDef):
"""A special ClassDef used internally to encode/decode an ArrayCollection."""
ARRAY_COLLECTION_CLASS_DEF = True
PROXY_ALIAS = 'flex.messaging.io.ArrayCollection'
class _ObjectProxyClassDef(_ProxyClassDef):
"""A special ClassDef used internally to encode/decode an ObjectProxy."""
OBJECT_PROXY_CLASS_DEF = True
PROXY_ALIAS = 'flex.messaging.io.ObjectProxy'
class ClassDefMapper(object):
"""Map classes to ClassDefs, retrieve class_defs by class or alias name."""
def __init__(self):
"""
arguments
==========
* class_def_attr - string, an attribute with this name will be added
mapped classes. Default = '_amf_alias'
"""
self._lock = threading.RLock()
self._mapped_classes = {}
self._mapped_aliases = {}
self._mapBuiltIns()
def _mapBuiltIns(self):
"""Map built-in ClassDefs for default behavior."""
from as_types import AsError
from amfast.remoting import flex_messages as messaging
# Proxy objects
self.mapClass(_ArrayCollectionClassDef())
self.mapClass(_ObjectProxyClassDef())
# Exceptions
self.mapClass(ClassDef(AsError, _built_in=True))
self.mapClass(ClassDef(messaging.FaultError, _built_in=True))
# Flex remoting messages
self.mapClass(ClassDef(messaging.RemotingMessage, _built_in=True))
self.mapClass(messaging.AsyncSmallMsgDef(messaging.AsyncMessage,
alias="DSA", _built_in=True))
self.mapClass(ClassDef(messaging.AsyncMessage, _built_in=True))
self.mapClass(messaging.CommandSmallMsgDef(messaging.CommandMessage,
alias="DSC", _built_in=True))
self.mapClass(ClassDef(messaging.CommandMessage, _built_in=True))
self.mapClass(ClassDef(messaging.AcknowledgeMessage, _built_in=True))
self.mapClass(ClassDef(messaging.ErrorMessage, _built_in=True))
def mapClass(self, class_def):
"""Map a class_def implementation, so that it can be retrieved based on class attributes.
arguments
==========
* class_def - ClassDef, ClassDef being mapped.
"""
if not hasattr(class_def, 'CLASS_DEF'):
raise ClassDefError("class_def argument must be a ClassDef object.")
self._lock.acquire()
try:
self._mapped_classes[class_def.class_] = class_def
self._mapped_aliases[class_def.alias] = class_def
finally:
self._lock.release()
def getClassDefByClass(self, class_):
"""Get a ClassDef.
Returns None if ClassDef is not found.
arguments
==========
* class_ - class, the class to find a ClassDef for.
"""
return self._mapped_classes.get(class_, None)
def getClassDefByAlias(self, alias):
"""Get a ClassDef.
Returns None in not ClassDef is found.
arguments
==========
* alias - string, the alias to find a ClassDef for.
"""
return self._mapped_aliases.get(alias, None)
def unmapClass(self, class_):
"""Unmap a class definition.
arguments
==========
* class_ - class, the class to remove a ClassDef for.
"""
self._lock.acquire()
try:
for alias, klass in self._mapped_aliases.iteritems():
if class_ == klass:
del self._mapped_aliases[alias]
class_id = id(class_)
if class_id in self._mapped_classes:
del self._mapped_classes[class_id]
finally:
self._lock.release()
# ---- module attributes ---- #
def get_dynamic_attr_vals(obj, ignore_attrs=None, include_private=False):
"""Returns a dict of attribute values to encode.
keys = attribute names, values = attribute values.
argmuents
==========
* obj - object, object to get dynamic attribute values from.
* ignore_attrs - list or tuple of attributes to ignore. Default = empty tuple.
* include_private - bool, if False do not include attributes that start with '_'.
Default = False.
"""
vals = {}
if hasattr(obj, '__dict__'):
for attr, val in obj.__dict__.iteritems():
if ignore_attrs is not None:
if attr in ignore_attrs:
continue
if (include_private is False) and (attr.startswith('_')):
continue
vals[attr] = val
return vals
# These properties can be set on a class
# to map attributes within the class.
ALIAS = '_AMFAST_ALIAS'
STATIC_ATTRS = '_AMFAST_STATIC_ATTRS'
AMF3 = '_AMFAST_AMF3'
def assign_attrs(class_, alias=None, static_attrs=None, amf3=None):
"""
Use to map ClassDef attributes to a class. Useful if you want to keep
ClassDef configuration with the class being mapped, instead of at
the point where the ClassDef is created.
If you assign ClassDef attributes with this method, you can
call ClassDef(class_) to create a ClassDef, and the assigned
attributes will be applied to the new ClassDef.
Arguments provided to the ClassDef() will override attributes
that were assigned with this function.
arguments
==========
* class_ - class, the class to assign attributes to.
* alias - string, the amf alias name of the mapped class
* static_attrs - tuple, a tuple of static attribute names, all values must be strings or unicode
* amf3 - bool, if True, this object will be encoded in AMF3.
"""
if alias is not None:
setattr(class_, ALIAS, alias)
if static_attrs is not None:
setattr(class_, STATIC_ATTRS, static_attrs)
if amf3 is not None:
setattr(class_, AMF3, amf3)
| [
37811,
15946,
1460,
281,
7071,
329,
13213,
703,
11361,
5563,
389,
11389,
1143,
290,
390,
12,
46911,
1143,
526,
15931,
198,
11748,
4704,
278,
198,
198,
6738,
716,
7217,
1330,
1703,
22968,
12331,
198,
198,
4871,
5016,
7469,
12331,
7,
5840... | 2.521291 | 4,556 |
import os
from plot_testcase import plot_testcase
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
if __name__ == "__main__":
run_testcase()
| [
11748,
28686,
198,
6738,
7110,
62,
9288,
7442,
1330,
7110,
62,
9288,
7442,
198,
198,
2,
10097,
24305,
198,
198,
2,
10097,
24305,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
628,
220,
220,
220,
1057,
62,
9288,
... | 5.822222 | 45 |
import os
import argparse
from pydub import AudioSegment
from pydub.silence import split_on_silence
if __name__ == "__main__":
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, description="Split an audio file on silence.")
parser.add_argument('audio_file', help='the audio file to split')
parser.add_argument('--min_silence_len', default=400)
parser.add_argument('--silence_thresh', default=-36)
parser.add_argument('--keep_silence', default=400)
args = parser.parse_args()
result = splitAudio(os.path.join(os.getcwd(), args.audio_file), AudioSettingsContainer(args.min_silence_len, args.silence_thresh, args.keep_silence))
if not isinstance(result, int):
print(result)
else:
print(str(result) + " audio file(s) successfully created.") | [
11748,
28686,
198,
11748,
1822,
29572,
198,
198,
6738,
279,
5173,
549,
1330,
13491,
41030,
434,
198,
6738,
279,
5173,
549,
13,
18217,
594,
1330,
6626,
62,
261,
62,
18217,
594,
628,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
83... | 3.046154 | 260 |
from django import forms
from rest_framework_extras.tests import models
| [
6738,
42625,
14208,
1330,
5107,
198,
198,
6738,
1334,
62,
30604,
62,
2302,
8847,
13,
41989,
1330,
4981,
628,
628
] | 3.8 | 20 |
# Copyright (c) "Neo4j"
# Neo4j Sweden AB [http://neo4j.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
import pytest
from neo4j import (
Address,
AsyncResult,
Record,
ResultSummary,
ServerInfo,
SummaryCounters,
Version,
)
from neo4j._async_compat.util import AsyncUtil
from neo4j.data import DataHydrator
from neo4j.exceptions import ResultNotSingleError
from ...._async_compat import mark_async_test
@pytest.mark.parametrize("method",
("for loop", "next", "one iter", "new iter"))
@pytest.mark.parametrize("records", (
[],
[[42]],
[[1], [2], [3], [4], [5]],
))
@mark_async_test
@pytest.mark.parametrize("method",
("for loop", "next", "one iter", "new iter"))
@pytest.mark.parametrize("invert_fetch", (True, False))
@mark_async_test
@pytest.mark.parametrize("method",
("for loop", "next", "one iter", "new iter"))
@pytest.mark.parametrize("invert_fetch", (True, False))
@mark_async_test
@pytest.mark.parametrize("records", ([[1], [2]], [[1]], []))
@pytest.mark.parametrize("fetch_size", (1, 2))
@mark_async_test
@pytest.mark.parametrize("records", ([[1], [2]], [[1]], []))
@pytest.mark.parametrize("fetch_size", (1, 2))
@mark_async_test
@mark_async_test
@pytest.mark.parametrize("records", ([[1], [2]], [[1]], []))
@pytest.mark.parametrize("consume_one", (True, False))
@pytest.mark.parametrize("summary_meta", (None, {"database": "foobar"}))
@mark_async_test
@pytest.mark.parametrize("t_first", (None, 0, 1, 123456789))
@pytest.mark.parametrize("t_last", (None, 0, 1, 123456789))
@mark_async_test
@mark_async_test
@pytest.mark.parametrize("query_type", ("r", "w", "rw", "s"))
@mark_async_test
@pytest.mark.parametrize("num_records", range(0, 5))
@mark_async_test
| [
2,
15069,
357,
66,
8,
366,
8199,
78,
19,
73,
1,
198,
2,
21227,
19,
73,
10710,
9564,
685,
4023,
1378,
710,
78,
19,
73,
13,
785,
60,
198,
2,
198,
2,
770,
2393,
318,
636,
286,
21227,
19,
73,
13,
198,
2,
198,
2,
49962,
739,
26... | 2.459627 | 966 |
import importlib
import pytest
@pytest.fixture
@pytest.fixture
| [
198,
11748,
1330,
8019,
198,
198,
11748,
12972,
9288,
628,
198,
198,
31,
9078,
9288,
13,
69,
9602,
628,
198,
31,
9078,
9288,
13,
69,
9602,
198
] | 2.592593 | 27 |
import logging
from aiosmtpd.smtp import MISSING, SMTP, Session, syntax
from .config import Config
from .handlers import AUTH_REQUIRED
log = logging.getLogger(__name__)
| [
11748,
18931,
201,
198,
201,
198,
6738,
257,
4267,
16762,
30094,
13,
5796,
34788,
1330,
49684,
2751,
11,
9447,
7250,
11,
23575,
11,
15582,
201,
198,
201,
198,
6738,
764,
11250,
1330,
17056,
201,
198,
6738,
764,
4993,
8116,
1330,
37195,
... | 2.746269 | 67 |
nfreq_bands = 7
win_length_sec = 120
stride_sec = 120
n_channels = 16
n_timesteps = (600-win_length_sec)/stride_sec + 1
global_pooling = 1
nkerns = [16, 32, 512]
recept_width = [1, 1]
stride = [1, 1]
pool_width = [1, 1]
n_params = 0
c1_input_width = n_timesteps
print('c1:', nkerns[0], '@', ((n_timesteps - recept_width[0]) / stride[0] + 1) / pool_width[0])
n_params += (n_channels * nfreq_bands * recept_width[0] + 1) * nkerns[0]
c2_input_width = ((n_timesteps - recept_width[0]) / stride[0] + 1) / pool_width[0]
print('c2:', nkerns[1], '@', ((c2_input_width - recept_width[1]) / stride[1] + 1) / pool_width[1])
n_params += (nkerns[0]*recept_width[1] + 1)*nkerns[1]
if global_pooling:
f3_input_size = 6*nkerns[1]
else:
f3_input_size = nkerns[1]*((c2_input_width - recept_width[1]) / stride[1] + 1) / pool_width[1]
n_params += f3_input_size * nkerns[2] + 1
print('number of parameters', n_params)
| [
77,
19503,
80,
62,
21397,
796,
767,
201,
198,
5404,
62,
13664,
62,
2363,
796,
7982,
201,
198,
2536,
485,
62,
2363,
796,
7982,
201,
198,
77,
62,
354,
8961,
796,
1467,
201,
198,
77,
62,
16514,
395,
25386,
796,
357,
8054,
12,
5404,
... | 2.047826 | 460 |
""" Default configuration and hyperparameter values for dynamics objects. """
# DynamicsPriorGMM
DYN_PRIOR_GMM = {
'min_samples_per_cluster': 20,
'max_clusters': 50,
'max_samples': 20,
'strength': 1.0,
}
| [
37811,
15161,
8398,
290,
8718,
17143,
2357,
3815,
329,
17262,
5563,
13,
37227,
198,
198,
2,
33806,
22442,
38,
12038,
198,
35,
40760,
62,
4805,
41254,
62,
38,
12038,
796,
1391,
198,
220,
220,
220,
705,
1084,
62,
82,
12629,
62,
525,
6... | 2.569767 | 86 |
from floodsystem.stationdata import build_station_list
from floodsystem.geo import stations_by_distance, stations_with_radius, rivers_with_stations, stations_by_river
import itertools
stations = build_station_list()
p = (52.2053, 0.1218) # coords of cambridge
print("closest 10 stations from cambridge: {}".format(stations_by_distance(stations, p)[0:10]))
print("furthest 10 stations from cambridge: {}".format(stations_by_distance(stations, p)[-10:])) | [
6738,
6947,
10057,
13,
17529,
7890,
1330,
1382,
62,
17529,
62,
4868,
198,
6738,
6947,
10057,
13,
469,
78,
1330,
8985,
62,
1525,
62,
30246,
11,
8985,
62,
4480,
62,
42172,
11,
18180,
62,
4480,
62,
301,
602,
11,
8985,
62,
1525,
62,
3... | 3.095238 | 147 |
import matplotlib.pyplot as plt
from ann import attractor_network as AA
from ann import pattern as p
import numpy as np
N = 1024 # Number of neurons
K = 4 # Degree
a = 0.5 # Sparseness
T = 800 # Steps
# Create network with same parameters as above
activity_net = AA.SimpleAttractor(N, K, a, "ring")
print(activity_net)
# Make topology (random by default)
activity_net.generate_topology()
# Make weights (random -1, 1)
activity_net.make_weights()
# Initializa network state (binary random with a activity)
activity_net.make_initialization()
print(activity_net)v
t1 = datetime.datetime.now()
# Update network T steps (returns activity for each step)
activity = activity_net.update_steps(T)
t2 = datetime.datetime.now()
print(t2-t1)
# Plot activity
plt.figure(figsize=(20,6))
plt.plot(activity)
plt.show() | [
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
6738,
1529,
1330,
4729,
273,
62,
27349,
355,
15923,
198,
6738,
1529,
1330,
3912,
355,
279,
198,
11748,
299,
32152,
355,
45941,
198,
198,
45,
796,
28119,
1303,
7913,
286,
16... | 3.01487 | 269 |
'''This program contains the functions averageFinder and midpointFinder. AverageData calculates the averages of the
"columns" of a list of numbers (a list of lists of numbers) for real and fake samples (separately) and midpointFinder
finds the midpoint between the real and fake averages.
Data is either given from the test case or from user input, which is run through incomingData.
Assignment 2: classifier_builder
Name: Anna Wood
Student Number: 20091785
NetID: 17aaw2'''
def averageFinder(sample_data):
'''will take a list of attributes and:
averageFinder calculates the average of each of the attributes across all the samples with the
same classification (0 or 1)
input: sample list / list of numbers
output: none, averages are passed to midpointFinder
note - 1 IS REAL 0 IS COUNTERFEIT
'''
real_avgs_counter = 0
counter_avgs_counter = 0
real_avgs = []
counter_avgs = []
avg_len_real = 0
indx = 0
while indx < 4: # while-loop that sums each attribute and adds it to the list of its category (real or counter)
for i in range(0,len(sample_data)): # loop to separate data into 0 and 1
if sample_data[i][4] == 1:
real_avgs_counter += sample_data[i][indx]# if real, attribute is summed in counter
avg_len_real = avg_len_real + 1 /4 # used to count the length of how many real bills
elif sample_data[i][4] == 0: # attribute sum for counterfeit bills
counter_avgs_counter += sample_data[i][indx]
real_avgs.append(real_avgs_counter) # after each attribute is summed it is added to the final list
counter_avgs.append(counter_avgs_counter)
real_avgs_counter = 0 # counters are reset to 0 after each list
counter_avgs_counter = 0
indx += 1 # index for counting the "columns"
avg_len_counter = len(sample_data) - avg_len_real # number of real / counter bills calculated for finding the average
for i in range(0, 4): # divides the real, counterfeit sums by the amount of real & counterfeit items respectively
real_avgs[i] = round((real_avgs[i] / avg_len_real), 3)
counter_avgs[i] = round((counter_avgs[i] / avg_len_counter), 3) # each average rounded to 3 decimal points
return real_avgs, counter_avgs
def midpointFinder(real_avgs, counter_avgs):
'''part 2 of the building classifier, takes the averages of the real and and fake samples and finds
the midpoint (divides by 2). midpoints list should then be returned to classifier
for further classifying
input: averages of real, fake samples
output: midpoints (returned to incomingData)'''
midpoints = [] # empty list for midpoints
for i in range(0,4): # finds midpoints by adding averages and dividing by 2
midpoint = (real_avgs[i] + counter_avgs[i]) / 2
midpoints.append(round(midpoint,3))
return midpoints #returns midpoints to incomingData
def incomingData(training_data):
'''function runs from here when data is passed from our main interface
input: training_data
output: midpoints'''
real_avgs, counter_avgs = averageFinder(training_data)
midpoints = midpointFinder(real_avgs, counter_avgs)
return midpoints # midpoints returned to main interface
if __name__ == '__main__':
sample_data_main = [[ 3, 8, -2, 0, 0], [4, 8, -2, -1,0],[3, -2, 1, 0, 0], [2, 1, 0, -2, 0], # fake samples (5th item 0)
[0, 3, -3, -2, 1], [-3, 3, 0, -3, 1],
[-6, 7, 0, -3, 1] ] # real samples (5th item is 1)
real_avgs , counter_avgs = averageFinder(sample_data_main)
midpoints = midpointFinder(real_avgs, counter_avgs)
print('real averages (test case)',real_avgs, 'should be -3 , 4.333, -1. -2.667')
print('counter averages (test case)',counter_avgs, 'should be 3, 3.75, -0.75, -0.75')
print('midpoints (test case)', midpoints, 'should be 0, 4.041 ish, -0.875, -1.708')
| [
7061,
6,
1212,
1430,
4909,
262,
5499,
2811,
37,
5540,
290,
3095,
4122,
37,
5540,
13,
13475,
6601,
43707,
262,
25694,
286,
262,
198,
1,
28665,
82,
1,
286,
257,
1351,
286,
3146,
357,
64,
1351,
286,
8341,
286,
3146,
8,
329,
1103,
290... | 2.753996 | 1,439 |
#! /usr/bin/python3
# @Author: allen
# @Date: Nov 29 11:07 2019
import configparser
import json
import locale
import os
import re
import sys
from collections import OrderedDict
import click
from dialog import Dialog
__author__ = 'Allen Shaw'
__version__ = '0.1.1'
CONFIG = os.path.expanduser('~/.config/gitclk/config/config.json')
TEMPERATE_CONFIG = os.path.expanduser('~/.config/gitclk/config/temperate.json')
@click.group(help='Git remotes setting.')
@click.command('config', help='Configure the git platforms.')
@click.option('-e', '--edit', 'edit', is_flag=True, default=False, help='Edit the config file.')
@click.command('set', help='Set remotes setting to git config.')
@click.option('-a', '--all', 'set_all', is_flag=True, default=False, show_default=True,
help='Set all remotes include ignored.')
@click.option('-n', '--repository-name', 'repo', required=True, help='The repository name.')
cli.add_command(config)
cli.add_command(set_remotes)
if __name__ == '__main__':
GIT_CONFIG = check_repository()
if GIT_CONFIG is False:
click.echo('fatal: not in a git directory')
sys.exit(1)
cli()
| [
2,
0,
1220,
14629,
14,
8800,
14,
29412,
18,
198,
2,
2488,
13838,
25,
477,
268,
198,
2,
2488,
10430,
25,
5267,
2808,
1367,
25,
2998,
13130,
198,
11748,
4566,
48610,
198,
11748,
33918,
198,
11748,
36693,
198,
11748,
28686,
198,
11748,
... | 2.682135 | 431 |
import unittest
from src import validators
if __name__ == '__main__':
unittest.main()
| [
11748,
555,
715,
395,
198,
6738,
12351,
1330,
4938,
2024,
628,
628,
628,
628,
628,
628,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
555,
715,
395,
13,
12417,
3419,
198
] | 2.666667 | 39 |
from tvsched.application.models.schedule import EpisodeInSchedule, ShowInSchedule
class ShowAlreadyExistsInScheduleError(Exception):
"""Will be raised when trying to add already existed in schedule show to schedule"""
@property
class ShowOrScheduleNotFoundError(Exception):
"""Will be raised when trying to add not existed show to schedule
or show to not existed schedule
"""
@property
class EpisodeOrScheduleNotFoundError(Exception):
"""Will be raised when trying to add not existed episode to schedule
or episode to not existed schedule
"""
@property
class EpisodeAlreadyMarkedAsWatchedError(Exception):
"""Will be raised when trying to mark as watched
already marked episode in schedule.
"""
@property
| [
6738,
31557,
1416,
704,
13,
31438,
13,
27530,
13,
15952,
5950,
1330,
7922,
818,
27054,
5950,
11,
5438,
818,
27054,
5950,
628,
198,
4871,
5438,
37447,
3109,
1023,
818,
27054,
5950,
12331,
7,
16922,
2599,
198,
220,
220,
220,
37227,
8743,
... | 3.629108 | 213 |
import os
from mock import patch
import json
from tests.unit.backend.corpora.api_server.base_api_test import BaseAuthAPITest
| [
11748,
28686,
198,
6738,
15290,
1330,
8529,
198,
11748,
33918,
198,
198,
6738,
5254,
13,
20850,
13,
1891,
437,
13,
10215,
38851,
13,
15042,
62,
15388,
13,
8692,
62,
15042,
62,
9288,
1330,
7308,
30515,
2969,
2043,
395,
628
] | 3.25641 | 39 |
from django.db import models
| [
6738,
42625,
14208,
13,
9945,
1330,
4981,
628
] | 3.75 | 8 |
from __future__ import print_function, absolute_import
from qtpy.QtCore import Qt, Slot, QRectF, QPointF
from qtpy.QtGui import QPen, QBrush, QColor
from qtpy.QtWidgets import QGraphicsObject, QGraphicsRectItem, QGraphicsItemGroup, \
QGraphicsSimpleTextItem, QGraphicsEllipseItem, QGraphicsLineItem
from .mapitems import MapItem
from .functions import makePen, makeBrush
from .qtsupport import getQVariantValue
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
11,
4112,
62,
11748,
198,
198,
6738,
10662,
83,
9078,
13,
48,
83,
14055,
1330,
33734,
11,
32026,
11,
42137,
478,
37,
11,
1195,
12727,
37,
198,
6738,
10662,
83,
9078,
13,
48,
83,
8205,
... | 3.043478 | 138 |
''' show_lisp.py
IOSXE parsers for the following show commands:
* show lisp session
* show lisp platform
* show lisp all extranet <extranet> instance-id <instance_id>
* show lisp all instance-id <instance_id> dynamic-eid detail
* show lisp all service ipv4
* show lisp all service ipv6
* show lisp all service ethernet
* show lisp all instance-id <instance_id> ipv4
* show lisp all instance-id <instance_id> ipv6
* show lisp all instance-id <instance_id> ethernet
* show lisp all instance-id <instance_id> ipv4 map-cache
* show lisp all instance-id <instance_id> ipv6 map-cache
* show lisp all instance-id <instance_id> ethernet map-cache
* show lisp all instance-id <instance_id> ipv4 server rloc members
* show lisp all instance-id <instance_id> ipv6 server rloc members
* show lisp all instance-id <instance_id> ethernet server rloc members
* show lisp all instance-id <instance_id> ipv4 smr
* show lisp all instance-id <instance_id> ipv6 smr
* show lisp all instance-id <instance_id> ethernet smr
* show lisp all service ipv4 summary
* show lisp all service ipv6 summary
* show lisp all service ethernet summary
* show lisp all instance-id <instance_id> ipv4 database
* show lisp all instance-id <instance_id> ipv6 database
* show lisp all instance-id <instance_id> ethernet database
* show lisp all instance-id <instance_id> ipv4 server summary
* show lisp all instance-id <instance_id> ipv6 server summary
* show lisp all instance-id <instance_id> ethernet server summary
* show lisp all instance-id <instance_id> ipv4 server detail internal
* show lisp all instance-id <instance_id> ipv6 server detail internal
* show lisp all instance-id <instance_id> ethernet server detail internal
* show lisp all instance-id <instance_id> ipv4 statistics
* show lisp all instance-id <instance_id> ipv6 statistics
* show lisp all instance-id <instance_id> ethernet statistics
'''
# Python
import re
# Metaparser
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, Any, Or, Optional
from genie.libs.parser.utils.common import Common
# ==============================
# Schema for 'show lisp session'
# ==============================
class ShowLispSessionSchema(MetaParser):
''' Schema for "show lisp session" '''
schema = {
'vrf':
{Any():
{'sessions':
{'total': int,
'established': int,
'peers':
{Any():
{'state': str,
'time': str,
'total_in': int,
'total_out': int,
'users': int,
},
},
},
},
},
}
# ==============================
# Parser for 'show lisp session'
# ==============================
class ShowLispSession(ShowLispSessionSchema):
''' Parser for "show lisp session"'''
cli_command = 'show lisp session'
exclude = ['time']
# ===============================
# Schema for 'show lisp platform'
# ===============================
class ShowLispPlatformSchema(MetaParser):
''' Schema for "show lisp platform" '''
schema = {
'parallel_lisp_instance_limit': int,
'rloc_forwarding_support':
{'local':
{'ipv4': str,
'ipv6': str,
'mac': str,
},
'remote':
{'ipv4': str,
'ipv6': str,
'mac': str,
},
},
'latest_supported_config_style': str,
'current_config_style': str,
}
# ==============================
# Parser for 'show lisp platform'
# ==============================
class ShowLispPlatform(ShowLispPlatformSchema):
''' Parser for "show lisp platform" '''
cli_command = 'show lisp platform'
# ========================================================================
# Schema for 'show lisp all extranet <extranet> instance-id <instance_id>'
# ========================================================================
class ShowLispExtranetSchema(MetaParser):
''' Schema for "show lisp all extranet <extranet> instance-id <instance_id>"'''
schema = {
'lisp_router_instances':
{Any():
{Optional('service'):
{Any():
{Optional('map_server'):
{Optional('virtual_network_ids'):
{'total_extranet_entries': int,
Any():
{'vni': str,
'extranets':
{Any():
{'extranet': str,
'home_instance_id': int,
Optional('provider'):
{Any():
{'eid_record': str,
'bidirectional': bool,
},
},
Optional('subscriber'):
{Any():
{'eid_record': str,
'bidirectional': bool,
},
},
},
},
},
},
},
},
},
},
},
}
# ========================================================================
# Parser for 'show lisp all extranet <extranet> instance-id <instance_id>'
# ========================================================================
class ShowLispExtranet(ShowLispExtranetSchema):
''' Parser for "show lisp all extranet <extranet> instance-id <instance_id>"'''
cli_command = 'show lisp all extranet {extranet} instance-id {instance_id}'
# =======================================================================
# Schema for 'show lisp all instance-id <instance_id> dynamic-eid detail'
# =======================================================================
class ShowLispDynamicEidDetailSchema(MetaParser):
    ''' Schema for "show lisp all instance-id <instance_id> dynamic-eid detail" '''
    # Per router instance -> service -> ETR: the dynamic EIDs of each local
    # EID table, including mapping servers and last-discovery details.
    schema = {
        'lisp_router_instances':
            {Any():
                {Optional('service'):
                    {Any():
                        {'etr':
                            {'local_eids':
                                {Any():
                                    {'dynamic_eids':
                                        {Any():
                                            {'dynamic_eid_name': str,
                                            'id': str,
                                            'rlocs': str,
                                            Optional('registering_more_specific'): bool,
                                            Optional('loopback_address'): str,
                                            Optional('priority'): int,
                                            Optional('weight'): int,
                                            Optional('record_ttl'): int,
                                            Optional('site_based_multicast_map_notify_group'): str,
                                            Optional('proxy_reply'): bool,
                                            Optional('registration_interval'): int,
                                            Optional('global_map_server'): bool,
                                            Optional('num_of_roaming_dynamic_eid'): int,
                                            Optional('mapping_servers'):
                                                {Any():
                                                    {Optional('proxy_reply'): bool,
                                                    },
                                                },
                                            Optional('last_dynamic_eid'):
                                                {Any():
                                                    {'last_dynamic_eid_discovery_elaps_time': str,
                                                    'eids':
                                                        {Any():
                                                            {'interface': str,
                                                            'uptime': str,
                                                            'last_activity': str,
                                                            'discovered_by': str,
                                                            },
                                                        },
                                                    },
                                                },
                                            Optional('eid_address'):
                                                {Optional('address_type'): str,
                                                Optional('virtual_network_id'): str,
                                                },
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# =======================================================================
# Parser for 'show lisp all instance-id <instance_id> dynamic-eid detail'
# =======================================================================
class ShowLispDynamicEidDetail(ShowLispDynamicEidDetailSchema):
    ''' Parser for "show lisp all instance-id <instance_id> dynamic-eid detail"'''
    # CLI command template; {instance_id} is filled in by the caller.
    cli_command = 'show lisp all instance-id {instance_id} dynamic-eid detail'
# ==============================================================
# Schema for 'show lisp all instance-id <instance_id> <service>'
# ==============================================================
class ShowLispServiceSchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service>" '''
    # Per router instance -> service: general device role information (ETR,
    # ITR, map-server, map-resolver), per-instance database/map-cache limits
    # and locator status algorithm settings.
    schema = {
        'lisp_router_instances':
            {Any():
                {'lisp_router_instance_id': int,
                Optional('lisp_router_id'):
                    {'site_id': str,
                    'xtr_id': str,
                    },
                Optional('service'):
                    {Any():
                        {'service': str,
                        'delegated_database_tree': bool,
                        'locator_table': str,
                        'mobility_first_hop_router': bool,
                        'nat_traversal_router': bool,
                        'instance_id':
                            {Any():
                                {Optional('eid_table'): str,
                                Optional('site_registration_limit'): int,
                                Optional('map_request_source'): str,
                                'database':
                                    {Optional('dynamic_database_limit'): int,
                                    Optional('dynamic_database_size'): int,
                                    Optional('inactive_deconfig_away_size'): int,
                                    Optional('route_import_database_limit'): int,
                                    Optional('route_import_database_size'): int,
                                    Optional('static_database_size'): int,
                                    Optional('static_database_limit'): int,
                                    Optional('total_database_mapping_size'): int,
                                    Optional('dynamic_database_mapping_limit'): int,
                                    Optional('import_site_db_size'): int,
                                    Optional('import_site_db_limit'): int,
                                    Optional('proxy_db_size'): int,
                                    },
                                Optional('mapping_servers'):
                                    {Any():
                                        {'ms_address': str,
                                        Optional('uptime'): str,
                                        },
                                    },
                                'itr':
                                    {'local_rloc_last_resort': str,
                                    Optional('use_proxy_etr_rloc'): str,
                                    },
                                Optional('map_cache'):
                                    {Optional('imported_route_count'): int,
                                    Optional('imported_route_limit'): int,
                                    Optional('map_cache_size'): int,
                                    Optional('persistent_map_cache'): bool,
                                    Optional('static_mappings_configured'): int,
                                    },
                                },
                            },
                        'etr':
                            {'enabled': bool,
                            Optional('encapsulation'): str,
                            'proxy_etr_router': bool,
                            'accept_mapping_data': str,
                            'map_cache_ttl': str,
                            Optional('use_petrs'):
                                {Any():
                                    {'use_petr': str,
                                    },
                                },
                            Optional('mapping_servers'):
                                {Any():
                                    {'ms_address': str,
                                    Optional('uptime'): str,
                                    },
                                },
                            },
                        'itr':
                            {'enabled': bool,
                            'proxy_itr_router': bool,
                            Optional('proxy_itrs'):
                                {Any():
                                    {'proxy_etr_address': str,
                                    },
                                },
                            'solicit_map_request': str,
                            'max_smr_per_map_cache_entry': str,
                            'multiple_smr_suppression_time': int,
                            Optional('map_resolvers'):
                                {Any():
                                    {'map_resolver': str,
                                    },
                                },
                            },
                        'locator_status_algorithms':
                            {'rloc_probe_algorithm': bool,
                            'rloc_probe_on_route_change': str,
                            'rloc_probe_on_member_change': bool,
                            'lsb_reports': str,
                            'ipv4_rloc_min_mask_len': int,
                            'ipv6_rloc_min_mask_len': int,
                            },
                        'map_cache':
                            {'map_cache_activity_check_period': int,
                            Optional('map_cache_fib_updates'): str,
                            'map_cache_limit': int,
                            },
                        'map_server':
                            {'enabled': bool,
                            },
                        'map_resolver':
                            {'enabled': bool,
                            },
                        Optional('source_locator_configuration'):
                            {'vlans':
                                {Any():
                                    {'address': str,
                                    'interface': str,
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# ==============================================================
# Parser for 'show lisp all instance-id <instance_id> <service>'
# ==============================================================
class ShowLispService(ShowLispServiceSchema):
    '''Parser for "show lisp all instance-id <instance_id> <service>"'''
    # Two CLI variants: with an explicit instance-id, or for all instances of a service.
    cli_command = ['show lisp all instance-id {instance_id} {service}','show lisp all service {service}']
# ========================================================================
# Schema for 'show lisp all instance-id <instance_id> <service> map-cache'
# ========================================================================
class ShowLispServiceMapCacheSchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service> map-cache" '''
    # Per router instance -> service -> ITR map-cache: each cached mapping
    # with its EID, and either a negative map-reply action or positive RLOCs.
    schema = {
        'lisp_router_instances':
            {Any():
                {'lisp_router_instance_id': int,
                Optional('service'):
                    {Any():
                        {'service': str,
                        'itr':
                            {'map_cache':
                                {Any():
                                    {'vni': str,
                                    'entries': int,
                                    'mappings':
                                        {Any():
                                            {'id': str,
                                            'creation_time': str,
                                            'time_to_live': str,
                                            'via': str,
                                            'eid':
                                                {'address_type': str,
                                                'vrf': str,
                                                Optional('ipv4'):
                                                    {'ipv4': str,
                                                    },
                                                Optional('ipv4_prefix'):
                                                    {'ipv4_prefix': str,
                                                    },
                                                Optional('ipv6'):
                                                    {'ipv6': str,
                                                    },
                                                Optional('ipv6_prefix'):
                                                    {'ipv6_prefix': str,
                                                    },
                                                },
                                            Optional('negative_mapping'):
                                                {'map_reply_action': str,
                                                },
                                            Optional('positive_mapping'):
                                                {'rlocs':
                                                    {Any():
                                                        {'id': str,
                                                        'uptime': str,
                                                        'state': str,
                                                        'priority': int,
                                                        'weight': int,
                                                        Optional('encap_iid'): str,
                                                        'locator_address':
                                                            {'address_type': str,
                                                            'virtual_network_id': str,
                                                            Optional('ipv4'):
                                                                {'ipv4': str,
                                                                },
                                                            Optional('ipv4_prefix'):
                                                                {'ipv4_prefix': str,
                                                                },
                                                            Optional('ipv6'):
                                                                {'ipv6': str,
                                                                },
                                                            Optional('ipv6_prefix'):
                                                                {'ipv6_prefix': str,
                                                                },
                                                            },
                                                        },
                                                    },
                                                },
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# ========================================================================
# Parser for 'show lisp all instance-id <instance_id> <service> map-cache'
# ========================================================================
class ShowLispServiceMapCache(ShowLispServiceMapCacheSchema):
    '''Parser for "show lisp all instance-id <instance_id> <service> map-cache"'''
    cli_command = 'show lisp all instance-id {instance_id} {service} map-cache'
    # 'creation_time' changes on every run; exclude it from output comparisons.
    exclude = ['creation_time']
# ===========================================================================
# Schema for 'show lisp all instance-id <instance_id> <service> rloc members'
# ===========================================================================
class ShowLispServiceRlocMembersSchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service> rloc members" '''
    # Per router instance -> service -> instance-id: RLOC member entries with
    # their origin and validity.
    schema = {
        'lisp_router_instances':
            {Any():
                {'lisp_router_instance_id': int,
                Optional('service'):
                    {Optional(Any()):
                        {'instance_id':
                            {Any():
                                {Optional('rloc'):
                                    {'total_entries': int,
                                    'valid_entries': int,
                                    'distribution': bool,
                                    'members':
                                        {Any():
                                            {'origin': str,
                                            'valid': str,
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# ===========================================================================
# Parser for 'show lisp all instance-id <instance_id> <service> rloc members'
# ===========================================================================
class ShowLispServiceRlocMembers(ShowLispServiceRlocMembersSchema):
    '''Parser for "show lisp all instance-id <instance_id> <service> rloc members"'''
    # Note the extra 'service' keyword in this CLI variant.
    cli_command = 'show lisp all instance-id {instance_id} service {service} rloc members'
# ==================================================================
# Schema for 'show lisp all instance-id <instance_id> <service> smr'
# ==================================================================
class ShowLispServiceSmrSchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service> smr" '''
    # Per router instance -> service -> instance-id: solicit-map-request
    # (SMR) prefixes and the producer of each.
    schema = {
        'lisp_router_instances':
            {Any():
                {'lisp_router_instance_id': int,
                Optional('service'):
                    {Optional(Any()):
                        {'instance_id':
                            {Any():
                                {Optional('smr'):
                                    {'vrf': str,
                                    'entries': int,
                                    'prefixes':
                                        {Any():
                                            {'producer': str,
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# ==================================================================
# Parser for 'show lisp all instance-id <instance_id> <service> smr'
# ==================================================================
class ShowLispServiceSmr(ShowLispServiceSmrSchema):
    '''Parser for "show lisp all instance-id <instance_id> <service> smr"'''
    # Note the extra 'service' keyword in this CLI variant.
    cli_command = 'show lisp all instance-id {instance_id} service {service} smr'
# ====================================================
# Schema for 'show lisp all service <service> summary'
# ====================================================
class ShowLispServiceSummarySchema(MetaParser):
    '''Schema for "show lisp all <service> summary" '''
    # Per router instance -> service: per-VNI database/cache counters plus an
    # ETR-wide summary of table and entry counts.
    schema = {
        'lisp_router_instances':
            {Any():
                {'lisp_router_instance_id': int,
                Optional('service'):
                    {Optional(Any()):
                        {Optional('virtual_network_ids'):
                            {Any():
                                {Optional('vrf'): str,
                                'interface': str,
                                'db_size': int,
                                'db_no_route': int,
                                'cache_size': int,
                                'incomplete': str,
                                'cache_idle': str,
                                'lisp_role':
                                    {Any():
                                        {'lisp_role_type': str,
                                        },
                                    },
                                },
                            },
                        'etr':
                            {'summary':
                                {'instance_count': int,
                                'total_eid_tables': int,
                                'total_db_entries': int,
                                'total_map_cache_entries': int,
                                'total_db_entries_inactive': int,
                                'eid_tables_inconsistent_locators': int,
                                'eid_tables_incomplete_map_cache_entries': int,
                                'eid_tables_pending_map_cache_update_to_fib': int,
                                },
                            },
                        },
                    },
                },
            },
        }
# ====================================================
# Parser for 'show lisp all service <service> summary'
# ====================================================
class ShowLispServiceSummary(ShowLispServiceSummarySchema):
    '''Parser for "show lisp all service <service> summary"'''
    cli_command = 'show lisp all service {service} summary'
# =======================================================================
# Schema for 'show lisp all instance-id <instance_id> <service> database'
# =======================================================================
class ShowLispServiceDatabaseSchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service> database" '''
    # Per router instance: configured locator sets, and per service -> ETR the
    # static and dynamic EIDs of each local EID table.
    schema = {
        'lisp_router_instances':
            {Any():
                {'lisp_router_instance_id': int,
                'locator_sets':
                    {Any():
                        {'locator_set_name': str,
                        },
                    },
                Optional('service'):
                    {Optional(Any()):
                        {'etr':
                            {'local_eids':
                                {Any():
                                    {'vni': str,
                                    'total_eid_entries': int,
                                    'no_route_eid_entries': int,
                                    'inactive_eid_entries': int,
                                    Optional('dynamic_eids'):
                                        {Any():
                                            {'id': str,
                                            Optional('dynamic_eid'): str,
                                            'eid_address':
                                                {'address_type': str,
                                                'vrf': str,
                                                },
                                            'rlocs': str,
                                            'loopback_address': str,
                                            'priority': int,
                                            'weight': int,
                                            'source': str,
                                            'state': str,
                                            },
                                        },
                                    Optional('eids'):
                                        {Any():
                                            {'id': str,
                                            'eid_address':
                                                {'address_type': str,
                                                'vrf': str,
                                                },
                                            'rlocs': str,
                                            'loopback_address': str,
                                            'priority': int,
                                            'weight': int,
                                            'source': str,
                                            'state': str,
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# =======================================================================
# Parser for 'show lisp all instance-id <instance_id> <service> database'
# =======================================================================
class ShowLispServiceDatabase(ShowLispServiceDatabaseSchema):
    '''Parser for "show lisp all instance-id <instance_id> <service> database"'''
    cli_command = 'show lisp all instance-id {instance_id} {service} database'
# =============================================================================
# Schema for 'show lisp all instance-id <instance_id> <service> server summary'
# =============================================================================
class ShowLispServiceServerSummarySchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service> server summary" '''
    # Per router instance -> service -> instance-id map-server: per-site
    # registration counters and an overall registration summary.
    schema = {
        'lisp_router_instances':
            {Any():
                {'lisp_router_instance_id': int,
                'service':
                    {Any():
                        {'instance_id':
                            {Any():
                                {'map_server':
                                    {Optional('sites'):
                                        {Any():
                                            {'site_id': str,
                                            'configured': int,
                                            'registered': int,
                                            'inconsistent': int,
                                            },
                                        },
                                    'summary':
                                        {'number_configured_sites': int,
                                        'number_registered_sites':int,
                                        Optional('af_datum'):
                                            {Any():
                                                {'address_type': str,
                                                Optional('number_configured_eids'): int,
                                                Optional('number_registered_eids'): int,
                                                },
                                            },
                                        'sites_with_inconsistent_registrations': int,
                                        Optional('site_registration_limit'): int,
                                        Optional('site_registration_count'): int,
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# =============================================================================
# Parser for 'show lisp all instance-id <instance_id> <service> server summary'
# =============================================================================
class ShowLispServiceServerSummary(ShowLispServiceServerSummarySchema):
    '''Parser for "show lisp all instance-id <instance_id> <service> server summary"'''
    cli_command = 'show lisp all instance-id {instance_id} {service} server summary'
# =====================================================================================
# Schema for 'show lisp all instance-id <instance_id> <service> server detail internal'
# =====================================================================================
class ShowLispServiceServerDetailInternalSchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service> server detail internal" '''
    # Per router instance -> service -> map-server: site definitions plus the
    # detailed registration state of each EID mapping per virtual network.
    schema = {
        'lisp_router_instances':
            {Any():
                {Optional('service'):
                    {Any():
                        {'map_server':
                            {'sites':
                                {Any():
                                    {'site_id': str,
                                    'allowed_configured_locators': str,
                                    },
                                },
                            Optional('virtual_network_ids'):
                                {Any():
                                    {'vni': str,
                                    'mappings':
                                        {Any():
                                            {'eid_id': str,
                                            'eid_address':
                                                {'address_type': str,
                                                'virtual_network_id': str,
                                                Optional('ipv4'):
                                                    {'ipv4': str,
                                                    },
                                                Optional('ipv6'):
                                                    {'ipv6': str,
                                                    },
                                                Optional('ipv4_prefix'):
                                                    {'ipv4_prefix': str,
                                                    },
                                                Optional('ipv6_prefix'):
                                                    {'ipv6_prefix': str,
                                                    },
                                                },
                                            'site_id': str,
                                            'first_registered': str,
                                            'last_registered': str,
                                            'routing_table_tag': int,
                                            'origin': str,
                                            Optional('more_specifics_accepted'): bool,
                                            'merge_active': bool,
                                            'proxy_reply': bool,
                                            'ttl': str,
                                            'state': str,
                                            'registration_errors':
                                                {'authentication_failures': int,
                                                'allowed_locators_mismatch': int,
                                                },
                                            Optional('mapping_records'):
                                                {Any():
                                                    {'xtr_id': str,
                                                    'site_id': str,
                                                    'etr': str,
                                                    'eid':
                                                        {'address_type': str,
                                                        'virtual_network_id': str,
                                                        Optional('ipv4'):
                                                            {'ipv4': str,
                                                            },
                                                        Optional('ipv6'):
                                                            {'ipv6': str,
                                                            },
                                                        Optional('ipv4_prefix'):
                                                            {'ipv4_prefix': str,
                                                            },
                                                        Optional('ipv6_prefix'):
                                                            {'ipv6_prefix': str,
                                                            },
                                                        },
                                                    'ttl': str,
                                                    'time_to_live': int,
                                                    'creation_time': str,
                                                    'merge': bool,
                                                    'proxy_reply': bool,
                                                    'map_notify': bool,
                                                    'hash_function': str,
                                                    'nonce': str,
                                                    'state': str,
                                                    'security_capability': bool,
                                                    'sourced_by': str,
                                                    'locator':
                                                        {Any():
                                                            {'local': bool,
                                                            'state': str,
                                                            'priority': int,
                                                            'weight': int,
                                                            'scope': str,
                                                            },
                                                        },
                                                    },
                                                },
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# =====================================================================================
# Parser for 'show lisp all instance-id <instance_id> <service> server detail internal'
# =====================================================================================
class ShowLispServiceServerDetailInternal(ShowLispServiceServerDetailInternalSchema):
    '''Parser for "show lisp all instance-id <instance_id> <service> server detail internal"'''
    cli_command = 'show lisp all instance-id {instance_id} {service} server detail internal'
# =========================================================================
# Schema for 'show lisp all instance-id <instance_id> <service> statistics'
# =========================================================================
class ShowLispServiceStatisticsSchema(MetaParser):
    '''Schema for "show lisp all instance-id <instance_id> <service> statistics" '''
    # Per router instance -> service -> statistics: free-form counters
    # (the inner 'Any(): Any()' admits arbitrary counter keys) plus
    # per-map-resolver request/reply statistics.
    schema = {
        'lisp_router_instances':
            {Any():
                {'service':
                    {Any():
                        {'statistics':
                            {Any():
                                {'last_cleared': str,
                                Any(): Any(),
                                Optional('map_resolvers'):
                                    {Any():
                                        {'last_reply': str,
                                        'metric': str,
                                        'reqs_sent': int,
                                        'positive': int,
                                        'negative': int,
                                        'no_reply': int,
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        }
# =========================================================================
# Parser for 'show lisp all instance-id <instance_id> <service> statistics'
# =========================================================================
class ShowLispServiceStatistics(ShowLispServiceStatisticsSchema):
    '''Parser for "show lisp all instance-id <instance_id> <service> statistics"'''
    cli_command = 'show lisp all instance-id {instance_id} {service} statistics'
    # This counter keeps incrementing; exclude it from output comparisons.
    exclude = ['map_register_records_out']
| [
7061,
6,
905,
62,
75,
8802,
13,
9078,
198,
198,
40,
2640,
55,
36,
13544,
364,
329,
262,
1708,
905,
9729,
25,
198,
220,
220,
220,
1635,
905,
300,
8802,
6246,
198,
220,
220,
220,
1635,
905,
300,
8802,
3859,
198,
220,
220,
220,
163... | 1.533033 | 28,214 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.EventInfo import EventInfo
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
2829,
17752,
355,
33918,
198,
198,
6738,
435,
541,
323,
13,
64,
404,
13,
15042,
13,
9979,
415,
13,
22973,
3418... | 2.691176 | 68 |
import copy
class Course:
    """
    All information needed to be collected about a course.

    Can be constructed as empty, from a dictionary, or from another
    Course instance (whose ``classes`` list is deep-copied so the two
    instances do not share state).
    """

    def __init__(
        self,
        course_dict=None,
        course=None,
    ):
        """
        All None attributes must be provided.

        :param course_dict: optional dict of attribute values to initialise from
        :param course: optional Course instance to copy from
        """
        ### Mandatory attributes ###
        ### Strings
        # The formal name of the course which acts as an ID
        self.course_id = None
        # The Title of the course; more human readable
        self.title = None
        self.department = None

        ### Optional Attributes ###
        # nullable in our db models
        self.classes = list()
        self.units = -1
        self.prerequisites_str = ""
        self.restrictions = ""
        self.school = ""
        # a more readable department name
        # Ex: COMSPSCI -> Computer Science
        self.department_title = ""
        # Who provided this data (3rd party API? coded in-house? team member?)
        self.provider = ""

        # PEP 8: identity comparison with None, not `!=`/`==`.
        # NOTE(review): _init_from_dict is not defined in this chunk —
        # presumably defined elsewhere on the class; confirm.
        if course_dict is not None:
            self._init_from_dict(course_dict)
        if course is not None:
            self._init_from_dict(course.__dict__)
            # must deep copy list so this instance's classes are independent
            self.classes = copy.deepcopy(course.classes)
| [
11748,
4866,
628,
198,
4871,
20537,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
1439,
1321,
2622,
284,
307,
7723,
546,
257,
1781,
198,
220,
220,
220,
37227,
628,
220,
220,
220,
825,
11593,
15003,
834,
7,
198,
220,
220,
220,
... | 2.354898 | 541 |
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Dou Du.
# Distributed under the terms of the Modified BSD License.
import pytest
from ..periodic_table import PTableWidget
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
2,
15069,
357,
66,
8,
5728,
10343,
13,
198,
2,
4307,
6169,
739,
262,
2846,
286,
262,
40499,
347,
10305,
13789,
13,
198,
198,
11748,
12972,... | 3.12069 | 58 |
"""Groundtruth saved in XML, saved as compatible format in txt.
Author: Yuhuang Hu
Email : yuhuang.hu@ini.uch.ch
"""
import argparse
import os
import glob
import json
import xmltodict
from v2e_exps.utils import expandpath
# Convert PASCAL-VOC style XML annotations into simple txt files with one
# "car xmin ymin xmax ymax" line per bounding box.
parser = argparse.ArgumentParser()
parser.add_argument("--data_root", type=expandpath)
parser.add_argument("--output_root", type=expandpath)
args = parser.parse_args()

file_list = sorted(
    glob.glob(
        os.path.join(args.data_root, "*.xml")))

if not os.path.isdir(args.output_root):
    os.makedirs(args.output_root)

for file_path in file_list:
    file_base = os.path.basename(file_path)
    output_path = os.path.join(
        args.output_root, file_base[:-4]+".txt")

    with open(file_path) as f:
        data = xmltodict.parse(f.read())

    objects = data["annotation"]["object"]
    # xmltodict returns a single dict when the XML has exactly one <object>
    # and a list when there are several; normalize so one code path handles
    # both (previously the two branches duplicated the write logic).
    if not isinstance(objects, list):
        objects = [objects]

    # Open the output once per file instead of once per object.
    # NOTE(review): "a+" appends, so rerunning the script on an existing
    # output directory duplicates lines — same as the original behavior.
    with open(output_path, "a+") as f:
        for obj in objects:
            bndbox = obj["bndbox"]
            f.write("car {} {} {} {}\n".format(
                bndbox["xmin"], bndbox["ymin"], bndbox["xmax"], bndbox["ymax"]))

    print("Write to {}".format(output_path))
| [
37811,
35539,
35310,
7448,
287,
23735,
11,
7448,
355,
11670,
5794,
287,
256,
742,
13,
198,
198,
13838,
25,
575,
7456,
84,
648,
11256,
198,
15333,
1058,
331,
7456,
84,
648,
13,
13415,
31,
5362,
13,
794,
13,
354,
198,
37811,
198,
198,... | 2.127604 | 768 |
# This code is a part of XMM: Generate and Analyse (XGA), a module designed for the XMM Cluster Survey (XCS).
# Last modified by David J Turner (david.turner@sussex.ac.uk) 08/03/2021, 17:29. Copyright (c) David J Turner
import inspect
from types import FunctionType
# Doing star imports just because its more convenient, and there won't ever be enough code in these that
# it becomes a big inefficiency
from .density import *
from .misc import *
from .sb import *
from .temperature import *
# This dictionary is meant to provide pretty versions of model/function names to go in plots.
# This method of merging dictionaries (** unpacking) only works in Python 3.5+, but that should be fine.
MODEL_PUBLICATION_NAMES = {**DENS_MODELS_PUB_NAMES, **MISC_MODELS_PUB_NAMES, **SB_MODELS_PUB_NAMES,
                           **TEMP_MODELS_PUB_NAMES}
# Pretty, plot-ready names for the PARAMETERS of each model, merged from the same submodules.
MODEL_PUBLICATION_PAR_NAMES = {**DENS_MODELS_PAR_NAMES, **MISC_MODELS_PAR_NAMES, **SB_MODELS_PAR_NAMES,
                               **TEMP_MODELS_PAR_NAMES}
# These dictionaries tell the profile fitting function what models, start pars, and priors are allowed
PROF_TYPE_MODELS = {"brightness": SB_MODELS, "gas_density": DENS_MODELS, "gas_temperature": TEMP_MODELS}
def convert_to_odr_compatible(model_func: FunctionType, new_par_name: str = 'β', new_data_name: str = 'x_values') \
        -> FunctionType:
    """
    This is a bit of a weird one; its meant to convert model functions from the standard XGA setup
    (i.e. pass x values, then parameters as individual variables), into the form expected by Scipy's ODR.
    I'd recommend running a check to compare results from the original and converted functions where-ever
    this function is called - I don't completely trust it.

    :param FunctionType model_func: The original model function to be converted.
    :param str new_par_name: The name we want to use for the new list/array of fit parameters.
    :param str new_data_name: The new name we want to use for the x_data.
    :return: A successfully converted model function (hopefully) which can be used with ODR.
    :rtype: FunctionType
    """
    # This is not at all perfect, but its a bodge that will do for now. If type hints are included in
    #  the signature (as they should be in all XGA models), then np.ndarray will be numpy.ndarray in the
    #  signature I extract. This dictionary will be used to swap that out, along with any similar problems I encounter
    common_conversions = {'numpy': 'np'}

    # This reads out the function signature - which should be structured as x_values, par1, par2, par3 etc.
    mod_sig = inspect.signature(model_func)
    # Convert that signature into a string
    str_mod_sig = str(mod_sig)
    # Go through the conversion dictionary and 'correct' the signature
    for conv in common_conversions:
        str_mod_sig = str_mod_sig.replace(conv, common_conversions[conv])

    # For ODR I've decided that β is the name of the new fit parameter array, and x_values the name of the
    #  x data. This will replace the current signature of the function.
    new_mod_sig = '({np}, {nd})'.format(np=new_par_name, nd=new_data_name)

    # I find the current names of the parameters in the signature, excluding the x value name in the original function
    #  and reading that into a separate variable
    mod_sig_pars = list(mod_sig.parameters.keys())
    par_names = mod_sig_pars[1:]
    # Store the name of the x data here
    data_name = mod_sig_pars[0]

    # This gets the source code of the function as a string
    mod_code = inspect.getsource(model_func)
    # I swap in the new signature
    new_mod_code = mod_code.replace(str_mod_sig, new_mod_sig)
    # And now I know the exact form of the whole def line I can define that as a variable and then temporarily
    #  remove it from the source code
    known_def = 'def {mn}'.format(mn=model_func.__name__) + new_mod_sig + ':'
    new_mod_code = new_mod_code.replace(known_def, '')

    # Then I swing through all the original parameter names and replace them with accessing elements of our
    #  new beta parameter list/array.
    # NOTE(review): str.replace is a plain substring replacement — a parameter
    #  name that is a substring of another identifier (or appears in a string
    #  literal/comment) in the model body would be corrupted here; a
    #  word-boundary regex would be safer. Confirm all XGA models avoid this.
    for par_ind, par_name in enumerate(par_names):
        new_mod_code = new_mod_code.replace(par_name, '{np}[{i}]'.format(np=new_par_name, i=par_ind))
    # Then I do the same thing for the new x data variable name
    #  (same substring caveat as above applies to the x-data name).
    new_mod_code = new_mod_code.replace(data_name, new_data_name)

    # Adds the def SIGNATURE line back in
    new_mod_code = known_def + new_mod_code

    # This compiles the code and creates a new function
    new_model_func_code = compile(new_mod_code, '<string>', 'exec')
    new_model_func = FunctionType(new_model_func_code.co_consts[0], globals(), model_func.__name__)

    return new_model_func
| [
2,
220,
770,
2438,
318,
257,
636,
286,
1395,
12038,
25,
2980,
378,
290,
16213,
325,
357,
55,
9273,
828,
257,
8265,
3562,
329,
262,
1395,
12038,
38279,
13084,
357,
55,
7902,
737,
198,
2,
220,
4586,
9518,
416,
3271,
449,
15406,
357,
... | 2.954232 | 1,595 |
from logging import getLogger
from datetime import datetime
from os3_rll.models.challenge import Challenge, ChallengeException
from os3_rll.models.player import Player, PlayerException
from os3_rll.models.db import Database
# Module-level logger, named after this module.
logger = getLogger(__name__)
def do_challenge_sanity_check(p1, p2, may_already_by_challenged=False, may_be_expired=False):
    """
    Run the pre-flight checks for creating a new challenge.

    param os3_rll.models.player.Player() p1: The player model for player 1
    param os3_rll.models.player.Player() p2: The player model for player 2
    param bool may_already_by_challenged: If True skips the player.challenged check
    param bool may_be_expired: Skips the date check if set
    raises ChallengeException on sanity check failure
    """
    # Neither player may already be in a challenge, unless explicitly allowed.
    for contender in (p1, p2):
        if contender.challenged and not may_already_by_challenged:
            raise ChallengeException("{} is already challenged".format(contender.gamertag))

    # Player 1 must outrank player 2.
    if p1.rank < p2.rank:
        raise ChallengeException("The rank of {} is lower than of {}".format(p1.gamertag, p2.gamertag))

    # Equal ranks indicate a corrupted ladder; refuse loudly.
    if p1.rank == p2.rank:
        raise ChallengeException(
            "The ranks of both player {} and player {} are the same. This should not happen. EVERYBODY PANIC!!!".format(
                p1.gamertag, p2.gamertag
            )
        )

    # Player 1's challenge timeout must have elapsed, unless explicitly allowed.
    if p1.timeout > datetime.now() and not may_be_expired:
        raise ChallengeException("The timeout counter of {} is still active".format(p1.gamertag))
def process_completed_challenge_args(args):
    """
    Process the completed challenge arguments.

    param str args: the played matches separated by spaces and scores by dashes.
        Example "1-2 5-3 2-4" corresponds to 3 matches played with the first
        match ending in 1-2, the second in 5-3 etc.
    returns tuple: (p1_wins, p2_wins, p1_score, p2_score)
    raises ChallengeException: if the input cannot be parsed or the overall
        result is a draw
    """
    p1_wins, p2_wins, p1_score, p2_score = 0, 0, 0, 0
    logger.debug("Trying to parse challenge result, got the following user input {}".format(args))
    matches = args.split()
    for match in matches:
        # filter(None, ...) drops empty fragments produced by stray dashes.
        scores = list(filter(None, match.split("-")))
        if len(scores) != 2:
            raise ChallengeException("Unable to parse challenge arguments")
        try:
            home = int(scores[0])
            away = int(scores[1])
        except ValueError:
            # Previously non-numeric scores escaped as a bare ValueError;
            # report them as a parse failure like any other malformed input.
            raise ChallengeException("Unable to parse challenge arguments")
        # Assign the win to the player with the highest score
        if home > away:
            p1_wins += 1
        elif away > home:
            p2_wins += 1
        # Assign the amount of goals
        p1_score += home
        p2_score += away
    # Check for a draw (this also rejects an empty match list: 0 == 0)
    if p1_wins == p2_wins:
        raise ChallengeException("Draws are not allowed")
    return p1_wins, p2_wins, p1_score, p2_score
def get_player_objects_from_challenge_info(player, should_be_completed=False, search_by_discord_name=True):
    """
    Find the most recent challenge involving a player and return both participants.

    param str/int player: The gamertag or id of the player to search for
    param bool should_be_completed: If the challenge should already be completed or not
    param bool search_by_discord_name: Searches for player by full discord_name instead of gamertag
    returns tuple os3_rll.models.player.Player: (p1, p2)
    raises ChallengeException: when no matching challenge exists
    """
    # Resolve a name to a numeric player id when needed.
    player_id = player
    if isinstance(player_id, str):
        player_id = Player.get_player_id_by_username(player_id, discord_name=search_by_discord_name)

    query = "SELECT `p1`, `p2` FROM `challenges` WHERE (`p1`=%s OR `p2`=%s) AND `winner` IS {} NULL ORDER BY `id` DESC".format(
        "NOT" if should_be_completed else ""
    )
    with Database() as db:
        db.execute_prepared_statement(query, (player_id, player_id))
        if db.rowcount == 0:
            raise ChallengeException("No challenges found")
        row = db.fetchone()
    return Player(row[0]), Player(row[1])
def get_latest_challenge_from_player_id(player, should_be_completed=False):
    """
    Find the latest challenge belonging to a player.

    param int player: The player ID to search the challenges for
    param bool should_be_completed: If the challenge should already be completed or not
    returns os3_rll.models.challenge: if a challenge is found
    raises ChallengeException/PlayerException: on not found / on error
    """
    logger.info("Trying to get latest challenge from player with id {}".format(player))
    with Player(player) as p:
        # De Morgan of: not p.challenged and not should_be_completed
        if not (p.challenged or should_be_completed):
            raise PlayerException("Player {} is currently not in an active challenge".format(p.gamertag))
        # Look the challenge up, newest first.
        completed_marker = "NOT" if should_be_completed else ""
        p.db.execute(
            "SELECT `id` FROM `challenges` WHERE (`p1`={0} OR `p2`={0}) AND `winner` is {1} NULL ORDER BY `id` LIMIT 1".format(
                p.id, completed_marker
            )
        )
        p.check_row_count()
        challenge_id = p.db.fetchone()[0]
    # Return the Challenge model
    return Challenge(challenge_id)
| [
6738,
18931,
1330,
651,
11187,
1362,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
198,
6738,
28686,
18,
62,
81,
297,
13,
27530,
13,
36747,
3540,
1330,
13879,
11,
13879,
16922,
198,
6738,
28686,
18,
62,
81,
297,
13,
27530,
13,
7829,
... | 2.631631 | 2,017 |
# -*- coding: utf-8 -*-
"""SUOD
"""
# Author: Yue Zhao <zhaoy@cmu.edu>
# License: BSD 2 clause
from __future__ import division
from __future__ import print_function
import numpy as np
from sklearn.utils import check_array
from sklearn.utils.validation import check_is_fitted
try:
import suod
except ImportError:
print('please install suod first for SUOD by `pip install suod`')
from suod.models.base import SUOD as SUOD_model
from .base import BaseDetector
from .lof import LOF
from .hbos import HBOS
from .iforest import IForest
from .copod import COPOD
from .combination import average, maximization
from ..utils.utility import standardizer
class SUOD(BaseDetector):
# noinspection PyPep8
"""SUOD (Scalable Unsupervised Outlier Detection) is an acceleration
framework for large scale unsupervised outlier detector training and
prediction. See :cite:`zhao2021suod` for details.
Parameters
----------
base_estimators : list, length must be greater than 1
A list of base estimators. Certain methods must be present, e.g.,
`fit` and `predict`.
combination : str, optional (default='average')
Decide how to aggregate the results from multiple models:
- "average" : average the results from all base detectors
- "maximization" : output the max value across all base detectors
contamination : float in (0., 0.5), optional (default=0.1)
The amount of contamination of the data set,
i.e. the proportion of outliers in the data set. Used when fitting to
define the threshold on the decision function.
n_jobs : optional (default=1)
The number of jobs to run in parallel for both `fit` and
`predict`. If -1, then the number of jobs is set to the
the number of jobs that can actually run in parallel.
rp_clf_list : list, optional (default=None)
The list of outlier detection models to use random projection. The
detector name should be consistent with PyOD.
rp_ng_clf_list : list, optional (default=None)
The list of outlier detection models NOT to use random projection. The
detector name should be consistent with PyOD.
rp_flag_global : bool, optional (default=True)
If set to False, random projection is turned off for all base models.
target_dim_frac : float in (0., 1), optional (default=0.5)
The target compression ratio.
jl_method : string, optional (default = 'basic')
The JL projection method:
- "basic": each component of the transformation matrix is taken at
random in N(0,1).
- "discrete", each component of the transformation matrix is taken at
random in {-1,1}.
- "circulant": the first row of the transformation matrix is taken at
random in N(0,1), and each row is obtained from the previous one
by a one-left shift.
- "toeplitz": the first row and column of the transformation matrix
is taken at random in N(0,1), and each diagonal has a constant value
taken from these first vector.
bps_flag : bool, optional (default=True)
If set to False, balanced parallel scheduling is turned off.
approx_clf_list : list, optional (default=None)
The list of outlier detection models to use pseudo-supervised
approximation. The detector name should be consistent with PyOD.
approx_ng_clf_list : list, optional (default=None)
The list of outlier detection models NOT to use pseudo-supervised
approximation. The detector name should be consistent with PyOD.
approx_flag_global : bool, optional (default=True)
If set to False, pseudo-supervised approximation is turned off.
approx_clf : object, optional (default: sklearn RandomForestRegressor)
The supervised model used to approximate unsupervised models.
cost_forecast_loc_fit : str, optional
The location of the pretrained cost prediction forecast for training.
cost_forecast_loc_pred : str, optional
The location of the pretrained cost prediction forecast for prediction.
verbose : int, optional (default=0)
Controls the verbosity of the building process.
Attributes
----------
decision_scores_ : numpy array of shape (n_samples,)
The outlier scores of the training data.
The higher, the more abnormal. Outliers tend to have higher
scores. This value is available once the detector is
fitted.
threshold_ : float
The threshold is based on ``contamination``. It is the
``n_samples * contamination`` most abnormal samples in
``decision_scores_``. The threshold is calculated for generating
binary outlier labels.
labels_ : int, either 0 or 1
The binary labels of the training data. 0 stands for inliers
and 1 for outliers/anomalies. It is generated by applying
``threshold_`` on ``decision_scores_``.
"""
def fit(self, X, y=None):
"""Fit detector. y is ignored in unsupervised methods.
Parameters
----------
X : numpy array of shape (n_samples, n_features)
The input samples.
y : Ignored
Not used, present for API consistency by convention.
Returns
-------
self : object
Fitted estimator.
"""
# validate inputs X and y (optional)
X = check_array(X)
n_samples, n_features = X.shape[0], X.shape[1]
self._set_n_classes(y)
# fit the model and then approximate it
self.model_.fit(X)
self.model_.approximate(X)
# get the decision scores from each base estimators
decision_score_mat = np.zeros([n_samples, self.n_estimators])
for i in range(self.n_estimators):
decision_score_mat[:, i] = self.model_.base_estimators[
i].decision_scores_
# the scores must be standardized before combination
decision_score_mat, self.score_scalar_ = standardizer(
decision_score_mat, keep_scalar=True)
# todo: may support other combination
if self.combination == 'average':
decision_score = average(decision_score_mat)
else:
decision_score = maximization(decision_score_mat)
assert (len(decision_score) == n_samples)
self.decision_scores_ = decision_score.ravel()
self._process_decision_scores()
return self
def decision_function(self, X):
"""Predict raw anomaly score of X using the fitted detectors.
The anomaly score of an input sample is computed based on different
detector algorithms. For consistency, outliers are assigned with
larger anomaly scores.
Parameters
----------
X : numpy array of shape (n_samples, n_features)
The training input samples. Sparse matrices are accepted only
if they are supported by the base estimator.
Returns
-------
anomaly_scores : numpy array of shape (n_samples,)
The anomaly score of the input samples.
"""
check_is_fitted(self, ['model_', 'decision_scores_',
'threshold_', 'labels_'])
X = check_array(X)
# initialize the output score
predicted_scores = self.model_.decision_function(X)
# standardize the score and combine
predicted_scores = self.score_scalar_.transform(predicted_scores)
# todo: may support other combination
if self.combination == 'average':
decision_score = average(predicted_scores)
else:
decision_score = maximization(predicted_scores)
assert (len(decision_score) == X.shape[0])
return decision_score.ravel()
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
12564,
3727,
198,
37811,
198,
2,
6434,
25,
32854,
29436,
1279,
89,
3099,
726,
31,
11215,
84,
13,
15532,
29,
198,
2,
13789,
25,
347,
10305,
362,
13444,
198,
6738... | 2.730462 | 2,879 |
from jiwer import wer
import os
import sys
gt_file = sys.argv[1]
pred_file = sys.argv[2]
with open(gt_file) as f:
gt_lines = f.readlines()
gt_lines = [' '.join(x.strip().split()) for x in gt_lines]
print(len(gt_lines))
with open(pred_file) as f:
pred_lines = f.readlines()
pred_lines = [' '.join(x.strip().split()) for x in pred_lines]
print(len(pred_lines))
cerr = total_cer(gt_lines, pred_lines)
werr = total_wer(gt_lines, pred_lines)
serr = total_ser(gt_lines, pred_lines)
print("CER:", cerr)
print("WER:", werr)
print("SER:", serr)
| [
6738,
474,
14246,
263,
1330,
266,
263,
628,
628,
198,
198,
11748,
28686,
198,
11748,
25064,
198,
198,
13655,
62,
7753,
796,
25064,
13,
853,
85,
58,
16,
60,
198,
28764,
62,
7753,
220,
796,
25064,
13,
853,
85,
58,
17,
60,
198,
198,
... | 2.361702 | 235 |
from pydantic import BaseModel
| [
6738,
279,
5173,
5109,
1330,
7308,
17633,
198
] | 3.875 | 8 |
import networkx as nx
import numpy as np
import math
import pandas as pd
from networkx.drawing.nx_agraph import graphviz_layout
import matplotlib
from time import gmtime, strftime
import scipy
margFeat=[]
| [
11748,
3127,
87,
355,
299,
87,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
10688,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
3127,
87,
13,
19334,
278,
13,
77,
87,
62,
6111,
1330,
4823,
85,
528,
62,
39786,
198,
11748,
2603,... | 2.698795 | 83 |
class EstadBoton:
'''Esta clase maneja la configuracion del tablero,
ya sea el valor de las casillas, los colores, y el estado de los botones '''
| [
4871,
10062,
324,
33,
18970,
25,
201,
198,
197,
7061,
6,
22362,
64,
537,
589,
582,
68,
6592,
8591,
4566,
333,
49443,
1619,
7400,
1754,
78,
11,
201,
198,
197,
21349,
5417,
1288,
1188,
273,
390,
39990,
6124,
25314,
11,
22346,
951,
285... | 1.842105 | 114 |
"""
Linear regression objects for panel data
"""
# pylint: disable-msg=W0231
# pylint: disable-msg=E1101,E1103
from __future__ import division
from pandas.compat import range
from pandas import compat
import warnings
import numpy as np
from pandas.core.panel import Panel
from pandas.core.frame import DataFrame
from pandas.core.reshape import get_dummies
from pandas.core.series import Series
from pandas.core.sparse import SparsePanel
from pandas.stats.ols import OLS, MovingOLS
import pandas.stats.common as com
import pandas.stats.math as math
from pandas.util.decorators import cache_readonly
class PanelOLS(OLS):
"""Implements panel OLS.
See ols function docs
"""
_panel_model = True
def _prepare_data(self):
"""Cleans and stacks input data into DataFrame objects
If time effects is True, then we turn off intercepts and omit an item
from every (entity and x) fixed effect.
Otherwise:
- If we have an intercept, we omit an item from every fixed effect.
- Else, we omit an item from every fixed effect except one of them.
The categorical variables will get dropped from x.
"""
(x, x_filtered, y, weights, cat_mapping) = self._filter_data()
self.log('Adding dummies to X variables')
x = self._add_dummies(x, cat_mapping)
self.log('Adding dummies to filtered X variables')
x_filtered = self._add_dummies(x_filtered, cat_mapping)
if self._x_effects:
x = x.drop(self._x_effects, axis=1)
x_filtered = x_filtered.drop(self._x_effects, axis=1)
if self._time_effects:
x_regressor = x.sub(x.mean(level=0), level=0)
unstacked_y = y.unstack()
y_regressor = unstacked_y.sub(unstacked_y.mean(1), axis=0).stack()
y_regressor.index = y.index
elif self._intercept:
# only add intercept when no time effects
self.log('Adding intercept')
x = x_regressor = add_intercept(x)
x_filtered = add_intercept(x_filtered)
y_regressor = y
else:
self.log('No intercept added')
x_regressor = x
y_regressor = y
if weights is not None:
if not y_regressor.index.equals(weights.index):
raise AssertionError("y_regressor and weights must have the "
"same index")
if not x_regressor.index.equals(weights.index):
raise AssertionError("x_regressor and weights must have the "
"same index")
rt_weights = np.sqrt(weights)
y_regressor = y_regressor * rt_weights
x_regressor = x_regressor.mul(rt_weights, axis=0)
return x, x_regressor, x_filtered, y, y_regressor
def _filter_data(self):
"""
"""
data = self._x_orig
cat_mapping = {}
if isinstance(data, DataFrame):
data = data.to_panel()
else:
if isinstance(data, Panel):
data = data.copy()
if not isinstance(data, SparsePanel):
data, cat_mapping = self._convert_x(data)
if not isinstance(data, Panel):
data = Panel.from_dict(data, intersect=True)
x_names = data.items
if self._weights is not None:
data['__weights__'] = self._weights
# Filter x's without y (so we can make a prediction)
filtered = data.to_frame()
# Filter all data together using to_frame
# convert to DataFrame
y = self._y_orig
if isinstance(y, Series):
y = y.unstack()
data['__y__'] = y
data_long = data.to_frame()
x_filt = filtered.filter(x_names)
x = data_long.filter(x_names)
y = data_long['__y__']
if self._weights is not None and not self._weights.empty:
weights = data_long['__weights__']
else:
weights = None
return x, x_filt, y, weights, cat_mapping
def _add_dummies(self, panel, mapping):
"""
Add entity and / or categorical dummies to input X DataFrame
Returns
-------
DataFrame
"""
panel = self._add_entity_effects(panel)
panel = self._add_categorical_dummies(panel, mapping)
return panel
def _add_entity_effects(self, panel):
"""
Add entity dummies to panel
Returns
-------
DataFrame
"""
from pandas.core.reshape import make_axis_dummies
if not self._entity_effects:
return panel
self.log('-- Adding entity fixed effect dummies')
dummies = make_axis_dummies(panel, 'minor')
if not self._use_all_dummies:
if 'entity' in self._dropped_dummies:
to_exclude = str(self._dropped_dummies.get('entity'))
else:
to_exclude = dummies.columns[0]
if to_exclude not in dummies.columns:
raise Exception('%s not in %s' % (to_exclude,
dummies.columns))
self.log('-- Excluding dummy for entity: %s' % to_exclude)
dummies = dummies.filter(dummies.columns.difference([to_exclude]))
dummies = dummies.add_prefix('FE_')
panel = panel.join(dummies)
return panel
def _add_categorical_dummies(self, panel, cat_mappings):
"""
Add categorical dummies to panel
Returns
-------
DataFrame
"""
if not self._x_effects:
return panel
dropped_dummy = (self._entity_effects and not self._use_all_dummies)
for effect in self._x_effects:
self.log('-- Adding fixed effect dummies for %s' % effect)
dummies = get_dummies(panel[effect])
val_map = cat_mappings.get(effect)
if val_map:
val_map = dict((v, k) for k, v in compat.iteritems(val_map))
if dropped_dummy or not self._use_all_dummies:
if effect in self._dropped_dummies:
to_exclude = mapped_name = self._dropped_dummies.get(
effect)
if val_map:
mapped_name = val_map[to_exclude]
else:
to_exclude = mapped_name = dummies.columns[0]
if mapped_name not in dummies.columns: # pragma: no cover
raise Exception('%s not in %s' % (to_exclude,
dummies.columns))
self.log(
'-- Excluding dummy for %s: %s' % (effect, to_exclude))
dummies = dummies.filter(dummies.columns.difference([mapped_name]))
dropped_dummy = True
dummies = _convertDummies(dummies, cat_mappings.get(effect))
dummies = dummies.add_prefix('%s_' % effect)
panel = panel.join(dummies)
return panel
@property
def _use_all_dummies(self):
"""
In the case of using an intercept or including time fixed
effects, completely partitioning the sample would make the X
not full rank.
"""
return (not self._intercept and not self._time_effects)
@cache_readonly
def _beta_raw(self):
"""Runs the regression and returns the beta."""
X = self._x_trans.values
Y = self._y_trans.values.squeeze()
beta, _, _, _ = np.linalg.lstsq(X, Y)
return beta
@cache_readonly
@cache_readonly
def _df_model_raw(self):
"""Returns the raw model degrees of freedom."""
return self._df_raw - 1
@cache_readonly
def _df_resid_raw(self):
"""Returns the raw residual degrees of freedom."""
return self._nobs - self._df_raw
@cache_readonly
def _df_raw(self):
"""Returns the degrees of freedom."""
df = math.rank(self._x_trans.values)
if self._time_effects:
df += self._total_times
return df
@cache_readonly
@property
@cache_readonly
def _r2_adj_raw(self):
"""Returns the raw r-squared adjusted values."""
nobs = self._nobs
factors = (nobs - 1) / (nobs - self._df_raw)
return 1 - (1 - self._r2_raw) * factors
@cache_readonly
@cache_readonly
@cache_readonly
def _rmse_raw(self):
"""Returns the raw rmse values."""
# X = self._x.values
# Y = self._y.values.squeeze()
X = self._x_trans.values
Y = self._y_trans.values.squeeze()
resid = Y - np.dot(X, self._beta_raw)
ss = (resid ** 2).sum()
return np.sqrt(ss / (self._nobs - self._df_raw))
@cache_readonly
@cache_readonly
def _y_fitted_raw(self):
"""Returns the raw fitted y values."""
return np.dot(self._x.values, self._beta_raw)
@cache_readonly
@cache_readonly
@cache_readonly
@property
def add_intercept(panel, name='intercept'):
"""
Add column of ones to input panel
Parameters
----------
panel: Panel / DataFrame
name: string, default 'intercept']
Returns
-------
New object (same type as input)
"""
panel = panel.copy()
panel[name] = 1.
return panel.consolidate()
class MovingPanelOLS(MovingOLS, PanelOLS):
"""Implements rolling/expanding panel OLS.
See ols function docs
"""
_panel_model = True
@cache_readonly
@cache_readonly
@cache_readonly
def y_predict(self):
"""Returns the predicted y values."""
return self._unstack_y(self._y_predict_raw)
def lagged_y_predict(self, lag=1):
"""
Compute forecast Y value lagging coefficient by input number
of time periods
Parameters
----------
lag : int
Returns
-------
DataFrame
"""
x = self._x.values
betas = self._beta_matrix(lag=lag)
return self._unstack_y((betas * x).sum(1))
@cache_readonly
@cache_readonly
def _df_raw(self):
"""Returns the degrees of freedom."""
df = self._rolling_rank()
if self._time_effects:
df += self._window_time_obs
return df[self._valid_indices]
@cache_readonly
def _var_beta_raw(self):
"""Returns the raw covariance of beta."""
x = self._x
y = self._y
dates = x.index.levels[0]
cluster_axis = None
if self._cluster == 'time':
cluster_axis = 0
elif self._cluster == 'entity':
cluster_axis = 1
nobs = self._nobs
rmse = self._rmse_raw
beta = self._beta_raw
df = self._df_raw
window = self._window
if not self._time_effects:
# Non-transformed X
cum_xx = self._cum_xx(x)
results = []
for n, i in enumerate(self._valid_indices):
if self._is_rolling and i >= window:
prior_date = dates[i - window + 1]
else:
prior_date = dates[0]
date = dates[i]
x_slice = x.truncate(prior_date, date)
y_slice = y.truncate(prior_date, date)
if self._time_effects:
xx = _xx_time_effects(x_slice, y_slice)
else:
xx = cum_xx[i]
if self._is_rolling and i >= window:
xx = xx - cum_xx[i - window]
result = _var_beta_panel(y_slice, x_slice, beta[n], xx, rmse[n],
cluster_axis, self._nw_lags,
nobs[n], df[n], self._nw_overlap)
results.append(result)
return np.array(results)
@cache_readonly
@cache_readonly
@cache_readonly
def _y_predict_raw(self):
"""Returns the raw predicted y values."""
x = self._x.values
betas = self._beta_matrix(lag=1)
return (betas * x).sum(1)
@cache_readonly
class NonPooledPanelOLS(object):
"""Implements non-pooled panel OLS.
Parameters
----------
y : DataFrame
x : Series, DataFrame, or dict of Series
intercept : bool
True if you want an intercept.
nw_lags : None or int
Number of Newey-West lags.
window_type : {'full_sample', 'rolling', 'expanding'}
'full_sample' by default
window : int
size of window (for rolling/expanding OLS)
"""
ATTRIBUTES = [
'beta',
'df',
'df_model',
'df_resid',
'f_stat',
'p_value',
'r2',
'r2_adj',
'resid',
'rmse',
'std_err',
'summary_as_matrix',
't_stat',
'var_beta',
'x',
'y',
'y_fitted',
'y_predict'
]
def _group_agg(values, bounds, f):
"""
R-style aggregator
Parameters
----------
values : N-length or N x K ndarray
bounds : B-length ndarray
f : ndarray aggregation function
Returns
-------
ndarray with same length as bounds array
"""
if values.ndim == 1:
N = len(values)
result = np.empty(len(bounds), dtype=float)
elif values.ndim == 2:
N, K = values.shape
result = np.empty((len(bounds), K), dtype=float)
testagg = f(values[:min(1, len(values))])
if isinstance(testagg, np.ndarray) and testagg.ndim == 2:
raise AssertionError('Function must reduce')
for i, left_bound in enumerate(bounds):
if i == len(bounds) - 1:
right_bound = N
else:
right_bound = bounds[i + 1]
result[i] = f(values[left_bound:right_bound])
return result
def _xx_time_effects(x, y):
"""
Returns X'X - (X'T) (T'T)^-1 (T'X)
"""
# X'X
xx = np.dot(x.values.T, x.values)
xt = x.sum(level=0).values
count = y.unstack().count(1).values
selector = count > 0
# X'X - (T'T)^-1 (T'X)
xt = xt[selector]
count = count[selector]
return xx - np.dot(xt.T / count, xt)
| [
37811,
198,
14993,
451,
20683,
5563,
329,
6103,
1366,
198,
37811,
198,
198,
2,
279,
2645,
600,
25,
15560,
12,
19662,
28,
54,
15,
25667,
198,
2,
279,
2645,
600,
25,
15560,
12,
19662,
28,
36,
1157,
486,
11,
36,
11442,
18,
198,
198,
... | 2.089349 | 6,816 |
# Copyright 2020 The HuggingFace Team All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Adapted from:
# https://github.com/huggingface/transformers/blob/master/examples/question-answering/run_qa.py
# yapf: off
from dataclasses import dataclass, field
from lightning_transformers.core.nlp import HFTransformerDataConfig
@dataclass
class QuestionAnsweringDataConfig(HFTransformerDataConfig):
"""Arguments pertaining to what data we are going to input our model for training and eval."""
max_length: int = field(
default=384,
metadata={
"help": "The maximum total input sequence length after tokenization. Sequences longer "
"than this will be truncated, sequences shorter will be padded."
},
)
version_2_with_negative: bool = field(
default=False, metadata={"help": "If true, some of the examples do not have an answer."}
)
null_score_diff_threshold: float = field(
default=0.0,
metadata={
"help": "The threshold used to select the null answer: if the best answer has a score that is less than "
"the score of the null answer minus this threshold, the null answer is selected for this example. "
"Only useful when `version_2_with_negative=True`."
},
)
doc_stride: int = field(
default=128,
metadata={"help": "When splitting up a long document into chunks, how much stride to take between chunks."},
)
n_best_size: int = field(
default=20,
metadata={"help": "The total number of n-best predictions to generate when looking for an answer."},
)
max_answer_length: int = field(
default=30,
metadata={
"help": "The maximum length of an answer that can be generated. This is needed because the start "
"and end predictions are not conditioned on one another."
},
)
| [
2,
15069,
12131,
383,
12905,
2667,
32388,
4816,
1439,
2489,
10395,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351... | 2.963415 | 820 |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/uploader/proto/server_info.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorboard/uploader/proto/server_info.proto',
package='tensorboard.service',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n,tensorboard/uploader/proto/server_info.proto\x12\x13tensorboard.service\"l\n\x11ServerInfoRequest\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x46\n\x14plugin_specification\x18\x02 \x01(\x0b\x32(.tensorboard.service.PluginSpecification\"\xb7\x02\n\x12ServerInfoResponse\x12\x39\n\rcompatibility\x18\x01 \x01(\x0b\x32\".tensorboard.service.Compatibility\x12\x32\n\napi_server\x18\x02 \x01(\x0b\x32\x1e.tensorboard.service.ApiServer\x12<\n\nurl_format\x18\x03 \x01(\x0b\x32(.tensorboard.service.ExperimentUrlFormat\x12:\n\x0eplugin_control\x18\x04 \x01(\x0b\x32\".tensorboard.service.PluginControl\x12\x38\n\rupload_limits\x18\x05 \x01(\x0b\x32!.tensorboard.service.UploadLimits\"\\\n\rCompatibility\x12:\n\x07verdict\x18\x01 \x01(\x0e\x32).tensorboard.service.CompatibilityVerdict\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\"\x1d\n\tApiServer\x12\x10\n\x08\x65ndpoint\x18\x01 \x01(\t\"?\n\x13\x45xperimentUrlFormat\x12\x10\n\x08template\x18\x01 \x01(\t\x12\x16\n\x0eid_placeholder\x18\x02 \x01(\t\"-\n\x13PluginSpecification\x12\x16\n\x0eupload_plugins\x18\x02 \x03(\t\"(\n\rPluginControl\x12\x17\n\x0f\x61llowed_plugins\x18\x01 \x03(\t\"\x92\x02\n\x0cUploadLimits\x12\x1f\n\x17max_scalar_request_size\x18\x03 \x01(\x03\x12\x1f\n\x17max_tensor_request_size\x18\x04 \x01(\x03\x12\x1d\n\x15max_blob_request_size\x18\x05 \x01(\x03\x12#\n\x1bmin_scalar_request_interval\x18\x06 \x01(\x03\x12#\n\x1bmin_tensor_request_interval\x18\x07 \x01(\x03\x12!\n\x19min_blob_request_interval\x18\x08 \x01(\x03\x12\x15\n\rmax_blob_size\x18\x01 \x01(\x03\x12\x1d\n\x15max_tensor_point_size\x18\x02 \x01(\x03*`\n\x14\x43ompatibilityVerdict\x12\x13\n\x0fVERDICT_UNKNOWN\x10\x00\x12\x0e\n\nVERDICT_OK\x10\x01\x12\x10\n\x0cVERDICT_WARN\x10\x02\x12\x11\n\rVERDICT_ERROR\x10\x03\x62\x06proto3')
)
_COMPATIBILITYVERDICT = _descriptor.EnumDescriptor(
name='CompatibilityVerdict',
full_name='tensorboard.service.CompatibilityVerdict',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='VERDICT_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VERDICT_OK', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VERDICT_WARN', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VERDICT_ERROR', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1049,
serialized_end=1145,
)
_sym_db.RegisterEnumDescriptor(_COMPATIBILITYVERDICT)
CompatibilityVerdict = enum_type_wrapper.EnumTypeWrapper(_COMPATIBILITYVERDICT)
VERDICT_UNKNOWN = 0
VERDICT_OK = 1
VERDICT_WARN = 2
VERDICT_ERROR = 3
_SERVERINFOREQUEST = _descriptor.Descriptor(
name='ServerInfoRequest',
full_name='tensorboard.service.ServerInfoRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='tensorboard.service.ServerInfoRequest.version', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='plugin_specification', full_name='tensorboard.service.ServerInfoRequest.plugin_specification', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=69,
serialized_end=177,
)
_SERVERINFORESPONSE = _descriptor.Descriptor(
name='ServerInfoResponse',
full_name='tensorboard.service.ServerInfoResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='compatibility', full_name='tensorboard.service.ServerInfoResponse.compatibility', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='api_server', full_name='tensorboard.service.ServerInfoResponse.api_server', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url_format', full_name='tensorboard.service.ServerInfoResponse.url_format', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='plugin_control', full_name='tensorboard.service.ServerInfoResponse.plugin_control', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='upload_limits', full_name='tensorboard.service.ServerInfoResponse.upload_limits', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=180,
serialized_end=491,
)
_COMPATIBILITY = _descriptor.Descriptor(
name='Compatibility',
full_name='tensorboard.service.Compatibility',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='verdict', full_name='tensorboard.service.Compatibility.verdict', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='details', full_name='tensorboard.service.Compatibility.details', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=493,
serialized_end=585,
)
_APISERVER = _descriptor.Descriptor(
name='ApiServer',
full_name='tensorboard.service.ApiServer',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='endpoint', full_name='tensorboard.service.ApiServer.endpoint', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=587,
serialized_end=616,
)
_EXPERIMENTURLFORMAT = _descriptor.Descriptor(
name='ExperimentUrlFormat',
full_name='tensorboard.service.ExperimentUrlFormat',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='template', full_name='tensorboard.service.ExperimentUrlFormat.template', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id_placeholder', full_name='tensorboard.service.ExperimentUrlFormat.id_placeholder', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=618,
serialized_end=681,
)
_PLUGINSPECIFICATION = _descriptor.Descriptor(
name='PluginSpecification',
full_name='tensorboard.service.PluginSpecification',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='upload_plugins', full_name='tensorboard.service.PluginSpecification.upload_plugins', index=0,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=683,
serialized_end=728,
)
_PLUGINCONTROL = _descriptor.Descriptor(
name='PluginControl',
full_name='tensorboard.service.PluginControl',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='allowed_plugins', full_name='tensorboard.service.PluginControl.allowed_plugins', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=730,
serialized_end=770,
)
_UPLOADLIMITS = _descriptor.Descriptor(
name='UploadLimits',
full_name='tensorboard.service.UploadLimits',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='max_scalar_request_size', full_name='tensorboard.service.UploadLimits.max_scalar_request_size', index=0,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_tensor_request_size', full_name='tensorboard.service.UploadLimits.max_tensor_request_size', index=1,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_blob_request_size', full_name='tensorboard.service.UploadLimits.max_blob_request_size', index=2,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='min_scalar_request_interval', full_name='tensorboard.service.UploadLimits.min_scalar_request_interval', index=3,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='min_tensor_request_interval', full_name='tensorboard.service.UploadLimits.min_tensor_request_interval', index=4,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='min_blob_request_interval', full_name='tensorboard.service.UploadLimits.min_blob_request_interval', index=5,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_blob_size', full_name='tensorboard.service.UploadLimits.max_blob_size', index=6,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_tensor_point_size', full_name='tensorboard.service.UploadLimits.max_tensor_point_size', index=7,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=773,
serialized_end=1047,
)
_SERVERINFOREQUEST.fields_by_name['plugin_specification'].message_type = _PLUGINSPECIFICATION
_SERVERINFORESPONSE.fields_by_name['compatibility'].message_type = _COMPATIBILITY
_SERVERINFORESPONSE.fields_by_name['api_server'].message_type = _APISERVER
_SERVERINFORESPONSE.fields_by_name['url_format'].message_type = _EXPERIMENTURLFORMAT
_SERVERINFORESPONSE.fields_by_name['plugin_control'].message_type = _PLUGINCONTROL
_SERVERINFORESPONSE.fields_by_name['upload_limits'].message_type = _UPLOADLIMITS
_COMPATIBILITY.fields_by_name['verdict'].enum_type = _COMPATIBILITYVERDICT
DESCRIPTOR.message_types_by_name['ServerInfoRequest'] = _SERVERINFOREQUEST
DESCRIPTOR.message_types_by_name['ServerInfoResponse'] = _SERVERINFORESPONSE
DESCRIPTOR.message_types_by_name['Compatibility'] = _COMPATIBILITY
DESCRIPTOR.message_types_by_name['ApiServer'] = _APISERVER
DESCRIPTOR.message_types_by_name['ExperimentUrlFormat'] = _EXPERIMENTURLFORMAT
DESCRIPTOR.message_types_by_name['PluginSpecification'] = _PLUGINSPECIFICATION
DESCRIPTOR.message_types_by_name['PluginControl'] = _PLUGINCONTROL
DESCRIPTOR.message_types_by_name['UploadLimits'] = _UPLOADLIMITS
DESCRIPTOR.enum_types_by_name['CompatibilityVerdict'] = _COMPATIBILITYVERDICT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ServerInfoRequest = _reflection.GeneratedProtocolMessageType('ServerInfoRequest', (_message.Message,), {
'DESCRIPTOR' : _SERVERINFOREQUEST,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.ServerInfoRequest)
})
_sym_db.RegisterMessage(ServerInfoRequest)
ServerInfoResponse = _reflection.GeneratedProtocolMessageType('ServerInfoResponse', (_message.Message,), {
'DESCRIPTOR' : _SERVERINFORESPONSE,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.ServerInfoResponse)
})
_sym_db.RegisterMessage(ServerInfoResponse)
Compatibility = _reflection.GeneratedProtocolMessageType('Compatibility', (_message.Message,), {
'DESCRIPTOR' : _COMPATIBILITY,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.Compatibility)
})
_sym_db.RegisterMessage(Compatibility)
ApiServer = _reflection.GeneratedProtocolMessageType('ApiServer', (_message.Message,), {
'DESCRIPTOR' : _APISERVER,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.ApiServer)
})
_sym_db.RegisterMessage(ApiServer)
ExperimentUrlFormat = _reflection.GeneratedProtocolMessageType('ExperimentUrlFormat', (_message.Message,), {
'DESCRIPTOR' : _EXPERIMENTURLFORMAT,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.ExperimentUrlFormat)
})
_sym_db.RegisterMessage(ExperimentUrlFormat)
PluginSpecification = _reflection.GeneratedProtocolMessageType('PluginSpecification', (_message.Message,), {
'DESCRIPTOR' : _PLUGINSPECIFICATION,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.PluginSpecification)
})
_sym_db.RegisterMessage(PluginSpecification)
PluginControl = _reflection.GeneratedProtocolMessageType('PluginControl', (_message.Message,), {
'DESCRIPTOR' : _PLUGINCONTROL,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.PluginControl)
})
_sym_db.RegisterMessage(PluginControl)
UploadLimits = _reflection.GeneratedProtocolMessageType('UploadLimits', (_message.Message,), {
'DESCRIPTOR' : _UPLOADLIMITS,
'__module__' : 'tensorboard.uploader.proto.server_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.service.UploadLimits)
})
_sym_db.RegisterMessage(UploadLimits)
# @@protoc_insertion_point(module_scope)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2980,
515,
416,
262,
8435,
11876,
17050,
13,
220,
8410,
5626,
48483,
0,
198,
2,
2723,
25,
11192,
273,
3526,
14,
25850,
263,
14,
1676,
1462,
14,
15388,
62,
10951,
... | 2.493224 | 7,822 |
# Exercício Python #051 - Progressão Aritmética
#
# Desenvolva um programa que leia o PRIMEIRO TERMO e a RAZÃO de uma PA. No final, mostre os 10 primeiros termos dessa
# progressão.
#
# OBS: Eu tentei realizar esse exercicio, fiz o codígo mas ele apresentou ERRO DE LÓGICA! Estude mais P.A e
# esse conteudo!
print('\033[0;35m-=-\033[m' * 10)
print('\033[1;36mPROGRESSÃO ARITIMÉRICA (P.A)\033[m')
print('\033[0;35m-=-\033[m' * 10)
t1 = str(input('\033[0;30mDigite o PRIMEIRO TERMO da P.A: \033[m')).strip()
r = str(input('\033[0;30mAgora, digite a RAZÃO da P.A: \033[m')).strip()
t1 = int(str(t1))
r = int(str(r))
a10 = t1 + (10 - 1) * r # Fórmula da P.A ADAPTADA para a linguagem PYTHON
print(' ' * 20)
print('\033[1;30mAbaixo,seguem os 10 primeiros termos da Progressão Aritimética: \033[m')
for p_a in range(t1 ,a10 + r, r): # Está escrito a10 + r porque o Python ignora o último termo.
print(f'\033[0;34m{p_a}\033[m', end=' ')
print('\n\033[1;31mFIM!\033[m')
| [
2,
1475,
2798,
8836,
66,
952,
11361,
1303,
2713,
16,
532,
18387,
28749,
317,
799,
76,
25125,
3970,
198,
2,
198,
2,
2935,
268,
10396,
6862,
23781,
1430,
64,
8358,
443,
544,
267,
4810,
12789,
43708,
28994,
11770,
304,
257,
17926,
57,
... | 2.101732 | 462 |
import json
import binascii
import uuid
import os
import tempfile
import pathlib
import urllib.error
import urllib.request
import shutil
from flask import current_app as app
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy import text
import tenacity
import numpy as np
from sqlalchemy.orm.exc import NoResultFound
from app.main.lib.shared_models.shared_model import SharedModel
from app.main import db
from app.main.model.audio import Audio
| [
11748,
33918,
198,
11748,
9874,
292,
979,
72,
198,
11748,
334,
27112,
198,
11748,
28686,
198,
11748,
20218,
7753,
198,
11748,
3108,
8019,
198,
11748,
2956,
297,
571,
13,
18224,
198,
11748,
2956,
297,
571,
13,
25927,
198,
11748,
4423,
34... | 3.426471 | 136 |
import logging
import pandas as pd
from datetime import timedelta
from ..models import Index, Quote, Quote_CSI300, Ublb_cross, Rsi_predict_report
from ..utils.utils import gen_id
from stockstats import StockDataFrame
logger = logging.getLogger('main.ublb')
pd.set_option('mode.chained_assignment', None)
| [
11748,
18931,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
4818,
8079,
1330,
28805,
12514,
198,
6738,
11485,
27530,
1330,
12901,
11,
19879,
11,
19879,
62,
7902,
40,
6200,
11,
471,
2436,
65,
62,
19692,
11,
12820,
72,
62,
79,
17407,... | 3.177083 | 96 |
__version__ = '0.1.0'
default_app_config = 'wagtailrelevancy.apps.WagtailRelevancyConfig'
| [
834,
9641,
834,
796,
705,
15,
13,
16,
13,
15,
6,
198,
12286,
62,
1324,
62,
11250,
796,
705,
86,
363,
13199,
260,
2768,
3883,
13,
18211,
13,
54,
363,
13199,
3041,
2768,
3883,
16934,
6,
198
] | 2.432432 | 37 |
#
# PySNMP MIB module CISCO-BITS-CLOCK-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-BITS-CLOCK-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:34:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
entPhysicalDescr, entPhysicalIndex = mibBuilder.importSymbols("ENTITY-MIB", "entPhysicalDescr", "entPhysicalIndex")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
ModuleIdentity, Counter32, IpAddress, TimeTicks, NotificationType, Bits, Unsigned32, Gauge32, iso, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Integer32, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Counter32", "IpAddress", "TimeTicks", "NotificationType", "Bits", "Unsigned32", "Gauge32", "iso", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Integer32", "Counter64")
TimeStamp, TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TimeStamp", "TruthValue", "DisplayString", "TextualConvention")
ciscoBitsClockMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 459))
ciscoBitsClockMIB.setRevisions(('2005-01-21 00:00',))
if mibBuilder.loadTexts: ciscoBitsClockMIB.setLastUpdated('200501210000Z')
if mibBuilder.loadTexts: ciscoBitsClockMIB.setOrganization('Cisco Systems, Inc.')
ciscoBitsClockMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 459, 0))
ciscoBitsClockMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 459, 1))
ciscoBitsClockMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 459, 2))
cBitsClkSourceTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1), )
if mibBuilder.loadTexts: cBitsClkSourceTable.setStatus('current')
cBitsClkSourceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"))
if mibBuilder.loadTexts: cBitsClkSourceEntry.setStatus('current')
cBitsClkSourceRoleAdmin = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("primary", 1), ("secondary", 2), ("tertiary", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cBitsClkSourceRoleAdmin.setStatus('current')
cBitsClkSourceRoleCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("unavailable", 0), ("primary", 1), ("secondary", 2), ("tertiary", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cBitsClkSourceRoleCurrent.setStatus('current')
cBitsClkSourceTimestamp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1, 1, 3), TimeStamp()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cBitsClkSourceTimestamp.setStatus('current')
cBitsClkSourceActiveSeconds = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1, 1, 4), Counter32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: cBitsClkSourceActiveSeconds.setStatus('current')
cBitsClkSourceInactiveSeconds = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1, 1, 5), Counter32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: cBitsClkSourceInactiveSeconds.setStatus('current')
cBitsClkSourceDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 1, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cBitsClkSourceDescription.setStatus('current')
cBitsClkNotifEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 459, 1, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cBitsClkNotifEnabled.setStatus('current')
ciscoBitsClockSource = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 459, 0, 1)).setObjects(("ENTITY-MIB", "entPhysicalDescr"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceDescription"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceRoleAdmin"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceRoleCurrent"))
if mibBuilder.loadTexts: ciscoBitsClockSource.setStatus('current')
ciscoBitsClockFreerun = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 459, 0, 2)).setObjects(("ENTITY-MIB", "entPhysicalDescr"))
if mibBuilder.loadTexts: ciscoBitsClockFreerun.setStatus('current')
ciscoBitsClockHoldover = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 459, 0, 3)).setObjects(("ENTITY-MIB", "entPhysicalDescr"))
if mibBuilder.loadTexts: ciscoBitsClockHoldover.setStatus('current')
ciscoBitsClockMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 459, 2, 1))
ciscoBitsClockMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 459, 2, 2))
ciscoBitsClockMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 459, 2, 1, 1)).setObjects(("CISCO-BITS-CLOCK-MIB", "ciscoBitsClockSourceGroup"), ("CISCO-BITS-CLOCK-MIB", "ciscoBitsClockNotifGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoBitsClockMIBCompliance = ciscoBitsClockMIBCompliance.setStatus('current')
ciscoBitsClockSourceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 459, 2, 2, 1)).setObjects(("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceRoleAdmin"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceRoleCurrent"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceTimestamp"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceActiveSeconds"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceInactiveSeconds"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkSourceDescription"), ("CISCO-BITS-CLOCK-MIB", "cBitsClkNotifEnabled"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoBitsClockSourceGroup = ciscoBitsClockSourceGroup.setStatus('current')
ciscoBitsClockNotifGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 459, 2, 2, 2)).setObjects(("CISCO-BITS-CLOCK-MIB", "ciscoBitsClockSource"), ("CISCO-BITS-CLOCK-MIB", "ciscoBitsClockFreerun"), ("CISCO-BITS-CLOCK-MIB", "ciscoBitsClockHoldover"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoBitsClockNotifGroup = ciscoBitsClockNotifGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-BITS-CLOCK-MIB", cBitsClkSourceRoleAdmin=cBitsClkSourceRoleAdmin, cBitsClkNotifEnabled=cBitsClkNotifEnabled, cBitsClkSourceRoleCurrent=cBitsClkSourceRoleCurrent, ciscoBitsClockFreerun=ciscoBitsClockFreerun, ciscoBitsClockNotifGroup=ciscoBitsClockNotifGroup, ciscoBitsClockMIBConform=ciscoBitsClockMIBConform, ciscoBitsClockSource=ciscoBitsClockSource, cBitsClkSourceInactiveSeconds=cBitsClkSourceInactiveSeconds, cBitsClkSourceTable=cBitsClkSourceTable, ciscoBitsClockSourceGroup=ciscoBitsClockSourceGroup, ciscoBitsClockMIBCompliances=ciscoBitsClockMIBCompliances, ciscoBitsClockMIB=ciscoBitsClockMIB, ciscoBitsClockMIBGroups=ciscoBitsClockMIBGroups, ciscoBitsClockMIBCompliance=ciscoBitsClockMIBCompliance, ciscoBitsClockHoldover=ciscoBitsClockHoldover, ciscoBitsClockMIBNotifs=ciscoBitsClockMIBNotifs, PYSNMP_MODULE_ID=ciscoBitsClockMIB, ciscoBitsClockMIBObjects=ciscoBitsClockMIBObjects, cBitsClkSourceEntry=cBitsClkSourceEntry, cBitsClkSourceDescription=cBitsClkSourceDescription, cBitsClkSourceActiveSeconds=cBitsClkSourceActiveSeconds, cBitsClkSourceTimestamp=cBitsClkSourceTimestamp)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
36159,
8220,
12,
26094,
50,
12,
5097,
11290,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
45,
13,
16,
2723,
2393,
1378,
14,
144... | 2.544774 | 3,071 |
#!/usr/bin/env python3
"""
Crabada script to send mining all available teams for
the given user.
Usage:
python3 -m bin.mining.sendTeamsMining <userAddress>
Author:
@coccoinomane (Twitter)
"""
from src.bot.mining.sendTeamsMining import sendTeamsMining
from src.helpers.general import secondOrNone
from src.models.User import User
from src.common.logger import logger
from sys import argv, exit
userAddress = secondOrNone(argv)
if not userAddress:
logger.error("Specify a user address")
exit(1)
nSent = sendTeamsMining(User(userAddress), loot_point_filter=True)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
37811,
198,
13916,
397,
4763,
4226,
284,
3758,
9691,
477,
1695,
3466,
329,
198,
1169,
1813,
2836,
13,
198,
198,
28350,
25,
198,
220,
220,
220,
21015,
18,
532,
76,
9874,
13,
45374... | 2.954315 | 197 |
from . import model as torchTensor
| [
6738,
764,
1330,
2746,
355,
28034,
51,
22854,
198
] | 3.888889 | 9 |
"""event_tweaks
Revision ID: e88bc62b6e4
Revises: 4d3c2b4ceacb
Create Date: 2015-03-17 14:04:09.394924
"""
# revision identifiers, used by Alembic.
revision = 'e88bc62b6e4'
down_revision = '4d3c2b4ceacb'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
| [
37811,
15596,
62,
83,
732,
4730,
198,
198,
18009,
1166,
4522,
25,
304,
3459,
15630,
5237,
65,
21,
68,
19,
198,
18009,
2696,
25,
604,
67,
18,
66,
17,
65,
19,
344,
330,
65,
198,
16447,
7536,
25,
1853,
12,
3070,
12,
1558,
1478,
25,... | 2.363636 | 143 |
# -*- coding: utf-8 -*-
import datetime
import decimal
from unittest import TestCase
from jukoro import arrow
from jukoro import json
from jukoro import pg
A = arrow.JuArrow
D = decimal.Decimal
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
4818,
8079,
198,
11748,
32465,
198,
6738,
555,
715,
395,
1330,
6208,
20448,
198,
198,
6738,
474,
2724,
16522,
1330,
15452,
198,
6738,
474,
2724,
16522,
1330,
... | 2.926471 | 68 |
"""Information about the app."""
__version__ = '0.5.0'
__url__ = 'https://github.com/fennekki/cdparacord'
| [
37811,
21918,
546,
262,
598,
526,
15931,
198,
834,
9641,
834,
796,
705,
15,
13,
20,
13,
15,
6,
198,
834,
6371,
834,
796,
705,
5450,
1378,
12567,
13,
785,
14,
69,
29727,
74,
4106,
14,
10210,
1845,
330,
585,
6,
198
] | 2.52381 | 42 |
#!/usr/bin/python
import serial, time, sys, fileinput
#open and configure serial port
ser = serial.Serial(
port='/dev/ttyUSB0',
baudrate=19200,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS,
timeout = .1
)
#first, clear out 210 buffer
count = 0
while (count < 3):
count +=1
ser.write("\r".encode())
time.sleep(.1)
#open file for download
file = open("download.mem", "w+")
#send command to rc210 to start download
ser.write("1SendEram\r\n".encode())
indata =""
Counter = 0
progresscounter = 0
var = 1
while var == 1:
inser = str(ser.readline())
indata = inser.strip
print (indata)
if indata == "Complete": #check for first character of "Complete" and exit loop
break
Counter = Counter + 1
else:
Counter = 0
ser.write("\r".encode())
file.write(indata)
#LineCount -= 1
progresscounter += 1
progress = progresscounter / 44
if( progress > 100 ) : progress = 100
ser.write("OK\r\n".encode())
#print( '\rDownloading: %s (%d%%)' % ("|"*(progress/2), progress)),
sys.stdout.flush()
if Counter > 10 :
file.close()
sys.exit("RC210 did not respond. Exiting")
print ("\nDownload Complete")
file.close()
#now for RTC
sys.exit()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
11748,
11389,
11,
640,
11,
25064,
11,
2393,
15414,
198,
220,
198,
2,
9654,
290,
17425,
11389,
2493,
198,
2655,
796,
11389,
13,
32634,
7,
198,
220,
220,
220,
2493,
11639,
14,
7959,
14,
... | 2.502913 | 515 |
#!/usr/bin/env python3
#coding: utf-8
"""
pythonic way
"""
lst = ['aaabb', 'caca', 'dabc', 'acc', 'abbb']
res = ','.join(lst[1:-1])
print(res)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
66,
7656,
25,
3384,
69,
12,
23,
198,
37811,
198,
29412,
291,
835,
198,
37811,
198,
198,
75,
301,
796,
37250,
7252,
6485,
3256,
705,
66,
22260,
3256,
705,
67,
39305,
3256,
70... | 2.028169 | 71 |
"""A pre-commit hook dependency."""
from __future__ import annotations
from dataclasses import dataclass
from neophile.dependency.base import Dependency
__all__ = ["PreCommitDependency"]
@dataclass(frozen=True, order=True)
class PreCommitDependency(Dependency):
"""Represents a single pre-commit dependency."""
repository: str
"""The URL of the GitHub repository providing this pre-commit hook."""
owner: str
"""The GitHub repository owner of the pre-commit hook."""
repo: str
"""The GitHub repository name of the pre-commit hook."""
version: str
"""The version of the dependency (may be a match pattern)."""
| [
37811,
32,
662,
12,
41509,
8011,
20203,
526,
15931,
198,
198,
6738,
11593,
37443,
834,
1330,
37647,
198,
198,
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
198,
198,
6738,
497,
37161,
13,
45841,
1387,
13,
8692,
1330,
37947,
1387,
198,... | 3.308081 | 198 |
#!/bin/python3
#
# Copyright (c) 2019 Joakim Eriksson
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
#
# Experimental NAT64 and DNS64 using tun-interface (for the NAT64 IPv6 interface and the
# regular sockets as the TCP/UDP interface.
#
import ipaddress, os, select, time
import socket, logging, struct
import dpkt
# TCP State machine
TCP_INIT = 1 # Same as LISTEN... more or less...
TCP_SYN_RECEIVED = 2
TCP_SYN_SENT = 3
TCP_ESTABLISHED = 4
TCP_FIN_WAIT = 5
TCP_FIN_CLOSE_WAIT = 6
TYPE_HANDSHAKE_MAC_GET = 1
TYPE_HANDSHAKE_MAC_SET = 2
TYPE_RAW_IPV6 = 6
# Protocol numbers
PROTO_UDP = 17
PROTO_TCP = 6
PROTO_ICMP = 58
PROTOS = {PROTO_UDP: "udp", PROTO_TCP: "tcp", PROTO_ICMP: "icmp"}
MAC = b'\xca\xba\x88\x88\x00\xaa\xbb\x01'
macaddr = 1
sockmap = {}
adrmap = {}
input = []
tuntcp = []
tun = None
tunconnection = None
prefix = ipaddress.ip_address("64:ff9b::0").packed
log = logging.getLogger('nat64')
log.setLevel(logging.DEBUG)
# create log formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
log.addHandler(ch)
# Remove the state for this specific socket
# Tun is reception from local machine - not from native or NBR.
# TunTcp from NBR or native platform.
# Only for OS-X for now.
# Should be easy to adapt for linux also.
tun = os.open("/dev/tun12", os.O_RDWR)
os.system("ifconfig tun12 inet6 64:ff9b::1/96 up")
os.system("sysctl -w net.inet.ip.forwarding=1");
input = [tun]
tunconnection = None
tunsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('localhost', 8888)
tunsock.bind(server_address)
tunsock.listen(1)
log.info("Accepting tunctp connections over TCP on 8888.")
input = input + [tunsock]
try:
while 1:
inputready,outputready,exceptready = select.select(input,[],input)
for r in inputready:
if r == tun:
packet = os.read(tun, 4000)
recv_from_tun(packet)
# Something from the tuntcp connections.
elif r in tuntcp:
data = r.recv(4000)
if not data:
log.debug(">> TUNTCP Socket shutdown - removing socket!")
input.remove(r)
tuntcp.remove(r)
else:
recv_from_tuntcp(r, data)
# Something on the accept socket!?
elif r == tunsock:
tunconnection, client_address = tunsock.accept()
log.debug("Connection from: %s", client_address)
input = input + [tunconnection]
tuntcp = tuntcp + [tunconnection]
# Otherwise it is on a NAT64:ed socket
else:
st = sockmap[r]
# Receive will receive and send back over tun.
data = st.receive()
if not data:
log.debug(">> Socket shutdown - remove socket?!")
for r in exceptready:
print(r)
except KeyboardInterrupt:
log.error("Stopped by user.") | [
2,
48443,
8800,
14,
29412,
18,
198,
2,
198,
2,
15069,
357,
66,
8,
13130,
5302,
461,
320,
412,
39370,
1559,
198,
2,
1439,
2489,
10395,
13,
198,
2,
198,
2,
2297,
396,
3890,
290,
779,
287,
2723,
290,
13934,
5107,
11,
351,
393,
1231... | 2.501941 | 1,803 |
from setuptools import setup, find_packages
from setuptools import setup
import os
import re
dependencies = [
'pandas>=0.16.0',
'numpy>=1.9.2',
'scipy>=1.6.0',
'statsmodels>=0.12.2',
'networkx>=2.5',
]
with open(os.path.join(os.path.dirname(__file__), "fixedeffect", "_version.py")) as file:
for line in file:
m = re.fullmatch("__version__ = '([^']+)'\n", line)
if m:
version = m.group(1)
setup(name='FixedEffectModel',
version=version,
description='Solutions to linear model with high dimensional fixed effects.',
long_description=readme(),
long_description_content_type="text/markdown",
author='ksecology',
author_email='da_ecology@kuaishou.com',
url='https://github.com/ksecology/FixedEffectModel',
packages=find_packages(),
install_requires=dependencies,
zip_safe=False,
license='MIT',
python_requires='>=3.6',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Sociology',
'Topic :: Scientific/Engineering :: Information Analysis',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Operating System :: OS Independent',
]
)
| [
6738,
900,
37623,
10141,
1330,
9058,
11,
1064,
62,
43789,
198,
6738,
900,
37623,
10141,
1330,
9058,
198,
11748,
28686,
198,
11748,
302,
628,
198,
45841,
3976,
796,
685,
198,
220,
220,
220,
705,
79,
392,
292,
29,
28,
15,
13,
1433,
13... | 2.394525 | 621 |
import os
import pandas as pd
import matplotlib.pyplot as plt
if os.path.exists("datapoints.csv"):
data = pd.read_csv("datapoints.csv")
else:
print("File does not exist")
dataY = data['y'].copy()
dataX = data['x'].copy()
sumx = sum1(dataX)
sumy = sum1(dataY)
sumxy = sum2(dataX, dataY)
sumxx = sum2(dataX, dataX)
resultA = (sumxy-sumx*sumy)/(sumxx - sumx**2)
resultB = sumy - resultA*sumx
print(resultA, resultB)
| [
11748,
28686,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
361,
28686,
13,
6978,
13,
1069,
1023,
7203,
19608,
499,
1563,
82,
13,
40664,
1,
2599,
198,
220,
220,
220,
1366,
7... | 2.274194 | 186 |
import aspose.slides as slides
| [
11748,
355,
3455,
13,
6649,
1460,
355,
19392,
198
] | 3.444444 | 9 |
"""
Placeholders
"""
# You're writing a program, and you don't know what your starting value for your 'initial' variable is yet. The program won't run if you leave it blank, but you don't want to forget you need it! Make a workaround.
| [
37811,
198,
27271,
10476,
198,
37811,
198,
198,
2,
921,
821,
3597,
257,
1430,
11,
290,
345,
836,
470,
760,
644,
534,
3599,
1988,
329,
534,
705,
36733,
6,
7885,
318,
1865,
13,
383,
1430,
1839,
470,
1057,
611,
345,
2666,
340,
9178,
... | 3.95 | 60 |
import io
import os
import pathlib
import tempfile
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
import oscar
from tests import _site
| [
11748,
33245,
198,
11748,
28686,
198,
11748,
3108,
8019,
198,
11748,
20218,
7753,
198,
198,
6738,
42625,
14208,
13,
7295,
13,
27604,
1330,
869,
62,
21812,
198,
6738,
42625,
14208,
13,
7295,
13,
27604,
13,
8692,
1330,
9455,
12331,
198,
6... | 3.75 | 60 |
from ctypes import *
"""
>>> p = parse_log_line('/plogger/ || 50.73.113.242 || - || 21/Mar/2013:13:22:13 +0000 || GET /plogger/?rand=1363872131875&idsite=deadspin.com&url=http%3A%2F%2Fdeadspin.com%2Frecommended&urlref=http%3A%2F%2Fdeadspin.com%2F&screen=1024x768%7C1024x738%7C24&data=%7B%22parsely_uuid%22%3A%22908932BF-0935-46AD-84BD-10120D5297CA%22%2C%22parsely_site_uuid%22%3A%22908932BF-0935-46AD-84BD-10120D5297CA%22%7D&title=Deadspin+-+Sports+News+without+Access%2C+Favor%2C+or+Discretion&date=Thu+Mar+21+2013+08%3A22%3A11+GMT-0500+(Central+Daylight+Time)&action=pageview HTTP/1.1 || 200 || 363 || - || Mozilla/5.0 (Windows NT 5.1; rv:19.0) Gecko/20100101 Firefox/19.0" || - || - || parsely_network_uuid=CrMHN1FLCYUJWgTmkT47Ag==; expires=Thu, 31-Dec-37 23:55:55 GMT; domain=track.parse.ly; path=/ || 0.000')
>>> {'i': '50.73.113.242', 'r': {'title': 'Deadspin - Sports News without Access, Favor, or Discretion', 'url': 'http://deadspin.com/recommended', 'screen': '1024x768|1024x738|24', 'action': 'pageview', 'urlref': 'http://deadspin.com/', 'date': 'Thu Mar 21 2013 08:22:11 GMT-0500 (Central Daylight Time)', 'idsite': 'deadspin.com', 'data': {'parsely_site_uuid': '908932BF-0935-46AD-84BD-10120D5297CA', 'parsely_uuid': '908932BF-0935-46AD-84BD-10120D5297CA'}}, 'u': 'Mozilla/5.0 (Windows NT 5.1; rv:19.0) Gecko/20100101 Firefox/19.0', 't': dt.datetime(2013, 3, 21, 13, 22, 11, 875000)} == p
True
"""
line = "/plogger/ || 191.251.123.60 || - || 31/Aug/2015:23:49:01 +0000 || GET /plogger/?rand=1441064941650&idsite=bolsademulher.com&url=http%3A%2F%2Fwww.bolsademulher.com%2Fbebe%2Fo-que-o-bebe-sente-dentro-da-barriga-quando-a-mae-faz-sexo-4-sensacoes-surpreendentes%2F%3Futm_source%3Dfacebook%26utm_medium%3Dmanual%26utm_campaign%3DBolsaFB&urlref=http%3A%2F%2Fm.facebook.com%2F&screen=360x592%7C360x592%7C32&data=%7B%22parsely_uuid%22%3A%22b5e2fcb7-966f-40f8-b41c-fca446908a56%22%2C%22parsely_site_uuid%22%3A%226e9ab165-497c-45be-9998-e029372b5a92%22%7D&sid=1&surl=http%3A%2F%2Fwww.bolsademulher.com%2Fbebe%2Fo-que-o-bebe-sente-dentro-da-barriga-quando-a-mae-faz-sexo-4-sensacoes-surpreendentes%2F%3Futm_source%3Dfacebook%26utm_medium%3Dmanual%26utm_campaign%3DBolsaFB&sref=http%3A%2F%2Fm.facebook.com%2F&sts=1441064914096&slts=0&date=Mon+Aug+31+2015+20%3A49%3A01+GMT-0300+(BRT)&action=heartbeat&inc=6 HTTP/1.1 || 200 || 236 || http://www.bolsademulher.com/bebe/o-que-o-bebe-sente-dentro-da-barriga-quando-a-mae-faz-sexo-4-sensacoes-surpreendentes/?utm_source=facebook&utm_medium=manual&utm_campaign=BolsaFB || Mozilla/5.0 (Linux; Android 4.4.4; XT1025 Build/KXC21.5-40) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/33.0.0.0 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/34.0.0.43.267;] || - || - || - || 0.000"
main()
| [
6738,
269,
19199,
1330,
1635,
198,
198,
37811,
198,
33409,
279,
796,
21136,
62,
6404,
62,
1370,
10786,
14,
489,
519,
1362,
14,
8614,
2026,
13,
4790,
13,
16616,
13,
27877,
8614,
532,
8614,
2310,
14,
7676,
14,
6390,
25,
1485,
25,
1828... | 2.052161 | 1,342 |
import unittest
from typing import List
from decoder import Decoder
if __name__ == '__main__':
unittest.main()
| [
11748,
555,
715,
395,
198,
6738,
19720,
1330,
7343,
198,
6738,
875,
12342,
1330,
34580,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
555,
715,
395,
13,
12417,
3419,
198
] | 3.025641 | 39 |
from abc import ABCMeta, abstractmethod
from pydantic import BaseModel
class ServiceInterface(metaclass=ABCMeta):
"""Class representing the service interface."""
@abstractmethod
async def create(self, schema_in: BaseModel):
"""
Create new entity and returns the saved instance.
"""
raise NotImplementedError()
@abstractmethod
async def update(self, instance: BaseModel, schema_in: BaseModel):
"""Updates an entity and returns the saved instance."""
raise NotImplementedError()
@abstractmethod
async def get(self, **kwargs):
"""Get and return one instance by filter."""
raise NotImplementedError()
@abstractmethod
async def delete(self, **kwargs):
"""Delete one instance by filter."""
raise NotImplementedError()
| [
6738,
450,
66,
1330,
9738,
48526,
11,
12531,
24396,
198,
198,
6738,
279,
5173,
5109,
1330,
7308,
17633,
628,
198,
4871,
4809,
39317,
7,
4164,
330,
31172,
28,
24694,
48526,
2599,
198,
220,
220,
220,
37227,
9487,
10200,
262,
2139,
7071,
... | 2.859589 | 292 |
# ---------------------------------------------------------------------------------------------
# Copyright (c) Akash Nag. All rights reserved.
# Licensed under the MIT License. See LICENSE.md in the project root for license information.
# ---------------------------------------------------------------------------------------------
# This module implements the CheckBox widget
from ash.gui import *
# when checkbox receives focus
# when checkbox loses focus
# returns checkbox state
# draws the checkbox
# when keypress occurs: space toggles checkbox state
# returns the string representation: checkbox text
# set the checked-status of the checkbox | [
2,
16529,
1783,
32501,
198,
2,
220,
15069,
357,
66,
8,
9084,
1077,
15196,
13,
1439,
2489,
10395,
13,
198,
2,
220,
49962,
739,
262,
17168,
13789,
13,
4091,
38559,
24290,
13,
9132,
287,
262,
1628,
6808,
329,
5964,
1321,
13,
198,
2,
... | 5.030075 | 133 |
from {{cookiecutter.directory_name}} import models
import pytest
| [
6738,
22935,
44453,
8968,
353,
13,
34945,
62,
3672,
11709,
1330,
4981,
198,
11748,
12972,
9288,
198
] | 3.823529 | 17 |
import optparse
from des115.lib.interface import *
| [
11748,
2172,
29572,
198,
198,
6738,
748,
15363,
13,
8019,
13,
39994,
1330,
1635,
628,
628,
198
] | 3.294118 | 17 |
from flask_restplus import Resource
from flask_jwt_extended import jwt_required, get_jwt_identity, get_jwt_claims
from . import api_v1, fqdn_ns, fqdn_m, fqdn_mr
from ..data.fqdns import Fqdns, Fqdn
from ..helpers.helpers import Haproxy
@fqdn_ns.route('', endpoint='fqdn')
@fqdn_ns.response(401, "Token has expired, bad credentials or reserved for administrators")
@fqdn_ns.response(201, "Successfully created")
@fqdn_ns.response(409, "Can't create already THIS present 'fqdn' fqdn")
@fqdn_ns.response(406, "Error on definition content, please rewrite your definition")
class Fqdns_R(Resource):
"""Shows a list of all Fqdns(), and lets you POST to add new fqdn"""
@jwt_required
@fqdn_ns.doc('list_frontends', security='apikey')
@fqdn_ns.marshal_list_with(fqdn_mr)
def get(self):
"""List all fqdn entries that you own."""
if get_jwt_claims()['roles'] == 'admin':
return Fqdns().json()
return Fqdns().json(get_jwt_identity())
@jwt_required
@fqdn_ns.doc('Add Frontend fqdn', security='apikey')
@fqdn_ns.expect(fqdn_m)
@fqdn_ns.marshal_with(fqdn_mr)
def post(self):
"""Create a new fqdn entry"""
api_v1.payload.update({'owner': get_jwt_identity()})
if Fqdn(api_v1.payload['fqdn']).exists():
return { 'message': "Can't create WTF already present 'fqdn' fqdn"}, 409
f = Fqdn().create(api_v1.payload)
if not f.is_publish_fail():
return f.json(), 201
else:
f.destroy()
f.state = "publish_failed"
return f.json(), 406
@fqdn_ns.route('/<string:fqdn>', endpoint='fqdnchange')
@fqdn_ns.response(400, "can't get or modify non-existent fqdn")
@fqdn_ns.response(401, "Token has expired, bad credentials or reserved for administrators")
@fqdn_ns.response(409, "Can't modify not present 'fqdn' fqdn")
@fqdn_ns.response(200, "Operation is successful")
class Fqdn_R(Resource):
"""Modify fqdn"""
@jwt_required
@fqdn_ns.doc('show fqdn', security='apikey')
@fqdn_ns.marshal_with(fqdn_mr)
def get(self, fqdn):
"""Show a fqdn entry that you own"""
result = Fqdn(fqdn)
if not result.exists():
fqdn_ns.abort(400, "can't get non-existent fqdn")
if get_jwt_claims()['roles'] == 'admin' or get_jwt_identity() == result.owner:
return result.json()
@jwt_required
@fqdn_ns.doc('update fqdn', security='apikey')
@fqdn_ns.expect(fqdn_m)
@fqdn_ns.marshal_with(fqdn_mr)
def put(self, fqdn):
"""Modify a fqdn entry that you own"""
if not Fqdn(fqdn).exists():
fqdn_ns.abort(400, "can't modify non-existent fqdn")
if Fqdn(fqdn).owner != get_jwt_identity() and get_jwt_claims()['roles'] != 'admin':
fqdn_ns.abort(401, "you don't own this fqdn")
f = Fqdn(fqdn).update(api_v1.payload)
if f.is_publish_fail():
return f.json(), 406
else:
return f.json(), 201
@jwt_required
@fqdn_ns.doc('remove fqdn', security='apikey')
@fqdn_ns.marshal_with(fqdn_mr)
# @tenant.response(204, 'fqdn deleted (set state to remove)')
def delete(self, fqdn):
"""definitly remove a fqdn entry that you own from this service."""
if not Fqdn(fqdn).exists():
fqdn_ns.abort(400, "can't modify non-existent fqdn")
if Fqdn(fqdn).owner != get_jwt_identity() and get_jwt_claims()['roles'] != 'admin':
fqdn_ns.abort(401, "you don't own this fqdn")
return Fqdn(fqdn).destroy().json()
@fqdn_ns.route('/<string:fqdn>/hastats')
@fqdn_ns.response(400, "can't get non-existent fqdn")
@fqdn_ns.response(401, "Token has expired, bad credentials or reserved for administrators")
@fqdn_ns.response(200, "Operation is successful")
class Hastats_R(Resource):
"""Haproxy stats"""
@jwt_required
@fqdn_ns.doc("show backend's fqdn full stats", security='apikey')
def get(self, fqdn):
"""Show backend's fqdn full stats that you own"""
result = Fqdn(fqdn)
if not result.exists():
fqdn_ns.abort(400, "can't get stats on non-existent fqdn")
if get_jwt_claims()['roles'] == 'admin' or get_jwt_identity() == result.owner:
return Haproxy().getstats(result.backend_name)
@fqdn_ns.route('/<string:fqdn>/status')
@fqdn_ns.response(400, "can't get non-existent fqdn")
@fqdn_ns.response(401, "Token has expired, bad credentials or reserved for administrators")
@fqdn_ns.response(200, "Operation is successful")
class Hastatus_R(Resource):
"""Haproxy status"""
@jwt_required
@fqdn_ns.doc("show backend's fqdn short status", security='apikey')
def get(self, fqdn):
"""Show backend's fqdn short status"""
result = Fqdn(fqdn)
if not result.exists():
fqdn_ns.abort(400, "can't get stats on non-existent fqdn")
if get_jwt_claims()['roles'] == 'admin' or get_jwt_identity() == result.owner:
return Haproxy().getstatus(result.backend_name)
| [
6738,
42903,
62,
2118,
9541,
1330,
20857,
198,
6738,
42903,
62,
73,
46569,
62,
2302,
1631,
1330,
474,
46569,
62,
35827,
11,
651,
62,
73,
46569,
62,
738,
414,
11,
651,
62,
73,
46569,
62,
6604,
82,
198,
6738,
764,
1330,
40391,
62,
8... | 2.188016 | 2,303 |
from peewee import DoubleField, CompositeKey
from wx.app import database
| [
6738,
613,
413,
1453,
1330,
11198,
15878,
11,
49355,
9218,
198,
198,
6738,
266,
87,
13,
1324,
1330,
6831,
628
] | 3.75 | 20 |
from flask import Flask, redirect,render_template, request
import json
import view_model as vm
app = Flask(__name__)
@app.route("/")
@app.route("/get_price",methods=['POST'])
if __name__ == "__main__":
app.run(debug=True) | [
6738,
42903,
1330,
46947,
11,
18941,
11,
13287,
62,
28243,
11,
2581,
198,
11748,
33918,
198,
11748,
1570,
62,
19849,
355,
45887,
198,
198,
1324,
796,
46947,
7,
834,
3672,
834,
8,
198,
198,
31,
1324,
13,
38629,
7203,
14,
4943,
198,
1... | 2.72619 | 84 |
"""Add the staff columns to the feature and user tables.
Revision ID: ef3059e0396
Revises: 3bf1c2289e8d
Create Date: 2015-07-30 16:25:14.837823
"""
# revision identifiers, used by Alembic.
revision = 'ef3059e0396'
down_revision = '3bf1c2289e8d'
from alembic import op
import sqlalchemy as sa
| [
37811,
4550,
262,
3085,
15180,
284,
262,
3895,
290,
2836,
8893,
13,
198,
198,
18009,
1166,
4522,
25,
304,
69,
1270,
3270,
68,
15,
34107,
198,
18009,
2696,
25,
513,
19881,
16,
66,
1828,
4531,
68,
23,
67,
198,
16447,
7536,
25,
1853,
... | 2.547009 | 117 |
from unittest.mock import MagicMock
from tda.utils import AccountIdMismatchException, Utils
from tda.utils import UnsuccessfulOrderException
from tda.utils import EnumEnforcer
from .utils import no_duplicates, MockResponse
import enum
import unittest
| [
6738,
555,
715,
395,
13,
76,
735,
1330,
6139,
44,
735,
198,
6738,
256,
6814,
13,
26791,
1330,
10781,
7390,
44,
1042,
963,
16922,
11,
7273,
4487,
198,
6738,
256,
6814,
13,
26791,
1330,
791,
17212,
18743,
16922,
198,
6738,
256,
6814,
... | 3.479452 | 73 |
# -*- test-case-name: twistedcaldav.test.test_extensions -*-
##
# Copyright (c) 2005-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from __future__ import print_function
"""
Extensions to web2.dav
"""
__all__ = [
"DAVResource",
"DAVResourceWithChildrenMixin",
"DAVPrincipalResource",
"DAVFile",
"ReadOnlyWritePropertiesResourceMixIn",
"ReadOnlyResourceMixIn",
"CachingPropertyStore",
]
import urllib
import time
from itertools import cycle
from twisted.internet.defer import succeed, maybeDeferred
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.web.template import Element, XMLFile, renderer, tags, flattenString
from twisted.python.modules import getModule
from txweb2 import responsecode, server
from txweb2.http import HTTPError, Response, RedirectResponse
from txweb2.http import StatusResponse
from txweb2.http_headers import MimeType
from txweb2.stream import FileStream
from txweb2.static import MetaDataMixin, StaticRenderMixin
from txdav.xml import element
from txdav.xml.base import encodeXMLName
from txdav.xml.element import dav_namespace
from txweb2.dav.http import MultiStatusResponse
from txweb2.dav.static import DAVFile as SuperDAVFile
from txweb2.dav.resource import DAVResource as SuperDAVResource
from txweb2.dav.resource import (
DAVPrincipalResource as SuperDAVPrincipalResource
)
from twisted.internet.defer import gatherResults
from txweb2.dav.method import prop_common
from twext.python.log import Logger
from twistedcaldav import customxml
from twistedcaldav.customxml import calendarserver_namespace
from twistedcaldav.method.report import http_REPORT
from twistedcaldav.config import config
from txdav.who.directory import CalendarDirectoryRecordMixin
from twext.who.expression import Operand, MatchType, MatchFlags
thisModule = getModule(__name__)
log = Logger()
class DirectoryElement(Element):
"""
A L{DirectoryElement} is an L{Element} for rendering the contents of a
L{DirectoryRenderingMixIn} resource as HTML.
"""
loader = XMLFile(
thisModule.filePath.sibling("directory-listing.html")
)
def __init__(self, resource):
"""
@param resource: the L{DirectoryRenderingMixIn} resource being
listed.
"""
super(DirectoryElement, self).__init__()
self.resource = resource
@renderer
def resourceDetail(self, request, tag):
"""
Renderer which returns a distinct element for this resource's data.
Subclasses should override.
"""
return ''
@renderer
def children(self, request, tag):
"""
Renderer which yields all child object tags as table rows.
"""
whenChildren = (
maybeDeferred(self.resource.listChildren)
.addCallback(sorted)
.addCallback(
lambda names: gatherResults(
[maybeDeferred(self.resource.getChild, x) for x in names]
)
.addCallback(lambda children: zip(children, names))
)
)
@whenChildren.addCallback
return whenChildren
@renderer
def main(self, request, tag):
"""
Main renderer; fills slots for title, etc.
"""
return tag.fillSlots(name=request.path)
@renderer
def properties(self, request, tag):
"""
Renderer which yields all properties as table row tags.
"""
whenPropertiesListed = self.resource.listProperties(request)
@whenPropertiesListed.addCallback
return whenPropertiesListed
class DAVResource (DirectoryPrincipalPropertySearchMixIn,
SuperDAVResource,
DirectoryRenderingMixIn, StaticRenderMixin):
"""
Extended L{txweb2.dav.resource.DAVResource} implementation.
Note we add StaticRenderMixin as a base class because we need all the etag etc behavior
that is currently in static.py but is actually applicable to any type of resource.
"""
log = Logger()
http_REPORT = http_REPORT
class DAVResourceWithChildrenMixin (object):
"""
Bits needed from txweb2.static
"""
def putChild(self, name, child):
"""
Register a child with the given name with this resource.
@param name: the name of the child (a URI path segment)
@param child: the child to register
"""
self.putChildren[name] = child
def getChild(self, name):
"""
Look up a child resource. First check C{self.putChildren}, then call
C{self.makeChild} if no pre-existing children were found.
@return: the child of this resource with the given name.
"""
if name == "":
return self
result = self.putChildren.get(name, None)
if not result:
result = self.makeChild(name)
return result
def makeChild(self, name):
"""
Called by L{DAVResourceWithChildrenMixin.getChild} to dynamically
create children that have not been pre-created with C{putChild}.
"""
return None
def listChildren(self):
"""
@return: a sequence of the names of all known children of this resource.
"""
return self.putChildren.keys()
def countChildren(self):
"""
@return: the number of all known children of this resource.
"""
return len(self.putChildren.keys())
def locateChild(self, req, segments):
"""
See L{IResource.locateChild}.
"""
thisSegment = segments[0]
moreSegments = segments[1:]
return maybeDeferred(self.getChild, thisSegment).addCallback(
lambda it: (it, moreSegments)
)
class DAVResourceWithoutChildrenMixin (object):
"""
Bits needed from txweb2.static
"""
class DAVPrincipalResource (DirectoryPrincipalPropertySearchMixIn,
SuperDAVPrincipalResource,
DirectoryRenderingMixIn):
"""
Extended L{txweb2.dav.static.DAVFile} implementation.
"""
log = Logger()
http_REPORT = http_REPORT
@inlineCallbacks
class DAVFile (SuperDAVFile, DirectoryRenderingMixIn):
"""
Extended L{txweb2.dav.static.DAVFile} implementation.
"""
log = Logger()
class ReadOnlyWritePropertiesResourceMixIn (object):
"""
Read only that will allow writing of properties resource.
"""
readOnlyResponse = StatusResponse(
responsecode.FORBIDDEN,
"Resource is read only."
)
http_DELETE = _forbidden
http_MOVE = _forbidden
http_PUT = _forbidden
class ReadOnlyResourceMixIn (ReadOnlyWritePropertiesResourceMixIn):
"""
Read only resource.
"""
http_PROPPATCH = ReadOnlyWritePropertiesResourceMixIn._forbidden
class CachingPropertyStore (object):
"""
DAV property store using a dict in memory on top of another
property store implementation.
"""
log = Logger()
def extractCalendarServerPrincipalSearchData(doc):
"""
Extract relevant info from a CalendarServerPrincipalSearch document
@param doc: CalendarServerPrincipalSearch object to extract info from
@return: A tuple containing:
the list of tokens
the context string
the applyTo boolean
the clientLimit integer
the propElement containing the properties to return
"""
context = doc.attributes.get("context", None)
applyTo = False
tokens = []
clientLimit = None
for child in doc.children:
if child.qname() == (dav_namespace, "prop"):
propElement = child
elif child.qname() == (
dav_namespace,
"apply-to-principal-collection-set"
):
applyTo = True
elif child.qname() == (calendarserver_namespace, "search-token"):
tokenValue = child.toString().strip()
if tokenValue:
tokens.append(tokenValue)
elif child.qname() == (calendarserver_namespace, "limit"):
try:
nresults = child.childOfType(customxml.NResults)
clientLimit = int(str(nresults))
except (TypeError, ValueError,):
msg = "Bad XML: unknown value for <limit> element"
log.warn(msg)
raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, msg))
return tokens, context, applyTo, clientLimit, propElement
def validateTokens(tokens):
"""
Make sure there is at least one token longer than one character
@param tokens: the tokens to inspect
@type tokens: iterable of utf-8 encoded strings
@return: True if tokens are valid, False otherwise
@rtype: boolean
"""
for token in tokens:
if len(token) > 1:
return True
return False
| [
2,
532,
9,
12,
1332,
12,
7442,
12,
3672,
25,
19074,
66,
1940,
615,
13,
9288,
13,
9288,
62,
2302,
5736,
532,
9,
12,
198,
2235,
198,
2,
15069,
357,
66,
8,
5075,
12,
4626,
4196,
3457,
13,
1439,
2489,
10395,
13,
198,
2,
198,
2,
... | 2.635527 | 3,586 |
from django.conf.urls import url, include
from .views import ProductCreateView, ProductUpdateView, ProductIndexCreateView, upload_images, \
remove_uploaded_image, ProductDeleteView, change_publish_status, upload_images_product_update, \
delete_product_images, add_product_review, ProductListView, update_product_review, ProductDetailView, delete_product
urlpatterns = [
url(r'^$', ProductDetailView.as_view(), name='product_detail'),
url(r'^all/$', ProductListView.as_view(), name='product_list'),
url(r'^(?P<slug>[\w-]+)/add-product/$', ProductCreateView.as_view(), name='add_product'),
url(r'^(?P<slug>[\w-]+)/update-product/$', ProductUpdateView.as_view(), name='update_product'),
url(r'^(?P<slug>[\w-]+)/upload-product-images/$', upload_images_product_update, name='upload_product_images'),
url(r'^(?P<slug>[\w-]+)/delete-product/$', delete_product, name='delete_product'),
url(r'^add-product/$', ProductIndexCreateView.as_view(), name='add_product_index'),
url(r'^delete-product-images/$', delete_product_images, name='delete_product_images'),
url(r'^upload/images/$', upload_images, name='upload_images'),
url(r'^remove/images/$', remove_uploaded_image, name='remove_images'),
url(r'^(?P<slug>[\w-]+)/add-review/$', add_product_review, name='add_review'),
url(r'^(?P<slug>[\w-]+)/update-review/(?P<pk>[0-9]+)/$$', update_product_review, name='update_review'),
]
| [
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
19016,
11,
2291,
198,
198,
6738,
764,
33571,
1330,
8721,
16447,
7680,
11,
8721,
10260,
7680,
11,
8721,
15732,
16447,
7680,
11,
9516,
62,
17566,
11,
3467,
198,
220,
220,
220,
4781,
62,... | 2.725191 | 524 |
#!/usr/bin/python3
from tornado import web
import random
| [
2,
48443,
14629,
14,
8800,
14,
29412,
18,
198,
198,
6738,
33718,
1330,
3992,
198,
11748,
4738,
198
] | 3.222222 | 18 |
from django.urls import path
from .views import clients, edit_client, edit_client_plan
urlpatterns = [
path('clients', clients, name='clients'),
path('add_client', edit_client, name='add_client'),
path('add_client_plan', edit_client_plan, name='add_client_plan'),
path('edit_client/<id>', edit_client, name='edit_client'),
# path('add_client_plan', edit_client_plan, name='add_client_plan'),
path('update_subscription/<id>', edit_client_plan, name='update_subscription'),
]
| [
6738,
42625,
14208,
13,
6371,
82,
1330,
3108,
198,
6738,
764,
33571,
1330,
7534,
11,
4370,
62,
16366,
11,
4370,
62,
16366,
62,
11578,
198,
198,
6371,
33279,
82,
796,
685,
628,
220,
220,
220,
3108,
10786,
565,
2334,
3256,
7534,
11,
1... | 2.863636 | 176 |
from datetime import datetime
import github.Issue
from gh import HasLabel, AddLabel
from .policy import Policy
StaleIssuePolicy()
| [
6738,
4818,
8079,
1330,
4818,
8079,
198,
198,
11748,
33084,
13,
45147,
198,
198,
6738,
24997,
1330,
7875,
33986,
11,
3060,
33986,
198,
6738,
764,
30586,
1330,
7820,
628,
198,
1273,
1000,
45147,
36727,
3419,
198
] | 3.722222 | 36 |
from cogbot.cogs.robo_mod.robo_mod_condition import RoboModCondition
from cogbot.cogs.robo_mod.robo_mod_trigger import RoboModTrigger
| [
6738,
43072,
13645,
13,
66,
18463,
13,
305,
2127,
62,
4666,
13,
305,
2127,
62,
4666,
62,
31448,
1330,
39702,
5841,
48362,
198,
6738,
43072,
13645,
13,
66,
18463,
13,
305,
2127,
62,
4666,
13,
305,
2127,
62,
4666,
62,
46284,
1330,
397... | 2.934783 | 46 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'PLAFile.ui'
#
# Created: Fri Jun 10 09:11:23 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
from core.SpecificationTable import SpecificationTable
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
5178,
7822,
7560,
422,
3555,
334,
72,
2393,
705,
6489,
8579,
576,
13,
9019,
6,
198,
2,
198,
2,
15622,
25,
19480,
7653,
838,
7769,
25,
1157,
25,
1954,
2813,
... | 2.681818 | 154 |
import datetime
import unittest
from .EgyptianNationalId import EgyptianNationalId
VALID_ID = "30103211203135"
if __name__ == "__main__":
unittest.main()
| [
11748,
4818,
8079,
198,
11748,
555,
715,
395,
198,
198,
6738,
764,
39299,
666,
16186,
7390,
1330,
14075,
16186,
7390,
198,
198,
23428,
2389,
62,
2389,
796,
366,
18938,
3070,
2481,
1065,
3070,
17059,
1,
628,
198,
198,
361,
11593,
3672,
... | 2.762712 | 59 |
import numpy as np
import time
from common import get_args,experiment_setup
if __name__=='__main__':
args = get_args()
env, env_test, agent, buffer, learner, tester = experiment_setup(args)
args.logger.summary_init(agent.graph, agent.sess)
# Progress info
args.logger.add_item('Epoch')
args.logger.add_item('Cycle')
args.logger.add_item('Episodes@green')
args.logger.add_item('Timesteps')
args.logger.add_item('TimeCost(sec)')
# Algorithm info
for key in agent.train_info.keys():
args.logger.add_item(key, 'scalar')
# Test info
for key in tester.info:
args.logger.add_item(key, 'scalar')
args.logger.summary_setup()
for epoch in range(args.epochs):
for cycle in range(args.cycles):
args.logger.tabular_clear()
args.logger.summary_clear()
start_time = time.time()
learner.learn(args, env, env_test, agent, buffer)
tester.cycle_summary()
args.logger.add_record('Epoch', str(epoch)+'/'+str(args.epochs))
args.logger.add_record('Cycle', str(cycle)+'/'+str(args.cycles))
args.logger.add_record('Episodes', buffer.counter)
args.logger.add_record('Timesteps', buffer.steps_counter)
args.logger.add_record('TimeCost(sec)', time.time()-start_time)
args.logger.tabular_show(args.tag)
args.logger.summary_show(buffer.counter)
tester.epoch_summary()
tester.final_summary()
| [
11748,
299,
32152,
355,
45941,
198,
11748,
640,
198,
6738,
2219,
1330,
651,
62,
22046,
11,
23100,
3681,
62,
40406,
198,
198,
361,
11593,
3672,
834,
855,
6,
834,
12417,
834,
10354,
198,
197,
22046,
796,
651,
62,
22046,
3419,
198,
197,
... | 2.505639 | 532 |
'''
Python code used in APS 2016 Python lecture 5
'''
import h5py
import lecture5_lib
f = h5py.File('writer_1_3.hdf5', 'r')
x = f['/Scan/data/two_theta']
y = f['/Scan/data/counts']
print 'file:', f.filename
print 'peak position:', lecture5_lib.peak_position(x, y)
print 'center-of-mass:', lecture5_lib.center_of_mass(x, y)
print 'FWHM:', lecture5_lib.fwhm(x, y)
f.close()
def dummy():
''' '''
| [
7061,
6,
198,
37906,
2438,
973,
287,
3486,
50,
1584,
11361,
19143,
642,
198,
7061,
6,
198,
198,
11748,
289,
20,
9078,
198,
11748,
19143,
20,
62,
8019,
198,
198,
69,
796,
289,
20,
9078,
13,
8979,
10786,
16002,
62,
16,
62,
18,
13,
... | 2.327485 | 171 |
import json
import pytest
import requests
@pytest.mark.parametrize("userid, firstname", [(1,"George"),(2,"Janet")])
| [
11748,
33918,
198,
11748,
12972,
9288,
198,
11748,
7007,
198,
198,
31,
9078,
9288,
13,
4102,
13,
17143,
316,
380,
2736,
7203,
7220,
312,
11,
717,
3672,
1600,
47527,
16,
553,
20191,
12340,
7,
17,
553,
12128,
316,
4943,
12962,
198
] | 2.853659 | 41 |
__author__ = "ujjwal"
import os
import json
import requests
import urllib
import xmltodict
from priceprobi.utils import get_env
from priceprobi.config import get_config
from priceprobi.db.mongo_helper import MongoHelper
from priceprobi.config import NOMINATIM, CSIS
from priceprobi import logger
if __name__ == "__main__":
config = get_config(get_env())
geocoder = Geocoder(config=config)
geocoder.check_mandi_locations()
geocoder.create_batch_geocodes()
| [
834,
9800,
834,
796,
366,
23577,
73,
16783,
1,
198,
11748,
28686,
198,
11748,
33918,
198,
11748,
7007,
198,
11748,
2956,
297,
571,
198,
11748,
2124,
76,
2528,
375,
713,
198,
6738,
2756,
1676,
8482,
13,
26791,
1330,
651,
62,
24330,
198... | 2.838323 | 167 |
from django.contrib import admin
from .models import *
admin.site.register(Book)
admin.site.register(Booking)
admin.site.register(Achievement)
admin.site.register(AboutUs)
admin.site.register(Card)
| [
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
198,
6738,
764,
27530,
1330,
1635,
628,
198,
28482,
13,
15654,
13,
30238,
7,
10482,
8,
198,
28482,
13,
15654,
13,
30238,
7,
10482,
278,
8,
198,
28482,
13,
15654,
13,
30238,
7,
32,
2495... | 3.075758 | 66 |
from rosalind import parse_fasta
from rosalind import translate
from rosalind import transcribe
if(__name__=='__main__'):
main() | [
6738,
686,
21680,
521,
1330,
21136,
62,
7217,
64,
198,
6738,
686,
21680,
521,
1330,
15772,
198,
6738,
686,
21680,
521,
1330,
23589,
4892,
198,
220,
220,
220,
220,
220,
220,
220,
220,
198,
361,
7,
834,
3672,
834,
855,
6,
834,
12417,
... | 2.711538 | 52 |
#! /usr/bin/env python3
"""Setup script
run "./setup.py --help-commands" for help.
"""
from datetime import datetime
from os.path import abspath, dirname, join
PACKAGE_INFO = dict(
name="compilertools",
description="A library for helping optimizing Python extensions compilation.",
long_description_content_type="text/markdown; charset=UTF-8",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Build Tools",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: Implementation :: CPython",
],
keywords="compiler distutils setuptools build_ext wheels setup build",
author="J.Goutin",
url="https://github.com/JGoutin/compilertools",
project_urls={
"Documentation": "http://compilertools.readthedocs.io/",
"Download": "https://pypi.org/project/compilertools",
},
license="BSD",
zip_safe=True,
python_requires=">=3.6",
setup_requires=["setuptools"],
tests_require=["pytest-cov", "pytest-flake8", "pytest-black"],
command_options={},
)
SETUP_DIR = abspath(dirname(__file__))
with open(join(SETUP_DIR, "compilertools/_version.py")) as file:
for line in file:
if line.rstrip().startswith("__version__"):
PACKAGE_INFO["version"] = line.split("=", 1)[1].strip(" \"'\n")
break
with open(join(SETUP_DIR, "readme.md")) as file:
PACKAGE_INFO["long_description"] = file.read()
PACKAGE_INFO["command_options"]["build_sphinx"] = {
"project": ("setup.py", PACKAGE_INFO["name"].capitalize()),
"version": ("setup.py", PACKAGE_INFO["version"]),
"release": ("setup.py", PACKAGE_INFO["version"]),
"copyright": (
"setup.py",
"2017-%s, %s" % (datetime.now().year, PACKAGE_INFO["author"]),
),
}
if __name__ == "__main__":
from os import chdir
from sys import argv
from setuptools import setup, find_packages
if {"pytest", "test", "ptr"}.intersection(argv):
PACKAGE_INFO["setup_requires"].append("pytest-runner")
elif "build_sphinx" in argv:
PACKAGE_INFO["setup_requires"] += ["sphinx", "sphinx_rtd_theme"]
chdir(SETUP_DIR)
setup(packages=find_packages(exclude=["tests", "doc"]), **PACKAGE_INFO)
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
37811,
40786,
4226,
198,
198,
5143,
366,
19571,
40406,
13,
9078,
1377,
16794,
12,
9503,
1746,
1,
329,
1037,
13,
198,
37811,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
... | 2.538682 | 1,047 |
import os
import h5py
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
def load_h5_data_label(h5_filename):
""" load the data from the hdf5 files """
f = h5py.File(h5_filename)
data = f['data'][:]
labels = f['label'][:]
# normal = f['normal'][:]
return (data, labels)
def pointnet_to_cleanpoint(data_dir, samples, labels):
""" read pointnet dataset point cloud and transfer it to pcpnet dataset format """
shape_names_dir = os.path.join(data_dir, 'shape_names.txt')
with open(shape_names_dir) as f:
label_names = f.readlines()
# save in the pcp data set format
new_dir = os.path.join(data_dir, '../modelNetDataset')
if not os.path.exists(new_dir):
os.mkdir(new_dir)
for i, _ in enumerate(samples, 0):
sample = samples[i, :, :]
# save clean sample
num = 0
filename = os.path.join(new_dir, label_names[labels[i][0]].strip() + '_{:d}_{:02d}.xyz'.format(labels[i][0], num))
while os.path.exists(filename):
num = num + 1
filename = os.path.join(new_dir, label_names[labels[i][0]].strip() + '_{:d}_{:02d}.xyz'.format(labels[i][0], num))
if num > 10:
continue
with open(filename, 'w') as f:
f.write('\n'.join([' '.join(map(str, x)) for x in sample]))
f.close()
# save noisy sample - white noise std 0.25%
filename_1 = os.path.splitext(filename)[0]
noisy_sample = sample + np.random.normal(scale=0.0025, size=sample.shape)
filename_1 = filename_1 + '_2.50e-03.xyz'
with open(filename_1, 'w') as f:
f.write('\n'.join([' '.join(map(str, x)) for x in noisy_sample]))
f.close()
# save noisy sample - white noise std 1%
filename_2 = os.path.splitext(filename)[0]
noisy_sample = sample + np.random.normal(scale=0.01, size=sample.shape)
filename_2 = filename_2 + '_1.00e-02.xyz'
with open(filename_2, 'w') as f:
f.write('\n'.join([' '.join(map(str, x)) for x in noisy_sample]))
f.close()
# save noisy sample - white noise std 2.5%
filename_3 = os.path.splitext(filename)[0]
noisy_sample = sample + np.random.normal(scale=0.025, size=sample.shape)
filename_3 = filename_3 + '_2.50e-02.xyz'
with open(filename_3, 'w') as f:
f.write('\n'.join([' '.join(map(str, x)) for x in noisy_sample]))
f.close()
# for each file create clean copy for GT
filename = os.path.splitext(filename)[0] + '.clean_xyz'
filename_1 = os.path.splitext(filename_1)[0] + '.clean_xyz'
filename_2 = os.path.splitext(filename_2)[0] + '.clean_xyz'
filename_3 = os.path.splitext(filename_3)[0] + '.clean_xyz'
with open(filename, 'w') as f, open(filename_1, 'w') as f1, open(filename_2, 'w') as f2, open(filename_3, 'w') as f3:
f.write('\n'.join([' '.join(map(str, x)) for x in sample]))
f1.write('\n'.join([' '.join(map(str, x)) for x in sample]))
f2.write('\n'.join([' '.join(map(str, x)) for x in sample]))
f3.write('\n'.join([' '.join(map(str, x)) for x in sample]))
f.close()
f1.close()
f2.close()
f3.close()
return
def visualize_point_cloud(pc, output_filename='null', fig_num=0, color=np.array([1])):
""" points is a Nx3 numpy array """
fig = plt.figure(fig_num)
ax = fig.add_subplot(111, projection='3d')
if color[0] == 1:
color = pc[:, 2]
ax.scatter(pc[:, 0], pc[:, 1], pc[:, 2], c=color, s=5, marker='.', depthshade=True)
# ax.set_xlabel('x')
# ax.set_ylabel('y')
# ax.set_zlabel('z')
ax.axis('on')
# plt.savefig(output_filename)
def prep_training_set_file(data_dir):
""" prep trainingset.txt and validationset.txt files """
trainset_file = os.path.join(data_dir, 'trainingset.txt')
valiset_file = os.path.join(data_dir, 'validationset.txt')
with open(trainset_file, 'w') as f1, open(valiset_file, 'w') as f2:
for path, subdirs, files in os.walk(data_dir):
for file in files:
file = os.path.splitext(file)[0]
if file != 'trainingset' and file != 'validationset':
if int(file.split('_')[2]) <= 8:
f1.write(file + '\n')
else:
f2.write(file + '\n')
f1.close()
f2.close()
if __name__ == '__main__':
clean = np.loadtxt('data/modelNetDataset/airplane_0_09_1.00e-02.clean_xyz')
pc1 = np.loadtxt('results/airplane_0_09_1.00e-02_0.xyz')
pc2 = np.loadtxt('results/airplane_0_09_1.00e-02_1.xyz')
err1 = np.sum(np.square(pc1 - clean), axis=1)
err2 = np.sum(np.square(pc2 - clean), axis=1)
visualize_point_cloud(pc1, fig_num=1)
visualize_point_cloud(pc2, fig_num=2)
visualize_point_cloud(pc1, fig_num=3, color=err1)
visualize_point_cloud(pc2, fig_num=4, color=err2)
plt.show()
# samples, labels = load_dataset()
# in_dir = os.path.join(os.getcwd(), 'data/modelnet40_ply_hdf5_2048')
# pointnet_to_cleanpoint(in_dir, samples, labels)
# prep_training_set_file(os.path.join(os.getcwd(), 'data/modelNetDataset'))
| [
11748,
28686,
201,
198,
11748,
289,
20,
9078,
201,
198,
11748,
299,
32152,
355,
45941,
201,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
201,
198,
6738,
285,
489,
62,
25981,
74,
896,
13,
76,
29487,
18,
67,
1330,
121... | 2.016906 | 2,721 |
'''
File: canvas.py
Description:
Implements the custom GlyphyType canvas widget
for drawing characters/glyphs.
'''
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from tools import brush, line | [
7061,
6,
198,
8979,
25,
220,
220,
220,
220,
220,
220,
21978,
13,
9078,
198,
11828,
25,
220,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1846,
1154,
902,
262,
2183,
27949,
6883,
6030,
21978,
26295,
198,
220,
220,
22... | 2.357143 | 98 |
from django.contrib.postgres.fields import ArrayField
from django.db import models
__all__ = (
'Doll',
'Status',
)
| [
6738,
42625,
14208,
13,
3642,
822,
13,
7353,
34239,
13,
25747,
1330,
15690,
15878,
198,
6738,
42625,
14208,
13,
9945,
1330,
4981,
198,
198,
834,
439,
834,
796,
357,
198,
220,
220,
220,
705,
35,
692,
3256,
198,
220,
220,
220,
705,
19... | 2.625 | 48 |
#!/usr/bin/env python
import argparse
import os
import re
import json
def version_increased(former_version, current_version):
"""Check that version in the package is greater than version_master
and that only one int has increased of 1.
Args:
version_master (str): former version
"""
current_version_int = int("".join(current_version.split(".")))
former_version_int = int("".join(former_version.split(".")))
assert current_version_int > former_version_int, f"New version ({current_version}) should be greater than former version ({former_version})."
version = [int(e) for e in current_version.split(".")]
version_former = [int(e) for e in former_version.split(".")]
diffs = []
for new, old in zip(version, version_former):
diffs.append(max(0, new - old))
assert sum(
diffs) == 1, f"Only one digit should be increased by one in version. Got {diffs}."
print("Version increased validation passed!")
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
1822,
29572,
198,
11748,
28686,
198,
11748,
302,
198,
11748,
33918,
628,
198,
198,
4299,
2196,
62,
24988,
839,
7,
16354,
62,
9641,
11,
1459,
62,
9641,
2599,
198,
220,
220,
... | 2.951009 | 347 |
import json
import os
from os import path | [
11748,
33918,
198,
11748,
28686,
198,
6738,
28686,
1330,
3108
] | 4.1 | 10 |
# -*- encoding: utf-8 -*-
"""
License: MIT
Copyright (c) 2019 - present AppSeed.us
"""
from django.contrib import admin
from django.urls import path, include # add this
from testing import views
urlpatterns = [
path('', views.index, name='tester'),
path('/take', views.tester, name='tester1'),
# add this
]
| [
2,
532,
9,
12,
21004,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
34156,
25,
17168,
198,
15269,
357,
66,
8,
13130,
532,
1944,
2034,
50,
2308,
13,
385,
198,
37811,
198,
198,
6738,
42625,
14208,
13,
3642,
822,
1330,
13169,
... | 2.722689 | 119 |
# from django.shortcuts import render
from django.http import HttpResponse
| [
2,
422,
42625,
14208,
13,
19509,
23779,
1330,
8543,
198,
198,
6738,
42625,
14208,
13,
4023,
1330,
367,
29281,
31077,
628
] | 3.666667 | 21 |
from typing import Iterable, Union, List
from pathlib import Path
import json
import torch
import torch.distributed as dist
from torch.utils.data import TensorDataset
class PretrainInputExample:
"""A single example for unsupervised pre-training.
"""
class ClsInputExample:
"""A single example for supervised fine-tuning (classification).
"""
class PretrainInputFeatures:
"""A single set of features of pre-training data.
"""
class ClsInputFeatures:
"""A single set of features of fine-tuning data (classification).
""" | [
6738,
19720,
1330,
40806,
540,
11,
4479,
11,
7343,
198,
6738,
3108,
8019,
1330,
10644,
198,
11748,
33918,
198,
198,
11748,
28034,
198,
11748,
28034,
13,
17080,
6169,
355,
1233,
198,
6738,
28034,
13,
26791,
13,
7890,
1330,
309,
22854,
27... | 3.363636 | 165 |
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import mock
from oslo_config import fixture as config_fixture
import six
from networking_generic_switch import config
fake_config = """
[genericswitch:foo]
device_type = foo_device
spam = eggs
[genericswitch:bar]
device_type = bar_device
ham = vikings
"""
| [
2,
15069,
1584,
7381,
20836,
11,
3457,
13,
198,
2,
198,
2,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
220,
220,
220,
407,
779,
428,
2393,
2845,
287,
11846,... | 3.335849 | 265 |
from django.apps import AppConfig
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
628
] | 3.888889 | 9 |
# Copyright (c) 2015-2020 by Rocky Bernstein
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import marshal, py_compile, sys, tempfile, time
from struct import unpack, pack
import os.path as osp
import xdis.unmarshal
from xdis.version_info import PYTHON3, PYTHON_VERSION
from xdis.magics import (
IS_PYPY3,
PYTHON_MAGIC_INT,
int2magic,
magic_int2float,
magic2int,
magicint2version,
versions,
)
from xdis.dropbox.decrypt25 import fix_dropbox_pyc
def load_file(filename, out=sys.stdout):
"""
load a Python source file and compile it to byte-code
_load_file(filename: string): code_object
filename: name of file containing Python source code
(normally a .py)
code_object: code_object compiled from this source code
This function does NOT write any file!
"""
fp = open(filename, "rb")
try:
source = fp.read()
try:
if PYTHON_VERSION < 2.6:
co = compile(source, filename, "exec")
else:
co = compile(source, filename, "exec", dont_inherit=True)
except SyntaxError:
out.write(">>Syntax error in %s\n" % filename)
raise
finally:
fp.close()
return co
def load_module(filename, code_objects=None, fast_load=False, get_code=True):
"""load a module without importing it.
Parameters:
filename: name of file containing Python byte-code object
(normally a .pyc)
code_objects: list of additional code_object from this
file. This might be a types.CodeType or one of
the portable xdis code types, e.g. Code38, Code3,
Code2, etc. This can be empty
get_code: bool. Parsing the code object takes a bit of
parsing time, but sometimes all you want is the
module info, time string, code size, python
version, etc. For that, set `get_code` to
`False`.
Return values are as follows:
float_version: float; the floating-point version number for the given magic_int,
e.g. 2.7 or 3.4
timestamp: int; the seconds since EPOCH of the time of the bytecode creation, or None
if no timestamp was stored
magic_int: int, a more specific than version number. The actual byte code version of the
code object
co : code object
ispypy : True if this was a PyPy code object
source_size: The size of the source code mod 2**32, if that was stored in the bytecode.
None otherwise.
sip_hash : the SIP Hash for the file (only in Python 3.7 or greater), if the file
was created with a SIP hash or None otherwise. Note that if the sip_hash is not
none, then the timestamp and source_size will be invalid.
"""
# Some sanity checks
if not osp.exists(filename):
raise ImportError("File name: '%s' doesn't exist" % filename)
elif not osp.isfile(filename):
raise ImportError("File name: '%s' isn't a file" % filename)
elif osp.getsize(filename) < 50:
raise ImportError(
"File name: '%s (%d bytes)' is too short to be a valid pyc file"
% (filename, osp.getsize(filename))
)
with open(filename, "rb") as fp:
return load_module_from_file_object(
fp,
filename=filename,
code_objects=code_objects,
fast_load=fast_load,
get_code=get_code,
)
def load_module_from_file_object(
fp, filename="<unknown>", code_objects=None, fast_load=False, get_code=True
):
"""load a module from a file object without importing it.
See :func:load_module for a list of return values.
"""
if code_objects is None:
code_objects = {}
timestamp = 0
try:
magic = fp.read(4)
magic_int = magic2int(magic)
# For reasons I don't understand, PyPy 3.2 stores a magic
# of '0'... The two values below are for Python 2.x and 3.x respectively
if magic[0:1] in ["0", b"0"]:
magic = int2magic(3180 + 7)
try:
# FIXME: use the internal routine below
float_version = magic_int2float(magic_int)
except KeyError:
if magic_int in (2657, 22138):
raise ImportError("This smells like Pyston which is not supported.")
if len(magic) >= 2:
raise ImportError(
"Unknown magic number %s in %s"
% (ord(magic[0:1]) + 256 * ord(magic[1:2]), filename)
)
else:
raise ImportError("Bad magic number: '%s'" % magic)
if magic_int in (
3010,
3020,
3030,
3040,
3050,
3060,
3061,
3071,
3361,
3091,
3101,
3103,
3141,
3270,
3280,
3290,
3300,
3320,
3330,
3371,
62071,
62071,
62081,
62091,
62092,
62111,
):
raise ImportError(
"%s is interim Python %s (%d) bytecode which is "
"not supported.\nFinal released versions are "
"supported." % (filename, versions[magic], magic2int(magic))
)
elif magic_int == 62135:
fp.seek(0)
return fix_dropbox_pyc(fp)
elif magic_int == 62215:
raise ImportError(
"%s is a dropbox-hacked Python %s (bytecode %d).\n"
"See https://github.com/kholia/dedrop for how to "
"decrypt." % (filename, versions[magic], magic2int(magic))
)
try:
# print version
my_magic_int = PYTHON_MAGIC_INT
magic_int = magic2int(magic)
version = magic_int2float(magic_int)
timestamp = None
source_size = None
sip_hash = None
ts = fp.read(4)
if version >= 3.7:
# PEP 552. https://www.python.org/dev/peps/pep-0552/
pep_bits = ts[-1]
if PYTHON_VERSION <= 2.7:
pep_bits = ord(pep_bits)
if (pep_bits & 1) or magic_int == 3393: # 3393 is 3.7.0beta3
# SipHash
sip_hash = unpack("<Q", fp.read(8))[0]
else:
# Uses older-style timestamp and size
timestamp = unpack("<I", fp.read(4))[0] # pep552_bits
source_size = unpack("<I", fp.read(4))[0] # size mod 2**32
pass
else:
timestamp = unpack("<I", ts)[0]
# Note: a higher magic number doesn't necessarily mean a later
# release. At Python 3.0 the magic number decreased
# significantly. Hence the range below. Also note inclusion of
# the size info, occurred within a Python major/minor
# release. Hence the test on the magic value rather than
# PYTHON_VERSION, although PYTHON_VERSION would probably work.
if (
(3200 <= magic_int < 20121)
and version >= 1.5
or magic_int in IS_PYPY3
):
source_size = unpack("<I", fp.read(4))[0] # size mod 2**32
if get_code:
if my_magic_int == magic_int:
bytecode = fp.read()
co = marshal.loads(bytecode)
elif fast_load:
co = xdis.marsh.load(fp, magicint2version[magic_int])
else:
co = xdis.unmarshal.load_code(fp, magic_int, code_objects)
pass
else:
co = None
except:
kind, msg = sys.exc_info()[0:2]
import traceback
traceback.print_exc()
raise ImportError(
"Ill-formed bytecode file %s\n%s; %s" % (filename, kind, msg)
)
finally:
fp.close()
return (
float_version,
timestamp,
magic_int,
co,
is_pypy(magic_int),
source_size,
sip_hash,
)
def write_bytecode_file(bytecode_path, code, magic_int, filesize=0):
"""Write bytecode file _bytecode_path_, with code for having Python
magic_int (i.e. bytecode associated with some version of Python)
"""
fp = open(bytecode_path, "wb")
try:
if PYTHON3:
fp.write(pack("<Hcc", magic_int, b"\r", b"\n"))
else:
fp.write(pack("<Hcc", magic_int, "\r", "\n"))
fp.write(pack("<I", int(time.time())))
if 3000 <= magic_int < 20121:
# In Python 3 you need to write out the size mod 2**32 here
fp.write(pack("<I", filesize))
fp.write(marshal.dumps(code))
finally:
fp.close()
if __name__ == "__main__":
co = load_file(__file__)
obj_path = check_object_path(__file__)
version, timestamp, magic_int, co2, pypy, source_size, sip_hash = load_module(
obj_path
)
print("version", version, "magic int", magic_int, "is_pypy", pypy)
if timestamp is not None:
import datetime
print(datetime.datetime.fromtimestamp(timestamp))
if source_size is not None:
print("source size mod 2**32: %d" % source_size)
if sip_hash is not None:
print("Sip Hash: 0x%x" % sip_hash)
assert co == co2
| [
2,
15069,
357,
66,
8,
1853,
12,
42334,
416,
24534,
37584,
198,
2,
220,
770,
1430,
318,
1479,
3788,
26,
345,
460,
17678,
4163,
340,
290,
14,
273,
198,
2,
220,
13096,
340,
739,
262,
2846,
286,
262,
22961,
3611,
5094,
13789,
198,
2,
... | 2.033301 | 5,165 |
# Generated by Django 3.2.6 on 2021-08-26 14:49
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
21,
319,
33448,
12,
2919,
12,
2075,
1478,
25,
2920,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
from gensim.models import word2vec
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-word', required=True,help="word file")
parser.add_argument('-iter', default=3)
parser.add_argument('-sg', default=0)
parser.add_argument('-hs', default=1)
parser.add_argument('-window', default=3)
parser.add_argument('-size', default=100)
opt = parser.parse_args()
print(opt)
tomodel(opt.word,opt.iter,opt.sg,opt.hs,opt.window,opt.size)
print("end..................")
| [
198,
6738,
308,
641,
320,
13,
27530,
1330,
1573,
17,
35138,
628,
198,
198,
11748,
1822,
29572,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
628,
220,
220,
220,
30751,
796,
1822,
29572,
13,
28100,
1713,
46677,
3419,
1... | 2.645933 | 209 |
import json
from azure.core.exceptions import ClientAuthenticationError
from cloudshell.shell.core.resource_driver_interface import ResourceDriverInterface
from cloudshell.shell.core.driver_context import AutoLoadDetails
from cloudshell.shell.core.session.cloudshell_session import CloudShellSessionContext
from cloudshell.shell.core.session.logging_session import LoggingSessionContext
from constants import AZURE2G_MODEL, AZURE_MODELS
from data_model import AzureTfBackend
from azure.storage.blob import BlobServiceClient
from msrestazure.azure_active_directory import ServicePrincipalCredentials
from azure.mgmt.storage import StorageManagementClient
| [
11748,
33918,
198,
198,
6738,
35560,
495,
13,
7295,
13,
1069,
11755,
1330,
20985,
47649,
3299,
12331,
198,
6738,
6279,
29149,
13,
29149,
13,
7295,
13,
31092,
62,
26230,
62,
39994,
1330,
20857,
32103,
39317,
198,
6738,
6279,
29149,
13,
2... | 3.876471 | 170 |
from unittest import TestCase
import numpy as np
import unittest
import placentagen
import os
if __name__ == '__main__':
unittest.main()
| [
6738,
555,
715,
395,
1330,
6208,
20448,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
555,
715,
395,
198,
11748,
458,
12643,
11286,
198,
11748,
28686,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
... | 2.88 | 50 |
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' ##This is used to suppress the warnings given by tensorflow
import tensorflow as tf
#Comment the above lines if there's an error and the code isn't working as expected
from deepface import DeepFace
import pandas as pd
# Add photos to the photos folder in order to analyse them or do facial recognition on them
############################### Face recognition
# This function looks at a photo (Argument 1) and sees if the person is in another photo in a database (Argument 2)
############################### Face analysis
# This function analyzes the face in the picture(Argument) and gives out the estimated age, gender, race and emotion
############################### Face verification
# This function returns whether or not two pictures(Both arguments) contain the same person
############################### Real time face analysis
# This function will give a real time analysis (Age, gender, emotion) of your face by opening the
# webcamera and compares it to pictures in the database specified in the argument
# Note: My webcamera is not working properly so I am not sure if this works as intended or not.
if __name__ == "__main__":
main()
| [
11748,
28686,
198,
418,
13,
268,
2268,
17816,
10234,
62,
8697,
47,
62,
23678,
62,
25294,
62,
2538,
18697,
20520,
796,
705,
18,
6,
22492,
1212,
318,
973,
284,
18175,
262,
14601,
1813,
416,
11192,
273,
11125,
198,
11748,
11192,
273,
111... | 4.128814 | 295 |
"""Basic message."""
from datetime import datetime
from typing import Union
from marshmallow import fields
from ...agent_message import AgentMessage, AgentMessageSchema
from ...util import datetime_now, datetime_to_str
from ...valid import INDY_ISO8601_DATETIME
from ..message_types import FEDERATEDLEARNING_MESSAGE
HANDLER_CLASS = (
"aries_cloudagent.messaging.federatedlearningmessage."
+ "handlers.basicmessage_handler.FederatedLearningMessageHandler"
)
class FederatedLearningMessage(AgentMessage):
"""Class defining the structure of a federated learning message."""
class Meta:
"""Federated learning message metadata class."""
handler_class = HANDLER_CLASS
message_type = FEDERATEDLEARNING_MESSAGE
schema_class = "FederatedLearningMessageSchema"
def __init__(
self,
*,
sent_time: Union[str, datetime] = None,
content: str = None,
localization: str = None,
**kwargs
):
"""
Initialize federated learning message object.
Args:
sent_time: Time message was sent
content: message content
localization: localization
"""
super(FederatedLearningMessage, self).__init__(**kwargs)
if not sent_time:
sent_time = datetime_now()
self.sent_time = datetime_to_str(sent_time)
self.content = content
self.localization = localization
class FederatedLearningMessageSchema(AgentMessageSchema):
"""FederatedLearning message schema class."""
class Meta:
"""FederatedLearning message schema metadata."""
model_class = FederatedLearningMessage
localization = fields.Str(
required=False,
description="Localization",
example="en-CA",
data_key="l10n",
)
sent_time = fields.Str(
required=False,
description="Time message was sent, ISO8601 with space date/time separator",
**INDY_ISO8601_DATETIME
)
content = fields.Str(
required=True,
description="Message content",
example="Hello",
)
| [
37811,
26416,
3275,
526,
15931,
198,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
6738,
19720,
1330,
4479,
198,
198,
6738,
22397,
42725,
1330,
7032,
198,
198,
6738,
2644,
25781,
62,
20500,
1330,
15906,
12837,
11,
15906,
12837,
27054,
261... | 2.566265 | 830 |
# Generated by Django 3.1.2 on 2020-11-25 12:47
from django.db import migrations, models
import wagtail.core.fields
| [
2,
2980,
515,
416,
37770,
513,
13,
16,
13,
17,
319,
12131,
12,
1157,
12,
1495,
1105,
25,
2857,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
198,
11748,
266,
363,
13199,
13,
7295,
13,
25747,
628
] | 2.878049 | 41 |
# import the necessary packages
from keras.applications import ResNet50
from keras.applications import MobileNet
import matplotlib.pyplot as plt
import numpy as np
import cv2
import dlv
if __name__ == "__main__":
    # Sanity-check that an OpenCV window can be opened before loading a model.
    test = np.random.rand(500, 300)
    cv2.imshow('img', test)
    cv2.waitKey(0)

    # NOTE(review): the original bound this to ``resnet50Model`` although it
    # loads MobileNet — renamed to avoid confusion. ``ResNet50`` remains
    # imported at the top of the file in case the backbone is switched back.
    backbone_model = MobileNet(weights="imagenet")

    # Wrap the backbone in a dlv model, feed it one image, and pull the
    # cached feature maps for that image.
    dlvModel = dlv.Model(backbone_model)
    dlvModel.addInputData('dog.jpg')
    dlvModel.getFeaturesFromFetchedList()

    # First layer's feature map for 'dog.jpg'; move the channel axis to the
    # front so that result[0] is a single 2-D activation map.
    result = dlvModel._indata_FeatureMap_Dict['dog.jpg']._featureMapList[0]
    result = np.moveaxis(result, -1, 0)
    filter0 = result[0]
    # imshow renders float arrays on a 0..1 scale; values outside that range
    # will clip — presumably activations are already in range (TODO confirm).
    cv2.imshow('image', filter0)
    cv2.waitKey(0)
    print('tmp')
#
# if __name__ == "__main__":
# resnet50Model = MobileNet(weights="imagenet")
# dlvModel = dlv.Model(resnet50Model)
# dlvModel.addInputData('dog.jpg')
# dlvModel.addInputData('cat.jpg')
# dlvModel.getFeaturesFromFetchedList()
#
# # Prepare pyplot
#
# w = 112
# h = 112
#
# # fig = plt.figure(figsize=(64, len(dlvModel._indata_FeatureMap_Dict['cat.jpg']._featureMapList)))
# fig = plt.figure(figsize=(64, 1))
#
# columns = 1
# rows = 64
#
# for j in range(0, columns):
# conv_j_result = dlvModel._indata_FeatureMap_Dict['cat.jpg']._featureMapList[j]
#
# for i in range(0, rows ):
# subplot = fig.add_subplot(j+1, 64, j*64 + i + 1)
# subplot.set_xticks([])
# subplot.set_yticks([])
# image = conv_j_result[:, :, i]
# subplot.imshow(image)
# plt.show()
| [
2,
1330,
262,
3306,
10392,
198,
6738,
41927,
292,
13,
1324,
677,
602,
1330,
1874,
7934,
1120,
198,
6738,
41927,
292,
13,
1324,
677,
602,
1330,
12173,
7934,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
11748,... | 2.253435 | 655 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: onos/uenib/ran.proto, onos/uenib/uenib.proto
# plugin: python-betterproto
from dataclasses import dataclass
from typing import AsyncIterator, Dict, List
import betterproto
import grpclib
@dataclass(eq=False, repr=False)
class CellConnection(betterproto.Message):
    """CellConnection represents UE cell connection."""

    # Cell identifier (proto field 1).
    id: str = betterproto.string_field(1)
    # Signal strength reported for this cell connection (proto field 2).
    signal_strength: float = betterproto.double_field(2)
@dataclass(eq=False, repr=False)
class CellInfo(betterproto.Message):
    """CellInfo provides data on serving cell and candidate cells."""

    # The cell currently serving the UE (proto field 1).
    serving_cell: "CellConnection" = betterproto.message_field(1)
    # Candidate cells the UE could be handed over to (proto field 2).
    candidate_cells: List["CellConnection"] = betterproto.message_field(2)
@dataclass(eq=False, repr=False)
class Event(betterproto.Message):
    """Event pairs a change type with the UE record it concerns.

    NOTE(review): the original docstring ("CellConnection represents UE cell
    connection") was copy-pasted from another message and did not describe
    this type.
    """

    # Kind of change being reported (proto field 1).
    type: "EventType" = betterproto.enum_field(1)
    # The UE record the event refers to (proto field 2).
    ue: "Ue" = betterproto.message_field(2)
@dataclass(eq=False, repr=False)
class CreateUeRequest(betterproto.Message):
    """Request to create the given UE record.

    NOTE(review): the original docstring ("CellInfo provides data on serving
    cell and candidate cells") was copy-pasted from another message and did
    not describe this type.
    """

    # The UE record to create (proto field 1).
    ue: "Ue" = betterproto.message_field(1)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
@dataclass(eq=False, repr=False)
import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf
| [
2,
2980,
515,
416,
262,
8435,
11876,
17050,
13,
220,
8410,
5626,
48483,
0,
198,
2,
4237,
25,
319,
418,
14,
84,
268,
571,
14,
2596,
13,
1676,
1462,
11,
319,
418,
14,
84,
268,
571,
14,
84,
268,
571,
13,
1676,
1462,
198,
2,
13877... | 2.771757 | 609 |