content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
from pathlib import Path
from setuptools import setup
root = Path(__file__).parent.absolute()
with open(str(root / 'README.rst')) as f:
long_description = f.read()
setup(
name='obs-ws-rc',
version='2.3.0',
description=("asyncio-based Python 3.5+ client to obs-websocket "
"plugin for OBS Studio"),
long_description=long_description,
url="https://github.com/KirillMysnik/obs-ws-rc",
author="Kirill Mysnik",
author_email = "kirill@mysnik.com",
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Multimedia :: Video',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='obs obs-websocket',
packages=['obswsrc', ],
install_requires=['websockets', ],
python_requires='>=3.5',
package_data={
'obswsrc': ["protocol.json", ],
}
)
| [
6738,
3108,
8019,
1330,
10644,
198,
6738,
900,
37623,
10141,
1330,
9058,
628,
198,
15763,
796,
10644,
7,
834,
7753,
834,
737,
8000,
13,
48546,
3419,
198,
198,
4480,
1280,
7,
2536,
7,
15763,
1220,
705,
15675,
11682,
13,
81,
301,
6,
4... | 2.553684 | 475 |
# Daenerys IDA/Ghidra interop framework
# by Elias Bachaalany <elias.bachaalany@gmail.com>
#
# Python utility functions
import numbers | [
2,
9637,
877,
893,
4522,
32,
14,
41126,
312,
430,
987,
404,
9355,
198,
2,
416,
41462,
25332,
64,
282,
1092,
1279,
417,
4448,
13,
19496,
64,
282,
1092,
31,
14816,
13,
785,
29,
198,
2,
198,
2,
11361,
10361,
5499,
198,
198,
11748,
... | 3 | 45 |
'''
Super Encrypt - Encryption Algorithm with 4 key encryption and decryption
__author__ = "PyBash"
__version__ = "v1.0.0"
'''
import random
from . import error
BASE_LETTERS = 'abcdefghijklmnopqrstuvwxyz'
BASE_SYMBOLS = ' 1234567890!@#$%^&*()-_=+[{]};:\'"<,>.?/`~|\\'
| [
7061,
6,
198,
12442,
14711,
6012,
532,
14711,
13168,
978,
42289,
351,
604,
1994,
15835,
290,
875,
13168,
198,
198,
834,
9800,
834,
796,
366,
20519,
33,
1077,
1,
198,
834,
9641,
834,
796,
366,
85,
16,
13,
15,
13,
15,
1,
198,
7061,
... | 2.231405 | 121 |
from ._Test import *
| [
6738,
47540,
14402,
1330,
1635,
198
] | 3.5 | 6 |
# The code below almost works
name = input("Enter your name")
print("Hello", name)
##2
# This first line is provided for you
hrs = input("Enter Hours:")
# hrs = float(hrs) #use the one in line 9 instead
ratePerHour = input("Enter rate per hour:")
# rateperHour = float(ratePerHour) #use the one in line 9 instead
# you will hit the following error, if you don't convert the inputs to float
# TypeError: can't multiply sequence by non-int of type 'str' on line 5
grossPay = float(hrs) * float(ratePerHour)
print("Pay:", grossPay)
##3.1
hrs = input("Enter Hours:")
ratePerHour = input("Enter rate per hour:")
try:
fhrs = float(hrs)
fratePerHour = float(ratePerHour)
except:
print("Error, please enter numeric input")
quit()
#print(fhrs, fratePerHour)
#Pay = float(hrs) * float(ratePerHour)
if fhrs > 40:
PayNormal = fhrs * fratePerHour
PayExtra = (fhrs - 40) * (fratePerHour * 0.5)
Pay = PayExtra + PayNormal
else:
Pay = fhrs * fratePerHour
print(Pay)
### 3.3
"""3.3 Write a program to prompt for a score between 0.0 and 1.0. If the score is out of range, print an error. If the score is between 0.0 and 1.0, print a grade using the following table:
Score Grade
>= 0.9 A
>= 0.8 B
>= 0.7 C
>= 0.6 D
< 0.6 F
If the user enters a value out of range, print a suitable error message and exit. For the test, enter a score of 0.85."""
### done
score = input("Enter Score: ")
score = float(score)
#if score in range(0.0,0.9): #only ints
if 0.0 <= score and score <= 1.0:
if score >= 0.9 and score <= 1.0:
print("A")
elif score >= 0.8 and score < 0.9:
print("B")
elif score >= 0.7 and score < 0.8:
print("C")
elif score >= 0.6 and score < 0.7:
print("D")
elif score < 0.6:
print("F")
else:
print("Error, please enter the score in the range of 0.0 and 1.0")
quit()
### 4.6
"""
4.6 Write a program to prompt the user for hours and rate per hour using input to compute gross pay. Pay should be the normal rate for
hours up to 40 and time-and-a-half for the hourly rate for all hours worked above 40 hours. Put the logic to do the computation of pay in a
function called computepay() and use the function to do the computation. The function should return a value. Use 45 hours and a rate of
10.50 per hour to test the program (the pay should be 498.75). You should use input to read a string and float() to convert the string to a
number. Do not worry about error checking the user input unless you want to - you can assume the user types numbers properly. Do not name
your variable sum or use the sum() function."""
p = computepay(10,20)
print("Pay", p)
### 5.1
# print Total, count of numbers, and their average entered by a user
# also do verification
num = 0
total = 0
while True :
sval = input('Enter a number')
if sval == 'done' :
break
try:
fval = float(sval)
except:
print('Invalid input')
continue
num = num + 1
total = total + fval
print(total, num, total/num)
### 5.2
"""
5.2 Write a program that repeatedly prompts a user for integer numbers until the user enters 'done'. Once 'done' is entered, print out the
largest and smallest of the numbers. If the user enters anything other than a valid number catch it with a try/except and put out an
appropriate message and ignore the number. Enter 7, 2, bob, 10, and 4 and match the output below.
"""
largest = None
smallest = None
count = 0
while True:
num = input("Enter a number: ")
if num == "done" : break
else:
try:
ival = int(num)
except:
print('Invalid input')
continue
if count == 0 and smallest is None:
smallest = ival
if ival < smallest:
smallest = ival
elif largest is None or ival > largest:
largest = ival
count = count + 1
#print(num)
print("Maximum is", largest)
print("Minimum is", smallest)
| [
2,
383,
2438,
2174,
2048,
2499,
198,
198,
3672,
796,
5128,
7203,
17469,
534,
1438,
4943,
198,
4798,
7203,
15496,
1600,
1438,
8,
628,
198,
2235,
17,
198,
2,
770,
717,
1627,
318,
2810,
329,
345,
198,
198,
71,
3808,
796,
5128,
7203,
... | 2.809422 | 1,401 |
#!/usr/local/bin/python
import os
import time
import collections
import subprocess
o("minikube delete")
o("minikube start --memory 10000 --cpus 4 --disk-size=60g")
#o("kubectl delete namespace spinnaker")
#time.sleep(30)
o("kubectl create namespace spinnaker")
c("applications/kubedash/bundle.yaml")
c("applications/tectonic/pull.yml")
c("applications/tectonic/tectonic-console.yaml")
c("applications/tectonic/tectonic.json")
components = ('jenkins', 'registry', 'registryui', 'debweb')
for component in components:
c("applications/" + component + "/deployment.yml")
c("applications/" + component + "/service.json")
c("applications/kubeproxy/pod.yml")
components = ('cassandra', 'redis')
for component in components:
c("applications/spinnaker/" + component + "/deployment.yml")
c("applications/spinnaker/" + component + "/service.json")
poll()
os.system("rm -rf minikube")
os.system("mkdir minikube")
os.system("cp ~/.minikube/apiserver.crt minikube/apiserver.crt")
os.system("cp ~/.minikube/apiserver.key minikube/apiserver.key")
os.system("cp ~/.minikube/ca.crt minikube/ca.crt")
ip = os.popen('minikube ip').read().strip()
kubeConfig = """
apiVersion: v1
clusters:
- cluster:
certificate-authority: /root/.kube/ca.crt
server: https://""" + ip + """:8443
name: minikube
contexts:
- context:
cluster: minikube
user: minikube
name: minikube
current-context: minikube
kind: Config
preferences: {}
users:
- name: minikube
user:
client-certificate: /root/.kube/apiserver.crt
client-key: /root/.kube/apiserver.key
"""
with open("minikube/config", "w") as text_file:
text_file.write(kubeConfig)
time.sleep(1)
os.system("kubectl create secret generic spinnaker-config --from-file=./config/echo.yml --from-file=./config/igor.yml --from-file=./config/gate.yml --from-file=./config/orca.yml --from-file=./config/rosco.yml --from-file=./config/front50.yml --from-file=./config/clouddriver.yml --namespace spinnaker")
os.system("kubectl create secret generic minikube-config --from-file=./minikube/config --from-file=./minikube/ca.crt --from-file=./minikube/apiserver.crt --from-file=./minikube/apiserver.key --namespace spinnaker")
os.system("rm -rf minikube")
#print "seeding spinnaking images"
o("./podexec jenkins /usr/local/jenkins/jobs/seed.sh")
components = ('front50', 'clouddriver', 'rosco', 'orca', 'igor', 'gate', 'deck')
for component in components:
c("applications/spinnaker/" + component + "/controller.yml")
c("applications/spinnaker/" + component + "/service.json")
poll()
time.sleep(2)
services = '''
{
"services" : [
{
"title": "Spinnaker Dashboard",
"description": "Spinnaker UI",
"link": "''' + cmdOut("minikube service spinnaker-deck --namespace spinnaker --url") + '''"
},
{
"title": "Kubernetes Dashboard",
"description": "Management UI",
"link": "''' + cmdOut("minikube service kubernetes-dashboard --namespace kube-system --url") + '''"
},
{
"title": "Tectonic Console",
"description": "Alternative management UI",
"link": "''' + cmdOut("minikube service tectonic --namespace spinnaker --url") + '''"
},
{
"title": "Jenkins",
"description": "Automation Server",
"link": "''' + cmdOut("minikube service spinnaker-jenkins --namespace spinnaker --url") + '''"
},
{
"title": "Cluster Performace",
"description": "Performance analytics UI",
"link": "''' + cmdOut("minikube service kubedash --namespace spinnaker --url") + '''"
},
{
"title": "Container Image Registry",
"description": "Local image repository",
"link": "''' + cmdOut("minikube service spinnaker-registryui --namespace spinnaker --url") + '''"
},
{
"title": "Apt Repository",
"description": "Local apt repository",
"link": "''' + cmdOut("minikube service spinnaker-debweb --namespace spinnaker --url") + '''"
}
]
}
'''
os.system("rm -f applications/start/services.json")
with open("applications/start/services.json", "w") as text_file:
text_file.write(services)
os.system("kubectl create secret generic start-config --from-file=./applications/start/index.html --from-file=./applications/start/services.json --namespace spinnaker")
#cqlsh -e "COPY front50.pipeline TO '/front50.pipeline.csv' WITH HEADER = 'true'"
c("applications/start/deployment.yml")
c("applications/start/service.json")
poll()
#add example pipeline
o("./podexec spinnaker apt-get update")
o("./podexec spinnaker apt-get install -y git")
o("./podexec spinnaker git clone git@github.com:moondev/SpiniKube.git /SpiniKube")
o("./podexec spinnaker cqlsh -e 'COPY front50.pipeline FROM \'/SpiniKube/pipelines/pipelines.csv\' WITH HEADER = \'true\';'")
o("minikube service spinnaker-start -n spinnaker") | [
2,
48443,
14629,
14,
12001,
14,
8800,
14,
29412,
198,
198,
11748,
28686,
198,
11748,
640,
198,
11748,
17268,
198,
11748,
850,
14681,
198,
198,
78,
7203,
1084,
1134,
3266,
12233,
4943,
198,
198,
78,
7203,
1084,
1134,
3266,
923,
1377,
3... | 2.603486 | 1,836 |
from typing import List
from collections import deque
class Line:
"""
Properties:
start_x {0}
start_y {1}
end_x {2}
end_y {3}
dots = [dot1, ..., dotN] {4}
coords = (start_x, start_y, end_x, end_y)
"""
@property
# @property
# def x1(self):
# return self.start_x
# @property
# def y1(self):
# return self.start_y
# @property
# def x2(self):
# return self.end_x
# @property
# def y2(self):
# return self.end_y
@property
@property
@property
@property
| [
6738,
19720,
1330,
7343,
198,
6738,
17268,
1330,
390,
4188,
628,
198,
4871,
6910,
25,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
24946,
25,
198,
220,
220,
220,
220,
220,
220,
220,
923,
62,
87,
220,
1391,
15,
92,
198,
220,
220,... | 1.983766 | 308 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\VistaAnalisisDatos.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from UI.Recursos import images_rc
# import DATOS IMAGENES_rc
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
VistaAnalisisDatos = QtWidgets.QMainWindow()
ui = Ui_VistaAnalisisDatos()
ui.setupUi(VistaAnalisisDatos)
VistaAnalisisDatos.show()
sys.exit(app.exec_())
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
5178,
7822,
7560,
422,
3555,
334,
72,
2393,
45302,
59,
53,
12523,
2025,
27315,
271,
27354,
418,
13,
9019,
6,
198,
2,
198,
2,
15622,
416,
25,
9485,
48,
83,
... | 2.452991 | 234 |
from .crypto_stream_reader import CryptoWrappedStreamReader
from .crypto_stream_writer import CryptoWrappedStreamWriter
| [
6738,
764,
29609,
78,
62,
5532,
62,
46862,
1330,
36579,
36918,
1496,
12124,
33634,
198,
6738,
764,
29609,
78,
62,
5532,
62,
16002,
1330,
36579,
36918,
1496,
12124,
34379,
198
] | 4 | 30 |
from geoopt import Stereographic
import torch
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib import rcParams
import shutil
if shutil.which("latex") is not None:
rcParams["text.latex.preamble"] = r"\usepackage{amsmath}"
rcParams["text.usetex"] = True
sns.set_style("white")
x = torch.tensor((-0.25, -0.75)) / 2
y = torch.tensor((0.65, -0.55)) / 2
manifold = Stereographic(-1)
x_plus_y = manifold.mobius_add(x, y)
circle = plt.Circle((0, 0), 1, fill=False, color="b")
plt.gca().add_artist(circle)
plt.xlim(-1.1, 1.1)
plt.ylim(-1.1, 1.1)
plt.gca().set_aspect("equal")
plt.annotate("x", x - 0.09, fontsize=15)
plt.annotate("y", y - 0.09, fontsize=15)
plt.annotate(r"$x\oplus y$", x_plus_y - torch.tensor([0.1, 0.15]), fontsize=15)
plt.arrow(0, 0, *x, width=0.01, color="r")
plt.arrow(0, 0, *y, width=0.01, color="g")
plt.arrow(0, 0, *x_plus_y, width=0.01, color="b")
plt.title(r"Addition $x\oplus y$")
plt.show()
| [
6738,
40087,
8738,
1330,
520,
567,
6826,
198,
11748,
28034,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
384,
397,
1211,
355,
3013,
82,
198,
6738,
2603,
29487,
8019,
1330,
48321,
10044,
4105,
198,
11748,
442... | 2.147392 | 441 |
from django import forms
from models import Project
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field, Submit, Div, HTML, ButtonHolder
from crispy_forms.bootstrap import FormActions, StrictButton, TabHolder, Tab, PrependedText, InlineCheckboxes, InlineField
import requests
from django.contrib.auth.models import User
from django_currentuser.middleware import (
get_current_user, get_current_authenticated_user)
| [
6738,
42625,
14208,
1330,
5107,
198,
6738,
4981,
1330,
4935,
198,
6738,
42807,
62,
23914,
13,
2978,
525,
1330,
5178,
47429,
198,
6738,
42807,
62,
23914,
13,
39786,
1330,
47639,
11,
7663,
11,
39900,
11,
4777,
11,
11532,
11,
20969,
39,
... | 3.534351 | 131 |
import tensorflow as tf
import numpy as np
import time
import matplotlib
import matplotlib.pyplot as plt
from sklearn.datasets.samples_generator import make_circles
N=210
K=2
# Maximum number of iterations, if the conditions are not met
MAX_ITERS = 1000
cut=int(N*0.7)
start = time.time()
data, features = make_circles(n_samples=N, shuffle=True, noise= 0.12, factor=0.4)
tr_data, tr_features= data[:cut], features[:cut]
te_data,te_features=data[cut:], features[cut:]
fig, ax = plt.subplots()
ax.scatter(tr_data.transpose()[0], tr_data.transpose()[1], marker = 'o', s = 100, c = tr_features, cmap=plt.cm.coolwarm )
plt.plot()
points=tf.Variable(data)
cluster_assignments = tf.Variable(tf.zeros([N], dtype=tf.int64))
sess = tf.Session()
sess.run(tf.initialize_all_variables())
test=[]
for i, j in zip(te_data, te_features):
distances = tf.reduce_sum(tf.square(tf.sub(i , tr_data)),reduction_indices=1)
neighbor = tf.arg_min(distances,0)
#print tr_features[sess.run(neighbor)]
#print j
test.append(tr_features[sess.run(neighbor)])
print test
fig, ax = plt.subplots()
ax.scatter(te_data.transpose()[0], te_data.transpose()[1], marker = 'o', s = 100, c = test, cmap=plt.cm.coolwarm )
plt.plot()
#rep_points_v = tf.reshape(points, [1, N, 2])
#rep_points_h = tf.reshape(points, [N, 2])
#sum_squares = tf.reduce_sum(tf.square(rep_points - rep_points), reduction_indices=2)
#print(sess.run(tf.square(rep_points_v - rep_points_h)))
end = time.time()
print ("Found in %.2f seconds" % (end-start))
print "Cluster assignments:", test
| [
11748,
11192,
273,
11125,
355,
48700,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
640,
198,
198,
11748,
2603,
29487,
8019,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
6738,
1341,
35720,
13,
19608,
292,
10... | 2.433697 | 641 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## Baiqiang XIA implementation of data structures
## Graph represented as Adjacency List
## init
## add Edge
## print
## bread first search (traversal), assuming all node are connected
## the task is to visit all the nodes
## bread first search
## https://www.geeksforgeeks.org/breadth-first-search-or-bfs-for-a-graph/
### depth first search
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2235,
40750,
80,
15483,
1395,
3539,
7822,
286,
1366,
8573,
198,
198,
2235,
29681,
7997,
355,
1215,
30482,
1387,
7343,
1... | 2.26484 | 219 |
#!/usr/bin/env python3
# https://codeforces.com/problemset/problem/1359/A
t = int(input())
for _ in range(t):
n,m,k = list(map(int,input().split()))
winner = min(n//k,m)
second = (m-winner+k-2)//(k-1)
print(winner-second)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
2,
3740,
1378,
19815,
891,
273,
728,
13,
785,
14,
1676,
22143,
316,
14,
45573,
14,
1485,
3270,
14,
32,
198,
198,
83,
796,
493,
7,
15414,
28955,
198,
1640,
4808,
287,
2837,... | 2.067797 | 118 |
"""
2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
What is the sum of the digits of the number 2^1000?
"""
if __name__ == "__main__":
print(sum_digits(2**1000))
| [
37811,
198,
17,
61,
1314,
796,
36203,
3104,
290,
262,
2160,
286,
663,
19561,
318,
513,
1343,
362,
1343,
767,
1343,
718,
1343,
807,
796,
2608,
13,
198,
2061,
318,
262,
2160,
286,
262,
19561,
286,
262,
1271,
362,
61,
12825,
30,
198,
... | 2.569444 | 72 |
import os
from tasrif.data_readers.fitbit_interday_dataset import FitbitInterdayDataset
from tasrif.processing_pipeline import SequenceOperator
from tasrif.processing_pipeline.custom import AggregateOperator, CreateFeatureOperator
from tasrif.processing_pipeline.pandas import (
ConvertToDatetimeOperator,
SetIndexOperator,
FillNAOperator
)
interday_folder_path = os.environ.get("FITBIT_INTERDAY_PATH", "/mnt/data/fitbit-data/")
df = FitbitInterdayDataset(interday_folder_path, table_name="Sleep").process()[0]
pipeline = SequenceOperator(
[
ConvertToDatetimeOperator(
feature_names=["Start Time", "End Time"], infer_datetime_format=True
),
FillNAOperator(values={'End Time': df['Start Time']}),
CreateFeatureOperator(
feature_name="Date", feature_creator=lambda df: df["End Time"].dt.date
),
AggregateOperator(
groupby_feature_names="Date",
aggregation_definition={
"Minutes Asleep": "sum",
"Minutes Awake": "sum",
"Number of Awakenings": "sum",
"Time in Bed": "sum",
"Minutes REM Sleep": "sum",
"Minutes Light Sleep": "sum",
"Minutes Deep Sleep": "sum",
},
),
SetIndexOperator("Date"),
]
)
df = pipeline.process()
print(df)
import yaml
import tasrif.yaml_parser as yaml_parser
# This is done because this file is executed within a unit test from a different directory
# The relative path would not work in that case.
# __file__ is not defined in iPython interactive shell
try:
yaml_config_path = os.path.join(
os.path.dirname(__file__), "yaml_config/sleep_dataset.yaml"
)
except:
yaml_config_path = "yaml_config/sleep_dataset.yaml"
with open(yaml_config_path, "r") as stream:
try:
# print(json.dumps(yaml.safe_load(stream), indent=4, sort_keys=True))
p = yaml_parser.from_yaml(stream)
except yaml.YAMLError as exc:
print(exc)
df = p.process()
print(df)
| [
11748,
28686,
198,
198,
6738,
256,
292,
81,
361,
13,
7890,
62,
961,
364,
13,
11147,
2545,
62,
3849,
820,
62,
19608,
292,
316,
1330,
25048,
2545,
818,
6432,
27354,
292,
316,
198,
6738,
256,
292,
81,
361,
13,
36948,
62,
79,
541,
447... | 2.294053 | 908 |
from rest_framework import serializers
from landing.models import Contact
import re
from rest_framework.response import Response
#from store.models import Store
| [
6738,
1334,
62,
30604,
1330,
11389,
11341,
198,
6738,
9581,
13,
27530,
1330,
14039,
198,
11748,
302,
198,
6738,
1334,
62,
30604,
13,
26209,
1330,
18261,
198,
2,
6738,
3650,
13,
27530,
1330,
9363,
198
] | 4.6 | 35 |
"""
Dash callbacks
Only applicant and posting data depend on the requisition number value
Pipeline and subfield data depend on the posting data
Callback chain:
Requisition number dropdown value changes
-> applicant data and posting data load
---> pipeline and subfield data load
-----> charts and footers load
"""
# Third party imports
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import plotly.graph_objs as go
from flask import current_app
from boto3.dynamodb.conditions import Key
# Local application imports
from app.extensions import dynamo
from app.searchcom.chart_config.styling import axes
from app.searchcom.chart_config.colors import colors
# Crosstab table outputs
## Category values as they are entered in HTML id fields
gen_cat_html = ['fem', 'male', 'na']
ethn_cat_html = ['amind', 'asian', 'black', 'pacific', 'white', 'na']
hisp_cat_html = ['hisp', 'nonhisp', 'na']
## Build a list of outputs to all combinations of those fields
xtab_output_list = [Output('searchcom-xtab-table', 'style'), Output('searchcom-xtab-threshold-warning', 'style')]
for i in ethn_cat_html:
for j in hisp_cat_html:
for k in gen_cat_html:
xtab_output_list.append(Output(f"{i}-{j}-{k}", 'children'))
# Category values as they are in the xtab data
# The order should be the same as in HTML categories so that the callback output matches the output list (Male-White-Yes matches male-white-hisp, etc.)
gen_cat = ['Female', 'Male', 'Blank']
ethn_cat = ['American Indian or Alaska Native', 'Asian', 'Black or African American', 'Native Hawaiian or Other Pacific Islander', 'White', 'Blank']
hisp_cat = ['Yes', 'No', 'Blank']
| [
37811,
198,
43041,
869,
10146,
198,
198,
10049,
16919,
290,
10754,
1366,
4745,
319,
262,
1038,
10027,
1271,
1988,
198,
47,
541,
4470,
290,
850,
3245,
1366,
4745,
319,
262,
10754,
1366,
198,
198,
47258,
6333,
25,
198,
16844,
10027,
1271,... | 3.24952 | 521 |
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, Http404
import json
# Create your views here.
from .models import Track | [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
11,
651,
62,
15252,
62,
273,
62,
26429,
198,
6738,
42625,
14208,
13,
4023,
1330,
367,
29281,
31077,
11,
367,
29281,
26429,
198,
11748,
33918,
198,
2,
13610,
534,
5009,
994,
13,
198,
67... | 3.565217 | 46 |
""" Performance test using CP2K quantum chemistry and solid state physics software package for atomistic simulations.
See README.md for details.
NB:
- The executable is either cp2k.popt (for MPI only) or cp2k.psmp (for MPI + OpenMP).
- Only the former is currently implemented here.
"""
import reframe as rfm
import reframe.utility.sanity as sn
from reframe.utility.sanity import defer
from pprint import pprint
import sys, os
from collections import namedtuple
from reframe.core.logging import getlogger
sys.path.append('.')
from reframe_extras import sequence, Scheduler_Info, CachedRunTest
from modules.utils import parse_time_cmd
# CSCS include a CP2k test which provides the input file we need, so find that test:
RFM_CP2K_PATH = os.path.join(os.path.dirname(rfm.__path__[0]), 'cscs-checks', 'apps', 'cp2k')
node_seq = sequence(1, Scheduler_Info().num_nodes + 1, 2)
@rfm.parameterized_test(*[[n_nodes] for n_nodes in node_seq]) | [
37811,
15193,
1332,
1262,
16932,
17,
42,
14821,
16585,
290,
4735,
1181,
11887,
3788,
5301,
329,
22037,
2569,
27785,
13,
628,
220,
220,
220,
4091,
20832,
11682,
13,
9132,
329,
3307,
13,
628,
220,
220,
220,
41354,
25,
198,
220,
220,
220... | 3.025397 | 315 |
# Application Configuration
import os
import yaml
from base.utils.data_utils import json_encoder
# CeNDR Version
APP_CONFIG, CENDR_VERSION = os.environ['GAE_VERSION'].split("-", 1)
if APP_CONFIG not in ['development', 'master']:
APP_CONFIG = 'development'
CENDR_VERSION = CENDR_VERSION.replace("-", '.')
# BUILDS AND RELEASES
# The first release is the current release
# (RELEASE, ANNOTATION_GENOME)
RELEASES = [("20210121", "WS276"),
("20200815", "WS276"),
("20180527", "WS263"),
("20170531", "WS258"),
("20160408", "WS245")]
# The most recent release
DATASET_RELEASE, WORMBASE_VERSION = RELEASES[0]
# SQLITE DATABASE
SQLITE_PATH = f"base/cendr.{DATASET_RELEASE}.{WORMBASE_VERSION}.db"
# CONFIG
def get_config(APP_CONFIG):
"""Load all configuration information including
constants defined above.
(BASE_VARS are the same regardless of whether we are debugging or in production)
"""
config = dict()
BASE_VARS = load_yaml("env_config/base.yaml")
APP_CONFIG_VARS = load_yaml(f"env_config/{APP_CONFIG}.yaml")
config.update(BASE_VARS)
config.update(APP_CONFIG_VARS)
# Add configuration variables
# Remove base prefix for SQLAlchemy as it is loaded
# from application folder
config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{SQLITE_PATH}".replace("base/", "")
config['json_encoder'] = json_encoder
config.update({"CENDR_VERSION": CENDR_VERSION,
"APP_CONFIG": APP_CONFIG,
"DATASET_RELEASE": DATASET_RELEASE,
"WORMBASE_VERSION": WORMBASE_VERSION,
"RELEASES": RELEASES})
return config
# Generate the configuration
config = get_config(APP_CONFIG)
| [
2,
15678,
28373,
198,
11748,
28686,
198,
11748,
331,
43695,
198,
6738,
2779,
13,
26791,
13,
7890,
62,
26791,
1330,
33918,
62,
12685,
12342,
198,
198,
2,
20101,
45,
7707,
10628,
198,
24805,
62,
10943,
16254,
11,
327,
1677,
7707,
62,
43... | 2.393939 | 726 |
import argparse
from hugsvision.nnet.ObjectDetectionTrainer import ObjectDetectionTrainer
parser = argparse.ArgumentParser(description='Object Detection')
parser.add_argument('--name', type=str, default="MyDETRModel", help='The name of the model')
parser.add_argument('--train', type=str, default="./BCCD_COCO/train/", help='The directory of the train folder containing the _annotations.coco.json')
parser.add_argument('--dev', type=str, default="./BCCD_COCO/valid/", help='The directory of the dev folder containing the _annotations.coco.json')
parser.add_argument('--test', type=str, default="./BCCD_COCO/test/", help='The directory of the test folder containing the _annotations.coco.json')
parser.add_argument('--output', type=str, default="./out/", help='The output directory of the model')
parser.add_argument('--epochs', type=int, default=1, help='Number of Epochs')
parser.add_argument('--batch_size', type=int, default=4, help='Batch size')
args = parser.parse_args()
huggingface_model = "facebook/detr-resnet-50"
# huggingface_model = "facebook/detr-resnet-101"
# Train the model
trainer = ObjectDetectionTrainer(
model_name = args.name,
output_dir = args.output,
train_path = args.train,
dev_path = args.dev,
test_path = args.test,
model_path = huggingface_model,
max_epochs = args.epochs,
batch_size = args.batch_size,
)
# Test on a single image
trainer.testing(img_path='../../../samples/blood_cells/42.jpg') | [
11748,
1822,
29572,
201,
198,
201,
198,
6738,
40657,
10178,
13,
77,
3262,
13,
10267,
11242,
3213,
2898,
10613,
1330,
9515,
11242,
3213,
2898,
10613,
201,
198,
201,
198,
48610,
796,
1822,
29572,
13,
28100,
1713,
46677,
7,
11213,
11639,
1... | 2.834933 | 521 |
"""
.. module:: PolyChord
:synopsis: Interface the PolyChord program with MontePython
This implementation relies heavily on the existing Python wrapper for
PolyChord, called PyPolyChord, which comes with the PolyChord code.
To install PolyChord, download it from
`its GitHub repo <https://github.com/PolyChord/PolyChordLite>`_
and follow `these instructions <https://github.com/PolyChord/PolyChordLite#python-likelihoods-pypolychord>`_.
The main routine, :func:`run`, truly interfaces the two codes. It takes for
input the cosmological module, data and command line. It then defines
internally two functions, :func:`prior() <PolyChord.prior>` and
:func:`loglike` that will serve as input for the run function of PyPolyChord.
.. moduleauthor:: Will Handley <wh260@cam.ac.uk>
"""
from __future__ import print_function
from pypolychord import run_polychord as polychord_run
from pypolychord.settings import PolyChordSettings as PC_Settings
import numpy as np
import os
#from copy import copy
import warnings
import io_mp
import sampler
# Data on file names and PolyChord options, that may be called by other modules
# PolyChord subfolder and name separator
PC_subfolder = 'PC'
name_rejected = '_dead-birth.txt' # rejected points
name_post = '.txt' # accepted points
name_stats = '.stats' # summarized information, explained
# New files
name_paramnames = '.paramnames' # in the PC/ subfolder
name_arguments = '.arguments' # in the PC/ subfolder
name_chain_acc = 'chain_PC__accepted.txt' # in the chain root folder
name_chain_rej = 'chain_PC__rejected.txt' # in the chain root folder
# Log.param name (ideally, we should import this one from somewhere else)
name_logparam = 'log.param'
# PolyChord option prefix
PC_prefix = 'PC_'
# User-defined arguments of PyPolyChord, and 'argparse' keywords
# First: basic string -> bool type conversion:
str2bool = lambda s: True if s.lower() == 'true' else False
PC_user_arguments = {
'nlive':
{'type': int,
'help':(
'(Default: nDims*25)\n'
'The number of live points.\n'
'Increasing nlive increases the accuracy of posteriors and evidences,\n'
'and proportionally increases runtime ~ O(nlive).'
)
},
'num_repeats' :
{'type': int,
'help':(
'(Default: nDims*5)\n'
'The number of slice slice-sampling steps to generate a new point.\n'
'Increasing num_repeats increases the reliability of the algorithm.\n'
'Typically\n'
'* for reliable evidences need num_repeats ~ O(5*nDims).\n'
'* for reliable posteriors need num_repeats ~ O(nDims)'
)
},
'do_clustering' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Whether or not to use clustering at run time.'
)
},
'feedback' :
{'type': int,
'help':(
'(Default: 1)\n'
'How much command line feedback to give\n'
'[0,1,2,3]'
)
},
'precision_criterion' :
{'type': float,
'help':(
'(Default: 0.001)\n'
'Termination criterion. Nested sampling terminates when the evidence\n'
'contained in the live points is precision_criterion fraction of the\n'
'total evidence.'
)
},
'max_ndead' :
{'type': int,
'help':(
'(Default: -1)\n'
'Alternative termination criterion. Stop after max_ndead iterations.\n'
'Set negative to ignore (default).'
)
},
'boost_posterior' :
{'type': float,
'help':(
'(Default: 0.0)\n'
'Increase the number of posterior samples produced. This can be set\n'
'arbitrarily high, but you won\'t be able to boost by more than\n'
'num_repeats\n'
'Warning: in high dimensions PolyChord produces _a lot_ of posterior\n'
'samples. You probably don\'t need to change this'
)
},
'posteriors' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce (weighted) posterior samples. Stored in <root>.txt.'
)
},
'equals' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce (equally weighted) posterior samples. Stored in\n'
'<root>_equal_weights.txt'
)
},
'cluster_posteriors' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Produce posterior files for each cluster?\n'
'Does nothing if do_clustering=False.'
)
},
'write_resume' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Create a resume file.'
)
},
'read_resume' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Read from resume file.'
)
},
'write_stats' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write an evidence statistics file.'
)
},
'write_live' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write a live points file.'
)
},
'write_dead' :
{'type': str2bool,
'help':(
'(Default: True)\n'
'Write a dead points file.'
)
},
'compression_factor' :
{'type': float,
'help':(
'(Default: exp(-1))\n'
'How often to update the files and do clustering.'
)
}
}
# Automatically-defined arguments of PyMultiNest, type specified
PC_auto_arguments = {
'file_root': {'type': str},
'base_dir': {'type': str},
'grade_dims': {'type': list},
'grade_frac': {'type': list}
}
# FK: changes for 2cosmos capability
def initialise(cosmo1, cosmo2, data, command_line):
    """
    Main call to prepare the information for the PolyChord run.

    Creates the PC/ and PC/clusters/ output sub-folders, fills
    ``data.PC_arguments`` with the automatic and user-supplied PolyChord
    settings, and writes both the arguments and the parameter ordering to
    disk so the run (and the later analysis step) can recover them.

    Raises io_mp.ConfigurationError if any varying parameter has a
    non-flat prior or is unbounded, since nested sampling requires a
    bounded flat prior hypercube.

    NOTE(review): ``cosmo1`` and ``cosmo2`` are accepted for signature
    compatibility with the 2cosmos sampler interface but are not used in
    this function.
    """
    # Convenience variables
    varying_param_names = data.get_mcmc_parameters(['varying'])
    derived_param_names = data.get_mcmc_parameters(['derived'])
    # Slow block: cosmological parameters of both cosmologies;
    # fast block: nuisance parameters (feeds PolyChord's fast/slow
    # grading below).
    nslow = len(data.get_mcmc_parameters(['varying', 'cosmo1'])) + len(data.get_mcmc_parameters(['varying', 'cosmo2']))
    nfast = len(data.get_mcmc_parameters(['varying', 'nuisance']))
    # Check that all the priors are flat and that all the parameters are bound
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, varying_param_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'Nested Sampling with PolyChord is only possible ' +
            'with flat priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'Nested Sampling with PolyChord is only possible ' +
            'for bound parameters. Set reasonable bounds for them in the ' +
            '".param" file.')
    # If absent, create the sub-folder PC
    PC_folder = os.path.join(command_line.folder, PC_subfolder)
    if not os.path.exists(PC_folder):
        os.makedirs(PC_folder)
    # If absent, create the sub-folder PC/clusters
    PC_clusters_folder = os.path.join(PC_folder,'clusters')
    if not os.path.exists(PC_clusters_folder):
        os.makedirs(PC_clusters_folder)
    # Use chain name as a base name for PolyChord files
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    base_name = os.path.join(PC_folder, chain_name)
    # Prepare arguments for PyPolyChord
    # -- Automatic arguments
    data.PC_arguments['file_root'] = chain_name
    data.PC_arguments['base_dir'] = PC_folder
    data.PC_arguments['grade_dims'] = []
    data.PC_arguments['grade_frac'] = []
    # Fractions presumably control the time split between the slow and
    # fast blocks (0.75 / 0.25) -- see the PolyChord settings docs.
    if nslow:
        data.PC_arguments['grade_dims'].append(nslow)
        data.PC_arguments['grade_frac'].append(0.75)
    if nfast:
        data.PC_arguments['grade_dims'].append(nfast)
        data.PC_arguments['grade_frac'].append(0.25)
    # num_repeats is derived from the size of the first (slow) block.
    # NOTE(review): assumes at least one of nslow/nfast is non-zero,
    # otherwise grade_dims[0] raises IndexError.
    data.PC_arguments['num_repeats'] = data.PC_arguments['grade_dims'][0] * 2
    # -- User-defined arguments
    for arg in PC_user_arguments:
        value = getattr(command_line, PC_prefix+arg)
        # -1 is the sentinel for "not set on the command line"
        if value != -1:
            data.PC_arguments[arg] = value
        # else: don't define them -> use PyPolyChord default value
    data.PC_param_names = varying_param_names
    # Write the PolyChord arguments and parameter ordering
    with open(base_name+name_arguments, 'w') as afile:
        for arg in data.PC_arguments:
            afile.write(' = '.join(
                [str(arg), str(data.PC_arguments[arg])]))
            afile.write('\n')
    with open(base_name+name_paramnames, 'w') as pfile:
        pfile.write('\n'.join(data.PC_param_names+derived_param_names))
# FK: changes for 2cosmos capability
def run(cosmo1, cosmo2, data, command_line):
    """
    Main call to run the PolyChord sampler.

    Note the unusual set-up here, with the two following functions, `prior` and
    `loglike` having their docstrings written in the encompassing function.
    This trick was necessary as PolyChord required these two functions to be
    defined with a given number of parameters, so we could not add `data`. By
    defining them inside the run function, this problem was by-passed.

    .. function:: prior

        Generate the prior function for PolyChord

        It should transform the input unit cube into the parameter cube. This
        function actually wraps the method :func:`map_from_unit_interval()
        <prior.Prior.map_from_unit_interval>` of the class :class:`Prior
        <prior.Prior>`.

        Parameters
        ----------
        cube : list
            Contains the current point in unit parameter space that has been
            selected within the PolyChord part.

        Returns
        -------
        theta : list
            The transformed physical parameters

    .. function:: loglike

        Generate the Likelihood function for PolyChord

        Parameters
        ----------
        theta : array
            Contains the current point in the correct parameter space after
            transformation from :func:`prior`.

        Returns
        -------
        logl : float
            The loglikelihood of theta
        phi : list
            The derived parameters
    """
    # Convenience variables
    derived_param_names = data.get_mcmc_parameters(['derived'])
    nDims = len(data.PC_param_names)
    nDerived = len(derived_param_names)

    # Function giving the prior probability
    def prior(hypercube):
        """
        Please see the encompassing function docstring
        """
        theta = [0.0] * nDims
        for i, name in enumerate(data.PC_param_names):
            theta[i] = data.mcmc_parameters[name]['prior']\
                       .map_from_unit_interval(hypercube[i])
        return theta

    # Function giving the likelihood probability
    def loglike(theta):
        """
        Please see the encompassing function docstring
        """
        # Updates values: theta --> data
        try:
            data.check_for_slow_step(theta)
        except KeyError:
            pass
        for i, name in enumerate(data.PC_param_names):
            data.mcmc_parameters[name]['current'] = theta[i]
        # Propagate the new parameter values into both cosmology codes
        data.update_cosmo1_arguments()
        data.update_cosmo2_arguments()

        # Compute likelihood
        #logl = sampler.compute_lkl(cosmo1, cosmo2, data)[0,0]
        # FK: index to scalar variable error...
        logl = sampler.compute_lkl(cosmo1, cosmo2, data)

        # Compute derived parameters and pass them back
        phi = [0.0] * nDerived
        for i, name in enumerate(derived_param_names):
            phi[i] = float(data.mcmc_parameters[name]['current'])

        return logl, phi

    # Pass over the settings
    settings = PC_Settings(nDims,nDerived)
    # Bug fix: use items() instead of the Python-2-only iteritems(),
    # which raises AttributeError under Python 3 (items() also works
    # on Python 2, where it simply returns a list).
    for arg, val in data.PC_arguments.items():
        setattr(settings, arg, val)

    # Launch PolyChord
    polychord_run(loglike, nDims, nDerived, settings, prior)

    # FK: write out the warning message below also as a file in the PC-subfolder
    # so that there's a clear indication for convergence instead of just looking at
    # the STDOUT-log!
    text = 'The sampling with PolyChord is done.\n' + \
           'You can now analyse the output calling Monte Python ' + \
           'with the -info flag in the chain_name/PC subfolder.'
    warnings.warn(text)
    fname = os.path.join(data.PC_arguments['base_dir'], 'convergence.txt')
    with open(fname, 'w') as afile:
        afile.write(text)
def from_PC_output_to_chains(folder):
    """
    Translate the output of PolyChord into readable output for Monte Python

    This routine will be called by the module :mod:`analyze`.

    If mode separation has been performed (i.e., multimodal=True), it creates
    'mode_#' subfolders containing a chain file with the corresponding samples
    and a 'log.param' file in which the starting point is the best fit of the
    nested sampling, and the same for the sigma. The minimum and maximum value
    are cropped to the extent of the modes in the case of the parameters used
    for the mode separation, and preserved in the rest.

    The mono-modal case is treated as a special case of the multi-modal one.
    """
    # folder is .../<chain_name>/PC, so the chain name is the next-to-last
    # non-empty path component
    chain_name = [a for a in folder.split(os.path.sep) if a][-2]
    base_name = os.path.join(folder, chain_name)
    # Read the arguments of the PC run
    # This file is intended to be machine generated: no "#" ignored or tests
    # done
    PC_arguments = {}
    with open(base_name+name_arguments, 'r') as afile:
        for line in afile:
            arg = line.split('=')[0].strip()
            value = line.split('=')[1].strip()
            # Recover each argument's declared type from the user/auto
            # tables and cast the string representation back to it
            arg_type = (PC_user_arguments[arg]['type']
                        if arg in PC_user_arguments else
                        PC_auto_arguments[arg]['type'])
            value = arg_type(value)
            if arg == 'clustering_params':
                value = [a.strip() for a in value.split()]
            PC_arguments[arg] = value
    multimodal = PC_arguments.get('multimodal')
    # Read parameters order
    PC_param_names = np.loadtxt(base_name+name_paramnames, dtype='str').tolist()
    # In multimodal case, if there were no clustering params specified, ALL are
    if multimodal and not PC_arguments.get('clustering_params'):
        PC_arguments['clustering_params'] = PC_param_names
    # Extract the necessary information from the log.param file
    # Including line numbers of the parameters
    with open(os.path.join(folder, '..', name_logparam), 'r') as log_file:
        log_lines = log_file.readlines()
    # Number of the lines to be changed
    param_names = []
    param_lines = {}
    param_data = {}
    pre, pos = 'data.parameters[', ']'
    for i, line in enumerate(log_lines):
        if pre in line:
            # Skip commented-out parameter definitions
            if line.strip()[0] == '#':
                continue

            # These lines allow PolyChord to deal with fixed nuisance parameters
            # (sigma == 0 means fixed; keep it only if it is a derived one)
            sigma = float(line.split(',')[3].strip())
            if sigma == 0.0:
                #If derived parameter, keep it, else discard it:
                paramtype = line.split(',')[5].strip()[1:-2]
                if paramtype != 'derived':
                    continue

            # Extract the quoted parameter name between the brackets of
            # data.parameters['name'] and its bracketed value list
            param_name = line.split('=')[0][line.find(pre)+len(pre):
                                            line.find(pos)]
            param_name = param_name.replace('"','').replace("'",'').strip()
            param_names.append(param_name)
            param_data[param_name] = [a.strip() for a in
                                      line.split('=')[1].strip('[]').split(',')]
            param_lines[param_name] = i

    # Create the mapping from PC ordering to log.param ordering
    columns_reorder = [PC_param_names.index(param) for param in param_names]

    # Open the 'stats.dat' file to see what happened and retrieve some info
    stats_file = open(base_name+name_stats, 'r')
    lines = stats_file.readlines()
    stats_file.close()
    # Global evidence estimate and its error bar
    for line in lines:
        if 'log(Z) =' in line:
            global_logZ, global_logZ_err = [float(a.strip()) for a in
                                            line.split('=')[1].split('+/-')]
    # Prepare the accepted-points file -- modes are separated by 2 line breaks
    accepted_name = base_name + name_post
    data = np.loadtxt(accepted_name)
    # Halve the second column (presumably converting -2*lnL to -lnL --
    # TODO confirm against the PolyChord output format)
    data[:, 1] = data[:, 1] / 2.
    np.savetxt(os.path.join(folder, '..', name_chain_acc), data, fmt='%.6e')
    # NOTE(review): the source appears truncated here -- the per-mode
    # output promised by the docstring is not implemented in this chunk;
    # confirm against the upstream module.
| [
37811,
198,
492,
8265,
3712,
12280,
1925,
585,
198,
220,
220,
220,
1058,
28869,
24608,
25,
26491,
262,
12280,
1925,
585,
1430,
351,
22489,
37906,
198,
198,
1212,
7822,
16507,
7272,
319,
262,
4683,
11361,
29908,
329,
198,
34220,
1925,
58... | 2.336811 | 7,093 |
import json
from pyld import jsonld
from services.proto import ldnorm_pb2_grpc
from services.proto import ldnorm_pb2 as lpb2
from services.proto import general_pb2
| [
11748,
33918,
198,
6738,
12972,
335,
1330,
33918,
335,
198,
198,
6738,
2594,
13,
1676,
1462,
1330,
300,
67,
27237,
62,
40842,
17,
62,
2164,
14751,
198,
6738,
2594,
13,
1676,
1462,
1330,
300,
67,
27237,
62,
40842,
17,
355,
300,
40842,
... | 3.018182 | 55 |
'''Conway's Game of Life

See: https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life

This particular Game of Life is implemented as a two dimensional
world populated with cells.
'''

# Package metadata: author contact details, one item per line.
__author__ = '\n'.join(["Erik O'Shaughnessy",
                        'erik.oshaughnessy@gmail.com',
                        'https://github.com/JnyJny/GameOfLife'])
__version__ = "0.1.3"

# Re-export the package's public classes under their friendly names.
from .cell import Cell as Cell
from .world import OptimizedWorld as World
from .world import OptimizedNumpyWorld as NumpyWorld
from .patterns import Patterns

# Public API for "from GameOfLife import *".
# NOTE(review): 'tests' is listed but not imported above -- confirm it
# resolves to the sub-package when star-imported.
__all__ = ['Cell', 'World', 'Patterns', 'tests',
           'NumpyWorld']
| [
7061,
6,
3103,
1014,
338,
3776,
286,
5155,
198,
198,
6214,
25,
3740,
1378,
268,
13,
31266,
13,
2398,
14,
15466,
14,
3103,
1014,
4,
1983,
82,
62,
8777,
62,
1659,
62,
14662,
198,
198,
1212,
1948,
3776,
286,
5155,
318,
9177,
355,
257... | 2.497942 | 243 |
"""
The MIT License (MIT)
Copyright (c) 2016 Stratos Goudelis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import hashlib
from cacher import Cacher
from coinaddress import CoinAddress
from cointransaction import CoinTransaction
from mybitbank.libs import misc
from mybitbank.libs.connections import connector
class CoinAccount(object):
    '''
    A single wallet account, backed by a raw account dict (``_account``)
    plus a cache object (``_cache``).

    Supports dictionary-style access (``account['name']``); a few keys
    ("addresses", "last_activity", "currency_symbol", "currency_code",
    "identifier") are computed on the fly, all others are looked up in
    the raw dict.

    NOTE(review): ``__init__`` is not defined in this chunk; instances
    are assumed to carry ``_account``, ``_cache`` and ``_hidden``
    attributes set up elsewhere -- confirm against the full module.
    '''

    @property
    def provider_id(self):
        '''
        Property for the provider id (None if not set).
        '''
        return self.get('provider_id', None)

    def __getitem__(self, key):
        '''
        Getter for dictionary-like behavior.

        Computed keys dispatch to the matching accessor; anything else
        falls through to the raw ``_account`` dict (missing -> None).
        '''
        if key == "addresses":
            return self.getAddresses()
        elif key == "last_activity":
            return self.getLastActivity()
        elif key == "currency_symbol":
            return self.getCurrencySymbol()
        elif key == "currency_code":
            return self.getCurrencyCode()
        elif key == 'identifier':
            return self.getIdentifier()

        account = getattr(self, '_account')
        return account.get(key, None)

    def __setitem__(self, key, value):
        '''
        Setter for dictionary-like behavior: writes into ``_account``.
        '''
        account = getattr(self, '_account')
        account[key] = value
        return setattr(self, '_account', account)

    def get(self, key, default=False):
        '''
        Getter for dictionary-like behavior.

        Note: returns ``default`` not only when the key is missing but
        also when the stored value is falsy (0, "", None, False).
        '''
        if self._account.get(key, False):
            return self._account.get(key, False)
        else:
            return default

    def haskey(self, key):
        '''
        Return True if ``key`` exists in the raw account dict.
        '''
        if key in self._account.keys():
            return True
        else:
            return False

    def getParamHash(self, param=""):
        '''
        Return the sha224 hash (hex digest) of the given parameter string.

        Used to uniquely identify a cached result based only on the
        input parameters of a function/method call.
        '''
        # encode() keeps this working on Python 3, where hashlib
        # requires bytes input (a no-op change for ASCII str on Python 2)
        cache_hash = hashlib.sha224(param.encode('utf-8')).hexdigest()
        return cache_hash

    def getIdentifier(self):
        '''
        Return a deterministic unique identifier for this account.

        There is no unique identifier for an account in a xxxcoind
        daemon, so one is derived by sha1-hashing the provider id,
        account name and currency together.
        '''
        # Bug fix: the separator had been mangled by an HTML-entity pass
        # ("&curren" -> the U+00A4 currency sign, leaving "¤cy=");
        # restore the intended "&currency=" key.
        unique_string = "provider_id=%s&name=%s&currency=%s" % (self['provider_id'], self['name'], self['currency'])
        identifier = hashlib.sha1(unique_string.encode('utf-8')).hexdigest()
        return identifier

    def isDefault(self):
        '''
        Return whether this is the default (unnamed) account.

        Side effect: a default account is also marked hidden.
        '''
        if self._account['name'] == u"":
            self._hidden = True
            return True
        else:
            return False

    def getBalance(self):
        '''
        Return the account balance (as formatted by misc.longNumber).
        '''
        balance = connector.getBalance(self.provider_id, self['name'])
        return misc.longNumber(balance)

    def isHidden(self):
        '''
        Return True if this account is hidden (explicitly, via the raw
        account data, or because it is the default account).
        '''
        return self._hidden or self._account['hidden'] or self.isDefault()

    def getAddresses(self):
        '''
        Return the list of CoinAddress objects for this account name.
        '''
        # check for cached data, use that or get it again
        cache_hash = self.getParamHash("name=%s" % (self['name']))
        cached_object = self._cache.fetch('addressesbyaccount', cache_hash)
        if cached_object:
            return cached_object

        addresses = connector.getAddressesByAccount(self['name'], self.provider_id)
        addresses_list = []
        for address in addresses:
            coinaddr = CoinAddress(address, self)
            addresses_list.append(coinaddr)

        # cache the result
        self._cache.store('addressesbyaccount', cache_hash, addresses_list)
        return addresses_list

    def getAddressesCount(self):
        '''
        Return the number of addresses under this account.
        '''
        addresses = self.getAddresses()
        return len(addresses)

    def getLastActivity(self):
        '''
        Return a human-readable date of the last transaction, or the
        string "never" if there are no transactions.
        '''
        last_transaction = self.listTransactions(1, 0)
        if last_transaction:
            last_activity = misc.twitterizeDate(last_transaction[0]['time'])
        else:
            last_activity = "never"
        self['last_activity'] = last_activity
        return last_activity

    def getCurrencySymbol(self):
        '''
        Return the Unicode currency symbol for this account's currency.
        '''
        return misc.getCurrencySymbol(connector, self.getCurrencyCode())

    def getCurrencyCode(self):
        '''
        Return the lower-cased currency code (empty string if unset).
        '''
        return self.get('currency', "").lower()

    def listTransactions(self, limit=100000, start=0, orderby='time', reverse=True):
        '''
        Return a sorted list of CoinTransaction objects for this account.

        Results are cached per (limit, start, orderby, reverse) tuple.
        '''
        cache_hash = self.getParamHash("limit=%s&start=%sorderby=%s&reverse=%s" % (limit, start, orderby, reverse))
        cached_object = self._cache.fetch('transactions', cache_hash)
        if cached_object:
            return cached_object

        transactions = []
        transaction_list = connector.listTransactionsByAccount(self['name'], self['provider_id'], limit, start)

        for entry in transaction_list:
            if entry.get('address', False):
                entry['address'] = CoinAddress(entry['address'], self)

            # give out a provider id and a currency code to the transaction dict
            entry['provider_id'] = self.provider_id
            entry['currency'] = self['currency']

            if entry['category'] == 'receive':
                entry['source_address'] = CoinAddress(entry.get('details', {}).get('sender_address', False), "This is a sender address!")
            elif entry['category'] == 'send':
                entry['source_addresses'] = self['wallet'].getAddressesByAccount(entry['account'])

            entry['wallet'] = self['wallet']
            coin_transaction = CoinTransaction(entry)
            transactions.append(coin_transaction)

        # sort result
        transactions = sorted(transactions, key=lambda transaction: transaction[orderby], reverse=reverse)

        # cache the result
        self._cache.store('transactions', cache_hash, transactions)
        return transactions
| [
37811,
198,
464,
17168,
13789,
357,
36393,
8,
198,
198,
15269,
357,
66,
8,
1584,
29186,
418,
402,
2778,
417,
271,
198,
198,
5990,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,
16727,
257,
4866,
198,
1659,
428,
378... | 2.394213 | 3,214 |
main() | [
198,
12417,
3419
] | 2.333333 | 3 |
# Generated by Django 3.2.4 on 2021-07-07 22:45
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
19,
319,
33448,
12,
2998,
12,
2998,
2534,
25,
2231,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
# Build a small dict, materialize its (key, value) pairs as a list
# (dict.items() returns a view object on Python 3), and print them.
a = {'a': 1, 'b' : 2}
b = list(a.items())
print(b)
| [
64,
796,
1391,
6,
64,
10354,
352,
11,
705,
65,
6,
1058,
362,
92,
198,
65,
796,
1351,
7,
64,
13,
23814,
28955,
198,
4798,
7,
65,
8,
198
] | 1.758621 | 29 |
import pygame, sys
from Move import Move
from BoardModel import BoardModel
from MoveController import MoveController
from HelpModel import HelpModel
from MenuModel import MenuModel
from Model import Model
'''
CSC290 Group Project
C4: Four In A Row
University of Toronto Mississauga
'''
class Game():
    '''
    Base game instance, which handles game states and transitions
    between the different game screens. The play() method serves
    as the main gameplay loop.

    Structure of state machine inspired by iminurnamez:
    https://gist.github.com/iminurnamez/8d51f5b40032f106a847
    Licensed under CC0 1.0 Universal.
    '''

    def __init__(self, display, screens, model_name):
        '''
        Initialize the Game object.

        Keyword arguments:
        display -- the display Surface used to draw the game
        screens -- a dict mapping names of models to their Model objects
        model_name -- the name of the model of the first game screen
        '''
        self.playing = True
        self.fps = 60
        self.screen = display
        self.screens = screens
        self.model_name = model_name
        self.model = screens[self.model_name]
        self.clock = pygame.time.Clock()
        self.controller = MoveController(screens["board"])

    def event_loop(self):
        '''Pass pygame events to current model to handle current game state.'''
        for event in pygame.event.get():
            # The board screen additionally needs the move controller
            if self.model_name == "board":
                self.model.get_event(event, self.controller)
            else:
                self.model.get_event(event)

    def draw(self):
        '''Pass screen to current model to draw current game state.'''
        self.model.draw(self.screen)

    def update(self, frame_time):
        '''
        Update current model if there is a change, it
        signals for a change in models, or if there is
        a game quit event.

        Keyword arguments:
        frame_time -- milliseconds since last frame
        '''
        if self.model.quit:
            self.playing = False
        elif self.model.done:
            self.change_screen()

    def change_screen(self):
        '''Change the model being used according to next_model.'''
        self.model.done = False
        self.model_name = self.model.next_model
        # Bug fix: look the next model up in self.screens. The previous
        # code referenced the module-level `screens` variable, which
        # only exists when this file is run as a script (NameError when
        # the class is imported and used elsewhere).
        self.model = self.screens[self.model_name]

    def play(self):
        '''The main game loop. Halts upon game exit.'''
        while self.playing:
            frame_time = self.clock.tick(self.fps)
            self.event_loop()
            self.update(frame_time)
            self.draw()
            pygame.display.update()
pygame.display.update()
if __name__ == '__main__':
    # Initialise pygame and its font subsystem before any screen work
    pygame.init()
    pygame.font.init()
    screen = pygame.display.set_mode((900, 700))
    # All game screens keyed by name; "menu" is the starting screen
    screens = {"menu": MenuModel(), "help": HelpModel(), "board": BoardModel()}
    game = Game(screen, screens, "menu")
    # Blocks until the player quits
    game.play()
    # Shut down pygame cleanly before exiting the interpreter
    pygame.quit()
    sys.exit()
| [
11748,
12972,
6057,
11,
25064,
198,
6738,
10028,
1330,
10028,
198,
6738,
5926,
17633,
1330,
5926,
17633,
198,
6738,
10028,
22130,
1330,
10028,
22130,
198,
6738,
10478,
17633,
1330,
10478,
17633,
198,
6738,
21860,
17633,
1330,
21860,
17633,
... | 2.424837 | 1,224 |
import cauldron
from cauldron.test import support
from cauldron.test.support import scaffolds
class TestStepsInsert(scaffolds.ResultsTest):
    """Scenario tests for inserting steps ahead of existing ones and
    verifying the automatic S##-prefix renumbering of step filenames."""

    def test_before(self):
        """Should properly rename default filenames."""
        support.create_project(self, 'candice')
        support.add_step(self)
        # Insert a second step at position 0, ahead of the first one
        support.add_step(self, position='0')

        project = cauldron.project.get_internal_project()
        steps = project.steps

        # Steps should be renumbered so prefixes follow list order
        self.assertTrue(steps[0].filename.startswith('S01'))
        self.assertTrue(steps[1].filename.startswith('S02'))

    def test_multiple_file_types(self):
        """Should properly rename default filenames."""
        support.create_project(self, 'candy')
        support.add_step(self)
        # Insert a markdown step ahead of the default (python) one
        support.add_step(self, name='.md', position='0')

        project = cauldron.project.get_internal_project()
        steps = project.steps

        self.assertTrue(steps[0].filename.startswith('S01'))
        self.assertTrue(steps[1].filename.startswith('S02'))

    def test_multiple_file_types_many(self):
        """Should properly rename default filenames."""
        support.create_project(self, 'candy')
        support.add_step(self)
        support.add_step(self)
        support.add_step(self)
        # Insert a markdown step at the front; the three python steps
        # should all shift down by one
        support.add_step(self, name='.md', position='0')

        project = cauldron.project.get_internal_project()
        steps = project.steps

        self.assertTrue(steps[0].filename.startswith('S01'))
        self.assertTrue(steps[1].filename.startswith('S02'))
        self.assertTrue(steps[2].filename.startswith('S03'))
        self.assertTrue(steps[3].filename.startswith('S04'))

    def test_multiple_file_types_named(self):
        """Should properly rename customized filenames."""
        support.create_project(self, 'candera')
        support.add_step(self, name='A')
        support.add_step(self, name='B')
        support.add_step(self, name='C')
        # Insert a named markdown step at the front; its custom name
        # should be preserved after the S01 prefix
        support.add_step(self, name='D.md', position='0')

        project = cauldron.project.get_internal_project()
        steps = project.steps

        self.assertTrue(steps[0].filename.startswith('S01-D'))
        self.assertTrue(steps[1].filename.startswith('S02'))
        self.assertTrue(steps[2].filename.startswith('S03'))
        self.assertTrue(steps[3].filename.startswith('S04'))
| [
11748,
269,
45637,
198,
6738,
269,
45637,
13,
9288,
1330,
1104,
198,
6738,
269,
45637,
13,
9288,
13,
11284,
1330,
41498,
10119,
628,
198,
4871,
6208,
8600,
82,
44402,
7,
1416,
2001,
10119,
13,
25468,
14402,
2599,
198,
220,
220,
220,
3... | 2.433054 | 956 |
from abc import ABCMeta, abstractmethod
| [
6738,
450,
66,
1330,
9738,
48526,
11,
12531,
24396,
628
] | 4.1 | 10 |
#!/usr/bin/env python
# FMU-QSS Generator
#
# Project: QSS Solver
#
# Language: Python 2.7 and 3.x
#
# Developed by Objexx Engineering, Inc. (https://objexx.com) under contract to
# the National Renewable Energy Laboratory of the U.S. Department of Energy
#
# Copyright (c) 2017-2021 Objexx Engineering, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# (1) Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER, THE UNITED STATES
# GOVERNMENT, OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Notes
# Generates an FMU-QSS from an FMU-ME with specified QSS options
# ElementTree reorders attributes lexicographically and omits comments so we are using lxml
# lxml should be faster than ElementTree other than initial parsing
# lxml is not included with most Python distributions but can be installed with pip or, on Linux, from a package
# Discrete variables that aren't output variables are elided from the FMU-QSS XML
# Zero-crossing variables (__zc_*) are output variables in our FMU-ME for now but are elided from the FMU-QSS XML
# Do
# Add more QSS options->annotations as needed
# Imports
import argparse, errno, os, platform, shutil, subprocess, sys
from zipfile import ZipFile
from collections import OrderedDict
from lxml import etree
def fmu_qss_gen():
'''Generate an FMU-QSS from an FMU-ME'''
# Process arguments
parser = argparse.ArgumentParser()
parser.add_argument( 'ME', help = 'FMU-ME fmu or xml file', default = 'modelDescription.xml' )
parser.add_argument( '--qss', help = 'QSS method (x)(LI)QSS(1|2|3) [QSS2]', default = 'QSS2' )
parser.add_argument( '--rTol', help = 'relative tolerance [FMU]', type = float )
parser.add_argument( '--aTol', help = 'absolute tolerance [1e-6]', type = float, default = 1.0e-6 )
parser.add_argument( '--tEnd', help = 'simulation end time [FMU]', type = float )
args = parser.parse_args()
args.qss = args.qss.upper()
if args.qss not in ( 'QSS1', 'QSS2', 'QSS3', 'LIQSS1', 'LIQSS2', 'LIQSS3', 'xQSS1', 'xQSS2', 'xQSS3' ):
print( '\nUnsupported QSS method: ' + args.qss + ': Must be one of QSS1, QSS2, QSS3, LIQSS1, LIQSS2, LIQSS3, xQSS1, xQSS2, xQSS3' )
sys.exit( 1 )
if ( args.rTol is not None ) and ( args.rTol < 0.0 ):
print( '\nNegative rTol: ' + "{:.16f}".format( args.rTol ) )
sys.exit( 1 )
if args.aTol <= 0.0:
print( '\nNonpositive aTol: ' +"{:.16f}".format( args.aTol ) )
sys.exit( 1 )
if ( args.tEnd is not None ) and ( args.tEnd < 0.0 ):
print( '\nNegative tEnd: ' +"{:.16f}".format( args.tEnd ) )
sys.exit( 1 )
ME_lower = args.ME.lower()
if ME_lower.endswith( '.xml' ): # XML input
me_fmu_name = me_name = None
me_xml_name = args.ME
elif ME_lower.endswith( '.fmu' ): # FMU input
me_fmu_name = args.ME
me_name = os.path.splitext( os.path.basename( me_fmu_name ) )[ 0 ]
me_xml_name = 'modelDescription.xml'
else:
print( '\nFMU-ME input is not a .fmu or .xml file: ' + args.ME )
sys.exit( 1 )
# Extract modelDescription.xml from FMU-ME
if me_fmu_name:
try:
zip_file = ZipFile( me_fmu_name )
zip_file.extract( 'modelDescription.xml' )
zip_file.close()
except:
print( '\nExtracting modelDescription.xml from FMU-ME fmu failed' )
sys.exit( 1 )
# Read FMU-ME xml file
try:
parser = etree.XMLParser( remove_blank_text = True )
tree = etree.parse( me_xml_name, parser )
root = tree.getroot()
except:
print( '\nFMU-ME XML open and parse failed: ' + me_xml_name )
sys.exit( 1 )
# fmiModelDescription changes
if root.tag != "fmiModelDescription":
print( '\nRoot is not fmiModelDescription in FMU-ME XML: ' + me_xml_name )
sys.exit( 1 )
fmiModelDescription = root
if 'modelName' in fmiModelDescription.attrib:
fmiModelDescription.attrib[ 'modelName' ] = fmiModelDescription.attrib[ 'modelName' ] + '_QSS' # FMU-QSS model name gets _QSS suffix
if 'numberOfEventIndicators' in fmiModelDescription.attrib:
fmiModelDescription.attrib[ 'numberOfEventIndicators' ] = '0' # FMU-QSS has no event indicators
guid_placeholder = '@FMU-QSS_GUID@'
fmiModelDescription.attrib[ 'guid' ] = guid_placeholder # Placeholder for running fmu-uuid
# ModelExchange changes
ModelExchange = root.find( 'ModelExchange' )
if ModelExchange is None:
print( '\nModelExchange not found in ' + me_xml_name )
sys.exit( 1 )
if 'modelIdentifier' in ModelExchange.attrib:
ModelExchange.attrib[ 'modelIdentifier' ] = ModelExchange.attrib[ 'modelIdentifier' ] + '_QSS' # FMU-QSS model identifier gets _QSS suffix
# Find ModelVariables
ModelVariables = root.find( 'ModelVariables' )
if ModelVariables is None:
print( '\nModelVariables not found in ' + me_xml_name )
sys.exit( 1 )
# Add QSS annotations
VendorAnnotations = root.find( 'VendorAnnotations' )
if VendorAnnotations is None:
VendorAnnotations = etree.Element( 'VendorAnnotations' )
ModelVariables.addprevious( VendorAnnotations )
QSS = etree.SubElement( VendorAnnotations, 'Tool', attrib = { 'name': 'QSS' } )
Annotations = etree.SubElement( QSS, 'Annotations' )
etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'qss' ), ( 'value', args.qss ) ] ) )
if args.rTol is not None: etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'rTol' ), ( 'value', "{:.16f}".format( args.rTol ) ) ] ) )
etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'aTol' ), ( 'value', "{:.16f}".format( args.aTol ) ) ] ) )
if args.tEnd is not None: etree.SubElement( Annotations, 'Annotation', attrib = OrderedDict( [ ( 'name', 'tEnd' ), ( 'value', "{:.16f}".format( args.tEnd ) ) ] ) )
#Do Add other annotations as needed
# Generate model-specific QSS header
try: # Create QSS options header
QSS_option_name = 'FMU_QSS_options.hh'
if sys.version_info >= ( 3, 0 ):
QSS_option_file = open( QSS_option_name, 'w', newline = '\n' )
else:
QSS_option_file = open( QSS_option_name, 'wb' )
except:
print( '\nQSS options header open failed: ' + QSS_option_name )
sys.exit( 1 )
try: # Write QSS_option header
QSS_option_file.write( '#ifndef FMU_QSS_options_hh_INCLUDED\n' )
QSS_option_file.write( '#define FMU_QSS_options_hh_INCLUDED\n' )
QSS_option_file.write( 'QSS::options::QSS const fmu_qss_qss( QSS::options::QSS::' + args.qss + ' );\n' )
if args.rTol is not None:
QSS_option_file.write( 'double const fmu_qss_rTol( ' + "{:.16f}".format( args.rTol ) + ' );\n' )
else:
QSS_option_file.write( 'double const fmu_qss_rTol( -1.0 ); // Negative => Unspecified\n' )
QSS_option_file.write( 'double const fmu_qss_aTol( ' + "{:.16f}".format( args.aTol ) + ' );\n' )
if args.tEnd is not None:
QSS_option_file.write( 'double const fmu_qss_tEnd( ' + "{:.16f}".format( args.tEnd ) + ' );\n' )
else:
QSS_option_file.write( 'double const fmu_qss_tEnd( -1.0 ); // Negative => Unspecified\n' )
QSS_option_file.write( '#endif\n' )
QSS_option_file.close()
except Exception as err:
print( '\nQSS options header write failed: ' + QSS_option_name + ': ' + str( err ) )
sys.exit( 1 )
# Find ScalarVariables
ScalarVariables = ModelVariables.findall( 'ScalarVariable' ) # List of ScalarVariable
# Identify state variables
try:
ModelStructure = root.find( 'ModelStructure' )
Derivatives = ModelStructure.find( 'Derivatives' )
Unknowns = Derivatives.findall( 'Unknown' )
except:
Unknowns = [] # No state variable derivatives
derivatives_indexes = set() # State variable derivative indexes
for Unknown in Unknowns:
try:
derivatives_indexes.add( int( Unknown.attrib[ 'index' ] ) )
except:
pass
is_state = { i: False for i in range( 1, len( ScalarVariables ) + 1 ) }
for i in range( len( ScalarVariables ) ):
v = ScalarVariables[ i ]
Real = v.find( 'Real' )
if Real is not None:
derivative_of = Real.attrib[ 'derivative' ] if 'derivative' in Real.attrib else None
if derivative_of is not None:
try:
derivative_of_index = int( derivative_of )
if i + 1 in derivatives_indexes: is_state[ derivative_of_index ] = True # Otherwise could be a non-state (internal or input) variable with a derivative
except Exception as err:
name = v.attrib[ 'name' ] if 'name' in v.attrib else ''
print( 'Non-integer derivative in ' + name + ': ' + str( derivative_of ) )
# Migrate variables and map indexes
io = {} # FMU-ME to FMU-QSS variable index map
# oi = {} # FMU-QSS to FMU-ME variable index map
i = o = 0
outputs = []
n_real = n_integer = n_boolean = n_string = 0
n_input_real = n_output_real = 0
try:
n_input_real_max_order = n_output_real_max_order = int( args.qss[ -1 ] )
except Exception as err:
print( '\nFMU-QSS XML generation failed: QSS method order not identified from last character of qss argument: ' + str( args.qss ) )
sys.exit( 1 )
for v in ScalarVariables:
i += 1 # FMU-ME variable index
a = v.attrib
name = a[ 'name' ] if 'name' in a else ''
causality = a[ 'causality' ] if 'causality' in a else 'local'
variability = a[ 'variability' ] if 'variability' in a else 'continuous'
previous = v.getprevious()
comment = previous if ( previous is not None ) and ( previous.tag is etree.Comment ) and str( previous ).startswith( ( '<!-- Variable with index #', '<!-- Index for next variable = ' ) ) else None
if causality in ( 'input', 'output' ) and not ( ( causality == 'output' ) and name.startswith( '__zc_' ) ): # Keep (except zero-crossing output variables)
o += 1 # FMU-QSS variable index
io[ i ] = o
# oi[ o ] = i
Real = v.find( 'Real' )
Integer = v.find( 'Integer' )
Boolean = v.find( 'Boolean' )
String = v.find( 'String' )
if Real is not None:
n_real += 1
elif Integer is not None:
n_integer += 1
elif Boolean is not None:
n_boolean += 1
elif String is not None:
n_string += 1
if causality == 'output':
outputs.append( o )
if Real is not None:
n_output_real += 1
else: # Input
if Real is not None:
n_input_real += 1
set_comment = True
elif ( causality == 'local' ) and ( variability == 'continuous' ) and is_state[ i ]: # State to output variable
a[ 'causality' ] = 'output'
o += 1 # FMU-QSS variable index
io[ i ] = o
# oi[ o ] = i
outputs.append( o )
if 'initial' in a: del a[ 'initial' ] # Drop initial spec
set_comment = True
Real = v.find( 'Real' )
if Real is not None:
n_real += 1
n_output_real += 1
if 'start' in Real.attrib: del Real.attrib[ 'start' ] # Remove start spec
else:
print( '\nFMU-ME (continuous) state variable is not Real: ' + name )
sys.exit( 1 )
else: # Remove
ModelVariables.remove( v )
if comment is not None: ModelVariables.remove( comment )
set_comment = False
if set_comment:
if comment is not None: # Adjust variable index in comment
# comment.text = ' Index for next variable = ' + str( o ) + ' (' + str( i ) + ') ' # Dymola format
comment.text = ' Variable with index #' + str( o ) + ' (' + str( i ) + ') ' # OCT format
else: # Insert comment
# v.addprevious( etree.Comment( ' Index for next variable = ' + str( o ) + ' (' + str( i ) + ') ' ) ) # Dymola format
v.addprevious( etree.Comment( ' Variable with index #' + str( o ) + ' (' + str( i ) + ') ' ) ) # OCT format
# Re-index derivatives
ScalarVariables = ModelVariables.findall( 'ScalarVariable' ) # List of ScalarVariable after above pruning
for v in ScalarVariables:
Real = v.find( 'Real' )
if Real is not None:
derivative = Real.attrib[ 'derivative' ] if 'derivative' in Real.attrib else None
if derivative is not None:
try:
derivative_index = int( derivative )
try:
Real.attrib[ 'derivative' ] = str( io[ derivative_index ] )
except Exception as err:
print( 'Derivative re-indexing failed for ' + ( v.attrib[ 'name' ] if 'name' in v.attrib else '' ) + ': ' + str( err ) )
except:
pass # Already reported this above
# Migrate ModelStructure
ModelStructure = root.find( 'ModelStructure' )
if ModelStructure is None:
ModelStructure = etree.Element( 'ModelStructure' )
ModelVariables.addnext( ModelStructure )
for g in ( 'Derivatives', 'DiscreteStates', 'InitialUnknowns' ): # Remove these sections
e = ModelStructure.find( g )
if e is not None:
ModelStructure.remove( e )
Outputs = ModelStructure.find( 'Outputs' )
if Outputs is None:
Outputs = etree.SubElement( ModelStructure, 'Outputs' )
Unknowns = Outputs.findall( 'Unknown' )
for u in Unknowns: # Remove previous entries
Outputs.remove( u )
for o in outputs:
etree.SubElement( Outputs, 'Unknown', attrib = OrderedDict( [ ( 'index', str( o ) ), ( 'dependencies', '' ) ] ) )
# Write FMU-QSS xml file
#print( etree.tostring( root, pretty_print=True, encoding='unicode' ) ) #Debug#####
try:
qss_xml_name = 'FMU-QSS_' + os.path.basename( me_xml_name )
tree.write( qss_xml_name, encoding = 'UTF-8', xml_declaration = True, pretty_print = True )
except Exception as err:
print( '\nFMU-QSS XML write failed: ' + qss_xml_name + ': ' + str( err ) )
sys.exit( 1 )
# Add GUID to FMU-QSS xml file and generate GUID header
try:
subprocess.call( [ 'fmu-uuid', qss_xml_name, guid_placeholder, qss_xml_name, 'FMU_QSS_GUID.hh', 'FMU_QSS_GUID' ] )
except OSError as e:
if e.errno == errno.ENOENT:
print( '\nFMU-QSS XML GUID computation failed: fmu-uuid program not in PATH' )
else:
print( '\nFMU-QSS XML GUID computation failed: ' + str( e ) )
print( 'Generic no-check GUID header generated' )
try:
guid_name = 'FMU_QSS_GUID.hh'
if sys.version_info >= ( 3, 0 ):
guid_file = open( guid_name, 'w', newline = '\n' )
else:
guid_file = open( guid_name, 'wb' )
except:
print( '\nGUID header open failed: ' + guid_name )
sys.exit( 1 )
try:
guid_file.write( '#ifndef FMU_QSS_GUID\n' )
guid_file.write( '#define FMU_QSS_GUID "FMU-QSS_GUID" // No-check value\n' )
guid_file.write( '#endif\n' )
guid_file.close()
except:
print( '\nGUID header write failed: ' + guid_name )
sys.exit( 1 )
# Generate model-specific size definitions header
try: # Create sizing header
sizing_name = 'FMU_QSS_defines.hh'
if sys.version_info >= ( 3, 0 ):
sizing_file = open( sizing_name, 'w', newline = '\n' )
else:
sizing_file = open( sizing_name, 'wb' )
except:
print( '\nSizing header open failed: ' + sizing_name )
sys.exit( 1 )
try: # Write sizing header: Sizes >=1 to avoid illegal 0-sized arrays
sizing_file.write( '#ifndef FMU_QSS_defines_hh_INCLUDED\n' )
sizing_file.write( '#define FMU_QSS_defines_hh_INCLUDED\n' )
sizing_file.write( '// Note: Sizes are >=1 to avoid illegal 0-sized arrays\n' )
sizing_file.write( '#define BUFFER 1024\n' )
sizing_file.write( '#define N_REAL ' + str( max( n_real, 1 ) ) + '\n' )
sizing_file.write( '#define N_INTEGER ' + str( max( n_integer, 1 ) ) + '\n' )
sizing_file.write( '#define N_BOOLEAN ' + str( max( n_boolean, 1 ) ) + '\n' )
sizing_file.write( '#define N_STRING ' + str( max( n_string, 1 ) ) + '\n' )
sizing_file.write( '#define N_INPUT_REAL ' + str( max( n_input_real, 1 ) ) + '\n' )
sizing_file.write( '#define N_INPUT_REAL_MAX_ORDER ' + str( max( n_input_real_max_order, 1 ) ) + '\n' )
sizing_file.write( '#define N_OUTPUT_REAL ' + str( max( n_output_real, 1 ) ) + '\n' )
sizing_file.write( '#define N_OUTPUT_REAL_MAX_ORDER ' + str( max( n_output_real_max_order, 1 ) ) + '\n' )
sizing_file.write( '#endif\n' )
sizing_file.close()
except Exception as err:
print( '\nSizing header write failed: ' + sizing_name + ': ' + str( err ) )
sys.exit( 1 )
# Generate FMU-QSS
if me_fmu_name:
try: # Directory tree setup
qss_name = me_name + '_QSS'
if os.path.exists( qss_name ):
if os.path.isdir( qss_name ):
shutil.rmtree( qss_name )
elif os.path.isfile( qss_name ):
os.remove( qss_name )
os.mkdir( qss_name )
os.mkdir( os.path.join( qss_name, 'binaries' ) )
if not platform.machine().endswith( '64' ):
print( '\nFMU-QSS generation only supports 64-bit OS at this time' )
sys.exit( 1 )
if sys.platform.startswith( 'linux' ):
binaries_dir = qss_name + '/binaries/linux64'
elif sys.platform.startswith( 'win' ):
binaries_dir = qss_name + '\\binaries\\win64'
else:
print( '\nPlatform is not supported for FMU-QSS generation' )
sys.exit( 1 )
os.mkdir( binaries_dir )
os.mkdir( qss_name + os.sep + 'resources' )
#os.mkdir( qss_name + os.sep + 'sources' )
except Exception as err:
print( '\nFMU-QSS directory tree setup failed: ' + str( err ) )
sys.exit( 1 )
try: # Build FMU-QSS library
QSS = os.environ.get( 'QSS' )
QSS_bin = os.environ.get( 'QSS_bin' )
PlatformCompiler = os.environ.get( 'PlatformCompiler' )
if QSS and QSS_bin and PlatformCompiler: # Configured for building
QSS_src = os.path.join( QSS, 'src', 'QSS' )
fmu_src = os.path.join( QSS_src, 'fmu' )
if os.path.exists( 'src' ):
if os.path.isdir( 'src' ):
shutil.rmtree( 'src' )
elif os.path.isfile( 'src' ):
os.remove( 'src' )
src_bld = 'src'
fmu_bld = os.path.join( 'src', 'QSS', 'fmu' )
os.makedirs( fmu_bld )
shutil.copy( 'FMU_QSS_defines.hh', fmu_bld )
shutil.copy( 'FMU_QSS_GUID.hh', fmu_bld )
shutil.copy( os.path.join( fmu_src, PlatformCompiler, 'GNUmakefile' ), src_bld )
cwd = os.getcwd()
os.chdir( src_bld )
with open( 'GNUmakefile', 'r' ) as sources:
lines = sources.readlines()
with open( 'GNUmakefile', 'w' ) as sources:
for line in lines:
if line.startswith( 'DLB := $(BIN_PATH)' + os.sep ):
sources.write( line.replace( '$(BIN_PATH)' + os.sep, '' ) )
else:
sources.write( line )
try:
import psutil
n_processors = psutil.cpu_count()
except:
print( '\nNon-parallel make used: psutil processor count lookup failed' )
n_processors = 1
try:
subprocess.call( [ 'make', '-j', str( n_processors ) ] )
try:
if sys.platform.startswith( 'linux' ):
qss_lib = os.path.join( cwd, binaries_dir, qss_name + '.so' )
if os.path.isfile( qss_lib ): os.remove( qss_lib )
os.rename( 'libFMU-QSS.so', qss_lib )
elif sys.platform.startswith( 'win' ):
qss_lib = os.path.join( cwd, binaries_dir, qss_name + '.dll' )
if os.path.isfile( qss_lib ): os.remove( qss_lib )
os.rename( 'libFMU-QSS.dll', qss_lib )
except Exception as err:
print( '\nFMU-QSS library move into staging directory failed: ' + str( err ) )
except Exception as err:
print( '\nFMU-QSS library make failed: ' + str( err ) )
os.chdir( cwd )
shutil.rmtree( src_bld )
else:
print( '\nFMU-QSS library can\'t be built: QSS and QSS_bin environment variables are not set' )
except Exception as err:
print( '\nFMU-QSS library build failed: ' + str( err ) )
try: # File setup
shutil.copyfile( qss_xml_name, qss_name + os.sep + 'modelDescription.xml' )
shutil.copy( me_fmu_name, qss_name + os.sep + 'resources' )
except Exception as err:
print( '\nFMU-QSS file setup failed: ' + str( err ) )
sys.exit( 1 )
try: # Zip FMU-QSS
qss_fmu_name = qss_name + '.fmu'
if os.path.exists( qss_fmu_name ):
if os.path.isfile( qss_fmu_name ):
os.remove( qss_fmu_name )
elif os.path.isdir( qss_fmu_name ):
shutil.rmtree( qss_fmu_name )
zip_file = ZipFile( qss_fmu_name, mode = 'w' )
os.chdir( qss_name )
for root, dirs, files in os.walk( '.' ):
dirs.sort()
for dir in dirs:
zip_file.write( os.path.join( root, dir ) )
files.sort()
for file in files:
zip_file.write( os.path.join( root, file ) )
os.chdir( '..' )
zip_file.close()
except Exception as err:
print( '\nFMU-QSS zip into .fmu failed: ' + str( err ) )
sys.exit( 1 )
if __name__ == '__main__':
fmu_qss_gen()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
18695,
52,
12,
48,
5432,
35986,
198,
2,
198,
2,
4935,
25,
1195,
5432,
4294,
332,
198,
2,
198,
2,
15417,
25,
11361,
362,
13,
22,
290,
513,
13,
87,
198,
2,
198,
2,
6013,
... | 2.132575 | 11,435 |
"""
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org/>
@author: Josiah Walker
"""
import numpy,random
from BlockSparseMatrix import BlockSparseMatrix
from BresenhamAlgorithms import BresenhamLine,BresenhamTriangle,BresenhamPolygon
#ranges all given in cm
SonarSensor = {"spread": 15.*numpy.pi/180., "range": 500., "phitfree": -0.3, "phitoccupied": 3.}
class GridMap:
"""
Sparse gridmap for 2D mapping.
"""
def __init__(self,scale=1.0):
"""
@brief Initialise a sparse block grid-map with arc-based sensor updates.
@param scale The multiplier to rescale from input units to map cell size.
"""
self._scale = scale
self._map = BlockSparseMatrix()
def update(self,position,distance,sensorangle,sensor):
"""
@brief Update the map with a sensor reading.
@param position The robot's current position given as (x,y,theta) for hte robot's position and angle.
@param distance The distance measurement from the sensor.
@param sensorangle The current angle from the robot's forward direction to the sensor.
@param sensor A dict holding sensor-specific hardware data (see SonarSensor in this file).
"""
#generate the angle positions (change angleUpdates for more accurate approximation)
angleUpdates = 4
thetas = []
for i in xrange(angleUpdates-1):
thetas.append(position[2] + i*sensor["spread"]/angleUpdates - sensor["spread"]/2. + sensorangle)
thetas.append(position[2] + sensor["spread"]/2. + sensorangle)
#generate the arc and robot positions
positions = [numpy.array(position[:2])*self._scale]
for t in thetas:
positions.append(
numpy.round(
numpy.array([numpy.cos(t),numpy.sin(t)]) *
distance *
self._scale + positions[0]
).astype(numpy.int64)
)
positions[0] = numpy.round(positions[0]).astype(numpy.int64)
#FILL THE EMPTY ARC AREA OF THE SENSOR (as an approximate polygon)
emptyVal = sensor["phitfree"]
for cell in BresenhamPolygon(positions):
self._map[cell[0],cell[1]] = max(emptyVal+self._map[cell[0],cell[1]],-20.) #clip to -20
#DO BRESENHAM detection on the arc edge for object hits
hitVals = BresenhamLine(positions[1],positions[2])
solidVal = sensor["phitoccupied"]
startpt = 0
for i in xrange(1,len(positions)-1):
hitVals = BresenhamLine(positions[i],positions[i+1])
solidVal = sensor["phitoccupied"]
for h in hitVals[startpt:]:
self._map[h[0],h[1]] = min(solidVal+self._map[h[0],h[1]],120.) #clip to 120
startpt = 1 #skip the first part of all following line segments
def get(self,location):
"""
@brief Get the value at a certain x,y location.
@param location A location in the form [x,y]
"""
location = numpy.round(location*self._scale).astype(numpy.int64)
return self._map(location[0],location[1])
def getRange(self,topleft,bottomright):
"""
@brief Get the values for a range of locations as a matrix. Note: this returns at the internal scale, not the external scale
@param topleft A location in the form [x,y] in external units designating the top left of the area
@param bottomright A location in the form [x,y] in external units designating the bottom right of the area
"""
#convert into map scale
topleft = numpy.round(numpy.array(topleft)*self._scale).astype(numpy.int64)
bottomright = numpy.round(numpy.array(bottomright)*self._scale).astype(numpy.int64)
#fill in the output
result = numpy.zeros((bottomright[0]-topleft[0],bottomright[1]-topleft[1]))
for i in xrange(topleft[0],bottomright[0]):
ival = numpy.round(i).astype(numpy.int64)
for j in xrange(topleft[1],bottomright[1]):
jval = numpy.round(j).astype(numpy.int64)
result[i-topleft[0],j-topleft[1]] = self._map[ival,jval]
return result
if __name__ == '__main__':
"""
Do validation test
"""
import time,os
from matplotlib import pyplot
#set this true and have mencoder to create a video of the test
makevideo = True
#set up the map and scale
scale = 100.0
groundtruth = ((1,1,1,1,1),
(1,0,0,0,1),
(1,0,1,0,1),
(1,0,0,0,1),
(1,1,1,1,1))
gridScale = 0.5
#set up the grid map on a 2cm scale (half the input resolution)
estmap = GridMap(scale=gridScale)
#this is the set of positions the rover moves between
tour = ((150.0,150.0,0.0),(350.0,150.0,0.0),
(350.0,150.0,numpy.pi/2.0),(350.0,350.0,numpy.pi/2.0),
(350.0,350.0,numpy.pi),(150.0,350.0,numpy.pi),
(150.0,350.0,numpy.pi*1.5),(150.0,150.0,numpy.pi*1.5),(150.0,150.0,numpy.pi*2))
#this is the number of steps along each part of the tour
divs =100
vals = []
for i in xrange(len(tour)-1):
for j in xrange(divs):
position = numpy.array(tour[i])*(1.-j/float(divs))+numpy.array(tour[(i+1)%len(tour)])*(j/float(divs))
p = position[:2]
a = -position[2]+numpy.pi
offset = numpy.array([numpy.sin(a),numpy.cos(a)])*20.
for k in xrange(4):
#simulate each of the sonar sensor sweeps and see if we hit anything.
sensor = SonarSensor
sensorangle = numpy.pi/2*k
thetamax = position[2] + sensor["spread"]/2. + sensorangle
thetamin = position[2] - sensor["spread"]/2. + sensorangle
baseB = numpy.array([numpy.cos(thetamax),numpy.sin(thetamax)])
baseC = numpy.array([numpy.cos(thetamin),numpy.sin(thetamin)])
hit = False
for distance in xrange(int(sensor["range"])):
B = numpy.round(baseB*distance + position[:2]).astype(numpy.int32)
C = numpy.round(baseC*distance + position[:2]).astype(numpy.int32)
for pos in BresenhamLine(B,C):
if groundtruth[int((pos[0]/scale))][int((pos[1]/scale))] == 1:
distance = numpy.linalg.norm(position[:2] - pos) #add noise in here if you want noise
hit = True
break
if hit:
t0 = time.time()
estmap.update(position,distance,sensorangle,sensor)
vals.append(time.time()-t0)
break
if not hit:
t0 = time.time()
estmap.update(position,distance,sensorangle,sensor)
vals.append(time.time()-t0)
if makevideo: #save out png's for the video
fname = '_tmp%05d.png'%(i*divs+j)
tl = (95,95)
print (i*divs+j)
robot = (numpy.array([p+offset,p-offset,p+numpy.array([-offset[1],offset[0]])])*gridScale-numpy.array(tl)*gridScale).astype(numpy.int64)
emap = numpy.clip(estmap.getRange(tl,(405,405)), -1000,1000 )
for cell in BresenhamTriangle(robot[0],robot[1],robot[2]):
emap[cell[0],cell[1]] = 120
pyplot.imsave(fname,emap)
pyplot.clf()
print "Mean Sensor Update Time:", numpy.mean(vals)
if makevideo: #convert png's to video
#recent ubuntu versions use avconv
os.system("avconv -r 30 -i _tmp%05d.png -b:v 1000k rovertest.mp4")
#os.system("mencoder 'mf://*.png' -mf type=png:fps=30 -ovc lavc -lavcopts vcodec=wmv2 -oac copy -o rovertest.avi")
os.system("rm -f _tmp*.png")
| [
37811,
198,
1212,
318,
1479,
290,
555,
12685,
26584,
3788,
2716,
656,
262,
1171,
7386,
13,
198,
198,
21129,
318,
1479,
284,
4866,
11,
13096,
11,
7715,
11,
779,
11,
17632,
11,
3677,
11,
393,
198,
17080,
4163,
428,
3788,
11,
2035,
287... | 2.118128 | 4,402 |
import pandas as pd
from database.tools import Database, str_list
from app.functions import time_method
audio_features = ['danceability', 'energy', 'key', 'loudness', 'mode', 'speechiness', 'acousticness',
'instrumentalness', 'liveness', 'valence', 'tempo', 'duration_ms', 'time_signature']
@time_method
| [
11748,
19798,
292,
355,
279,
67,
198,
198,
6738,
6831,
13,
31391,
1330,
24047,
11,
965,
62,
4868,
198,
6738,
598,
13,
12543,
2733,
1330,
640,
62,
24396,
198,
198,
24051,
62,
40890,
796,
37250,
67,
590,
1799,
3256,
705,
22554,
3256,
... | 2.77686 | 121 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 8 10:19:43 2021
@author: ali
"""
import numpy as np
import matplotlib.pyplot as plt
from funcs import calcHelfrichUq
from funcs import calc_Helfrich_curve
from funcs import u_m2_calculator
from funcs import calcNelsonUl
from funcs import calc_nelson_amplitude
from funcs import calc_SoftMatter_amplitude
from funcs import get_um2
from funcs import process_ulm2
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
26223,
5979,
220,
807,
838,
25,
1129,
25,
3559,
33448,
198,
198,
31,
9800,
25,
34965,
198... | 2.619048 | 189 |
import time
import torch
import math
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from utils_multi import *
| [
11748,
640,
198,
11748,
28034,
198,
11748,
10688,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
6738,
28034,
13,
2306,
519,
6335,
1330,
35748,
198,
6738,
3384,
4487,
62,
41684,
1330,... | 3.522727 | 44 |
from django import forms
from django.utils.translation import ugettext_lazy as _
from categories.models import Category
| [
198,
6738,
42625,
14208,
1330,
5107,
198,
6738,
42625,
14208,
13,
26791,
13,
41519,
1330,
334,
1136,
5239,
62,
75,
12582,
355,
4808,
198,
198,
6738,
9376,
13,
27530,
1330,
21743,
628
] | 3.84375 | 32 |
"""This module will deal with password management"""
import csv
def storepass(user, passwd, target="admins.csv"):
"""This function is used for storing user-pass combo
as elements to a csv file. By default, the values will be
stored in `creds/admins.csv`. The csvs will always be
saved in the `creds` directory but the filenames can
be changed by using the optional `target` parameter
"""
with open(f"creds/{target}", 'a+') as fil:
writer = csv.writer(fil)
writer.writerow([user, passwd])
def cipherpass(passwd):
"""Inputs a string. Ciphers it using the following
algorithm and returns the ciphered password
Algo:
1. Takes the string.
2. Tranverse though each letter.
3. Take the ascii value of that letter
and doubles it using `chr` function
4. Converts the new ascii value back to
a new letter.
5. Adds that letter to an empty string and
repeat from Step 1 until all letters are
traversed.
6. Returns the `ciphered` string.
"""
tmp = ""
for i in passwd:
tmp += chr(ord(i)*2)
return tmp
def decipherpass(encr):
"""Inputs a strings. Deciphers in using the same algorithm
that was used in `cipherpass`. Returns the original passwd
"""
tmp = ""
for i in encr:
tmp += chr(int(ord(i)/2))
return tmp
def get_pass(target="admins.csv"): # gets the user info from the Csv file
"""This function is used for reading a csv file
and returning the contents in the form of a
dictionary
"""
with open(f"creds/{target}", 'r+', encoding="utf8") as fil:
reader = csv.reader(fil)
print(list(reader))
dic = {}
for i in reader:
dic[i[0]] = i[1]
return dic
| [
37811,
1212,
8265,
481,
1730,
351,
9206,
4542,
37811,
628,
198,
11748,
269,
21370,
628,
198,
4299,
3650,
6603,
7,
7220,
11,
1208,
16993,
11,
2496,
2625,
324,
42951,
13,
40664,
1,
2599,
198,
220,
220,
220,
37227,
1212,
2163,
318,
973,
... | 2.57659 | 692 |
from flask import make_response, jsonify
class Error(Exception):
"""Base class for exceptions in this module."""
pass
| [
6738,
42903,
1330,
787,
62,
26209,
11,
33918,
1958,
628,
198,
198,
4871,
13047,
7,
16922,
2599,
198,
220,
220,
220,
37227,
14881,
1398,
329,
13269,
287,
428,
8265,
526,
15931,
198,
220,
220,
220,
1208,
628,
628,
628,
198
] | 3.375 | 40 |
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# usage: rule_binding.py INPUT CPPDIR HDIR -- INPUTS -- OPTIONS
#
# INPUT is an IDL file, such as Whatever.idl.
#
# CPPDIR is the directory into which V8Whatever.cpp will be placed. HDIR is
# the directory into which V8Whatever.h will be placed.
#
# The first item in INPUTS is the path to generate-bindings.pl. Remaining
# items in INPUTS are used to build the Perl module include path.
#
# OPTIONS are passed as-is to generate-bindings.pl as additional arguments.
import errno
import os
import shlex
import shutil
import subprocess
import sys
if __name__ == '__main__':
sys.exit(main(sys.argv))
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
2,
15069,
357,
66,
8,
3717,
383,
18255,
1505,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
5964,
326,
460,
307,
1... | 3.184739 | 249 |
#
# Copyright 2013 Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from apache.aurora.client.hooks.hooked_api import HookedAuroraClientAPI
from apache.aurora.common.cluster import Cluster
from apache.aurora.common.clusters import Clusters
from gen.apache.aurora.ttypes import (
Response,
ResponseCode,
Result,
)
from mock import Mock
| [
2,
198,
2,
15069,
2211,
24843,
10442,
5693,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,... | 3.505882 | 255 |
"""
Some statistics utils.
"""
import numpy as np
from os.path import join
def expectation_2d(pdf):
"""
Computes the statistical expectation of a pdf defined
over two discrete random variables.
Parameters
----------
pdf: ndarray
a numpy 2-dimensional array with probability for each (x, y).
Returns
-------
ndarray
the expectation for the x and y random variables.
"""
h, w = pdf.shape
pdf = np.float32(pdf)
pdf /= np.sum(pdf)
x_range = range(0, w)
y_range = range(0, h)
cols, rows = np.meshgrid(x_range, y_range)
grid = np.stack((rows, cols), axis=-1)
weighted_grid = pdf[..., None] * grid # broadcasting
E = np.apply_over_axes(np.sum, weighted_grid, axes=[0, 1])
E = np.squeeze(E)
return E
def covariance_matrix_2d(pdf):
"""
Computes the covariance matrix of a 2-dimensional gaussian
fitted over a joint pdf of two discrete random variables.
Parameters
----------
pdf: ndarray
a numpy 2-dimensional array with probability for each (x, y).
Returns
-------
ndarray
the covariance matrix.
"""
h, w = pdf.shape
pdf = np.float32(pdf)
pdf /= np.sum(pdf)
x_range = range(0, w)
y_range = range(0, h)
cols, rows = np.meshgrid(x_range, y_range)
grid = np.stack((rows, cols), axis=-1)
mu = expectation_2d(pdf)
grid = np.float32(grid)
# remove mean
grid -= mu[None, None, :]
grid_flat = np.reshape(grid, newshape=(-1, 2))
# in computing the dot product, pdf has to be counted one (outside the square!)
cov = np.dot(grid_flat.T, grid_flat * np.reshape(pdf, -1)[..., None])
return cov
def read_dreyeve_design(dreyeve_root):
"""
Reads the whole dr(eye)ve design.
Returns
-------
ndarray
the dr(eye)ve design in the form (sequences, params).
"""
with open(join(dreyeve_root, 'dr(eye)ve_design.txt')) as f:
dreyeve_design = np.array([l.rstrip().split('\t') for l in f.readlines()])
return dreyeve_design
| [
37811,
198,
4366,
7869,
3384,
4487,
13,
198,
37811,
628,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
28686,
13,
6978,
1330,
4654,
628,
198,
4299,
17507,
62,
17,
67,
7,
12315,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
3... | 2.442105 | 855 |
#!/usr/bin/env pybricks-micropython
from pybricks.hubs import EV3Brick
from pybricks.ev3devices import (Motor, TouchSensor, ColorSensor,
InfraredSensor, UltrasonicSensor, GyroSensor)
from pybricks.parameters import Port, Stop, Direction, Button, Color
from pybricks.tools import wait, StopWatch, DataLog
from pybricks.robotics import DriveBase
from pybricks.media.ev3dev import SoundFile, ImageFile
# Config
ev3 = EV3Brick()
left_motor = Motor(Port.B)
right_motor = Motor(Port.C)
right_ultra = UltrasonicSensor(Port.S4)
front_ultra = UltrasonicSensor(Port.S3)
gyro = GyroSensor(Port.S2)
data = []
gyro.reset_angle(0)
count = 0
while gyro.angle() < 320:
left_motor.run(200)
right_motor.run(-200)
print(front_ultra.distance())
data.append(front_ultra.distance())
print(data) | [
2,
48443,
14629,
14,
8800,
14,
24330,
12972,
65,
23706,
12,
9383,
1773,
7535,
198,
6738,
12972,
65,
23706,
13,
71,
23161,
1330,
8696,
18,
33,
5557,
198,
6738,
12972,
65,
23706,
13,
1990,
18,
42034,
1330,
357,
34919,
11,
15957,
47864,
... | 2.566978 | 321 |
#coding:utf-8 -*-
import config, metodos
from mensagens import bemvindo
from pprint import *
from metodos import sendMessage
| [
2,
66,
7656,
25,
40477,
12,
23,
532,
9,
12,
198,
11748,
4566,
11,
1138,
375,
418,
198,
6738,
285,
641,
363,
641,
1330,
307,
76,
50172,
78,
198,
6738,
279,
4798,
1330,
1635,
198,
6738,
1138,
375,
418,
1330,
3758,
12837,
198
] | 2.906977 | 43 |
import IceRayCpp
| [
11748,
6663,
19591,
34,
381,
201,
198
] | 2.571429 | 7 |
from .distribution import Distribution
| [
6738,
764,
17080,
3890,
1330,
27484,
201,
198,
201,
198
] | 4.2 | 10 |
import pytest
from data_structures.linked_list.ll_zip import *
from data_structures.linked_list.linked_list import *
@pytest.fixture
@pytest.fixture
| [
11748,
12972,
9288,
198,
6738,
1366,
62,
7249,
942,
13,
25614,
62,
4868,
13,
297,
62,
13344,
1330,
1635,
198,
6738,
1366,
62,
7249,
942,
13,
25614,
62,
4868,
13,
25614,
62,
4868,
1330,
1635,
628,
628,
198,
31,
9078,
9288,
13,
69,
... | 2.90566 | 53 |
import logging
from .resnet_enc import ResNet_D, BasicBlock, Bottleneck
from .res_localHOP_posEmb_enc import ResLocalHOP_PosEmb
__all__ = ['resnet_localHOP_posEmb_encoder_29']
if __name__ == "__main__":
    # Smoke-test entry point: build the encoder and push one dummy batch
    # through it.  `resnet_encoder_29` is defined elsewhere in this file.
    import torch
    logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s] %(levelname)s: %(message)s',
                        datefmt='%m-%d %H:%M:%S')
    resnet_encoder = resnet_encoder_29()
    # Dummy input: batch of 4, 6 channels (presumably RGB image + RGB trimap
    # or similar 6-channel matting input -- confirm against the model), 512x512.
    x = torch.randn(4,6,512,512)
    z = resnet_encoder(x)
    # Encoder returns a sequence; print the shape of its first output tensor.
    print(z[0].shape)
| [
11748,
18931,
198,
6738,
764,
411,
3262,
62,
12685,
1330,
1874,
7934,
62,
35,
11,
14392,
12235,
11,
14835,
43163,
198,
6738,
764,
411,
62,
12001,
39,
3185,
62,
1930,
31567,
62,
12685,
1330,
1874,
14565,
39,
3185,
62,
21604,
31567,
628... | 2.166667 | 228 |
# Program to check closing of all brackets
# Smoke test for `par_checker` (defined elsewhere in this file): it is
# expected to return True when every (, [, { is closed in the right order.
print(par_checker('{({([][])}())}')) # True
print(par_checker('[{()]')) # False
| [
2,
6118,
284,
2198,
9605,
286,
477,
28103,
628,
628,
198,
198,
4798,
7,
1845,
62,
9122,
263,
10786,
90,
15090,
26933,
7131,
12962,
92,
28955,
92,
6,
4008,
220,
1303,
6407,
198,
4798,
7,
1845,
62,
9122,
263,
10786,
58,
90,
3419,
49... | 2.653061 | 49 |
from . import csvTasks
from . import Gender
# from . import Learn | [
6738,
764,
1330,
269,
21370,
51,
6791,
198,
6738,
764,
1330,
20247,
198,
2,
422,
764,
1330,
14365
] | 3.611111 | 18 |
# Webhook verification token (presumably for a Messenger-style webhook
# handshake -- confirm against the caller).  Intentionally blank here;
# supply the real value through deployment configuration, not source.
VERIFY_TOKEN = ''
5959,
5064,
56,
62,
10468,
43959,
796,
10148
] | 2.125 | 8 |
#!/usr/bin/env python
import html
import re
from subprocess import check_output
import yaml
# Matches "code: <name>" markers in the source document; group(1) is the
# snippet name looked up by code_for() below in this file.
find_code = re.compile(r'code: (\w+)').search
# Side-by-side comparison table; {py} and {go} receive highlighted snippets.
table_html = '''
<table class="code table table-sm">
<tbody>
<tr>
<td>{py}</td>
<td>{go}</td>
</tr>
</tbody>
</table>
'''
# One table row linking a Python stdlib module to its Go counterpart.
module_html = '''
<tr>
<td>{task}</td>
<td><a href="https://docs.python.org/3/library/{python}.html">
{python}</a>
</td>
<td><a href="https://golang.org/pkg/{go}/">{go}</a></td>
</tr>
'''
# Snippet delimiters: a START/END marker in either '//' or '#' comment style.
is_start = re.compile(r'(//|#) START').search
is_end = re.compile(r'(//|#) END').search
# Leading-whitespace matcher (used to measure/strip common indentation).
find_spaces = re.compile('^[ \t]+').match
if __name__ == '__main__':
    from argparse import ArgumentParser, FileType
    from sys import stdin
    parser = ArgumentParser()
    # Input document defaults to stdin so the script works in a pipeline.
    parser.add_argument('--file', type=FileType(), default=stdin)
    args = parser.parse_args()
    # Stream the document line by line, expanding the two kinds of markers
    # ("code: <name>" and ":modules:") and echoing everything else verbatim.
    for line in args.file:
        line = line[:-1]  # trim newline
        match = find_code(line)
        if match:
            # Replace the marker line with a Python/Go comparison table.
            name = match.group(1)
            py = htmlize(code_for(name, 'python'), 'python')
            go = htmlize(code_for(name, 'go'), 'go')
            print(table_html.format(py=py, go=go))
        elif line.strip() == ':modules:':
            modules()
        else:
            print(line)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
27711,
198,
11748,
302,
198,
6738,
850,
14681,
1330,
2198,
62,
22915,
198,
198,
11748,
331,
43695,
198,
198,
19796,
62,
8189,
796,
302,
13,
5589,
576,
7,
81,
6,
8189,
25,... | 2.146179 | 602 |
"""
Systems
Endpoints for installing, reading, configuring and deleting transit systems.
"""
import flask
import requests
from transiter import exceptions
from transiter.http import httpmanager, httpviews
from transiter.http.httpmanager import (
http_endpoint,
link_target,
HttpMethod,
HttpStatus,
)
from transiter.http.permissions import requires_permissions, PermissionsLevel
from transiter.services import stopservice, systemservice, views
system_endpoints = flask.Blueprint(__name__, __name__)
# GET / -- list every installed transit system.
@http_endpoint(system_endpoints, "")
@link_target(httpviews.SystemsInstalled)
def list_all():
    """
    List all systems

    List all transit systems that are installed in this Transiter instance.
    """
    # Thin HTTP wrapper: all real work happens in the service layer.
    return systemservice.list_all()
# GET /<system_id> -- fetch one system; the service layer raises (-> 404)
# when the ID is unknown.
@http_endpoint(system_endpoints, "/<system_id>")
@link_target(views.System, ["id"])
def get_by_id(system_id):
    """
    Get a specific system

    Get a system by its ID.

    Return code | Description
    ------------|-------------
    `200 OK` | A system with this ID exists.
    `404 NOT FOUND` | No system with the provided ID is installed.
    """
    return systemservice.get_by_id(system_id)
# GET /<system_id>/transfers -- list transfers, optionally filtered by
# repeatable from_stop_id / to_stop_id URL parameters.
@http_endpoint(system_endpoints, "/<system_id>/transfers")
@link_target(views.TransfersInSystem, ["_system_id"])
def list_all_transfers_in_system(system_id):
    """
    List all transfers in a system

    List all transfers in a system.

    Return code | Description
    ------------|-------------
    `200 OK` | A system with this ID exists.
    `404 NOT FOUND` | No system with the provided ID is installed.
    """
    # Both filters accept multiple values (list-valued URL parameters);
    # None means "no filtering on that side".
    from_stop_ids = httpmanager.get_list_url_parameter("from_stop_id")
    to_stop_ids = httpmanager.get_list_url_parameter("to_stop_id")
    return stopservice.list_all_transfers_in_system(
        system_id, from_stop_ids=from_stop_ids, to_stop_ids=to_stop_ids
    )
# PUT /<system_id> -- install or update a transit system.
@http_endpoint(
    system_endpoints, "/<system_id>", method=HttpMethod.PUT,
)
@requires_permissions(PermissionsLevel.ALL)
def install(system_id):
    """
    Install a system

    This endpoint is used to install or update transit systems.
    Installs/updates can be performed asynchronously (recommended)
    or synchronously (using the optional URL parameter `sync=true`; not recommended);
    see below for more information.

    The endpoint accepts `multipart/form-data` requests.
    There is a single required parameter, `config_file`, which
    specifies the YAML configuration file for the Transit system.
    (There is a [dedicated documentation page](systems.md) concerned with creating transit system configuration files.)
    The parameter can either be:

    - A file upload of the configuration file, or
    - A text string, which will be interpreted as a URL pointing to the configuration file.

    In addition, depending on the configuration file, the endpoint will also accept extra text form data parameters.
    These additional parameters are used for things like API keys, which are different
    for each user installing the transit system.
    The configuration file will customize certain information using the parameters -
    for example, it might include an API key as a GET parameter in a feed URL.
    If you are installing a system using a YAML configuration provided by someone else, you
    should be advised of which additional parameters are needed.
    If you attempt to install a system without the required parameters, the install will fail and
    the response will detail which parameters you're missing.

    #### Async versus sync

    Often the install/update process is long because it often involves performing
    large feed updates
    of static feeds - for example, in the case of the New York City Subway,
    an install takes close to two minutes.
    If you perform a synchronous install, the install request is liable
    to timeout - for example, Gunicorn by default terminates HTTP
    requests that take over 60 seconds.
    For this reason you should generally install asynchronously.

    After triggering the install asynchronously, you can track its
    progress by hitting the `GET` system endpoint repeatedly.

    Synchronous installs are supported and useful when writing new
    transit system configs, in which case getting feedback from a single request
    is quicker.

    Return code | Description
    --------------------|-------------
    `201 CREATED` | For synchronous installs, returned if the transit system was successfully installed.
    `202 ACCEPTED` | For asynchronous installs, returned if the install is successfully triggered. This does not necessarily mean the system will be succesfully installed.
    `400 BAD REQUEST` | Returned if the YAML configuration file cannot be retrieved. For synchronous installs, this code is also returned if there is any kind of install error.
    """
    # Everything except the config file itself is treated as a user-supplied
    # install parameter (API keys etc.), so pop config_file out first.
    form_key_to_value = flask.request.form.to_dict()
    config_file_url = form_key_to_value.pop("config_file", None)
    sync = httpmanager.is_sync_request()
    system_update_pk = systemservice.install(
        system_id=system_id,
        config_str=_get_config_file(
            config_file_url, flask.request.files.get("config_file")
        ),
        extra_settings=form_key_to_value,
        config_source_url=config_file_url,
        sync=sync,
    )
    if sync:
        # Synchronous installs report the final outcome in the status code.
        if (
            systemservice.get_update_by_id(system_update_pk).status
            == views.SystemUpdateStatus.SUCCESS
        ):
            status = HttpStatus.CREATED
        else:
            status = HttpStatus.BAD_REQUEST
    else:
        # Asynchronous installs only confirm that the update was triggered.
        status = HttpStatus.ACCEPTED
    # Fix: removed a leftover debug print that dumped the response tuple to
    # stdout on every install request.
    return systemservice.get_update_by_id(system_update_pk), status
# DELETE /<system_id> -- uninstall a system; returns a bare (non-JSON)
# response, hence returns_json_response=False.
@http_endpoint(
    system_endpoints,
    "/<system_id>",
    method=HttpMethod.DELETE,
    returns_json_response=False,
)
@requires_permissions(PermissionsLevel.ALL)
def delete_by_id(system_id):
    """
    Uninstall (delete) a system

    The uninstall can be performed asynchronously or synchronously (using the
    optional URL parameter `sync=true`).

    You should almost always use the asynchronous version of this endpoint.
    It works by changing the system ID to be a new "random" ID, and then performs
    the delete asynchronously.
    This means that at soon as the HTTP request ends (within a few milliseconds)
    the system is invisible to users, and available for installing a new system.
    The actual delete takes up to a few minutes for large transit systems like
    the NYC Subway.

    Return code | Description
    --------------------|-------------
    `202 ACCEPTED` | For asynchronous deletes, returned if the delete is successfully triggered.
    `204 NO CONTENT` | For synchronous deletes, returned if the system was successfully deleted.
    `404 NOT FOUND` | Returned if the system does not exist.
    """
    # Service layer raises (-> 404) for unknown IDs because
    # error_if_not_exists=True.
    systemservice.delete_by_id(
        system_id, error_if_not_exists=True, sync=httpmanager.is_sync_request()
    )
    if httpmanager.is_sync_request():
        status = HttpStatus.NO_CONTENT
    else:
        status = HttpStatus.ACCEPTED
    # Empty body: the status code carries all the information.
    return flask.Response(response="", status=status, content_type="")
# PUT /<system_id>/auto-update -- toggle auto-update for a system's feeds.
@http_endpoint(
    system_endpoints, "/<system_id>/auto-update", method=HttpMethod.PUT,
)
@requires_permissions(PermissionsLevel.ALL)
def set_auto_update_enabled(system_id):
    """
    Configure system auto-update

    Configure whether auto-update is enabled for
    auto-updatable feeds in a system.

    The endpoint takes a single form parameter `enabled`
    which can either be `true` or `false` (case insensitive).

    Return code | Description
    --------------------|-------------
    `204 NO CONTENT` | The configuration was applied successfully.
    `400 BAD REQUEST` | Returned if the form parameter is not provided or is invalid.
    `404 NOT FOUND` | Returned if the system does not exist.
    """
    # TODO: this should just accept a URL parameter
    form_key_to_value = flask.request.form.to_dict()
    enabled = form_key_to_value.get("enabled")
    if enabled is None:
        raise exceptions.InvalidInput("The form variable 'enabled' is required")
    enabled = enabled.lower()
    if enabled not in {"false", "true"}:
        raise exceptions.InvalidInput(
            "The form variable 'enabled' has to be 'true' or 'false', not '{}'".format(
                enabled
            )
        )
    # Use the already-validated, lower-cased local instead of re-reading and
    # re-lowering the raw form dict (same value, one source of truth).
    systemservice.set_auto_update_enabled(system_id, enabled == "true")
    return "", HttpStatus.NO_CONTENT
| [
37811,
198,
11964,
82,
198,
198,
12915,
13033,
329,
15975,
11,
3555,
11,
4566,
870,
290,
34817,
11168,
3341,
13,
198,
37811,
198,
11748,
42903,
198,
11748,
7007,
198,
198,
6738,
1007,
2676,
1330,
13269,
198,
6738,
1007,
2676,
13,
4023,
... | 3.077419 | 2,790 |
# xctf{35eedc512678301f582de3176d1fc81c}
# CTF solution script (Python 2 -- note the print statement on the last
# line).  Builds a payload string for the challenge's sandboxed language:
# binds each of the 16 characters of "Source"() to x0..x15, collects their
# first elements into a list `a`, then passes it to "Sink".
payload = ''
for i in range(16):
    payload += 'x%d=["Source"()[%d]];' % (i, i)
payload += 'a=['
payload += ','.join('x%d[0]' % i for i in range(16))
payload += '];"Sink"(a)'
print payload
| [
2,
2124,
310,
69,
90,
2327,
2308,
66,
25836,
3134,
5999,
486,
69,
46044,
2934,
18,
24096,
67,
16,
16072,
6659,
66,
92,
198,
15577,
2220,
796,
10148,
198,
198,
1640,
1312,
287,
2837,
7,
1433,
2599,
198,
220,
220,
220,
21437,
15853,
... | 2.052632 | 114 |
from django.conf import settings
from django.conf.urls.defaults import patterns, url
# URL routes for the fbapp views (canvas page, FB page tab, and the
# open-web-app pages plus its manifest and FB channel file).
# NOTE(review): `patterns()` with string view paths is a legacy Django API
# (removed in Django 1.10); fine for the Django version this project pins,
# but must be rewritten as a plain list of url()/path() on upgrade.
urlpatterns = patterns('',
    url(r'^$', 'fbapp.views.canvas', name="fbapp.canvas"),
    url(r'^page_tab$', 'fbapp.views.page_tab', name="fbapp.page_tab"),
    url(r'^owa$', 'fbapp.views.open_web_app', name="fbapp.open_web_app"),
    url(r'^owa/chirpradio\.webapp$', 'fbapp.views.open_web_app_manifest',
        name="fbapp.open_web_app_manifest"),
    url(r'^channel\.html$', 'fbapp.views.channel', name="fbapp.channel"),
)
| [
6738,
42625,
14208,
13,
10414,
1330,
6460,
198,
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
13,
12286,
82,
1330,
7572,
11,
19016,
198,
198,
6371,
33279,
82,
796,
7572,
10786,
3256,
198,
220,
220,
220,
19016,
7,
81,
6,
61,
3,
3256,
... | 2.348624 | 218 |
import torch
import math | [
198,
11748,
28034,
198,
11748,
10688
] | 4.166667 | 6 |
# coding=utf-8
# name=hu_yang_jie
#coding=utf-8
import cv2
import numpy as np
img = cv2.imread("bili.jpg") # load the image
h, w = img.shape[:2] # image height and width
cv2.imshow("Origin", img) # show the original image
blured = cv2.blur(img,(5,5)) # box blur to suppress noise
cv2.imshow("Blur", blured) # show the low-pass filtered image
# Flood-fill mask must be 2 px taller and wider than the input so the fill
# cannot run off the edge; non-zero mask pixels block the fill.
mask = np.zeros((h+2, w+2), np.uint8)
# Flood-fill the background with white, seeded at the bottom-right corner.
cv2.floodFill(blured, mask, (w-1,h-1), (255,255,255), (2,2,2),(3,3,3),8)
cv2.imshow("floodfill", blured)
# Convert to grayscale.
gray = cv2.cvtColor(blured,cv2.COLOR_BGR2GRAY)
cv2.imshow("gray", gray)
# Structuring element for the morphological operations.
kernel = cv2.getStructuringElement(cv2.MORPH_RECT,(50, 50))
# Opening removes background speckle; the following closing fills holes
# inside the target region.
opened = cv2.morphologyEx(gray, cv2.MORPH_OPEN, kernel)
closed = cv2.morphologyEx(opened, cv2.MORPH_CLOSE, kernel)
cv2.imshow("closed", closed)
# Threshold to a binary image.
ret, binary = cv2.threshold(closed,250,255,cv2.THRESH_BINARY)
cv2.imshow("binary", binary)
# Find contours (3-value unpacking = OpenCV 3.x findContours signature).
_,contours, hierarchy = cv2.findContours(binary,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
# Draw all contours in red onto the original image.
cv2.drawContours(img,contours,-1,(0,0,255),3)
# Show the result and wait for a key press before tearing down the windows.
cv2.imshow("result", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
2,
19617,
28,
40477,
12,
23,
198,
2,
1438,
28,
13415,
62,
17859,
62,
73,
494,
198,
2,
66,
7656,
28,
40477,
12,
23,
198,
11748,
269,
85,
17,
198,
11748,
299,
32152,
355,
45941,
198,
198,
9600,
796,
269,
85,
17,
13,
320,
961,
72... | 1.368356 | 809 |
"""
Package version number.
"""
# We have to put it here, because setup.py (and easy_install) cannot import
# nxturtle before requirements are satisfied.
# Package version string (kept here so setup.py can read it -- see above).
__version__ = "1.0.4"
| [
37811,
201,
198,
27813,
2196,
1271,
13,
201,
198,
37811,
201,
198,
2,
775,
423,
284,
1234,
340,
994,
11,
780,
9058,
13,
9078,
357,
392,
2562,
62,
17350,
8,
2314,
1330,
220,
201,
198,
2,
299,
742,
17964,
878,
5359,
389,
11378,
13,
... | 3.066667 | 60 |
import dataclasses
import datetime
import json
import typing
import uuid
from dataclasses_avroschema.schema_generator import SchemaGenerator
| [
11748,
4818,
330,
28958,
198,
11748,
4818,
8079,
198,
11748,
33918,
198,
11748,
19720,
198,
11748,
334,
27112,
198,
198,
6738,
4818,
330,
28958,
62,
615,
305,
15952,
2611,
13,
15952,
2611,
62,
8612,
1352,
1330,
10011,
2611,
8645,
1352,
... | 3.452381 | 42 |
import os
import spira
import pygmsh
import meshio
import inspect
from spira.core.lists import ElementList
# from spira.gdsii.utils import numpy_to_list
from spira import param
from spira.lne.mesh import Mesh
from spira.core.initializer import ElementalInitializer
| [
11748,
28686,
198,
11748,
599,
8704,
198,
11748,
12972,
70,
907,
71,
198,
11748,
19609,
952,
198,
11748,
10104,
198,
198,
6738,
599,
8704,
13,
7295,
13,
20713,
1330,
11703,
8053,
198,
2,
422,
599,
8704,
13,
70,
9310,
4178,
13,
26791,
... | 3.280488 | 82 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
=============
TAP plus
=============
@author: Juan Carlos Segovia
@contact: juan.carlos.segovia@sciops.esa.int
European Space Astronomy Centre (ESAC)
European Space Agency (ESA)
Created on 30 jun. 2016
"""
from astroquery.utils.tap import taputils
from six.moves.urllib.parse import urlencode
| [
2,
49962,
739,
257,
513,
12,
565,
682,
347,
10305,
3918,
5964,
532,
766,
38559,
24290,
13,
81,
301,
198,
37811,
198,
25609,
28,
198,
51,
2969,
5556,
198,
25609,
28,
198,
198,
31,
9800,
25,
16852,
17409,
1001,
9567,
544,
198,
31,
3... | 3.041322 | 121 |
# encoding=utf8
"""Simulated annealing algorithm module."""
import numpy as np
from WeOptPy.algorithms.interfaces import Algorithm
__all__ = [
'SimulatedAnnealing',
'coolDelta',
'coolLinear'
]
def coolDelta(currentT, T, deltaT, nFES, **kwargs):
    r"""Calculate the new temperature by subtracting a fixed decrement.

    Args:
        currentT (float): Current temperature.
        T (float): Starting temperature (unused; kept so all cooling
            functions share the same signature).
        deltaT (float): Fixed temperature decrement per iteration.
        nFES (int): Number of function evaluations (unused; kept for the
            uniform cooling-function signature).
        kwargs (Dict[str, Any]): Additional arguments (ignored).

    Returns:
        float: New temperature.
    """
    return currentT - deltaT
def coolLinear(currentT, T, deltaT, nFES, **kwargs):
    r"""Calculate the new temperature with a linear schedule.

    Decreases the temperature by ``T / nFES`` each call, so the starting
    temperature is spent evenly across the evaluation budget.

    Args:
        currentT (float): Current temperature.
        T (float): Starting temperature.
        deltaT (float): Temperature decrement (unused; kept so all cooling
            functions share the same signature).
        nFES (int): Total number of function evaluations.
        kwargs (Dict[str, Any]): Additional arguments (ignored).

    Returns:
        float: New temperature.
    """
    return currentT - T / nFES
class SimulatedAnnealing(Algorithm):
    r"""Implementation of Simulated Annealing Algorithm.

    Algorithm:
        Simulated Annealing Algorithm

    Date:
        2018

    Authors:
        Jan Popič and Klemen Berkovič

    License:
        MIT

    Reference URL:
        TODO

    Reference paper:
        TODO

    Attributes:
        Name (List[str]): List of strings representing algorithm name.
        delta (float): Movement for neighbour search.
        T (float): Starting temperature.
        deltaT (float): Change in temperature.
        coolingMethod (Callable): Cooling-schedule function.
        epsilon (float): Error value.

    See Also:
        * :class:`NiaPy.algorithms.Algorithm`
    """
    Name = ['SimulatedAnnealing', 'SA']

    @staticmethod
    def algorithm_info():
        r"""Get basic information of algorithm.

        Returns:
            str: Basic information of algorithm.

        See Also:
            * :func:`NiaPy.algorithms.Algorithm.algorithmInfo`
        """
        return r"""None"""

    @staticmethod
    def type_parameters():
        r"""Get dictionary with functions for checking values of parameters.

        Returns:
            Dict[str, Callable]:
                * delta, T, deltaT (Callable): accept positive numbers.
                * epsilon (Callable): accepts floats strictly between 0 and 1.
        """
        return {
            'delta': lambda x: isinstance(x, (int, float)) and x > 0,
            'T': lambda x: isinstance(x, (int, float)) and x > 0,
            'deltaT': lambda x: isinstance(x, (int, float)) and x > 0,
            'epsilon': lambda x: isinstance(x, float) and 0 < x < 1
        }

    def set_parameters(self, delta=0.5, T=2000, deltaT=0.8, coolingMethod=coolDelta, epsilon=1e-23, **ukwargs):
        r"""Set the algorithm parameters/arguments.

        Arguments:
            delta (Optional[float]): Movement for neighbour search.
            T (Optional[float]): Starting temperature.
            deltaT (Optional[float]): Change in temperature.
            coolingMethod (Optional[Callable]): Cooling-schedule function.
            epsilon (Optional[float]): Error value.

        See Also
            * :func:`NiaPy.algorithms.Algorithm.setParameters`
        """
        # SA is a single-solution algorithm: force population size n=1
        # regardless of what the caller passed.
        ukwargs.pop('n', None)
        Algorithm.set_parameters(self, n=1, **ukwargs)
        self.delta, self.T, self.deltaT, self.cool, self.epsilon = delta, T, deltaT, coolingMethod, epsilon

    def get_parameters(self):
        r"""Get algorithm parameter values.

        Returns:
            Dict[str, Any]: Parameter name to current value.

        See Also
            * :func:`NiaPy.algorithms.Algorithm.getParameters`
        """
        d = Algorithm.get_parameters(self)
        d.update({
            'delta': self.delta,
            'deltaT': self.deltaT,
            'T': self.T,
            'epsilon': self.epsilon
        })
        return d

    def init_population(self, task):
        r"""Initialize the starting population.

        Args:
            task (Task): Optimization task.

        Returns:
            Tuple[numpy.ndarray, float, list, dict]:
                1. Initial solution.
                2. Initial solutions fitness/objective value.
                3. Additional arguments.
                4. Additional keyword arguments (carries the temperature).
        """
        # Single uniform-random point in the task's search box.
        x = task.Lower + task.bRange * self.rand(task.D)
        curT, xfit = self.T, task.eval(x)
        # Temperature travels between iterations via the kwargs dict.
        return x, xfit, [], {'curT': curT}

    def run_iteration(self, task, x, xfit, xb, fxb, curT, *args, **dparams):
        r"""Core function of the algorithm.

        Args:
            task (Task): Optimization task.
            x (numpy.ndarray): Current solution.
            xfit (float): Current solution's fitness.
            xb (numpy.ndarray): Global best solution.
            fxb (float): Global best fitness.
            curT (float): Current temperature.
            args (list): Additional arguments.
            dparams (dict): Additional keyword arguments.

        Returns:
            Tuple[numpy.ndarray, float, numpy.ndarray, float, list, dict]:
                1. New solution.
                2. New solutions fitness/objective value.
                3. New global best solution.
                4. New global best solutions fitness/objective value.
                5. Additional arguments.
                6. Additional keyword arguments.
        """
        # Candidate: uniform step in [-delta/2, +delta/2) per dimension,
        # repaired back into the search box.
        c = task.repair(x - self.delta / 2 + self.rand(task.D) * self.delta, rnd=self.Rand)
        cfit = task.eval(c)
        deltaFit, r = cfit - xfit, self.rand()
        # Accept improvements (deltaFit < 0) outright, otherwise accept
        # probabilistically.
        # NOTE(review): classic Metropolis acceptance uses exp(-deltaFit/T);
        # as written, exp(deltaFit/curT) >= 1 whenever deltaFit >= 0, so every
        # worse candidate is accepted -- verify against the reference paper.
        if deltaFit < 0 or r < np.exp(deltaFit / curT): x, xfit = c, cfit
        curT = self.cool(curT, self.T, deltaT=self.deltaT, nFES=task.nFES)
        xb, fxb = self.get_best(x, xfit, xb, fxb)
        return x, xfit, xb, fxb, args, {'curT': curT}
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
| [
2,
21004,
28,
40477,
23,
198,
198,
37811,
8890,
4817,
281,
710,
4272,
11862,
8265,
526,
15931,
198,
198,
11748,
299,
32152,
355,
45941,
198,
198,
6738,
775,
27871,
20519,
13,
282,
7727,
907,
13,
3849,
32186,
1330,
978,
42289,
198,
198... | 2.584906 | 1,802 |
import sys
sys.path.append("../config_files")
import mysql.connector
from settings_project import CONFIG_MYSQL
import pandas as pd
import numpy as np
import time
#DEFINIMOS FUNCIONES
# CALCULAR MEDIAS MOVILES
# CREAMOS CICLO INFINITO CON PAUSA CADA 10S
# Live signal loop: every 10 s, reload prices, rebuild the moving-average
# crossover signal and print the current position.
while True:
    # Load data
    datos = load_bitso_data_top500(CONFIG_MYSQL)
    # Reorder oldest-first (the query returns newest-first)
    datos = datos.reindex(index=datos.index[::-1])
    datos.reset_index(inplace=True, drop=True)
    # Build moving averages
    # NOTE(review): column names say 20/160 but the windows are 10/60 --
    # confirm which is intended.
    datos['mv20'] = sma(datos.price, 10)
    datos['mv160'] = sma(datos.price, 60)
    # Fast-minus-slow spread
    datos['alpha'] = datos['mv20'] - datos['mv160']
    # Sign of the spread: +1 / -1 crossover signal
    datos['alpha_bin'] = datos['alpha'].apply(np.sign)
    if ((datos['alpha_bin'].iloc[-1] == 1) & (datos['alpha_bin'].shift(1).iloc[-1] == -1) &
        (datos['alpha_bin'].shift(2).iloc[-1] == 1) & (datos['alpha_bin'].shift(3).iloc[-1] ==1)):
        print('--Posición Larga--')
        time.sleep(10)
    elif ((datos['alpha_bin'].iloc[-1] == -1) & (datos['alpha_bin'].shift(1).iloc[-1] == 1) &
        (datos['alpha_bin'].shift(2).iloc[-1] == 1) & (datos['alpha_bin'].shift(3).iloc[-1] ==1)):
        print('--Posicion Corta--')
        time.sleep(10)
    else:
        print('Sin Posición')
        time.sleep(10)
# Forward Test: replay the crossover signal over the whole history,
# scanning from the oldest row (i == len) to the newest (i == 1).
datos = load_bitso_data_top500(CONFIG_MYSQL)
# Reorder oldest-first
datos = datos.reindex(index=datos.index[::-1])
datos.reset_index(inplace=True, drop=True)
# Build moving averages (same windows as the live loop above)
datos['mv20'] = sma(datos.price, 10)
datos['mv160'] = sma(datos.price, 60)
# Fast-minus-slow spread
datos['alpha'] = datos['mv20'] - datos['mv160']
# Sign of the spread: +1 / -1 crossover signal
datos['alpha_bin'] = datos['alpha'].apply(np.sign)
longitud_arreglo = len(datos['alpha_bin'])
for j in range(len(datos['alpha_bin'])):
    i = longitud_arreglo - j
    if ((datos['alpha_bin'].iloc[-i] == 1) & (datos['alpha_bin'].shift(1).iloc[-i] == -1) &
        (datos['alpha_bin'].shift(2).iloc[-i] == -1) & (datos['alpha_bin'].shift(3).iloc[-i] == -1)):
        print('--Posición Larga--', i)
        time.sleep(1)
    # Fix: was a second independent `if`, so rows matching the long signal
    # also fell into the `else` and printed 'Sin Posición'. `elif` makes the
    # branches mutually exclusive, matching the live loop above.
    elif ((datos['alpha_bin'].iloc[-i] == -1) & (datos['alpha_bin'].shift(1).iloc[-i] == 1) &
        (datos['alpha_bin'].shift(2).iloc[-i] == 1) & (datos['alpha_bin'].shift(3).iloc[-i] == 1)):
        print('--Posicion Corta--', i)
        time.sleep(1)
    else:
        print('Sin Posición', i)
        time.sleep(1)
11748,
25064,
198,
17597,
13,
6978,
13,
33295,
7203,
40720,
11250,
62,
16624,
4943,
198,
11748,
48761,
13,
8443,
273,
198,
6738,
6460,
62,
16302,
1330,
25626,
62,
44,
16309,
9711,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
299,
... | 2.062661 | 1,165 |
from django.test import TestCase
# from django.contrib.auth.models import User
from blog.models import Post, Comment
import blog.tests.helper as h
| [
6738,
42625,
14208,
13,
9288,
1330,
6208,
20448,
198,
2,
422,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
11787,
198,
198,
6738,
4130,
13,
27530,
1330,
2947,
11,
18957,
198,
198,
11748,
4130,
13,
41989,
13,
2978,
525,
355... | 3.355556 | 45 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Denys Duchier, IUT d'Orléans
#==============================================================================
import mud.game
from .thing import Thing
from .mixins.containing import Containing
from .location import Location
import queue
from tornado.ioloop import IOLoop
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
15069,
357,
34,
8,
1946,
5601,
893,
360,
794,
959,
11,
314,
3843,
288,
6,
5574,
45031,
504,
198,
2,
23926,
25609,
855,
198,
198,
11748,
17492,
13,
6057,
198,
6738... | 3.247619 | 105 |
#!/usr/bin/env python
'''
Module for pipelining composition.
value >> func1 >> func2 >> func3 ...
Pipelines are expressions whose resulting value can be assigned:
result = value >> func1 >> func2
Rules:
* First value can be any python value
* Functions must be chained with the '>>' operator.
* All functions must be built as attributes of a Pipeline object and must accept
one argument, that will be set using the pipelined value.
Any additional arguments must be specified in the pipeline and the value will be added
as the last argument.
* Generators are allowed. Using a generator will turn the value being pipelined
into a generator object, meaning that subsequent pipeline steps must be able
to consume the values (for example with p.list). Multiple generators will be
automatically chained, and if the last step is a generator, the whole expression
becomes a single generator ready for action!
Examples:
from pipeline import p
# This pipeline has a result of 3
'foo' >> p.len
# This pipeline chains filters and maps objects, and calls list() on them
# at the end to execute them. The result will be [1, 9, 25, 49, 81]
range(10) >> p.filter(lambda i : i%2) >> p.map(lambda i : i*i) >> p.list
# If you already have a function object (or want to define one with lambda),
# pass it as a parameter to p():
'foo' >> p(lambda x: x.upper())
'foo' >> p('The word was {}'.format)
# if imported symbols are used, they must be passed
# to the Pipeline constructor. This example counts
# the links in the python.org page, but since 'findall'
#'is imported, we must build a Pipeline object using
# the globals() array:
from pipeline import Pipeline
from urllib.request import urlopen
from re import findall
p = Pipeline(globals())
url = 'http://python.org'
urlopen(url).read() >> p.findall(b'href="') >> p.len >> p('{} hrefs'.format)
Generator support using the special "p.value" keyword:
range(10) >> p(x*2 for x in p.value if x%2==0) >> p(x*3 for x in p.value)
The result will be a generator, that is, nothing is executed until
the final generator will be asked to produce the values!
'''
import hack
import types
# Module-level singleton used throughout the docstring examples
# (`from pipeline import p`).
p = Pipeline()
# Dummy iterable: gives `p.value` something to stand in for before a real
# pipelined value replaces it (generator expressions reference p.value).
p.value = '' # Dummy iterable
# __oOo__
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
7061,
6,
198,
26796,
329,
7347,
417,
3191,
11742,
13,
628,
220,
220,
220,
1988,
9609,
25439,
16,
9609,
25439,
17,
9609,
25439,
18,
2644,
198,
198,
47,
541,
20655,
389,
14700,
3025,
7... | 3.26087 | 713 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import glob
from os.path import join, dirname
sys.path.insert(0, join(dirname(__file__), '..'))
sys.path.insert(0, join(dirname(__file__), '../..'))
import cv2
import numpy as np
if __name__ == '__main__':
    # Overlay scale factors for the nav image and the costmap inset.
    rate = 1.25
    rate2 = 1.0
    dataset = {}
    fps = 30
    video_size = (1280, 720)
    # MJPG-encoded output video assembled from the dataset frames.
    videoWriter = cv2.VideoWriter("/media/wang/Data/video/first-person/2.mp4", cv2.VideoWriter_fourcc(*'MJPG'), fps, video_size)
    # Pre-index the file lists for each recording session.
    # `read_files` / `find_nn` are defined elsewhere in this file.
    for index in [2,4,5]:
        img_list, pcd_list, nav_list, cost_list, out_list = read_files(index)
        dataset[index] = {'img_list':img_list, 'pcd_list':pcd_list, 'nav_list':nav_list, 'cost_list':cost_list, 'out_list':out_list}
    # Only session 2 is rendered here (the other sessions stay loaded).
    for index in [2]:
        choose_dataset = dataset[index]
        for ts in choose_dataset['img_list']:
            img = cv2.imread('/media/wang/Data/video/data'+str(index)+'/output/'+ts+'.png')
            #print(img.shape) #(720, 1280, 3)
            if img is None: continue
            #img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
            # Match each camera frame with the nearest-in-time nav/cost frame.
            nav_ts = find_nn(ts, choose_dataset['nav_list'])
            cost_ts = find_nn(ts, choose_dataset['cost_list'])
            nav = cv2.imread('/media/wang/Data/video/data'+str(index)+'/nav/'+nav_ts+'.png')
            costmap = cv2.imread('/media/wang/Data/video/data'+str(index)+'/cost/'+cost_ts+'.png')
            nav = cv2.cvtColor(nav, cv2.COLOR_BGR2RGB) #(160, 200, 3)
            #input_img = get_img(img, nav)
            # Paste the nav image into the top-right corner, the costmap into
            # the top-left.
            nav = cv2.resize(nav, (int(200*rate), int(rate*160)))
            img[0:int(rate*160), -int(200*rate):] = nav
            img[0:int(rate2*200), 0:int(400*rate2)] = costmap
            cv2.imshow('img', img)
            videoWriter.write(img)
            #cv2.imshow('costmap', costmap)
            cv2.waitKey(1)
    cv2.destroyAllWindows()
    videoWriter.release()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
25064,
198,
11748,
15095,
198,
6738,
28686,
13,
6978,
1330,
4654,
11,
26672,
3672,
198,
17597,
13,
6978,
13,... | 1.967078 | 972 |
from gephistreamer import graph
from gephistreamer import streamer
#https://github.com/totetmatt/GephiStreamer
#DA FARE
# Stream a tiny example graph to a running Gephi instance over its
# websocket "Graph Streaming" workspace.
stream = streamer.Streamer(streamer.GephiWS(hostname="localhost", port=8080, workspace="workspace0"),)
node_a = graph.Node("A",custom_property=1)
node_b = graph.Node("B",custom_property=2)
# NOTE(review): node_c and node_d are created but never added to the stream
# -- intentional? (the "DA FARE" comment above suggests work in progress).
node_c = graph.Node("C",custom_property=3)
node_d = graph.Node("D",custom_property=4)
stream.add_node(node_a,node_b)
edge_ab = graph.Edge(node_a,node_b,custom_property="hello")
stream.add_edge(edge_ab)
| [
6738,
308,
538,
10034,
1476,
263,
1330,
4823,
198,
6738,
308,
538,
10034,
1476,
263,
1330,
4269,
263,
198,
2,
5450,
1378,
12567,
13,
785,
14,
83,
313,
316,
76,
1078,
14,
38,
538,
5303,
28696,
198,
2,
5631,
376,
12203,
198,
5532,
7... | 2.65285 | 193 |
#!/usr/bin/env python
# Copyright (c) 2018, Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import os
import time
import json
import rospy
import hashlib
import sqlite3
import time
from optparse import OptionParser
from tts.srv import Synthesizer, SynthesizerResponse
from tts.srv import PollyResponse
from tts.db import DB
class SpeechSynthesizer:
"""This class serves as a ROS service node that should be an entry point of a TTS task.
Although the current implementation uses Amazon Polly as the synthesis engine, it is not hard to let it support
more heterogeneous engines while keeping the API the same.
In order to support a variety of engines, the SynthesizerRequest was designed with flexibility in mind. It
has two fields: text and metadata. Both are strings. In most cases, a user can ignore the metadata and call
the service with some plain text. If the use case needs any control or engine-specific feature, the extra
information can be put into the JSON-form metadata. This class will use the information when calling the engine.
The decoupling of the synthesizer and the actual synthesis engine will benefit the users in many ways.
First, a user will be able to use a unified interface to do the TTS job and have the freedom to use different
engines available with no or very little change from the client side.
Second, by applying some design patterns, the synthesizer can choose an engine dynamically. For example, a user
may prefer to use Amazon Polly but is also OK with an offline solution when network is not reliable.
Third, engines can be complicated, thus difficult to use. As an example, Amazon Polly supports dozens of parameters
and is able to accomplish nontrivial synthesis jobs, but majority of the users never need those features. This
class provides a clean interface with two parameters only, so that it is much easier and pleasant to use. If by
any chance the advanced features are required, the user can always leverage the metadata field or even go to the
backend engine directly.
Also, from an engineering perspective, simple and decoupled modules are easier to maintain.
This class supports two modes of using polly. It can either call a service node or use AmazonPolly as a library.
Start the service node::
$ rosrun tts synthesizer_node.py # use default configuration
$ rosrun tts synthesizer_node.py -e POLLY_LIBRARY # will not call polly service node
Call the service::
$ rosservice call /synthesizer 'hello' ''
$ rosservice call /synthesizer '<speak>hello</speak>' '"{\"text_type\":\"ssml\"}"'
"""
    class DummyEngine:
        """A dummy engine which exists to facilitate testing. Can either
        be set to act as if it is connected or disconnected. Will create files where
        they are expected, but they will not be actual audio files.

        NOTE(review): neither `connected` nor `file_size` is initialised in a
        constructor -- callers must invoke set_connection() and
        set_file_sizes() before __call__, or an AttributeError is raised.
        """

        def __call__(self, **kwargs):
            """put a file at the specified location and return resonable dummy
            values. If not connected, fills in the Exception fields.

            Args:
                **kwarks: dictionary with fields: output_format, voice_id, sample_rate,
                  text_type, text, output_path
            Returns: A json version of a string with fields: Audio File, Audio Type,
            Exception (if there is an exception), Traceback (if there is an exception),
            and if succesful Amazon Polly Response Metadata
            """
            if self.connected:
                # "Connected": write `file_size` random bytes where the real
                # engine would put the synthesized audio.
                with open(kwargs['output_path'], 'wb') as f:
                    f.write(os.urandom(self.file_size))
                output_format = kwargs['OutputFormat'] if 'OutputFormat' in kwargs else 'ogg_vorbis'
                resp = json.dumps({
                    'Audio File': kwargs['output_path'],
                    'Audio Type': output_format,
                    'Amazon Polly Response Metadata': {'some header': 'some data'}
                })
                return SynthesizerResponse(resp)
            else:
                # "Disconnected": mimic the real engine's connection-error
                # response, pointing at the bundled error audio clip.
                current_dir = os.path.dirname(os.path.abspath(__file__))
                error_ogg_filename = 'connerror.ogg'
                error_details = {
                    'Audio File': os.path.join(current_dir, '../src/tts/data', error_ogg_filename),
                    'Audio Type': 'ogg',
                    'Exception': {
                        'dummy head': 'dummy val'
                        # 'Type': str(exc_type),
                        # 'Module': exc_type.__module__,
                        # 'Name': exc_type.__name__,
                        # 'Value': str(e),
                    },
                    'Traceback': 'some traceback'
                }
                return SynthesizerResponse(json.dumps(error_details))

        def set_connection(self, connected):
            """set the connection state

            Args:
                connected: boolean, whether to act connected or not
            """
            self.connected = connected

        def set_file_sizes(self, size):
            """Set the target file size for future files in bytes

            Args:
                size: the number of bytes to make the next files
            """
            self.file_size = size
# Registry of available synthesis engines, keyed by the identifier accepted
# on the command line (see the module docstring's '-e POLLY_LIBRARY' example).
# 'POLLY_SERVICE' routes synthesis through the polly service node,
# 'POLLY_LIBRARY' calls Amazon Polly in-process, and 'DUMMY' is the offline
# test double defined above.
ENGINES = {
    'POLLY_SERVICE': PollyViaNode,
    'POLLY_LIBRARY': PollyDirect,
    'DUMMY': DummyEngine,
}

#TODO: expose this max_cache_bytes value to the roslaunch system (why is rosparam not used in this file?)
def _call_engine(self, **kw):
    """Call engine to do the job, caching engine output on disk.

    If no output path is found from input, the audio file will be put into
    /tmp and the file name will have a prefix of the md5 hash of the full
    request. In that case the utterance is added to the cache (an on-disk
    DB keyed by that hash). If a filename IS specified, we assume the file
    is being managed by the user: the engine is called directly and nothing
    is added to the cache.

    :param kw: what AmazonPolly needs to synthesize
    :return: response from AmazonPolly
    """
    if 'output_path' not in kw:
        # Deterministic cache key over the whole request (sort_keys makes
        # the JSON stable), so identical requests map to one file.
        tmp_filename = hashlib.md5(
            json.dumps(kw, sort_keys=True)).hexdigest()
        tmp_filepath = os.path.join(
            os.sep, 'tmp', 'voice_{}'.format(tmp_filename))
        kw['output_path'] = os.path.abspath(tmp_filepath)
        rospy.loginfo('managing file with name: {}'.format(tmp_filename))
        # because the hash will include information about any file ending choices, we only
        # need to look at the hash itself.
        db = DB()
        db_search_result = db.ex(
            'SELECT file, audio_type FROM cache WHERE hash=?', tmp_filename).fetchone()
        current_time = time.time()
        file_found = False
        if db_search_result:  # then there is data
            # check if the file exists, if not, remove from db
            # TODO: add a test that deletes a file without telling the db and tries to synthesize it
            if os.path.exists(db_search_result['file']):
                file_found = True
                # Cache hit: refresh the LRU timestamp and answer from disk
                # without invoking the engine at all.
                db.ex('update cache set last_accessed=? where hash=?',
                      current_time, tmp_filename)
                synth_result = PollyResponse(json.dumps({
                    'Audio File': db_search_result['file'],
                    'Audio Type': db_search_result['audio_type'],
                    'Amazon Polly Response Metadata': ''
                }))
                rospy.loginfo('audio file was already cached at: %s',
                              db_search_result['file'])
            else:
                rospy.logwarn(
                    'A file in the database did not exist on the disk, removing from db')
                db.remove_file(db_search_result['file'])
        if not file_found:  # haven't cached this yet
            rospy.loginfo('Caching file')
            synth_result = self.engine(**kw)
            res_dict = json.loads(synth_result.result)
            # Only successful syntheses are recorded in the cache.
            if 'Exception' not in res_dict:
                file_name = res_dict['Audio File']
                if file_name:
                    file_size = os.path.getsize(file_name)
                    db.ex('''insert into cache(
                        hash, file, audio_type, last_accessed,size)
                        values (?,?,?,?,?)''', tmp_filename, file_name,
                          res_dict['Audio Type'], current_time, file_size)
                    rospy.loginfo(
                        'generated new file, saved to %s and cached', file_name)
            # make sure the cache hasn't grown too big: evict the least
            # recently accessed file while over budget, but always keep at
            # least one file.
            while db.get_size() > self.max_cache_bytes and db.get_num_files() > 1:
                remove_res = db.ex(
                    'select file, min(last_accessed), size from cache'
                ).fetchone()
                db.remove_file(remove_res['file'])
                rospy.loginfo('removing %s to maintain cache size, new size: %i',
                              remove_res['file'], db.get_size())
    else:
        # Caller supplied an explicit output_path: the file is user-managed,
        # so call the engine directly and bypass the cache entirely.
        synth_result = self.engine(**kw)
    return synth_result
def _parse_request_or_raise(self, request):
"""It will raise if request is malformed.
:param request: an instance of SynthesizerRequest
:return: a dict
"""
md = json.loads(request.metadata) if request.metadata else {}
md['output_format'] = md.get('output_format', self.default_output_format)
md['voice_id'] = md.get('voice_id', self.default_voice_id)
md['sample_rate'] = md.get('sample_rate', '16000' if md['output_format'].lower() == 'pcm' else '22050')
md['text_type'] = md.get('text_type', self.default_text_type)
md['text'] = request.text
return md
def _node_request_handler(self, request):
    """Callback for processing a synthesis service request.

    Never raises: any unexpected failure is converted into a
    SynthesizerResponse carrying the exception text.

    :param request: an instance of SynthesizerRequest
    :return: a SynthesizerResponse
    """
    rospy.loginfo(request)
    try:
        engine_kwargs = self._parse_request_or_raise(request)
        synthesis = self._call_engine(**engine_kwargs)
        return SynthesizerResponse(synthesis.result)
    except Exception as err:
        return SynthesizerResponse('Exception: {}'.format(err))
def start(self, node_name='synthesizer_node', service_name='synthesizer'):
    """Entry point of the ROS service node.

    Registers the synthesizer service and blocks forever.

    :param node_name: name of ROS node
    :param service_name: name of ROS service
    :return: it doesn't return
    """
    rospy.init_node(node_name)
    ros_service = rospy.Service(service_name, Synthesizer, self._node_request_handler)
    rospy.loginfo('{} running: {}'.format(node_name, ros_service.uri))
    # Block the main thread and hand control to the ROS event loop.
    rospy.spin()
if __name__ == "__main__":
    # Script entry point. NOTE(review): main() is not defined in this portion
    # of the file -- presumably defined elsewhere in the module; confirm.
    main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
2,
15069,
357,
66,
8,
2864,
11,
6186,
13,
785,
11,
3457,
13,
393,
663,
29116,
13,
1439,
6923,
33876,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
1... | 2.335256 | 5,029 |
# -*- coding: utf-8 -*-
"""
Writable String Object | Stores data to simplify writing outputs.
Created on Sat Nov 28 19:06:09 2020
Version 1.3.0 (debug support) A-05-2021 @ 14:34 UTC -5
Requires: NONE
@author: Kinetos#6935
"""
import argparse
class wso(object):
    """
    A class to simplify writing outputs when printing.

    Attributes
    ----------
    template : str
        Template used to format the printing string.
    outfile : str
        Path to the text file used for writing.
    writeable : bool
        Whether the write methods should write to a file when called.
    debug : bool
        Whether dbwp() should emit output (debug mode).
    string : str
        Current formatted string to write or print.

    Methods
    -------
    fm(*args):
        Format the template string using *args. Stores result in self.string.
    wp():
        Write to file and print the currently stored string.
    p():
        Print the currently stored string.
    w():
        Write the currently stored string to the outfile.
    clean():
        Open the outfile with 'w' setting to clear any existing contents.
    fmwp(*args):
        fm(*args) followed by wp().
    dbwp(*args):
        fm(*args) followed by wp(), only in debug mode.
    """

    def __init__(self, template='', outfile='out.txt', writeable=False,
                 debug=False):
        """
        Initialize all attributes so every method is safe to call right away.

        Bug fix: previously no __init__ existed, so calling wp()/w()/clean()
        or dbwp() before the corresponding setters raised AttributeError.
        All parameters are optional, keeping the old `wso()` call working.

        Parameters
        ----------
        template : str, optional
            Template used to format the printing string.
        outfile : str, optional
            Path to the text file used for writing.
        writeable : bool, optional
            Whether the write methods should write to a file when called.
        debug : bool, optional
            Whether dbwp() should emit output.
        """
        self.template = template
        self.outfile = outfile
        self.writeable = bool(writeable)
        self.debug = bool(debug)
        self.string = ''

    def set_template(self, template):
        """
        Setter for template.

        Parameters
        ----------
        template : str
            Template used to format the printing string.
        """
        self.template = template

    def set_outfile(self, filepath):
        """
        Setter for outfile.

        Parameters
        ----------
        filepath : str
            New path to text file to use for writing.
        """
        self.outfile = filepath

    def set_writeable(self, writeable):
        """
        Setter for writeable (coerced to bool).

        Parameters
        ----------
        writeable : bool
            Whether the write methods should write to a file when called.
        """
        self.writeable = bool(writeable)

    def set_debug(self, debug):
        """
        Setter for debug (coerced to bool).

        Parameters
        ----------
        debug : bool
            Whether dbwp() should emit output.
        """
        self.debug = bool(debug)

    def set_string(self, string):
        """
        Setter for string.

        Parameters
        ----------
        string : str
            New formatted string to write or print.
        """
        self.string = string

    def get_template(self):
        """
        Getter for template.

        Returns
        -------
        template : str
            Template used to format the printing string.
        """
        return self.template

    def get_outfile(self):
        """
        Getter for outfile.

        Returns
        -------
        outfile : str
            Path to the text file used for writing.
        """
        return self.outfile

    def get_string(self):
        """
        Getter for string.

        Returns
        -------
        string : str
            Current formatted string to write or print.
        """
        return self.string

    def fm(self, *args):
        """
        Format the template string using *args. Stores result in self.string.

        Parameters
        ----------
        *args : object
            Values given as inputs for str.format().
        """
        self.string = self.template.format(*args)

    def wp(self):
        """
        Write to file (when writeable) and print the currently stored string.
        """
        if self.writeable:
            with open(self.outfile, 'a') as f:
                f.write(self.string + '\n')
        print(self.string)

    def p(self):
        """
        Print the currently stored string.
        """
        print(self.string)

    def w(self):
        """
        Write the currently stored string to the outfile (when writeable).
        """
        if self.writeable:
            with open(self.outfile, 'a') as f:
                f.write(self.string)

    def clean(self):
        """
        Open the outfile with 'w' setting to clear any existing contents
        (when writeable).
        """
        if self.writeable:
            open(self.outfile, 'w').close()

    def fmwp(self, *args):
        """
        Perform fm() followed by wp().

        Parameters
        ----------
        *args : object
            Values given as inputs for str.format().
        """
        self.fm(*args)
        self.wp()

    def dbwp(self, *args):
        """
        If debug mode is enabled, perform fm() followed by wp().

        Parameters
        ----------
        *args : object
            Values given as inputs for str.format().
        """
        if self.debug:
            self.fm(*args)
            self.wp()
def generate_outfile_parser(description):
    """
    Reusable -o and -O arguments.

    Parameters
    ----------
    description : str
        Description for the argument parser, usually __doc__ or some variant.

    Returns
    -------
    p : argparse.ArgumentParser
        Created ArgumentParser object with -o, -O, and the given description.
    """
    # Bug fix: pass the description by keyword. ArgumentParser's first
    # positional parameter is ``prog`` (the program name), so the original
    # positional call set the program name and left the description empty.
    p = argparse.ArgumentParser(description=description)
    p.add_argument("-o", "--outfile", dest="o", action="store_true",
                   help="output printed results to default file: out.txt")
    p.add_argument("-O", dest="oname", metavar="NAME",
                   help="output printed results to text file w/ custom path")
    return p
def implement_outfile_parser(args):
    """
    Implement -o and -O arguments added by generate_outfile_parser.

    Parameters
    ----------
    args : argparse args object
        Results of parse_args() when called on an Argument Parser object.

    Returns
    -------
    outfile : str, None
        None if neither args.oname nor args.o was set, do not output to file.

    Notes
    -----
    Use for docstring in methods that accept outfile as an argument:
    outfile : str, None
        Path to output text file. Disables writing to file if set to None.
    """
    # An explicit -O name wins over the -o default; ensure a .txt suffix.
    if args.oname is not None:
        name = args.oname.strip()
        return name if name.endswith(".txt") else name + ".txt"
    if args.o:
        return "out.txt"
    return None
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
20257,
540,
10903,
9515,
930,
41835,
1366,
284,
30276,
3597,
23862,
13,
198,
41972,
319,
7031,
5267,
2579,
678,
25,
3312,
25,
2931,
12131,
198,
198,
14815,
3... | 2.26374 | 2,802 |
from app.schema.answer import Answer
from app.schema.exceptions import TypeCheckingException
from app.schema.widgets.month_year_date_widget import MonthYearDateWidget
from app.validation.month_year_date_type_check import MonthYearDateTypeCheck
| [
6738,
598,
13,
15952,
2611,
13,
41484,
1330,
23998,
198,
6738,
598,
13,
15952,
2611,
13,
1069,
11755,
1330,
5994,
9787,
278,
16922,
198,
6738,
598,
13,
15952,
2611,
13,
28029,
11407,
13,
8424,
62,
1941,
62,
4475,
62,
42655,
1330,
1606... | 3.602941 | 68 |
from flask import Flask, render_template, url_for, flash, redirect

app = Flask(__name__)

# NOTE(review): the route decorators below are not followed by view
# functions -- the function bodies appear to have been lost (this row of the
# dump is truncated). As written, a bare decorator stack before an `if`
# statement is a SyntaxError; the view functions must be restored for the
# app to run. Routes registered: index, /next, /upcoming, /rockets and a
# parameterized /rockets/<rocket>.
@app.route('/')

@app.route('/next')

@app.route('/upcoming')

@app.route('/rockets')

@app.route('/rockets/<rocket>')

if __name__ == "__main__":
    # Listen on all interfaces with the debugger enabled (development only).
    app.run(debug=True, host='0.0.0.0')
| [
6738,
42903,
1330,
46947,
11,
8543,
62,
28243,
11,
19016,
62,
1640,
11,
7644,
11,
18941,
201,
198,
201,
198,
1324,
796,
46947,
7,
834,
3672,
834,
8,
201,
198,
201,
198,
31,
1324,
13,
38629,
10786,
14,
11537,
201,
198,
201,
198,
31... | 2.210526 | 133 |
from testing_collection import visualisation_tests as visu_tests
from Classes.CurrentDocument import CurrentDoc
def run_tests(curr_doc: CurrentDoc):
    """
    Manual test launcher: uncomment exactly the test case you want to run.

    1.) write test case and add it to file test_cases.py
    2.) Call it in this function (run_tests() will be called in 'System_of_Beams\main.py'
    3.) make sure, the variable 'run_tests' in the file main.py is set to true
    4.) Only the latest run test can be plotted (no opportunities up to now to run one after another)
    5.) Results will be visualized at the bokeh server

    :param curr_doc: document/session state shared with the bokeh server
    """
    """
    VISUALISATION TESTS
    """
    # Each commented line below is a ready-made test invocation kept as a
    # catalog; the trailing #DD.MM comments record when a case last ran.
    # print("Single beam lineload test")
    # visu_tests.single_beam_lineload_visu(curr_doc)
    # print("Final Software lab structure")
    # visu_tests.final_structure_software_lab(curr_doc)
    print('Test example Quirin')  #19.11
    visu_tests.example_unterlagen_visu(curr_doc)
    # print("Visualise all possible nodedep elements")
    # visu_tests.vis_all_possible_nodedep_ele(curr_doc)
    """
    CALCULATION TESTS
    """
    # print("Single beam lineload test") #24.11
    # test_cases.single_beam_lineload_test(curr_doc)
    # print('normal line load') #24.11
    # test_cases.single_beam_normal_lineload_test(curr_doc)
    # print("Single beam clamping test") #24.11
    # test_cases.single_clamping_left_side(curr_doc)
    # print("Two beam lineload test") #17.11
    # test_cases.two_beam_lineload_test(curr_doc)
    # print("Two beam lineload overdefined test") #17.11
    # test_cases.single_beam_lineload_test_overdefined(curr_doc)
    # print("Single beam lineload test underdefined") #24.11
    # test_cases.single_beam_lineload_test_underdefined(curr_doc)
    # print('Big beam out of free elements') #17.11
    # test_cases.two_beam_combined_to_one_complete_lineload_test(curr_doc)
    # print('Big beam out of free elements 2 l') #17.11
    # test_cases.two_beam_combined_to_one_complete_lineload_test_2l(curr_doc)
    # print('Single load in the middle') #17.11
    # test_cases.two_beam_combined_to_one_single_load_middle(curr_doc)
    # print('Seperated elements') #17.11
    # test_cases.single_beam_lineload_test_seperated_elements(curr_doc)
    # print('Joint test') #18.11
    # test_cases.two_beam_combined_to_one_single_load_middle_joint(curr_doc)
    #
    # print('Clamping with single load test') #17.11
    # test_cases.single_clamping_left_side_single_load(curr_doc)
    # print('TM example') #17.11
    # test_cases.example_from_sheet_2_4(curr_doc)
    # print('Trapezlast') #17.11
    # test_cases.single_beam_trapezload_test(curr_doc)
    # print('Temperature test') #17.11
    # test_cases.single_beam_temperature_test(curr_doc)
    # print('Triangle test') #17.11
    # test_cases.two_beam_triangle_load_middle(curr_doc)
    # print('Temperature clamping') #18.11
    # test_cases.single_clamping_left_side_temperature(curr_doc)
    # print('ss13') #17.11
    # test_cases.example_ss13(curr_doc)
    # print('ss12') #17.11
    # test_cases.example_ss12(curr_doc)
    #
    # print('ss12_vereinfacht') #17.11
    # test_cases.example_ss12_vereinfacht(curr_doc)
    # print('ss11') #17.11
    # test_cases.example_ss11(curr_doc)
    # print('ss14') #19.11
    # test_cases.example_ss14(curr_doc)
    # print('schraeg') #17.11
    # test_cases.single_beam_schraeg(curr_doc)
    # print('vertical') #17.11
    # test_cases.single_beam_lineload_vertical_test(curr_doc)
    # print('vertical single load') #17.11
    # test_cases.single_beam_single_load_vertical_test(curr_doc)
    # print('Test Ecke') #17.11
    # test_cases.two_beam_corner_line_load(curr_doc)
    # print('triangle_not_symmetric') #17.11
    # test_cases.two_beam_triangle_load_middle_not_symmetrical(curr_doc)
    # print('Test example Quirin') #19.11
    # test_cases.example_unterlagen_test(curr_doc)
    # print('Test Quirin vereinfacht') #19.11
    # test_cases.example_unterlagen_test_vereinfacht(curr_doc)
    # print('test cos') #18.11
    # test_cases.single_beam_cos_test(curr_doc)
    # print('test multiple elements') #19.11
    # test_cases.multiple_elements(curr_doc)
    # print('test case spring') #24.11
    # test_cases.example_2_3_neu(curr_doc)
    # print('Test case ss 15') #24.11
    # test_cases.example_ss15(curr_doc)
    # print('Test case ss 16') #24.11
    # test_cases.example_SS_16(curr_doc)
    # test_cases.single_beam_lineload_test_infinity(curr_doc)
    # test_cases.final_structure_software_lab(curr_doc)
    # test_cases.final_structure_software_lab(curr_doc)
| [
6738,
4856,
62,
43681,
1330,
5874,
5612,
62,
41989,
355,
1490,
84,
62,
41989,
198,
6738,
38884,
13,
11297,
24941,
1330,
9236,
23579,
628,
198,
4299,
1057,
62,
41989,
7,
22019,
81,
62,
15390,
25,
9236,
23579,
2599,
198,
220,
220,
220,
... | 1.738555 | 3,626 |
import pygrib
import numpy as np
from collections import defaultdict
from collections import namedtuple
from collections import Iterable
import xarray as xr
def grb_msg_to_xr(message, has_levels=True):
    """
    Convert a single grib message to xarray.

    :param message: the decoded GRIB message to convert
    :type message: pygrib message
    :param has_levels: If True, add a level coordinate.
    :rtype: xarray.DataArray
    """
    # Reconstruct the regular lat/lon grid from the message header.
    # NOTE(review): only the *first* longitude is negated, the last is not --
    # this looks specific to the grids this code was written for (e.g. a
    # 0..360 -> -180..180 correction); confirm against the source GRIB data.
    lons = np.linspace(-float(message['longitudeOfFirstGridPointInDegrees']),
                       float(message['longitudeOfLastGridPointInDegrees']),
                       int(message['Ni']))
    lats = np.linspace(float(message['latitudeOfFirstGridPointInDegrees']),
                       float(message['latitudeOfLastGridPointInDegrees']),
                       int(message['Nj']))
    coords = {
        'time': message.analDate,
        'lat': lats,
        'lon': lons,
    }
    if has_levels:
        coords['level'] = message.level
    # set up data variables
    values = message.values  # values in lat, lon
    attrs = dict()
    attrs['units'] = message.units
    attrs['standard_name'] = message.cfName
    attrs['long_name'] = message.name
    attrs['parameter_id'] = message.paramId
    # Variable name: lowercase with underscores, e.g. "2 metre temperature"
    # -> "2_metre_temperature".
    da = xr.DataArray(data=values,
                      dims=('lat', 'lon'),
                      coords=coords,
                      name=message.name.lower().replace(' ', '_'),
                      attrs=attrs)
    # Expand dimensions: add singleton 'level' (when present) and 'time'
    # axes so multiple messages can later be concatenated along them.
    if 'level' in coords:
        da = da.expand_dims('level', 2)
    da = da.expand_dims('time', len(coords) - 1)
    # Attributes: CF-style metadata on the coordinate variables.
    da['lat'].attrs['standard_name'] = 'latitude'
    da['lat'].attrs['long_name'] = 'Latitude'
    da['lat'].attrs['units'] = 'degrees_north'
    da['lat'].attrs['axis'] = 'Y'
    da['lon'].attrs['standard_name'] = 'longitude'
    da['lon'].attrs['long_name'] = 'Longitude'
    da['lon'].attrs['units'] = 'degrees_east'
    da['lon'].attrs['axis'] = 'X'
    da['time'].attrs['standard_name'] = 'time'
    da['time'].attrs['long_name'] = 'Time'
    if 'level' in coords:
        da['level'].attrs['long_name'] = 'ECMWF model level'
    return da
| [
11748,
12972,
70,
822,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
17268,
1330,
4277,
11600,
198,
6738,
17268,
1330,
3706,
83,
29291,
198,
6738,
17268,
1330,
40806,
540,
198,
11748,
2124,
18747,
355,
2124,
81,
628,
198,
4299,
1036,
6... | 2.215098 | 967 |
# -*- coding: utf-8 -*-
# FLEDGE_BEGIN
# See: http://fledge.readthedocs.io/
# FLEDGE_END
""" Test add service using poll and async plugins for both python & C version REST API """
import os
import http.client
import json
import time
from uuid import UUID
from collections import Counter
from urllib.parse import quote
import pytest
import plugin_and_service
__author__ = "Ashish Jabble"
__copyright__ = "Copyright (c) 2019 Dianomic Systems"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
# Service and filter names reused by the add-service tests in this module.
SVC_NAME_1 = 'Random Walk #1'
SVC_NAME_2 = 'HTTP-SOUTH'
SVC_NAME_3 = '1 Bench'
SVC_NAME_4 = 'Rand 1 #3'
# Same service name under two aliases; the second is used by the C async tests.
SVC_NAME_5 = SVC_NAME_C_ASYNC = "Async 1"
SVC_NAME_6 = 'randomwalk'
PLUGIN_FILTER = 'metadata'  # filter plugin under test
FILTER_NAME = 'meta'        # name given to the filter instance
@pytest.fixture
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
9977,
1961,
8264,
62,
33,
43312,
198,
2,
4091,
25,
2638,
1378,
69,
2965,
13,
961,
83,
704,
420,
82,
13,
952,
14,
198,
2,
9977,
1961,
8264,
62,
10619,
198,
... | 2.614035 | 285 |
from rest_framework import serializers
from aggregate.stores.models import Store
| [
6738,
1334,
62,
30604,
1330,
11389,
11341,
198,
198,
6738,
19406,
13,
43409,
13,
27530,
1330,
9363,
628
] | 4.611111 | 18 |
import os
import sys
import unittest
from parameterized import parameterized
sys.path.append(os.path.join(os.path.dirname(__file__), '../../tests/system'))
import metricbeat
| [
11748,
28686,
198,
11748,
25064,
198,
11748,
555,
715,
395,
198,
6738,
11507,
1143,
1330,
11507,
1143,
198,
198,
17597,
13,
6978,
13,
33295,
7,
418,
13,
6978,
13,
22179,
7,
418,
13,
6978,
13,
15908,
3672,
7,
834,
7753,
834,
828,
705... | 3.2 | 55 |
import requests
import time
import json

# Base URL of the public OpenDota REST API.
url = 'https://api.opendota.com/api/'

# Players of interest, keyed by a short handle; 'pid' is the OpenDota
# (Steam32) account id used in API paths.
gang = {
    'will': {
        'name': 'Will',
        'pid': '67798385'
    },
    'pat': {
        'name': 'Pat',
        'pid': '52147853'
    },
    'james': {
        'name': 'James',
        'pid': '84941438'
    },
    'tibi': {
        'name': 'Tibi',
        'pid': '72600614'
    }
}

# The triple-quoted sections below are earlier pipeline stages, kept disabled:
# stage 1 downloaded each player's match list, stage 2 reduced it to match
# ids, stage 3 merged everything into data/allMatches.json.
'''
# Player matches
for pl in gang:
    pm = requests.get(url + 'players/' + gang[pl].get('pid') + '/matches')
    with open('data/%s.json' % pl, 'w') as f:
        f.write(pm.text)
    time.sleep(1)
'''

'''
# Player match info
for pl in gang:
    input_file = open('data/%s.json' % pl, 'r')
    json_array = json.load(input_file)
    match_list = []
    for item in json_array:
        matchId = {"match_id":None}
        matchId['match_id'] = item['match_id']
        match_list.append(matchId)
    with open('data/%sMatches.json' % pl, 'w') as f:
        json.dump(match_list, f)
'''

'''
# Filtered All Matches
for pl in gang:
    input_file = open('data/%sMatches.json' % pl, 'r')
    json_array = json.load(input_file)
    all_matches = []
    for match in json_array:
        matchId = {"match_id":None}
        matchId['match_id'] = match['match_id']
        all_matches.append(matchId)
    with open('data/allMatches.json', 'w') as f:
        json.dump(all_matches, f)
'''

# Match GET: download full match details for every collected match id,
# one file per match, sleeping 1s between requests to respect rate limits.
with open('data/allMatches.json', 'r') as f:
    json_array = json.load(f)
    for match in json_array:
        matchId = {"mid":None}
        matchId['mid'] = str(match['match_id'])
        am = requests.get(url + 'matches/' + matchId['mid'])
        with open('data/matches/%s.json' % matchId['mid'], 'w', encoding="utf8") as f:
            f.write(am.text)
        time.sleep(1)
11748,
7007,
201,
198,
11748,
640,
201,
198,
11748,
33918,
201,
198,
201,
198,
6371,
796,
705,
5450,
1378,
15042,
13,
404,
437,
4265,
13,
785,
14,
15042,
14,
6,
201,
198,
201,
198,
28284,
796,
1391,
201,
198,
220,
220,
220,
705,
1... | 1.963597 | 934 |
"""A series of conversion tools for Quaternions and Euler Angles.
These functions are modified versions of the algorithms found here:
https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
Written by: Zahi Kakish (zmk5)
"""
from typing import Tuple
import numpy as np
def quaternion_to_roll(x: float, y: float, z: float, w: float) -> float:
    """Extract the roll (rotation about the x-axis) Euler angle, in radians."""
    # Standard quaternion-to-Euler roll identity: atan2 of the two terms.
    opposite = 2.0 * (w * x + y * z)
    adjacent = 1.0 - 2.0 * (x * x + y * y)
    return np.arctan2(opposite, adjacent)
def quaternion_to_pitch(x: float, y: float, z: float, w: float) -> float:
    """Extract the pitch (rotation about the y-axis) Euler angle, in radians."""
    sin_pitch = 2 * (w * y - z * x)
    # Inside the arcsine domain, return the exact angle; otherwise clamp to
    # +/-90 degrees (gimbal-lock boundary), preserving the sign.
    if np.abs(sin_pitch) < 1.0:
        return np.arcsin(sin_pitch)
    return np.copysign(np.pi / 2, sin_pitch)
def quaternion_to_yaw(x: float, y: float, z: float, w: float) -> float:
    """Extract the yaw (rotation about the z-axis) Euler angle, in radians."""
    # Standard quaternion-to-Euler yaw identity: atan2 of the two terms.
    opposite = 2 * (w * z + x * y)
    adjacent = 1 - 2 * (y * y + z * z)
    return np.arctan2(opposite, adjacent)
def quaternion_to_euler(
        x: float,
        y: float,
        z: float,
        w: float) -> Tuple[float, float, float]:
    """Convert a quaternion to a (roll, pitch, yaw) tuple of Euler angles."""
    roll = quaternion_to_roll(x, y, z, w)
    pitch = quaternion_to_pitch(x, y, z, w)
    yaw = quaternion_to_yaw(x, y, z, w)
    return (roll, pitch, yaw)
def roll_to_quaternion(roll: float) -> Tuple[float, float, float, float]:
    """Convert only a Roll Euler angle to its respective Quaternion values.

    With pitch and yaw fixed at zero, only the x and w components are
    nonzero, so they are computed directly instead of multiplying through
    hard-coded 1.0/0.0 factors as before (same results, no dead arithmetic).

    :param roll: rotation about the x-axis, in radians
    :return: quaternion as an (x, y, z, w) tuple
    """
    half = roll * 0.5
    return (np.sin(half), 0.0, 0.0, np.cos(half))
def pitch_to_quaternion(pitch: float) -> Tuple[float, float, float, float]:
    """Convert only a Pitch Euler angle to its respective Quaternion values.

    With roll and yaw fixed at zero, only the y and w components are
    nonzero, so they are computed directly instead of multiplying through
    hard-coded 1.0/0.0 factors as before (same results, no dead arithmetic).

    :param pitch: rotation about the y-axis, in radians
    :return: quaternion as an (x, y, z, w) tuple
    """
    half = pitch * 0.5
    return (0.0, np.sin(half), 0.0, np.cos(half))
def yaw_to_quaternion(yaw: float) -> Tuple[float, float, float, float]:
    """Convert only a Yaw Euler angle to its respective Quaternion values.

    With roll and pitch fixed at zero, only the z and w components are
    nonzero, so they are computed directly instead of multiplying through
    hard-coded 1.0/0.0 factors as before (same results, no dead arithmetic).

    :param yaw: rotation about the z-axis, in radians
    :return: quaternion as an (x, y, z, w) tuple
    """
    half = yaw * 0.5
    return (0.0, 0.0, np.sin(half), np.cos(half))
def euler_to_quaternion(
        roll: float,
        pitch: float,
        yaw: float) -> Tuple[float, float, float, float]:
    """Convert (roll, pitch, yaw) Euler angles to an (x, y, z, w) quaternion."""
    # Half-angle sines/cosines per axis.
    cr, sr = np.cos(roll * 0.5), np.sin(roll * 0.5)
    cp, sp = np.cos(pitch * 0.5), np.sin(pitch * 0.5)
    cy, sy = np.cos(yaw * 0.5), np.sin(yaw * 0.5)
    qx = sr * cp * cy - cr * sp * sy
    qy = cr * sp * cy + sr * cp * sy
    qz = cr * cp * sy - sr * sp * cy
    qw = cr * cp * cy + sr * sp * sy
    return (qx, qy, qz, qw)
| [
37811,
32,
2168,
286,
11315,
4899,
329,
2264,
9205,
507,
290,
412,
18173,
2895,
829,
13,
198,
198,
4711,
5499,
389,
9518,
6300,
286,
262,
16113,
1043,
994,
25,
198,
5450,
1378,
268,
13,
31266,
13,
2398,
14,
15466,
14,
3103,
9641,
62... | 1.865779 | 1,803 |
#!/usr/bin/python3

import sys

# Fail fast on Python 2: the path setup below uses f-strings (3.6+).
if sys.version_info[0] != 3:
    print("This script requires Python 3")
    exit(1)

import sys  # NOTE(review): duplicate of the import above; redundant but harmless.

# Make the LANforge helper scripts and sibling libraries importable when the
# script is run from its own directory (paths are relative to the CWD).
for folder in 'py-json', 'py-scripts':
    if folder not in sys.path:
        sys.path.append(f'../lanforge/lanforge-scripts/{folder}')

sys.path.append(f'../libs/lanforge')
sys.path.append(f'../libs/testrails')
sys.path.append(f'../libs/apnos')
sys.path.append(f'../libs/cloudsdk')
sys.path.append(f'../libs')
sys.path.append(f'../tests/test_utility/')
import base64
import urllib.request
from bs4 import BeautifulSoup
import ssl
import subprocess, os
from artifactory import ArtifactoryPath
import tarfile
import paramiko
from paramiko import SSHClient
from scp import SCPClient
import os
import pexpect
from pexpect import pxssh
import paramiko
from scp import SCPClient
import pprint
from pprint import pprint
from os import listdir
import re
import requests
import json
import logging
import datetime
import time
from datetime import date
from shutil import copyfile
import argparse
from unittest.mock import Mock
from lf_tests import *
from ap_plus_sdk import *
from lab_ap_info import *
from JfrogHelper import *
from reporting import Reporting
# For finding files
# https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
import glob
# external_results_dir=/var/tmp/lanforge
# To run this from your home system to NOLA-01 testbed, use this command. This assumes you have set up an ssh tunnel
# logged to the cicd jumphost that can reach the lab. In separate console to set up the ssh tunnel: ssh -C -L
# 7220:lab-ctlr:22 ubuntu@3.130.51.163 On local machine:
# ./query_ssids.py --testrail-user-id NONE --model ecw5410
# --ap-jumphost-address localhost --ap-jumphost-port 7220 --ap-jumphost-password secret --ap-jumphost-tty /dev/ttyAP1
import testrail_api
from LANforge.LFUtils import *
# if you lack __init__.py in this directory you will not find sta_connect module#
import sta_connect2
from sta_connect2 import StaConnect2
import testrail_api
from testrail_api import TestRail_Client
import eap_connect
from eap_connect import EAPConnect
import cloudsdk
from cloudsdk import CloudSDK
from cloudsdk import CreateAPProfiles
import ap_ssh
from ap_ssh import *
# Import info for lab setup and APs under test
import lab_ap_info
from lab_ap_info import cloud_sdk_models
from lab_ap_info import ap_models
from lab_ap_info import customer_id
from lab_ap_info import cloud_type
from lab_ap_info import test_cases
from lab_ap_info import radius_info
| [
2,
48443,
14629,
14,
8800,
14,
29412,
18,
198,
198,
11748,
25064,
198,
198,
361,
25064,
13,
9641,
62,
10951,
58,
15,
60,
14512,
513,
25,
198,
220,
220,
220,
3601,
7203,
1212,
4226,
4433,
11361,
513,
4943,
198,
220,
220,
220,
8420,
... | 3.031288 | 831 |
# 07 - Write a program where the user can type seven numeric values and
# register them in a single list that keeps even and odd values separated.
# At the end, show the even and odd values in ascending order.
# (User-facing prompts/output below intentionally remain in Portuguese.)

numeros = list()  # master list that will hold both sub-lists
listaPar = list()  # holds only the even numbers
listaImpar = list()  # holds only the odd numbers

for i in range(0,7):  # read exactly seven values
    num = int(input('Digite um número: '))  # read one integer from the user
    if num % 2 == 0:  # divisible by 2 -> even list
        listaPar.append(num)
    else:  # otherwise the number is odd -> odd list
        listaImpar.append(num)

# After the loop, nest the even and odd lists inside the master list.
numeros.append(listaPar)
numeros.append(listaImpar)

print(f'\nO valores pares são: {sorted(listaPar)}')
print(f'O valores impares são: {sorted(listaImpar)}\n')
2,
2998,
532,
327,
5034,
23781,
1430,
64,
319,
2934,
267,
514,
84,
6557,
27250,
1184,
64,
3100,
7940,
900,
68,
1188,
2850,
997,
2634,
1173,
418,
304,
20603,
459,
260,
12,
418,
795,
334,
2611,
1351,
64,
6184,
118,
77,
3970,
8358,
2... | 2.462963 | 432 |
from pure_fb_openmetrics_exporter.flashblade_collector.flashblade_metrics.array_performance_metrics import ArrayPerformanceMetrics
| [
6738,
5899,
62,
21855,
62,
9654,
4164,
10466,
62,
1069,
26634,
13,
34167,
22500,
62,
33327,
273,
13,
34167,
22500,
62,
4164,
10466,
13,
18747,
62,
26585,
62,
4164,
10466,
1330,
15690,
32273,
9171,
10466,
628
] | 3.666667 | 36 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.dynamodb
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``dynamodb`` cache backend
"""
from .base import BaseCache
from .storage.dynamodbdict import DynamoDbDict
class DynamoDbCache(BaseCache):
    """``dynamodb`` cache backend.

    Stores responses and the url-to-key map in two DynamoDB-backed dicts
    sharing one table.
    """
    def __init__(self, table_name='requests-cache', **options):
        """
        :param table_name: dynamodb table name (default: ``'requests-cache'``)
        :param connection: (optional) ``boto3.resource('dynamodb')``
        :param endpoint_url: (optional) custom DynamoDB endpoint; the
            historical misspelling ``endpont_url`` is still honored for
            backward compatibility.
        """
        super(DynamoDbCache, self).__init__(**options)
        # Bug fix: the option was only read under the misspelled key
        # 'endpont_url', silently ignoring a correctly spelled
        # 'endpoint_url'. Prefer the correct key, fall back to the old one.
        endpoint_url = options.get('endpoint_url', options.get('endpont_url'))
        self.responses = DynamoDbDict(table_name, 'responses',
                                      options.get('connection'),
                                      endpoint_url,
                                      options.get('region_name'),
                                      options.get('read_capacity_units'),
                                      options.get('write_capacity_units'))
        # Reuse the responses' connection for the url-to-key map.
        self.keys_map = DynamoDbDict(table_name,
                                     'urls',
                                     self.responses.connection)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
220,
220,
220,
7007,
62,
23870,
13,
1891,
2412,
13,
67,
4989,
375,
65,
198,
220,
220,
220,
220,
27156,
... | 1.927393 | 606 |
import pathlib
from setuptools import setup, find_packages
# Package directory and long description sourced from the adjacent ReadMe.
HERE = pathlib.Path(__file__).parent
README = (HERE / "ReadMe.md").read_text()

setup(
    name="dataclasses_ujson",
    version="0.0.14",
    packages=find_packages(exclude=("tests*","bench_marks.py")),
    author="Kislenko Artem ",
    author_email="artem@webart-tech.ru",
    description="fast converter your json to dataclass",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/kislenko-artem/dataclasses-ujson",
    license="Apache",
    install_requires=[
        "ujson>=1.35"  # the only runtime dependency
    ],
    python_requires=">=3.7",
    extras_require={
        "dev": ["pytest"]  # install with `pip install .[dev]` for testing
    },
    include_package_data=True,
    py_modules=['dataclasses_ujson'],
    setup_requires=["pytest-runner"],
    tests_require=["pytest"]
)
| [
11748,
3108,
8019,
198,
6738,
900,
37623,
10141,
1330,
9058,
11,
1064,
62,
43789,
198,
198,
39,
9338,
796,
3108,
8019,
13,
15235,
7,
834,
7753,
834,
737,
8000,
198,
15675,
11682,
796,
357,
39,
9338,
1220,
366,
5569,
5308,
13,
9132,
... | 2.455621 | 338 |
"""
Backend for test environment.
"""
from django.core import cache
from django.core.mail.backends.base import BaseEmailBackend
from .. import settings as mailviewer_settings
class EmailBackend(BaseEmailBackend):
    """
    An email backend to use during testing and local development with Django Mail Viewer.

    Uses Django's cache framework to store sent emails in the cache so that they can
    be easily retrieved in multi-process environments such as using Django Channels or
    when sending an email from a python shell.

    NOTE(review): ``self.cache`` and ``self.cache_keys_key`` are read below
    but never assigned in this class -- presumably set by ``__init__`` or
    ``send_messages`` elsewhere (not visible here); confirm.
    """

    def get_message(self, lookup_id):
        """
        Look up and return a specific message in the outbox by its cache id.
        Returns None when no message is stored under that id.
        """
        return self.cache.get(lookup_id)

    def get_outbox(self, *args, **kwargs):
        """
        Get the outbox used by this backend.

        May add pagination args/kwargs in the future.
        """
        # The cache entry at self.cache_keys_key holds the list of all stored
        # message keys; fetch them in one round trip with get_many().
        message_keys = self.cache.get(self.cache_keys_key)
        if message_keys:
            messages = list(self.cache.get_many(message_keys).values())
        else:
            messages = []
        return messages

    def delete_message(self, message_id: str):
        """
        Remove the message with the given id from the mailbox.

        NOTE(review): raises ValueError if message_id is not in the key list.
        """
        message_keys = self.cache.get(self.cache_keys_key, [])
        message_keys.remove(message_id)
        # Persist the shrunken key list, then drop the message itself.
        self.cache.set(self.cache_keys_key, message_keys)
        self.cache.delete(message_id)
| [
37811,
198,
7282,
437,
329,
1332,
2858,
13,
198,
37811,
198,
6738,
42625,
14208,
13,
7295,
1330,
12940,
198,
6738,
42625,
14208,
13,
7295,
13,
4529,
13,
1891,
2412,
13,
8692,
1330,
7308,
15333,
7282,
437,
198,
198,
6738,
11485,
1330,
... | 2.688663 | 591 |
import sys
from itertools import combinations


def find_invalid(data, preamble):
    """Return the index of the first value in *data* that is NOT the sum of two
    distinct values among the *preamble* values immediately before it.

    Returns ``None`` when every value past the preamble checks out.
    """
    for idx in range(preamble, len(data)):
        window = data[idx - preamble:idx]
        if not any(a + b == data[idx] for a, b in combinations(window, 2)):
            return idx
    return None


def find_weakness(data, target):
    """Return the first contiguous run of *data* (length >= 2) that sums to
    *target*, or ``None`` when no such run exists.

    Uses a running total per start index instead of re-summing each slice.
    The scan now considers runs ending at the LAST element too (the original
    ``while endidx < len(subdata)`` loop could never include it).
    """
    for start in range(len(data) - 1):
        total = 0
        for end in range(start, len(data)):
            total += data[end]
            if total == target and end > start:
                return data[start:end + 1]
    return None


def main():
    """Read the puzzle input from stdin, preamble length from argv[1]."""
    data = [int(line.strip()) for line in sys.stdin.readlines()]
    preamble = int(sys.argv[1])

    idx = find_invalid(data, preamble)
    if idx is None:
        # Guard: the original indexed data[idx] unconditionally and raised
        # IndexError when every value was a valid sum.
        print("Part1: no invalid number found")
        sys.exit(1)
    print("Part1: {} - {}".format(idx, data[idx]))

    sublist = find_weakness(data[:idx], data[idx])
    if sublist is not None:
        print("Part2: {} {}".format(sublist, min(sublist) + max(sublist)))
        sys.exit(0)


if __name__ == "__main__":
    main()
| [
11748,
25064,
198,
6738,
340,
861,
10141,
1330,
17790,
198,
198,
7890,
796,
685,
600,
7,
1370,
13,
36311,
28955,
329,
1627,
287,
25064,
13,
19282,
259,
13,
961,
6615,
3419,
60,
198,
198,
79,
1476,
903,
796,
493,
7,
17597,
13,
853,
... | 2.06267 | 367 |
from models import Patient
from .mutation_type import mutation
from authentication.authentication import needsAuthorization
from graphql.type import GraphQLResolveInfo
from datacreators.patient import CreatePatient
from SdTypes import Permissions
@mutation.field("createPatient")
@needsAuthorization([Permissions.PATIENT_CREATE])
| [
6738,
4981,
1330,
35550,
198,
6738,
764,
76,
7094,
62,
4906,
1330,
15148,
198,
6738,
18239,
13,
41299,
3299,
1330,
2476,
13838,
1634,
198,
6738,
4823,
13976,
13,
4906,
1330,
29681,
9711,
4965,
6442,
12360,
198,
6738,
4818,
330,
630,
669... | 3.952381 | 84 |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: cs6_consume_events_using_cloud_events_1.0_schema.py
DESCRIPTION:
These samples demonstrate creating a list of CloudEvents and sending then as a list.
USAGE:
python cs6_consume_events_using_cloud_events_1.0_schema.py
"""
import os
from azure.eventgrid import EventGridConsumer
consumer = EventGridConsumer()
# returns List[DeserializedEvent]
deserialized_events = consumer.decode_eventgrid_event(service_bus_received_message)
# CloudEvent schema
for event in deserialized_events:
# both allow access to raw properties as strings
time_string = event.time
time_string = event["time"]
# model returns CloudEvent object
cloud_event = event.model
# all model properties are strongly typed
datetime_object = event.model.time
storage_blobcreated_object = event.model.data | [
2,
16529,
35937,
198,
2,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
198,
2,
49962,
739,
262,
17168,
13789,
13,
4091,
13789,
13,
14116,
287,
262,
1628,
6808,
329,
198,
2,
5964,
1321,
13,
198,
2,
16529,
35937,
198,
3... | 3.779264 | 299 |
# NOTE(review): ``ia``, ``np`` and ``random`` are imported but unused in this
# snippet; only ``iaa`` and ``cv2`` are referenced below.
import imgaug as ia
from imgaug import augmenters as iaa
import numpy as np
import random
import cv2

# Individual imgaug augmenters; ``per_channel=0.5`` applies the effect
# independently per color channel on half of the images.
brightness = iaa.Add((-7, 7), per_channel=0.5)          # shift pixel values by -7..7
contrast = iaa.LinearContrast((0.8, 1.6), per_channel=0.5)  # scale contrast 0.8x-1.6x
perspective = iaa.PerspectiveTransform(scale=(0.025, 0.090))  # mild perspective warp
gaussian_noise = iaa.AdditiveGaussianNoise(loc=0, scale=(0.03*255, 0.04*255), per_channel=0.5)
crop = iaa.Crop(px=(0, 25))                             # crop 0-25 px from each side

if __name__ == "__main__":
    # Manual smoke test: load a sample image and show the augmented result.
    image = cv2.imread('cache/image 10.1.jpg')
    # NOTE(review): ``aug_image`` is not defined anywhere in this snippet --
    # presumably a helper combining the augmenters above; as written this
    # raises NameError. Confirm where it is meant to come from.
    aug_images = aug_image(image)
    aug_images = [aug_images]

##    image = cv2.resize(aug_images[0], (300,400))
##    cv2.imshow('Before', image)
    print(len(aug_images))
    image = cv2.resize(image, (600,600))
    image_1 = cv2.resize(aug_images[0], (600,600))

    cv2.imshow('1', image)
    cv2.waitKey(0)
    cv2.imshow('2', image_1)
    cv2.waitKey(0)
##    image2 = cv2.imread('cache/image 13.2.jpg')
##    image2 = cv2.resize(image2, (400,400))
##    cv2.imshow('After', image2)
##    cv2.waitKey(0)
| [
11748,
33705,
7493,
355,
220,
544,
201,
198,
6738,
33705,
7493,
1330,
35016,
364,
355,
220,
544,
64,
201,
198,
11748,
299,
32152,
355,
45941,
201,
198,
11748,
4738,
201,
198,
11748,
269,
85,
17,
201,
198,
201,
198,
29199,
1108,
796,
... | 1.622276 | 826 |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: injective/peggy/v1/msgs.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from cosmos.base.v1beta1 import coin_pb2 as cosmos_dot_base_dot_v1beta1_dot_coin__pb2
from gogoproto import gogo_pb2 as gogoproto_dot_gogo__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from injective.peggy.v1 import types_pb2 as injective_dot_peggy_dot_v1_dot_types__pb2
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='injective/peggy/v1/msgs.proto',
package='injective.peggy.v1',
syntax='proto3',
serialized_options=b'ZKgithub.com/InjectiveLabs/injective-core/injective-chain/modules/peggy/types',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1dinjective/peggy/v1/msgs.proto\x12\x12injective.peggy.v1\x1a\x1e\x63osmos/base/v1beta1/coin.proto\x1a\x14gogoproto/gogo.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1einjective/peggy/v1/types.proto\x1a\x19google/protobuf/any.proto\"X\n\x1bMsgSetOrchestratorAddresses\x12\x0e\n\x06sender\x18\x01 \x01(\t\x12\x14\n\x0corchestrator\x18\x02 \x01(\t\x12\x13\n\x0b\x65th_address\x18\x03 \x01(\t\"%\n#MsgSetOrchestratorAddressesResponse\"_\n\x10MsgValsetConfirm\x12\r\n\x05nonce\x18\x01 \x01(\x04\x12\x14\n\x0corchestrator\x18\x02 \x01(\t\x12\x13\n\x0b\x65th_address\x18\x03 \x01(\t\x12\x11\n\tsignature\x18\x04 \x01(\t\"\x1a\n\x18MsgValsetConfirmResponse\"\x96\x01\n\x0cMsgSendToEth\x12\x0e\n\x06sender\x18\x01 \x01(\t\x12\x10\n\x08\x65th_dest\x18\x02 \x01(\t\x12/\n\x06\x61mount\x18\x03 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\x12\x33\n\nbridge_fee\x18\x04 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\"\x16\n\x14MsgSendToEthResponse\"6\n\x0fMsgRequestBatch\x12\x14\n\x0corchestrator\x18\x01 \x01(\t\x12\r\n\x05\x64\x65nom\x18\x02 \x01(\t\"\x19\n\x17MsgRequestBatchResponse\"u\n\x0fMsgConfirmBatch\x12\r\n\x05nonce\x18\x01 \x01(\x04\x12\x16\n\x0etoken_contract\x18\x02 \x01(\t\x12\x12\n\neth_signer\x18\x03 \x01(\t\x12\x14\n\x0corchestrator\x18\x04 \x01(\t\x12\x11\n\tsignature\x18\x05 \x01(\t\"\x19\n\x17MsgConfirmBatchResponse\"\xdc\x01\n\x0fMsgDepositClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x16\n\x0etoken_contract\x18\x03 \x01(\t\x12>\n\x06\x61mount\x18\x04 \x01(\tB.\xda\xde\x1f&github.com/cosmos/cosmos-sdk/types.Int\xc8\xde\x1f\x00\x12\x17\n\x0f\x65thereum_sender\x18\x05 \x01(\t\x12\x17\n\x0f\x63osmos_receiver\x18\x06 \x01(\t\x12\x14\n\x0corchestrator\x18\x07 \x01(\t\"\x19\n\x17MsgDepositClaimResponse\"\x80\x01\n\x10MsgWithdrawClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x13\n\x0b\x62\x61tch_nonce\x18\x03 
\x01(\x04\x12\x16\n\x0etoken_contract\x18\x04 \x01(\t\x12\x14\n\x0corchestrator\x18\x05 \x01(\t\"\x1a\n\x18MsgWithdrawClaimResponse\"\xb6\x01\n\x15MsgERC20DeployedClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x04\x12\x14\n\x0c\x63osmos_denom\x18\x03 \x01(\t\x12\x16\n\x0etoken_contract\x18\x04 \x01(\t\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0e\n\x06symbol\x18\x06 \x01(\t\x12\x10\n\x08\x64\x65\x63imals\x18\x07 \x01(\x04\x12\x14\n\x0corchestrator\x18\x08 \x01(\t\"\x1f\n\x1dMsgERC20DeployedClaimResponse\"<\n\x12MsgCancelSendToEth\x12\x16\n\x0etransaction_id\x18\x01 \x01(\x04\x12\x0e\n\x06sender\x18\x02 \x01(\t\"\x1c\n\x1aMsgCancelSendToEthResponse\"i\n\x1dMsgSubmitBadSignatureEvidence\x12%\n\x07subject\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tsignature\x18\x02 \x01(\t\x12\x0e\n\x06sender\x18\x03 \x01(\t\"\'\n%MsgSubmitBadSignatureEvidenceResponse\"\x81\x02\n\x15MsgValsetUpdatedClaim\x12\x13\n\x0b\x65vent_nonce\x18\x01 \x01(\x04\x12\x14\n\x0cvalset_nonce\x18\x02 \x01(\x04\x12\x14\n\x0c\x62lock_height\x18\x03 \x01(\x04\x12\x34\n\x07members\x18\x04 \x03(\x0b\x32#.injective.peggy.v1.BridgeValidator\x12\x45\n\rreward_amount\x18\x05 \x01(\tB.\xda\xde\x1f&github.com/cosmos/cosmos-sdk/types.Int\xc8\xde\x1f\x00\x12\x14\n\x0creward_token\x18\x06 \x01(\t\x12\x14\n\x0corchestrator\x18\x07 \x01(\t\"\x1f\n\x1dMsgValsetUpdatedClaimResponse2\xc4\r\n\x03Msg\x12\x8f\x01\n\rValsetConfirm\x12$.injective.peggy.v1.MsgValsetConfirm\x1a,.injective.peggy.v1.MsgValsetConfirmResponse\"*\x82\xd3\xe4\x93\x02$\"\"/injective/peggy/v1/valset_confirm\x12\x80\x01\n\tSendToEth\x12 
.injective.peggy.v1.MsgSendToEth\x1a(.injective.peggy.v1.MsgSendToEthResponse\"\'\x82\xd3\xe4\x93\x02!\"\x1f/injective/peggy/v1/send_to_eth\x12\x8b\x01\n\x0cRequestBatch\x12#.injective.peggy.v1.MsgRequestBatch\x1a+.injective.peggy.v1.MsgRequestBatchResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/request_batch\x12\x8b\x01\n\x0c\x43onfirmBatch\x12#.injective.peggy.v1.MsgConfirmBatch\x1a+.injective.peggy.v1.MsgConfirmBatchResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/confirm_batch\x12\x8b\x01\n\x0c\x44\x65positClaim\x12#.injective.peggy.v1.MsgDepositClaim\x1a+.injective.peggy.v1.MsgDepositClaimResponse\")\x82\xd3\xe4\x93\x02#\"!/injective/peggy/v1/deposit_claim\x12\x8f\x01\n\rWithdrawClaim\x12$.injective.peggy.v1.MsgWithdrawClaim\x1a,.injective.peggy.v1.MsgWithdrawClaimResponse\"*\x82\xd3\xe4\x93\x02$\"\"/injective/peggy/v1/withdraw_claim\x12\xa3\x01\n\x11ValsetUpdateClaim\x12).injective.peggy.v1.MsgValsetUpdatedClaim\x1a\x31.injective.peggy.v1.MsgValsetUpdatedClaimResponse\"0\x82\xd3\xe4\x93\x02*\"(/injective/peggy/v1/valset_updated_claim\x12\xa4\x01\n\x12\x45RC20DeployedClaim\x12).injective.peggy.v1.MsgERC20DeployedClaim\x1a\x31.injective.peggy.v1.MsgERC20DeployedClaimResponse\"0\x82\xd3\xe4\x93\x02*\"(/injective/peggy/v1/erc20_deployed_claim\x12\xba\x01\n\x18SetOrchestratorAddresses\x12/.injective.peggy.v1.MsgSetOrchestratorAddresses\x1a\x37.injective.peggy.v1.MsgSetOrchestratorAddressesResponse\"4\x82\xd3\xe4\x93\x02.\",/injective/peggy/v1/set_orchestrator_address\x12\x99\x01\n\x0f\x43\x61ncelSendToEth\x12&.injective.peggy.v1.MsgCancelSendToEth\x1a..injective.peggy.v1.MsgCancelSendToEthResponse\".\x82\xd3\xe4\x93\x02(\"&/injective/peggy/v1/cancel_send_to_eth\x12\xc5\x01\n\x1aSubmitBadSignatureEvidence\x12\x31.injective.peggy.v1.MsgSubmitBadSignatureEvidence\x1a\x39.injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse\"9\x82\xd3\xe4\x93\x02\x33\"1/injective/peggy/v1/submit_bad_signature_evidenceBMZKgithub.com/InjectiveLabs/injective-core/injectiv
e-chain/modules/peggy/typesb\x06proto3'
,
dependencies=[cosmos_dot_base_dot_v1beta1_dot_coin__pb2.DESCRIPTOR,gogoproto_dot_gogo__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,injective_dot_peggy_dot_v1_dot_types__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_MSGSETORCHESTRATORADDRESSES = _descriptor.Descriptor(
name='MsgSetOrchestratorAddresses',
full_name='injective.peggy.v1.MsgSetOrchestratorAddresses',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.sender', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.orchestrator', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_address', full_name='injective.peggy.v1.MsgSetOrchestratorAddresses.eth_address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=196,
serialized_end=284,
)
_MSGSETORCHESTRATORADDRESSESRESPONSE = _descriptor.Descriptor(
name='MsgSetOrchestratorAddressesResponse',
full_name='injective.peggy.v1.MsgSetOrchestratorAddressesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=286,
serialized_end=323,
)
_MSGVALSETCONFIRM = _descriptor.Descriptor(
name='MsgValsetConfirm',
full_name='injective.peggy.v1.MsgValsetConfirm',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nonce', full_name='injective.peggy.v1.MsgValsetConfirm.nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgValsetConfirm.orchestrator', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_address', full_name='injective.peggy.v1.MsgValsetConfirm.eth_address', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='injective.peggy.v1.MsgValsetConfirm.signature', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=325,
serialized_end=420,
)
_MSGVALSETCONFIRMRESPONSE = _descriptor.Descriptor(
name='MsgValsetConfirmResponse',
full_name='injective.peggy.v1.MsgValsetConfirmResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=422,
serialized_end=448,
)
_MSGSENDTOETH = _descriptor.Descriptor(
name='MsgSendToEth',
full_name='injective.peggy.v1.MsgSendToEth',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgSendToEth.sender', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_dest', full_name='injective.peggy.v1.MsgSendToEth.eth_dest', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='amount', full_name='injective.peggy.v1.MsgSendToEth.amount', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bridge_fee', full_name='injective.peggy.v1.MsgSendToEth.bridge_fee', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=451,
serialized_end=601,
)
_MSGSENDTOETHRESPONSE = _descriptor.Descriptor(
name='MsgSendToEthResponse',
full_name='injective.peggy.v1.MsgSendToEthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=603,
serialized_end=625,
)
_MSGREQUESTBATCH = _descriptor.Descriptor(
name='MsgRequestBatch',
full_name='injective.peggy.v1.MsgRequestBatch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgRequestBatch.orchestrator', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='denom', full_name='injective.peggy.v1.MsgRequestBatch.denom', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=627,
serialized_end=681,
)
_MSGREQUESTBATCHRESPONSE = _descriptor.Descriptor(
name='MsgRequestBatchResponse',
full_name='injective.peggy.v1.MsgRequestBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=683,
serialized_end=708,
)
_MSGCONFIRMBATCH = _descriptor.Descriptor(
name='MsgConfirmBatch',
full_name='injective.peggy.v1.MsgConfirmBatch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nonce', full_name='injective.peggy.v1.MsgConfirmBatch.nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgConfirmBatch.token_contract', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='eth_signer', full_name='injective.peggy.v1.MsgConfirmBatch.eth_signer', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgConfirmBatch.orchestrator', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='injective.peggy.v1.MsgConfirmBatch.signature', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=710,
serialized_end=827,
)
_MSGCONFIRMBATCHRESPONSE = _descriptor.Descriptor(
name='MsgConfirmBatchResponse',
full_name='injective.peggy.v1.MsgConfirmBatchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=829,
serialized_end=854,
)
_MSGDEPOSITCLAIM = _descriptor.Descriptor(
name='MsgDepositClaim',
full_name='injective.peggy.v1.MsgDepositClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgDepositClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgDepositClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgDepositClaim.token_contract', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='amount', full_name='injective.peggy.v1.MsgDepositClaim.amount', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\332\336\037&github.com/cosmos/cosmos-sdk/types.Int\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ethereum_sender', full_name='injective.peggy.v1.MsgDepositClaim.ethereum_sender', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cosmos_receiver', full_name='injective.peggy.v1.MsgDepositClaim.cosmos_receiver', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgDepositClaim.orchestrator', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=857,
serialized_end=1077,
)
_MSGDEPOSITCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgDepositClaimResponse',
full_name='injective.peggy.v1.MsgDepositClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1079,
serialized_end=1104,
)
_MSGWITHDRAWCLAIM = _descriptor.Descriptor(
name='MsgWithdrawClaim',
full_name='injective.peggy.v1.MsgWithdrawClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgWithdrawClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgWithdrawClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='batch_nonce', full_name='injective.peggy.v1.MsgWithdrawClaim.batch_nonce', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgWithdrawClaim.token_contract', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgWithdrawClaim.orchestrator', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1107,
serialized_end=1235,
)
_MSGWITHDRAWCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgWithdrawClaimResponse',
full_name='injective.peggy.v1.MsgWithdrawClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1237,
serialized_end=1263,
)
_MSGERC20DEPLOYEDCLAIM = _descriptor.Descriptor(
name='MsgERC20DeployedClaim',
full_name='injective.peggy.v1.MsgERC20DeployedClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgERC20DeployedClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgERC20DeployedClaim.block_height', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cosmos_denom', full_name='injective.peggy.v1.MsgERC20DeployedClaim.cosmos_denom', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token_contract', full_name='injective.peggy.v1.MsgERC20DeployedClaim.token_contract', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='injective.peggy.v1.MsgERC20DeployedClaim.name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='symbol', full_name='injective.peggy.v1.MsgERC20DeployedClaim.symbol', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='decimals', full_name='injective.peggy.v1.MsgERC20DeployedClaim.decimals', index=6,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgERC20DeployedClaim.orchestrator', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1266,
serialized_end=1448,
)
_MSGERC20DEPLOYEDCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgERC20DeployedClaimResponse',
full_name='injective.peggy.v1.MsgERC20DeployedClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1450,
serialized_end=1481,
)
_MSGCANCELSENDTOETH = _descriptor.Descriptor(
name='MsgCancelSendToEth',
full_name='injective.peggy.v1.MsgCancelSendToEth',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='transaction_id', full_name='injective.peggy.v1.MsgCancelSendToEth.transaction_id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgCancelSendToEth.sender', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1483,
serialized_end=1543,
)
_MSGCANCELSENDTOETHRESPONSE = _descriptor.Descriptor(
name='MsgCancelSendToEthResponse',
full_name='injective.peggy.v1.MsgCancelSendToEthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1545,
serialized_end=1573,
)
_MSGSUBMITBADSIGNATUREEVIDENCE = _descriptor.Descriptor(
name='MsgSubmitBadSignatureEvidence',
full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subject', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.subject', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.signature', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sender', full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidence.sender', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1575,
serialized_end=1680,
)
_MSGSUBMITBADSIGNATUREEVIDENCERESPONSE = _descriptor.Descriptor(
name='MsgSubmitBadSignatureEvidenceResponse',
full_name='injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1682,
serialized_end=1721,
)
_MSGVALSETUPDATEDCLAIM = _descriptor.Descriptor(
name='MsgValsetUpdatedClaim',
full_name='injective.peggy.v1.MsgValsetUpdatedClaim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_nonce', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.event_nonce', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='valset_nonce', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.valset_nonce', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='block_height', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.block_height', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='members', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.members', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reward_amount', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.reward_amount', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\332\336\037&github.com/cosmos/cosmos-sdk/types.Int\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reward_token', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.reward_token', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='orchestrator', full_name='injective.peggy.v1.MsgValsetUpdatedClaim.orchestrator', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1724,
serialized_end=1981,
)
_MSGVALSETUPDATEDCLAIMRESPONSE = _descriptor.Descriptor(
name='MsgValsetUpdatedClaimResponse',
full_name='injective.peggy.v1.MsgValsetUpdatedClaimResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1983,
serialized_end=2014,
)
_MSGSENDTOETH.fields_by_name['amount'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_MSGSENDTOETH.fields_by_name['bridge_fee'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_MSGSUBMITBADSIGNATUREEVIDENCE.fields_by_name['subject'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_MSGVALSETUPDATEDCLAIM.fields_by_name['members'].message_type = injective_dot_peggy_dot_v1_dot_types__pb2._BRIDGEVALIDATOR
DESCRIPTOR.message_types_by_name['MsgSetOrchestratorAddresses'] = _MSGSETORCHESTRATORADDRESSES
DESCRIPTOR.message_types_by_name['MsgSetOrchestratorAddressesResponse'] = _MSGSETORCHESTRATORADDRESSESRESPONSE
DESCRIPTOR.message_types_by_name['MsgValsetConfirm'] = _MSGVALSETCONFIRM
DESCRIPTOR.message_types_by_name['MsgValsetConfirmResponse'] = _MSGVALSETCONFIRMRESPONSE
DESCRIPTOR.message_types_by_name['MsgSendToEth'] = _MSGSENDTOETH
DESCRIPTOR.message_types_by_name['MsgSendToEthResponse'] = _MSGSENDTOETHRESPONSE
DESCRIPTOR.message_types_by_name['MsgRequestBatch'] = _MSGREQUESTBATCH
DESCRIPTOR.message_types_by_name['MsgRequestBatchResponse'] = _MSGREQUESTBATCHRESPONSE
DESCRIPTOR.message_types_by_name['MsgConfirmBatch'] = _MSGCONFIRMBATCH
DESCRIPTOR.message_types_by_name['MsgConfirmBatchResponse'] = _MSGCONFIRMBATCHRESPONSE
DESCRIPTOR.message_types_by_name['MsgDepositClaim'] = _MSGDEPOSITCLAIM
DESCRIPTOR.message_types_by_name['MsgDepositClaimResponse'] = _MSGDEPOSITCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgWithdrawClaim'] = _MSGWITHDRAWCLAIM
DESCRIPTOR.message_types_by_name['MsgWithdrawClaimResponse'] = _MSGWITHDRAWCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgERC20DeployedClaim'] = _MSGERC20DEPLOYEDCLAIM
DESCRIPTOR.message_types_by_name['MsgERC20DeployedClaimResponse'] = _MSGERC20DEPLOYEDCLAIMRESPONSE
DESCRIPTOR.message_types_by_name['MsgCancelSendToEth'] = _MSGCANCELSENDTOETH
DESCRIPTOR.message_types_by_name['MsgCancelSendToEthResponse'] = _MSGCANCELSENDTOETHRESPONSE
DESCRIPTOR.message_types_by_name['MsgSubmitBadSignatureEvidence'] = _MSGSUBMITBADSIGNATUREEVIDENCE
DESCRIPTOR.message_types_by_name['MsgSubmitBadSignatureEvidenceResponse'] = _MSGSUBMITBADSIGNATUREEVIDENCERESPONSE
DESCRIPTOR.message_types_by_name['MsgValsetUpdatedClaim'] = _MSGVALSETUPDATEDCLAIM
DESCRIPTOR.message_types_by_name['MsgValsetUpdatedClaimResponse'] = _MSGVALSETUPDATEDCLAIMRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
MsgSetOrchestratorAddresses = _reflection.GeneratedProtocolMessageType('MsgSetOrchestratorAddresses', (_message.Message,), {
'DESCRIPTOR' : _MSGSETORCHESTRATORADDRESSES,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSetOrchestratorAddresses)
})
_sym_db.RegisterMessage(MsgSetOrchestratorAddresses)
MsgSetOrchestratorAddressesResponse = _reflection.GeneratedProtocolMessageType('MsgSetOrchestratorAddressesResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGSETORCHESTRATORADDRESSESRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSetOrchestratorAddressesResponse)
})
_sym_db.RegisterMessage(MsgSetOrchestratorAddressesResponse)
MsgValsetConfirm = _reflection.GeneratedProtocolMessageType('MsgValsetConfirm', (_message.Message,), {
'DESCRIPTOR' : _MSGVALSETCONFIRM,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetConfirm)
})
_sym_db.RegisterMessage(MsgValsetConfirm)
MsgValsetConfirmResponse = _reflection.GeneratedProtocolMessageType('MsgValsetConfirmResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGVALSETCONFIRMRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetConfirmResponse)
})
_sym_db.RegisterMessage(MsgValsetConfirmResponse)
MsgSendToEth = _reflection.GeneratedProtocolMessageType('MsgSendToEth', (_message.Message,), {
'DESCRIPTOR' : _MSGSENDTOETH,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSendToEth)
})
_sym_db.RegisterMessage(MsgSendToEth)
MsgSendToEthResponse = _reflection.GeneratedProtocolMessageType('MsgSendToEthResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGSENDTOETHRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSendToEthResponse)
})
_sym_db.RegisterMessage(MsgSendToEthResponse)
MsgRequestBatch = _reflection.GeneratedProtocolMessageType('MsgRequestBatch', (_message.Message,), {
'DESCRIPTOR' : _MSGREQUESTBATCH,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgRequestBatch)
})
_sym_db.RegisterMessage(MsgRequestBatch)
MsgRequestBatchResponse = _reflection.GeneratedProtocolMessageType('MsgRequestBatchResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGREQUESTBATCHRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgRequestBatchResponse)
})
_sym_db.RegisterMessage(MsgRequestBatchResponse)
MsgConfirmBatch = _reflection.GeneratedProtocolMessageType('MsgConfirmBatch', (_message.Message,), {
'DESCRIPTOR' : _MSGCONFIRMBATCH,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgConfirmBatch)
})
_sym_db.RegisterMessage(MsgConfirmBatch)
MsgConfirmBatchResponse = _reflection.GeneratedProtocolMessageType('MsgConfirmBatchResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGCONFIRMBATCHRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgConfirmBatchResponse)
})
_sym_db.RegisterMessage(MsgConfirmBatchResponse)
MsgDepositClaim = _reflection.GeneratedProtocolMessageType('MsgDepositClaim', (_message.Message,), {
'DESCRIPTOR' : _MSGDEPOSITCLAIM,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgDepositClaim)
})
_sym_db.RegisterMessage(MsgDepositClaim)
MsgDepositClaimResponse = _reflection.GeneratedProtocolMessageType('MsgDepositClaimResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGDEPOSITCLAIMRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgDepositClaimResponse)
})
_sym_db.RegisterMessage(MsgDepositClaimResponse)
MsgWithdrawClaim = _reflection.GeneratedProtocolMessageType('MsgWithdrawClaim', (_message.Message,), {
'DESCRIPTOR' : _MSGWITHDRAWCLAIM,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgWithdrawClaim)
})
_sym_db.RegisterMessage(MsgWithdrawClaim)
MsgWithdrawClaimResponse = _reflection.GeneratedProtocolMessageType('MsgWithdrawClaimResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGWITHDRAWCLAIMRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgWithdrawClaimResponse)
})
_sym_db.RegisterMessage(MsgWithdrawClaimResponse)
MsgERC20DeployedClaim = _reflection.GeneratedProtocolMessageType('MsgERC20DeployedClaim', (_message.Message,), {
'DESCRIPTOR' : _MSGERC20DEPLOYEDCLAIM,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgERC20DeployedClaim)
})
_sym_db.RegisterMessage(MsgERC20DeployedClaim)
MsgERC20DeployedClaimResponse = _reflection.GeneratedProtocolMessageType('MsgERC20DeployedClaimResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGERC20DEPLOYEDCLAIMRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgERC20DeployedClaimResponse)
})
_sym_db.RegisterMessage(MsgERC20DeployedClaimResponse)
MsgCancelSendToEth = _reflection.GeneratedProtocolMessageType('MsgCancelSendToEth', (_message.Message,), {
'DESCRIPTOR' : _MSGCANCELSENDTOETH,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgCancelSendToEth)
})
_sym_db.RegisterMessage(MsgCancelSendToEth)
MsgCancelSendToEthResponse = _reflection.GeneratedProtocolMessageType('MsgCancelSendToEthResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGCANCELSENDTOETHRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgCancelSendToEthResponse)
})
_sym_db.RegisterMessage(MsgCancelSendToEthResponse)
MsgSubmitBadSignatureEvidence = _reflection.GeneratedProtocolMessageType('MsgSubmitBadSignatureEvidence', (_message.Message,), {
'DESCRIPTOR' : _MSGSUBMITBADSIGNATUREEVIDENCE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSubmitBadSignatureEvidence)
})
_sym_db.RegisterMessage(MsgSubmitBadSignatureEvidence)
MsgSubmitBadSignatureEvidenceResponse = _reflection.GeneratedProtocolMessageType('MsgSubmitBadSignatureEvidenceResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGSUBMITBADSIGNATUREEVIDENCERESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgSubmitBadSignatureEvidenceResponse)
})
_sym_db.RegisterMessage(MsgSubmitBadSignatureEvidenceResponse)
MsgValsetUpdatedClaim = _reflection.GeneratedProtocolMessageType('MsgValsetUpdatedClaim', (_message.Message,), {
'DESCRIPTOR' : _MSGVALSETUPDATEDCLAIM,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetUpdatedClaim)
})
_sym_db.RegisterMessage(MsgValsetUpdatedClaim)
MsgValsetUpdatedClaimResponse = _reflection.GeneratedProtocolMessageType('MsgValsetUpdatedClaimResponse', (_message.Message,), {
'DESCRIPTOR' : _MSGVALSETUPDATEDCLAIMRESPONSE,
'__module__' : 'injective.peggy.v1.msgs_pb2'
# @@protoc_insertion_point(class_scope:injective.peggy.v1.MsgValsetUpdatedClaimResponse)
})
_sym_db.RegisterMessage(MsgValsetUpdatedClaimResponse)
DESCRIPTOR._options = None
_MSGSENDTOETH.fields_by_name['amount']._options = None
_MSGSENDTOETH.fields_by_name['bridge_fee']._options = None
_MSGDEPOSITCLAIM.fields_by_name['amount']._options = None
_MSGVALSETUPDATEDCLAIM.fields_by_name['reward_amount']._options = None
_MSG = _descriptor.ServiceDescriptor(
name='Msg',
full_name='injective.peggy.v1.Msg',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=2017,
serialized_end=3749,
methods=[
_descriptor.MethodDescriptor(
name='ValsetConfirm',
full_name='injective.peggy.v1.Msg.ValsetConfirm',
index=0,
containing_service=None,
input_type=_MSGVALSETCONFIRM,
output_type=_MSGVALSETCONFIRMRESPONSE,
serialized_options=b'\202\323\344\223\002$\"\"/injective/peggy/v1/valset_confirm',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SendToEth',
full_name='injective.peggy.v1.Msg.SendToEth',
index=1,
containing_service=None,
input_type=_MSGSENDTOETH,
output_type=_MSGSENDTOETHRESPONSE,
serialized_options=b'\202\323\344\223\002!\"\037/injective/peggy/v1/send_to_eth',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='RequestBatch',
full_name='injective.peggy.v1.Msg.RequestBatch',
index=2,
containing_service=None,
input_type=_MSGREQUESTBATCH,
output_type=_MSGREQUESTBATCHRESPONSE,
serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/request_batch',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ConfirmBatch',
full_name='injective.peggy.v1.Msg.ConfirmBatch',
index=3,
containing_service=None,
input_type=_MSGCONFIRMBATCH,
output_type=_MSGCONFIRMBATCHRESPONSE,
serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/confirm_batch',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DepositClaim',
full_name='injective.peggy.v1.Msg.DepositClaim',
index=4,
containing_service=None,
input_type=_MSGDEPOSITCLAIM,
output_type=_MSGDEPOSITCLAIMRESPONSE,
serialized_options=b'\202\323\344\223\002#\"!/injective/peggy/v1/deposit_claim',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='WithdrawClaim',
full_name='injective.peggy.v1.Msg.WithdrawClaim',
index=5,
containing_service=None,
input_type=_MSGWITHDRAWCLAIM,
output_type=_MSGWITHDRAWCLAIMRESPONSE,
serialized_options=b'\202\323\344\223\002$\"\"/injective/peggy/v1/withdraw_claim',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ValsetUpdateClaim',
full_name='injective.peggy.v1.Msg.ValsetUpdateClaim',
index=6,
containing_service=None,
input_type=_MSGVALSETUPDATEDCLAIM,
output_type=_MSGVALSETUPDATEDCLAIMRESPONSE,
serialized_options=b'\202\323\344\223\002*\"(/injective/peggy/v1/valset_updated_claim',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ERC20DeployedClaim',
full_name='injective.peggy.v1.Msg.ERC20DeployedClaim',
index=7,
containing_service=None,
input_type=_MSGERC20DEPLOYEDCLAIM,
output_type=_MSGERC20DEPLOYEDCLAIMRESPONSE,
serialized_options=b'\202\323\344\223\002*\"(/injective/peggy/v1/erc20_deployed_claim',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SetOrchestratorAddresses',
full_name='injective.peggy.v1.Msg.SetOrchestratorAddresses',
index=8,
containing_service=None,
input_type=_MSGSETORCHESTRATORADDRESSES,
output_type=_MSGSETORCHESTRATORADDRESSESRESPONSE,
serialized_options=b'\202\323\344\223\002.\",/injective/peggy/v1/set_orchestrator_address',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CancelSendToEth',
full_name='injective.peggy.v1.Msg.CancelSendToEth',
index=9,
containing_service=None,
input_type=_MSGCANCELSENDTOETH,
output_type=_MSGCANCELSENDTOETHRESPONSE,
serialized_options=b'\202\323\344\223\002(\"&/injective/peggy/v1/cancel_send_to_eth',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SubmitBadSignatureEvidence',
full_name='injective.peggy.v1.Msg.SubmitBadSignatureEvidence',
index=10,
containing_service=None,
input_type=_MSGSUBMITBADSIGNATUREEVIDENCE,
output_type=_MSGSUBMITBADSIGNATUREEVIDENCERESPONSE,
serialized_options=b'\202\323\344\223\0023\"1/injective/peggy/v1/submit_bad_signature_evidence',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_MSG)
DESCRIPTOR.services_by_name['Msg'] = _MSG
# @@protoc_insertion_point(module_scope)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2980,
515,
416,
262,
8435,
11876,
17050,
13,
220,
8410,
5626,
48483,
0,
198,
2,
2723,
25,
8677,
425,
14,
431,
19970,
14,
85,
16,
14,
907,
14542,
13,
1676,
1462,
... | 2.384414 | 22,957 |
n = conta = soma = 0
n = int(input('Digite um valor para soma (999 para SAIR): '))
while n != 999:
soma = soma + n
conta = conta + 1
n = int(input('Digite um valor para soma (999 para SAIR): '))
print(f'A SOMA DOS {conta} VALORES É IGUAL Á {soma}.')
| [
77,
796,
542,
64,
796,
3870,
64,
796,
657,
198,
77,
796,
493,
7,
15414,
10786,
19511,
578,
23781,
1188,
273,
31215,
3870,
64,
357,
17032,
31215,
14719,
4663,
2599,
705,
4008,
198,
4514,
299,
14512,
36006,
25,
198,
220,
220,
220,
387... | 2.193548 | 124 |
from keras.datasets import imdb
import keras
from keras.models import Sequential
from keras.layers.embeddings import Embedding
from keras.layers import Flatten, Dense
from keras.preprocessing import sequence
from numpy import array
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=5000)
word_to_id = keras.datasets.imdb.get_word_index()
word_to_id = {k:(v+3) for k,v in word_to_id.items()}
word_to_id["<PAD>"] = 0
word_to_id["<START>"] = 1
word_to_id["<UNK>"] = 2
x_train = sequence.pad_sequences(x_train, maxlen=300)
x_test = sequence.pad_sequences(x_test, maxlen=300)
network = Sequential()
network.add(Embedding(5000, 32, input_length=300))
network.add(Flatten())
network.add(Dense(1, activation='sigmoid'))
network.compile(loss="binary_crossentropy", optimizer='Adam', metrics=['accuracy'])
network.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=3, batch_size=64)
result = network.evaluate(x_test, y_test, verbose=0)
negative = "this movie was bad"
positive = "i had fun"
negative2 = "this movie was terrible"
positive2 = "i really liked the movie"
for review in [positive, positive2, negative, negative2]:
temp = []
for word in review.split(" "):
temp.append(word_to_id[word])
temp_padded = sequence.pad_sequences([temp], maxlen=300)
print(review + " -- Sent -- " + str(network.predict(array([temp_padded][0]))[0][0]))
| [
6738,
41927,
292,
13,
19608,
292,
1039,
1330,
545,
9945,
201,
198,
11748,
41927,
292,
201,
198,
6738,
41927,
292,
13,
27530,
1330,
24604,
1843,
201,
198,
6738,
41927,
292,
13,
75,
6962,
13,
20521,
67,
654,
1330,
13302,
6048,
278,
201,... | 2.507018 | 570 |
"""
Otsu thresholding
==================
This example illustrates automatic Otsu thresholding.
"""
import matplotlib.pyplot as plt
from skimage import data
from skimage import filters
from skimage import exposure
camera = data.camera()
val = filters.threshold_otsu(camera)
hist, bins_center = exposure.histogram(camera)
plt.figure(figsize=(9, 4))
plt.subplot(131)
plt.imshow(camera, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(132)
plt.imshow(camera < val, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.subplot(133)
plt.plot(bins_center, hist, lw=2)
plt.axvline(val, color='k', ls='--')
plt.tight_layout()
plt.show()
| [
37811,
198,
46,
912,
84,
11387,
278,
198,
4770,
855,
198,
198,
1212,
1672,
21290,
11353,
440,
912,
84,
11387,
278,
13,
198,
37811,
198,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
6738,
1341,
9060,
1330,
1366,
... | 2.726141 | 241 |
from BS.utils import read_src_bs, save_bs_dicts_to_txt, read_src_socket_bs, save_socket_bs_dicts_to_txt
if __name__ == '__main__':
change_template_bg()
| [
6738,
24218,
13,
26791,
1330,
1100,
62,
10677,
62,
1443,
11,
3613,
62,
1443,
62,
11600,
82,
62,
1462,
62,
14116,
11,
1100,
62,
10677,
62,
44971,
62,
1443,
11,
3613,
62,
44971,
62,
1443,
62,
11600,
82,
62,
1462,
62,
14116,
628,
628... | 2.424242 | 66 |
#!/usr/bin/env python
convert("config/luxisr.ttf", "luxibytes")
convert("config/luximr.ttf", "luximonobytes")
convert("config/DejaVuSans.ttf", "dejabytes")
convert("config/DejaVuSansMono.ttf", "dejamonobytes")
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
1102,
1851,
7203,
11250,
14,
22564,
271,
81,
13,
926,
69,
1600,
366,
22564,
571,
88,
4879,
4943,
198,
1102,
1851,
7203,
11250,
14,
22564,
320,
81,
13,
926,
69,
1600,
366,
22564,... | 2.244681 | 94 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 25 15:32:42 2018
@author: benmo
"""
import pandas as pd, numpy as np
from .functions import *
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
7031,
2447,
1679,
1315,
25,
2624,
25,
3682,
2864,
198,
198,
31,
9800,
25,
1888,
5908,
198... | 2.030928 | 97 |
import poly
| [
11748,
7514,
201
] | 4 | 3 |
from src.dataset import CatDogDataset
from src.utils import *
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
import os
_CUDA_FLAG = torch.cuda.is_available()
| [
6738,
12351,
13,
19608,
292,
316,
1330,
5181,
32942,
27354,
292,
316,
198,
6738,
12351,
13,
26791,
1330,
1635,
198,
6738,
28034,
13,
26791,
13,
7890,
1330,
6060,
17401,
198,
6738,
28034,
13,
26791,
13,
83,
22854,
3526,
1330,
21293,
3437... | 3.258065 | 62 |
from contextvars import ContextVar
current_user_role = ContextVar('role', default=None)
list_of_roles = ContextVar('roles', default=None)
current_user = ContextVar('users_ID', default=None)
headers = ContextVar('headers', default={})
nrpayload = ContextVar('nrpayload', default=None)
tid = ContextVar('tid', default=None)
auth_token = ContextVar('auth', default=None)
current_request = ContextVar('request', default=None)
broker_instance = ContextVar('broker_instance')
background_tasks = ContextVar('background_tasks', default=[])
register_actors = ContextVar("register_actors", default=None)
| [
6738,
4732,
85,
945,
1330,
30532,
19852,
628,
198,
14421,
62,
7220,
62,
18090,
796,
30532,
19852,
10786,
18090,
3256,
4277,
28,
14202,
8,
198,
4868,
62,
1659,
62,
305,
829,
796,
30532,
19852,
10786,
305,
829,
3256,
4277,
28,
14202,
8,... | 3.367232 | 177 |