blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0381b6ca3f91ecc7c5eed7c10575c27b6d90c3c1 | 5ee8694a7c061bb2faf09d61545f8a9d070d7a41 | /main.py | 0607632e12781883c4b97ee56793b32c4bda7c16 | [] | no_license | Daniil-Aleshechkin/primes-seperation | 4d3b8825e2d9e10aca1cf3552a6d5643ef359582 | 510ef141cac3c755201f77224e53cdace259fcac | refs/heads/master | 2020-04-25T17:13:49.850447 | 2019-03-01T05:05:16 | 2019-03-01T05:05:16 | 172,939,906 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,963 | py | #Daniil Alesheckin
#CS 20
#Primes
#11/22/17
#
# Interactive script. Phase 1 finds every prime up to a user-supplied bound
# by trial division; phase 2 groups the primes into chains separated by a
# fixed even gap (gap 6 -> "sexy primes") and reports the longest chain.
import time
import math

primes = [2]        # every prime found so far; 2 is seeded by hand
sexyPrimes = []     # all chains longer than the user-chosen minimum
primeAmount = 0
highestN = 0
highestList = []    # longest chain seen; initialised so the final print
                    # cannot raise NameError when no chain is ever found

pAmount = (int(input("What is the prime range?")))
seperationAmount = int(input("How much do you want the seperation amount?(Note that all primes are seperated by an even amount (appart from 2 and 3))"))
pLength = int(input("How long do you want the minimal lenght to be"))

print("Finding primes...")
print(2)
# Phase 1: trial division by the primes found so far, stopping at sqrt(n).
for n in range(3, pAmount + 1):
    for p in primes:
        if math.sqrt(n) >= p:
            # BUG FIX: the original compared with a strict '>' here, which
            # skipped the divisibility test when p == sqrt(n); squares of
            # primes (4, 9, 25, ...) were therefore misclassified as prime.
            if n % p == 0:
                break           # n is composite
        else:
            # No prime <= sqrt(n) divides n, so n is prime.
            primes.append(n)
            print(n)
            break
primeAmount = len(primes)

print("Finding the primes seperated by {}".format(seperationAmount))
# Phase 2: starting from each prime, greedily extend a chain of primes
# spaced `seperationAmount` apart.
# NOTE(review): this loop deletes from `primes` while iterating over it and
# adjusts pPos by `pos - 1`; the bookkeeping is preserved verbatim from the
# original but looks fragile — verify that no chains are skipped.
for p in primes:
    worthContinue = True
    n = 0               # number of links found beyond the seed prime
    print()
    sexyPrime = [p]     # the chain being built, seeded with p
    print("Checking {}".format(p))
    pPos = primes.index(p)
    while worthContinue == True:
        n += 1
        # Look a few positions ahead in the prime list for the next link.
        for pos in range(1, seperationAmount // 2 + 1):
            if pPos + pos < primeAmount:
                prime = primes[pPos + pos]
                if p + seperationAmount * n == prime:
                    sexyPrime.append(prime)
                    # Consume the chained prime so it does not also start
                    # its own shorter chain later.
                    del primes[pPos + pos]
                    primeAmount -= 1
                    worthContinue = True
                    pPos += pos - 1
                    break
                else:
                    worthContinue = False
            else:
                # Ran past the end of the prime list.
                worthContinue = False
                break
    if n > pLength - 1:
        sexyPrimes.append(sexyPrime)
        if n > highestN - 1:
            highestN = n
            highestList = sexyPrime

print("Look at them...")
for prime in sexyPrimes:
    time.sleep(0.1)
    print(prime)
print("The highest pair was a {} pair".format(highestN))
print("The smallest {} pair was : {}".format(highestN, highestList))
input() | [
"aleshechkin.daniil@gmail.com"
] | aleshechkin.daniil@gmail.com |
2aab26b76616c1c84f70b34d59687e38c4c9f5f4 | cd2d3dfc84bfa2e2bbcf76e634f9c6ac8383193e | /Energy/Ethylene/data_helper.py | 3b970d4cdcf03e32fb60a787d03829cfa22768be | [] | no_license | coderGray1296/deeplearning | c02dbfc79fe996f8013891d15ae15e533fe14417 | d71dd8e24294fcb320931eaa86c1cf2578dd10a5 | refs/heads/master | 2021-07-02T20:27:00.218417 | 2020-11-12T05:25:12 | 2020-11-12T05:25:12 | 154,139,426 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,922 | py | import pandas as pd
import numpy as np
#只运行一次
def save_normalized_data(path):
    """Load the raw dataset at *path*, min-max scale every column, and
    write the result to 'normalized.txt' in the current directory.

    Intended to be run only once per dataset.
    """
    raw = np.loadtxt(path)
    scaled = normalize(raw)
    np.savetxt('normalized.txt', scaled)
#只运行一次
def normalize(data):
    """Min-max scale each column of *data* into [0, 1], row by row, in place.

    Args:
        data: 2-D array-like (ndarray or list of rows); every column is
            rescaled by (x - col_min) / (col_max - col_min).

    Returns:
        The same *data* object with each row replaced by its scaled values.

    NOTE(review): a constant column yields a zero denominator and produces
    NaN/inf, exactly as the original did — confirm inputs never have one.
    """
    # Idiom fix: the original located argmin/argmax indices per column and
    # then re-indexed to fetch the values; np.min/np.max with axis=0 gives
    # the column extrema directly (and also accepts plain nested lists).
    min_value = np.min(data, axis=0)
    max_value = np.max(data, axis=0)
    # Keep the original per-row in-place assignment so callers that rely on
    # mutation of the passed-in object still see the scaled values.
    for i in range(len(data)):
        data[i] = (data[i] - min_value) / (max_value - min_value)
    return data
#为train.py所调用
#加载train或者test数据,返回可用的X, y型
def load_data(path):
    """Load a whitespace-delimited dataset and split it into features/targets.

    The first 10 columns of each row form the feature matrix X; the
    remaining columns form the target matrix y.

    Args:
        path: file readable by np.loadtxt.

    Returns:
        (X, y): two independent ndarrays of shape (n_rows, 10) and
        (n_rows, n_cols - 10).
    """
    # atleast_2d is a backward-compatible generalisation: a one-row file
    # used to crash (1-D array is not column-sliceable); it now works.
    data = np.atleast_2d(np.loadtxt(path))
    # Idiom fix: slice with numpy instead of rebuilding rows in a Python
    # loop over tolist(); .copy() keeps the returned arrays independent of
    # each other, as the original list-built arrays were.
    X = data[:, :10].copy()
    y = data[:, 10:].copy()
    return X, y
#读取数据,分离train和test
def split(test_sample_percentage, path):
    """Shuffle the dataset at *path* and write train/test splits.

    The last *test_sample_percentage* fraction of the shuffled rows becomes
    the test set; outputs go to the fixed ../data/UCI/ locations.
    """
    dataset = np.loadtxt(path)
    shuffled = np.random.permutation(dataset)
    # Negative index marking where the test portion begins.
    cut = -1 * int(test_sample_percentage * float(len(shuffled)))
    train_part = shuffled[:cut]
    test_part = shuffled[cut:]
    np.savetxt('../data/UCI/train_airoil_new.txt', train_part)
    np.savetxt('../data/UCI/test_airoil_new.txt', test_part)
#generator batch data with shuffled
def batch_iter(data, batch_size, num_epochs, shuffle=True):
    """Yield successive mini-batches of *data* for *num_epochs* epochs.

    When *shuffle* is true the rows are re-permuted at the start of every
    epoch; otherwise they are served in their original order.
    """
    data = np.array(data)
    total = len(data)
    # Same rounding as the original expression: ceil(total / batch_size).
    batches_per_epoch = int((total - 1) / batch_size) + 1
    for _ in range(num_epochs):
        epoch_data = np.random.permutation(data) if shuffle else data
        for batch_index in range(batches_per_epoch):
            start = batch_index * batch_size
            stop = min(start + batch_size, total)
            yield epoch_data[start:stop]
#split(0.2, 'normalized.txt')
#_, y = load_data('normalized.txt')
# 交叉特征
def save_featurecrosses_data(path):
    """Build pairwise feature crosses from the dataset at *path*.

    Every row is replaced by the products of all pairs of its feature
    columns (all columns except the last), followed by the untouched last
    column (the target). The crossed data is min-max normalised and saved
    to the fixed ../data/UCI/airoil_new.txt location.
    """
    data = np.loadtxt(path)
    data_new = []
    for row in data:
        n = len(row)
        # Products of every pair (j, k) with j < k over the feature columns.
        crossed = [row[j] * row[k]
                   for j in range(n - 2)
                   for k in range(j + 1, n - 1)]
        crossed.append(row[-1])
        data_new.append(crossed)
    data_new = normalize(data_new)
    data_new = np.array(data_new)
    np.savetxt('../data/UCI/airoil_new.txt', data_new)
#获取输出列的最大值和最小值,求逆归一化
def get_min_max(path, col=5):
    """Return (min, max) of one column of the dataset, for inverse
    min-max normalisation.

    Args:
        path: file readable by np.loadtxt.
        col: column index to scan; defaults to 5, the original hard-coded
            output column (backward-compatible generalisation).

    Returns:
        (min_value, max_value) of the selected column.
    """
    data = np.loadtxt(path)
    y = data[:, col]
    # Idiom fix: the original fetched argmin/argmax indices and re-indexed;
    # .min()/.max() return the extreme values directly.
    return y.min(), y.max()
| [
"m13070193321@163.com"
] | m13070193321@163.com |
a6c2fe85bcda169df8cbcc1b2d592796879a151e | 54039618089d1ede57672a259fa3d096440684fb | /0002.py | 54cbad5dff25937bf671f0cabd73fa036eb9cffb | [] | no_license | dallas1217/show-me-the-code | 6d7f2d126684ff1922629c5d878c5e576a9879f6 | 4c181b4cf592d79ae80ebd1ec2d4100917181c23 | refs/heads/master | 2020-07-13T05:17:52.814066 | 2017-07-25T13:31:22 | 2017-07-25T13:31:22 | 94,291,281 | 0 | 0 | null | 2017-06-14T05:07:34 | 2017-06-14T05:07:34 | null | UTF-8 | Python | false | false | 890 | py | # -*- coding:utf-8 -*-
import random
import pymysql
def generate(count, length):
    """Return *count* coupon codes, each a string of *length* random digits.

    Digits are drawn uniformly from 1-9 (0 is never used, matching the
    original randint(1, 9) behaviour).

    NOTE(review): codes are not guaranteed unique; collisions will violate
    a primary-key constraint downstream — confirm whether that matters.
    """
    # Idiom fix: str.join over a generator replaces the quadratic
    # string-concatenation loop of the original.
    return [''.join(str(random.randint(1, 9)) for _ in range(length))
            for _ in range(count)]
def AddtoDB(coupon_list):
    """Insert every coupon code in *coupon_list* into the local MySQL
    'coupon' database, commit, then print all stored rows.

    Args:
        coupon_list: iterable of coupon-code strings.
    """
    db_ip = 'localhost'
    db_port = 3306              # unused, kept as connection documentation
    db_sock = '/tmp/mysql.sock' # unused, kept as connection documentation
    db_user = 'coupon'
    db_pass = 'coupon'
    db_use = 'coupon'
    db = pymysql.connect(db_ip, db_user, db_pass, db_use)
    try:
        cursor = db.cursor()
        # SECURITY/ROBUSTNESS FIX: parameterized query instead of string
        # concatenation (the original interpolated the value directly into
        # the SQL text).
        for coupon_no in coupon_list:
            cursor.execute('INSERT INTO coupon (ID) VALUES (%s)', (coupon_no,))
        # Use the driver's commit instead of executing a literal COMMIT.
        db.commit()
        cursor.execute('SELECT * FROM coupon;')
        for data in cursor.fetchall():
            print(data)
    finally:
        # RESOURCE FIX: the connection is now closed even if a query fails.
        db.close()
if __name__ == '__main__':
    # Demo: generate ten 10-digit coupon codes and persist them to MySQL.
    coupon_list = generate(10,10)
    AddtoDB(coupon_list)
| [
"noreply@github.com"
] | dallas1217.noreply@github.com |
ef9c94b0d3d2f573d013e2f75d8d1b0588fd7528 | 92b76f479f3ec01bcf484fd8f7940f440019110e | /docs/conf.py | bb4ba7f79cd115c870739cb3ca8925d3e97a139e | [
"MIT"
] | permissive | tommikaikkonen/peprint | a3d6bb40303ed93ce39e644895a75c21c7e9afe5 | 7248ae6f92f1b05b2c9089ce69280120ad4fcd69 | refs/heads/master | 2021-08-22T13:14:53.414779 | 2017-11-30T08:14:47 | 2017-11-30T08:14:47 | 108,084,816 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,401 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# peprint documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
# NOTE(review): this assumes Sphinx is always invoked from the docs/
# directory — os.getcwd() is the build directory, not this file's dir.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import peprint
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'peprint'
# Shadows the builtin `copyright` on purpose — Sphinx reads this name.
copyright = u"2017, Tommi Kaikkonen"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = peprint.__version__
# The full version, including alpha/beta/rc tags.
release = peprint.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']  # '_build' holds generated output, never sources
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'  # syntax-highlighting palette for literal code blocks
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'  # built-in Sphinx theme
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']  # copied verbatim into the HTML build output
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'peprintdoc'  # base name of the HTML Help builder's output
# -- Options for LaTeX output ------------------------------------------
latex_elements = {  # LaTeX-builder overrides; empty means Sphinx defaults
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'peprint.tex',
     u'peprint Documentation',
     u'Tommi Kaikkonen', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [  # one man page, section 1, built from the master document
    ('index', 'peprint',
     u'peprint Documentation',
     [u'Tommi Kaikkonen'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'peprint',
     u'peprint Documentation',
     u'Tommi Kaikkonen',
     'peprint',
     'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| [
"tommi.kaikkonen@aalto.fi"
] | tommi.kaikkonen@aalto.fi |
78878fa36c3062d1b5ba250e093a917f3bfea9fd | 526901d470c645a89421164a0cfcef2f256588a8 | /lab_4.py | 83532964551f9f63d8f902b0d13bb247db4da19f | [] | no_license | omer19-meet/yl1201718 | 52bd60d43acf4de8436df7c7a3cce73e2e8cba3e | d3298f8ab40e2ad06a79531833bb663655e056b3 | refs/heads/master | 2021-05-07T17:38:35.859083 | 2018-02-04T17:06:59 | 2018-02-04T17:06:59 | 108,751,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,051 | py | class animal(object):
"""docstring for animal"""
def __init__(self, name,sound, age, favorit_color):
super(animal, self).__init__()
self.sound = sound
self.name = name
self.age = age
self.favorit_color = favorit_color
def eat(self, food):
print("yummyyy !! "+ self.name + " is eating" + food )
def make_s(self,a):
print(self.sound*a)
class person(object):
    """A person with basic biographical attributes and a self-introduction."""
    def __init__(self, name, age, gender, home_add, status, religion):
        self.name = name
        self.age = age
        self.gender = gender
        self.home_add = home_add
        self.status = status
        self.religion = religion
    def present(self):
        """Print a one-line self-introduction.

        BUG FIX: the original greeting misspelled "Hello" ("Hellow") and,
        because of two adjacent string literals, printed "yrs old,i'm a"
        with no separator; the message is now assembled with str.format.
        """
        print("Hello, my name is {}, I am {} yrs old, I'm a {}, "
              "I live in {}, I'm {} and {}".format(
                  self.name, self.age, self.gender,
                  self.home_add, self.status, self.religion))
frog = animal("frog"," Quack!", 6, "green")
#frog.eat(" fly")
#frog.make_s(10)
moshe = person("Moshe Rabeno", 2 , "male", "Egypt land", "slave", "Jowish")
#moshe.present()
shelly = person("shelly :(", 7,"annoing", "Givaat Zeev", "Shtohah", "zona")
shelly.present()
| [
"laith19@meet.mit.edu"
] | laith19@meet.mit.edu |
e16b8c9808ebc38687cf672a338a6f901cd42936 | 9f1039075cc611198a988034429afed6ec6d7408 | /tensorflow-stubs/contrib/framework/python/framework/checkpoint_utils.pyi | e6e501dad7d3fcdf5bcb59bd42acdb9afc2b5d9c | [] | no_license | matangover/tensorflow-stubs | 9422fbb1cb3a3638958d621461291c315f9c6ec2 | 664bd995ef24f05ba2b3867d979d23ee845cb652 | refs/heads/master | 2020-05-23T12:03:40.996675 | 2019-05-15T06:21:43 | 2019-05-15T06:21:43 | 186,748,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | pyi | # Stubs for tensorflow.contrib.framework.python.framework.checkpoint_utils (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from tensorflow.python.ops import io_ops as io_ops, state_ops as state_ops, variables as variables
from tensorflow.python.platform import gfile as gfile
from tensorflow.python.training import checkpoint_management as checkpoint_management
from typing import Any as Any
# NOTE: stubgen leaves every parameter and return type as Any; the real
# signatures live in
# tensorflow.contrib.framework.python.framework.checkpoint_utils.
def load_checkpoint(filepattern: Any): ...
def load_variable(checkpoint_dir: Any, name: Any): ...
def list_variables(checkpoint_dir: Any): ...
def init_from_checkpoint(checkpoint_dir: Any, assignment_map: Any): ...
| [
"matangover@gmail.com"
] | matangover@gmail.com |
6c9d1ddc37cefebad3cdb55e8c70e69c1b18049d | a9c6f27ff809544e0e71a100efa631979acecf0a | /analysis1/scripts/gen_NET_matrix.py | cb1734961b868963e6afc4abfe6573e361caaf02 | [] | no_license | nih-fmrif/HCP_CCA_Analysis-1 | 350eaa6083a70328dfc5b93bd17d3e7cdd10fb2b | 0aa11e4d384c9e077b6a1eb802cc74fb369f3c8b | refs/heads/master | 2021-06-21T12:11:55.898020 | 2021-04-30T18:03:41 | 2021-04-30T18:03:41 | 208,481,320 | 1 | 2 | null | 2019-09-14T17:59:25 | 2019-09-14T17:59:25 | null | UTF-8 | Python | false | false | 1,111 | py | #!/usr/bin/python3
# usage: python3 gen_matrixN1.py <path to .txt files with partial parcellations>
# Note, the output file will go same location as the original .txt files with the specified name
import numpy as np
from numpy import genfromtxt
import os
import sys
# Collect one flattened lower-triangle per correlation-matrix file and stack
# them into a subjects-by-edges matrix.
file_path = sys.argv[1]  # folder containing the .txt matrix files
arr_size = sys.argv[2]   # number of ICA components (ex. 200)
out_name = sys.argv[3]   # output filename (ex. HCP_1200_NET.txt)

n_components = int(arr_size)  # parse once instead of once per file
myList = []
for filename in os.listdir(file_path):
    if not filename.endswith(".txt"):
        continue
    arr = genfromtxt(os.path.join(file_path, filename), delimiter=',')
    # BUG FIX: the original test was
    #   arr.shape[0]==int(arr_size) & arr.shape[1]==int(arr_size)
    # where '&' binds tighter than '==', so it actually evaluated
    # shape[0] == (n & shape[1]) == n and could accept wrongly sized
    # matrices (e.g. shape (200, 216): 200 & 216 == 200). Comparing the
    # shape tuple directly also avoids an IndexError on 1-D files.
    if arr.shape == (n_components, n_components):
        # Strictly-lower-triangle entries; NOTE(review): entries that are
        # exactly 0 are dropped by this mask, which would yield ragged rows
        # — confirm correlations are never exactly zero.
        flat_lower_tri = arr[np.tril(arr, -1) != 0]
        myList.append(flat_lower_tri)
    else:
        print("ERROR: Incorrect array dimensions in file ", filename)

matrix = np.array(myList)
print("Resulting matrix shape:", matrix.shape)
np.savetxt(fname=out_name, X=matrix, delimiter=',')
"nikhil.r.goyal@gmail.com"
] | nikhil.r.goyal@gmail.com |
7ec5d020d2d00517b90ce2eb19d3102236d8e5e8 | 0648a0db36fa79040f288b49269140971872aa6e | /Python/bcd-segundo/venv/bin/pip3.7 | 435902b7bf2dd52876ff7282bcede419e279db82 | [] | no_license | mftutui/BCD29008 | 1cffb0546d0b9197fcc1760013e1ec3b9eb31c8a | 7bb7222446a2c4cf94291b03647c1ce35bd784ee | refs/heads/master | 2020-04-23T17:37:08.878265 | 2019-10-11T20:24:10 | 2019-10-11T20:24:10 | 171,337,819 | 0 | 2 | null | 2019-10-11T20:24:11 | 2019-02-18T18:49:03 | Python | UTF-8 | Python | false | false | 411 | 7 | #!/Users/tutui/PycharmProjects/bcd-segundo/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'
# Auto-generated setuptools console-script wrapper: resolves the 'pip3.7'
# entry point of pip 19.0.3 and hands control to it. Not meant for editing.
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip a trailing '-script.py'/'.exe' suffix so pip sees a clean name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
    )
| [
"tutui@Marias-MacBook-Air.local"
] | tutui@Marias-MacBook-Air.local |
07f2f3f93e05a13cc54b9a395bdec240790bfdf0 | fe2c714f9fc870bcdfc08f2fe28d3437d38de684 | /travels/migrations/0011_auto__add_field_itinerary_title_sq__add_field_itinerary_description_sq.py | c1af6828d23011a2e93dd191999e7cd9d9d8f2d1 | [] | no_license | UNICEF-Youth-Section/Locast-Web-Rio | 469ff3634688821ebd55da141d631edfa470cfc6 | 49bcad92407f73fbb97fa275d6605da75e3222a6 | refs/heads/master | 2020-12-29T00:55:23.237540 | 2016-08-26T19:04:26 | 2016-08-26T19:04:26 | 3,157,643 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,538 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add nullable Albanian ('sq') translation columns to the
        Itinerary, Cast, Settings and Event tables."""
        # Adding field 'Itinerary.title_sq'
        db.add_column(u'travels_itinerary', 'title_sq', self.gf('django.db.models.fields.CharField')(max_length=160, null=True, blank=True), keep_default=False)
        # Adding field 'Itinerary.description_sq'
        db.add_column(u'travels_itinerary', 'description_sq', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)
        # Adding field 'Cast.title_sq'
        db.add_column(u'travels_cast', 'title_sq', self.gf('django.db.models.fields.CharField')(max_length=160, null=True, blank=True), keep_default=False)
        # Adding field 'Cast.description_sq'
        db.add_column(u'travels_cast', 'description_sq', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)
        # Adding field 'Settings.project_title_sq'
        db.add_column(u'travels_settings', 'project_title_sq', self.gf('django.db.models.fields.CharField')(default="Welcome to UNICEF's Youth Led Digital Mapping", max_length=256, null=True, blank=True), keep_default=False)
        # Adding field 'Settings.project_description_sq'
        db.add_column(u'travels_settings', 'project_description_sq', self.gf('django.db.models.fields.TextField')(default='This project explores tools to help youth build impactful, communicative digital maps using mobile and web technologies. A phone application allows youth to produce a realtime portrait of their community through geo-located photos and videos, organized in thematic maps.', null=True, blank=True), keep_default=False)
        # Adding field 'Settings.window_title_sq'
        db.add_column(u'travels_settings', 'window_title_sq', self.gf('django.db.models.fields.CharField')(default="UNICEF's Youth Led Digital Mapping", max_length=256, null=True, blank=True), keep_default=False)
        # Adding field 'Event.title_sq'
        db.add_column(u'travels_event', 'title_sq', self.gf('django.db.models.fields.CharField')(max_length=160, null=True, blank=True), keep_default=False)
        # Adding field 'Event.description_sq'
        db.add_column(u'travels_event', 'description_sq', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
    """Drop every Albanian ('sq') translation column added by forwards()."""
    # (table, column) pairs, removed in the same order forwards() added them.
    sq_columns = (
        (u'travels_itinerary', 'title_sq'),
        (u'travels_itinerary', 'description_sq'),
        (u'travels_cast', 'title_sq'),
        (u'travels_cast', 'description_sq'),
        (u'travels_settings', 'project_title_sq'),
        (u'travels_settings', 'project_description_sq'),
        (u'travels_settings', 'window_title_sq'),
        (u'travels_event', 'title_sq'),
        (u'travels_event', 'description_sq'),
    )
    for table, column in sq_columns:
        db.delete_column(table, column)
# Frozen ORM snapshot: South uses this machine-generated dict to reconstruct
# the model state at this point in migration history. Do not edit by hand —
# regenerate with `schemamigration` instead.
models = {
# --- django.contrib.auth / contenttypes models (read-only context) ---
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
# NOTE: the datetime defaults below are freeze-time artifacts of when the
# migration was generated; South ignores their exact values.
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 20, 16, 3, 57, 258667)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 20, 16, 3, 57, 258292)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
# --- travels app models (the app this migration belongs to) ---
u'travels.boundry': {
'Meta': {'object_name': 'Boundry'},
'bounds': ('django.contrib.gis.db.models.fields.PolygonField', [], {}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '160'})
},
u'travels.cast': {
'Meta': {'object_name': 'Cast'},
'attempts': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.TravelsUser']"}),
'cell_image': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'cell_revision': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'cell_timestamp': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_es': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_pt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_sq': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'favorite_cast'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['travels.TravelsUser']"}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'post_to_facebook': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_to_twitter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'privacy': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tag_cast'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['travels.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'title_en': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_es': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_fr': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_pt': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_sq': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'})
},
u'travels.comment': {
'Meta': {'object_name': 'Comment'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.TravelsUser']"}),
'body': ('django.db.models.fields.TextField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'travels.event': {
'Meta': {'object_name': 'Event'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.TravelsUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_es': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_pt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_sq': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tag_event'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['travels.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'title_en': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_es': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_fr': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_pt': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_sq': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'})
},
u'travels.flag': {
'Meta': {'object_name': 'Flag'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.TravelsUser']"})
},
u'travels.imagemedia': {
'Meta': {'object_name': 'ImageMedia', '_ormbases': [u'travels.Media']},
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
u'media_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['travels.Media']", 'unique': 'True', 'primary_key': 'True'})
},
u'travels.itinerary': {
'Meta': {'object_name': 'Itinerary'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.TravelsUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_es': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_pt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_sq': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'favorite_itinerary'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['travels.TravelsUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'path': ('django.contrib.gis.db.models.fields.LineStringField', [], {'null': 'True', 'blank': 'True'}),
'preview_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'related_casts': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['travels.Cast']", 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tag_itinerary'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['travels.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'title_en': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_es': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_fr': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_pt': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'title_sq': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'})
},
u'travels.linkedmedia': {
'Meta': {'object_name': 'LinkedMedia', '_ormbases': [u'travels.Media']},
'content_provider': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
u'media_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['travels.Media']", 'unique': 'True', 'primary_key': 'True'}),
'screenshot': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'video_id': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
u'travels.media': {
'Meta': {'object_name': 'Media'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.TravelsUser']"}),
'cast': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.Cast']", 'null': 'True', 'blank': 'True'}),
'content_state': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'content_type_model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '90'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '90', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '160'})
},
u'travels.settings': {
'Meta': {'object_name': 'Settings'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'project_description': ('django.db.models.fields.TextField', [], {'default': "'This project explores tools to help youth build impactful, communicative digital maps using mobile and web technologies. A phone application allows youth to produce a realtime portrait of their community through geo-located photos and videos, organized in thematic maps.'", 'blank': 'True'}),
'project_description_en': ('django.db.models.fields.TextField', [], {'default': "'This project explores tools to help youth build impactful, communicative digital maps using mobile and web technologies. A phone application allows youth to produce a realtime portrait of their community through geo-located photos and videos, organized in thematic maps.'", 'null': 'True', 'blank': 'True'}),
'project_description_es': ('django.db.models.fields.TextField', [], {'default': "'This project explores tools to help youth build impactful, communicative digital maps using mobile and web technologies. A phone application allows youth to produce a realtime portrait of their community through geo-located photos and videos, organized in thematic maps.'", 'null': 'True', 'blank': 'True'}),
'project_description_fr': ('django.db.models.fields.TextField', [], {'default': "'This project explores tools to help youth build impactful, communicative digital maps using mobile and web technologies. A phone application allows youth to produce a realtime portrait of their community through geo-located photos and videos, organized in thematic maps.'", 'null': 'True', 'blank': 'True'}),
'project_description_pt': ('django.db.models.fields.TextField', [], {'default': "'This project explores tools to help youth build impactful, communicative digital maps using mobile and web technologies. A phone application allows youth to produce a realtime portrait of their community through geo-located photos and videos, organized in thematic maps.'", 'null': 'True', 'blank': 'True'}),
'project_description_sq': ('django.db.models.fields.TextField', [], {'default': "'This project explores tools to help youth build impactful, communicative digital maps using mobile and web technologies. A phone application allows youth to produce a realtime portrait of their community through geo-located photos and videos, organized in thematic maps.'", 'null': 'True', 'blank': 'True'}),
'project_title': ('django.db.models.fields.CharField', [], {'default': '"Welcome to UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'blank': 'True'}),
'project_title_en': ('django.db.models.fields.CharField', [], {'default': '"Welcome to UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'project_title_es': ('django.db.models.fields.CharField', [], {'default': '"Welcome to UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'project_title_fr': ('django.db.models.fields.CharField', [], {'default': '"Welcome to UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'project_title_pt': ('django.db.models.fields.CharField', [], {'default': '"Welcome to UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'project_title_sq': ('django.db.models.fields.CharField', [], {'default': '"Welcome to UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'window_title': ('django.db.models.fields.CharField', [], {'default': '"UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'blank': 'True'}),
'window_title_en': ('django.db.models.fields.CharField', [], {'default': '"UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'window_title_es': ('django.db.models.fields.CharField', [], {'default': '"UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'window_title_fr': ('django.db.models.fields.CharField', [], {'default': '"UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'window_title_pt': ('django.db.models.fields.CharField', [], {'default': '"UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'window_title_sq': ('django.db.models.fields.CharField', [], {'default': '"UNICEF\'s Youth Led Digital Mapping"', 'max_length': '256', 'null': 'True', 'blank': 'True'})
},
u'travels.tag': {
'Meta': {'object_name': 'Tag'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'primary_key': 'True'}),
'system_tag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'urgency_score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'travels.travelsuser': {
'Meta': {'object_name': 'TravelsUser'},
'can_post_to_social_networks': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'hometown': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '90'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'personal_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
u'travels.useractivity': {
'Meta': {'object_name': 'UserActivity'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['travels.TravelsUser']"})
},
u'travels.videomedia': {
'Meta': {'object_name': 'VideoMedia', '_ormbases': [u'travels.Media']},
'animated_preview': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'compressed_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'duration': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
u'media_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['travels.Media']", 'unique': 'True', 'primary_key': 'True'}),
'screenshot': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'web_stream_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'})
}
}
# Apps whose frozen model definitions above are complete.
complete_apps = ['travels']
| [
"mgarcia@manas.com.ar"
] | mgarcia@manas.com.ar |
0478b50dada5ebde6ba5cc4ed593631ad0a7f72d | 00ee6a3c859362bbc20342c568a27ea2a493e427 | /src/x007007007/djapp/raspberry/net/models/__init__.py | df1ebdbcd68a29e15c124791e2bb900e5e47a5bf | [
"MIT"
] | permissive | x007007007/raspberrypi | 7721b1fde2763fd28db579ca65217b81ee2193ae | 9dfe49666c029b8bb617830a5c5a873a6106d853 | refs/heads/master | 2022-10-04T04:51:29.974216 | 2022-10-03T16:36:00 | 2022-10-03T16:36:00 | 56,951,270 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19 | py | from .. import hook | [
"x007007007@hotmail.com"
] | x007007007@hotmail.com |
bfa8ed6d4a65d26df827c9c4ebd4a6ef42c6b687 | 64f4ffb54225d225e693575992fd5336ed54d241 | /entities/form.py | a2975f30033f6e978aba8496e8f8d00e076c5396 | [] | no_license | bitmarc/Recomendautos_Server | 3929d48a796d01f639522aed63da39b97b9fb5e2 | 4db5ca2f07d97d2f025f5879b4a25553c2eb2e72 | refs/heads/main | 2023-03-23T17:17:39.772977 | 2021-03-04T03:49:20 | 2021-03-04T03:49:20 | 309,015,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | '''
Clase que modela la entidad "Formulario"
'''
class Form:
    """Models the "Form" (Formulario) entity: an id plus a list of questions.

    Each element of the question list is expected to expose a
    ``getQuestion()`` method returning a serializable representation.
    """

    def __init__(self, id, arrQuestions):
        # BUG FIX: the original constructor ignored ``id`` and always stored 0.
        self.__id = id
        self.__qArr = arrQuestions

    def getId(self):
        """Return the form's identifier."""
        return self.__id

    def setId(self, id):
        """Replace the form's identifier."""
        self.__id = id

    def addQuestioArray(self, qArr):
        """Replace the question list (misspelled name kept for caller compatibility)."""
        self.__qArr = qArr

    def getForm(self):
        """Serialize the form as ``{"id": ..., "questions": [...]}``."""
        questions = [question.getQuestion() for question in self.__qArr]
        return {"id": self.__id, "questions": questions}
"marcoarojas.95@gmail.com"
] | marcoarojas.95@gmail.com |
ee2780eb61e3a79e4c042efa07cf41ba57306906 | e0a366c51612ac7a11e76e237bd253c78e9865da | /ambari-server/src/main/resources/stacks/ADH/1.4/services/NIFI/package/scripts/master.py | bf082fd22fd9f478b1f890922af6013354180f12 | [
"GPL-2.0-only",
"OFL-1.1",
"Apache-2.0",
"GPL-2.0-or-later",
"BSD-2-Clause",
"GPL-1.0-or-later",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"MIT",
"Python-2.0",
"MS-PL",
"AFL-2.1"
] | permissive | kuhella/ambari | c6aeb4b1129c8ee0d730b3094cdabda8881ab07c | 9396c17b0305665d31d7a4f4525be857958b5d4c | refs/heads/branch-adh-1.5 | 2020-03-16T04:14:10.914286 | 2018-05-05T14:34:31 | 2018-05-05T14:34:31 | 132,506,573 | 0 | 0 | Apache-2.0 | 2018-05-11T08:51:17 | 2018-05-07T19:22:32 | Java | UTF-8 | Python | false | false | 5,178 | py | # encoding=utf8
import sys, os, pwd, grp, signal, time, glob
from resource_management import *
from subprocess import call
from resource_management.core.logger import Logger
reload(sys)
sys.setdefaultencoding('utf8')
class Master(Script):
def install(self, env):
import params
import status_params
self.install_packages(env)
#e.g. /var/lib/ambari-agent/cache/stacks/HDP/2.3/services/NIFI/package
service_packagedir = os.path.realpath(__file__).split('/scripts')[0]
#Execute('find '+service_packagedir+' -iname "*.sh" | xargs chmod +x')
#Create user and group if they don't exist
self.create_linux_user(params.nifi_user, params.nifi_group)
#create the log dir if it not already present - the below won't work on both Ambari 2.4 so re-writing
Directory([status_params.nifi_pid_dir, params.nifi_log_dir],
owner=params.nifi_user,
group=params.nifi_group
)
Execute('chown ' + params.nifi_user + ':' + params.nifi_group + ' -R ' + params.conf_dir)
Directory('/var/lib/nifi',
owner=params.nifi_user,
group=params.nifi_group,
)
Execute('touch ' + params.nifi_log_file, user=params.nifi_user)
#Fetch and unzip snapshot build, if no cached nifi tar package exists on Ambari server node
#Logger.info("Creating " + params.nifi_dir)
#Directory([params.nifi_dir],
# owner=params.nifi_user,
# group=params.nifi_group,
#)
#Execute('unzip '+params.temp_file+' -d ' + params.nifi_install_dir + ' >> ' + params.nifi_log_file, user=params.nifi_user)
#Execute('mv '+params.nifi_dir+'/*/*/* ' + params.nifi_dir, user=params.nifi_user)
self.configure(env, True)
self.configure(env, True)
def create_linux_user(self, user, group):
try: pwd.getpwnam(user)
except KeyError: Execute('adduser ' + user)
try: grp.getgrnam(group)
except KeyError: Execute('groupadd ' + group)
def configure(self, env, isInstall=False):
import params
import status_params
env.set_params(params)
env.set_params(status_params)
#write out nifi.properties
properties_content=InlineTemplate(params.nifi_properties_content)
File(format("{params.conf_dir}/nifi.properties"), content=properties_content, owner=params.nifi_user, group=params.nifi_group) # , mode=0777)
#write out flow.xml.gz only during install
if isInstall:
Execute('echo "First time setup so generating flow.xml.gz" >> ' + params.nifi_log_file)
flow_content=InlineTemplate(params.nifi_flow_content)
File(format("{params.conf_dir}/flow.xml"), content=flow_content, owner=params.nifi_user, group=params.nifi_group)
Execute(format("cd {params.conf_dir}; mv flow.xml.gz flow_$(date +%d-%m-%Y).xml.gz ;"),user=params.nifi_user,ignore_failures=True)
Execute(format("cd {params.conf_dir}; gzip flow.xml;"), user=params.nifi_user)
#write out boostrap.conf
bootstrap_content=InlineTemplate(params.nifi_boostrap_content)
File(format("{params.conf_dir}/bootstrap.conf"), content=bootstrap_content, owner=params.nifi_user, group=params.nifi_group)
#write out logback.xml
#logback_content=InlineTemplate(params.nifi_logback_content)
#File(format("{params.conf_dir}/logback.xml"), content=logback_content, owner=params.nifi_user, group=params.nifi_group)
#write out nifi-env in bin as 0755
env_content=InlineTemplate(params.nifi_env_content)
File(format("{params.bin_dir}/nifi-env.sh"), content=env_content, owner=params.nifi_user, group=params.nifi_group, mode=0755)
def stop(self, env):
import params
import status_params
Execute ('export JAVA_HOME='+params.jdk64_home+';'+params.bin_dir+'/nifi.sh stop >> ' + params.nifi_log_file, user=params.nifi_user)
if os.path.isfile(status_params.nifi_node_pid_file):
os.unlink(status_params.nifi_node_pid_file)
def start(self, env):
import params
import status_params
Directory('/var/lib/nifi',
owner=params.nifi_user,
group=params.nifi_group,
)
self.configure(env)
Execute('echo pid file ' + status_params.nifi_pid_file)
Execute('echo JAVA_HOME=' + params.jdk64_home)
Execute ('export JAVA_HOME='+params.jdk64_home+';'+params.bin_dir+'/nifi.sh start >> ' + params.nifi_log_file, user=params.nifi_user)
#If nifi pid file not created yet, wait a bit
if not os.path.isfile(status_params.nifi_pid_dir+'/nifi.pid'):
Execute ('sleep 5')
def status(self, env):
import status_params
check_process_status(status_params.nifi_node_pid_file)
def install_mvn_repo(self):
#for centos/RHEL 6/7 maven repo needs to be installed
distribution = platform.linux_distribution()[0].lower()
if distribution in ['centos', 'redhat'] and not os.path.exists('/etc/yum.repos.d/epel-apache-maven.repo'):
Execute('curl -o /etc/yum.repos.d/epel-apache-maven.repo https://repos.fedorapeople.org/repos/dchen/apache-maven/epel-apache-maven.repo')
# Ambari agent entry point: dispatch the requested lifecycle command.
if __name__ == "__main__":
    Master().execute()
| [
"pub@mnu.pp.ru"
] | pub@mnu.pp.ru |
0480ffaaef73482c67d9f5025722a4ae52554eb1 | 755b643dd8848c37907180fc78000c3b23fec9aa | /WeatherPy/api_keys.py | 0364342cfffa3edf6efddf7d3fe35911a269f0aa | [] | no_license | bakshishreya/Python-api-challenge | 0e17e3273d7106f07fd9924dfcceb96c79188b05 | 10a443f0cda2a5932ecf65b02c452735cd92c801 | refs/heads/main | 2023-02-17T21:57:21.489613 | 2021-01-20T03:12:58 | 2021-01-20T03:12:58 | 329,303,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 121 | py | # OpenWeatherMap API Key
# OpenWeatherMap API key used for the weather queries.
# SECURITY NOTE(review): a live-looking key is committed to source control
# here; it should be rotated and loaded from an environment variable instead.
weather_api_key = "4f4a0a6f08ac25fb25ebdf08b5babaac"
# Google API key (placeholder — must be filled in before the Google Maps calls work).
g_key = "YOUR KEY HERE!"
| [
"bakshishreya@gmail.com"
] | bakshishreya@gmail.com |
2ae44315fc12ca3c0910b24d73e13bd8c7d81662 | 03ad802be7a8658be0fe301f1bde9675dd7735e0 | /dicttranslate.py | b25eaec0e7234bf0c5ecfb7b1a9890c3d22f4e26 | [] | no_license | Sonnenlicht/BasicPython | 466544d8c4e8bf063ee4ae509c871645288c7e71 | bf36d60b101c1f2c11d09045bf125160f68cb149 | refs/heads/master | 2021-01-16T21:40:03.450477 | 2016-07-14T23:46:30 | 2016-07-14T23:46:30 | 63,377,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 423 | py | #! python3
# Spanish -> English word lookup table.
words = {'esta': 'is', 'la': 'the', 'en': 'in', 'gato': 'cat', 'casa': 'house', 'el': 'the'}


def translate(langkey):
    """Translate a space-separated Spanish phrase word by word.

    Words missing from the ``words`` table are silently dropped.  The result
    deliberately keeps the original output format: a leading space and a
    trailing space after each translated word.
    """
    # Direct dict membership/lookup replaces the original O(len(words) *
    # len(phrase)) nested scan over words.items() for every token.
    return ' ' + ''.join(words[token] + ' ' for token in langkey.split() if token in words)


spanish = 'el gato esta en la casa'
english = translate(spanish)
print(english)
| [
"bs.suriyhaprakhas@gmail.com"
] | bs.suriyhaprakhas@gmail.com |
7ca29d11f5fe4b126f86fa6f169394d22744ec3b | 0aa58b87f0e913c8edaf35352c7306d6e47bd158 | /app/blog/migrations/0006_remove_post_published_date.py | aa22564ca35ae456796f6f7429679d06d9d7d6be | [] | no_license | AlexUM97/prototipo_app | ed63ced021b1d8884c58b48edaf4bed21638b05f | 36f49095ee82636555669e178a9b79d0459c075e | refs/heads/master | 2021-07-24T20:50:46.193279 | 2019-07-10T10:05:22 | 2019-07-10T10:05:22 | 196,181,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 331 | py | # Generated by Django 2.2.2 on 2019-07-10 08:53
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drop the ``published_date`` field from Post."""

    # Must be applied after blog migration 0005.
    dependencies = [
        ('blog', '0005_auto_20190604_1939'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='post',
            name='published_date',
        ),
    ]
| [
"aumoreno97@hotmail.com"
] | aumoreno97@hotmail.com |
d901f7954c3bc5fa3b0c233f1138529a2c1a5b04 | 2fc0cde37f50917a93d9a3f180e7ec119e470408 | /confess/controllers/login.py | 6279ceb71e6dbb004b5e8dcf2212a37f771834f8 | [
"MIT"
] | permissive | dggsax/hush.mit.edu | dffcc89cfcd3e50c37988b8ac88f3c7ec9ca09da | e47c28c934dcfb94c52f6e12367869389e8ed7a8 | refs/heads/master | 2021-08-20T00:48:16.880620 | 2017-11-27T21:06:43 | 2017-11-27T21:06:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,333 | py | from confess import app
from confess.models import *
import confess.config as config
from confess.constants import *
from confess.models.user import *
from confess.utils import *
from oic import rndstr
from oic.utils.http_util import Redirect
from oic.oic import Client
from oic.utils.authn.client import CLIENT_AUTHN_METHOD
import requests
from requests.auth import HTTPBasicAuth
import json
import datetime
from flask import (
redirect,
render_template,
request,
url_for,
session
)
@app.route('/login')
def login_page():
# Compute redirect url
if 'redirect' in request.args:
redirect_url = config.DOMAIN+'/login?redirect=' + request.args['redirect']
else:
redirect_url = config.DOMAIN+'/login'
# Check if already logged in
if 'jwt' in request.cookies:
try:
id = decode_token(request.cookies['jwt'])
user = User.query.filter_by(id=id).first()
return redirect('/')
except Exception as e:
pass
client = Client(client_authn_method=CLIENT_AUTHN_METHOD)
error = ""
try:
if "code" in request.args and "state" in request.args and request.args["state"] == session["state"]:
r = requests.post('https://oidc.mit.edu/token', auth=HTTPBasicAuth(CLIENT_ID, CLIENT_SECRET),
data={"grant_type": "authorization_code",
"code": request.args["code"],
"redirect_uri": redirect_url})
auth_token = json.loads(r.text)["access_token"]
r = requests.get('https://oidc.mit.edu/userinfo', headers={"Authorization": "Bearer " + auth_token})
user_info = json.loads(r.text)
if "email" in user_info and user_info["email_verified"] == True and user_info["email"].endswith("@mit.edu"):
# Authenticated
email = user_info["email"]
name = user_info["name"]
user = User.query.filter_by(email=email).first()
if user is None:
# Initialize the user with a very old last_post time
user = User(email=email, name=name, last_post=datetime.datetime.min)
db.session.add(user)
db.session.commit()
token = encode_token(user)
response = app.make_response(redirect('/'))
if 'redirect' in request.args:
response = app.make_response(redirect(request.args['redirect']))
response.set_cookie('jwt', token, expires=datetime.datetime.now()+datetime.timedelta(days=90))
return response
else:
if not "email" in user_info:
error = "We need your email to work."
else:
error = "Invalid Login."
session["state"] = rndstr()
session["nonce"] = rndstr()
args = {
"client_id": CLIENT_ID,
"response_type": ["code"],
"scope": ["email", "openid", "profile"],
"state": session["state"],
"nonce": session["nonce"],
"redirect_uri": redirect_url
}
auth_req = client.construct_AuthorizationRequest(request_args=args)
login_url = auth_req.request('https://oidc.mit.edu/authorize')
if error == "":
return redirect(login_url)
else:
return render_template('error.html', login_url=login_url, error=error)
except Exception as e:
session["state"] = rndstr()
session["nonce"] = rndstr()
args = {
"client_id": CLIENT_ID,
"response_type": ["code"],
"scope": ["email", "openid", "profile"],
"state": session["state"],
"nonce": session["nonce"],
"redirect_uri": config.DOMAIN+'/login'
}
auth_req = client.construct_AuthorizationRequest(request_args=args)
login_url = auth_req.request('https://oidc.mit.edu/authorize')
return render_template('error.html', login_url=login_url, error="Stuff didn't go according to plan :(")
@app.route('/logout')
def logout():
response = app.make_response(redirect('/'))
response.set_cookie('jwt', '')
return response
| [
"shreyask@mit.edu"
] | shreyask@mit.edu |
e6ad1ebdb3fbb57d9e739bc8e8340a63d23b51da | 3f7bc9172de48ba9b36d26b7daf788613d2943a4 | /utils.py | c083bb7ca05aedd13c986baf1783604ac1c20fae | [] | no_license | GoosenA/OverdriveGUI | 39b526574c864d3dfb02bc3308b079a5eb62c28c | 0ce1235ddd478aad1aaabaa34992bf8a074fd6cd | refs/heads/master | 2021-05-18T00:57:24.690708 | 2020-04-01T15:53:59 | 2020-04-01T15:53:59 | 251,035,694 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,762 | py | import pygame
import json
import enum
class SurfaceObject(enum.Enum):
EMPTY = 0
MUD = 1
OIL_SPILL = 2
OIL_POWER = 3
FINISH = 4
BOOST = 5
PLAYER = 6
OPPONENT = 7
def read_config():
with open("config.json") as f:
print("file open")
config = json.load(f)
return config
def get_round_as_string(round):
rnd_str = str(round)
if len(rnd_str) == 3:
return rnd_str
elif len(rnd_str) == 2:
return "0"+rnd_str
else:
return "00"+rnd_str
def read_json(round, config):
rnd_as_str = get_round_as_string(round)
player_file = config["FolderPrepend"]+"/Round "+rnd_as_str+'/'+config["Player"]+'/JsonMap.json'
with open(player_file) as f:
player_data = json.load(f)
opponent_file = config["FolderPrepend"]+"/Round "+rnd_as_str+'/'+config["Opponent"]+'/JsonMap.json'
with open(opponent_file) as f:
opponent_data = json.load(f)
return player_data, opponent_data
def populate_grid(grid, player_world, opponent_world, player_id = 1, opponent_id = 2):
player_offset = player_world[0][0]["position"]["x"]
# offset = 0
for row in player_world:
for cell in row:
# print(cell["position"]["y"]+2, cell["position"]["x"]-player_offset, player_offset)
grid[cell["position"]["y"]+2][cell["position"]["x"]-player_offset] = SurfaceObject(cell["surfaceObject"])
if cell["occupiedByPlayerId"] == player_id:
grid[cell["position"]["y"]+2][cell["position"]["x"]-player_offset] = SurfaceObject.PLAYER
if cell["occupiedByPlayerId"] == opponent_id:
grid[cell["position"]["y"]+2][cell["position"]["x"]-player_offset] = SurfaceObject.OPPONENT
opponent_offset = opponent_world[0][0]["position"]["x"]
for row in opponent_world:
for cell in row:
grid[cell["position"]["y"]+10][cell["position"]["x"]-opponent_offset] = SurfaceObject(cell["surfaceObject"])
if cell["occupiedByPlayerId"] == player_id:
grid[cell["position"]["y"]+10][cell["position"]["x"]-opponent_offset] = SurfaceObject.PLAYER
if cell["occupiedByPlayerId"] == opponent_id:
grid[cell["position"]["y"]+10][cell["position"]["x"]-opponent_offset] = SurfaceObject.OPPONENT
return grid, player_offset, opponent_offset
def display_text(screen, font, config, player, opponent, textcolor):
cellSurf = font.render(f'PLAYER1: {config["Player"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 15)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'PLAYER2: {config["Opponent"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 215)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'PLAYER: {config["Player"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 400)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'Speed: {player["player"]["speed"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 420)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'State: {player["player"]["state"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 440)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'Powerups: {player["player"]["powerups"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 460)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'Boosting: {player["player"]["boosting"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 480)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'PLAYER: {config["Opponent"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 600)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'Speed: {opponent["player"]["speed"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 620)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'State: {opponent["player"]["state"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 640)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'Powerups: {opponent["player"]["powerups"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = (10, 660)
screen.blit(cellSurf, cellRect)
cellSurf = font.render(f'Boosting: {opponent["player"]["boosting"]}', True, textcolor)
cellRect = cellSurf.get_rect()
cellRect.topleft = ( 10, 680)
screen.blit(cellSurf, cellRect) | [
"goosenae+gitkraken@gmail.com"
] | goosenae+gitkraken@gmail.com |
5e1cc998043e8089b6fb00b3517d860d5dffcd83 | c48bf0acce521d9e7e14a6531432c3b8da12488f | /venv/Scripts/pip-script.py | bb8ccfc2f3a6e9f886759f70dff46d87f4ac9dd9 | [] | no_license | oluyalireuben/python_complete_conte | 94481426d7497f54912cfe32a18ff3f9db782568 | 01a70d64ba33623d7dc628a37794a1d495aa76f1 | refs/heads/master | 2022-05-18T11:03:20.056603 | 2020-04-24T03:18:43 | 2020-04-24T03:18:43 | 258,547,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 414 | py | #!C:\Users\alex\PycharmProjects\pycharm_one\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
)
| [
"oluyalireuben1@gmail.com"
] | oluyalireuben1@gmail.com |
1a01e133669315890a63fe326f1b255ce30d4178 | 1ef13c8d1e1bf3d1b7f47011bbb4b80b155cebcb | /NU-CS5001/hw04/passwords.py | 8a52a61b49fc37b8bf53232344e221e020bf3e5b | [
"MIT"
] | permissive | zahraaliaghazadeh/python | e242265ffd0d568dbce10d9264fa0b570a2a2993 | 2f2d0141a916c99e8724f803bd4e5c7246a7a02e | refs/heads/master | 2023-01-05T22:02:00.631281 | 2020-10-13T06:53:25 | 2020-10-13T06:53:25 | 295,260,142 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,183 | py | # Zahra Ali Aghazadeh
# This function takes first name and last name and a favortie
# word from the user and then it creates a username and 3
# diverse suggested passwords.
import math
import random
UP_BOUND_RANDOM = 99
# Actually this is: 3! Ways to sort three things + 1
WAYS_TO_SORT_THREE = 7
# since randint is inclusive we have 6 , and not 7
MIN_LN_LEN = 6
def passwords():
""" function prompts the user for `first_name`, `last_name`
and their `favorite_word`. It will then create a user name
and 3 suggested passwords for them.
:return: returns 4 strings(1 username and 3 passwords).
"""
print("Welcome to the username and password generator!")
# User prompts
first_name = input("Please Enter your first name: ")
last_name = input("Please Enter your last name: ")
favorite_word = input("Please Enter your favorite word: ")
# user_name
number_1_str = str(random.randint(1, UP_BOUND_RANDOM))
while (len(last_name) < MIN_LN_LEN):
last_name = last_name + ("*" * (MIN_LN_LEN-len(last_name)))
user_name = (first_name[0].lower() + last_name[0:MIN_LN_LEN].lower()
+ number_1_str)
print("Thanks {}, your username is ".format(first_name) + user_name)
# password_1
# convert the first and last name to lowercase
password_1_starter = (first_name.lower()
+ str(random.randint(1, UP_BOUND_RANDOM))
+ last_name.lower())
password_1 = ""
# loop through the lowercased string and
# substitute certain alphabets with characters
for c in password_1_starter:
if c == "a":
password_1 += "@"
elif c == "o":
password_1 += "0"
elif c == "l":
password_1 += "1"
elif c == "s":
password_1 += "$"
# 2 extra conditions below
elif c == "q":
password_1 += "9"
elif c == "h":
password_1 += "#"
else:
password_1 += c
print("Password 1: " + password_1)
# password 2
# lowercase first letters and uppercase last letters
# then concat them in order
# 0 slices the first char and -1 slices the last
password_2 = (first_name[0].lower()
+ first_name[-1].upper()
+ last_name[0].lower()
+ last_name[-1].upper()
+ favorite_word[0].lower()
+ favorite_word[-1].upper())
print("Password 2 : " + password_2)
# password 3
random_length_fn = random.randint(1, len(first_name))
random_length_ln = random.randint(1, len(last_name))
random_length_fw = random.randint(1, len(favorite_word))
# There are 6 ways to sort 3 things
# Below are the conditions for each generated random number
random_choice = random.randint(1, WAYS_TO_SORT_THREE)
if (random_choice == 1): # 1 2 3
password_3 = (first_name[:random_length_fn]
+ last_name[:random_length_ln]
+ favorite_word[:random_length_fw])
elif(random_choice == 2): # 2 1 3
password_3 = (last_name[:random_length_ln]
+ first_name[:random_length_fn]
+ favorite_word[:random_length_fw])
elif(random_choice == 3): # 3 1 2
password_3 = (favorite_word[:random_length_fw]
+ first_name[:random_length_fn]
+ last_name[:random_length_ln])
elif(random_choice == 4): # 3 2 1
password_3 = (favorite_word[:random_length_fw]
+ last_name[:random_length_ln]
+ first_name[:random_length_fn])
elif(random_choice == 5): # 2 3 1
password_3 = (last_name[:random_length_ln]
+ favorite_word[:random_length_fw]
+ first_name[:random_length_fn])
elif(random_choice == 6): # 1 3 2
password_3 = (first_name[:random_length_fn]
+ favorite_word[:random_length_fw]
+ last_name[:random_length_ln])
print("Password 3: " + password_3)
def main():
"main is to call our function"
passwords()
main()
| [
"zahraaliaghazadeh@gmail.com"
] | zahraaliaghazadeh@gmail.com |
22a0ca8e1e08f8516eb0a7d34b276c7390c35474 | c15a28ae62eb94dbf3ed13e2065195e572a9988e | /Cook book/src/8/lazily_computed_attributes/example1.py | a872f26e188323bd9e96e4b786016ffff9d9d6d8 | [] | no_license | xuyuchends1/python | 10798c92840a1a59d50f5dc5738b2881e65f7865 | 545d950a3d2fee799902658e8133e3692939496b | refs/heads/master | 2021-01-25T07:07:04.812140 | 2020-02-28T09:25:15 | 2020-02-28T09:25:15 | 93,647,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 704 | py | class lazyproperty:
def __init__(self, func):
self.func = func
def __get__(self, instance, cls):
if instance is None:
return self
else:
value = self.func(instance)
setattr(instance, self.func.__name__, value)
return value
if __name__ == '__main__':
import math
class Circle:
def __init__(self, radius):
self.radius = radius
@lazyproperty
def area(self):
print('Computing area')
return math.pi * self.radius ** 2
@lazyproperty
def perimeter(self):
print('Computing perimeter')
return 2 * math.pi * self.radius
| [
"xuyuchends@163.com"
] | xuyuchends@163.com |
66ba6597d61378a91d5ee82df352bcc2eba93876 | d8d95b609a103454b408634bc3a61e4c1fb72dd6 | /6주차/my_SIFT for student.py | d29df1b53fb430f6b303991b82775340a443ecd1 | [] | no_license | sglee487/ComputerGraphics | b7d8cb26a93c91bcfa8515807dce5b09a5bf4384 | 5468b807d98589fda5c9effc64740f1963d7550b | refs/heads/master | 2020-07-31T10:03:54.900052 | 2020-04-04T08:07:01 | 2020-04-04T08:07:01 | 210,547,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,154 | py | import cv2
import numpy as np
def get_extrema(DoG, ext):
for i in range(1, 4):
for j in range(1, DoG.shape[0]-1):
for k in range(1, DoG.shape[1]-1):
# 최대값 혹은 최소값인 지점을 extrema로 구해주세요.
DoG1localMax = np.max(DoG[j-1:j+2,k-1:k+2,i-1])
DoG1localMin = np.min(DoG[j-1:j+2,k-1:k+2,i-1])
DoG2localMax = np.max(DoG[j-1:j+2,k-1:k+2,i])
DoG2localMin = np.min(DoG[j-1:j+2,k-1:k+2,i])
DoG3localMax = np.max(DoG[j-1:j+2,k-1:k+2,i+1])
DoG3localMin = np.min(DoG[j-1:j+2,k-1:k+2,i+1])
allLocalMax = max(DoG1localMax,DoG2localMax,DoG3localMax)
allLocalMin = min(DoG1localMin,DoG2localMin,DoG3localMin)
if ((allLocalMax == DoG[j][k][i]) or (allLocalMin == DoG[j][k][i])):
# xhat과 D(xhat)을 구하기 위한 미분을 수행해주세요.
dDdx = (DoG[j,k+1,i]-DoG[j,k-1,i])/2
dDdy = (DoG[j+1,k,i]-DoG[j-1,k,i])/2
dDds = (DoG[j,k,i+1]-DoG[j,k,i-1])/2
d2Ddx2 = DoG[j,k+1,i] - DoG[j,k-1,i] + 2 * DoG[j,k,i]
d2Ddy2 = DoG[j+1, k , i] - DoG[j-1, k, i] + 2 * DoG[j, k, i]
d2Dds2 = DoG[j, k , i+1] - DoG[j, k , i-1] + 2 * DoG[j, k, i]
d2Ddxy = (((DoG[j+1,k+1,i]) - DoG[j+1,k-1,i])-((DoG[j-1,k+1,i]-DoG[j-1,k-1,i])))/4
d2Ddxs = (((DoG[j, k + 1, i+1]) - DoG[j, k - 1, i+1]) - (
(DoG[j, k + 1, i-1] - DoG[j, k - 1, i-1]))) / 4
d2Ddys = (((DoG[j + 1, k, i+1]) - DoG[j + 1, k, i-1]) - (
(DoG[j - 1, k, i+1] - DoG[j - 1, k, i-1]))) / 4
H = [[d2Ddx2,d2Ddxy,d2Ddxs],[d2Ddxy,d2Ddy2,d2Ddxy],[d2Ddxs,d2Ddys,d2Dds2]]
dD = np.transpose([dDdx,dDdy,dDds])
xhat = np.linalg.lstsq(np.dot(-1,H), dD, rcond=-1)[0]
target = DoG[j,k,i]
Dxhat = target + 0.5 * np.dot(dD.transpose(), xhat)
# Thresholding을 수행해주세요. ( 적절한 위치만 ext 배열에 저장해주세요, )
if(np.abs(Dxhat) < thresh or np.min(np.abs(xhat)) > 0.5):
continue
Hpart = np.array([[d2Ddx2,d2Ddxy],[d2Ddxy,d2Ddy2]])
traceHpartsquare = np.trace(Hpart) ** 2
detHpart = np.linalg.det(Hpart)
rc = ((r + 1) ** 2)/r
if (detHpart<0 or (traceHpartsquare/detHpart) > rc):
continue
ext[j,k,i-1] = 1
return ext
def SIFT(src, thresh, r):
s = 1.3 #초기 sigma
a = 3. #극점을 찾을 이미지 수
k = 2. ** (1/a) # scale step
lv1sigma = np.array([s , s * k, s * (k**2), s * (k**3), s * (k**4), s * (k**5)]) #double image에 적용될 sigma.
lv2sigma = np.array([s * (k**3) , s * (k**4), s * (k**5), s * (k**6), s * (k**7), s * (k**8) ]) #Original size image #start : 2 * sigma
lv3sigma = np.array([s * (k**6) , s * (k**7), s * (k**8), s * (k**9), s * (k**10), s * (k**11) ]) #half size image #start : 4 * sigma
lv4sigma = np.array([s * (k**9) , s * (k**10), s * (k**11), s * (k**12), s * (k**13), s * (k**14) ]) #quater size image #start : 8 * sigma
#image resize
doubled = cv2.resize(src,None,fx=2,fy=2,interpolation=cv2.INTER_LINEAR) #원본의 2배로 이미지를 resize 해주세요. cv2.INTER_LINEAR, cv2.INTER_NEAREST 자유롭게 사용.
normal = src #원본과 동일
half = cv2.resize(src,None,fx=0.5,fy=0.5,interpolation=cv2.INTER_LINEAR) #가로 세로 각각 1/2
quarter = cv2.resize(src,None,fx=0.25,fy=0.25,interpolation=cv2.INTER_LINEAR) #가로 세로 각각 1/4
# Gaussian 피라미드 저장할 3차원 배열
lv1py = np.zeros((doubled.shape[0], doubled.shape[1], 6))
lv2py = np.zeros((normal.shape[0], normal.shape[1], 6))
lv3py = np.zeros((half.shape[0], half.shape[1], 6))
lv4py = np.zeros((quarter.shape[0], quarter.shape[1], 6))
print('make gaussian pyr')
# Gaussian을 계산
# ksize = 2 * int(4 * sigma + 0.5) + 1
for i in range(6):
#Gaussian Pyramids를 만들어주세요.
#예제에서는 한 Level(Octave)에 6개의 Gaussian Image가 저장됩니다.
ksize = 2 * int(4 * lv1sigma[i] + 0.5) + 1
lv1py[:,:,i] = cv2.GaussianBlur(doubled, (ksize, ksize), lv1sigma[i])
ksize = 2 * int(4 * lv2sigma[i] + 0.5) + 1
lv2py[:,:,i] = cv2.GaussianBlur(normal,(ksize,ksize),lv2sigma[i])
ksize = 2 * int(4 * lv3sigma[i] + 0.5) + 1
lv3py[:,:,i] = cv2.GaussianBlur(half,(ksize,ksize),lv3sigma[i])
ksize = 2 * int(4 * lv4sigma[i] + 0.5) + 1
lv4py[:,:,i] = cv2.GaussianBlur(quarter,(ksize,ksize),lv4sigma[i])
#DoG 피라미드를 저장할 3차원 배열
DoGlv1 = np.zeros((doubled.shape[0], doubled.shape[1], 5))
DoGlv2 = np.zeros((normal.shape[0], normal.shape[1], 5))
DoGlv3 = np.zeros((half.shape[0], half.shape[1], 5))
DoGlv4 = np.zeros((quarter.shape[0], quarter.shape[1], 5))
print('calc DoG')
# DoG를 계산
for i in range(5):
#Difference of Gaussian Image pyramids 를 구해주세요.
DoGlv1[:,:,i] = cv2.subtract(lv1py[:,:,i],lv1py[:,:,i+1])
DoGlv2[:,:,i] = cv2.subtract(lv2py[:,:,i],lv2py[:,:,i+1])
DoGlv3[:,:,i] = cv2.subtract(lv3py[:,:,i],lv3py[:,:,i+1])
DoGlv4[:,:,i] = cv2.subtract(lv4py[:,:,i],lv4py[:,:,i+1])
# 극값의 위치를 표시할 3차원 배열
extPy1 = np.zeros((doubled.shape[0], doubled.shape[1], 3))
extPy2 = np.zeros((normal.shape[0], normal.shape[1], 3))
extPy3 = np.zeros((half.shape[0], half.shape[1], 3))
extPy4 = np.zeros((quarter.shape[0], quarter.shape[1], 3))
# Extrema의 위치 계산
print('find extrema')
extPy1 = get_extrema(DoGlv1, extPy1)
extPy2 = get_extrema(DoGlv2, extPy2)
extPy3 = get_extrema(DoGlv3, extPy3)
extPy4 = get_extrema(DoGlv4, extPy4)
extr_sum = extPy1.sum() + extPy2.sum() + extPy3.sum() + extPy4.sum()
extr_sum = extr_sum.astype(np.int)
keypoints = np.zeros((extr_sum, 3)) # 원래는 3가지의 정보가 들어가나 과제에선 Y좌표, X 좌표, scale 세 가지의 값만 저장한다.
#값 저장
count = 0 #keypoints 수를 Count
for i in range(3):
for j in range(doubled.shape[0]):
for k in range(doubled.shape[1]):
#Lv1
#Keypoints 배열에 Keypoint의 정보를 저장하세요. 함수로 만들어서 수행하셔도 됩니다.
if (extPy1[j,k,i] == 1):
keypoints[count,0] = j * 0.5
keypoints[count,1] = k * 0.5
keypoints[count,2] = i
count += 1
for i in range(3):
for j in range(normal.shape[0]):
for k in range(normal.shape[1]):
#Lv2
#Keypoints 배열에 Keypoint의 정보를 저장하세요.
if (extPy2[j,k,i] == 1):
keypoints[count,0] = j
keypoints[count,1] = k
keypoints[count,2] = i
count += 1
for i in range(3):
for j in range(half.shape[0]):
for k in range(half.shape[1]):
#Lv3
#Keypoints 배열에 Keypoint의 정보를 저장하세요.
if (extPy3[j,k,i] == 1):
keypoints[count,0] = j * 2
keypoints[count,1] = k * 2
keypoints[count,2] = i
count += 1
for i in range(3):
for j in range(quarter.shape[0]):
for k in range(quarter.shape[1]):
#Lv4
#Keypoints 배열에 Keypoint의 정보를 저장하세요.
if (extPy4[j,k,i] == 1):
keypoints[count,0] = j * 4
keypoints[count,1] = k * 4
keypoints[count,2] = i
count += 1
return keypoints
if __name__ == '__main__':
src = cv2.imread('./building.jpg')
gray = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY)
gray = gray.astype(np.double)
gray /= 255.
thresh = 0.03
r = 10. #원 논문에서 값을 10으로 사용
keypoints = SIFT(gray, thresh = thresh, r = r)
for i in range(len(keypoints)):
cv2.circle(src, (int(keypoints[i,1]), int(keypoints[i,0])), int(1 * keypoints[i,2]), (0, 0, 255), 1) # 해당 위치에 원을 그려주는 함수
src2 = cv2.imread('./building_temp.jpg')
gray2 = cv2.cvtColor(src2, cv2.COLOR_BGR2GRAY)
gray2 = gray2.astype(np.double) / 255.
keypoints2 = SIFT(gray2, thresh=thresh, r=r)
for i in range(len(keypoints2)):
cv2.circle(src2, (int(keypoints2[i,1]), int(keypoints2[i,0])), int(1 * keypoints2[i,2]), (0, 0, 255), 1) # 해당 위치에 원을 그려주는 함수
cv2.imshow('src', src)
cv2.imshow('src2', src2)
cv2.waitKey()
cv2.destroyAllWindows() | [
"sglee487@gmail.com"
] | sglee487@gmail.com |
31c5fd7ff63795698e8ccb61b3d1f8b33a80e943 | a5e075ab873d4cdef3571aae970c71ee49a9a568 | /ifelse.py | baf335dd4dc77a83caa284896119c81c01d15a1c | [] | no_license | Manjukrishnamk/pythonbasic | d310295f0ea93246b327da0cec87ffa579462729 | 6525df078cd3fefef03f8400045b490e7db5841f | refs/heads/master | 2021-07-18T03:49:38.163694 | 2017-10-24T03:56:26 | 2017-10-24T03:56:26 | 107,953,034 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | varone = int(input())
vartwo = int(input())
varthree = int(input())
if varone > vartwo and varone >varthree:
print("hey im varone")
elif vartwo >varthree:
print("vartwo")
else:
print("varthree")
| [
"manjukrishnamk@cs.ajce.in"
] | manjukrishnamk@cs.ajce.in |
285e8278d834a96263c89eb1feea01a5ba22b756 | a1dc2f25c17085ea0dcc34081028eaf6bc604219 | /CodingWithMosh/4_Functions/Function_Exercises.py | a7ebe494691e7444c77f3e065ae0ad8307e9f345 | [] | no_license | thedonflo/Flo-Python | a8579fd5ec2b5ddc3be6fd04405dcbc3c0a40b1b | 15ee8b6b1d39ace5aa0ad27db3cd2f03a64bb7b2 | refs/heads/master | 2021-07-13T00:09:24.460635 | 2020-09-23T09:05:45 | 2020-09-23T09:05:45 | 203,876,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,330 | py | # def greet():
# print("Hi there")
# print("Welcome aboard")
#
# greet()
# def greet(first_name, last_name):
# print(f"Hi {first_name} {last_name}")
# print("Welcome aboard")
#
# greet("Funlola", "Ogunleye")
# def get_greeting(name):
# return f"Hi {name}"
#
#
# message = get_greeting("Funlola")
#
# print(message)
#
# def increment(number, by):
# return number + by
#
# print(increment(2, by=1))
# def increment(number, by=1):
# return number + by
#
# print(increment(2))
# def multiply(*numbers): #Creates tuples
# print(numbers)
#
#
# multiply(2, 3, 4, 5)
# def multiply(*numbers): #Creates tuples and iterates through them
# for number in numbers:
# print(number)
#
#
# multiply(2, 3, 4, 5)
# def multiply(*numbers): #Creates tuples and iterates through them
# total = 1
# for number in numbers:
# total *= number
# return total
#
#
# print(multiply(2, 3, 4, 5))
# def save_user(**user): #Creates dictionary
# print(user)
# print(user["id"])
# print(user["name"])
#
# save_user(id=1, name="John", age=22)
def fizz_buzz(input):
if (input % 5 == 0) and (input % 3 == 0):
return "FizzBuzz"
if input % 3 == 0:
return "Fizz"
if input % 5 == 0:
return "Buzz"
return input
print(fizz_buzz(7))
| [
"tuffguy_20708@yahoo.com"
] | tuffguy_20708@yahoo.com |
f1fae0f924c00a9486c0b986eec4af374aa7f501 | ea83e60e2be606813005081a9f1b9516de018c7d | /language/realm/retrieval.py | 7962c9d8ad6fdb8180adf7d556fcee0ec5c34d37 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | optimopium/language | 1562a1f150cf4374cf8d2e6a0b7ab4a44c5b8961 | bcc90d312aa355f507ed128e39b7f6ea4b709537 | refs/heads/master | 2022-04-03T03:51:28.831387 | 2022-03-16T21:41:17 | 2022-03-16T22:50:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,555 | py | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Utilities for performing retrieval."""
import abc
from concurrent import futures
import time
from absl import logging
from language.realm import featurization
from language.realm import parallel
from language.realm import profile
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_hub as hub
class Retriever(abc.ABC):
  """Abstract interface for fetching candidate documents given queries."""

  @abc.abstractmethod
  def retrieve(self, query_batch):
    """Fetches candidate documents for each query in a batch.

    Args:
      query_batch (list[Query]): queries to retrieve documents for.

    Returns:
      a list parallel to `query_batch`, where each element is the list of
      Documents retrieved for the corresponding query.
    """
    raise NotImplementedError()
class DummyRetriever(Retriever):
  """Trivial Retriever returning placeholder documents; used for testing."""

  def __init__(self, num_neighbors):
    self._num_neighbors = num_neighbors
    # Sized to mimic the real corpus: ~13.4M candidates, 128-dim embeddings.
    self.total_candidates = 13353718
    self.embed_dim = 128
    with tf.device('/CPU:0'):
      self._doc_embeds = tf.zeros((self.total_candidates, self.embed_dim))

  def retrieve(self, query_batch):
    """Returns `num_neighbors` placeholder Documents per query."""
    # All-zero query embeddings: [batch_size, embed_dim].
    query_embeds = tf.zeros((len(query_batch), self.embed_dim))

    with tf.device('/CPU:0'):
      # Similarity scores: [batch_size, total_candidates].
      cand_scores = tf.matmul(query_embeds, self._doc_embeds, transpose_b=True)
      _, top_ids_batch = tf.math.top_k(cand_scores, k=self._num_neighbors)

    # Placeholder token ids, shared by every returned Document.
    title_ids = np.zeros(10, dtype=np.int32)
    body_ids = np.zeros(280, dtype=np.int32)

    return [
        [featurization.Document(0, title_ids, body_ids) for _ in top_ids]
        for top_ids in top_ids_batch
    ]
class BruteForceRetriever(Retriever):
  """Retriever that scores every document via a dense matrix product."""

  def __init__(self, query_embedder, documents, doc_embeds_or_path,
               num_neighbors):
    """Constructs BruteForceRetriever.

    Args:
      query_embedder: an instance of QueryEmbedder.
      documents: a list of Document objects.
      doc_embeds_or_path: either a [num_docs, embed_dim] TF Tensor, or a path
        to a checkpoint holding that tensor under the name 'block_emb'.
    num_neighbors: number of neighbors to retrieve.

    Raises:
      ValueError: if the number of embeddings does not match `documents`.
    """
    self._query_embedder = query_embedder
    self._num_neighbors = num_neighbors
    self._documents = documents

    if isinstance(doc_embeds_or_path, str):
      # Load the embedding matrix from a checkpoint, pinned to the CPU.
      with tf.device('/CPU:0'):
        reader = tf.train.load_checkpoint(doc_embeds_or_path)
        self._doc_embeds = reader.get_tensor('block_emb')
    else:
      self._doc_embeds = doc_embeds_or_path
    logging.info('Loaded document embeddings.')

    # Sanity-check that embeddings and documents line up one-to-one.
    if self._doc_embeds.shape[0] != len(documents):
      raise ValueError('Did not load the right number of embeddings.')

  @profile.profiled_function
  def retrieve(self, query_batch):
    # Embed the whole batch of queries at once: [batch_size, embed_dim].
    query_embeds = self._query_embedder.embed(query_batch)

    with tf.device('/CPU:0'):
      # Similarity against every candidate: [batch_size, total_candidates].
      scores = tf.matmul(query_embeds, self._doc_embeds, transpose_b=True)
      _, top_ids_batch = tf.math.top_k(scores, k=self._num_neighbors)

    return [[self._documents[doc_id] for doc_id in top_ids]
            for top_ids in top_ids_batch]
def count_tf_records(file_path):
  """Counts the number of records in a GZIP'd TFRecord file."""
  gzip_option = tf.python_io.TFRecordOptions(
      tf.python_io.TFRecordCompressionType.GZIP)
  # Stream through the file; only the count is kept in memory.
  return sum(
      1 for _ in tf.python_io.tf_record_iterator(file_path, gzip_option))
def count_tf_records_parallel_helper(args):
  """Worker adapter for count_tf_records_parallel: counts one shard."""
  idx, path = args
  return (idx, count_tf_records(path))
def count_tf_records_parallel(file_paths, num_processes=None):
  """Counts number of records in TFRecord files in parallel.

  Args:
    file_paths: a list of paths, where each path points to a GZIP-ed TFRecord
      file.
    num_processes: number of Python processes to use in parallel. If None,
      will use all available CPUs.

  Returns:
    shard_sizes: a list of ints, parallel to `file_paths`.
  """
  total = len(file_paths)
  counts = [None] * total
  with parallel.Executor(
      create_worker=lambda: count_tf_records_parallel_helper,
      queue_size=total,
      num_workers=num_processes) as executor:
    # Enqueue every (index, path) pair up front.
    for job in enumerate(file_paths):
      executor.submit(job)

    # Results arrive out of order; slot each count by its original index.
    num_done = 0
    for file_idx, count in executor.results(max_to_yield=total):
      counts[file_idx] = count
      num_done += 1
      logging.info('Counted %d / %d files.', num_done, total)

  return counts
def load_documents(path):
  """Loads Documents from a GZIP-ed TFRecords file into a Python list."""
  gzip_option = tf.python_io.TFRecordOptions(
      tf.python_io.TFRecordCompressionType.GZIP)

  def bytes_feature(ex, name):
    # First (and only) value of the named bytes feature.
    return ex.features.feature[name].bytes_list.value[0]

  def int_array_feature(ex, name):
    # 32-bit Numpy arrays are more memory-efficient than Python lists.
    return np.array(ex.features.feature[name].int64_list.value, dtype=np.int32)

  docs = []
  for record in tf.python_io.tf_record_iterator(path, gzip_option):
    ex = tf.train.Example.FromString(record)
    title = bytes_feature(ex, 'title')
    body = bytes_feature(ex, 'body')
    docs.append(
        featurization.Document(
            uid=featurization.get_document_uid(title, body),
            title_token_ids=int_array_feature(ex, 'title_token_ids'),
            body_token_ids=int_array_feature(ex, 'body_token_ids')))
  return docs
def load_documents_from_shard(args):
  """Worker adapter for load_documents_from_shards: loads one shard."""
  idx, path = args
  return (idx, load_documents(path))
@profile.profiled_function
def load_documents_from_shards(shard_paths, num_processes=None):
  """Loads Documents from a sharded, GZIP-ed TFRecords file into a Python list.

  Uses multiple processes to perform IO in parallel.

  Args:
    shard_paths: a list of paths, where each path points to a GZIP-ed
      TFRecords file. Documents loaded from each shard will be concatenated
      in the order of shard_paths.
    num_processes: number of Python processes to use in parallel. If None,
      will use all available CPUs.

  Returns:
    a list of Document instances.
  """
  num_shards = len(shard_paths)
  with parallel.Executor(
      create_worker=lambda: load_documents_from_shard,
      queue_size=num_shards,
      num_workers=num_processes) as executor:
    for job in enumerate(shard_paths):
      executor.submit(job)

    # Results may arrive in any order; keep the shard index for sorting.
    indexed_results = []
    for shard_idx, docs in executor.results(max_to_yield=num_shards):
      indexed_results.append((shard_idx, docs))
      logging.info('Loaded %d of %d document shards.', len(indexed_results),
                   num_shards)

  logging.info('Combining data from all document shards.')
  all_docs = []
  for _, docs in sorted(indexed_results, key=lambda pair: pair[0]):
    all_docs.extend(docs)

  logging.info('Finished loading all shards.')
  return all_docs
class QueryEmbedder(object):
  """Embeds queries with a (possibly lazily loaded) SavedModel."""

  def __init__(self, embedder_model_or_path, featurizer):
    if isinstance(embedder_model_or_path, str):
      # A path: load the SavedModel from disk.
      self._model = tf.saved_model.load_v2(embedder_model_or_path, tags={})
    else:
      # Assume it is an already-loaded SavedModel object.
      self._model = embedder_model_or_path
    logging.info('Loaded query embedder.')
    self._featurizer = featurizer

  def embed(self, query_batch):
    """Embeds a batch of queries.

    Args:
      query_batch: a list of Query instances.

    Returns:
      embeds: a [batch_size, embed_dim] float Tensor.
    """
    with profile.Timer('embed_featurize'):
      feature_dicts = [
          self._featurizer.featurize_query(query) for query in query_batch
      ]
      # Stack per-query features into a single dict of batched Tensors:
      #   input_ids / input_mask / segment_ids: [batch_size, seq_len] <int32>
      model_inputs = featurization.batch_feature_dicts(feature_dicts)

    with profile.Timer('embed_tf'):
      return self._model.signatures['projected'](**model_inputs)['default']
class DocumentEmbedder(object):
  """Embeds documents using TF Estimator.

  Note: this only works with the REALM Hub modules. An ICT Hub module won't work
  because it has a different set of signatures.
  """

  def __init__(self, hub_module_spec, featurizer, use_tpu, run_config=None):
    """Constructs the DocumentEmbedder.

    Args:
      hub_module_spec: path/spec of the TF-Hub embedder module.
      featurizer: a featurization.Featurizer; only its primitive
        hyperparameters are shipped to workers (see `params` in embed()).
      use_tpu: whether to run on TPU. Requires run_config.
      run_config: a tf.estimator.tpu.RunConfig; if None, a default is used
        (CPU/GPU only).
    """
    if run_config is None:
      if use_tpu:
        raise ValueError('Must supply a RunConfig if use_tpu.')
      else:
        run_config = tf.estimator.tpu.RunConfig()  # Just supply a default.
    self._hub_module_spec = hub_module_spec
    self._featurizer = featurizer
    self._use_tpu = use_tpu
    self._run_config = run_config
    self._log_interval = 10  # When embedding, log every 10 seconds.

  def embed(self, get_documents_dataset, total_docs, batch_size):
    """Embeds a Dataset of documents using Estimator.

    Args:
      get_documents_dataset: a function that returns a TF Dataset, where each
        element is a dict with attributes described below.
      total_docs: total number of documents in the Dataset.
      batch_size: number of documents to embed in each batch. Each element in
        the Dataset returned by get_documents_dataset should be a dict with the
        attributes described below.

    get_documents_dataset should return a Dataset over dicts, each containing
    at least the following attributes:
      - title_token_ids: a 1-D int Tensor.
      - body_token_ids: a 1-D int Tensor.

    Yields:
      a [embed_dim] Numpy array, one for each document.
    """
    if total_docs < 1:
      raise ValueError('Must embed at least 1 document.')

    # These hyperparams are passed to Estimator. The featurizer object itself
    # cannot cross the device boundary, so only its primitive settings go.
    params = {
        'vocab_path':
            self._featurizer.tokenizer.vocab_path,
        'do_lower_case':
            self._featurizer.tokenizer.do_lower_case,
        'query_seq_len':
            self._featurizer.query_seq_len,
        'candidate_seq_len':
            self._featurizer.candidate_seq_len,
        'num_candidates':
            self._featurizer.num_candidates,
        'max_masks':
            self._featurizer.max_masks,
        'separate_candidate_segments':
            self._featurizer.separate_candidate_segments,
    }

    def input_fn(params):
      """Constructs the dataset fed to Estimator."""
      # We cannot access self._featurizer via closure, because this function is
      # passed to another device. Hence, we need to reconstruct the featurizer
      # from its hyerparameters (passed through `params`).
      tokenizer = featurization.Tokenizer(
          vocab_path=params['vocab_path'],
          do_lower_case=params['do_lower_case'])
      featurizer = featurization.Featurizer(
          query_seq_len=params['query_seq_len'],
          candidate_seq_len=params['candidate_seq_len'],
          num_candidates=params['num_candidates'],
          max_masks=params['max_masks'],
          tokenizer=tokenizer,
          separate_candidate_segments=params['separate_candidate_segments'])

      dataset = get_documents_dataset()

      def featurize(doc_dict):
        # Turn raw token ids into the model's Tensor features.
        return featurizer.featurize_document_tf(doc_dict['title_token_ids'],
                                                doc_dict['body_token_ids'])

      dataset = dataset.map(
          featurize, num_parallel_calls=tf.data.experimental.AUTOTUNE)

      # Add a document index variable. The index travels with each example so
      # results can be re-ordered downstream (see
      # yield_predictions_from_estimator).
      dataset = dataset.enumerate()

      def _enumerate_to_dict(result_idx, tensor_dict):
        return dict(tensor_dict, result_idx=result_idx)

      dataset = dataset.map(
          _enumerate_to_dict, num_parallel_calls=tf.data.experimental.AUTOTUNE)

      # Pad the end of the dataset with one full extra batch.
      # This ensures that we don't drop the remainder.
      if total_docs % batch_size != 0:
        # Pad using the first value of the dataset, repeated batch_size times.
        # The duplicated padding examples carry result_idx 0 and are dropped
        # by the ordering logic downstream.
        pad_vals = dataset.take(1).repeat(batch_size)
        dataset = dataset.concatenate(pad_vals)

      # Batch the dataset. drop_remainder=True is safe because of the padding
      # above; TPUs require a static batch dimension.
      dataset = dataset.batch(batch_size, drop_remainder=True)
      dataset = dataset.prefetch(2)  # Prefetch for efficiency.
      return dataset

    def model_fn(features, labels, mode, params):
      """Constructs the model used by Estimator."""
      del labels, params
      embedder_module = hub.Module(
          spec=self._hub_module_spec, name='embedder', trainable=False)

      # Remove the result_idx before feeding features to the module.
      result_idx = features.pop('result_idx')

      # [batch_size, embed_dim]
      embeds = embedder_module(inputs=features, signature='projected')

      return tf.estimator.tpu.TPUEstimatorSpec(
          mode=mode, predictions={
              'embeds': embeds,
              'result_idx': result_idx
          })

    estimator = tf.estimator.tpu.TPUEstimator(
        use_tpu=self._use_tpu,
        model_fn=model_fn,
        model_dir=None,  # Don't persist model.
        config=self._run_config,
        params=params,
        train_batch_size=batch_size,
        predict_batch_size=batch_size)

    logging.info('Embedding %d documents total.', total_docs)
    predictions = estimator.predict(
        input_fn=input_fn, yield_single_examples=True)

    # yield_predictions_from_estimator restores document order and drops
    # padding/restart duplicates before we strip out the embeddings.
    for result in yield_predictions_from_estimator(
        predictions, total=total_docs, log_interval=self._log_interval):
      yield result['embeds']
def yield_predictions_from_estimator(predictions, total, log_interval=10):
  """Yields predictions from Estimator.predict, with added error correction.

  Estimator.predict can occasionally restart (e.g. when a TPU worker dies),
  causing results to be re-emitted from the beginning. This wrapper tracks the
  'result_idx' carried by each prediction dict and only yields each result
  once, in order.

  Args:
    predictions: the return value of Estimator.predict. An iterable of dicts.
      Each dict MUST have a 'result_idx' attribute, used to track result order.
    total (int): total expected number of elements to yield from predictions.
    log_interval: log every this many seconds.

  Yields:
    the same dicts yielded from Estimator.predict, but in the right order. The
    result_idx element is removed from every dict.
  """
  source = iter(predictions)
  num_yielded = 0
  began = time.time()
  last_logged = began
  while num_yielded < total:
    try:
      prediction = next(source)
    except StopIteration:
      raise ValueError(
          'Estimator.predict terminated before we got all results.')
    idx = prediction.pop('result_idx')
    if idx == num_yielded:
      # The expected next result: hand it out.
      yield prediction
      num_yielded += 1
      now = time.time()
      if now - last_logged > log_interval:
        logging.info('Yielded {} results in {:.2f} secs.'.format(
            num_yielded, now - began))
        last_logged = now
    elif idx > num_yielded:
      # A gap ahead of us means a result was skipped entirely.
      raise ValueError('Estimator.predict has somehow missed a result.')
    elif idx == 0:
      # idx < num_yielded with idx == 0: Estimator.predict restarted from the
      # beginning. Keep consuming (without yielding) until we catch back up.
      logging.warning('TPU worker seems to have restarted.')
    # 0 < idx < num_yielded: already yielded earlier; silently skip while
    # replaying after a restart.
def embed_documents_using_multiple_tpu_workers(
    shard_paths, shard_sizes, hub_module_spec,
    featurizer, tpu_workers,
    batch_size, num_tpu_cores_per_worker):
  """Embeds documents using multiple TPU workers.

  Args:
    shard_paths: a list of file paths, each specifying a GZIP'd TFRecord file
      containing documents stored as TF Examples. Doc embeddings will be
      concatenated in the order of shard_paths.
    shard_sizes: a list parallel to shard_paths, specifying the number of
      documents in each shard.
    hub_module_spec: path to the Hub module that will be used to embed the
      documents.
    featurizer: a Featurizer used to convert documents into Tensor features.
    tpu_workers: list of addresses of available TPU workers.
    batch_size: each TPU worker embeds documents in batches of this size.
    num_tpu_cores_per_worker: number of cores to use on each TPU worker.

  Returns:
    a [total_docs, embed_dim] Numpy array.
  """
  num_shards = len(shard_paths)
  num_workers = len(tpu_workers)

  tpu_config = tf.estimator.tpu.TPUConfig(
      iterations_per_loop=1,  # This seems to be ignored by predict().
      num_shards=num_tpu_cores_per_worker)

  # Spread shards as evenly as possible: the first (num_shards % num_workers)
  # workers each take one extra shard.
  base, extra = divmod(num_shards, num_workers)
  shard_counts = [base + 1 if w < extra else base for w in range(num_workers)]

  # Build one embed_documents() kwargs dict per worker, slicing out its
  # contiguous run of shards.
  worker_kwargs = []
  offset = 0
  for worker_addr, count in zip(tpu_workers, shard_counts):
    worker_kwargs.append({
        'tpu_run_config':
            tf.estimator.tpu.RunConfig(
                master=worker_addr, tpu_config=tpu_config),
        'shard_paths': shard_paths[offset:offset + count],
        'shard_sizes': shard_sizes[offset:offset + count],
        'hub_module_spec': hub_module_spec,
        'featurizer': featurizer,
        'batch_size': batch_size,
    })
    offset += count
  # Every shard must be assigned to exactly one worker.
  assert offset == num_shards

  # Run all workers in parallel via separate threads.
  with futures.ThreadPoolExecutor(max_workers=num_workers) as executor:
    # A list of [num_docs_per_worker, embed_dim] Numpy arrays.
    embeds_list = list(
        executor.map(lambda kwargs: embed_documents(**kwargs), worker_kwargs))

  # A [total_docs, embed_dim] Numpy array.
  return np.concatenate(embeds_list, axis=0)
def embed_documents(
    shard_paths,
    shard_sizes,
    hub_module_spec,
    featurizer,
    batch_size,
    tpu_run_config = None):
  """Embeds documents either locally (CPU/GPU) or with a TPU worker.

  Note: TPUEstimator.predict currently requires the TPU worker to have a single
  "host" (a machine running TensorFlow that is physically connected to the TPU
  chips). This is not true for all TPU topologies -- some have multiple hosts.

  Args:
    shard_paths: a list of file paths, each specifying a GZIP'd TFRecord file
      containing documents stored as TF Examples. Doc embeddings will be
      concatenated in the order of shard_paths.
    shard_sizes: a list parallel to shard_paths, specifying the number of
      documents in each shard.
    hub_module_spec: path to the Hub module that will be used to embed the
      documents.
    featurizer: a Featurizer used to convert documents into Tensor features.
    batch_size: embed documents in batches of this size.
    tpu_run_config: configures the TPU worker. If None, run on CPU/GPU.

  Returns:
    a [total_docs, embed_dim] Numpy array.
  """
  embedder = DocumentEmbedder(
      hub_module_spec=hub_module_spec,
      featurizer=featurizer,
      use_tpu=(tpu_run_config is not None),
      run_config=tpu_run_config)

  def parse_tf_example(serialized):
    """Parses one serialized document TF Example into a dict of Tensors."""
    # FixedLenSequenceFeature requires allow_missing to be True, even though we
    # can't actually handle those cases.
    feature_spec = {
        'title':
            tf.FixedLenSequenceFeature([], tf.string, allow_missing=True),
        'text':
            tf.FixedLenSequenceFeature([], tf.string, allow_missing=True),
        'title_token_ids':
            tf.FixedLenSequenceFeature([], tf.int64, allow_missing=True),
        'body_token_ids':
            tf.FixedLenSequenceFeature([], tf.int64, allow_missing=True),
    }
    features = tf.parse_single_example(serialized, feature_spec)

    # tf.Example only supports tf.int64, but the TPU only supports tf.int32.
    # So cast all int64 to int32.
    for name in list(features.keys()):
      tensor = features[name]
      if tensor.dtype == tf.int64:
        tensor = tf.cast(tensor, tf.int32)
      features[name] = tensor
    return features

  def get_documents_dataset():
    """Streams the shards, in order, as parsed feature dicts."""
    # Note: num_parallel_reads should be None to guarantee that shard_paths
    # are visited sequentially, not in parallel.
    dataset = tf.data.TFRecordDataset(
        shard_paths,
        compression_type='GZIP',
        buffer_size=8 * 1024 * 1024,
        num_parallel_reads=None)
    return dataset.map(
        parse_tf_example, num_parallel_calls=tf.data.experimental.AUTOTUNE)

  # embed() is a generator; draining it below performs the actual work.
  embeds = embedder.embed(
      get_documents_dataset=get_documents_dataset,
      total_docs=sum(shard_sizes),
      batch_size=batch_size)

  # A list of [embed_dim] Numpy arrays.
  embeds_list = list(embeds)

  # A [total_docs, embed_dim] Numpy array.
  return np.stack(embeds_list, axis=0)
| [
"kentonl@google.com"
] | kentonl@google.com |
474a0230e8c8f77fb2a72c2eade97920f8c36c7b | 45e917cac806fa6fd0c7fba92007cf7477ff548d | /orig/hash.py | c4317815324c9b60e24a1733b5d71b237da834c4 | [
"ISC"
] | permissive | pombredanne/flylsh | 15e457af1b2ac1cec9c7651048c2a43add5dfbd9 | 7a1210e59a0e16ea7fcf111e94b6dd5e5f51a56f | refs/heads/master | 2022-04-16T16:36:50.531652 | 2018-07-17T12:23:05 | 2018-07-17T12:23:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,814 | py | #!/usr/bin/env python
from __future__ import division
from optparse import OptionParser
import random,time
import numpy as np
import heapq
random.seed(10301949)
np.random.seed(10301949)
"""
"A neural algorithm for a fundamental computing problem" (2017)
Sanjoy Dasgupta, Charles F. Stevens, Saket Navlakha.
"""
# Command line parameters.
NUM_KENYON = -1 # number of Kenyon cells.
PROJECTION = -1 # type of random projection.
HASH_LENGTH = -1 # hash length.
# Dataset-dependent parameters.
NUM_NNS = -1 # number of nearest neighbors to validate over.
DIM = -1 # number of dimensions per example.
N = -1 # number of examples in the dataset.
# Fixed parameters.
SET_MEAN = 100 # averaging firing rate per odor.
DIST_FN = "norm2" # distance function.
#==============================================================================
# READING INPUT DATA
#==============================================================================
def read_generic_data(filename,do_norm=False):
    """ Generic reader for: sift, gist, corel, mnist, glove, audio, msong.

    Reads a comma-separated text file with one example per line into an
    N x DIM matrix, then standardizes it. Relies on the module globals N and
    DIM having been set to this file's dimensions beforehand (done in main()).

    Args:
        filename: path to the CSV-like data file (N lines of DIM values).
        do_norm: forwarded to standardize_data (row normalization if True).

    Returns:
        The standardized N x DIM numpy array.
    """
    D = np.zeros((N,DIM))
    with open(filename) as f:
        for line_num,line in enumerate(f):
            cols = line.strip().split(",")
            assert len(cols) == DIM
            # NOTE(review): assigning map(...) into a numpy row is Python 2
            # behavior (map returns a list there); the xrange use elsewhere in
            # this file confirms it targets Python 2.
            D[line_num,:] = map(float,cols)
            #D[line_num,:] *= -1 # to invert distribution?
        # Every expected row must have been read.
        assert line_num+1 == N
    return standardize_data(D,do_norm)
def standardize_data(D,do_norm,set_mean=None):
    """ Performs several standardizations on the data.
        1) Makes sure all values are non-negative.
        2) Sets the mean of each example (row) to set_mean, then floors.
        3) Applies row normalization if desired.

    Args:
        D: 2-D numpy array of examples x features; modified in place.
        do_norm: if True, divide each row by its Euclidean norm.
        set_mean: target row mean; defaults to the module-level SET_MEAN.

    Returns:
        The standardized array (the same object as D, unless do_norm
        converted it to float64).
    """
    if set_mean is None:
        set_mean = SET_MEAN  # module-level "fixed parameter" (100)
    # Derive the dimensions from D itself instead of the module globals N/DIM
    # (read_generic_data guarantees they match anyway).
    num_rows, num_cols = D.shape

    # 1. Shift each column by |column minimum|. Note: this shifts columns
    # whose minimum is already positive as well, matching the original code
    # (the comment there said "most negative number", but abs(min) was used
    # unconditionally).
    for col in range(num_cols):
        D[:,col] += abs(min(D[:,col]))

    # 2. Set the mean of each row (odor) to be set_mean.
    for row in range(num_rows):
        # Multiply by: set_mean / current mean. Keeps proportions the same.
        D[row,:] = D[row,:] * (set_mean / np.mean(D[row,:]))
        # Values are non-negative here, so floor == int() truncation.
        D[row,:] = np.floor(D[row,:])
        assert abs(np.mean(D[row,:]) - set_mean) <= 1

    # 3. Applies normalization: v / np.linalg.norm(v) per row. The original
    # called `normalize`, which is never imported in this file (a missing
    # sklearn import), so do_norm=True raised NameError; this implements the
    # documented behavior inline.
    if do_norm:
        D = D.astype(np.float64)
        norms = np.linalg.norm(D, axis=1, keepdims=True)
        norms[norms == 0] = 1.0  # guard against all-zero rows
        D = D / norms

    # Make sure all values (firing rates) are >= 0.
    assert (D >= 0).all()
    return D
#==============================================================================
# ALGORITHM STUFF
#==============================================================================
def create_rand_proj_matrix(projection=None, num_kenyon=None, dim=None):
    """ Creates a random projection matrix of size num_kenyon by dim.

    Args:
        projection: "SBk" for a sparse binary matrix with k ones per row, or
            "DG" for dense Gaussian. Defaults to the module-level PROJECTION.
        num_kenyon: number of rows (Kenyon cells / expansion size). Defaults
            to the module-level NUM_KENYON.
        dim: number of columns (glomeruli/ORNs). Defaults to the module-level
            DIM.

    Returns:
        A num_kenyon x dim numpy array.

    Raises:
        ValueError: if the sparse sample count exceeds dim, or the projection
            type is unrecognized. (These were bare asserts before, which
            `python -O` would silently skip.)
    """
    # Fall back to the module globals lazily, preserving the original
    # no-argument call sites in main().
    if projection is None:
        projection = PROJECTION
    if num_kenyon is None:
        num_kenyon = NUM_KENYON
    if dim is None:
        dim = DIM

    # Create a sparse, binary random projection matrix.
    if projection.startswith("SB"):
        num_sample = int(projection[2:])  # "SB6" -> 6
        if num_sample > dim:
            raise ValueError(
                "cannot sample %d of %d glomeruli" % (num_sample, dim))

        # Each row (KC) samples from the glomeruli: every row has num_sample
        # random 1s, and 0s everywhere else.
        M = np.zeros((num_kenyon, dim))
        for row in range(num_kenyon):
            # Sample num_sample random indices, set these to 1.
            for idx in random.sample(range(dim), num_sample):
                M[row, idx] = 1
            # Make sure I didn't screw anything up!
            assert sum(M[row, :]) == num_sample

    # Create a dense, Gaussian random projection matrix.
    elif projection == "DG":
        M = np.random.randn(num_kenyon, dim)
    else:
        raise ValueError("unknown projection type: %r" % (projection,))
    return M
def dist(X, Y, dist_fn=None):
    """ Computes the distance between two vectors.

    Args:
        X, Y: numpy vectors of equal length.
        dist_fn: "norm1" (Manhattan) or "norm2" (Euclidean). Defaults to the
            module-level DIST_FN.

    Returns:
        The requested norm of (X - Y) as a float.

    Raises:
        ValueError: for an unknown distance name. (Was `assert False`, which
            `python -O` would silently skip.)
    """
    fn = DIST_FN if dist_fn is None else dist_fn
    if fn == "norm1":
        return np.linalg.norm((X - Y), ord=1)
    elif fn == "norm2":
        return np.linalg.norm((X - Y), ord=2)  # same as scipy euclidean but faster!
    raise ValueError("unknown distance function: %r" % (fn,))
#==============================================================================
# EVALUATION FUNCTIONS
#==============================================================================
def tesht_map_dist(D,H):
    """ Computes mean average precision (MAP) and distortion between true nearest-neighbors
        in input space and approximate nearest-neighbors in hash space.

    Samples 100 random query rows (assumes N >= 100). For each query, finds
    the NUM_NNS nearest neighbors under dist() in the input space D and in
    the hash space H, computes the average precision of the predicted list
    against the true set, and returns the mean over all queries.

    Args:
        D: N x DIM matrix of inputs (module global N must match).
        H: N x NUM_KENYON matrix of hashed representations, row-aligned
            with D.

    Returns:
        The mean MAP over the sampled queries, as a float in [0, 1].
    """
    queries = random.sample(range(N),100)
    MAP = [] # [list of MAP values for each query]
    for i in queries:
        temp_i = [] # list of (dist input space,odor) from i.
        temp_h = [] # list of (dist hash space ,odor) from i.
        for j in range(N):
            if i == j: continue
            # Distance between i and j in input space.
            dij_orig = dist(D[i,:],D[j,:])
            if dij_orig <= 0: continue # i and j are duplicates, e.g. corel: i=1022,j=2435.
            temp_i.append( (dij_orig,j) )
            # Distance between i and j in hash space. Computed only when the
            # input-space distance was kept, so the two lists stay parallel.
            dij_hash = dist(H[i,:],H[j,:])
            temp_h.append( (dij_hash,j) )
        assert len(temp_i) == len(temp_h) # == N-1 # not the last part bc of duplicates.

        # Create a set of the true NUM_NNS nearest neighbors.
        # true_nns = sorted(temp_i)[0:NUM_NNS] # true NUM_NNS tuples.
        true_nns = heapq.nsmallest(NUM_NNS,temp_i) # true NUM_NNS tuples. (faster than above)
        true_nns = set([vals[1] for vals in true_nns]) # true NUM_NNS examples.

        # Go through predicted nearest neighbors and compute the MAP.
        # pred_nns = sorted(temp_h)[0:NUM_NNS] # pred NUM_NNS tuples.
        pred_nns = heapq.nsmallest(NUM_NNS,temp_h) # pred NUM_NNS tuples. (faster than above)
        pred_nns = [vals[1] for vals in pred_nns] # pred NUM_NNS examples.
        assert len(true_nns) == len(pred_nns)

        # Compute MAP: https://makarandtapaswi.wordpress.com/2012/07/02/intuition-behind-average-precision-and-map/
        # E.g. if the top NUM_NNS results are: 1, 0, 0, 1, 1, 1
        # then the MAP is: avg(1/1, 0, 0, 2/4, 3/5, 4/6)
        num_correct_thus_far = 0
        map_temp = []
        for idx,nghbr in enumerate(pred_nns):
            if nghbr in true_nns:
                num_correct_thus_far += 1
                map_temp.append((num_correct_thus_far)/(idx+1))
        # Queries with no correct prediction at all score 0.
        map_temp = np.mean(map_temp) if len(map_temp) > 0 else 0
        assert 0.0 <= map_temp <= 1.0
        MAP.append(map_temp)

    # Store overall performance for these queries.
    x_map = np.mean(MAP)
    return x_map
#==============================================================================
# MAIN
#==============================================================================
def main():
    """Parses CLI options, loads a dataset, and evaluates fly-hash MAP.

    Mutates the module globals (NUM_KENYON, PROJECTION, HASH_LENGTH, N, DIM,
    NUM_NNS) that the helper functions read.

    Returns:
        A summary row: [DIM, PROJECTION, NUM_KENYON, WTA, HASH_LENGTH,
        mean MAP, std MAP, DATASET, runtime in minutes].
    """
    start = time.time()
    global NUM_KENYON,PROJECTION,HASH_LENGTH,N,DIM,NUM_NNS

    usage="usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option("-p", "--projection", action="store", type="string", dest="projection", default="DG",help="type of random projection: DG (dense Gaussian), SB6 (sparse, binary with sampling=6)")
    parser.add_option("-y", "--kenyon", action="store", type="int", dest="num_kenyon", default=1000,help="number of kenyon cells (i.e. expansion size)")
    parser.add_option("-w", "--wta", action="store", type="string", dest="wta", default=None,help="type of WTA to perform (top, bottom, rand)")
    parser.add_option("-l", "--hash", action="store", type="int", dest="hash_length", default=8,help="length of the hash")
    parser.add_option("-d", "--dataset", action="store", type="string", dest="dataset", default="halem",help="name of the dataset")
    (options, args) = parser.parse_args()

    NUM_REPEATS = 50
    NUM_KENYON = options.num_kenyon
    PROJECTION = options.projection
    HASH_LENGTH = options.hash_length
    DATASET = options.dataset
    WTA = options.wta

    # ===============================================================
    # Read Sift data: 10,000 images x 128 sift descriptors/features.
    if DATASET == "sift10k":
        N = 10000
        DIM = 128
        D = read_generic_data("../data/sift/sift10k.txt")
    # Read Gist data: 10,000 images x 960 gist descriptors/features.
    elif DATASET == "gist10k":
        N = 10000
        DIM = 960
        D = read_generic_data("../data/gist/gist10k.txt")
    # Read MNIST data: 10,000 images x 784 pixels.
    elif DATASET == "mnist10k":
        N = 10000
        DIM = 784
        D = read_generic_data("../data/mnist/mnist10k.txt")
    # Read Glove data: 10,000 words x 300 features.
    elif DATASET == "glove10k":
        N = 10000
        DIM = 300
        D = read_generic_data("../data/glove/glove10k.txt")
    # NOTE(review): the CLI default "halem" falls through to this assert.
    else: assert False

    NUM_NNS = max(10,int(0.02*N)) # at least 10, else the top 2%.
    assert NUM_NNS <= N-1

    x_map = [None] * NUM_REPEATS
    for ii in range(NUM_REPEATS):
        # Create random projection matrix.
        M = create_rand_proj_matrix()

        # Compute KC activity for each example: multiply input vector by random projection matrix.
        K = np.dot(D,np.transpose(M)) # N x NUM_KENYON
        assert K.shape[0] == N
        assert K.shape[1] == NUM_KENYON

        # ? why bucket width of 10?
        # Perform quantization: add offset, divide by width, take floor.
        offset,width = 0,10
        K = np.floor((K+offset)/width)

        # Apply WTA to KCs: firing rates at indices corresponding to top/bot/rand/all KCs; 0s elsewhere.
        if WTA == "random":# fix indices for all odors, otherwise, can't compare.
            rand_indices = random.sample(range(NUM_KENYON),HASH_LENGTH)
        H = np.zeros((N,NUM_KENYON))
        for i in range(N):
            # Take all neurons.
            if WTA == "all":
                assert HASH_LENGTH == NUM_KENYON
                indices = range(NUM_KENYON)
            # Highest firing neurons.
            elif WTA == "top":
                indices = np.argpartition(K[i,:],-HASH_LENGTH)[-HASH_LENGTH:]
            # Lowest firing neurons.
            elif WTA == "bottom":
                indices = np.argpartition(K[i,:],HASH_LENGTH)[:HASH_LENGTH]
            # Random neurons.
            elif WTA == "random":
                indices = rand_indices#random.sample(range(NUM_KENYON),HASH_LENGTH)
            else: assert False
            H[i,:][indices] = K[i,:][indices]

        # Evaluate MAP.
        x_map[ii] = tesht_map_dist(D,H)

    return [DIM,PROJECTION,NUM_KENYON,WTA,HASH_LENGTH,np.mean(x_map),np.std(x_map),DATASET,(time.time()-start) / 60]
if __name__ == "__main__":
main() | [
"andrew.luetgers@gmail.com"
] | andrew.luetgers@gmail.com |
4f67d2f9b2c114e2e8be349cc9e20fc740d77f6f | fd664afbdcf16fbee4fec51716396c46d86bf2c8 | /main.py | 09447b99df1d355f81e5b38380edda5fbe8c142f | [] | no_license | andrewMcGhie/user-signup | 471272f510c4fe3462a26b00112ff18897653e4f | 530e1251730487a00db8255ad437be5d3c40695a | refs/heads/master | 2021-08-20T03:00:51.498557 | 2017-11-28T02:30:32 | 2017-11-28T02:30:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,042 | py | from flask import Flask, request, redirect, render_template
import os
import cgi
app = Flask(__name__)
app.config['DEBUG'] = True
@app.route('/', methods=['POST', 'GET'])
def index():
    """Serve the signup form page."""
    page_title = "User Signup"
    return render_template('index.html', title=page_title)
def _basic_field_error(value, empty_message, label):
    """Shared empty/space/length validation used by validate_userinfo.

    Returns an error message string, or '' when the value passes. The
    'betweeen' typo is kept so the user-facing messages are unchanged.
    """
    if value == "":
        return empty_message
    if " " in value:
        return label + " cannot contain a space"
    if len(value) > 20 or len(value) < 3:
        return label + " must be betweeen 3 and 20 characters"
    return ""

def validate_userinfo():
    """Validate the signup form; re-render with errors or redirect to welcome.

    Must be called inside a Flask request context (reads request.form).
    NOTE(review): this view has no @app.route decorator, so as written it is
    never reachable -- confirm how it is meant to be wired up.
    """
    username = request.form['username']
    password = request.form['password']
    verify = request.form['verify']
    email = request.form['email']

    # All three fields share the same empty/space/length rules; only the
    # messages differ. The follow-up checks run only when the shared checks
    # pass, matching the original elif chains.
    username_error = _basic_field_error(
        username, "Must enter a user name", "User name")
    password_error = _basic_field_error(
        password, "Must enter a password", "Password")
    if not password_error and password != verify:
        password_error = "Passwords do not match"
    email_error = _basic_field_error(
        email, "Must enter a valid email address", "Email address")
    if not email_error and (email.count("@") != 1 or email.count(".") != 1):
        email_error = "Invalid email address"

    if username_error or password_error or email_error:
        # Re-render the form with the errors; password fields are cleared.
        return render_template('index.html', title = "User Signup", username=username, password="", verify="", email=email, username_error=username_error, password_error=password_error, email_error=email_error)
    else:
        return redirect('/welcome?username={0}'.format(username))
@app.route('/welcome')
def welcome():
    """Greet the newly signed-up user.

    The signup flow redirects here as '/welcome?username=...', so the
    username arrives as a GET query-string parameter.
    """
    # Bug fix: request.form is empty on a GET redirect -- the username is in
    # the query string, so it must be read from request.args instead.
    username = request.args.get('username')
    return render_template('welcome.html', username=username)
app.run()
| [
"mcghiea@gmail.com"
] | mcghiea@gmail.com |
82303825a36ae127081f7c965f2fa948b36e6fcc | d7ae8db44b31de83eabaf0e286b1452d4ada24ff | /IoT_Domain_Analyst_ECE_3502/Lab_3/Linear_Regression.py | 524229a56949837e42e249dd6a58236604882ea0 | [
"CC0-1.0"
] | permissive | eshan5/VIT-Labs | ae4c6719b86fb5e2f30e0f5a023171597cf33d42 | 5a20b9571a10b4550b886d588969592e595dac1d | refs/heads/main | 2023-08-24T06:50:23.888426 | 2021-10-09T10:18:32 | 2021-10-09T10:18:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | import numpy as np
from sklearn.linear_model import LinearRegression
# Toy ordinary-least-squares example with scikit-learn.
# x must be 2-D (n_samples, n_features) for sklearn, hence the reshape.
x = np.array([5, 15, 25, 35, 45, 55]).reshape((-1, 1))
y = np.array([5, 20, 14, 32, 22, 38])
print(x)
print(y)
# Fit y = intercept + slope * x by least squares.
model = LinearRegression().fit(x, y)
# R^2 on the training data (coefficient of determination).
r_sq = model.score(x, y)
print('coefficient of determination:', r_sq)
print('intercept:', model.intercept_)
print('slope:', model.coef_)
# Disabled scratch code kept by the original author: a 2-D-target fit plus
# sklearn and manual predictions on new inputs.
"""new_model = LinearRegression().fit(x, y.reshape((-1, 1)))
print('intercept:', new_model.intercept_)
print('slope:', new_model.coef_)
y_pred = model.predict(x)
print('predicted response:', y_pred, sep='\n')
y_pred = model.intercept_ + model.coef_ * x
print('predicted response:', y_pred, sep='\n')
x_new = np.arange(5).reshape((-1, 1))
print(" First few points of the line :")
print(x_new)
y_new = model.predict(x_new)
print(y_new)"""
| [
"aadhityas@gmail.com"
] | aadhityas@gmail.com |
1882e6bd42af8f728c9d7796b25c44164b46c8a0 | d2915ef6ee9c1ea01f47d3468bba8e320a8f5914 | /design_patterns/behavioural/template_method.py | b4d81ca7739be92fcbe5d17b1a54a35d7cf159d6 | [] | no_license | asing177/python_basics | a269adbaf166fb760d2692874601528ef230bbbd | 48ce7d5d6356edbd9bc21f8ebb55ec95787d4340 | refs/heads/main | 2023-01-11T12:11:44.155102 | 2020-11-13T07:24:54 | 2020-11-13T07:24:54 | 300,806,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,123 | py | from test_abc import ABC, abstractmethod
class AbstractClass(ABC):
    """
    The Abstract Class defines a template method that contains a skeleton of
    some algorithm, composed of calls to (usually) abstract primitive
    operations.

    Concrete subclasses should implement these operations, but leave the
    template method itself intact.
    """

    def template_method(self) -> None:
        """
        The template method defines the skeleton of an algorithm: fixed base
        operations, subclass-implemented required operations, and optional
        hooks, always invoked in this order.
        """
        self.base_operation1()
        self.required_operations1()
        self.base_operation2()
        self.hook1()
        self.required_operations2()
        self.base_operation3()
        self.hook2()

    # These operations already have implementations and are shared by every
    # subclass.

    def base_operation1(self) -> None:
        print("AbstractClass says: I am doing the bulk of the work")

    def base_operation2(self) -> None:
        print("AbstractClass says: But I let subclasses override some operations")

    def base_operation3(self) -> None:
        print("AbstractClass says: But I am doing the bulk of the work anyway")

    # These operations have to be implemented in subclasses.

    @abstractmethod
    def required_operations1(self) -> None:
        pass

    @abstractmethod
    def required_operations2(self) -> None:
        pass

    # These are "hooks." Subclasses may override them, but it's not mandatory
    # since the hooks already have default (but empty) implementation. Hooks
    # provide additional extension points in some crucial places of the
    # algorithm.

    def hook1(self) -> None:
        pass

    def hook2(self) -> None:
        pass
class ConcreteClass1(AbstractClass):
    """
    Implements only the mandatory primitive operations; the base class keeps
    its default behavior for everything else, including the hooks.
    """

    def required_operations1(self) -> None:
        message = "ConcreteClass1 says: Implemented Operation1"
        print(message)

    def required_operations2(self) -> None:
        message = "ConcreteClass1 says: Implemented Operation2"
        print(message)
class ConcreteClass2(AbstractClass):
    """
    Implements the mandatory operations and additionally overrides one of the
    optional hooks.
    """

    def required_operations1(self) -> None:
        message = "ConcreteClass2 says: Implemented Operation1"
        print(message)

    def required_operations2(self) -> None:
        message = "ConcreteClass2 says: Implemented Operation2"
        print(message)

    def hook1(self) -> None:
        # Overriding a hook is optional; done here to show hooks participate
        # in the template method.
        message = "ConcreteClass2 says: Overridden Hook1"
        print(message)
def client_code(abstract_class: AbstractClass) -> None:
    """
    Runs the algorithm through the template method. The client depends only
    on the AbstractClass interface, so any concrete subclass works here.
    """
    abstract_class.template_method()
if __name__ == "__main__":
print("Same client code can work with different subclasses:")
client_code(ConcreteClass1())
print("")
print("Same client code can work with different subclasses:")
client_code(ConcreteClass2()) | [
"adityasingh27@hotmail.com"
] | adityasingh27@hotmail.com |
1d4a7962f047e1507edd5b010afde2fc751120b8 | e400d4a141f35bc4240293253048535f1e737d4e | /src/03_IPhreeqcPy/02_phreeqc_mixing_CSH.py | 0ef9294bb86ad1f2be65ad009b6c572debf6e331 | [] | no_license | annavarzina/carbonation | 94416935f92cdfb1874c61407c8d1909178bd6c9 | 030b222f000d79538e9890fb9047d57ced7bad2d | refs/heads/master | 2021-06-23T07:33:20.147869 | 2021-03-02T13:29:34 | 2021-03-02T13:29:34 | 193,922,887 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,356 | py | import numpy as np
import matplotlib.pylab as plt
from mixing import PhreeqcMixing
from kinetics import PhreeqcKinetics
class PhreeqcMixingCSH(PhreeqcMixing):
    """PhreeqcMixing specialization that dissolves a C-S-H phase.

    Builds the PHREEQC input (PHASES / SOLUTION / MIX / SELECTED_OUTPUT /
    USER_PUNCH blocks) for a mixing-fraction simulation of the C-S-H phase
    described by `csh`. The solution_*/mix_*/selected_output_1/run_phreeqc
    builders are inherited from PhreeqcMixing.
    """

    def __init__(self, n, fraction, csh, database):
        """
        Args:
            n: number of mixing steps to simulate.
            fraction: mixing fraction applied at each step.
            csh: dict with 'name', 'stochiometry' (keys 'Ca', 'Si', 'H2O',
                'H+') and 'log_k' describing the C-S-H phase.
            database: path to the PHREEQC thermodynamic database file.
        """
        self.phase = csh['name']
        self.csh = csh
        self.steps = n
        self.fraction = fraction
        self.database = database
        self.phrqc_input = []
        self.selected_output = []
        self.phrqc_string = ''
        self.simulation_time = 0

    @staticmethod
    def _sign_and_magnitude(value):
        """Splits a signed coefficient into ('+' or '-', abs(value)).

        PHREEQC reaction strings spell the sign separately from the number;
        this replaces the duplicated sign-flipping code in phases().
        """
        if value < 0:
            return '-', -value
        return '+', value

    def generate_phrqc_string(self):
        """Assembles the full PHREEQC input and stores it in phrqc_string."""
        self.phases()
        self.solution_1()
        self.solution_2()
        # One MIX block per simulation step. range() replaces the original
        # np.arange(0, steps): same iteration count, without allocating a
        # large array.
        for _ in range(self.steps):
            self.mix_2()
        self.selected_output_1()
        self.user_punch()
        self.mix_3()
        self.phrqc_string = '\n'.join(self.phrqc_input)

    def phases(self):
        """Appends the PHASES block defining the C-S-H dissolution reaction."""
        # CSH stochiometry: H+ is consumed and H2O produced on dissolution;
        # the water coefficient follows from balancing the oxide formula.
        s = self.csh['stochiometry']
        sign1, h = self._sign_and_magnitude(s['H+'])
        sign2, h2o = self._sign_and_magnitude(s['H2O'] + s['H+'] - s['Ca'])

        phrqc_input = []
        phrqc_input.append('PHASES')
        phrqc_input.append(self.phase)
        phrqc_input.append('\t(CaO)' + str(s['Ca']) + '(SiO2)' + str(s['Si']) +
                           '(H2O)' + str(s['H2O']) + ' ' + sign1 + ' ' +
                           str(h) + 'H+ = ' + str(s['Ca']) + 'Ca+2 + ' +
                           str(s['Si']) + 'SiO2 ' + sign2 + ' ' + str(h2o) +
                           ' H2O')
        phrqc_input.append('\t-log_K\t' + str(self.csh['log_k']) + '\n')
        self.phrqc_input += phrqc_input

    def user_punch(self):
        """Appends the USER_PUNCH block reporting Ca, Si and the phase amount."""
        phrqc_input = []
        phrqc_input.append('USER_PUNCH')
        phrqc_input.append('\t-headings\tCa\t' + self.phase)
        phrqc_input.append('\t-start')
        phrqc_input.append('\t10\tpunch\ttot("Ca")')
        phrqc_input.append('\t15\tpunch\ttot("Si")')
        phrqc_input.append('\t20\tpunch\ttot("' + self.phase + '")')
        phrqc_input.append('\t30\tpunch')
        phrqc_input.append('\t-end')
        phrqc_input.append('END')
        self.phrqc_input += phrqc_input
#%% PARAMETERS
# PHREEQC thermodynamic database (cemdata18) and the C-S-H phase definition:
# stoichiometric coefficients plus the solubility constant log_K.
database = 'C:\Anaconda2\lib\site-packages\databases\cemdata18.dat'
csh = {'name':'CSH', 'stochiometry':{'Ca':1.67, 'Si':1.0, 'H2O':4.34, 'H+':3.34}, 'log_k':29.133,}
n = 400000 # time should be ~10 minutes
krate = 10**(-8.0) #1e-7
s = 800#scale factor
# The mixing fraction emulates the kinetic rate scaled by s.
fraction = krate * s
print('Kinetic rate = ' + str(krate))
print('Mixing fraction = ' + str(fraction))
#%% RUN
pm = PhreeqcMixingCSH(n, fraction, csh, database)
pm.run_phreeqc()
print('Mixing fraction simulation time = ' + str(pm.simulation_time))
#%% PLOT
# NOTE(review): h is never used below -- confirm whether it can be removed.
h = 1
# One sample per step; convert the step index to hours for the x-axis.
t = range(1, n+1)
t = [i/3600. for i in t]
ca_m = []
si_m = []
# Keep only selected-output rows whose first column is 3 (the mix solution);
# columns 1 and 2 hold total Ca and Si (mol/l) per the USER_PUNCH block.
for i in range(len(pm.selected_output)):
    if pm.selected_output[i][0]==3:
        ca_m.append(pm.selected_output[i][1])
        si_m.append(pm.selected_output[i][2])
# Ca concentration over time.
plt.figure()
plt.plot(t, ca_m, label = "mix")
plt.xlabel('time (h)')
plt.ylabel('Ca (mol/l)')
plt.legend()
# Si concentration over time.
plt.figure()
plt.plot(t, si_m, label = "mix")
plt.xlabel('time (h)')
plt.ylabel('Si (mol/l)')
plt.legend()
plt.show()
| [
"varzinaanna@gmail.com"
] | varzinaanna@gmail.com |
913e406199d7adf3fcacb33850752f52a57881fa | 69e5f24fa12346f892b1c907e802286045b3641f | /train.py | c17b2ccaaaaae82b11f58306d9b719d7f6098609 | [] | no_license | hope-yao/failed_adversarial_training | 0cf9d05333767756134db1eb8ea2424ace8449c9 | be87e05b59aaeecec9001c1d6ae69afcf9382c1d | refs/heads/master | 2020-04-01T19:04:32.433080 | 2018-10-17T22:39:48 | 2018-10-17T22:39:48 | 153,532,414 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,181 | py | """Trains a model, saving checkpoints and tensorboard summaries along
the way."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import json
import os
import shutil
from timeit import default_timer as timer
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
from model import Model
from pgd_attack import LinfPGDAttack
with open('config.json') as config_file:
config = json.load(config_file)
# Setting up training parameters
tf.set_random_seed(config['random_seed'])
max_num_training_steps = config['max_num_training_steps']
num_output_steps = config['num_output_steps']
num_summary_steps = config['num_summary_steps']
num_checkpoint_steps = config['num_checkpoint_steps']
batch_size = config['training_batch_size']
# Setting up the data and the model
mnist = input_data.read_data_sets('MNIST_data', one_hot=False)
global_step = tf.contrib.framework.get_or_create_global_step()
model = Model()
# Setting up the optimizer
train_step = tf.train.AdamOptimizer(1e-4).minimize(model.xent,
global_step=global_step)
# Set up adversary
attack = LinfPGDAttack(model,
config['epsilon'],
config['k'],
config['a'],
config['random_start'],
config['loss_func'])
# Setting up the Tensorboard and checkpoint outputs
model_dir = config['model_dir']
if not os.path.exists(model_dir):
os.makedirs(model_dir)
# We add accuracy and xent twice so we can easily make three types of
# comparisons in Tensorboard:
# - train vs eval (for a single run)
# - train of different runs
# - eval of different runs
saver = tf.train.Saver(max_to_keep=3)
tf.summary.scalar('accuracy adv train', model.accuracy)
tf.summary.scalar('accuracy adv', model.accuracy)
tf.summary.scalar('xent adv train', model.xent / batch_size)
tf.summary.scalar('xent adv', model.xent / batch_size)
tf.summary.image('images adv train', model.x_image)
merged_summaries = tf.summary.merge_all()
shutil.copy('config.json', model_dir)
with tf.Session() as sess:
# Initialize the summary writer, global variables, and our time counter.
summary_writer = tf.summary.FileWriter(model_dir, sess.graph)
sess.run(tf.global_variables_initializer())
# saver.restore(sess,'/home/hope-yao/Documents/madrys_code/mnist_challenge/models/a_very_robust_model_run2/checkpoint-99900')
training_time = 0.0
# Main training loop
for ii in range(max_num_training_steps):
if ii%10000 == 0:
num_adv_batch = 1000
x_pool_nat = np.zeros((num_adv_batch * batch_size, 784))
x_pool_adv = np.zeros((num_adv_batch * batch_size, 784))
y_pool = np.zeros((num_adv_batch * batch_size))
from tqdm import tqdm
for jj in tqdm(range(num_adv_batch)):
x_batch, y_batch = mnist.train.next_batch(batch_size)
x_batch_adv = attack.perturb(x_batch, y_batch, sess)
x_pool_nat[jj * batch_size:(jj + 1) * batch_size] = x_batch
x_pool_adv[jj * batch_size:(jj + 1) * batch_size] = x_batch_adv
y_pool[jj * batch_size:(jj + 1) * batch_size] = y_batch
np.save('x_pool_adv_itr{}'.format(ii), x_pool_adv)
np.save('x_pool_nat_itr{}'.format(ii), x_pool_nat)
np.save('y_pool_itr{}'.format(ii), y_pool)
# x_batch, y_batch = mnist.train.next_batch(batch_size)
# # Compute Adversarial Perturbations
# start = timer()
# x_batch_adv = attack.perturb(x_batch, y_batch, sess)
# end = timer()
# training_time += end - start
x_batch = x_pool_nat[ii%1000 * batch_size:(ii%1000 + 1) * batch_size]
x_batch_adv = x_pool_adv[ii%1000 * batch_size:(ii%1000 + 1) * batch_size]
y_batch = y_pool[ii%1000 * batch_size:(ii%1000 + 1) * batch_size]
nat_dict = {model.x_input: x_batch,
model.y_input: y_batch}
adv_dict = {model.x_input: x_batch_adv,
model.y_input: y_batch}
# Output to stdout
if ii % num_output_steps == 0:
nat_acc = sess.run(model.accuracy, feed_dict=nat_dict)
adv_acc = sess.run(model.accuracy, feed_dict=adv_dict)
print('Step {}: ({})'.format(ii, datetime.now()))
print(' training nat accuracy {:.4}%'.format(nat_acc * 100))
print(' training adv accuracy {:.4}%'.format(adv_acc * 100))
if ii != 0:
print(' {} examples per second'.format(
num_output_steps * batch_size / training_time))
training_time = 0.0
# Tensorboard summaries
if ii % num_summary_steps == 0:
summary = sess.run(merged_summaries, feed_dict=adv_dict)
summary_writer.add_summary(summary, global_step.eval(sess))
# Write a checkpoint
if ii % num_checkpoint_steps == 0:
saver.save(sess,
os.path.join(model_dir, 'checkpoint'),
global_step=global_step)
# Actual training step
start = timer()
for jj in range(5):
sess.run(train_step, feed_dict=adv_dict)
end = timer()
training_time += end - start
| [
"hope-yao@asu.edu"
] | hope-yao@asu.edu |
0b4866faf3a17c9283a152b34f125550fa7e3d21 | 099fba27964270ed2508971e67a4cdcab574d5f0 | /tests/conftest.py | a35e45974809b39476059f4842c1100db10e13ac | [
"MIT"
] | permissive | Melevir/cognitive_complexity | 8dacdb67e5ea0d4c1a7ccdbf89524c556fcdd55e | 1cb05131e1abe20b78abcb5f97996090724defb2 | refs/heads/master | 2022-08-20T19:34:45.006958 | 2022-08-09T07:45:44 | 2022-08-09T07:45:44 | 218,568,372 | 33 | 8 | MIT | 2022-08-09T07:02:52 | 2019-10-30T16:07:54 | Python | UTF-8 | Python | false | false | 215 | py | import ast
from cognitive_complexity.api import get_cognitive_complexity
def get_code_snippet_compexity(src: str) -> int:
funcdef = ast.parse(src.strip()).body[0]
return get_cognitive_complexity(funcdef)
| [
"i.lebedev@bestdoctor.ru"
] | i.lebedev@bestdoctor.ru |
20a9dd95c4d20f1f214b8136ec1f4ad66131fc85 | 628dac7256b6893e0afe611a43ba144b90a6bbe9 | /listings/urls.py | 3dd4d3fc97ad37939aeceec26c6d568385db8fd6 | [] | no_license | abdouakhad/clone-btrealstate | 3c2a5fc883eb5051055ebda655ef7a69d0594086 | f294b497332d5f0829f4208d45708200c5931961 | refs/heads/main | 2023-06-24T14:37:28.006886 | 2021-07-27T22:45:05 | 2021-07-27T22:45:05 | 390,147,910 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='listings'),
path('<int:listing_id>', views.listing, name='listing'),
path('search', views.search, name='search')
]
| [
"akhad0015@gmail.com"
] | akhad0015@gmail.com |
4cc51ea420bf1fe6227d7456d6aed549b87f8788 | cc872c1dd09b8049c10fc4e3a2a147d60724b5e9 | /3_ModifyingPrograms/randline.py | 4d0ad2d794008c539dd3038ffde5e387d5818885 | [] | no_license | aanish94/SoftwareConstructionLab | 71d2d9590614b56a53dad9eb7a0dc636df3a1e45 | 23de6820e61e7cd3dbfab4b1efa04be8d2b70c56 | refs/heads/master | 2021-01-21T06:59:44.356871 | 2014-05-16T02:01:52 | 2014-05-16T02:01:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,212 | py | #!/usr/bin/python
import random, sys
from optparse import OptionParser
class randline:
def __init__(self, lines,numlines,unique,wreplace):
self.lines = lines
self.unique = unique
if self.unique:
self.lines = list(set(self.lines))
self.wreplace = wreplace
self.numlines = numlines
def chooseline(self):
curLine = random.choice(self.lines)
if self.wreplace:
self.lines.remove(curLine)
return curLine
def main():
version_msg = "%prog 2.0"
usage_msg = """%prog [OPTION] [OPTION] [OPTION]... FILE
Output randomly selected lines from FILE. Use -u for unique and -w for no line replacment."""
parser = OptionParser(version=version_msg,
usage=usage_msg)
parser.add_option("-n", "--numlines",
action="store", dest="numlines", default=1,
help="output NUMLINES lines (default 1)")
parser.add_option("-u", "--unique",action="store_true",
dest="unique",default=False,
help="unique lines from file only")
parser.add_option("-w","--without_replacement",action="store_true",
dest="without_replace",
default=False,help="without replacement option")
options, args = parser.parse_args(sys.argv[1:])
try:
numlines = int(options.numlines)
except:
parser.error("invalid NUMLINES: {0}".
format(options.numlines))
unique = options.unique
wreplace = options.without_replace
if numlines < 0:
parser.error("negative count: {0}".
format(numlines))
if len(args) < 1:
parser.error("not enough operands")
alllines = []
for x in range(len(args)):
f=open(args[x])
lines = f.readlines()
for j in range(len(lines)):
alllines.append(lines[j])
try:
generator = randline(alllines,numlines,unique,wreplace)
for index in range(numlines):
sys.stdout.write(generator.chooseline())
except IOError as err:
parser.error("I/O error({0}): {1}".
format(errno, strerror))
if __name__ == "__main__":
main()
| [
"aanish94@live.in"
] | aanish94@live.in |
03fa270be63af49d803b50f06e2f566610bf1159 | 1c962341f3b580f2be0529a2d5804d49804470f6 | /judge_2152.py | a4cdbad8d0c1c0d6e41d3a7a54609469d3035777 | [] | no_license | andersonmarquees/-uri_python | 7bc14b50198bd238f9594b37a86553ecfb277f76 | 379518cd17433725d6a859526de356162b26aa40 | refs/heads/master | 2020-05-05T09:08:51.483638 | 2019-04-14T16:42:24 | 2019-04-14T16:42:24 | 179,892,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,090 | py | n = int(input())
while n > 0:
number = list(map(int, input().split()))
if number[2] == 0 and number[0] >= 10 and number[1] >= 10:
print("{}:{} - A porta fechou!".format(number[0], number[1]))
elif number[2] == 1 and number[0] >= 10 and number[1] >= 10:
print("{}:{} - A porta abriu!".format(number[0], number[1]))
elif number[2] == 0 and number[0] < 10 and number[1] < 10:
print("0{}:0{} - A porta fechou!".format(number[0], number[1]))
elif number[2] == 1 and number[0] < 10 and number[1] < 10:
print("0{}:0{} - A porta abriu!".format(number[0], number[1]))
elif number[2] == 0 and number[0] < 10:
print("0{}:{} - A porta fechou!".format(number[0], number[1]))
elif number[2] == 1 and number[0] < 10:
print("0{}:{} - A porta abriu!".format(number[0], number[1]))
elif number[2] == 0 and number[1] < 10:
print("{}:0{} - A porta fechou!".format(number[0], number[1]))
elif number[2] == 1 and number[1] < 10:
print("{}:0{} - A porta abriu!".format(number[0], number[1]))
n -= 1
| [
"anderson_fisico@yahoo.com.br"
] | anderson_fisico@yahoo.com.br |
199eee9eab0ed5e0816ebcf705d2ad5c2dbaf9ea | 7b9898017f0a6a249f4a9cd92608fc466f4925d1 | /scripts/db_bulk_create.py | ba3b674c1d6600b0d8465c771108402fd2f256de | [] | no_license | kdd0721/korexdata | eb46ad88f602da3bf23af251900008a480979a09 | 0a913bbd0352ffc44c705eec459ee40f66f02af9 | refs/heads/master | 2022-12-17T04:34:23.346973 | 2020-09-22T06:24:17 | 2020-09-22T06:24:17 | 292,180,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,217 | py | from django.core.exceptions import ObjectDoesNotExist
import os
from address.models import AddrRoad, AddrInfo, AddrJibun, AddrAddinfo
def search(dirname):
filenames = os.listdir(dirname)
for filename in filenames:
full_filename = os.path.join(dirname, filename)
read_file(full_filename)
def read_file(filename):
r = open(filename, mode='rt')
bulk_list = []
batch_size = 10000
print(filename)
for i, line in enumerate(r):
row = line.strip().split('|')
bulk_list.append(
AddrInfo(
bdmgtsn=row[0],
rnmgtsn=row[1],
emdno=row[2],
udrtyn=row[3],
buldmnnm=row[4],
buldslno=row[5],
areano=row[6],
chgrsncd=row[7],
ntcdate=row[8],
stnmbfr=row[9],
addrstus=row[10]
)
)
if i % batch_size == 0:
AddrInfo.objects.bulk_create(bulk_list)
print(i)
bulk_list = []
AddrInfo.objects.bulk_create(bulk_list)
r.close()
print("finish")
def run():
# read_file('C:/Users/daeun/Desktop/openAPI/db/addr_info/주소_경상남도.txt')
search('C:/Users/daeun/Desktop/openAPI/db/addr_info')
# search('C:/Users/daeun/Desktop/openAPI/db/addr_jibun')
# search('C:/Users/daeun/Desktop/openAPI/db/addr_addinfo')
"""
AddrRoad(
rnmgtsn=row[0],
rn=row[1],
rneng=row[2],
emdno=row[3],
sinm=row[4],
sinmeng=row[5],
sggnm=row[6],
sggnmeng=row[7],
emdnm=row[8],
emdnmeng=row[9],
emddiv=row[10],
emdcd=row[11],
usage=row[12],
chgrsn=row[13],
chginfo=row[14],
ntcdate=row[15],
expdate=row[16],
)
AddrInfo(
bdmgtsn=row[0],
rnmgtsn=row[1],
emdno=row[2],
udrtyn=row[3],
buldmnnm=row[4],
buldslno=row[5],
areano=row[6],
chgrsncd=row[7],
ntcdate=row[8],
stnmbfr=row[9],
addrstus=row[10]
)
AddrJibun(
bdmgtsn=row[0],
srlno=row[1],
dongcd=row[2],
sinm=row[3],
sggnm=row[4],
emdnm=row[5],
linm=row[6],
mtyn=row[7],
lnbrmnnm=row[8],
lnbrslno=row[9],
repyn=row[10],
chgrsncd=row[11]
)
AddrAddinfo(
bdmgtsn=row[0],
hjdongcd=row[1],
hjdongnm=row[2],
zipno=row[3],
zipsrlno=row[4],
lrgdlvry=row[5],
bdrgstrbdnm=row[6],
sggbdnm=row[7],
bdkdcd=row[8],
chgrsncd=row[9]
)
""" | [
"kdd0721@gmail.com"
] | kdd0721@gmail.com |
53ccb6530c2f2fd58fb3dd5ba06f0a3494db388c | 3f69a4564c4bb2e6f1306be1a15056d07f1b7fe9 | /src/profiles_project/profiles_project/settings.py | 59c16d9e9aaa83076508f410e79463a290baf70c | [] | no_license | Sumel08/AmazonInstance | 01484886d920f334d62e806d288b92af27dd495a | 29d4199f576c8f3ce05cd2e9bdfcdf28e2c259f6 | refs/heads/master | 2021-01-01T17:23:01.835667 | 2017-07-23T00:18:00 | 2017-07-23T00:18:00 | 98,058,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,244 | py | """
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'r)*o$=-zh^+-y-n_i0&j067typ4r_*v!llbs#k64ev6*l5^7mi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'profiles_api',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'profiles_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'profiles_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'profiles_api.UserProfile'
| [
"oscarl.ocho@gmail.com"
] | oscarl.ocho@gmail.com |
bd74bf57b9a942b8db684370875fded51b336414 | 10b863830cc34e569046fac0eed9113a67813b96 | /AradhyaGallery/migrations/0002_remove_gallery_g_desc.py | 47eed116efee82abb8307c7f3bc5b141722b0b09 | [] | no_license | ajayk2233/Aradhya-Hospital | b16aaa30faa7ca1bb81da6301c217b90600e7b59 | 5dcbb6b1659273dce86bada512b679010ae83238 | refs/heads/master | 2021-01-09T14:18:06.782369 | 2020-03-08T22:42:42 | 2020-03-08T22:42:42 | 242,333,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | py | # Generated by Django 3.0 on 2020-03-01 05:59
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('AradhyaGallery', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='gallery',
name='g_desc',
),
]
| [
"ajay.watchout@gmail.com"
] | ajay.watchout@gmail.com |
7129193767994ad736159b273b307510d497f2ca | f91d27c126b5d76bcee30b8c8eb897342cff5855 | /fluent_python/chapter_7/clock_deco.py | 7066fb41d77ec5a46ea4247e6e4bc1a723016f42 | [] | no_license | das-dev/misc | 9afd13480e149b5e5cee5e0d8fc9113eb0170d97 | 6d204a6bf6d0c4addd277914cb98f11d7d7b3c50 | refs/heads/master | 2023-04-11T18:55:07.137684 | 2021-05-14T16:48:55 | 2021-05-14T16:48:55 | 269,633,325 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 721 | py | import time
from functools import wraps
def clock(func):
@wraps(func)
def clocked(*args, **kwargs):
start = time.perf_counter()
result = func(*args)
elapsed = time.perf_counter() - start
arg_list = []
if args:
arg_list.append(', '.join(repr(arg) for arg in args))
if kwargs:
pairs = [f'{k}={v}' for k, v in sorted(kwargs.items())]
arg_list.append(', '.join(pairs))
arg_str = ', '.join(arg_list)
print(f'[{elapsed:.8f} {func.__name__}({arg_str}) -> {result}')
return result
return clocked
if __name__ == '__main__':
@clock
def snooze(seconds):
time.sleep(seconds)
snooze(.123)
| [
"das.dev@pm.me"
] | das.dev@pm.me |
3008dd278727707f36e5e705364cf2287b98a024 | 0bcca9bd3c82301173d701259fefada0bd575786 | /strategy/util.py | 33f848fcb919f2b8d5bb65763f2554a6d5e5bef4 | [
"MIT"
] | permissive | agi1512/python-sc2 | e3f413e893896a3a774655d8882caa021f1daa1c | 74ab732f38d02dd984e2944d6f7f68be436709eb | refs/heads/master | 2020-05-24T00:00:24.677867 | 2018-09-03T14:06:14 | 2018-09-03T14:06:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,220 | py | from random import uniform, randrange
import logging
logger = logging.getLogger("sc2.performance")
logger.setLevel(logging.INFO)
import functools
import timeit
import time
import os
def create_folder(folder):
"""Creates folder if not exists"""
if not os.path.exists(folder):
os.makedirs(folder)
def get_random_building_location(bot):
"""Generates random placement suggestion for building location"""
random1 = randrange(5, 15, 2)
random2 = randrange(5, 12, 2)
if bot.townhalls.exists:
return bot.townhalls.random.position.towards(bot.game_info.map_center, random1).random_on_distance(random2)
else:
return bot.first_base.position.towards(bot.game_info.map_center, random1).random_on_distance(random2)
def print_log(logger, level, message):
"""Logs or print messages"""
if logger is not None:
if level == logging.DEBUG:
logger.debug(message)
elif level == logging.INFO:
logger.info(message)
elif level == logging.WARNING:
logger.warning(message)
elif level == logging.ERROR:
logger.error(message)
elif level == logging.CRITICAL:
logger.critical(message)
else:
logger.error("UNKNOWN LEVEL: "+ message)
else:
print(message)
# Based on: https://stackoverflow.com/a/20924212
def measure_runtime(func):
"""Measures runtime, logs in case of slow performance"""
@functools.wraps(func)
async def newfunc(*args, **kwargs):
start = time.time()
await func(*args, **kwargs)
elaped_ms = int((time.time() - start) * 1000)
level = None
if elaped_ms <= 50:
return
elif elaped_ms > 1000:
level = logging.ERROR
elif elaped_ms > 500:
level = logging. WARNING
elif elaped_ms > 100:
level = logging.INFO
elif elaped_ms > 50:
level = logging.DEBUG
else:
level = logging.CRITICAL
print_log(logger, level, "Function {} required {} ms".format(func.__name__, elaped_ms))
return newfunc
| [
"spam.42@uni-muenster.de"
] | spam.42@uni-muenster.de |
777088f9d6eadec2ebdc405f2690d293e184d772 | 1a97a97920f115c64741e5c084013983bffa163b | /getBCvalue.py | 76ab0845ee588b7bda9922ecfd408be382a3de2d | [] | no_license | tomato26/gilfoyle_bitcoin_alert | 95abb72527bbc7d24de73c70b36af6792adbb737 | b63a684feb766b3b75d9ea2b592fe60a617d6b82 | refs/heads/master | 2020-05-23T15:46:57.602951 | 2019-05-15T13:50:45 | 2019-05-15T13:50:45 | 186,834,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | import requests
def getBCvalue():
r = requests.get('https://api.bitflyer.jp/v1/ticker?product_code=BTC_JPY')
json = r.json()
return json["ltp"]
| [
"gakkouyou88@gmail.com"
] | gakkouyou88@gmail.com |
b51ac374afe25f663243c13b27f727390672b103 | 0b0217ffcf597800776c0e0c44642703b0c3a970 | /Create_xml.py | fa513435656e36876257c050737e4bccdae9685d | [] | no_license | GergoPeterSomodi/Hexa | 44fbbad39c722b13793f5bbd21eca97978526efd | 0971a6d677a92d4970ed5eed0ef7bdb675c876b0 | refs/heads/master | 2022-09-12T07:31:00.175561 | 2020-05-29T07:29:51 | 2020-05-29T07:29:51 | 255,087,469 | 0 | 2 | null | 2020-04-24T11:47:29 | 2020-04-12T13:23:45 | Python | UTF-8 | Python | false | false | 1,737 | py | from xml.etree import ElementTree, cElementTree
from xml.dom import minidom
from Setting import *
from Game import *
def get_hex_id(images):
temp = create_dict()
layer_map = []
for image in images:
for key, value in temp.items():
if value == image:
layer_id = key
layer_map.append(layer_id)
return layer_map
num_columns = str(int(game_size[0] / game_settings.width))
num_rows = str(int(game_size[1] / game_settings.height / (3 / 4)))
layer_id = 1
def create_xml(num_columns, num_rows, list_of_map):
root = ElementTree.Element('map', width=num_columns, height=num_rows, tilewidth="32", tileheight="32")
child1 = ElementTree.SubElement(root, 'layer', id="1", name="Tile Layer 1", width=num_columns, height=num_rows)
child1_1 = ElementTree.SubElement(child1, 'data')
child2 = ElementTree.SubElement(root, 'layer', id="2", name="Tile Layer 2", width=num_columns, height=num_rows)
child2_1 = ElementTree.SubElement(child2, 'data')
child3 = ElementTree.SubElement(root, 'layer', id="3", name="Tile Layer 3", width=num_columns, height=num_rows)
child3_1 = ElementTree.SubElement(child3, 'data')
child1_1.text = str(list_of_map)
child2_1.text = str(list_of_map)
child3_1.text = str(list_of_map)
print(ElementTree.tostring(root))
tree = cElementTree.ElementTree(root) # wrap it in an ElementTree instance, and save as XML
t = minidom.parseString(ElementTree.tostring(root)).toprettyxml() # Since ElementTree write() has no pretty printing support, used minidom to beautify the xml.
tree1 = ElementTree.ElementTree(ElementTree.fromstring(t))
tree1.write("map3.xml",encoding='utf-8', xml_declaration=True)
| [
"somoman8@gmail.com"
] | somoman8@gmail.com |
b189d011d6657ef5e6f9b4e1061f09ba5eb4c1a7 | 3a891a79be468621aae43defd9a5516f9763f36e | /apps/beeswax/gen-py/TCLIService/TCLIService.py | 3bf8cee42c53891d5ab95aa26926dd4078f58c7f | [
"Apache-2.0"
] | permissive | oyorooms/hue | b53eb87f805063a90f957fd2e1733f21406269aa | 4082346ef8d5e6a8365b05752be41186840dc868 | refs/heads/master | 2020-04-15T20:31:56.931218 | 2019-01-09T19:02:21 | 2019-01-09T19:05:36 | 164,998,117 | 4 | 2 | Apache-2.0 | 2019-01-10T05:47:36 | 2019-01-10T05:47:36 | null | UTF-8 | Python | false | true | 104,941 | py | #
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface(object):
def OpenSession(self, req):
"""
Parameters:
- req
"""
pass
def CloseSession(self, req):
"""
Parameters:
- req
"""
pass
def GetInfo(self, req):
"""
Parameters:
- req
"""
pass
def ExecuteStatement(self, req):
"""
Parameters:
- req
"""
pass
def GetTypeInfo(self, req):
"""
Parameters:
- req
"""
pass
def GetCatalogs(self, req):
"""
Parameters:
- req
"""
pass
def GetSchemas(self, req):
"""
Parameters:
- req
"""
pass
def GetTables(self, req):
"""
Parameters:
- req
"""
pass
def GetTableTypes(self, req):
"""
Parameters:
- req
"""
pass
def GetColumns(self, req):
"""
Parameters:
- req
"""
pass
def GetFunctions(self, req):
"""
Parameters:
- req
"""
pass
def GetOperationStatus(self, req):
"""
Parameters:
- req
"""
pass
def CancelOperation(self, req):
"""
Parameters:
- req
"""
pass
def CloseOperation(self, req):
"""
Parameters:
- req
"""
pass
def GetResultSetMetadata(self, req):
"""
Parameters:
- req
"""
pass
def FetchResults(self, req):
"""
Parameters:
- req
"""
pass
def GetDelegationToken(self, req):
"""
Parameters:
- req
"""
pass
def CancelDelegationToken(self, req):
"""
Parameters:
- req
"""
pass
def RenewDelegationToken(self, req):
"""
Parameters:
- req
"""
pass
def GetLog(self, req):
"""
Parameters:
- req
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def OpenSession(self, req):
"""
Parameters:
- req
"""
self.send_OpenSession(req)
return self.recv_OpenSession()
def send_OpenSession(self, req):
self._oprot.writeMessageBegin('OpenSession', TMessageType.CALL, self._seqid)
args = OpenSession_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_OpenSession(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = OpenSession_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "OpenSession failed: unknown result");
def CloseSession(self, req):
"""
Parameters:
- req
"""
self.send_CloseSession(req)
return self.recv_CloseSession()
def send_CloseSession(self, req):
self._oprot.writeMessageBegin('CloseSession', TMessageType.CALL, self._seqid)
args = CloseSession_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_CloseSession(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = CloseSession_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseSession failed: unknown result");
def GetInfo(self, req):
"""
Parameters:
- req
"""
self.send_GetInfo(req)
return self.recv_GetInfo()
def send_GetInfo(self, req):
self._oprot.writeMessageBegin('GetInfo', TMessageType.CALL, self._seqid)
args = GetInfo_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetInfo failed: unknown result");
def ExecuteStatement(self, req):
"""
Parameters:
- req
"""
self.send_ExecuteStatement(req)
return self.recv_ExecuteStatement()
def send_ExecuteStatement(self, req):
self._oprot.writeMessageBegin('ExecuteStatement', TMessageType.CALL, self._seqid)
args = ExecuteStatement_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ExecuteStatement(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = ExecuteStatement_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "ExecuteStatement failed: unknown result");
def GetTypeInfo(self, req):
"""
Parameters:
- req
"""
self.send_GetTypeInfo(req)
return self.recv_GetTypeInfo()
def send_GetTypeInfo(self, req):
self._oprot.writeMessageBegin('GetTypeInfo', TMessageType.CALL, self._seqid)
args = GetTypeInfo_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetTypeInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetTypeInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTypeInfo failed: unknown result");
def GetCatalogs(self, req):
"""
Parameters:
- req
"""
self.send_GetCatalogs(req)
return self.recv_GetCatalogs()
def send_GetCatalogs(self, req):
self._oprot.writeMessageBegin('GetCatalogs', TMessageType.CALL, self._seqid)
args = GetCatalogs_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetCatalogs(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetCatalogs_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetCatalogs failed: unknown result");
def GetSchemas(self, req):
"""
Parameters:
- req
"""
self.send_GetSchemas(req)
return self.recv_GetSchemas()
def send_GetSchemas(self, req):
self._oprot.writeMessageBegin('GetSchemas', TMessageType.CALL, self._seqid)
args = GetSchemas_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetSchemas(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetSchemas_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetSchemas failed: unknown result");
def GetTables(self, req):
"""
Parameters:
- req
"""
self.send_GetTables(req)
return self.recv_GetTables()
def send_GetTables(self, req):
self._oprot.writeMessageBegin('GetTables', TMessageType.CALL, self._seqid)
args = GetTables_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetTables(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetTables_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTables failed: unknown result");
def GetTableTypes(self, req):
"""
Parameters:
- req
"""
self.send_GetTableTypes(req)
return self.recv_GetTableTypes()
def send_GetTableTypes(self, req):
self._oprot.writeMessageBegin('GetTableTypes', TMessageType.CALL, self._seqid)
args = GetTableTypes_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
  def recv_GetTableTypes(self, ):
    # Read the GetTableTypes reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = GetTableTypes_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTableTypes failed: unknown result");
  def GetColumns(self, req):
    """Synchronous GetColumns RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_GetColumns(req)
    return self.recv_GetColumns()
  def send_GetColumns(self, req):
    # Serialize a GetColumns CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('GetColumns', TMessageType.CALL, self._seqid)
    args = GetColumns_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_GetColumns(self, ):
    # Read the GetColumns reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = GetColumns_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetColumns failed: unknown result");
  def GetFunctions(self, req):
    """Synchronous GetFunctions RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_GetFunctions(req)
    return self.recv_GetFunctions()
  def send_GetFunctions(self, req):
    # Serialize a GetFunctions CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('GetFunctions', TMessageType.CALL, self._seqid)
    args = GetFunctions_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_GetFunctions(self, ):
    # Read the GetFunctions reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = GetFunctions_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetFunctions failed: unknown result");
  def GetOperationStatus(self, req):
    """Synchronous GetOperationStatus RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_GetOperationStatus(req)
    return self.recv_GetOperationStatus()
  def send_GetOperationStatus(self, req):
    # Serialize a GetOperationStatus CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('GetOperationStatus', TMessageType.CALL, self._seqid)
    args = GetOperationStatus_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_GetOperationStatus(self, ):
    # Read the GetOperationStatus reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = GetOperationStatus_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetOperationStatus failed: unknown result");
  def CancelOperation(self, req):
    """Synchronous CancelOperation RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_CancelOperation(req)
    return self.recv_CancelOperation()
  def send_CancelOperation(self, req):
    # Serialize a CancelOperation CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('CancelOperation', TMessageType.CALL, self._seqid)
    args = CancelOperation_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_CancelOperation(self, ):
    # Read the CancelOperation reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = CancelOperation_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelOperation failed: unknown result");
  def CloseOperation(self, req):
    """Synchronous CloseOperation RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_CloseOperation(req)
    return self.recv_CloseOperation()
  def send_CloseOperation(self, req):
    # Serialize a CloseOperation CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('CloseOperation', TMessageType.CALL, self._seqid)
    args = CloseOperation_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_CloseOperation(self, ):
    # Read the CloseOperation reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = CloseOperation_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseOperation failed: unknown result");
  def GetResultSetMetadata(self, req):
    """Synchronous GetResultSetMetadata RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_GetResultSetMetadata(req)
    return self.recv_GetResultSetMetadata()
  def send_GetResultSetMetadata(self, req):
    # Serialize a GetResultSetMetadata CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('GetResultSetMetadata', TMessageType.CALL, self._seqid)
    args = GetResultSetMetadata_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_GetResultSetMetadata(self, ):
    # Read the GetResultSetMetadata reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = GetResultSetMetadata_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetResultSetMetadata failed: unknown result");
  def FetchResults(self, req):
    """Synchronous FetchResults RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_FetchResults(req)
    return self.recv_FetchResults()
  def send_FetchResults(self, req):
    # Serialize a FetchResults CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('FetchResults', TMessageType.CALL, self._seqid)
    args = FetchResults_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_FetchResults(self, ):
    # Read the FetchResults reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = FetchResults_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "FetchResults failed: unknown result");
  def GetDelegationToken(self, req):
    """Synchronous GetDelegationToken RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_GetDelegationToken(req)
    return self.recv_GetDelegationToken()
  def send_GetDelegationToken(self, req):
    # Serialize a GetDelegationToken CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('GetDelegationToken', TMessageType.CALL, self._seqid)
    args = GetDelegationToken_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_GetDelegationToken(self, ):
    # Read the GetDelegationToken reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = GetDelegationToken_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetDelegationToken failed: unknown result");
  def CancelDelegationToken(self, req):
    """Synchronous CancelDelegationToken RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_CancelDelegationToken(req)
    return self.recv_CancelDelegationToken()
  def send_CancelDelegationToken(self, req):
    # Serialize a CancelDelegationToken CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('CancelDelegationToken', TMessageType.CALL, self._seqid)
    args = CancelDelegationToken_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_CancelDelegationToken(self, ):
    # Read the CancelDelegationToken reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = CancelDelegationToken_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelDelegationToken failed: unknown result");
  def RenewDelegationToken(self, req):
    """Synchronous RenewDelegationToken RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_RenewDelegationToken(req)
    return self.recv_RenewDelegationToken()
  def send_RenewDelegationToken(self, req):
    # Serialize a RenewDelegationToken CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('RenewDelegationToken', TMessageType.CALL, self._seqid)
    args = RenewDelegationToken_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_RenewDelegationToken(self, ):
    # Read the RenewDelegationToken reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = RenewDelegationToken_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "RenewDelegationToken failed: unknown result");
  def GetLog(self, req):
    """Synchronous GetLog RPC: send the request, then block for the reply.

    Parameters:
     - req
    """
    self.send_GetLog(req)
    return self.recv_GetLog()
  def send_GetLog(self, req):
    # Serialize a GetLog CALL message (args struct wrapping req) and flush it.
    self._oprot.writeMessageBegin('GetLog', TMessageType.CALL, self._seqid)
    args = GetLog_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_GetLog(self, ):
    # Read the GetLog reply; re-raise a transported TApplicationException,
    # otherwise decode the result struct and return its success payload.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = GetLog_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    raise TApplicationException(TApplicationException.MISSING_RESULT, "GetLog failed: unknown result");
class Processor(Iface, TProcessor):
  """Server-side Thrift processor: dispatches incoming RPC messages by name
  to the corresponding ``process_*`` method, which delegates to ``handler``
  (an Iface implementation) and writes the REPLY back on the output protocol.
  Generated code — do not edit by hand; regenerate from the IDL instead.
  """
  def __init__(self, handler):
    # handler: application object implementing the service interface (Iface).
    self._handler = handler
    # Map RPC name -> unbound process_* method used by process() below.
    self._processMap = {}
    self._processMap["OpenSession"] = Processor.process_OpenSession
    self._processMap["CloseSession"] = Processor.process_CloseSession
    self._processMap["GetInfo"] = Processor.process_GetInfo
    self._processMap["ExecuteStatement"] = Processor.process_ExecuteStatement
    self._processMap["GetTypeInfo"] = Processor.process_GetTypeInfo
    self._processMap["GetCatalogs"] = Processor.process_GetCatalogs
    self._processMap["GetSchemas"] = Processor.process_GetSchemas
    self._processMap["GetTables"] = Processor.process_GetTables
    self._processMap["GetTableTypes"] = Processor.process_GetTableTypes
    self._processMap["GetColumns"] = Processor.process_GetColumns
    self._processMap["GetFunctions"] = Processor.process_GetFunctions
    self._processMap["GetOperationStatus"] = Processor.process_GetOperationStatus
    self._processMap["CancelOperation"] = Processor.process_CancelOperation
    self._processMap["CloseOperation"] = Processor.process_CloseOperation
    self._processMap["GetResultSetMetadata"] = Processor.process_GetResultSetMetadata
    self._processMap["FetchResults"] = Processor.process_FetchResults
    self._processMap["GetDelegationToken"] = Processor.process_GetDelegationToken
    self._processMap["CancelDelegationToken"] = Processor.process_CancelDelegationToken
    self._processMap["RenewDelegationToken"] = Processor.process_RenewDelegationToken
    self._processMap["GetLog"] = Processor.process_GetLog
  def process(self, iprot, oprot):
    # Read one message and dispatch it.  NOTE(review): `type` shadows the
    # builtin, and the unknown-method branch returns None while the known
    # branch returns True — both are generator quirks, left as generated.
    (name, type, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      # Unknown RPC: drain the args struct and answer with UNKNOWN_METHOD.
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True
  def process_OpenSession(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = OpenSession_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = OpenSession_result()
    result.success = self._handler.OpenSession(args.req)
    oprot.writeMessageBegin("OpenSession", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_CloseSession(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = CloseSession_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = CloseSession_result()
    result.success = self._handler.CloseSession(args.req)
    oprot.writeMessageBegin("CloseSession", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetInfo(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetInfo_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetInfo_result()
    result.success = self._handler.GetInfo(args.req)
    oprot.writeMessageBegin("GetInfo", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_ExecuteStatement(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = ExecuteStatement_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = ExecuteStatement_result()
    result.success = self._handler.ExecuteStatement(args.req)
    oprot.writeMessageBegin("ExecuteStatement", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetTypeInfo(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetTypeInfo_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetTypeInfo_result()
    result.success = self._handler.GetTypeInfo(args.req)
    oprot.writeMessageBegin("GetTypeInfo", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetCatalogs(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetCatalogs_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetCatalogs_result()
    result.success = self._handler.GetCatalogs(args.req)
    oprot.writeMessageBegin("GetCatalogs", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetSchemas(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetSchemas_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetSchemas_result()
    result.success = self._handler.GetSchemas(args.req)
    oprot.writeMessageBegin("GetSchemas", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetTables(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetTables_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetTables_result()
    result.success = self._handler.GetTables(args.req)
    oprot.writeMessageBegin("GetTables", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetTableTypes(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetTableTypes_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetTableTypes_result()
    result.success = self._handler.GetTableTypes(args.req)
    oprot.writeMessageBegin("GetTableTypes", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetColumns(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetColumns_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetColumns_result()
    result.success = self._handler.GetColumns(args.req)
    oprot.writeMessageBegin("GetColumns", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetFunctions(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetFunctions_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetFunctions_result()
    result.success = self._handler.GetFunctions(args.req)
    oprot.writeMessageBegin("GetFunctions", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetOperationStatus(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetOperationStatus_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetOperationStatus_result()
    result.success = self._handler.GetOperationStatus(args.req)
    oprot.writeMessageBegin("GetOperationStatus", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_CancelOperation(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = CancelOperation_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = CancelOperation_result()
    result.success = self._handler.CancelOperation(args.req)
    oprot.writeMessageBegin("CancelOperation", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_CloseOperation(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = CloseOperation_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = CloseOperation_result()
    result.success = self._handler.CloseOperation(args.req)
    oprot.writeMessageBegin("CloseOperation", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetResultSetMetadata(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetResultSetMetadata_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetResultSetMetadata_result()
    result.success = self._handler.GetResultSetMetadata(args.req)
    oprot.writeMessageBegin("GetResultSetMetadata", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_FetchResults(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = FetchResults_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = FetchResults_result()
    result.success = self._handler.FetchResults(args.req)
    oprot.writeMessageBegin("FetchResults", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetDelegationToken(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetDelegationToken_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetDelegationToken_result()
    result.success = self._handler.GetDelegationToken(args.req)
    oprot.writeMessageBegin("GetDelegationToken", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_CancelDelegationToken(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = CancelDelegationToken_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = CancelDelegationToken_result()
    result.success = self._handler.CancelDelegationToken(args.req)
    oprot.writeMessageBegin("CancelDelegationToken", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_RenewDelegationToken(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = RenewDelegationToken_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = RenewDelegationToken_result()
    result.success = self._handler.RenewDelegationToken(args.req)
    oprot.writeMessageBegin("RenewDelegationToken", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_GetLog(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, and write the REPLY message.
    args = GetLog_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = GetLog_result()
    result.success = self._handler.GetLog(args.req)
    oprot.writeMessageBegin("GetLog", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class OpenSession_args(object):
  """
  Generated Thrift argument wrapper for the OpenSession RPC.

  Attributes:
   - req
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TOpenSessionReq, TOpenSessionReq.thrift_spec), None, ), # 1
  )
  def __init__(self, req=None,):
    self.req = req
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode, skipping unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TOpenSessionReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('OpenSession_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class OpenSession_result(object):
  """
  Generated Thrift result wrapper for the OpenSession RPC reply.

  Attributes:
   - success
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (TOpenSessionResp, TOpenSessionResp.thrift_spec), None, ), # 0
  )
  def __init__(self, success=None,):
    self.success = success
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; field id 0 is the success payload.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TOpenSessionResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('OpenSession_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class CloseSession_args(object):
  """
  Generated Thrift argument wrapper for the CloseSession RPC.

  Attributes:
   - req
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TCloseSessionReq, TCloseSessionReq.thrift_spec), None, ), # 1
  )
  def __init__(self, req=None,):
    self.req = req
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode, skipping unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TCloseSessionReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CloseSession_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class CloseSession_result(object):
  """
  Generated Thrift result wrapper for the CloseSession RPC reply.

  Attributes:
   - success
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (TCloseSessionResp, TCloseSessionResp.thrift_spec), None, ), # 0
  )
  def __init__(self, success=None,):
    self.success = success
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; field id 0 is the success payload.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TCloseSessionResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CloseSession_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class GetInfo_args(object):
  """
  Generated Thrift argument wrapper for the GetInfo RPC.

  Attributes:
   - req
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TGetInfoReq, TGetInfoReq.thrift_spec), None, ), # 1
  )
  def __init__(self, req=None,):
    self.req = req
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode, skipping unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TGetInfoReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetInfo_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class GetInfo_result(object):
  """
  Generated Thrift result wrapper for the GetInfo RPC reply.

  Attributes:
   - success
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (TGetInfoResp, TGetInfoResp.thrift_spec), None, ), # 0
  )
  def __init__(self, success=None,):
    self.success = success
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; field id 0 is the success payload.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TGetInfoResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetInfo_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class ExecuteStatement_args(object):
  """
  Generated Thrift argument wrapper for the ExecuteStatement RPC.

  Attributes:
   - req
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TExecuteStatementReq, TExecuteStatementReq.thrift_spec), None, ), # 1
  )
  def __init__(self, req=None,):
    self.req = req
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode, skipping unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TExecuteStatementReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ExecuteStatement_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class ExecuteStatement_result(object):
  """
  Generated Thrift result wrapper for the ExecuteStatement RPC reply.

  Attributes:
   - success
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (TExecuteStatementResp, TExecuteStatementResp.thrift_spec), None, ), # 0
  )
  def __init__(self, success=None,):
    self.success = success
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; field id 0 is the success payload.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TExecuteStatementResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ExecuteStatement_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class GetTypeInfo_args(object):
  """
  Generated Thrift argument wrapper for the GetTypeInfo RPC.

  Attributes:
   - req
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TGetTypeInfoReq, TGetTypeInfoReq.thrift_spec), None, ), # 1
  )
  def __init__(self, req=None,):
    self.req = req
  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode, skipping unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TGetTypeInfoReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Mirror of read(): C-accelerated encode when possible, else field-by-field.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetTypeInfo_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to validate in this generated struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class GetTypeInfo_result(object):
    """Return value of the GetTypeInfo RPC.

    Attributes:
     - success: the TGetTypeInfoResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetTypeInfoResp, TGetTypeInfoResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetTypeInfoResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetTypeInfo_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetCatalogs_args(object):
    """Call arguments for the GetCatalogs RPC.

    Attributes:
     - req: the TGetCatalogsReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetCatalogsReq, TGetCatalogsReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetCatalogsReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetCatalogs_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetCatalogs_result(object):
    """Return value of the GetCatalogs RPC.

    Attributes:
     - success: the TGetCatalogsResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetCatalogsResp, TGetCatalogsResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetCatalogsResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetCatalogs_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetSchemas_args(object):
    """Call arguments for the GetSchemas RPC.

    Attributes:
     - req: the TGetSchemasReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetSchemasReq, TGetSchemasReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetSchemasReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetSchemas_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetSchemas_result(object):
    """Return value of the GetSchemas RPC.

    Attributes:
     - success: the TGetSchemasResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetSchemasResp, TGetSchemasResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetSchemasResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetSchemas_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetTables_args(object):
    """Call arguments for the GetTables RPC.

    Attributes:
     - req: the TGetTablesReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetTablesReq, TGetTablesReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetTablesReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetTables_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetTables_result(object):
    """Return value of the GetTables RPC.

    Attributes:
     - success: the TGetTablesResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetTablesResp, TGetTablesResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetTablesResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetTables_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetTableTypes_args(object):
    """Call arguments for the GetTableTypes RPC.

    Attributes:
     - req: the TGetTableTypesReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetTableTypesReq, TGetTableTypesReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetTableTypesReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetTableTypes_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetTableTypes_result(object):
    """Return value of the GetTableTypes RPC.

    Attributes:
     - success: the TGetTableTypesResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetTableTypesResp, TGetTableTypesResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetTableTypesResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetTableTypes_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetColumns_args(object):
    """Call arguments for the GetColumns RPC.

    Attributes:
     - req: the TGetColumnsReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetColumnsReq, TGetColumnsReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetColumnsReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetColumns_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetColumns_result(object):
    """Return value of the GetColumns RPC.

    Attributes:
     - success: the TGetColumnsResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetColumnsResp, TGetColumnsResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetColumnsResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetColumns_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetFunctions_args(object):
    """Call arguments for the GetFunctions RPC.

    Attributes:
     - req: the TGetFunctionsReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetFunctionsReq, TGetFunctionsReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetFunctionsReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetFunctions_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetFunctions_result(object):
    """Return value of the GetFunctions RPC.

    Attributes:
     - success: the TGetFunctionsResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetFunctionsResp, TGetFunctionsResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetFunctionsResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetFunctions_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetOperationStatus_args(object):
    """Call arguments for the GetOperationStatus RPC.

    Attributes:
     - req: the TGetOperationStatusReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetOperationStatusReq, TGetOperationStatusReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetOperationStatusReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetOperationStatus_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetOperationStatus_result(object):
    """Return value of the GetOperationStatus RPC.

    Attributes:
     - success: the TGetOperationStatusResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TGetOperationStatusResp, TGetOperationStatusResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TGetOperationStatusResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetOperationStatus_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CancelOperation_args(object):
    """Call arguments for the CancelOperation RPC.

    Attributes:
     - req: the TCancelOperationReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TCancelOperationReq, TCancelOperationReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TCancelOperationReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('CancelOperation_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CancelOperation_result(object):
    """Return value of the CancelOperation RPC.

    Attributes:
     - success: the TCancelOperationResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TCancelOperationResp, TCancelOperationResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TCancelOperationResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('CancelOperation_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CloseOperation_args(object):
    """Call arguments for the CloseOperation RPC.

    Attributes:
     - req: the TCloseOperationReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TCloseOperationReq, TCloseOperationReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TCloseOperationReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('CloseOperation_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CloseOperation_result(object):
    """Return value of the CloseOperation RPC.

    Attributes:
     - success: the TCloseOperationResp response struct (or None)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TCloseOperationResp, TCloseOperationResp.thrift_spec), None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TCloseOperationResp()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('CloseOperation_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetResultSetMetadata_args(object):
    """Call arguments for the GetResultSetMetadata RPC.

    Attributes:
     - req: the TGetResultSetMetadataReq request struct (or None)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'req', (TGetResultSetMetadataReq, TGetResultSetMetadataReq.thrift_spec), None, ),  # 1
    )

    def __init__(self, req=None,):
        self.req = req

    def read(self, iprot):
        """Deserialize this struct from *iprot*, using the C fast path when available."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.req = TGetResultSetMetadataReq()
                    self.req.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, using the C fast path when available."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GetResultSetMetadata_args')
        if self.req is not None:
            oprot.writeFieldBegin('req', TType.STRUCT, 1)
            self.req.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() instead of the Python 2-only iteritems() so this also
        # works under Python 3; behavior on Python 2 is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetResultSetMetadata_result(object):
  """
  Thrift return-value wrapper for the GetResultSetMetadata RPC.

  Attributes:
   - success: the TGetResultSetMetadataResp reply struct (None until read/assigned)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TGetResultSetMetadataResp, TGetResultSetMetadataResp.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TGetResultSetMetadataResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetResultSetMetadata_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class FetchResults_args(object):
  """
  Thrift argument wrapper for the FetchResults RPC.

  Attributes:
   - req: the TFetchResultsReq request struct (None until assigned)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TFetchResultsReq, TFetchResultsReq.thrift_spec), None, ), # 1
  )

  def __init__(self, req=None,):
    self.req = req

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TFetchResultsReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('FetchResults_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class FetchResults_result(object):
  """
  Thrift return-value wrapper for the FetchResults RPC.

  Attributes:
   - success: the TFetchResultsResp reply struct (None until read/assigned)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TFetchResultsResp, TFetchResultsResp.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TFetchResultsResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('FetchResults_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class GetDelegationToken_args(object):
  """
  Thrift argument wrapper for the GetDelegationToken RPC.

  Attributes:
   - req: the TGetDelegationTokenReq request struct (None until assigned)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TGetDelegationTokenReq, TGetDelegationTokenReq.thrift_spec), None, ), # 1
  )

  def __init__(self, req=None,):
    self.req = req

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TGetDelegationTokenReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetDelegationToken_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class GetDelegationToken_result(object):
  """
  Thrift return-value wrapper for the GetDelegationToken RPC.

  Attributes:
   - success: the TGetDelegationTokenResp reply struct (None until read/assigned)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TGetDelegationTokenResp, TGetDelegationTokenResp.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TGetDelegationTokenResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetDelegationToken_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class CancelDelegationToken_args(object):
  """
  Thrift argument wrapper for the CancelDelegationToken RPC.

  Attributes:
   - req: the TCancelDelegationTokenReq request struct (None until assigned)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TCancelDelegationTokenReq, TCancelDelegationTokenReq.thrift_spec), None, ), # 1
  )

  def __init__(self, req=None,):
    self.req = req

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TCancelDelegationTokenReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CancelDelegationToken_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class CancelDelegationToken_result(object):
  """
  Thrift return-value wrapper for the CancelDelegationToken RPC.

  Attributes:
   - success: the TCancelDelegationTokenResp reply struct (None until read/assigned)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TCancelDelegationTokenResp, TCancelDelegationTokenResp.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TCancelDelegationTokenResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CancelDelegationToken_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class RenewDelegationToken_args(object):
  """
  Thrift argument wrapper for the RenewDelegationToken RPC.

  Attributes:
   - req: the TRenewDelegationTokenReq request struct (None until assigned)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TRenewDelegationTokenReq, TRenewDelegationTokenReq.thrift_spec), None, ), # 1
  )

  def __init__(self, req=None,):
    self.req = req

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TRenewDelegationTokenReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('RenewDelegationToken_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class RenewDelegationToken_result(object):
  """
  Thrift return-value wrapper for the RenewDelegationToken RPC.

  Attributes:
   - success: the TRenewDelegationTokenResp reply struct (None until read/assigned)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TRenewDelegationTokenResp, TRenewDelegationTokenResp.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TRenewDelegationTokenResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('RenewDelegationToken_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class GetLog_args(object):
  """
  Thrift argument wrapper for the GetLog RPC.

  Attributes:
   - req: the TGetLogReq request struct (None until assigned)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (TGetLogReq, TGetLogReq.thrift_spec), None, ), # 1
  )

  def __init__(self, req=None,):
    self.req = req

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = TGetLogReq()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetLog_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class GetLog_result(object):
  """
  Thrift return-value wrapper for the GetLog RPC.

  Attributes:
   - success: the TGetLogResp reply struct (None until read/assigned)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TGetLogResp, TGetLogResp.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: iterate the encoded fields, skipping any unknown ones.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TGetLogResp()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('GetLog_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems() so repr() also
    # works under Python 3 (iteritems() raises AttributeError there).
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
| [
"romain@cloudera.com"
] | romain@cloudera.com |
165dbfac08fe2737b02cb52e18b95f4b2f9ff9c7 | 1dc4561e17b3dcc4c3f9a30a185096b0f7e47407 | /sudo/settings.py | 68f59fd30485b024bef0aefb09ea3b87370efc48 | [] | no_license | sahitilucky/SuDo | c14b99a68d7c3da77832f67cc44f1b1daf28d416 | ec27cbea43165ef48fbc48e12dc4155c777d88b4 | refs/heads/master | 2021-01-22T13:04:11.702668 | 2014-07-01T17:57:41 | 2014-07-01T17:57:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,457 | py | import os
# Django settings for the SuDo project.
# Site-specific/secret values (DEBUG, SITE_URL, DATABASES, ...) are kept in
# local_settings.py and pulled in via the star import below.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
import local_settings
from local_settings import *
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
# NOTE: SITE_URL is expected to come from local_settings via the star import.
MEDIA_URL = SITE_URL + 'media/'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = SITE_URL + 'static/'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = os.path.join(BASE_DIR,'static')
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(BASE_DIR,'templates'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): a hard-coded SECRET_KEY should not be committed to version
# control; consider moving it into local_settings.py.
SECRET_KEY = '3u-avpdyav8*3$85e#)9j&5w5s&miraqkt-fk6ka_)_(=rogpr'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.core.context_processors.request",
    "django.contrib.messages.context_processors.messages",
    'sudo.context_processors.site_url',
    'sudo.context_processors.static_url',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sudo.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'sudo.wsgi.application'
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'pytils',
    'registration',
    'tuts',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
# django-registration: days an activation link stays valid, and whether
# new-account registration is open.
ACCOUNT_ACTIVATION_DAYS = 7
REGISTRATION_OPEN = True
LOGIN_REDIRECT_URL = SITE_URL
"srmanikandasriram@gmail.com"
] | srmanikandasriram@gmail.com |
1b91f277473e457bf7a13588026930f68d01270b | 97a3ed78b2a73124d91f72c5d765e6dd24ea55dd | /pyrogram/Raw Functions/message/GetDialogFilters.py | b48821fc14542f16a4101c1ddc886f75b37b190c | [] | no_license | LidmiPython/Lidmi | 5d370beb002970638ddb077ad4fa79a33c415691 | 1c5c833cd6538e27fb1f948f5de21c14c4bf1b9a | refs/heads/main | 2023-04-21T10:27:10.786995 | 2023-04-03T15:58:24 | 2023-04-03T15:58:24 | 308,862,526 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | from pyrogram.raw.functions.messages import GetDialogFilters
# Get the dialog-filter folders and the list of chats in each folder
# Raw-API example: on any incoming message, fetch the account's dialog
# filters (chat folders) and print the raw result.
# NOTE(review): assumes `app` (the pyrogram Client) and GetDialogFilters
# are defined/imported at module level — confirm against the full script.
@app.on_message()
async def messages_GetDialogFilters(client, message):
    msg = await app.send(GetDialogFilters())
    print(msg)
| [
"noreply@github.com"
] | LidmiPython.noreply@github.com |
012ec55775cf53199855f1f4a6732e29639740e1 | 30dee3a1031c0520b1ba33aa08cb8524f0f8ef29 | /delete_a_module_and_its_items.py | ec1b02b9ce90f23d74020e8a215735dc1d5060b1 | [
"MIT"
] | permissive | gqmaguirejr/Canvas-tools | 561848c7cf9cfc905db2d5ee37ac5815ed3911eb | 8a6fc3af1ebeeffc6578d6ed470329f6f796aa4a | refs/heads/master | 2023-04-13T03:30:34.312603 | 2023-04-07T12:10:01 | 2023-04-07T12:10:01 | 164,110,439 | 33 | 10 | MIT | 2022-04-13T14:22:25 | 2019-01-04T13:25:02 | Python | UTF-8 | Python | false | false | 13,732 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# ./delete_a_module_and_its_items.py course_id 'module_name'
#
# Output: To go throught a specific module and delete the items in the module. If they are pages, then delete the page - unless it is used by anothr module.
#
#
# with the option "-v" or "--verbose" you get lots of output - showing in detail the operations of the program
#
# With the option "-t" or "--testing" it does not actually do the deletes
#
# Can also be called with an alternative configuration file:
# ./list_your_courses.py --config config-test.json
#
# Example:
# ./delete_a_module_and_its_items.py --config config-test.json 11 'Test module for deletion'
#
# ./delete_a_module_and_its_items.py --testing --config config-test.json 11 'Test module for deletion'
#
# ./delete_a_module_and_its_items.py --config config-test.json 11 'Test module for deletion'
#
# G. Q. Maguire Jr.
#
# based on earlier edit_modules_items_in_a_module_in_a_course.py and cdel.py
#
# Note that when an existing page is used in a module, it gets a new module item instance in the module ; however, the url points to the original wikipage. For this reason, one can consider deleting the page only if it is not used in another module and in any case you can delete the module item.
#
# 2021-09-30
#
import requests, time
import pprint
import optparse
import sys
import json
# Use Python Pandas to create XLSX files
import pandas as pd
#############################
###### EDIT THIS STUFF ######
#############################
global baseUrl # the base URL used for access to Canvas
global header # the header for all HTML requests
global payload # place to store additionally payload when needed for options to HTML requests
# Based upon the options to the program, initialize the variables used to access Canvas gia HTML requests
def initialize(options):
    """Read the JSON configuration file and set up the Canvas API globals.

    Sets baseUrl (the Canvas API root), header (Bearer-token Authorization
    header) and payload. On any problem reading or parsing the file it
    prints advice and exits the program.
    """
    global baseUrl, header, payload
    # styled based upon https://martin-thoma.com/configuration-files-in-python/
    if options.config_filename:
        config_file=options.config_filename
    else:
        config_file='config.json'
    try:
        with open(config_file) as json_data_file:
            configuration = json.load(json_data_file)
        access_token=configuration["canvas"]["access_token"]
        baseUrl="https://"+configuration["canvas"]["host"]+"/api/v1"
        header = {'Authorization' : 'Bearer ' + access_token}
        payload = {}
    # Catch only what this block can realistically raise (missing/unreadable
    # file, malformed JSON, missing keys) instead of a bare `except:` that
    # would also swallow KeyboardInterrupt/SystemExit and hide real bugs.
    except (OSError, json.JSONDecodeError, KeyError):
        print("Unable to open configuration file named {}".format(config_file))
        print("Please create a suitable configuration file, the default name is config.json")
        sys.exit()
def list_modules(course_id):
    """Return all modules of the given course as a list of dicts.

    Follows the Canvas API's "next" pagination links, so the complete list
    is returned even when the response is split over several pages.
    """
    modules_found_thus_far=[]
    # Use the Canvas API to get the list of modules for the course
    #GET /api/v1/courses/:course_id/modules
    url = "{0}/courses/{1}/modules".format(baseUrl, course_id)
    if Verbose_Flag:
        print("url: {}".format(url))
    r = requests.get(url, headers = header)
    if Verbose_Flag:
        print("result of getting modules: {}".format(r.text))
    if r.status_code == requests.codes.ok:
        page_response=r.json()
        for p_response in page_response:
            modules_found_thus_far.append(p_response)
        # the following is needed when the reponse has been paginated
        # i.e., when the response is split into pieces - each returning only some of the list of modules
        # see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
        while r.links.get('next', False):
            r = requests.get(r.links['next']['url'], headers=header)
            if Verbose_Flag:
                print("result of getting modules for a paginated response: {}".format(r.text))
            page_response = r.json()
            for p_response in page_response:
                modules_found_thus_far.append(p_response)
    return modules_found_thus_far
def list_module_items(course_id, module_id):
    """Return all items of the given module in the given course.

    Follows the Canvas API's "next" pagination links, so the complete list
    is returned even when the response is split over several pages.
    """
    module_items_found_thus_far=[]
    # Use the Canvas API to get the list of module items for the module
    # GET /api/v1/courses/:course_id/modules/:module_id/items
    url = "{0}/courses/{1}/modules/{2}/items".format(baseUrl, course_id, module_id)
    if Verbose_Flag:
        print("url: {}".format(url))
    r = requests.get(url, headers = header)
    if Verbose_Flag:
        print("result of getting module items: {}".format(r.text))
    if r.status_code == requests.codes.ok:
        page_response=r.json()
        for p_response in page_response:
            module_items_found_thus_far.append(p_response)
        # the following is needed when the reponse has been paginated
        # i.e., when the response is split into pieces - each returning only some of the list of modules
        # see "Handling Pagination" - Discussion created by tyler.clair@usu.edu on Apr 27, 2015, https://community.canvaslms.com/thread/1500
        while r.links.get('next', False):
            r = requests.get(r.links['next']['url'], headers=header)
            if Verbose_Flag:
                print("result of getting modules for a paginated response: {}".format(r.text))
            page_response = r.json()
            for p_response in page_response:
                module_items_found_thus_far.append(p_response)
    return module_items_found_thus_far
# canvas_course_page_url will be of the form: https://kth.instructure.com/courses/11/pages/notes-20160716
def del_course_page(canvas_course_page_url):
    """Delete a single wiki page given its full Canvas URL.

    canvas_course_page_url is of the form
    https://<host>/courses/<course_id>/pages/<page_url>.
    Returns True if the DELETE succeeded, False otherwise.
    """
    # Use the Canvas API to delete the page
    # DELETE /api/v1/courses/:course_id/pages/:url
    #extract course_id from URL
    course_id=canvas_course_page_url[canvas_course_page_url.find("courses/")+8:canvas_course_page_url.find("pages/")-1]
    if Verbose_Flag:
        print("course_id: {}".format(course_id))
    #extract the page-name portion of the URL
    page_url=canvas_course_page_url[canvas_course_page_url.rfind("/")+1:]
    if Verbose_Flag:
        print("page_url: {}".format(page_url))
    # (removed the unused new_file_name computation that was here)
    url = "{0}/courses/{1}/pages/{2}".format(baseUrl,course_id, page_url)
    if Verbose_Flag:
        print(url)
    payload={}
    r = requests.delete(url, headers = header, data=payload)
    if Verbose_Flag:
        print("r.status_code: {}".format(r.status_code))
    if r.status_code == requests.codes.ok:
        print("{} deleted".format(canvas_course_page_url))
        return True
    # Failure path; the duplicated, unreachable trailing `return False`
    # of the original has been removed.
    print("error when deleteing page: {}".format(canvas_course_page_url))
    return False
def del_course_pages(course_id, urls):
    """Delete the given wiki pages (by page slug) from a course.

    :param course_id: Canvas course id
    :param urls: iterable of page slugs (the :url part of the endpoint)

    Uses the Canvas API: DELETE /api/v1/courses/:course_id/pages/:url
    """
    if Verbose_Flag:
        print("course_id: {}".format(course_id))
    for page_url in urls:
        if Verbose_Flag:
            print("page_url: {}".format(page_url))
        url = "{0}/courses/{1}/pages/{2}".format(baseUrl, course_id, page_url)
        r = requests.delete(url, headers=header, data={})
        if Verbose_Flag:
            print("r.status_code: {}".format(r.status_code))
        if r.status_code == requests.codes.ok:
            # BUG FIX: previously printed the undefined name
            # canvas_course_page_url, raising a NameError on every
            # successful deletion; report the page that was deleted instead
            print("{} deleted".format(page_url))
        else:
            print("error when deleteing page: {}".format(page_url))
def delete_module(course_id, module_id):
    """Remove one module (the container, not its items) from a Canvas course.

    Uses the Canvas API: DELETE /api/v1/courses/:course_id/modules/:id
    """
    if Verbose_Flag:
        print("course_id: {}".format(course_id))
    endpoint = "{0}/courses/{1}/modules/{2}".format(baseUrl, course_id, module_id)
    reply = requests.delete(endpoint, headers=header, data={})
    if Verbose_Flag:
        print("r.status_code: {}".format(reply.status_code))
    if reply.status_code != requests.codes.ok:
        print("error when deleteing module: {}".format(module_id))
        return
    reply.json()  # parse the response body (result unused)
    if Verbose_Flag:
        print("module {} deleted".format(module_id))
def delete_module_item(course_id, module_id, item_id):
    """Remove one item from a module in a Canvas course.

    Uses the Canvas API:
    DELETE /api/v1/courses/:course_id/modules/:module_id/items/:id
    """
    if Verbose_Flag:
        print("course_id: {}".format(course_id))
    endpoint = "{0}/courses/{1}/modules/{2}/items/{3}".format(baseUrl, course_id, module_id, item_id)
    reply = requests.delete(endpoint, headers=header, data={})
    if Verbose_Flag:
        print("r.status_code: {}".format(reply.status_code))
    if reply.status_code != requests.codes.ok:
        print("error when deleteing module item: {}".format(item_id))
        return
    reply.json()  # parse the response body (result unused)
    if Verbose_Flag:
        print("module item {} deleted".format(item_id))
def look_for_use_elsewhere(course_id, url, module_id, modules_info):
    """Return True iff *url* is used by an item in any module other than *module_id*.

    :param modules_info: dict mapping module id -> list of module-item dicts
    """
    global Testing_Flag
    if Testing_Flag:
        print("looking for url={}".format(url))
    for other_module, items in modules_info.items():
        if other_module == module_id:
            # don't compare the module we are currently processing with itself
            continue
        for entry in items:
            candidate = entry.get('url', None)
            # entries without a URL (e.g. SubHeaders) can never match
            if candidate and candidate == url:
                if Testing_Flag:
                    print("found url in module_id={}".format(other_module))
                return True
    # the URL is not referenced by any other module
    return False
def process_module(course_id, module_id, modules, modules_info):
    """Delete every item of *module_id* (and unused pages), then the module itself.

    In testing mode nothing is deleted; the would-be deletions are printed.
    """
    global Verbose_Flag
    global Testing_Flag
    items = list_module_items(course_id, module_id)
    if Verbose_Flag:
        print("module_items={}".format(items))
    item_count = len(items)
    if Verbose_Flag:
        print("number_of_items={}".format(item_count))
    if item_count < 1:
        return
    # Canvas module-item positions are 1-based
    for position in range(1, item_count + 1):
        process_item(course_id, position, items, module_id, modules_info)
    # all items have been handled; now remove the (empty) module itself
    if Testing_Flag:
        print("If not testing, module_id={} would be deleted".format(module_id))
    else:
        delete_module(course_id, module_id)
def process_item(course_id, position, module_items, module_id, modules_info):
    """Delete the module item at *position*; for Page items also delete the
    underlying wiki page when no other module still links to it.

    Returns True when an item was processed, None when no item exists at
    *position*.  In testing mode the deletions are only printed.
    """
    print("process_item {}".format(position))
    # locate the entry with this position (last match wins)
    target = None
    for entry in module_items:
        if entry['position'] == position:
            target = entry
    if not target:
        return
    print("processing item: {}".format(target['title']))
    # possible item types: 'File', 'Page', 'Discussion', 'Assignment',
    # 'Quiz', 'SubHeader', 'ExternalUrl', 'ExternalTool'
    if target['type'] == 'Page':
        page_url = target['url']
        # only remove the page itself if it is not shared with another module
        if not look_for_use_elsewhere(course_id, page_url, module_id, modules_info):
            if Testing_Flag:
                print("If not testing, url={} would be deleted".format(page_url))
            else:
                del_course_pages(course_id, [page_url])
    # finally remove the item from this module
    if Testing_Flag:
        print("If not testing, item_id={} would be deleted".format(target['id']))
    else:
        delete_module_item(course_id, module_id, target['id'])
    return True
def main():
    """Command-line entry point: delete a named module (and its now-unused
    pages) from a Canvas course.

    Usage: <script> [-v] [-t] [--config FILE] course_id module_name
    """
    global Verbose_Flag
    global Testing_Flag
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose',
                      dest="verbose",
                      default=False,
                      action="store_true",
                      help="Print lots of output to stdout"
                      )
    parser.add_option("--config", dest="config_filename",
                      help="read configuration from FILE", metavar="FILE")
    parser.add_option('-t', '--testing',
                      dest="testing",
                      default=False,
                      action="store_true",
                      help="Enable testing mode"
                      )
    options, remainder = parser.parse_args()
    Verbose_Flag = options.verbose
    if Verbose_Flag:
        print("ARGV : {}".format(sys.argv[1:]))
        print("VERBOSE : {}".format(options.verbose))
        print("REMAINING : {}".format(remainder))
        print("Configuration file : {}".format(options.config_filename))
    Testing_Flag = options.testing
    if Testing_Flag:
        print("In testing mode")
    initialize(options)
    if len(remainder) < 1:
        print("Insuffient arguments - must provide course_id")
        return
    course_id = remainder[0]
    modules = list_modules(course_id)
    if not modules:
        # BUG FIX: previously fell through after this message and went on to
        # iterate over None/[], crashing or doing nothing useful
        print("No modules in the course!")
        return
    # resolve the module name (second positional argument) to its id
    module_id = None
    if len(remainder) == 2:
        module_name = remainder[1]
        for m in modules:
            if m['name'] == module_name:
                module_id = m['id']
    if module_id is None:
        # BUG FIX: without a resolved module id the calls below would target
        # .../modules/None/... ; fail early with a clear message instead
        print("must provide the name of an existing module to delete")
        return
    # snapshot every module's items so shared pages can be detected
    modules_info = dict()
    for m in modules:
        modules_info[m['id']] = list_module_items(course_id, m['id'])
    if Testing_Flag:
        print("modules_info={}".format(modules_info))
    process_module(course_id, module_id, modules, modules_info)

if __name__ == "__main__": main()
| [
"maguire@kth.se"
] | maguire@kth.se |
091dd22d440f86d5dd6c055c1e239998cf322eb4 | dd4bc9782dd41f08b7c8db4dad29f9702fce9743 | /C@SKR/skrvs/skr.py | 2f9d289348bcc2bae0519f5ee032cc9429db69d8 | [] | no_license | shubham3rajput/Practice | fee34db6ccb12305cf514195d0f6a1e3ada64c07 | 6785d36e4baa7c27f23c38adc4e581d281b5a04b | refs/heads/main | 2022-12-25T20:14:09.018113 | 2020-10-09T13:33:55 | 2020-10-09T13:33:55 | 302,647,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22 | py | print("Hello Shubham") | [
"38416939+shubham3rajput@users.noreply.github.com"
] | 38416939+shubham3rajput@users.noreply.github.com |
e03518fe5a868c8690c8dd7672785198665cacfe | a5b1481c9c232e8deb6dc79be3ca57e9f6d0990a | /core/config.py | 66a021793d0ee3fe62a87030e1087f54379d15f4 | [
"MIT"
] | permissive | wangxieric/Meta-Aug | 6009092cda28bb1aeaf491c83ecab4cf96ef9b17 | 2d0a3c0535be8a6c6d9bf7fd45188f7f97b86ec6 | refs/heads/main | 2023-07-01T05:47:37.071456 | 2021-08-11T08:36:50 | 2021-08-11T08:36:50 | 389,677,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | import os
import sys
import json
def prep_config(config_file):
    """Read a JSON configuration file and return its contents as a dict."""
    with open(config_file) as config_params:
        print(f"loading config file {config_file}")
        config = json.load(config_params)
return config | [
"wangxieric@gmail.com"
] | wangxieric@gmail.com |
419a34672ae78d9fb5a0562be44038daf736fd30 | 5b16e979dc7dc2471786113e49f05fa2fc2e563b | /tests/processor/test_test_command_callback.py | 815a52d94e76e08e1dd17a0de132a0b8870a0869 | [
"MIT"
] | permissive | Ye0nny/defects4cpp | 3ebbeee80a960154888ab90ddaf8d42bc800c915 | c2a6406faa907d91c471c6b2f95000e1a20c9ab1 | refs/heads/main | 2023-09-02T04:11:15.601235 | 2021-11-04T06:43:02 | 2021-11-04T06:43:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,110 | py | import json
from dataclasses import dataclass
from pathlib import Path
from typing import Callable, Generator, List, Optional
import processor
import pytest
import taxonomy
from processor.core.command import DockerCommand, DockerCommandScript, DockerCommandScriptGenerator
from processor.core.docker import Worktree
# Minimal Dockerfile used by the dummy command fixtures below: an Ubuntu
# image with an unprivileged 'defects4cpp' user whose home directory is the
# container workspace.
_DUMMY_DOCKERFILE = """
FROM ubuntu:20.04
RUN useradd --create-home --home-dir /home/workspace --shell /bin/bash defects4cpp
USER defects4cpp
ENV USER defects4cpp
WORKDIR /home/workspace
"""
# Name of the taxonomy project these fixtures pretend to operate on.
_TEST_PROJECT_NAME = "yara"
class DummyDockerCommand(DockerCommand):
    """Test double for DockerCommand that replays a fixed list of commands.

    Builds real MetaData/Worktree objects rooted at *tmp* and writes the
    dummy Dockerfile there so the command can actually run in a container.
    """

    # presumably tells the DockerCommand machinery not to register this
    # subclass in its command registry -- TODO confirm against DockerCommand
    _ignore_registry = True

    def __init__(
        self,
        callback,
        command_type: taxonomy.CommandType,
        commands: List[str],
        tmp: Path,
    ):
        super().__init__()
        # callback is invoked with (linenr, exit_code, output) per executed line
        self.callback = callback
        self.command_type = command_type
        self.commands = commands
        self.metadata = taxonomy.MetaData(_TEST_PROJECT_NAME, str(tmp))
        self.worktree = Worktree(_TEST_PROJECT_NAME, 1, False, str(tmp))
        # place the Dockerfile where MetaData expects to find it
        with open(f"{self.metadata.dockerfile}", "w+") as fp:
            fp.write(_DUMMY_DOCKERFILE)

    def create_script_generator(self, argv: List[str]) -> DockerCommandScriptGenerator:
        # argv is ignored; the generator always replays self.commands
        return DummyDockerCommandScriptGenerator(
            self.callback,
            self.command_type,
            self.commands,
            self.metadata,
            self.worktree,
        )

    def setup(self, generator: DockerCommandScriptGenerator):
        # no pre-run work needed for the dummy
        pass

    def teardown(self, generator: DockerCommandScriptGenerator):
        # no post-run work needed for the dummy
        pass

    @property
    def help(self) -> str:
        return "help"
class DummyDockerCommandScriptGenerator(DockerCommandScriptGenerator):
    """Generator double that yields exactly one DummyDockerCommandScript."""

    def __init__(
        self,
        callback: Callable,
        command_type: taxonomy.CommandType,
        commands: List[str],
        metadata: taxonomy.MetaData,
        worktree: Worktree,
    ):
        # third argument is a flag passed straight to the base class --
        # presumably a verbose/stream toggle; TODO confirm against
        # DockerCommandScriptGenerator's signature
        super().__init__(metadata, worktree, False)
        self.callback = callback
        self.command_type = command_type
        self.commands = commands

    def create(self) -> Generator[DockerCommandScript, None, None]:
        # a single script wrapping the whole fixed command list
        yield DummyDockerCommandScript(self.callback, self.command_type, self.commands)
class DummyDockerCommandScript(DockerCommandScript):
    """Script double that forwards each executed line's result to *callback*."""

    def __init__(
        self, callback: Callable, command_type: taxonomy.CommandType, command: List[str]
    ):
        super().__init__(command_type, command)
        self.callback = callback

    def before(self):
        pass

    def output(self, linenr: int, exit_code: Optional[int], output: str):
        # tap point: the tests assert on (linenr, exit_code, output) here
        self.callback(linenr, exit_code, output)

    def after(self):
        pass
@dataclass
class TestConfig:
    """Filesystem layout used by one test invocation."""
    tmp: Path          # per-test scratch root
    src_dir: Path      # fake checkout directory ("<tmp>/<project>/fixed#1")
    output_dir: Path   # where test/coverage results are written
    dest: List[Path]   # expected per-case result directories
    # keep pytest from collecting this helper class as a test case
    __test__ = False
@pytest.fixture
def setup(tmp_path: Path, request) -> Callable[[List[int]], TestConfig]:
    """Yield a factory that lays out a fake project checkout plus an output
    directory and returns the resulting :class:`TestConfig`.

    The factory takes a list of case numbers; ``TestConfig.dest`` holds the
    per-case result directories expected under the output directory.
    """
    def create(case: List[int]) -> TestConfig:
        root = tmp_path / request.node.name
        checkout = root / _TEST_PROJECT_NAME / "fixed#1"
        results = root / "output"
        for directory in (checkout, results):
            directory.mkdir(parents=True, exist_ok=True)
        # minimal project marker consumed by the processor commands
        with open(checkout / ".defects4cpp.json", "w+") as fp:
            json.dump(
                {
                    "project_name": _TEST_PROJECT_NAME,
                    "index": 1,
                    "buggy": False,
                    "workspace": str(checkout.parents[1]),
                },
                fp,
            )
        case_dirs = [results / f"{_TEST_PROJECT_NAME}-fixed#1-{i}" for i in case]
        return TestConfig(root, checkout, results, case_dirs)
    return create
def iterate_once(script_it: Generator[DockerCommandScript, None, None]):
next(script_it)
return next(script_it)
def iterate_coverage_once(script_it: Generator[DockerCommandScript, None, None]):
next(script_it)
next(script_it)
obj = next(script_it)
return obj
def test_check_result(setup):
    """A zero exit code must be recorded as 'passed', non-zero as 'failed',
    and the raw output must be written next to the verdict file."""
    test = processor.TestCommand()
    config = setup([1, 2])
    cmd = f"{str(config.src_dir)} --output-dir={str(config.output_dir)} --case 1,2".split()
    script_generator = test.create_script_generator(cmd)
    script_it = script_generator.create()
    # Command with zero exit code.
    a = iterate_once(script_it)
    # pretend the script body is a single (empty) line so that output()
    # bookkeeping has something to index -- TODO confirm against DockerCommandScript
    a.lines = [""]
    a.output(1, 0, "hello world!")
    d1 = config.dest[0]
    with open(f"{d1}/1.output", "r") as output:
        assert output.readline() == "hello world!"
    with open(f"{d1}/1.test", "r") as result:
        assert result.readline() == "passed"
    # Command with non-zero exit code.
    b = iterate_once(script_it)
    b.lines = [""]
    b.output(1, 1, "Bye world!")
    d2 = config.dest[1]
    with open(f"{d2}/2.output", "r") as output:
        assert output.readline() == "Bye world!"
    with open(f"{d2}/2.test", "r") as result:
        assert result.readline() == "failed"
def test_check_coverage(setup):
    """Coverage run must collect *.gcov files into the case result directory
    (consuming the gcov directory), and record a failure when none exist."""
    test = processor.TestCommand()
    config = setup([1])
    cmd = f"{str(config.src_dir)} --coverage --output-dir={str(config.output_dir)} --case 1".split()
    script_generator = test.create_script_generator(cmd)
    script_it = script_generator.create()
    # Create a dummy gcov directory.
    gcov = config.src_dir / "gcov"
    gcov.mkdir(parents=True, exist_ok=True)
    with open(f"{gcov}/foo.gcov", "w+") as fp:
        fp.write("Hello, world!")
    a = iterate_coverage_once(script_it)
    a.lines = [""]  # minimal fake script body -- see test_check_result
    a.output(1, 0, "hello world!")
    # gcov directory should be removed.
    assert not gcov.exists()
    # the collected file keeps its contents, now under the case directory
    with open(config.dest[0] / "foo.gcov", "r") as fp:
        assert fp.readline() == "Hello, world!"
    assert len(test.failed_coverage_files) == 0
    # Run again to see if it fails (there is no gcov directory).
    script_it = script_generator.create()
    a = iterate_coverage_once(script_it)
    a.lines = [""]
    a.output(1, 0, "hello world!")
    assert len(test.failed_coverage_files) > 0
def test_run_command(setup):
    """CommandType.Docker runs each line in a fresh shell: simple commands
    work, but environment changes do not persist between lines."""
    def docker_command_type_should_pass(_: Optional[int], exit_code: int, output: str):
        assert exit_code == 0
        assert output.strip() == "Hello, world!"
    def docker_command_type_should_fail_to_keep_context(
        linenr: Optional[int], exit_code: int, output: str
    ):
        if linenr == 1:
            # export TEST_VAR=1 won't work
            assert exit_code != 0
        elif linenr == 2:
            # TEST_VAR is not set
            assert exit_code == 0
            assert output.strip() == "$TEST_VAR"
        else:
            assert False, "unexpected line, check test input again"
    config = setup([1])
    test = DummyDockerCommand(
        callback=docker_command_type_should_pass,
        command_type=taxonomy.CommandType.Docker,
        commands=["echo 'Hello, world!'"],
        tmp=config.tmp,
    )
    # calling the command object executes it with an empty argv
    test([])
    test = DummyDockerCommand(
        callback=docker_command_type_should_fail_to_keep_context,
        command_type=taxonomy.CommandType.Docker,
        commands=["export TEST_VAR=1", "echo $TEST_VAR"],
        tmp=config.tmp,
    )
    test([])
def test_run_command_as_script(setup):
    """CommandType.Script runs all lines as one script: a single exit status
    (linenr is None) and environment changes persist between lines."""
    def script_command_type_should_pass(_: Optional[int], exit_code: int, output: str):
        assert exit_code == 0
        assert output.strip() == "Hello, world!"
    def script_command_type_should_keep_context(
        linenr: Optional[int], exit_code: int, output: str
    ):
        # one callback for the whole script, hence no line number
        assert linenr is None
        assert exit_code == 0
        assert output.strip() == "1"
    config = setup([1])
    test = DummyDockerCommand(
        callback=script_command_type_should_pass,
        command_type=taxonomy.CommandType.Script,
        commands=["#!/usr/bin/env bash", "echo 'Hello, world!'"],
        tmp=config.tmp,
    )
    test([])
    test = DummyDockerCommand(
        callback=script_command_type_should_keep_context,
        command_type=taxonomy.CommandType.Script,
        commands=["#!/usr/bin/env bash", "export TEST_VAR=1", "echo $TEST_VAR"],
        tmp=config.tmp,
    )
    test([])
| [
"gentlebuuny@gmail.com"
] | gentlebuuny@gmail.com |
8b2c6e9ce2bc943ab4214d1bf2ca2a7ae6d5b347 | 592e7e8f1224190c5a33ec7ca8ba97f1ae1919bd | /_815/Sol_3_43_45.py | 18850cbfe349daa6ed0bc140a14f9a39520ece8f | [] | no_license | Nicolas-Li/LeetCode | b4e5eaeb156aab8fb9ab531eb8425617e32d3914 | 7c60414d5dcdf4a3d74c11390ed50ec81def06cf | refs/heads/main | 2023-03-07T14:20:46.470271 | 2021-02-18T03:43:58 | 2021-02-18T03:43:58 | 331,369,748 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 977 | py | class Solution:
def numBusesToDestination(self, routes: List[List[int]], S: int, T: int) -> int:
import collections
metric = collections.defaultdict(set)
max_num = 0
for route in routes:
for i in route:
if i > max_num:
max_num = i
for j in route:
metric[i].add(j)
import sys
stop_num = max_num + 1
dest = []
rel = [0] * stop_num
for l in range(len(routes) + 1):
if l == 0:
dest.append([S])
rel[S] = 1
else:
lev = []
for i in dest[l-1]:
for j in metric.get(i, []):
if rel[j] == 0:
rel[j] = 1
lev.append(j)
dest.append(lev)
if T in dest[l]:
return l
return -1 | [
"lijunjie_thu@qq.com"
] | lijunjie_thu@qq.com |
0975efa145a38f80e3ae613b3f3420b11d1e7b2a | ac6896a6dfc76b04d49c6cf2d9e89134f6db58fc | /Implementation/EHR/ehr/predict_risk/models.py | 3568a1d44ea8db891e50040002d9e60d3934fcf6 | [] | no_license | abdullah12388/EHR-project | 0f39c4f2f05b2da6cb2013fc34c8d6f7991d1393 | e1ef5cd70db5b008801f37711d0e1ef7e7141f38 | refs/heads/master | 2020-04-01T23:48:39.953780 | 2019-07-01T16:30:00 | 2019-07-01T16:30:00 | 153,774,032 | 1 | 2 | null | 2019-02-23T16:51:38 | 2018-10-19T11:40:58 | Tcl | UTF-8 | Python | false | false | 1,971 | py | from django.db import models
# from accounts.models import UserProfileInfo
from patient.models import patient
from django.utils import timezone
from django.urls import reverse
# Create your models here.
# Coded value choices for the heart-disease risk form.  The integer codes
# appear to follow the UCI Cleveland heart-disease dataset encoding
# (e.g. thallium scan results 3/6/7) -- confirm against the training data.
sex_choices = ((0, 'Female'), (1, 'Male'))
cp_choice = ((0, 'None'), (1, 'Typical Angina'), (2, 'Atypical Angina'), (3, 'Non-Angina'), (4, 'Asymptomatic'))
# cleaned up: the second entry was wrapped in a redundant extra pair of parentheses
fasting_blood_sugar_choices = ((1, '> 120 mg/dl'), (0, '< 120 mg/dl'))
resting_ecg_choices = ((0, 'Normal'), (1, 'Having ST-T wave abnormality'), (2, 'hypertrophy'))
exercise_induced_angina_choices = ((0, 'No'), (1, 'Yes'))
st_slope_choices = ((1, 'Upsloping'), (2, 'Flat'), (3, 'Down Sloping'))
number_of_vessels_choices = ((0, 'None'), (1, 'One'), (2, 'Two'), (3, 'Three'))
thallium_scan_results_choices = ((3, 'Normal'), (6, 'Fixed Defect'), (7, 'Reversible Defect'))
class Predictions(models.Model):
    """A stored heart-disease risk prediction for one patient.

    Field encodings mirror the *_choices tuples defined above; ``num`` holds
    the predicted outcome -- presumably the classifier's class label, TODO
    confirm against the prediction view that creates these records.
    """
    # owning patient; deleting the patient cascades to their predictions
    profile = models.ForeignKey(patient, on_delete=models.CASCADE, related_name='predict')
    age = models.IntegerField()
    sex = models.IntegerField(choices=sex_choices, default=0)
    cp = models.IntegerField(choices=cp_choice,default=0)  # chest-pain type
    resting_bp = models.IntegerField()  # resting blood pressure
    serum_cholesterol = models.IntegerField()
    fasting_blood_sugar = models.IntegerField(choices=fasting_blood_sugar_choices,default=0)
    resting_ecg = models.IntegerField(choices=resting_ecg_choices,default=0)
    max_heart_rate = models.IntegerField()
    exercise_induced_angina = models.IntegerField(choices=exercise_induced_angina_choices,default=0)
    st_depression = models.DecimalField(max_digits=4, decimal_places=2)
    st_slope = models.IntegerField(choices=st_slope_choices)
    number_of_vessels = models.IntegerField(choices=number_of_vessels_choices)
    thallium_scan_results = models.IntegerField(choices=thallium_scan_results_choices)
    predicted_on = models.DateTimeField(default=timezone.now)  # timestamp of the prediction
    num=models.IntegerField()  # predicted result code
    def get_absolute_url(self):
        # used by Django views to redirect back to this patient's predict page
        return reverse('predict:predict', kwargs={'pk': self.profile.pk})
| [
"abdullah.mk96@yahoo.com"
] | abdullah.mk96@yahoo.com |
2e0d9b100a84da1410af72aac37b86c550b878dd | 18e89ca8c0c19d1eec4583fd1d9eb81afd3fbf3a | /Playground/snakesandladders.py | 8613e090ad748d6a7612f2061975c49f6bdf6d0f | [] | no_license | kevinlu1248/ccc | 11d6c7326e2977a4cf0cdeeb5d866cda2328b94e | 187a519842eb9bdc11cedb25031c1eaef3033e74 | refs/heads/master | 2020-08-03T00:59:06.935626 | 2020-02-23T21:23:54 | 2020-02-23T21:23:54 | 211,573,846 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | # ccc03s1
'''
Sample Input
Copy
9
11
12
7
3
5
10
9
Sample Output
Copy
You are now on square 10
You are now on square 21
You are now on square 33
You are now on square 64
You are now on square 86
You are now on square 91
You are now on square 91
You are now on square 100
You Win!
'''
# landing square -> destination square for each snake/ladder
# NOTE(review): the name shadows the builtin `map`
map = {54: 19, 90: 48, 99: 77, 9: 34, 40: 64, 67: 86}
i = 1  # current square
while True:
    try:
        add = int(input())  # next die roll
        print(i)  # NOTE(review): debug print; not part of the sample transcript above
        if i == 0:
            # NOTE(review): `i` starts at 1 and only ever grows, so this
            # branch is unreachable -- presumably the quit test should be
            # `add == 0`; confirm against the CCC03 S1 problem statement
            print("You Quit!")
            break
        i += add
        if i in map:
            i = map[i]  # follow the snake or ladder
        if i >= 100:
            # NOTE(review): the sample output above says
            # "You are now on square 100", not "You are on square 100!"
            print("You are on square 100!")
            print("You Win!")
            break
        else:
            print("You are now on square {}".format(i))
    # NOTE(review): bare except silently ends the game on any error
    # (e.g. EOF or non-numeric input)
    except:
break | [
"kevinlu1248@gmail.com"
] | kevinlu1248@gmail.com |
e2566a0ad1266ed0c2da552c06edc69f6708ea28 | 301b5511b2498ffdb87735b7c6149884062b928c | /offlinesys/data_batch_service/task.py | f15bf016b8d99c835006aac26270ee90ec474898 | [] | no_license | LoveEatChicken/CrystalScrapy2 | 2adc29977e761e8369b48fb0c77199d4a390caef | dbdc04fa45865c07de7ac1c61895c7809f6cb310 | refs/heads/master | 2021-06-07T02:24:05.804049 | 2016-11-01T08:25:38 | 2016-11-01T08:25:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,220 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from utils.collections import Dict
from rules import rules
from utils import db
import traceback
from utils import log_util
from processor.processor import ProcessorFactory
from utils.log_util import log
class TaskHelper(list):
    '''
    Task helper class.  (Subclassing list is kept for backward
    compatibility; only the static loader below is used.)
    '''
    @staticmethod
    def init_tasks_from_db(ids=None):
        '''
        Initialize tasks from the des_task table of the iwant DB.

        :param ids: list of task-id strings to load (only rows with
                    status = 0, i.e. pending, are selected)
        :return: list of BaseTask instances, or None when there is nothing
                 to load
        '''
        # BUG FIX: the default used to be the mutable `ids = []`
        if not ids:
            return None
        # BUG FIX: the original loop kept `index` at 0 forever, so multiple
        # ids were concatenated without separating commas, producing an
        # invalid IN (...) list; join them properly instead.
        # NOTE: the ids are interpolated into the SQL text -- callers must
        # pass trusted/numeric ids, or db.select should be switched to a
        # parameterized query.
        sql = ('select * from des_task where (status = 0) and (id in ('
               + ','.join(ids) + '))')
        task_infos = db.select(sql)
        if not task_infos:
            return None
        tasks = []
        for task_info in task_infos:
            tasks.append(BaseTask.get_instance(task_info))
        return tasks
class BaseTask(object):
    '''
    Base class for all tasks.
    '''
    def __init__(self, task_info):
        self.task_info = task_info

    @property
    def task_info(self):
        return self.__task_info

    @task_info.setter
    def task_info(self, value):
        # validate eagerly so a task can never hold an invalid payload
        if value is None:
            raise ValueError('value is None!')
        if not isinstance(value, Dict):
            raise ValueError('value must be a Dict!')
        self.__task_info = value

    def start(self):
        '''
        Run the task: mark it running (status 1), then done (2) on success
        or failed (3) on a falsy result or an exception.
        '''
        try:
            db.update('update des_task set status=? where id=?', 1, self.task_info.id)
            result = self.process()
            if result:
                db.update('update des_task set status=? where id=?', 2, self.task_info.id)
            else:
                db.update('update des_task set status=? where id=?', 3, self.task_info.id)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt; log the failure and mark the
            # task as failed
            traceback.print_exc()
            log_util.error(traceback.format_exc())
            db.update('update des_task set status=? where id=?', 3, self.task_info.id)

    def stop(self):
        '''
        Stop the task (not implemented).
        '''
        pass

    def process(self):
        # subclasses override and return truthy on success
        pass

    @staticmethod
    def get_instance(task_info):
        # cmd != 0 selects a single-stage task; cmd == 0 runs the full flow
        if task_info.cmd != 0:
            return SingleTask(task_info)
        return CompleteFlowTask(task_info)
class SingleTask(BaseTask):
    '''
    Task that runs exactly one processing stage.
    '''
    def process(self):
        # delegate the actual work to the processor for this task's payload
        worker = ProcessorFactory.create_instance(self.task_info)
        if worker is None:
            return False
        return worker.do_process()
class CompleteFlowTask(BaseTask):
    '''
    Task that runs the full pipeline, stopping at the first failed stage.
    '''
    def process(self):
        '''Run stages 1, 2 and 3 in order; return False as soon as one fails.'''
        task_info = self.task_info
        # DRY: the original repeated the same three lines per stage
        for stage in (1, 2, 3):
            task_info.cmd = stage
            if not SingleTask(task_info).process():
                return False
        return True
| [
"crystal@joymason.cn"
] | crystal@joymason.cn |
04e8f703e5edf6aa0391a8d9e6cdcd52f3bdce64 | c9558409023652b6825431f57b034e0c3c5fb990 | /chapter2/palingram_1 | 66819c5de87a1e6754b97806374ea7033162754c | [] | no_license | TreyShenk/impracticalpython | 68291128887bf4bc98c43b34a2af2d2c275cfe91 | f9a0a9d4bec5a1adda4da16129a0c103371c1c6c | refs/heads/main | 2023-02-10T17:24:26.993162 | 2021-01-03T16:17:05 | 2021-01-03T16:17:05 | 326,283,170 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 27 11:36:45 2020
@author: trey
"""
def main():
    """Load the dictionary word list (scaffolding for the palingram exercise)."""
    import load_dictionary as ld
    word_list = ld.load('words_alpha.txt')  # NOTE(review): loaded but not yet used
if __name__ == "__main__":
main() | [
"trey.shenk@gmail.com"
] | trey.shenk@gmail.com | |
9514cb7b846ac550f90c6f83a5946dbad2da5087 | 08e98bff0129152decb28b7674c9f7bc88e01bb2 | /run.py | 20d081a2a80dc0607e3b21f549041611efb09360 | [] | no_license | cpvlordelo/instrument-prediction | fad751aaf809404b1ebd9754e6961c76932f7596 | e013d1c204f359106ecf3f9776edf7e88ee2edc3 | refs/heads/master | 2020-04-05T16:15:56.934441 | 2018-11-02T09:45:01 | 2018-11-02T09:45:01 | 157,004,542 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,540 | py | import torch.optim as optim
import datetime
date = datetime.datetime.now()
import sys
sys.path.append('./function')
from lib import *
from fit import *
from model import *
from audioset import *
from config import *
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '2' # change
def get_weight(Ytr):
    """Compute per-label inverse-frequency weights from a 3-D label array.

    The first two axes are summed away to obtain per-label counts; rarer
    labels receive larger weights (exponent 0.3 softens the scaling).
    Returns a torch tensor (float32).
    """
    label_counts = Ytr[:].sum(0).sum(0)
    freq = label_counts.astype(np.float32) / label_counts.sum()
    weights = ((freq.mean() / freq) * ((1 - freq) / (1 - freq.mean()))) ** 0.3
    return torch.from_numpy(weights)
# directory where checkpoints for this run are written
# (saveName, load, Data2Torch, Net, Trainer, batch_size, ... come from the
# star imports above)
out_model_fn = './data/model/%s/'%(saveName)
if not os.path.exists(out_model_fn):
    # NOTE(review): exists-then-makedirs is racy if two runs start at once
    os.makedirs(out_model_fn)
# load data
Xtr,Ytr,Xte,Yte,avg,std = load()
print 'finishing data loading...'
# Build Dataloader
t_kwargs = {'batch_size': batch_size, 'num_workers': 2, 'pin_memory': True,'drop_last': True}
v_kwargs = {'batch_size': batch_size, 'num_workers': 10, 'pin_memory': True}
tr_loader = torch.utils.data.DataLoader(Data2Torch([Xtr[:], Ytr[:]]), shuffle=True, **t_kwargs)
va_loader = torch.utils.data.DataLoader(Data2Torch([Xte, Yte]), **v_kwargs)
print 'finishing data building...'
#Construct Model
model = Net().cuda()
model.apply(model_init)
print model
num_params(model)
print 'batch_size:%d num_labels:%d'%(batch_size, num_labels)
print 'Dataset:' + data_name
print 'Xtr:' + str(Xtr.shape)
print 'Xte:' + str(Xte.shape)
print 'Ytr:' + str(Ytr.shape)
print 'Yte:' + str(Yte.shape)
# per-label class weights to counter label imbalance
inverse_feq = get_weight(Ytr.transpose(0,2,1))
#Start training
Trer = Trainer(model, 0.01, 100, out_model_fn, avg,std)
Trer.fit(tr_loader, va_loader,inverse_feq)
| [
"biboamybibo@gmail.com"
] | biboamybibo@gmail.com |
284deb502b460d18389044ea5103890c7f6686d0 | 01a8c5ecea9cb4d40d3e26a1ca08cb1ccc17e98a | /common/prep_terrain_data.py | a35188d77e0eab7d0ba710f5dbfa6d1addca21c6 | [] | no_license | pelinbalci/intro_to_ml | fe570cfe5a556cdd55fccabd1f7096b42124a7a7 | 450ba3cff7d3f2009d94a526527ed76fee6e1fdf | refs/heads/master | 2022-11-15T04:22:29.372686 | 2020-07-12T10:13:05 | 2020-07-12T10:13:05 | 277,359,558 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | #!/usr/bin/python
import random
def makeTerrainData(n_points=1000):
    """make the toy dataset """
    # fixed seed: every call returns the same deterministic dataset
    random.seed(42)
    grade = [random.random() for i in range(0, n_points)] #[0.63, 0.025, 0.275, 0.223, 0.736, 0.676, 0.89, 0.085, 0.42, 0.029]
    bumpy = [random.random() for i in range(0, n_points)] #[0.218, 0.50, 0.026, 0.19, 0.649, 0.54, 0.22, 0.58, 0.809, 0.006]
    error = [random.random() for i in range(0, n_points)]
    # label: grade*bumpy plus noise, rounded to 0/1
    y = [round(grade[i]*bumpy[i]+0.3+0.1*error[i]) for i in range(0, n_points)] #[1, 0, 0, 0, 1, 1, 1, 0, 1, 0]
    for i in range(0, len(y)):
        if grade[i] > 0.8 or bumpy[i] > 0.8:
            # extreme terrain is always labeled 1 (note: float, unlike the other labels)
            y[i] = 1.0 # <class 'list'>: [1, 0, 0, 0, 1, 1, 1.0, 0, 1.0, 0]
    # split into train/test sets
    X = [[gg, ss] for gg, ss in zip(grade, bumpy)]
    split = int(0.75 * n_points)
    X_train = X[0:split] # [[0.63, 0.218], [0.025, 0.50] ... ]
    X_test = X[split:]
    y_train = y[0:split] # [1, 0, 0, 0, 1, 1, 1.0]
    y_test = y[split:]
    # NOTE(review): the four *_sig/*_bkg lists below are computed from the
    # training split and then immediately recomputed from the test split,
    # and test_data is never returned -- this whole section is dead code
    grade_sig = [X_train[i][0] for i in range(0, len(X_train)) if y_train[i] == 0]
    bumpy_sig = [X_train[i][1] for i in range(0, len(X_train)) if y_train[i] == 0]
    grade_bkg = [X_train[i][0] for i in range(0, len(X_train)) if y_train[i] == 1]
    bumpy_bkg = [X_train[i][1] for i in range(0, len(X_train)) if y_train[i] == 1]
    grade_sig = [X_test[i][0] for i in range(0, len(X_test)) if y_test[i] == 0]
    bumpy_sig = [X_test[i][1] for i in range(0, len(X_test)) if y_test[i] == 0]
    grade_bkg = [X_test[i][0] for i in range(0, len(X_test)) if y_test[i] == 1]
    bumpy_bkg = [X_test[i][1] for i in range(0, len(X_test)) if y_test[i] == 1]
    test_data = {"fast": {"grade": grade_sig, "bumpiness": bumpy_sig},
                 "slow": {"grade": grade_bkg, "bumpiness": bumpy_bkg}}
return X, y, X_train, y_train, X_test, y_test | [
"balci.pelin@gmail.com"
] | balci.pelin@gmail.com |
2ae3fc6cbcdffaccd6da52934e2ac6d3e9d1fc08 | efe85925878eeb1d903202535481da207008aca1 | /nanoZ_stabilityFolder.py | 55508d6cff48f7f2c4d9947c2450f1395b434482 | [] | no_license | danieljdenman/nanoZ | 4c6d6b8fed9e94629620e95978d2b859c2a02556 | 12d937ed860cd79a40ea2d713dbedccd6e7f6802 | refs/heads/master | 2021-01-25T08:37:56.788683 | 2014-02-17T19:03:09 | 2014-02-17T19:03:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,706 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jan 27 16:16:39 2014
@author: danieljdenman
"""
#****************************************************************************
import numpy as np # NumPy (multidimensional arrays, linear algebra, ...)
import scipy as sp # SciPy (signal and image processing library)
import os
import matplotlib as mpl # Matplotlib (2D/3D plotting library)
import matplotlib.pyplot as plt # Matplotlib's pyplot: MATLAB-like syntax
from pylab import * # Matplotlib's pylab interface
ion() # Turned on Matplotlib's interactive mode
#****************************************************************************
def nanoZ_plotLongitudinal(dirname):
    # Prepare a wide figure for a longitudinal impedance plot of the folder's
    # measurements.  NOTE(review): nothing is actually drawn yet -- the
    # function appears unfinished.
    [times,names,means,stds,sems,medians] = nanoZ_stabilityFolder(dirname)
    plt.clf()
    plt.figure(figsize = (7,2))
def nanoZ_stabilityFolder(dirname):
#load all of the .txts from the nanoZ that are in a folder
[names,allData] = load_nanoZInFolder(dirname)
#try to parse the filenames such that the times can be extracted
times = np.zeros(len(names),dtype=float);
for i in range(0,len(names)):
nm = names[i].rstrip('.txt').split('_')
parsedTime = -1
for subNM in nm:
if subNM.strip('hr').isdigit():
parsedTime = float(subNM.strip('hr'))
else:
if subNM.strip('min').isdigit():
parsedTime = float(subNM.strip('min'))/60
times[i] = parsedTime
#show the user the parsing. if it sucks, let the user fix it.<-- not implemented. TODO
#ask the user about the probe, to know which channels to ignore
#maskDictionary contains properly masked channels for certain configureation of
#the nanoZ. ask Dan for more details.
maskDictionary = {'imec_right':[0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1],
'imec_left':[0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,0]}
#maskDictionary.setdefault([0]*len(allData[0][:,0]))#np.zeros(len(allData[0][:,0])))
probe=raw_input('tell me about the probe. your options are: imec_left, imec_right: ')
mask = maskDictionary[probe]
#proceed only if the data and mask are alignabled
if len(mask)==len(allData[0][:,0]):
#go through and remove the channels known to be open
for i in range(0,len(mask)-1):
if mask[31-i]==1:
for j in range(0,len(times)):
allData[j]=np.delete(allData[j],31-i,0); #this is where open channels are deleted
else:
print 'mask:'+str(len(mask))+' and data:'+str(len(allData[0][:,0]))+' do not match\rcheck the loader to see how many lines it skipped.'
#make oneD arrays that match time
means = np.zeros(len(times))
stds = np.zeros(len(times))
sems = np.zeros(len(times))
medians = np.zeros(len(times))
for i in range(0,len(times)):
means[i] = np.mean(allData[i][:,0])
stds[i] = np.std(allData[i][:,0])
sems[i] = stds[i]/np.sqrt(len(allData[i][:,0]))
medians = np.median(allData[i][:,0])
#return the results
return [times,names,means,stds,sems,medians]
#make distribution of Z @ 1K
def nanoZ1KHist(nm, tm=1):
lft = '/Users/danieljdenman/Academics/allen/BlancheLab/electrodeMeasurements/data/impTesting_23012014/'+nm+'_L_prepost1uADC_500kOhm_target_'+str(1)+'min.txt'
rgt = '/Users/danieljdenman/Academics/allen/BlancheLab/electrodeMeasurements/data/impTesting_23012014/'+nm+'_R_prepost1uADC_500kOhm_target_'+str(1)+'min.txt'
#load data
rL = np.genfromtxt(lft,skip_header=3,skip_footer=1,filling_values = '-1')
rL = rL[0:rL.shape[0]]
rR = np.genfromtxt(rgt,skip_header=3,skip_footer=1,filling_values = '-1')
rR = rR[0:rR.shape[0]]
numChans = (rL.shape[0])+(rR.shape[0])
np.ar
#make a new figure for this electrode
plt.clf()
plt.figure(figsize = (7,2))
#analyze phases
Lphases = rL[:,2];Lphases = Lphases.astype('float')
Rphases = rR[:,2];Rphases = Rphases.astype('float')
Lph_hist = np.histogram(Lphases,60,(-100,-40))
Rph_hist = np.histogram(Rphases,60,(-100,-40))
allphases = np.concatenate((Lphases,Rphases), axis=0)
plt.subplot(1,2,1)
plt.hist(allphases,bins=60,range=(-150,-10),histtype='bar',label=nm)
plt.axis([-150,-10,0,30])
#analyze Zs
LZ = rL[:,1];LZ = LZ.astype('float')
RZ = rR[:,1];RZ = RZ.astype('float')
LZ_hist = np.histogram(LZ,30,(0,15))
RZ_hist = np.histogram(RZ,30,(0,15))
allZs = np.concatenate((LZ,RZ), axis=0)
plt.subplot(1,2,2)
plt.hist(allZs,bins=60,range=(0,12),histtype='bar',label=nm)
plt.axis([0,12,0,30])
plt.show()
lessthan=0
lessthan4=0
avgList=[];
avgList4=[];
for i in range(0,32):
if float(rL[i,1]) <= 1:
lessthan+=1
avgList.append(rL[i,1])
if float(rR[i,1]) <= 1:
lessthan+=1
avgList.append(rR[i,1])
if float(rL[i,1]) <= 4:
lessthan4+=1
avgList4.append(rL[i,1])
if float(rR[i,1]) <= 4:
lessthan4+=1
avgList4.append(rR[i,1])
ls = np.asarray(avgList)
print "z<1 : "+str(lessthan)+' '+str(np.average(ls))+' +/- '+str(np.std(ls))
ls = np.asarray(avgList4)
print "z<4 : "+str(lessthan4)+' '+str(np.average(ls))+' +/- '+str(np.std(ls))
#______________________________________________________________________________
#--------------------------tools used in above scripts--------------------------
#______________________________________________________________________________
#******************************************************************************
def load_nanoZInFolder(dirname):
    """Load every nanoZ measurement file found in *dirname*.

    dirname -- directory to scan; must end with a path separator because the
               file name is appended directly. Falls back to a default
               location when empty/None.
    Returns [names, arrays]: the file names that parsed as nanoZ data and,
    in the same order, the (rows x 2) arrays of columns 1 and 2.
    """
    # make sure there was an input directory, otherwise use a default one
    if not dirname:
        dirname = '/Users/danieljdenman/Academics/allen/BlancheLab/electrodeMeasurements/imec/data/impTesting_r37_stability/'
    # go through each file in the folder, parse it, and keep it only if it
    # actually contained nanoZ data
    nms = []
    rL = []
    for file_name in os.listdir(dirname):
        fullPath = dirname + file_name
        tmp = np.genfromtxt(fullPath, skip_header=3, skip_footer=1,
                            filling_values='-1', invalid_raise=False, usecols=(1, 2))
        # an all-zero/empty parse means this was not a nanoZ data file; warn and skip
        if not tmp.any():
            print("------------------------------------------------")
            print("found a bad one!: " + fullPath)
            print("------------------------------------------------")
        else:
            # reuse the already-parsed array instead of reading the file a second time
            rL.append(tmp)
            nms.append(file_name)
    return [nms, rL]
#******************************************************************************
#******************************************************************************
def dSimpleStats(inpt):
    """Summarize *inpt* as [mean, standard deviation, s.e.m., median]."""
    deviation = np.std(inpt)
    return [
        np.mean(inpt),
        deviation,
        deviation / np.sqrt(len(inpt)),
        np.median(inpt),
    ]
#****************************************************************************** | [
"danieljdenman@gmail.com"
] | danieljdenman@gmail.com |
f44a41bb62029f3722f2147f77a2e042f14fd276 | effa59ca937dad64c80853c236d29b7b033ef918 | /hole/urls.py | 5ad17d15c3b59da33c93257ac1886f41160c0527 | [] | no_license | greyjr1/o2 | b9f74782ea862f56a73749b913f7655d92a2c41d | cd145c3e8927755c60aed347ed455a7f4260c3d3 | refs/heads/master | 2020-03-18T23:30:22.836574 | 2018-05-30T10:14:26 | 2018-05-30T10:14:26 | 135,406,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | from django.conf.urls import url
from . import views
# URL routing table for this app: maps the site root plus the /tax/ and
# /one_more/ paths to the corresponding view functions in views.py.
urlpatterns = [
    url(r'^$', views.index, name="index"),
    url(r'^tax/', views.tax, name="tax"),
    url(r'^one_more/', views.one_more, name="one_more"),
]
| [
"greyjr@i.ua"
] | greyjr@i.ua |
8eb2178a96421f89cd778cea940b0230d6d69a47 | 73a0421c35ac9a9ae1e7c6cd6f8bd952df76f841 | /dtf_bot/wsgi.py | 317ee2f1705df8e9bd5abeea279c55ec13485a47 | [
"MIT"
] | permissive | sudoguy/dtf_bot | ee9c2f04a05d5a2c9a1b57f44146c43a1ead87d1 | 424172c527d27f8ccee412d497e5e82ca97e84a0 | refs/heads/master | 2022-12-09T11:06:14.112540 | 2020-04-20T09:44:18 | 2020-04-20T09:44:18 | 185,618,936 | 2 | 0 | MIT | 2022-12-08T03:12:43 | 2019-05-08T14:09:23 | Python | UTF-8 | Python | false | false | 548 | py | """
WSGI config for dtf_bot project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dotenv import find_dotenv, load_dotenv
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
# Load environment variables from a .env file (if one is found) before the
# Django settings module is selected, so config can live outside the repo.
load_dotenv(find_dotenv())
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dtf_bot.settings")
# Wrap the WSGI app so unhandled exceptions are reported to Sentry.
application = SentryWsgiMiddleware(get_wsgi_application())
| [
"eskemerov@gmail.com"
] | eskemerov@gmail.com |
459ff2610da06eead3033f367e5de1946d4b27dd | fa6d3278e43978365e7c96a1b14486e91d761866 | /bgtimer | 82b899a53b5b34d5f38e5be45f85fb2d11ee433b | [] | no_license | alfem/bgtimer | da9166c1f09e380a2320bb325501a35a51f09442 | 5c2959ca6557e958595c2aa3b7805839cfb66931 | refs/heads/master | 2020-05-16T21:51:17.783293 | 2015-04-20T10:09:00 | 2015-04-20T10:09:00 | 24,093,915 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,327 | #!/usr/bin/python
# -*- coding: utf8 -*-
# bgtimer
# Board Game Timer
# stopwatch for family games
# Author: Alfonso E.M. <alfonso@el-magnifico.org>
# License: Free (GPL3)
# Version: 1.0 - 13/Dec/2010
import pygame
from pygame.locals import *
import os, sys
import random
import sys; sys.path.insert(0, "pgu")
from pgu import gui
WIDTH=1024
HEIGHT=768
# Some handy functions
def load_sound(name):
    """Load a sound file from the local 'sounds' directory.

    name -- file name of the sound (e.g. 'alert.wav')
    Returns a pygame.mixer.Sound, or exits the program if loading fails.
    """
    fullname = os.path.join('sounds', name)
    try:
        sound = pygame.mixer.Sound(fullname)
    except pygame.error as message:  # 'as' form is valid on Python 2.6+ and 3
        print('Cannot load sound: ' + name)
        raise SystemExit(message)
    return sound
def isnumeric(x):
    """True if *x* contains only digits once spaces and '-' are removed."""
    stripped = x.replace(" ", "").replace("-", "")
    return stripped.isdigit()
def suma():
    """Recompute every player's column total from the score grid."""
    for col in range(0, 4):
        column_sum = 0
        for row in range(0, 10):
            cell = points[row][col].value
            if isnumeric(cell):
                column_sum += int(cell)
        total[col].value = str(column_sum)
    app.repaintall()
# End of functions
# MAIN
# --- configuration: when the alarm sound fires (minutes:seconds) ---
alarm_minutes = 1
alarm_seconds = 5

# --- pygame initialisation ---
pygame.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT), pygame.FULLSCREEN)

# --- sounds ---
snd_warning = load_sound("alert.wav")
snd_next = load_sound("newmessage.wav")
snd_alarm = load_sound("ring.wav")

# --- fonts ---
fnt_timer = pygame.font.Font("Inconsolata.otf", 350)
fnt_info = pygame.font.Font("Inconsolata.otf", 30)

# --- scoreboard dialog (pgu) ---
app = gui.Desktop()
app.connect(gui.QUIT, app.quit, None)

c = gui.Table()
c.tr()
c.td(gui.Label("Score"), colspan=5)
# header row: one editable name field per player column
c.tr()
c.td(gui.Label(""))
c.td(gui.Input(value='A', size=8))
c.td(gui.Input(value='B', size=8))
c.td(gui.Input(value='C', size=8))
c.td(gui.Input(value='D', size=8))

# 10 numbered score rows x 4 player columns; suma() reads `points`
points = []
for row in range(0, 10):
    points.append([])
    c.tr()
    c.td(gui.Label("%2i" % (row + 1)))
    for col in range(0, 4):
        points[row].append(gui.Input(value=" ", size=5))
        c.td(points[row][col])

# totals row, filled in by suma()
c.tr()
total = []
c.td(gui.Label("total:"))
for col in range(0, 4):
    total.append(gui.Label(" 0"))
    c.td(total[col])

# buttons row
c.tr()
bt_sum = gui.Button("Suma")
bt_ok = gui.Button("OK")
c.td(gui.Label(""), colspan=2)
c.td(bt_sum)
c.td(bt_ok)
bt_sum.connect(gui.CLICK, suma)
bt_ok.connect(gui.CLICK, app.quit, None)

###
# Outer loop: draw the help line, then run the stopwatch until a key breaks out.
while True:
    # Stopwatch screen
    pygame.mouse.set_visible(0)
    screen.fill((0, 0, 0))
    help_text = fnt_info.render("Esc=Quit | SPACE=Restart | Tab=Menu", 1, (100, 255, 100))
    help_w, help_h = help_text.get_size()
    screen.blit(help_text, (WIDTH / 2 - help_w / 2, 10))
    pygame.display.flip()
    ###
    prev_seconds = 0
    start_ticks = pygame.time.get_ticks()
    while True:
        elapsed_ms = pygame.time.get_ticks() - start_ticks
        seconds = int(elapsed_ms / 1000)
        if seconds != prev_seconds:
            # wipe everything below the help line and redraw the big timer
            screen.fill((0, 0, 0), (0, 50, WIDTH, HEIGHT - 50))
            minutes, seconds = divmod(seconds, 60)
            face = fnt_timer.render("%02i:%02i" % (minutes, seconds), 1, (255, 255, 255))
            face_w, face_h = face.get_size()
            screen.blit(face, (WIDTH / 2 - face_w / 2, HEIGHT / 2 - face_h / 2))
            pygame.display.flip()
            if minutes == alarm_minutes and seconds == alarm_seconds:
                snd_alarm.play()
                pygame.time.delay(1000)
            prev_seconds = seconds
        pygame.event.pump()
        pressed = pygame.key.get_pressed()
        if pressed[K_ESCAPE]:
            sys.exit(0)
        if pressed[K_SPACE]:
            # restart the stopwatch from zero
            snd_next.play()
            start_ticks = pygame.time.get_ticks()
        if pressed[K_TAB]:
            # open the scoreboard dialog, then redraw from the outer loop
            pygame.mouse.set_visible(1)
            app.run(c)
            break
        pygame.time.delay(100)
| [
"alfem@users.noreply.github.com"
] | alfem@users.noreply.github.com | |
987786e2857ee39acc25898f8923fcdfd90bdda5 | 49619818476c43b7b4b45aadd1e1fb80f82f2d43 | /locustfile.py | 8e513b99a93536861a2eb6a8109f2a76db3b9b0b | [] | no_license | Robinbux/rust-server | b70869e7ab38ed482f2cc50f0836797ac9543428 | 763fae60b11592a737bcb8eec282f4f03b402faf | refs/heads/master | 2023-02-25T17:31:22.436391 | 2021-02-01T10:21:38 | 2021-02-01T10:21:38 | 295,986,069 | 1 | 0 | null | 2021-02-01T10:21:46 | 2020-09-16T09:28:48 | Rust | UTF-8 | Python | false | false | 401 | py | import time
from locust import HttpUser, task
class QuickstartUser(HttpUser):
@task
def get_assets(self):
self.client.get("/assets/pika")
self.client.get("/assets/vid")
self.client.get("/admin/console")
'''@task(3)
def view_item(self):
for item_id in range(10):
self.client.get("/admin/console", name="/admin")
time.sleep(1)''' | [
"katja.roth@code.berlin"
] | katja.roth@code.berlin |
a307d190864c688240df95388d0101710746d094 | 16eff60c29062849d7d2fc035a9fbb4a3f93e206 | /crnn_model/cnn_basenet.py | 4bc2c25734ae536b5105871a303e2fb722471c70 | [] | no_license | marjeylee/text_recognization | 3144d3f3903918d1c9a9e75b14597288b92af8cd | efc7982198cbdea8f330de2f758583be6ba3c23f | refs/heads/master | 2020-03-23T18:09:57.215751 | 2018-07-22T13:07:57 | 2018-07-22T13:07:57 | 141,893,083 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,465 | py | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: cnn_basenet
Description :
Author : 'li'
date: 2018/7/22
-------------------------------------------------
Change Activity:
2018/7/22:
-------------------------------------------------
"""
__author__ = 'li'
import tensorflow as tf
import numpy as np
from abc import ABCMeta
"""
cnn基本方法库
"""
class CNNBaseModel(metaclass=ABCMeta):
    """
    Base library of CNN building-block operations (convolution, pooling,
    normalization, dropout, fully-connected); all helpers are static.
    """
    def __init__(self):
        # Stateless: the class only groups static helper methods.
        pass
@staticmethod
def conv2d(inputdata, out_channel, kernel_size, padding='SAME', stride=1, w_init=None, b_init=None,
nl=tf.identity, split=1, use_bias=True, data_format='NHWC', name=None):
"""
卷积操作
:param name: op name
:param inputdata: A 4D tensorflow tensor which ust have known number of channels, but can have other
unknown dimensions.
:param out_channel: number of output channel.
:param kernel_size: int so only support square kernel convolution
:param padding: 'VALID' or 'SAME'
:param stride: int so only support square stride
:param w_init: initializer for convolution weights
:param b_init: initializer for bias
:param nl: a tensorflow identify function
:param split: split channels as used in Alexnet mainly group for GPU memory save.
:param use_bias: whether to use bias.
:param data_format: default set to NHWC according tensorflow
:return: tf.Tensor named ``output``
"""
with tf.variable_scope(name):
in_shape = inputdata.get_shape().as_list()
channel_axis = 3 if data_format == 'NHWC' else 1
in_channel = in_shape[channel_axis]
assert in_channel is not None, "[Conv2D] Input cannot have unknown channel!"
assert in_channel % split == 0
assert out_channel % split == 0
padding = padding.upper()
if isinstance(kernel_size, list):
filter_shape = [kernel_size[0], kernel_size[1]] + [in_channel / split, out_channel]
else:
filter_shape = [kernel_size, kernel_size] + [in_channel / split, out_channel]
if isinstance(stride, list):
strides = [1, stride[0], stride[1], 1] if data_format == 'NHWC' else [1, 1, stride[0], stride[1]]
else:
strides = [1, stride, stride, 1] if data_format == 'NHWC' else [1, 1, stride, stride]
if w_init is None:
w_init = tf.contrib.layers.variance_scaling_initializer()
if b_init is None:
b_init = tf.constant_initializer()
w = tf.get_variable('W', filter_shape, initializer=w_init)
b = None
if use_bias:
b = tf.get_variable('b', [out_channel], initializer=b_init)
if split == 1:
conv = tf.nn.conv2d(inputdata, w, strides, padding, data_format=data_format)
else:
inputs = tf.split(inputdata, split, channel_axis)
kernels = tf.split(w, split, 3)
outputs = [tf.nn.conv2d(i, k, strides, padding, data_format=data_format)
for i, k in zip(inputs, kernels)]
conv = tf.concat(outputs, channel_axis)
ret = nl(tf.nn.bias_add(conv, b, data_format=data_format) if use_bias else conv, name=name)
return ret
    @staticmethod
    def relu(input_data, name=None):
        """
        Element-wise ReLU activation: max(x, 0).
        :param name: optional op name
        :param input_data: input tensor
        :return: tensor of the same shape with ReLU applied
        """
        return tf.nn.relu(features=input_data, name=name)
    @staticmethod
    def sigmoid(inputdata, name=None):
        """
        Element-wise sigmoid activation: 1 / (1 + exp(-x)).
        :param name: optional op name
        :param inputdata: input tensor
        :return: tensor of the same shape with sigmoid applied
        """
        return tf.nn.sigmoid(x=inputdata, name=name)
@staticmethod
def maxpooling(inputdata, kernel_size, stride=None, padding='VALID', data_format='NHWC', name=None):
"""
池化操作
:param name:
:param inputdata:
:param kernel_size:
:param stride:
:param padding:
:param data_format:
:return:
"""
padding = padding.upper()
if stride is None:
stride = kernel_size
if isinstance(kernel_size, list):
kernel = [1, kernel_size[0], kernel_size[1], 1] if data_format == 'NHWC' else \
[1, 1, kernel_size[0], kernel_size[1]]
else:
kernel = [1, kernel_size, kernel_size, 1] if data_format == 'NHWC' else [1, 1, kernel_size, kernel_size]
if isinstance(stride, list):
strides = [1, stride[0], stride[1], 1] if data_format == 'NHWC' else [1, 1, stride[0], stride[1]]
else:
strides = [1, stride, stride, 1] if data_format == 'NHWC' else [1, 1, stride, stride]
return tf.nn.max_pool(value=inputdata, ksize=kernel, strides=strides, padding=padding,
data_format=data_format, name=name)
@staticmethod
def avgpooling(inputdata, kernel_size, stride=None, padding='VALID', data_format='NHWC', name=None):
"""
平均值池话
:param name:
:param inputdata:
:param kernel_size:
:param stride:
:param padding:
:param data_format:
:return:
"""
if stride is None:
stride = kernel_size
kernel = [1, kernel_size, kernel_size, 1] if data_format == 'NHWC' else [1, 1, kernel_size, kernel_size]
strides = [1, stride, stride, 1] if data_format == 'NHWC' else [1, 1, stride, stride]
return tf.nn.avg_pool(value=inputdata, ksize=kernel, strides=strides, padding=padding,
data_format=data_format, name=name)
@staticmethod
def globalavgpooling(inputdata, data_format='NHWC', name=None):
"""
全局平均值池话
:param name:
:param inputdata:
:param data_format:
:return:
"""
assert inputdata.shape.ndims == 4
assert data_format in ['NHWC', 'NCHW']
axis = [1, 2] if data_format == 'NHWC' else [2, 3]
return tf.reduce_mean(input_tensor=inputdata, axis=axis, name=name)
@staticmethod
def layernorm(inputdata, epsilon=1e-5, use_bias=True, use_scale=True, data_format='NHWC', name=None):
"""
batch normalize操作
:param name:
:param inputdata:
:param epsilon: epsilon to avoid divide-by-zero.
:param use_bias: whether to use the extra affine transformation or not.
:param use_scale: whether to use the extra affine transformation or not.
:param data_format:
:return:
"""
shape = inputdata.get_shape().as_list()
ndims = len(shape)
assert ndims in [2, 4]
mean, var = tf.nn.moments(inputdata, list(range(1, len(shape))), keep_dims=True)
if data_format == 'NCHW':
channnel = shape[1]
new_shape = [1, channnel, 1, 1]
else:
channnel = shape[-1]
new_shape = [1, 1, 1, channnel]
if ndims == 2:
new_shape = [1, channnel]
if use_bias:
beta = tf.get_variable('beta', [channnel], initializer=tf.constant_initializer())
beta = tf.reshape(beta, new_shape)
else:
beta = tf.zeros([1] * ndims, name='beta')
if use_scale:
gamma = tf.get_variable('gamma', [channnel], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
else:
gamma = tf.ones([1] * ndims, name='gamma')
return tf.nn.batch_normalization(inputdata, mean, var, beta, gamma, epsilon, name=name)
@staticmethod
def instancenorm(inputdata, epsilon=1e-5, data_format='NHWC', use_affine=True, name=None):
"""
:param name:
:param inputdata:
:param epsilon:
:param data_format:
:param use_affine:
:return:
"""
shape = inputdata.get_shape().as_list()
if len(shape) != 4:
raise ValueError("Input data of instancebn layer has to be 4D tensor")
if data_format == 'NHWC':
axis = [1, 2]
ch = shape[3]
new_shape = [1, 1, 1, ch]
else:
axis = [2, 3]
ch = shape[1]
new_shape = [1, ch, 1, 1]
if ch is None:
raise ValueError("Input of instancebn require known channel!")
mean, var = tf.nn.moments(inputdata, axis, keep_dims=True)
if not use_affine:
return tf.divide(inputdata - mean, tf.sqrt(var + epsilon), name='output')
beta = tf.get_variable('beta', [ch], initializer=tf.constant_initializer())
beta = tf.reshape(beta, new_shape)
gamma = tf.get_variable('gamma', [ch], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
return tf.nn.batch_normalization(inputdata, mean, var, beta, gamma, epsilon, name=name)
    @staticmethod
    def dropout(inputdata, keep_prob, noise_shape=None, name=None):
        """
        Dropout: keep each unit with probability keep_prob.
        :param name: optional op name
        :param inputdata: input tensor
        :param keep_prob: probability that each element is kept
        :param noise_shape: optional shape for the keep/drop mask
        :return: tensor with dropout applied
        """
        return tf.nn.dropout(inputdata, keep_prob=keep_prob, noise_shape=noise_shape, name=name)
@staticmethod
def fullyconnect(inputdata, out_dim, w_init=None, b_init=None, nl=tf.identity, use_bias=True, name=None):
"""
全连接层
:param inputdata: a tensor to be flattened except for the first dimension.
:param out_dim: output dimension
:param w_init: initializer for w. Defaults to `variance_scaling_initializer`.
:param b_init: initializer for b. Defaults to zero
:param nl: a nonlinearity function
:param use_bias: whether to use bias.
:param name:
:return: tf.Tensor: a NC tensor named ``output`` with attribute `variables`.
"""
shape = inputdata.get_shape().as_list()[1:]
if None not in shape:
inputdata = tf.reshape(inputdata, [-1, int(np.prod(shape))])
else:
inputdata = tf.reshape(inputdata, tf.stack([tf.shape(inputdata)[0], -1]))
if w_init is None:
w_init = tf.contrib.layers.variance_scaling_initializer()
if b_init is None:
b_init = tf.constant_initializer()
ret = tf.layers.dense(inputs=inputdata, activation=lambda x: nl(x, name='output'), use_bias=use_bias, name=name,
kernel_initializer=w_init, bias_initializer=b_init, trainable=True, units=out_dim)
return ret
    @staticmethod
    def layerbn(inputdata, is_training):
        """
        Batch normalization via tf.contrib.layers.batch_norm.
        :param inputdata: input tensor
        :param is_training: bool/placeholder selecting train vs inference statistics
        :return: batch-normalized tensor
        """
        output = tf.contrib.layers.batch_norm(inputdata, scale=True, is_training=is_training, updates_collections=None)
        return output
    @staticmethod
    def squeeze(inputdata, axis=None, name=None):
        """
        Remove size-1 dimensions from a tensor (all of them, or just *axis*).
        :param inputdata: input tensor
        :param axis: optional axis (or axes) to squeeze
        :param name: optional op name
        :return: squeezed tensor
        """
        return tf.squeeze(input=inputdata, axis=axis, name=name)
| [
"marjey_lee@163.com"
] | marjey_lee@163.com |
bb437491562d5944a6b65bc0683d768d8945ec23 | 7ef8168d5ac11f91effc626cab9aa696830afbca | /dashboard.py | f3f731775618f8baed812621883e28ff0fb4a7b3 | [] | no_license | rosh0450/Student-Result-Management-System | 5d5cd37e50562bd3d769e67ac53afa3bac10d365 | 14e3928ebba576e9ea5bd5ac521f9496b5ecb075 | refs/heads/main | 2023-04-15T04:10:00.986860 | 2021-04-29T07:38:11 | 2021-04-29T07:38:11 | 362,716,884 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,603 | py | from tkinter import *
from PIL import Image, ImageTk
from course import CourseClass
from student import StudentClass
from result import resultClass
from report import reportClass
from tkinter import messagebox
from datetime import *
import time
from math import *
import sqlite3
from tkinter import messagebox
import os
#import login
class RMS:
    def __init__(self, root):
        """Build the dashboard UI on *root* and start the data-refresh loop.

        root -- the Tk root window to populate
        """
        self.root=root
        self.root.title("Student Result Management System")
        self.root.geometry("1350x700+0+0")
        self.root.config(bg="white")
        ############# icons
        self.logo_dash=ImageTk.PhotoImage(file="Image/student_icon.png")
        #################### title ########################
        title=Label(self.root, text="Student Result Management System", padx=10 ,compound=LEFT, image=self.logo_dash, font=("goudy old style", 20, "bold"), bg="#033054", fg="white").place(x=0, y=0, relwidth=1, height=50)
        ############### Menus
        M_Frame=LabelFrame(self.root, text="Menus", font=("times new roman", 15), bg="white")
        M_Frame.place(x=10, y=70, width=1340, height=80)
        btn_course=Button(M_Frame, text="Course", font=("goudy old style", 15, "bold"), bg="#0b5377", fg="white", cursor="hand2", command=self.add_course).place(x=20, y=5, width=200, height=40)
        btn_student=Button(M_Frame, text="Student", font=("goudy old style", 15, "bold"), bg="#0b5377", fg="white", cursor="hand2", command=self.add_student).place(x=240, y=5, width=200, height=40)
        btn_result=Button(M_Frame, text="Result", font=("goudy old style", 15, "bold"), bg="#0b5377", fg="white", cursor="hand2", command=self.add_result).place(x=460, y=5, width=200, height=40)
        btn_view=Button(M_Frame, text="View Student Results", font=("goudy old style", 15, "bold"), bg="#0b5377", fg="white", cursor="hand2", command=self.add_report).place(x=680, y=5, width=200, height=40)
        btn_logout=Button(M_Frame, text="Logout", font=("goudy old style", 15, "bold"), bg="#0b5377", fg="white", cursor="hand2", command=self.logout).place(x=900, y=5, width=200, height=40)
        btn_exit=Button(M_Frame, text="Exit", font=("goudy old style", 15, "bold"), bg="#0b5377", fg="white", cursor="hand2", command=self.exit_).place(x=1120, y=5, width=200, height=40)
        #################### Content ###################
        self.bg_img=Image.open("Image/big.jpg")
        self.bg_img=self.bg_img.resize((920, 350), Image.ANTIALIAS)
        self.bg_img=ImageTk.PhotoImage(self.bg_img)
        self.lbl_bg=Label(self.root, image=self.bg_img).place(x=400, y=180, width=920, height=350)
        ################## update_details ##############
        # Counter tiles; their text is refreshed periodically by update_details().
        self.lbl_course=Label(self.root, text="Total Courses\n[ 0 ]", font=("goudy old style", 20), bd=10, relief=RIDGE, bg="#e43b06", fg="white")
        self.lbl_course.place(x=413, y=530, width=300, height=100)
        self.lbl_student=Label(self.root, text="Total Students\n[ 0 ]", font=("goudy old style", 20), bd=10, relief=RIDGE, bg="#0676ad", fg="white")
        self.lbl_student.place(x=723, y=530, width=300, height=100)
        self.lbl_result=Label(self.root, text="Total Results\n[ 0 ]", font=("goudy old style", 20), bd=10, relief=RIDGE, bg="#038074", fg="white")
        self.lbl_result.place(x=1033, y=530, width=300, height=100)
        ############### clock
        self.lbl=Label(self.root,text="\nAnalog Clock", font=("Book Antiqua", 25, "bold"),fg="white", compound=BOTTOM, bg="#081923", bd=0)
        self.lbl.place(x=10, y=180, height=450, width=350)
        #self.clock_image()
        #################### footer ########################
        footer=Label(self.root, text="SRM Student Result Management System\nContact us for any Technical Issue: 8939xxxx04", font=("goudy old style", 12), bg="#262626", fg="white").pack(side=BOTTOM, fill=X)
        self.update_details()
####################################################################
def update_details(self):
con=sqlite3.connect(database="rms.db")
cur=con.cursor()
try:
cur.execute("SELECT * FROM course ")
cr=cur.fetchall()
self.lbl_course.config(text=f"Total Course\n[{str(len(cr))}]")
cur.execute("SELECT * FROM student ")
cr=cur.fetchall()
self.lbl_student.config(text=f"Total Students\n[{str(len(cr))}]")
self.lbl_course.after(200, self.update_details)
except Exception as ex:
messagebox.showerror("Error", f"Error due to {str(ex)}")
def clock_image(self, hr, min_, sec_):
clock=Image.new("RGB", (400, 400), (8,25, 35))
draw=ImageDraw.Draw(clock)
################ for clock img
bg=Image.open("image/clock_img.jfif")
bg=bg.resize((300, 300), Image.ANTIALIAS)
clock.paste(bg, (50, 50))
'''
origin=200, 200
################### For hour line img
draw.line((origin, 200+50*sin(radians(hr)), 200-50*cos(radians(hr))), fill="black", width=4)
################### For minute line img
draw.line((origin, 200+80*sin(radians(min_)), 200-80*cos(radians(min_))), fill="blue", width=3)
################### For second line img
draw.line((origin, 200+100*sin(radians(sec_)), 200-100*cos(radians(sec_))), fill="green", width=4)
draw.ellipse((195, 195, 210, 210),fill="black")
clock.save("clock_new.png")
'''
origin=200, 200
################### For hour line img
draw.line((origin, 200+40*sin(radians(hr)), 200-40*cos(radians(hr))), fill="#29A19C", width=4)
################### For minute line img
draw.line((origin, 200+60*sin(radians(min_)), 200-60*cos(radians(min_))), fill="#E94B3CFF", width=3)
################### For second line img
draw.line((origin, 200+80*sin(radians(sec_)), 200-80*cos(radians(sec_))), fill="green", width=2)
draw.ellipse((195, 195, 210, 210),fill="#F9D342")
clock.save("clock_new.png")
    def working(self):
        """Redraw the analog clock every 200 ms (self-rescheduling tick)."""
        # Current wall-clock components.
        h=datetime.now().time().hour
        m=datetime.now().time().minute
        s=datetime.now().time().second
        # Convert to hand angles in degrees (full turn = 360).
        # NOTE(review): h is 0-23, so (h/12)*360 can exceed 360; sin/cos in
        # clock_image wrap, so the drawn angle is still correct modulo 360.
        hr=(h/12)*360
        min_=(m/60)*360
        sec_=(s/60)*360
        #print(h, m, s)
        #print(hr, min_, sec_)
        self.clock_image(hr, min_, sec_)
        # Keep a reference on self so tkinter's image is not garbage-collected.
        self.img=ImageTk.PhotoImage(file="clock_new.png")
        self.lbl.config(image=self.img)
        self.lbl.after(200, self.working)
def add_course(self):
self.new_win=Toplevel(self.root)
self.new_obj=CourseClass(self.new_win)
def add_student(self):
self.new_win=Toplevel(self.root)
self.new_obj=StudentClass(self.new_win)
def add_result(self):
self.new_win=Toplevel(self.root)
self.new_obj=resultClass(self.new_win)
def logout(self):
op=messagebox.askyesno("Confirm", "Do you really want to logout?", parent=self.root)
if op==True:
self.root.destroy()
os.system("python login.py")
def exit_(self):
op=messagebox.askyesno("Confirm", "Do you really want to exit?", parent=self.root)
if op==True:
self.root.destroy()
| [
"noreply@github.com"
] | rosh0450.noreply@github.com |
8dfb11fecccfa42e13a7d4a5af2a08433173e7c8 | f654c129a223c3be7b630bdd6cb884bcc4d79fe2 | /Assignment02/ex14.py | 9e35cc9a2ac84d7fa55c6f87aca797ac1dd433b7 | [] | no_license | Poissonfish/hort503 | d1629054235f9320808587fbfc68290154cafa27 | 4d884db8e194ab8cc52041583ca4146d4f6295c0 | refs/heads/master | 2021-05-13T11:54:42.050872 | 2018-05-04T21:02:38 | 2018-05-04T21:02:38 | 117,144,995 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 624 | py | from sys import argv
# Unpack command-line arguments: the script name plus three user-supplied values.
script, user_name, arg1, arg2 = argv
# Prompt string displayed before every input() call.
prompt = '** '
print(f"Hi {user_name}, I'm the {script} script.")
print("I'd like to ask you a few questions.")
print(f"Do you like me {user_name}?")
likes = input(prompt)
print(f"Where do you live {user_name}?")
lives = input(prompt)
print("What kind of computer do you have")
computer = input(prompt)
print(f"Do you like {arg1}?")
likearg1 = input(prompt)
print(f"Do you like {arg2}?")
likearg2 = input(prompt)
# Echo the collected answers back in one formatted summary.
print(f"""
Alright, so you said {likes} about like me.
You live in {lives}. Not sure where that is.
And you have a {computer} computer. Nice.
""")
| [
"b99612021@ntu.edu.tw"
] | b99612021@ntu.edu.tw |
c8bbb73692d7230f2ee50e9ea03c0edad60d1341 | 89542641d98225862187b023ca09a8708dc43e6d | /examples/volcano/config_analytical.py | 119e867d29f37e80f501309a628567b6be6b6781 | [
"BSD-3-Clause"
] | permissive | lanl/spotlight | 46656688f0988274a198cefd9cc2a057605c79af | 1601f697e813238486af02965ea66cd997dc6b1a | refs/heads/master | 2023-06-25T01:36:12.177626 | 2023-06-15T23:51:20 | 2023-06-15T23:51:20 | 219,827,760 | 8 | 4 | NOASSERTION | 2023-06-15T23:51:21 | 2019-11-05T18:54:27 | Python | UTF-8 | Python | false | false | 1,928 | py | """ A refinement plan for an analytical response function.
"""
import numpy
from scipy import stats
from spotlight import plan
class Plan(plan.BasePlan):
    """Refinement plan wrapping an analytical 'volcano' response surface."""

    # required to have solution_file, state_file, and num_solvers
    configuration = {
        "solution_file": "solution.db",
        "state_file": "state.db",
        "checkpoint_stride": 1,
    }

    # required to have local solver and sampling method; other special options
    # are attached to the Solver instance, the rest go to Solver.solve
    solver = {
        "local_solver": "powell",
        "stop_change": 0.1,
        "stop_generations": 5,
        "sampling_method": "uniform",
    }

    # parameter names and search bounds; inside compute(), self.get("x")
    # reads the optimizer's current value for "x"
    parameters = {
        "x": [-9.5, 9.5],
        "y": [-9.5, 9.5],
    }

    def initialize(self):
        """One-time setup hook; nothing to prepare for this problem."""
        pass

    def compute(self):
        """Evaluate the response surface at the optimizer's current (x, y)."""
        x, y = self.get("x"), self.get("y")

        # Negative Gaussian well at the origin (the "crater" floor).
        crater = stats.multivariate_normal(mean=[0, 0], cov=[[0.5, 0], [0, 0.5]])
        gauss = -50.0 * crater.pdf([x, y])

        # Radially symmetric rim: a slow exponential decay plus a Gaussian
        # ridge centered at radius mu.
        r = numpy.sqrt(x**2 + y**2)
        mu, sigma = 5.0, 1.0
        stat = 25.0 * (numpy.exp(-r / 35.0) + 1.0 /
                       (sigma * numpy.sqrt(2.0 * numpy.pi)) *
                       numpy.exp(-0.5 * ((r - mu) / sigma) ** 2)) + gauss

        # Optional sign flip: a positive sign searches for the minimum, a
        # negative one for the maximum (only when a `surface` is attached).
        if hasattr(self, "surface"):
            stat *= self.surface.sign
        return stat
| [
"noreply@github.com"
] | lanl.noreply@github.com |
e0efee112cf4d79cf7a172627680636fa7fded04 | 59c61f91f3fc813ce7985bef94b131880217efd5 | /app.py | 5609bcf2c514519d7bfdb9492efc8d3b28e84baf | [
"MIT"
] | permissive | strivedi01/sqlalchemy-challenge | 48a16d7804aa25b4c00c08602e5907c3b9c74adf | ba04ae8e5064d58d83dda6472fc3d01d920042d4 | refs/heads/main | 2023-01-06T21:59:27.042166 | 2020-10-29T03:28:10 | 2020-10-29T03:28:10 | 301,580,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,011 | py | import numpy as np
import datetime as dt
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify
#################################################
# Database Setup
#################################################
engine = create_engine("sqlite:///Resources//hawaii.sqlite")
# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(engine, reflect=True)
# Save reference to the table
Measurement = Base.classes.measurement
Station = Base.classes.station
### Routes
#################################################
# Flask Setup
#################################################
app = Flask(__name__)
#################################################
# Flask Routes
#################################################
# Home page.
# @app.route("/")
# def home():
# print("Server received request for 'Home' page...")
# return "Welcome to my 'Home' page!"
# List all routes that are available.
@app.route("/")
def welcome():
return (
f"Available Routes:<br/>"
f"/api/v1.0/precipitation<br/>"
f"/api/v1.0/station<br/>"
f"/api/v1.0/tobs<br/>"
f"/api/v1.0/temp/start --> Input date as: YYYY-MM-DD<br/>"
f"/api/v1.0/temp/start/end --> Input date as: YYYY-MM-DD<br/>"
)
# Convert the query results to a dictionary using `date` as the key and `prcp` as the value.
# Return the JSON representation of your dictionary.
@app.route("/api/v1.0/precipitation")
def precip():
"""Return the date_orcp data as json"""
session = Session(engine)
last_year=dt.date(2017,8,23)-dt.timedelta(days=365)
result = session.query(Measurement.date, Measurement.prcp).filter(Measurement.date>=last_year).all()
session.close()
return jsonify(result)
# Return a JSON list of stations from the dataset.
# Query the dates and temperature observations of the most active station for the last year of data.
@app.route("/api/v1.0/station")
def station():
session = Session(engine)
"""Return the date_orcp data as json"""
station_result = session.query(Measurement.station,func.count(Measurement.station)).\
group_by(Measurement.station).\
order_by(func.count(Measurement.station).desc()).all()
session.close()
return jsonify(station_result)
# Return a JSON list of temperature observations (TOBS) for the previous year.
@app.route("/api/v1.0/tobs")
def tobs():
session = Session(engine)
last_year=dt.date(2017,8,23)-dt.timedelta(days=365)
temp_result=session.query(Measurement.tobs).\
filter(Measurement.station == 'USC00519281').\
filter(Measurement.date>=last_year).all()
temps=list(np.ravel(temp_result))
session.close()
return jsonify(temps)
#'/api/v1.0/<start>' and '/api/v1.0/<start>/<end>'
@app.route('/api/v1.0/temp/<start>')
@app.route('/api/v1.0/temp/<start>/<end>')
def temp(start=None, end=None):
    """Return [TMIN, TAVG, TMAX] for dates >= start (and <= end if given).

    start, end -- date strings in YYYY-MM-DD form taken from the URL.
    """
    session = Session(engine)
    sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)]
    # Build one query and add the upper bound only when an end date is given,
    # instead of duplicating the whole query/close/return sequence.
    query = session.query(*sel).filter(Measurement.date >= start)
    if end:
        query = query.filter(Measurement.date <= end)
    result = query.all()
    temperature = list(np.ravel(result))
    session.close()
    return jsonify(temperature)
# Return a JSON list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.
# When given the start only, calculate `TMIN`, `TAVG`, and `TMAX` for all dates greater than and equal to the start date.
# When given the start and the end date, calculate the `TMIN`, `TAVG`, and `TMAX` for dates between the start and end date inclusive.
if __name__ == "__main__":
app.run(debug=True) | [
"strivedi_nj@yahoo.com"
] | strivedi_nj@yahoo.com |
396bb74a6b9f43641495e4fe46e2d43a022c1ffd | 3a67902e80e72259472d4b37d5339d3c88e4eab3 | /seLogerBot.py | 2c3343c9937ca2f4e7f6aa39d3931b147c49c1b3 | [] | no_license | slimanitz/real-Estate-Project-webScrap | d768daf5fe836904838a794ecd81df19ae8e5616 | 803934f5e043ba2458b875b0f9fc0a924b22a9b9 | refs/heads/master | 2023-06-09T20:04:16.652283 | 2021-06-27T09:39:55 | 2021-06-27T09:39:55 | 368,842,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,327 | py | import requests
from bs4 import BeautifulSoup
import pprint
import urllib.parse as urlparse
from urllib.parse import parse_qs
import datetime
import urllib.request
import json
import random
import time
from urllib.parse import urlencode
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"Accept-Encoding": "gzip, deflate",
'Accept-Language': 'fr-fr',
"Dnt": "1",
"Host": "www.seLoger.com",
"Upgrade-Insecure-Requests": "1",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36",
}
class seLogerBot:
url = ""
peopertyCount = 0
j = 0
session = requests.Session()
type= ""
def __init__(self,url,type):
self.url = url
self.type = type
def getOwner(self,soup):
try:
return soup.find('div', {'class': 'Contact__ContentContainer-sc-3d01ca-2 cKwmCO'}).getText().strip()
except:
return "None"
def getPrice(self,soup):
try:
return int(soup.find('div',{'data-test':'sl.price-container '}).getText().strip().replace(' ','').replace('€',''))
except:
try:
return int(
soup.find('div', {'data-test': 'sl.price-label'}).getText().strip().replace(' ', '').replace(
'€', ''))
except:
return 0
def getSize(self,soup):
try:
return int(soup.find('ul',{'data-test':'sl.tags'}).getText().strip().split()[0])
except:
return 0
def getDate(self):
return str(datetime.date.today())
def getCity(self,soup):
try:
block = soup.find('div', {'class': 'ContentZone__Address-wghbmy-1 dlWlag'})
spans = block.findAll('span')
return spans[1].getText().strip()
except:
return None
def getRef(self,soup):
urlsoup = soup.find('a', {'name': 'classified-link'})
url = urlsoup['href']
parsed = urlparse.urlparse(url)
return parse_qs(parsed.query)['Classified-ViewId'][0]
def getDepartementName(self,soup):
try:
block = soup.find('div',{'class':'ContentZone__Address-wghbmy-1 dlWlag'})
spans = block.findAll('span')
departmentName = spans[0].getText().strip().split(" ")
if len(departmentName) == 3:
return departmentName[0]+" "+departmentName[1]
return departmentName[0]
except:
return None
def getPostalCode(self,soup):
try:
block = soup.find('div',{'class':'ContentZone__Address-wghbmy-1 dlWlag'})
spans = block.findAll('span')
postalCode = spans[0].getText().strip().split()
if len(postalCode) == 3:
return postalCode[2].replace('(','').replace(')','')
return postalCode[1].replace('(','').replace(')','')
except:
return None
def getUrl(self,soup):
try:
urlsoup = soup.find('a',{'name':'classified-link'})
url = urlsoup['href']
return url.split('?')[0]
except:
return None
def getPropertyData(self,cardSoup):
owner = self.getOwner(cardSoup)
price = self.getPrice(cardSoup)
city = self.getCity(cardSoup)
departement = self.getDepartementName(cardSoup)
size = self.getSize(cardSoup)
url = self.getUrl(cardSoup)
postalCode = self.getPostalCode(cardSoup)
ref = self.getRef(cardSoup)
publishDate = self.getDate()
if(city ==None):
return {
'owner': owner,
'price': price,
'city': "Quartier "+departement.split(" ")[0],
'departement': departement,
'size': size,
'url': url,
'postalCode': postalCode,
'ref':ref,
'publishDate':publishDate
}
return{
'owner':owner,
'price':price,
'city':city,
'departement':departement,
'size':size,
'url':url,
'postalCode':postalCode,
'ref':ref,
'publishDate': publishDate
}
def getPropertiesData(self,url):
cardsSoup = self.getAllCards(url)
for card in cardsSoup:
pprint.pprint(self.getPropertyData(card))
pprint.pprint("============================")
self.sendToDB(self.getPropertyData(card))
def getAllCards(self,url):
soup = self.getSoup(url)
cards = soup.findAll('div', {'data-test': 'sl.card-container'})
return cards
def getSoup(self,url):
response = self.session.get(url , headers=headers)
print(response.status_code)
if response.status_code == 403:
time.sleep(7200)
return self.getSoup()
self.j += 1
if self.j % 5 == 0:
time.sleep(600)
if response.status_code == 200:
soup = BeautifulSoup(response.text,'html.parser')
return soup
def sendToDB(self,propertyData):
myurl = "http://localhost:3000/boxs/"+self.type
req = urllib.request.Request(myurl)
req.add_header('Content-Type', 'application/json; charset=utf-8')
jsondata = json.dumps(propertyData)
print("////////////////////////////\n")
print(jsondata)
jsondataasbytes = jsondata.encode('utf-8') # needs to be bytes
req.add_header('Content-Length', len(jsondataasbytes))
urllib.request.urlopen(req, jsondataasbytes)
def getAllPagesPropertiesData(self):
soup = self.getSoup(self.url)
pagesNumber = self.getPagesNumber(soup)
for i in range(pagesNumber):
newUrl = self.url+'&LISTING-LISTpg='+str(i+1)
print(i)
self.getPropertiesData(newUrl)
def getPagesNumber(self,soup):
pagesList = soup.find('div',{'data-test':'sl.status-container'})
pagesNumber = pagesList.getText().strip().split(" ")[5]
return int(int(pagesNumber)/25)
| [
"58049014+slimanitz@users.noreply.github.com"
] | 58049014+slimanitz@users.noreply.github.com |
c39cc5dda80f909656f9411ff1e0ff395f66ea2f | 9da0798b6f309d2274c65077efa81c3766b78051 | /SearchQuery.py | 398bb743fac51257f35f8c955e13f286be2efd41 | [] | no_license | theriley106/RandomSearchQuery | 09b37c23c3798b873c45db529158b326410d759e | e084a1a63279994fe06ef8dd594d2bc8e1d7b445 | refs/heads/master | 2021-01-13T04:57:56.583001 | 2017-02-07T05:04:07 | 2017-02-07T05:04:07 | 81,155,360 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | import random
import csv
QueryList = open('QueryList.csv', 'r')
QueryList = csv.reader(QueryList)
QueryList = [row for row in QueryList]
QueryList = [l[0] for l in QueryList]
def Random():
return random.choice(QueryList)
def Multi():
return QueryList | [
"christopherlambert106@gmail.com"
] | christopherlambert106@gmail.com |
498a5884593f09cdddac8a145328c861960f6dea | e9ee3166ef8cfa80fbade99e4c90f9e6c7e0affc | /App_Store/AppManagement/AppAlreadyUp/Test_App_Already_Up_Right.py | e415ec58b5428c465b79a7dd05147335b9c54b26 | [] | no_license | Lyxiou/git_demo | 24aef1c65eb2573a7465e98e55e7951ae9491fb5 | fabf0b169e3b56dbd62952021598fcc0d893fa0d | refs/heads/master | 2021-01-11T20:54:24.971072 | 2017-01-20T01:14:19 | 2017-01-20T01:14:19 | 79,209,111 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,644 | py | # coding=utf-8
from selenium import webdriver
import unittest
from bsddb.test.test_all import suite
from lib2to3.tests.support import driver
from time import sleep
from selenium.webdriver.common.keys import Keys
from string import rstrip
class TestAppManagement(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.base_url = "http://10.110.1.55:8082/admin/home.html"
#http://10.110.1.55:8082/login.html
self.driver.get(self.base_url)
self.driver.find_element_by_id("username").send_keys("admin")
self.driver.find_element_by_id("password").send_keys("123456")
self.driver.find_element_by_xpath("//input[@value='登录']").click()
sleep(1)
sreach_window = self.driver.current_window_handle
self.driver.find_element_by_link_text("应用管理").click()
sleep(1)
sreach_window = self.driver.current_window_handle
self.driver.find_element_by_link_text("已上架").click()
sleep(1)
def check_drop_down_list_num_10(self):
sreach_window = self.driver.current_window_handle
driver = self.driver
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys("j")
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys(Keys.ENTER)
#Refresh
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/button').click()
sleep(1)
#drop down list num items
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]').click()
sleep(1)
'''
items = driver.find_elements_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/ul/li')
for i in items:
i.click()
sleep(1)
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]').click()
sleep(1)
'''
sleep(1)
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/ul/li[1]').click()
numitemstext = driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/button/span[1]').text
if (numitemstext == "10"):
rst = True
else:
rst = False
return rst
def test_drop_down_list_num_10(self):
result = self.check_drop_down_list_num_10()
self.assertTrue(result)
def check_drop_down_list_num_25(self):
sreach_window = self.driver.current_window_handle
driver = self.driver
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys("j")
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys(Keys.ENTER)
#Refresh
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/button').click()
sleep(1)
#drop down list num items
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]').click()
sleep(1)
sleep(1)
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/ul/li[2]').click()
numitemstext = driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/button/span[1]').text
if (numitemstext == "25"):
rst = True
else:
rst = False
return rst
def test_drop_down_list_num_25(self):
result = self.check_drop_down_list_num_25()
self.assertTrue(result)
def check_drop_down_list_num_50(self):
sreach_window = self.driver.current_window_handle
driver = self.driver
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys("j")
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys(Keys.ENTER)
#Refresh
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/button').click()
sleep(1)
#drop down list num items
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]').click()
sleep(1)
sleep(1)
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/ul/li[3]').click()
numitemstext = driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/button/span[1]').text
if (numitemstext == "50"):
rst = True
else:
rst = False
return rst
def test_drop_down_list_num_50(self):
result = self.check_drop_down_list_num_50()
self.assertTrue(result)
def check_drop_down_list_num_all(self):
sreach_window = self.driver.current_window_handle
driver = self.driver
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys("j")
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[1]/div/input').send_keys(Keys.ENTER)
#Refresh
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/button').click()
sleep(1)
#drop down list num items
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]').click()
sleep(1)
sleep(1)
driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/ul/li[4]').click()
numitemstext = driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[1]/button/span[1]').text
if (numitemstext == "All"):
rst = True
else:
rst = False
return rst
def test_drop_down_list_num_all(self):
result = self.check_drop_down_list_num_all()
self.assertTrue(result)
def check_down_app_page(self):
#list down list descr item
self.driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[2]').click()
sleep(1)
list_dsc_items = self.driver.find_elements_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[2]/ul/li')
for j in list_dsc_items:
sleep(1)
j.click()
# driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[2]').click()
sleep(1)
j.click()
# 如果知道CheckBox的状态就知道是几个描述
#lisID/AppName...
text_id = self.driver.find_element_by_xpath('//*[@id="grid-data"]/thead/tr/th[1]/a/span[1]').text
print(text_id)
text_appname = self.driver.find_element_by_xpath('//*[@id="grid-data"]/thead/tr/th[2]/a/span[1]').text
print(text_appname)
#The List num
list_nums = self.driver.find_elements_by_xpath('//*[@id="grid-data"]/tbody/tr')
i=0
for list_num in list_nums:
i= i+1
print(i)
sreach_window = self.driver.current_window_handle
self.driver.find_element_by_xpath('//*[@id="grid-data"]/tbody/tr/td[4]/button').click()
sleep(1)
sreach_window = self.driver.current_window_handle
print("Current title is %s"%self.driver.title)
print("Current url is %s"%self.driver.current_url)
#addpageurl = 'http://10.110.1.55:8082/admin/apkinfo.html?id=0000-a7b5575a-ba57-4019-a2ef-27d961c52ddd&status=3'
if (self.driver.current_url.endswith('status=3')):
rst = True
else:
rst = False
return rst
def test_check_down_app_page(self):
result = self.check_down_app_page()
self.assertTrue(result)
def check_down_app(self):
#list down list descr item
self.driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[2]').click()
sleep(1)
list_dsc_items = self.driver.find_elements_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[2]/ul/li')
for j in list_dsc_items:
sleep(1)
j.click()
# driver.find_element_by_xpath('//*[@id="grid-data-header"]/div/div/div[2]/div[2]').click()
sleep(1)
j.click()
# 如果知道CheckBox的状态就知道是几个描述
#lisID/AppName...
text_id = self.driver.find_element_by_xpath('//*[@id="grid-data"]/thead/tr/th[1]/a/span[1]').text
print(text_id)
text_appname = self.driver.find_element_by_xpath('//*[@id="grid-data"]/thead/tr/th[2]/a/span[1]').text
print(text_appname)
#The List num
list_nums = self.driver.find_elements_by_xpath('//*[@id="grid-data"]/tbody/tr')
i=0
for list_num in list_nums:
i= i+1
print(i)
sreach_window = self.driver.current_window_handle
self.driver.find_element_by_xpath('//*[@id="grid-data"]/tbody/tr/td[4]/button').click()
sleep(1)
sreach_window= self.driver.current_window_handle
self.driver.find_element_by_link_text('下架').click()
sleep(1)
sreach_window= self.driver.current_window_handle
self.driver.find_element_by_id('offreason').send_keys("down")
sleep(1)
#driver.find_element_by_css_selector('button.btn.btn-default').click()
sleep(1)
self.driver.find_element_by_id('offBtn').click()
sreach_window= self.driver.current_window_handle
alert_text = self.driver.switch_to_alert().text
print(alert_text)
sleep(1)
self.driver.switch_to_alert().accept()
sleep(1)
sreach_window= self.driver.current_window_handle
after_down_url = 'http://10.110.1.55:8082/admin/apkshelvedlist.html'
#if (alert_text == '保存成功'):
# rst = True
#else:
# rst = False
#print("alert_text %r"%rst)
if(self.driver.current_url == after_down_url):
rst = True
else:
rst = False
return rst
def test_down_app(self):
result = self.check_down_app()
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
| [
"13478618652@163.com"
] | 13478618652@163.com |
debb19032931c21dc710d3ed597f7cf09837c618 | 4a351381368e7b401077107a2ce9db4431bc8488 | /by_day.sage.py | 1d91a500c4d054a15ad7bdcc153d6ccc888b58ce | [
"MIT"
] | permissive | michaelmusty/SolvableDessins | 77ea28fd697e2a7a9578591d26adf708d963136a | 09898539d57a494525c0734123efce3e8173f354 | refs/heads/master | 2021-01-17T14:08:42.726665 | 2019-10-30T20:23:27 | 2019-10-30T20:23:27 | 84,077,328 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | py |
# This file was *autogenerated* from the file by_day.sage
from sage.all_cmdline import * # import sage library
_sage_const_2 = Integer(2); _sage_const_20 = Integer(20); _sage_const_4 = Integer(4)
from sage.schemes.riemann_surfaces.riemann_surface import RiemannSurface
A = AffineSpace(QQ, _sage_const_2 , names=('x', 'y',)); (x, y,) = A._first_ngens(2)
C = Curve([x**_sage_const_2 - x*y**_sage_const_4 - x - y**_sage_const_4 ], A)
S = RiemannSurface(C.defining_polynomial(), prec = _sage_const_20 )
| [
"michaelmusty@gmail.com"
] | michaelmusty@gmail.com |
63fc5e681047adf8ce0bb84d975c092aac0fc275 | 2ef467c1edbe8966072795274e22ad7c9147b2af | /chainerpruner/masks/__init__.py | 4b06d0909dbe05e164fd173312ca210d8193d63d | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | tkat0/ChainerPruner | e033cc964d6f4e8a91b5be92dd94b11b127aa191 | bd9d5752554e06030eac217d64e28c4cfcdddeac | refs/heads/master | 2023-04-09T13:01:17.195459 | 2019-07-09T07:40:37 | 2019-07-09T07:40:37 | 191,313,406 | 0 | 0 | MIT | 2023-04-04T00:55:53 | 2019-06-11T07:03:39 | Python | UTF-8 | Python | false | false | 49 | py | from chainerpruner.masks.normmask import NormMask | [
"tomohiro.kato@dena.com"
] | tomohiro.kato@dena.com |
52b74a5d0060bf0f5b827c533434a5ee272f3b97 | 14477d789144393acb93ce1093a6b7f73b54d960 | /DataSet and Data_prepariton/__copy_of_mreza__/Tensorfolow_Classification_Duzina_predvidja_ Otkaz_DNN.py | 3e72f9c449e4acbd05fdab8fd29117edc5166601 | [] | no_license | aleksabisercic/M21_paper_optimal_maintenance_strategy | a614944fe74ec6204cdb258e8fd4f0e2244da502 | 69006c7103798111da57f809c3a829974f37d71d | refs/heads/main | 2023-04-18T01:34:17.911147 | 2021-05-01T13:04:40 | 2021-05-01T13:04:40 | 319,946,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,593 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Nov 5 01:22:56 2020
@author: Freedom
"""
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 4 16:20:14 2020
@author: Freedom
"""
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import xlwt as xl
import pickle
from sklearn.metrics import accuracy_score
df = pd.read_excel("Zastoji.xlsx", index_col = 0)
df = df[df["Sistem"] == "BTD SchRs-800"]
df = df.sort_values(by = ['Početak zastoja'])
df = df[['Vreme_zastoja', 'Vrsta_zastoja' ]]
df.reset_index(inplace = True, drop = True)
df = df[df.Vreme_zastoja < 2000]
lista = []
lista1 = []
for i in range (0,len(df.index)): #df['Vreme_zastoja']:
lista.append(df["Vreme_zastoja"].iloc[i])
lista1.append(df["Vrsta_zastoja"].iloc[i])
data_X = np.array(lista).reshape(-1,1)
labels_raw = np.array(lista1)
data_Y = []
for label in labels_raw:
if label == 'Masinski':
data_Y.append(0)
elif label == 'Elektro':
data_Y.append(1)
elif label == 'Ostalo':
data_Y.append(2)
series_zastoj = np.array(lista).reshape(-1)
series = np.array(data_Y).reshape(-1,1)
def plot_series(time, series, format="-", start=0, end=None):
plt.plot(time[start:end], series[start:end], format)
plt.xlabel("Time")
plt.ylabel("Value")
plt.grid(True)
def windowed_dataset(series, labels, window_size, batch_size, shuffle_buffer):
dataset = tf.data.Dataset.from_tensor_slices(series)
dataset = dataset.window(window_size + 1, shift=1, drop_remainder=True)
dataset = dataset.flat_map(lambda window: window.batch(window_size + 1))
dataset = dataset.shuffle(shuffle_buffer).map(lambda window: (window[:-1], labels))
dataset = dataset.batch(batch_size).prefetch(1)
return dataset
split_time = 2000
time = np.arange(len(series))
time_train = time[:split_time]
x_train = series[:split_time]
time_valid = time[split_time:]
x_valid = series[split_time:]
window_size = 50
batch_size = 32
shuffle_buffer_size = 150
dataset = windowed_dataset(x_train, window_size, batch_size, shuffle_buffer_size)
model = tf.keras.models.Sequential([
tf.keras.layers.Dense(10, input_shape=[window_size], activation="relu"),
tf.keras.layers.Dense(10, activation="relu"),
tf.keras.layers.Dense(3, activation='softmax')
])
# lr_schedule = tf.keras.callbacks.LearningRateScheduler(
# lambda epoch: 1e-8 * 10**(epoch / 20))
# optimizer = tf.keras.optimizers.SGD(lr=1e-8, momentum=0.9)
model.compile(loss='sparse_categorical_crossentropy',
optimizer=tf.keras.optimizers.Adam(), metrics=['accuracy'])
history = model.fit(dataset, epochs=500)
loss = history.history['loss']
epochs = range(len(loss))
plt.plot(epochs, loss, 'b', label='Training Loss')
plt.show()
forecast = []
for time in range(len(series) - window_size):
forecast.append(model.predict(series[time:time + window_size][np.newaxis]))
results = []
forecast = forecast[split_time-window_size:]
forecast = np.array(forecast)
forecast = forecast.reshape(forecast.shape[0],forecast.shape[2])
for probabilities in forecast:
if probabilities[0] > probabilities[1] and probabilities[0] > probabilities[2]:
results.append(0)
elif probabilities[1] > probabilities[0] and probabilities[1] > probabilities[2]:
results.append(1)
else:
results.append(2)
results = np.array(results)
plt.figure(figsize=(10, 6))
plot_series(time_valid[100:150], x_valid[100:150])
plot_series(time_valid[100:150], results[100:150])
print(accuracy_score(x_valid, results)) | [
"64646644+aleksabisercic@users.noreply.github.com"
] | 64646644+aleksabisercic@users.noreply.github.com |
d6ee256c37fc5713b5cedef6e2f80f965ca68c85 | a2a3930f44c6fa126d297026325dceb4d4646962 | /flask_app/app.py | ebf1877c1d5b46747250b72f62a729017df222c9 | [] | no_license | KeerthiGowda18/cloud-docker | fb3958a798097c763602847d00b821e9b23711fc | 7e94aa0b1e3c391a8d3bc756f228fd828e5eeb8e | refs/heads/master | 2021-03-22T16:50:19.663452 | 2020-03-23T00:07:05 | 2020-03-23T00:07:05 | 247,384,028 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,189 | py | from flask import Flask
import pymongo
from pymongo import MongoClient
import json
from flask import flash, render_template, request, redirect
import time
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
mydb = myclient["cloud_assignment"]
mycol = mydb["books"]
app = Flask(__name__,template_folder="template")
req =" "
multikeys = []
catalogue = []
log=[]
frequency=[]
@app.route("/", methods=['POST','GET'])
def index():
return render_template('search_page.html')
@app.route("/search",methods=['POST','GET'])
def search():
start=time.time()
global req
req= request.form.get('search')
inc =0
frequency.append(req)
data = mycol.find({'author':req})
newdata = mycol.find({'author':req})
count= mycol.find({'author':req}).count()
for word in frequency:
if req in word:
inc = inc+1
else:
inc =1
print(frequency)
print(count)
end= time.time()- start
logentry={'Keyword': req, "time taken" : end, "frequency": inc }
log.append(logentry)
with open('Logs.json', 'w', encoding='utf-8') as f:
json.dump(log, f, ensure_ascii=False, indent=4)
if(count>0):
for x in newdata:
entry={'author': req, 'title':(x["title"])}
catalogue.append(entry)
with open('catalogue.json', 'w', encoding='utf-8') as f:
json.dump(catalogue, f, ensure_ascii=False, indent=4)
return render_template('search_page.html',data=data)
else:
return "Unsuccessful search, Not found in Database"
@app.route("/note",methods=['POST','GET'])
def note():
#if mydb.mycol.count_documents({'author':req})!=0:
count= mycol.find({'author':req}).count()
#print(req)
#print(count)
if(count>0):
req_note= request.form.get('note')
entry = {'author': req , 'Note': req_note}
multikeys.append(entry)
with open('Note.json', 'w', encoding='utf-8') as f:
json.dump(multikeys, f, ensure_ascii=False, indent=4)
f.close()
return "Note Saved Successfully for the author: " + req
else:
return "Author not found"
@app.route("/retrieval", methods=['POST','GET'])
def retrieval():
with open('Note.json') as infile:
newdata = json.load(infile)
return render_template('search_page.html',newdata=newdata)
if __name__ == '__main__':
app.run(debug = True) | [
"kr583413@dal.ca"
] | kr583413@dal.ca |
95d3abe7b498d426617981d594df08f4b56e7b9c | 22efc461d2b3851492fee36b0e9d0623aa4ebcc1 | /manage.py | 93bf03ad4b447fcdc0a87f887a62c79183e779c4 | [] | no_license | narsiram/potfolioproject | d39ba7a4cf92bc31c6620885b7c4f21c5950c66e | d49b4dfa222885b4940178c39ca499bad304fc9b | refs/heads/master | 2020-03-28T01:18:38.031227 | 2018-09-06T07:12:40 | 2018-09-06T07:12:40 | 147,495,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'PortfolioProject.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"jindalnarsi20@gmail.com"
] | jindalnarsi20@gmail.com |
5336785d7fb438552c119ebfdf3711de7bbd9352 | f5391f55431493f7b3d9cb0aab113b56bd5019c1 | /item_explorer.py | c5fedd7500cfa5346c9d913a460ccedc8d11fb05 | [] | no_license | MyYo/instapy | aad4d49079b2d25a5b45b41039ab92f061b1198a | 4fa4e2c3f69ac380102befb112d9c1f86bc875c2 | refs/heads/master | 2022-12-21T13:29:24.439325 | 2019-01-02T06:05:23 | 2019-01-02T06:05:23 | 163,771,176 | 0 | 0 | null | 2022-12-08T01:30:37 | 2019-01-01T22:32:03 | Python | UTF-8 | Python | false | false | 4,090 | py | import requests
import sys
URL_TEMPLATE = 'https://www.instacart.com/v3/containers/items/item_{item}'
HEADERS = {
# 'authority': 'www.instacart.com',
# 'method': 'GET',
# 'path': '/v3/containers/items/item_',
# 'scheme': 'https',
# 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
# 'accept-encoding': 'gzip, deflate, br',
# 'accept-language': 'en-US,en;q=0.9,he;q=0.8',
# 'cache-control': 'no-cache',
'cookie': "_ga=GA1.2.1182894543.1546381925; _gid=GA1.2.458901356.1546381925; _parsely_visitor={%22id%22:%22pid=4375f6f70a4e16f304274f82666b3f07%22%2C%22session_count%22:1%2C%22last_session_ts%22:1546381925133}; build_sha=5b6265ba96f23c9fa3ce2ab22124ee0a3077b565; amplitude_idundefinedinstacart.com=eyJvcHRPdXQiOmZhbHNlLCJzZXNzaW9uSWQiOm51bGwsImxhc3RFdmVudFRpbWUiOm51bGwsImV2ZW50SWQiOjAsImlkZW50aWZ5SWQiOjAsInNlcXVlbmNlTnVtYmVyIjowfQ==; _gcl_au=1.1.94154404.1546382065; ab.storage.userId.6f8d91cb-99e4-4ad7-ae83-652c2a2c845d=%7B%22g%22%3A%2245073255%22%2C%22c%22%3A1546384295233%2C%22l%22%3A1546384295233%7D; ab.storage.deviceId.6f8d91cb-99e4-4ad7-ae83-652c2a2c845d=%7B%22g%22%3A%2248a35bdb-9814-4633-2ed6-5ffcf643a481%22%2C%22c%22%3A1546384295243%2C%22l%22%3A1546384295243%7D; __ssid=a0ad6de098b1f57281fb9581ddbf38d; ajs_anonymous_id=%2287674ef2-4917-4237-b1e4-833cfc915f24%22; ahoy_visitor=ccafdf41-7eda-4db4-b0cf-252a952c7d94; ahoy_visit=3bc7608b-40a9-4b12-90a1-9b94c94dcb6b; ajs_group_id=null; remember_user_token=W1s0NTA3MzI1NV0sIiQyYSQxMCRoRFJ3TncxWElETGVqemdnVVFxOHBPIiwiMTU0NjM4NDQxMS44ODE3NTQiXQ%3D%3D--39ec879046cb2eb24e391856371d2879a78ed8f9; ajs_user_id=45073255; _derived_epik=v%3D1%26u%3D5NmVSp77nhTOdGy12mbRyv1bB6bNhGoh%26n%3Dr6XiDlW5Px5FetEBXnYwmg%3D%3D%26m%3D7; ab.storage.sessionId.6f8d91cb-99e4-4ad7-ae83-652c2a2c845d=%7B%22g%22%3A%220929b83b-ad63-6401-02ed-5c381166e899%22%2C%22e%22%3A1546384514260%2C%22c%22%3A1546384483524%2C%22l%22%3A1546384484260%7D; amplitude_id_b87e0e586f364c2c189272540d489b01instacart.com=eyJkZXZpY2VJZCI6ImM0MzY3YTc1LWNmMDktNDc1NC1hZmVhLWM5NDI2OGU3NTI1MFIiLCJ1c2VySWQiOiI0NTA3MzI1NSIsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTU0NjM4NDE3NjU2NywibGFzdEV2ZW50VGltZSI6MTU0NjM4NDQ4NDI2NCwiZXZlbnRJZCI6NzQsImlkZW50aWZ5SWQiOjE4LCJzZXF1ZW5jZU51bWJlciI6OTJ9; 
_instacart_session=NnVQRTd6bVVENWJjMzJqR1hDZ1NrSCtjaGg1OUtvM21MZWszNTV3Rm1YR2o5ZlNHWUJrWjcxV09aZW9FUE5oRS9DaTJZYnE5ZFdpTmIyZEJnbnJ2UXhOcTJLUjNFZWl6UVdneUEwY04wT2laVE1MUTEwVEh3bFFiblppY2k2eExDNzIvWlpIKzBTQVprSjZPYXZIdVpjSHBiaXJ3S1E1TlFRa0lUUGVpZHh0UGFFM3l3QTBkK2Z3ZUNkR0kvZ0l3L2R2MngwWlZnaWNVYWRZR080UEU5VWZRR2V1WWs2NFdlT3QrNmRKcnBzQ29BWGhHL3dPSndTb2JsZk5EY3RzS1pKdXIyM0lsQmp0bnp5Q0JOeVdpZGVQSHhmZmRkNEcyWFVRVkVjU0Frajg9LS1aa3NiQjBzemQwZ2lTRHJ1WFRGcit3PT0%3D--25b33bd374a833bf9f15048e7141f7f2ca1a3bb8",
# 'pragma': 'no-cache',
# 'upgrade-insecure-requests': '1',
'user-agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'",
}
def get_item_price(item_id):
url = URL_TEMPLATE.format(item=item_id)
response = requests.get(url, headers=HEADERS)
if not response.ok:
raise Exception(str(response.status_code) + str(response.reason))
data = response.json()
name = data['container']['title']
price = data['container']['modules'][0]['data']['item']['pricing']['price']
size = None
store = None
try:
size = data['container']['modules'][0]['data']['item']['size']
store = data['container']['modules'][0]['data']['breadcrumbs'][0]['path'].split('/')[0]
except:
None
# price = data['container']['modules'][0]['data']['item']['variable_estimate']['price_per_unit']
return name, price, store, size
if __name__ == '__main__':
# item = 173628440
# item = 192139112
if len(sys.argv) != 2:
print('Error. usage: {} <item_id>'.format(sys.argv[0]))
exit(1)
name, price, store, size = get_item_price(sys.argv[1])
print('[{store}] {name}: {price} ({size})'.format(store=store, name=name,
price=price, size=size))
| [
"gilsho@cs.stanford.edu"
] | gilsho@cs.stanford.edu |
0a85f7a6a915b8b2c9a9a194d8e7009744ca7c19 | 130329bb6f845849bc6bdc2cf4aa3e64577d1376 | /urls.py | d9c996153df4171f140968dec414b184f5b8e74b | [] | no_license | voramir/prometheus-collector | 480c5a3a4f7a497b607ccb1c3d3d080f95034767 | da076846ad16b38baa84c551b7f049aed0f6fb54 | refs/heads/master | 2021-04-03T09:23:29.683032 | 2018-03-08T19:43:35 | 2018-03-08T19:43:35 | 124,439,120 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,409 | py | from metrics_classes import SingleValueGauge, BiDirGauge, BucketsGauge
import sys
#from json-prometheus import host
host = sys.argv[1]
collected1 = 'https://{}:215/api/analytics/v1/datasets/cpu.utilization/data?seconds=1'.format(host)
collected2 = 'https://{}:215/api/analytics/v1/datasets/io.ops[op]/data?seconds=1'.format(host)
collected3 = 'https://{}:215/api/analytics/v1/datasets/nic.kilobytes[direction]/data?seconds=1'.format(host)
collected4 = 'https://{}:215/api/analytics/v1/datasets/ip.bytes[hostname]/data?seconds=1'.format(host)
collected5 = 'https://{}:215/api/analytics/v1/datasets/arc.size[component]/data?seconds=1'.format(host)
collected6 = 'https://{}:215/api/analytics/v1/datasets/io.bytes[op]/data?seconds=1'.format(host)
collected7 = 'https://{}:215/api/analytics/v1/datasets/arc.accesses[hit/miss]/data?seconds=1'.format(host)
collected8 = 'https://{}:215/api/analytics/v1/datasets/arc.hitratio/data?seconds=1'.format(host)
collected9 = 'https://{}:215/api/analytics/v1/datasets/arc.l2_accesses[hit/miss]/data?seconds=1'.format(host)
collected10 = 'https://{}:215/api/analytics/v1/datasets/arc.l2_size/data?seconds=1'.format(host)
collected11 = 'https://{}:215/api/analytics/v1/datasets/dnlc.accesses[hit/miss]/data?seconds=1'.format(host)
collected12 = 'https://{}:215/api/analytics/v1/datasets/arc.l2_accesses[share]/data?seconds=1'.format(host)
collected13 = 'https://{}:215/api/analytics/v1/datasets/arc.accesses[share]/data?seconds=1'.format(host)
collected14 = 'https://{}:215/api/analytics/v1/datasets/nfs3.bytes[share]/data?seconds=1'.format(host)
collected15 = 'https://{}:215/api/analytics/v1/datasets/nfs3.bytes[client]/data?seconds=1'.format(host)
collected16 = 'https://{}:215/api/analytics/v1/datasets/mem.heap[application]/data?seconds=1'.format(host)
collected17 = 'https://{}:215/api/analytics/v1/datasets/nfs3.ops[share]/data?seconds=1'.format(host)
collected18 = 'https://{}:215/api/analytics/v1/datasets/nfs3.ops[client]/data?seconds=1'.format(host)
bundles = []
bundles.append((SingleValueGauge, (collected1, 'cpu_utilization', host)))
bundles.append((BiDirGauge, (collected2, 'io_ops', host, 'write', 'read')))
bundles.append((BiDirGauge, (collected3, 'nic_direction', host, 'out', 'in')))
bundles.append((BucketsGauge, (collected4, 'ip_bytes_by_hostname', host)))
bundles.append((BucketsGauge, (collected5, 'arc_size_by_component', host)))
bundles.append((BiDirGauge, (collected6, 'hdd_direction', host, 'write', 'read')))
bundles.append((BiDirGauge, (collected7, 'ARC_accesses', host, 'metadata_hits', 'metadata_misses')))
bundles.append((BucketsGauge, (collected8, 'arc_hit_ratio', host)))
bundles.append((BiDirGauge, (collected9, 'L2ARC_accesses', host, 'hits', 'misses')))
bundles.append((BucketsGauge, (collected10, 'L2ARC_size', host)))
bundles.append((BiDirGauge, (collected11, 'DNLC_access', host, 'hits', 'misses')))
bundles.append((BucketsGauge, (collected12, 'L2ARC_accesses_by_share', host)))
bundles.append((BucketsGauge, (collected13, 'ARC_accesses_by_share', host)))
bundles.append((BucketsGauge, (collected14, 'nfs3_bytes_by_share', host)))
bundles.append((BucketsGauge, (collected15, 'nfs3_bytes_by_client', host)))
bundles.append((BucketsGauge, (collected16, 'mem_heap_by_application', host)))
bundles.append((BucketsGauge, (collected17, 'nfs3_ops_by_share', host)))
bundles.append((BucketsGauge, (collected18, 'nfs3_ops_by_client', host)))
| [
"aerborne@deprivedsoftware.com"
] | aerborne@deprivedsoftware.com |
c0581b0edf96aaccec47155c953f53614a4418d9 | a1588f525c22830c3accc651fd810bb43e064f26 | /vco-server.py | 1915f3c411c57988d1d4ed4d571dba6c82ea8be4 | [] | no_license | sigma/vco-gae | 9a405b5b006a0306db634d427e600c61598676e3 | 0c4f4d780b3011ac54611513a647103a9ee1cdde | refs/heads/master | 2023-06-25T11:19:40.616016 | 2010-10-10T16:04:14 | 2010-10-10T16:04:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,546 | py | import sys
sys.path.insert(0, 'zope.egg')
sys.path.insert(0, 'ZSI.egg')
import logging
from datetime import datetime
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
import vco.generated.VSOWebControlService_server
import vco.data4 as data4
from ZSI.schema import GED
from ZSI.twisted.wsgi import SOAPApplication, soapmethod, SOAPHandlerChainFactory, WSGIApplication
def _soapmethod(op):
    """Build a ZSI soapmethod decorator for operation `op`.

    Looks up the generated request/response classes for the operation in the
    VSO namespace and wires their typecodes into the decorator.
    """
    ns = "http://webservice.vso.dunes.ch"
    request_cls = GED(ns, op).pyclass
    response_cls = GED(ns, op + "Response").pyclass
    return soapmethod(request_cls.typecode, response_cls.typecode,
                      operation=op, soapaction=op)
class VcoService(SOAPApplication):
    """SOAP endpoint emulating the Dunes/VMware Orchestrator (VSO) web service.

    Each ``soap_<op>`` handler is decorated with ``_soapmethod('<op>')``, which
    binds it to the generated request/response classes for that operation.
    Handlers unpack the request, delegate to the ``data4`` model layer, fill in
    the ``_<op>Return`` attribute of the response, and return the
    ``(request, response)`` pair expected by the ZSI handler chain.
    """
    factory = SOAPHandlerChainFactory

    @_soapmethod('echo')
    def soap_echo(self, request, response, **kw):
        # Simple liveness check: echo the message back unchanged.
        msg = request._message
        logging.debug("[/] echo: %s" % (msg))
        response._echoReturn = msg
        return request, response

    @_soapmethod('echoWorkflow')
    def soap_echoWorkflow(self, request, response, **kw):
        msg = request._workflowMessage
        logging.debug("[/] echo: %s" % (msg))
        response._echoWorkflowReturn = msg
        return request, response

    @_soapmethod('getWorkflowForId')
    def soap_getWorkflowForId(self, request, response, **kw):
        wf_id = request._workflowId
        user = request._username
        pwd = request._password
        logging.debug("[%s/%s] getWorkflowForId: %s" % (user, pwd, wf_id))
        wf = data4.Workflow.findById(wf_id)
        response._getWorkflowForIdReturn = wf
        return request, response

    @_soapmethod('executeWorkflow')
    def soap_executeWorkflow(self, request, response, **kw):
        wf_id = request._workflowId
        user = request._username
        pwd = request._password
        inputs = {}
        for i in request._workflowInputs:
            inputs[i._name] = (i._type, i._value)
        # Fixed: log after parsing so the inputs actually appear in the log
        # (previously this logged the still-empty dict).
        logging.debug("[%s/%s] executeWorkflow: %s (%s)" % (user, pwd, wf_id, inputs))
        wf = data4.Workflow.findById(wf_id)
        response._executeWorkflowReturn = wf.run(inputs)
        return request, response

    @_soapmethod('simpleExecuteWorkflow')
    def soap_simpleExecuteWorkflow(self, request, response, **kw):
        wf_id = request._in0
        user = request._in1
        pwd = request._in2
        inputs = {}
        # Inputs arrive as one flat comma-separated list of name,type,value
        # triples.  NOTE(review): this breaks if a value contains a comma --
        # kept as-is since the wire format is fixed by callers.
        input = request._in3.split(',')
        for (name, type, value) in zip(input[::3], input[1::3], input[2::3]):
            inputs[name] = (type, value)
        # Fixed: log after parsing so the inputs actually appear in the log.
        logging.debug("[%s/%s] simpleExecuteWorkflow: %s (%s)" % (user, pwd, wf_id, inputs))
        wf = data4.Workflow.findById(wf_id)
        response._simpleExecuteWorkflowReturn = wf.run(inputs)
        return request, response

    @_soapmethod('cancelWorkflow')
    def soap_cancelWorkflow(self, request, response, **kw):
        tk_id = request._workflowTokenId
        user = request._username
        pwd = request._password
        logging.debug("[%s/%s] cancelWorkflow: %s" % (user, pwd, tk_id))
        data4.WorkflowToken.findById(tk_id).cancel()
        return request, response

    @_soapmethod('answerWorkflowInput')
    def soap_answerWorkflowInput(self, request, response, **kw):
        tk_id = request._workflowTokenId
        user = request._username
        pwd = request._password
        inputs = {}
        for i in request._answerInputs:
            inputs[i._name] = (i._type, i._value)
        data4.WorkflowToken.findById(tk_id).answer(inputs)
        return request, response

    @_soapmethod('getWorkflowTokenStatus')
    def soap_getWorkflowTokenStatus(self, request, response, **kw):
        tk_ids = request._workflowTokenIds
        user = request._username
        pwd = request._password
        tks = [data4.WorkflowToken.findById(tk_id) for tk_id in tk_ids]
        response._getWorkflowTokenStatusReturn = [tk._globalState for tk in tks]
        return request, response

    @_soapmethod('getWorkflowTokenResult')
    def soap_getWorkflowTokenResult(self, request, response, **kw):
        tk_id = request._workflowTokenId
        user = request._username
        pwd = request._password
        token = data4.WorkflowToken.findById(tk_id)
        response._getWorkflowTokenResultReturn = token.result()
        return request, response

    @_soapmethod('getWorkflowTokenForId')
    def soap_getWorkflowTokenForId(self, request, response, **kw):
        tk_id = request._workflowTokenId
        user = request._username
        pwd = request._password
        response._getWorkflowTokenForIdReturn = data4.WorkflowToken.findById(tk_id)
        return request, response

    @_soapmethod('getAllPlugins')
    def soap_getAllPlugins(self, request, response, **kw):
        user = request._username
        pwd = request._password
        response._getAllPluginsReturn = data4.Plugin.findAll()
        return request, response

    @_soapmethod('getAllWorkflows')
    def soap_getAllWorkflows(self, request, response, **kw):
        user = request._username
        pwd = request._password
        wfs = data4.Workflow.findAll()
        response._getAllWorkflowsReturn = wfs
        return request, response

    @_soapmethod('getWorkflowsWithName')
    def soap_getWorkflowsWithName(self, request, response, **kw):
        user = request._username
        pwd = request._password
        workflowName = request._workflowName
        logging.debug("[%s/%s] getWorkflowsWithName: %s" % (user, pwd, workflowName))
        wfs = data4.Workflow.findByName(workflowName)
        response._getWorkflowsWithNameReturn = wfs
        return request, response

    # TODO: complete implem
    @_soapmethod('hasRights')
    def soap_hasRights(self, request, response, **kw):
        # Fixed: the generated response attribute is underscore-prefixed like
        # every other operation; the bare 'hasRightsReturn' was silently ignored.
        response._hasRightsReturn = False
        return request, response

    # TODO: complete implem
    @_soapmethod('sendCustomEvent')
    def soap_sendCustomEvent(self, request, response, **kw):
        return request, response

    @_soapmethod('findForId')
    def soap_findForId(self, request, response, **kw):
        type = request._type
        id = request._id
        user = request._username
        pwd = request._password
        objs = data4.FinderResult.find(id=id, type=type)
        if len(objs) == 0:
            obj = None
        else:
            obj = objs[0]
        response._findForIdReturn = obj
        return request, response

    # TODO: complete implem
    @_soapmethod('findRelation')
    def soap_findRelation(self, request, response, **kw):
        type = request._parentType
        id = request._parentId
        relation = request._relationName
        user = request._username
        pwd = request._password
        response._findRelationReturn = []
        # Fixed: every handler must return the (request, response) pair;
        # returning only the response broke the ZSI handler chain here.
        return request, response

    # TODO: complete implem
    @_soapmethod('hasChildrenInRelation')
    def soap_hasChildrenInRelation(self, request, response, **kw):
        type = request._parentType
        id = request._parentId
        relation = request._relationName
        user = request._username
        pwd = request._password
        # Fixed: renamed to match the 'hasChildrenInRelation' operation
        # ('In' was missing), so the value lands on the generated response
        # class attribute instead of an unused ad-hoc one.
        response._hasChildrenInRelationReturn = False
        return request, response

    @_soapmethod('find')
    def soap_find(self, request, response, **kw):
        type = request._type
        query = request._query
        user = request._username
        pwd = request._password
        objs = data4.FinderResult.find(type=type, query=query, _query_result=True)
        response._findReturn = objs
        return request, response
# WSGI dispatch table: the SOAP service is mounted under the 'webservice' path.
application = WSGIApplication()
application['webservice'] = VcoService()
def real_main():
    """Entry point: raise log verbosity and serve the SOAP app on App Engine."""
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    run_wsgi_app(application)
def profile_main():
    """Run real_main() under cProfile and dump the sorted stats to the log.

    Swap `main = profile_main` below to enable profiling.
    """
    import cProfile, pstats, StringIO
    profiler = cProfile.Profile()
    profiler = profiler.runctx("real_main()", globals(), locals())
    out = StringIO.StringIO()
    stats = pstats.Stats(profiler, stream=out)
    stats.sort_stats("time")     # or "cumulative"
    stats.print_stats(80)        # number of entries to print
    # Optional caller/callee breakdowns.
    stats.print_callees()
    stats.print_callers()
    # Emit everything to the application log.
    logging.info("Profile data:\n%s", out.getvalue())
# Switch to profile_main here to collect profiling data instead.
main = real_main

if __name__ == "__main__":
    main()
| [
"yann.hodique@gmail.com"
] | yann.hodique@gmail.com |
e1af21122ea893a3e542363a7147e6c62d4c30b2 | e6b4f5709906a50206bd652a147146ea829b65cb | /API/create_mappings/location.py | 949a9cf77290cfad261dafc4a01bcc7a4f13d772 | [] | no_license | sajalk95/skill-exchange-elastic-search | d3d63abb7c27fa2d8c7a77cfdc25e19518cbef57 | 2cc8c642442bff6e9f1f866b5116604dd72c11fb | refs/heads/master | 2022-08-01T21:49:54.931979 | 2020-05-26T07:22:23 | 2020-05-26T07:22:23 | 266,677,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 813 | py | import sys
# HACK: gives this module a dummy __path__ attribute -- presumably so import
# machinery treats it as a package; confirm before removing.
setattr(sys.modules[__name__], '__path__', '__path__')
import bcrypt
from elasticsearch import Elasticsearch
from Mappings.location import MAPPINGS
from settings import USERNAME, PASSWORD, PORT, pwhash
from constants.locations.create_mapping import ELASTIC_SEARCH_END_POINT, LOCATION_MAPPING
# Authenticated Elasticsearch client; endpoint and credentials come from settings.
client = Elasticsearch(
    ELASTIC_SEARCH_END_POINT,
    http_auth=(USERNAME, PASSWORD),
    port=PORT,
)
# Target index name for the location mapping.
INDEX = LOCATION_MAPPING
OPERATION_TYPE = 'index'
def create_index():
    """Prompt for the admin password and, if it checks out against the stored
    bcrypt hash, create the location index with its mappings."""
    entered = input("Enter the password: ").encode("utf-8")
    if not bcrypt.checkpw(entered, pwhash):
        print("Password didn't match")
        return
    client.indices.create(
        index=INDEX,
        body=MAPPINGS
    )
if __name__ == "__main__":
create_index() | [
"khandelwal95.sajal@outlook.com"
] | khandelwal95.sajal@outlook.com |
911180e7ee64a0100d3c6313c45f05e653ec0dd9 | daad73d3396ed112b41ecd10ba7663e248d25397 | /auth.py | 524b08c4a25d1224f6f0d28358b54a8c00f16c52 | [] | no_license | mehmetcanbudak/SpotifySentiment | d9dbd528e60e1ec2a8d525b653f3dcf9d0994e79 | 438e74272416e5633e9d36d4f39eacc239c3b0f4 | refs/heads/master | 2023-02-11T20:21:36.122829 | 2020-05-20T18:57:38 | 2020-05-20T18:57:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | def get_spotify_credentials():
return dict(
cid=open('auth/spotify_cid', 'r').read(),
secret=open('auth/spotify_secret', 'r').read()
)
def get_mapbox_token():
return open('auth/mapbox_token', 'r').read()
| [
"35010178+Ollie-Hooper@users.noreply.github.com"
] | 35010178+Ollie-Hooper@users.noreply.github.com |
158abd118a83ee6b026833fba3b0222be1727100 | cfe1ef581a8fa1e9c445c59d282a8d603a034342 | /ws9/ws9.py | 2f4b68050e33710e66a97d77ddc7ef8f153bbbdd | [] | no_license | sbarenfe/AY190 | f3800708f0123884f868c04b9ac451f20b11bd63 | c8f51560c1d36472e6e2a2af03d644ebbec0e2d0 | refs/heads/master | 2020-05-18T07:57:19.302857 | 2014-03-22T18:12:38 | 2014-03-22T18:12:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,655 | py | import numpy as np
import matplotlib.pyplot as plt
def two():
for i in range(1,6):
A,b=read(i)
for j in range(len(b)):
if A[j][j]==0:
print "Problem with:"
print A[j]
x=GENP(A,b)
print "x equals:"
print x
def three():
for i in range(1,6):
A,b=read(i)
for j in range(len(b)):
if A[j][j]==0:
print "Problem with:"
print A[j]
x=np.linalg.solve(A,b)
print "x equals:"
print x
def read(n):
Afile='LSE%i_m.dat' %n
bfile='LSE%i_bvec.dat' %n
A=np.loadtxt(Afile)
b=np.loadtxt(bfile)
print np.shape(A)
print len(b)
print np.linalg.slogdet(A)
return A,b
def GENP(A, b):
    """
    Gaussian elimination with no pivoting.

    input:  A is an n x n nonsingular matrix (any array-like)
            b is a length-n vector (any array-like)
    output: x, the solution of Ax = b, as a float64 numpy array.

    The inputs are copied into float64 arrays, so the caller's A and b are
    left untouched (the original docstring claimed they were modified, but
    the local rebinding meant they never were).  Because no pivoting is
    performed, a zero pivot on the diagonal causes a division by zero;
    callers are expected to check the diagonal first.
    """
    A = np.array(A, dtype=np.float64)
    b = np.array(b, dtype=np.float64)
    n = len(A)
    if b.size != n:
        raise ValueError("Invalid argument: incompatible sizes between A & b.", b.size, n)
    # Forward elimination: zero out the column below each pivot.
    for pivot_row in range(n - 1):
        for row in range(pivot_row + 1, n):
            multiplier = A[row, pivot_row] / A[pivot_row, pivot_row]
            # Exactly zero by construction, so skip the explicit subtraction.
            A[row, pivot_row] = 0
            # Vectorized row update replaces the original per-element loop.
            A[row, pivot_row + 1:] -= multiplier * A[pivot_row, pivot_row + 1:]
            b[row] -= multiplier * b[pivot_row]
    # Back substitution, last row upward (the k = n-1 case falls out
    # naturally because the dot product over an empty slice is 0).
    x = np.zeros(n)
    for k in range(n - 1, -1, -1):
        x[k] = (b[k] - np.dot(A[k, k + 1:], x[k + 1:])) / A[k, k]
    return x
| [
"scott.barenfeld@gmail.com"
] | scott.barenfeld@gmail.com |
962da81bad12df62af900b21c8b8e6774bbf0daf | f740903eb0f191e217621889d990fdeb46895c01 | /Codes 5x5/Arena_Gen.py | 2f64079a94c9296cd63106a065a1cb947bec914b | [
"MIT"
] | permissive | vamsikrishnabodaballa/Path-Finding-Robot | 56223950fa1a5e48bc76d7111df4ca1d570125e4 | 7e94dce95d58085b7f87fef8ce5de63c31fef75a | refs/heads/main | 2023-02-08T06:24:10.914696 | 2021-01-05T08:42:29 | 2021-01-05T08:42:29 | 326,934,860 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,356 | py | import numpy as np
import cv2
def bhagwan():
    """Interactively calibrate red/yellow colour ranges on the arena image,
    detect the coloured markers, classify each as circle or square, and map
    them onto a 5x5 grid.  Saves the grid and centroid arrays to .npy files
    and returns (arena, arena_mom_x, arena_mom_y, image_shape)."""
    im = cv2.imread("arena_Kmeans.jpg") # Image for thresholding
    showCrosshair = False
    fromCenter = False
    # 5x5 occupancy grid (codes below) and per-cell centroid coordinates.
    arena = np.zeros([5, 5], dtype=int)
    arena_mom_x = np.zeros([5, 5], dtype=float)
    arena_mom_y = np.zeros([5, 5], dtype=float)
    shape = im.shape
    for i in range(2): # 0 for red and 1 for yellow
        # The user drags two sample boxes over the colour; the combined
        # min/max BGR of both crops (widened by `thresh`) defines the range.
        r = cv2.selectROI("Image", im, fromCenter, showCrosshair) # first time color selection
        imcrop = im[int(r[1]):int(r[1] + r[3]), int(r[0]):int(r[0] + r[2])]
        r1 = cv2.selectROI("Image", im, fromCenter, showCrosshair) # second time color selection
        imcrop1 = im[int(r1[1]):int(r1[1] + r1[3]), int(r1[0]):int(r1[0] + r1[2])]
        imcropmin = [imcrop[:, :, 0].min(), imcrop[:, :, 1].min(), imcrop[:, :, 2].min()]
        imcropmax = [imcrop[:, :, 0].max(), imcrop[:, :, 1].max(), imcrop[:, :, 2].max()]
        imcrop1min = [imcrop1[:, :, 0].min(), imcrop1[:, :, 1].min(), imcrop1[:, :, 2].min()]
        imcrop1max = [imcrop1[:, :, 0].max(), imcrop1[:, :, 1].max(), imcrop1[:, :, 2].max()]
        thresh = 25 # margin added around the sampled range of colors
        minBGR = np.array([min(imcropmin[0], imcrop1min[0]) - thresh, min(imcropmin[1], imcrop1min[1]) - thresh,
                           min(imcropmin[2], imcrop1min[2]) - thresh])
        maxBGR = np.array([max(imcropmax[0], imcrop1max[0]) + thresh, max(imcropmax[1], imcrop1max[1]) + thresh,
                           max(imcropmax[2], imcrop1max[2]) + thresh])
        # Persist the calibrated ranges for later runs.
        if i == 0:
            np.save("Red_Range", [minBGR, maxBGR])
        elif i == 1:
            np.save("Yellow_Range", [minBGR, maxBGR])
        maskBGR = cv2.inRange(im, minBGR, maxBGR)
        kernel = np.ones((5, 5), np.uint8)
        # Erode once to drop speckle noise before contour extraction.
        maskBGR = cv2.erode(maskBGR, kernel, iterations=1)
        contours, hierarchy = cv2.findContours(maskBGR, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        cv2.imshow('erode_mask', maskBGR)
        for cnt in contours:
            M = cv2.moments(cnt)
            area = cv2.contourArea(cnt)
            if area > 100:  # ignore tiny noise contours
                cv2.drawContours(im, [cnt], 0, 0, 3)
                cv2.imshow("image", im)
                x, y, w, h = cv2.boundingRect(cnt)
                rect_area = w * h
                # Extent = contour area / bounding-box area: squares fill
                # their box (~1.0), circles only ~pi/4 (~0.785).
                extent = float(area) / rect_area
                # Map the centroid into 5x5 grid coordinates.
                # NOTE(review): shape[0] is the image height but is used to
                # normalize x (and shape[1] for y) -- this looks swapped and
                # only works for square images; confirm.
                cx = int((int(M['m10'] / M['m00']) / shape[0]) * 5)
                cy = int((int(M['m01'] / M['m00']) / shape[1]) * 5)
                # red circle is 1 red square is 2 yellow circle is 3 and yellow square is 4
                if extent < 0.8: # circle
                    if i == 0:
                        j = 1
                    else:
                        j = 3
                elif extent >= 0.8: # square
                    if i == 0:
                        j = 2
                    else:
                        j = 4
                arena[cy][cx] = j
                arena_mom_x[cy][cx] = M['m10'] / M['m00']
                arena_mom_y[cy][cx] = M['m01'] / M['m00']
        cv2.destroyAllWindows()
    cv2.waitKey(0)
    cv2.destroyAllWindows()
    print(arena)
    # Persist the detection results for the path-finding stage.
    np.save("arena", arena)
    np.save("arena_mom_x", arena_mom_x)
    np.save("arena_mom_y", arena_mom_y)
    return arena, arena_mom_x, arena_mom_y, shape
bhagwan() | [
"noreply@github.com"
] | vamsikrishnabodaballa.noreply@github.com |
52059af5c5ae581dccfdd80c19ec5702975af559 | 99fe2cb2939690eafcbc92940728e18f852cbb7d | /1week/11656.py | fa9b8aad5990c1ee0dd4a91b3c5035e12f818918 | [] | no_license | gwcat0506/coding_test_2021 | 68f7be41eb8d0d3d24c863af93b4406a3a8d1788 | ba67cc41337ea0cbde4bf71732dc2e29d7234eec | refs/heads/main | 2023-07-02T06:01:26.816483 | 2021-07-06T04:38:54 | 2021-07-06T04:38:54 | 383,339,427 | 0 | 0 | null | 2021-07-15T10:20:03 | 2021-07-06T04:31:16 | Python | UTF-8 | Python | false | false | 284 | py |
# 정렬만 하면됨 문제 덜 풀었음!
import sys
data = sys.stdin.readline()
string_list = []
for i in range(len(data)):
string_list.append(data[i:len(data)-1])
print(sorted(string_list))
for i in range(len(string_list)):
print(string_list[i]) | [
"noreply@github.com"
] | gwcat0506.noreply@github.com |
0ba336c0fec4acf7a2d9ddb7e398bd40c56b514a | 454f8e9a0dcb2ba7ab1d399e2a4268ae0c715041 | /SW/BruteForce/5189_전자카트.py | 691564b8657ef6b41611bf7587a038895b1968ba | [] | no_license | YiSoJeong/Algorithm_Python | bcd441cbc2a4675770486d7528331403bfddaca4 | 131f5a32b4b87d86ea1d5e24f52bfbdafa7a6f88 | refs/heads/master | 2020-08-06T11:15:52.382191 | 2020-05-05T04:12:45 | 2020-05-05T04:12:45 | 212,956,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,879 | py | import sys
sys.stdin = open('sample_input.txt', 'r')
# sol 1
# 분류 : Brute Force
# 발상 : 이동할 수 있는 경우의 수 수열로 나열한 후 계산
# 변형 : 각 구역을 한 번씩만 방문하고 사무실로 돌아올 때의 최소 배터리 사용량
# 조합 : 순열
# import itertools
#
# T = int(input())
# for t in range(1, T+1):
# N = int(input())
# battery = [list(map(int, input().split())) for _ in range(N)]
# section = [i for i in range(1, N)]
# ans = float('inf')
# for path in list(itertools.permutations(section)):
# total = 0
# for i in range(N):
# if i == 0:
# total += battery[0][path[i]]
# elif i == N-1:
# total += battery[path[i-1]][0]
# else:
# total += battery[path[i-1]][path[i]]
# if total < ans:
# ans = total
#
# print('#{} {}'.format(t, ans))
# sol 2
# 분류 : Back Tracking
# 발상 : 각 구역을 모두 돌아야 함
# 변형 : 각 구역을 한 번씩만 방문하고 사무실로 돌아올 때의 최소 배터리 사용량
# 조합 : dfs
def dfs(start):
    """Backtracking tour search from `start`, tracking the cheapest full loop
    (visit every section once, return to the office at index 0) in `ans`."""
    global total, ans
    if len(path) == N - 1:
        # Complete tour: sum every hop plus the return leg to the office.
        for src, dst in path:
            total += battery[src][dst]
        total += battery[start][0]
        ans = min(ans, total)
        total = 0
        return
    for nxt in range(1, N):
        if visit[nxt]:
            continue
        visit[nxt] = True
        path.append([start, nxt])
        dfs(nxt)
        path.pop()
        visit[nxt] = False
# One test case per line group: N, then the N x N battery-cost matrix.
T = int(input())
for t in range(1, T+1):
    N = int(input())
    battery = [list(map(int, input().split())) for _ in range(N)]
    # Shared state consumed by dfs(): visited flags, current path,
    # running cost accumulator, and best answer so far.
    visit = [False]*N
    path, total, ans = [], 0, float('inf')
    dfs(0)
    print('#{} {}'.format(t, ans))
| [
"soj980312@gmail.com"
] | soj980312@gmail.com |
da875a6f7b687b754838f009050d6de8baa5d73d | 8f26dd89cff78d03220d107526b84a5309951e79 | /test/test_util.py | 5b5f0a37beeb5225363c88a005cd6080da0eb26f | [] | no_license | altaurog/fake-tsapi | 4b0c291ca219fb1efb0664814e7df80e672d59f6 | e629f53d55f841959b31c122b3f81b4fcf5de845 | refs/heads/master | 2020-09-14T03:07:18.698842 | 2019-11-20T17:57:55 | 2019-11-20T17:57:55 | 222,997,162 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 522 | py | import pytest
from api import cities, util
# Table-driven cases for util.search over the city-slug list.
# NOTE(review): the patterns appear glob-like ('?' = one character,
# '*' = any run) and seem to match anywhere inside the slug -- confirm
# against util.search itself.
@pytest.mark.parametrize("pattern, expected", [
    ('SA', [
        "busan",
        "kinshasa",
        "osaka",
        "san-francisco",
        "san-jose",
        "santiago",
        "sao-paulo",
    ]),
    ('d?l', [
        "dallas",
        "delhi",
        "guadalajara",
        "philadelphia",
    ]),
    ('n*ch', [
        "munich",
        "nanchang",
    ]),
])
def test_search(pattern, expected):
    # Materialize the (possibly lazy) search result before comparing.
    assert list(util.search(cities.cities, pattern)) == expected
"git@aryehleib.com"
] | git@aryehleib.com |
4a13e69ae72231f2bbbeccfef203a95165134ed0 | 98fd3275aa34c90c26d1f43d70983ae762c69064 | /floor_division.py | 1f092bde43f669fac26bc9d825a9243c8393537d | [] | no_license | hasnatosman/problem_solving | 62b5eaf6a418ae7f75d187b2c8e1e4b0ab4750fd | 1f33acc6289d322a9e950b6e39185a505159b7e2 | refs/heads/main | 2023-06-17T05:34:33.908078 | 2021-07-15T06:36:45 | 2021-07-15T06:36:45 | 383,810,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | """
PROBLEM 4:
Find the floor division of two numbers.
HINTS:
Just use two // instead of one.
"""
# Read the two operands from the user.
dividend = int(input('Enter the first number: '))
divisor = int(input('Enter the second number: '))
# Floor division (//) keeps only the integer part of the quotient.
quotient = dividend // divisor
print("Result is: ", quotient)
"""
Explanation:
When you divide one number by another you get two things: the integer part of the
quotient, and the remainder.
To get just the integer quotient (the result without the remainder), use two division symbols (//) instead of one.
"""
"""
import math
result = math.floor(3.4)
print(result)
""" | [
"noreply@github.com"
] | hasnatosman.noreply@github.com |
6f522dcdc162e405f38c3532dbf80e916902ef54 | 47b0ea5c878cf0cc563814466a20aeb6607798df | /build/lib.linux-x86_64-2.7/poll/models.py | 7780e6c7c848ee3c60c4be1cb5a1765aa0363029 | [] | no_license | techoutlooks/rapidsms-polls | 2d87cb9a7cdcd6b0fbbe41a326c06ecbad2b8cd1 | 18c32d41212ae24f4f9dfab4cbb7987854eb28fd | refs/heads/master | 2020-07-14T02:32:14.451105 | 2015-07-11T22:38:42 | 2015-07-11T22:38:42 | 36,446,803 | 0 | 0 | null | 2015-05-28T15:10:25 | 2015-05-28T15:10:25 | Python | UTF-8 | Python | false | false | 38,751 | py | import datetime
import difflib
from celery.task import task
import django
from django.db import models, transaction
from django.db.models import Sum, Avg, Count, Max, Min, StdDev
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django import forms
from django.utils.translation import ugettext as _
from mptt.forms import TreeNodeChoiceField
from rapidsms.models import Contact, Connection
from eav import register
from eav.models import Value, Attribute
from generic.sorters import SimpleSorter
from rapidsms.contrib.locations.models import Location
from rapidsms.contrib.locations.nested import models as nested_models
from rapidsms_httprouter.models import Message, MessageBatch
from django.conf import settings
import re
from django.utils.translation import (ugettext, activate, deactivate)
from dateutil.relativedelta import relativedelta
import logging
log = logging.getLogger(__name__)
# Signal fired once Poll.start() has queued the outgoing question messages.
poll_started = django.dispatch.Signal(providing_args=[])

# The standard template allows for any amount of whitespace at the beginning,
# followed by the alias(es) for a particular category, followed by any non-
# alphabetical character, or the end of the message
STARTSWITH_PATTERN_TEMPLATE = '^\s*(%s)(\s|[^a-zA-Z]|$)'
# Like STARTSWITH, but the alias may appear anywhere in the message.
CONTAINS_PATTERN_TEMPLATE = '^.*\s*(%s)(\s|[^a-zA-Z]|$)'

# This can be configurable from settings, but here's a default list of
# accepted yes keywords
YES_WORDS = [_('yes'), _('yeah'), _('yep'), _('yay'), 'y']

# This can be configurable from settings, but here's a default list of
# accepted no keywords
NO_WORDS = [_('no'), _('nope'), _('nah'), _('nay'), 'n']
class ResponseForm(forms.Form):
    """Base form for editing which categories a poll response belongs to.

    Expects a `response` keyword argument; the category choices are limited
    to the response's poll, preselecting its current categories.
    """

    def __init__(self, data=None, **kwargs):
        response = kwargs.pop('response')
        # Bind the form only when data was actually submitted.
        form_args = (data,) if data else ()
        forms.Form.__init__(self, *form_args, **kwargs)
        current = Category.objects.filter(
            pk__in=response.categories.values_list('category', flat=True))
        self.fields['categories'] = forms.ModelMultipleChoiceField(
            required=False,
            queryset=response.poll.categories.all(),
            initial=current)
class NumericResponseForm(ResponseForm):
    # Adds a required float field for editing a numeric poll response's value.
    value = forms.FloatField()
class LocationResponseForm(ResponseForm):
    # Location responses are edited as a pick from the Location tree,
    # with '.' marking each nesting level in the dropdown.
    value = TreeNodeChoiceField(queryset=Location.objects.all(),
                                level_indicator=u'.', required=True)
class NameResponseForm(ResponseForm):
    # Free-text field for name/registration-based poll responses.
    value = forms.CharField()
class ResponseCategory(models.Model):
    """Join model recording that a Response was bucketed into a Category,
    and whether a user manually overrode the automatic categorization."""
    category = models.ForeignKey('Category')
    response = models.ForeignKey('Response', related_name='categories')
    # True when a human reassigned the category; `user` records who did it.
    is_override = models.BooleanField(default=False)
    user = models.ForeignKey(User, null=True)
class Poll(models.Model):
"""
Polls represent a simple-question, simple-response communication modality
via SMS. They can be thought of as a similar to a single datum in an XForm,
although for now the only data types available are yes/no, free-form text, and
numeric response. Fairly simple idea, a poll is created, containing a question
(the outgoing messages), a list of contacts (those to poll) and an expected
*type* of response. The poll can be edited, contact lists modified, etc. via
the web (the "user"), until it is eventually *started.* When a poll is started,
the outgoing question will be sent to all contacts, and any subsequent messages
coming in from the contacts associated with this poll (until they are polled again)
will be parsed (or attempted to be parsed) by this poll, and bucketed into a
particular category.
FIXME: contact groups, if implemented in core or contrib, should be used here,
instead of a many-to-many field
"""
TYPE_TEXT = 't'
TYPE_NUMERIC = 'n'
TYPE_LOCATION = 'l'
TYPE_REGISTRATION = 'r'
RESPONSE_TYPE_ALL = 'a'# all all responses
RESPONSE_TYPE_ONE = 'o' # allow only one
RESPONSE_TYPE_CHOICES = (
(RESPONSE_TYPE_ALL, _('Allow all')),
(RESPONSE_TYPE_ONE, _('Allow one')),
)
TYPE_CHOICES = {
TYPE_NUMERIC: dict(
label=_('Numeric Response'),
type=TYPE_NUMERIC,
db_type=Attribute.TYPE_FLOAT,
parser=None,
view_template='polls/response_numeric_view.html',
edit_template='polls/response_numeric_edit.html',
report_columns=((('Text', 'text', True, 'message__text', SimpleSorter()),
('Value', 'value', True, 'eav_values__value_float', SimpleSorter()),
('Categories', 'categories', True, 'categories__category__name', SimpleSorter()))),
edit_form=NumericResponseForm),
TYPE_TEXT: dict(
label=_('Free-form'),
type=TYPE_TEXT,
db_type=Attribute.TYPE_TEXT,
parser=None,
view_template='polls/response_text_view.html',
edit_template='polls/response_text_edit.html',
report_columns=(('Text', 'text', True, 'message__text', SimpleSorter()),
('Categories', 'categories', True, 'categories__category__name', SimpleSorter())),
edit_form=ResponseForm),
TYPE_REGISTRATION: dict(
label=_('Name/registration-based'),
type=TYPE_REGISTRATION,
db_type=Attribute.TYPE_TEXT,
parser=None,
view_template='polls/response_registration_view.html',
edit_template='polls/response_registration_edit.html',
report_columns=(('Text', 'text', True, 'message__text', SimpleSorter()),
('Categories', 'categories', True, 'categories__category__name', SimpleSorter())),
edit_form=NameResponseForm),
}
name = models.CharField(max_length=32,
help_text="Human readable name.")
question = models.CharField(_("question"), max_length=160)
messages = models.ManyToManyField(Message)
contacts = models.ManyToManyField(Contact, related_name='polls')
user = models.ForeignKey(User)
start_date = models.DateTimeField(null=True)
end_date = models.DateTimeField(null=True)
type = models.SlugField(max_length=8, null=True, blank=True)
default_response = models.CharField(_("default_response"), max_length=160, null=True, blank=True)
sites = models.ManyToManyField(Site)
objects = models.Manager()
on_site = CurrentSiteManager('sites')
response_type = models.CharField(max_length=1, choices=RESPONSE_TYPE_CHOICES, default=RESPONSE_TYPE_ALL, null=True,
blank=True)
class Meta:
permissions = (
("can_poll", "Can send polls"),
("can_edit_poll", "Can edit poll rules, categories, and responses"),
)
ordering = ["-end_date"]
@classmethod
def register_poll_type(cls, field_type, label, parserFunc, \
db_type=TYPE_TEXT, \
view_template=None, \
edit_template=None, \
report_columns=None, \
edit_form=None):
"""
Used to register a new question type for Polls. You can use this method to build new question types that are
available when building Polls. These types may just do custom parsing of the SMS text sent in, then stuff
those results in a normal core datatype, or they may lookup and reference completely custom attributes.
Arguments are:
label: The label used for this field type in the user interface
field_type: A slug to identify this field type, must be unique across all field types
parser: The function called to turn the raw string into the appropriate type, should take one argument:
'value' the string value submitted.
db_type: How the value will be stored in the database, can be one of: TYPE_FLOAT, TYPE_TEXT or TYPE_OBJECT
(defaults to TYPE_TEXT)
[view_template]: A template that renders an individual row in a table displaying responses
[edit_template]: A template that renders an individual row for editing a response
[report_columns]: the column labels for a table of responses for a poll of a particular type
[edit_form]: A custom edit form for editing responses
"""
# set the defaults
if view_template is None:
view_template = 'polls/response_custom_view.html'
if edit_template is None:
edit_template = 'polls/response_custom_edit.html'
if report_columns is None:
report_columns = (('Original Text', 'text'), ('Value', 'custom'))
Poll.TYPE_CHOICES[field_type] = dict(
type=field_type, label=label,
db_type=db_type, parser=parserFunc,
view_template=view_template,
edit_template=edit_template,
report_columns=report_columns,
edit_form=edit_form)
@classmethod
@transaction.atomic
def create_with_bulk(cls, name, type, question, default_response, contacts, user, is_urgent=False):
log.info("[Poll.create_with_bulk] TRANSACTION START")
log.info("[Poll.create_with_bulk] Creating a poll with bulk contacts...")
log.info("[Poll.create_with_bulk] ignoring blacklisted contacts...")
if getattr(settings, "BLACKLIST_MODEL", None):
app_label, model_name = settings.BLACKLIST_MODEL.rsplit(".")
try:
blacklists = models.get_model(app_label, model_name)._default_manager.values_list('connection')
contactsBefore = contacts.count()
contacts = contacts.exclude(connection__pk__in=blacklists)
contactsAfter = contacts.count()
log.info(
"[Poll.create_with_bulk] excluded [%d] blacklisted contacts. This poll will have [%d] active contacts." % (
(contactsBefore - contactsAfter), contactsAfter))
except:
raise Exception("Your Blacklist Model is Improperly configured")
log.info("[Poll.create_with_bulk] ignored blacklist ok.")
poll = Poll.objects.create(name=name, type=type, question=question, default_response=default_response,
user=user)
#batch for responses
log.info("[Poll.create_with_bulk] Adding contacts...")
poll.contacts.add(*contacts.values_list('pk', flat=True))
log.info("[Poll.create_with_bulk] Create message batch...")
batch = MessageBatch.objects.get_or_create(name=str(poll.pk))[0]
batch.priority = 0 if is_urgent else 1
batch.save()
log.info("[Poll.create_with_bulk] Adding the site...")
if 'django.contrib.sites' in settings.INSTALLED_APPS:
poll.sites.add(Site.objects.get_current())
log.info("[Poll.create_with_bulk] created ok.")
log.info("[Poll.create_with_bulk] TRANSACTION COMMIT")
return poll
def add_yesno_categories(self):
"""
This creates a generic yes/no poll categories for a particular poll
"""
#langs = self.contacts.values_list('language',flat=True).distinct()
langs = dict(settings.LANGUAGES).keys()
self.categories.get_or_create(name=_('yes'))
self.categories.get_or_create(name=_('no'))
self.categories.get_or_create(name=_('unknown'), default=True, error_category=True)
# add one rule to yes category per language
for l in langs:
try:
no_words = settings.NO_WORDS.get(l, NO_WORDS)
yes_words = settings.YES_WORDS.get(l, YES_WORDS)
except AttributeError:
no_words = NO_WORDS
yes_words = YES_WORDS
no_rule_string = '|'.join(no_words)
yes_rule_string = '|'.join(yes_words)
self.categories.get(name=_('yes')).rules.create(
regex=(STARTSWITH_PATTERN_TEMPLATE % yes_rule_string),
rule_type=Rule.TYPE_REGEX,
rule_string=(STARTSWITH_PATTERN_TEMPLATE % yes_rule_string))
self.categories.get(name=_('no')).rules.create(
regex=(STARTSWITH_PATTERN_TEMPLATE % no_rule_string),
rule_type=Rule.TYPE_REGEX,
rule_string=(STARTSWITH_PATTERN_TEMPLATE % no_rule_string))
self.log_poll_message_info(
" Poll creation categories - [{}]".format([str(category) for category in self.categories.all()]))
def is_yesno_poll(self):
return self.categories.count() == 3 and \
self.categories.filter(name=_('yes')).count() and \
self.categories.filter(name=_('no')).count() and \
self.categories.filter(name=_('unknown')).count()
def log_poll_message_warn(self, message):
log.warn("[poll-" + str(self.pk) + "] " + message)
def log_poll_message_info(self, message):
log.info("[poll-" + str(self.pk) + "] " + message)
def log_poll_message_debug(self, message):
log.debug("[poll-" + str(self.pk) + "] " + message)
def is_ready_to_send(self):
batches = MessageBatch.objects.filter(name=self.get_outgoing_message_batch_name()).all()
ready = True;
for batch in batches:
if batch.status != "P":
ready = False
break
return ready
def queue_message_batches_to_send(self):
batches = MessageBatch.objects.filter(name=self.get_outgoing_message_batch_name()).all()
self.log_poll_message_info("Queueing [%d] MessageBatches for sending." % len(batches))
for batch in batches:
batch.status = "Q"
batch.save()
@transaction.atomic
def start(self):
    """
    This starts the poll: outgoing messages are sent to all the contacts
    registered with this poll, and the start date is updated accordingly.
    All incoming messages from these users will be considered as
    potentially a response to this poll.
    """
    self.log_poll_message_info(" TRANSACTION START")
    # Guard against double-starting: a non-null start_date means the poll
    # has already been started.
    if self.start_date:
        self.log_poll_message_warn(" poll has a start date, not starting poll!")
        return
    self.log_poll_message_info(" Saving start date...")
    self.start_date = datetime.datetime.now()
    self.save()
    self.log_poll_message_info(" Start date saved ok.")
    self.log_poll_message_info(" start - startDate=" + str(self.start_date))
    contacts = self.contacts
    localized_messages = {}
    self.log_poll_message_info(" checking languages... " + str(dict(settings.LANGUAGES).keys()))
    # Send the (translated) question once per configured language.
    for language in dict(settings.LANGUAGES).keys():
        if language == "en":
            """default to English for contacts with no language preference"""
            localized_contacts = contacts.filter(language__in=["en", ''])
        else:
            localized_contacts = contacts.filter(language=language)
        if localized_contacts.exists():
            self.log_poll_message_info(" creating messages using Message.mass_text for [%d] contacts in [%s]..." % (
                len(localized_contacts), language))
            # One message per distinct connection; the batch status depends on
            # the FEATURE_PREPARE_SEND_POLL flag (see get_start_poll_batch_status).
            messages = Message.mass_text(gettext_db(field=self.question, language=language),
                                         Connection.objects.filter(contact__in=localized_contacts).distinct(),
                                         status='Q', batch_status=self.get_start_poll_batch_status(),
                                         batch_name=self.get_outgoing_message_batch_name())
            #localized_messages[language] = [messages, localized_contacts]
            self.log_poll_message_info(" messages created ok. Adding messages to self...")
            self.messages.add(*messages.values_list('pk', flat=True))
            self.log_poll_message_info(" messages added ok.")
    self.log_poll_message_info(" sending poll_started signal...")
    poll_started.send(sender=self)
    self.log_poll_message_info(" poll_started signal sent ok.")
    self.log_poll_message_info(" TRANSACTION COMMIT")
def end(self):
    """Close the poll by stamping its end date with the current time."""
    self.end_date = datetime.datetime.now()
    self.save()
def reprocess_responses(self):
    """Discard all automatic categorizations and re-run every response
    through the poll's current category rules (manual overrides are kept)."""
    # Only delete non-override categorizations; is_override=True were set by a user.
    for rc in ResponseCategory.objects.filter(category__poll=self, is_override=False):
        rc.delete()
    for resp in self.responses.all():
        resp.has_errors = False
        for category in self.categories.all():
            for rule in category.rules.all():
                regex = re.compile(rule.regex, re.IGNORECASE | re.UNICODE)
                if resp.eav.poll_text_value:
                    # First matching rule wins for this category; skip if the
                    # response already carries this category.
                    if regex.search(resp.eav.poll_text_value.lower()) and not resp.categories.filter(
                            category=category).count():
                        if category.error_category:
                            resp.has_errors = True
                        rc = ResponseCategory.objects.create(response=resp, category=category)
                        break
        # Nothing matched: fall back to the default category, if one exists.
        if not resp.categories.all().count() and self.categories.filter(default=True).count():
            if self.categories.get(default=True).error_category:
                resp.has_errors = True
            resp.categories.add(
                ResponseCategory.objects.create(response=resp, category=self.categories.get(default=True)))
        resp.save()
def process_response(self, message):
    """Parse an incoming message as a response to this poll.

    Creates a Response, stores the parsed value in the appropriate eav
    attribute for the poll type, categorizes text responses, and returns
    the tuple (response, outgoing_message) where outgoing_message is the
    (possibly translated) auto-reply text, or None.
    """
    self.log_poll_message_debug("processing response...")
    # Accept either a router message wrapper (with .db_message) or a raw
    # database Message.
    if hasattr(message, 'db_message'):
        db_message = message.db_message
    else:
        db_message = message
    resp = Response.objects.create(poll=self, message=db_message, contact=db_message.connection.contact,
                                   date=db_message.date)
    self.log_poll_message_debug("Response PK ={}".format(str(resp.pk)))
    outgoing_message = self.default_response
    if self.type == Poll.TYPE_LOCATION:
        typedef = Poll.TYPE_CHOICES[self.type]
        try:
            cleaned_value = typedef['parser'](message.text)
            resp.eav.poll_location_value = cleaned_value
            resp.save()
        except ValidationError as e:
            resp.has_errors = True
    elif self.type == Poll.TYPE_NUMERIC:
        try:
            regex = re.compile(r"(-?\d+(\.\d+)?)")
            #split the text on number regex. if the msg is of form
            #'19'or '19 years' or '19years' or 'age19'or 'ugx34.56shs' it returns a list of length 4
            msg_parts = regex.split(message.text)
            if len(msg_parts) == 4:
                resp.eav.poll_number_value = float(msg_parts[1])
            else:
                # More (or fewer) than one number found: reject.
                resp.has_errors = True
        except IndexError:
            resp.has_errors = True
    elif (self.type == Poll.TYPE_TEXT) or (self.type == Poll.TYPE_REGISTRATION):
        resp.eav.poll_text_value = message.text
        if self.categories:
            # Every category whose rules match gets attached (no break here,
            # unlike reprocess_responses).
            for category in self.categories.all():
                for rule in category.rules.all():
                    regex = re.compile(rule.regex, re.IGNORECASE | re.UNICODE)
                    if regex.search(message.text.lower()):
                        rc = ResponseCategory.objects.create(response=resp, category=category)
                        resp.categories.add(rc)
                        if category.error_category:
                            resp.has_errors = True
                        if category.response:
                            outgoing_message = category.response
    elif self.type in Poll.TYPE_CHOICES:
        # Custom poll type: parse and store per the type definition.
        typedef = Poll.TYPE_CHOICES[self.type]
        try:
            cleaned_value = typedef['parser'](message.text)
            if typedef['db_type'] == Attribute.TYPE_TEXT:
                resp.eav.poll_text_value = cleaned_value
            elif typedef['db_type'] == Attribute.TYPE_FLOAT or \
                    typedef['db_type'] == Attribute.TYPE_INT:
                resp.eav.poll_number_value = cleaned_value
            elif typedef['db_type'] == Attribute.TYPE_OBJECT:
                resp.eav.poll_location_value = cleaned_value
        except ValidationError as e:
            resp.has_errors = True
            # Use the validator's first message as the auto-reply, if any.
            if getattr(e, 'messages', None):
                try:
                    outgoing_message = str(e.messages[0])
                except(UnicodeEncodeError):
                    outgoing_message = e.messages[0]
            else:
                outgoing_message = None
    self.log_poll_message_debug("checking for categorisation...")
    # Uncategorized responses fall into the default category, when one exists.
    if not resp.categories.exists() and self.categories.filter(default=True).exists():
        resp.categories.add(
            ResponseCategory.objects.create(response=resp, category=self.categories.get(default=True)))
        if self.categories.get(default=True).error_category:
            resp.has_errors = True
            outgoing_message = self.categories.get(default=True).response
    # Prefer the highest-priority matched category's response text.
    if not resp.has_errors or not outgoing_message:
        for respcategory in resp.categories.order_by('category__priority'):
            if respcategory.category.response:
                outgoing_message = respcategory.category.response
                break
    self.log_poll_message_debug("Added categories [{}]".format([r.category for r in resp.categories.all()]))
    resp.save()
    if not outgoing_message:
        return resp, None,
    else:
        # Translate the reply into the contact's preferred language.
        if db_message.connection.contact and db_message.connection.contact.language:
            outgoing_message = gettext_db(language=db_message.connection.contact.language, field=outgoing_message)
        return resp, outgoing_message,
def get_start_poll_batch_status(self):
    # "P" (prepared, sending deferred until queue_message_batches_to_send)
    # when the prepare/send feature flag is set; otherwise "Q" (send now).
    if getattr(settings, "FEATURE_PREPARE_SEND_POLL", False):
        return "P"
    else:
        return "Q"
def get_outgoing_message_batch_name(self):
    """Name used for this poll's outgoing MessageBatch objects: 'P<pk>-O'."""
    return "P%d-O" % (self.pk,)
def get_numeric_detailed_data(self):
    # Histogram of numeric answers: (value, count) pairs via the eav Value
    # table, largest value first.
    return Value.objects.filter(attribute__slug='poll_number_value',
                                entity_ct=ContentType.objects.get_for_model(Response),
                                entity_id__in=self.responses.all()).values_list('value_float').annotate(
        Count('value_float')).order_by('-value_float')
def get_numeric_report_data(self, location=None, for_map=None):
    """Summary statistics (sum/count/avg/stddev/max/min) of numeric answers,
    optionally grouped by the child locations of *location*.

    NOTE(review): for_map is accepted but never used here.
    """
    if location:
        q = Value.objects.filter(attribute__slug='poll_number_value',
                                 entity_ct=ContentType.objects.get_for_model(Response),
                                 entity_id__in=self.responses.all())
        # Raw-SQL join against the MPTT location tree (T7 is the ancestor
        # alias).  NOTE(review): when location has exactly one child,
        # str(tuple(...)) yields "(5,)", which is invalid SQL inside "in ...";
        # responses_by_category guards this case but this method does not —
        # confirm against callers.
        q = q.extra(tables=['poll_response', 'rapidsms_contact', 'locations_location', 'locations_location'],
                    where=['poll_response.id = eav_value.entity_id',
                           'rapidsms_contact.id = poll_response.contact_id',
                           'locations_location.id = rapidsms_contact.reporting_location_id',
                           'T7.id in %s' % (str(tuple(location.get_children().values_list('pk', flat=True)))),
                           'T7.lft <= locations_location.lft', \
                           'T7.rght >= locations_location.rght', \
                           ],
                    select={
                        'location_name': 'T7.name',
                        'location_id': 'T7.id',
                        'lft': 'T7.lft',
                        'rght': 'T7.rght',
                    }).values('location_name', 'location_id')
    else:
        q = Value.objects.filter(attribute__slug='poll_number_value',
                                 entity_ct=ContentType.objects.get_for_model(Response),
                                 entity_id__in=self.responses.all()).values('entity_ct')
    q = q.annotate(Sum('value_float'), Count('value_float'), Avg('value_float'), StdDev('value_float'),
                   Max('value_float'), Min('value_float'))
    return q
def responses_by_category(self, location=None, for_map=True):
    """Aggregate response counts per category, plus an 'uncategorized'
    bucket, optionally broken down by the child locations of *location*;
    when for_map is true, each row also carries lat/lon for map plotting.
    Returns a queryset, or a plain list when uncategorized rows are merged in.
    """
    categorized = ResponseCategory.objects.filter(response__poll=self)
    # Responses with no ResponseCategory at all.
    uncategorized = self.responses.exclude(
        pk__in=ResponseCategory.objects.filter(response__poll=self).values_list('response', flat=True))
    uvalues = ['poll__pk']
    if location:
        # Build the raw-SQL location filter; the 0/1-child special cases avoid
        # the invalid one-element tuple literal "(5,)" in SQL.  T9/T7 are the
        # join aliases Django assigns for the categorized/uncategorized queries.
        if location.get_children().count() == 1:
            location_where = 'T9.id = %d' % location.get_children()[0].pk
            ulocation_where = 'T7.id = %d' % location.get_children()[0].pk
        elif location.get_children().count() == 0:
            location_where = 'T9.id = %d' % location.pk
            ulocation_where = 'T7.id = %d' % location.pk
        else:
            location_where = 'T9.id in %s' % (str(tuple(location.get_children().values_list('pk', flat=True))))
            ulocation_where = 'T7.id in %s' % (str(tuple(location.get_children().values_list('pk', flat=True))))
        where_list = [
            'T9.lft <= locations_location.lft',
            'T9.rght >= locations_location.rght',
            location_where,
            'T9.point_id = locations_point.id', ]
        select = {
            'location_name': 'T9.name',
            'location_id': 'T9.id',
            'lat': 'locations_point.latitude',
            'lon': 'locations_point.longitude',
            'rght': 'T9.rght',
            'lft': 'T9.lft',
        }
        tables = ['locations_location', 'locations_point']
        if not for_map:
            # Drop the point join and lat/lon columns when not mapping.
            where_list = where_list[:3]
            select.pop('lat')
            select.pop('lon')
            tables = tables[:1]
        categorized = categorized \
            .values('response__message__connection__contact__reporting_location__name') \
            .extra(tables=tables,
                   where=where_list) \
            .extra(select=select)
        # Mirror of the above for the uncategorized queryset (T7 alias).
        uwhere_list = [
            'T7.lft <= locations_location.lft',
            'T7.rght >= locations_location.rght',
            ulocation_where,
            'T7.point_id = locations_point.id', ]
        uselect = {
            'location_name': 'T7.name',
            'location_id': 'T7.id',
            'lat': 'locations_point.latitude',
            'lon': 'locations_point.longitude',
            'rght': 'T7.rght',
            'lft': 'T7.lft',
        }
        uvalues = ['location_name', 'location_id', 'lat', 'lon']
        utables = ['locations_location', 'locations_point']
        if not for_map:
            uwhere_list = uwhere_list[:3]
            uselect.pop('lat')
            uselect.pop('lon')
            uvalues = uvalues[:2]
            utables = utables[:1]
        uncategorized = uncategorized \
            .values('message__connection__contact__reporting_location__name') \
            .extra(tables=utables,
                   where=uwhere_list) \
            .extra(select=uselect)
        values_list = ['location_name', 'location_id', 'category__name', 'category__color', 'lat', 'lon', ]
        if not for_map:
            values_list = values_list[:4]
    else:
        values_list = ['category__name', 'category__color']
    categorized = categorized.values(*values_list) \
        .annotate(value=Count('pk')) \
        .order_by('category__name')
    uncategorized = uncategorized.values(*uvalues).annotate(value=Count('pk'))
    if location:
        categorized = categorized.extra(order_by=['location_name'])
        uncategorized = uncategorized.extra(order_by=['location_name'])
        if for_map:
            # Normalize coordinates to 5 decimal places as strings.
            for d in uncategorized:
                d['lat'] = '%.5f' % float(d['lat'])
                d['lon'] = '%.5f' % float(d['lon'])
            for d in categorized:
                d['lat'] = '%.5f' % float(d['lat'])
                d['lon'] = '%.5f' % float(d['lon'])
    # Tag uncategorized rows and append them to the categorized results.
    if len(uncategorized):
        uncategorized = list(uncategorized)
        for d in uncategorized:
            d.update({'category__name': 'uncategorized', 'category__color': ''})
        categorized = list(categorized) + uncategorized
    return categorized
def process_uncategorized(self):
    """Run category rules over responses that currently have no category.

    NOTE(review): the body duplicates the per-response loop of
    reprocess_responses(); consider sharing a helper.
    """
    responses = self.responses.filter(categories__category=None)
    for resp in responses:
        resp.has_errors = False
        for category in self.categories.all():
            for rule in category.rules.all():
                regex = re.compile(rule.regex, re.IGNORECASE | re.UNICODE)
                if resp.eav.poll_text_value:
                    # First matching rule wins for this category.
                    if regex.search(resp.eav.poll_text_value.lower()) and not resp.categories.filter(
                            category=category).count():
                        if category.error_category:
                            resp.has_errors = True
                        rc = ResponseCategory.objects.create(response=resp, category=category)
                        break
        # Fall back to the default category when nothing matched.
        if not resp.categories.all().count() and self.categories.filter(default=True).count():
            if self.categories.get(default=True).error_category:
                resp.has_errors = True
            resp.categories.add(
                ResponseCategory.objects.create(response=resp, category=self.categories.get(default=True)))
        resp.save()
def responses_by_age(self, lower_bound_in_years, upper_bound_in_years):
    """Per-category counts for respondents aged between the two bounds.

    Older age maps to an *earlier* birthdate, so the upper age bound becomes
    the earliest acceptable birthdate (gte) and vice versa.
    Returns [[count, category_name], ...].
    """
    lower_bound_date = datetime.datetime.now() - relativedelta(years=lower_bound_in_years)
    upper_bound_date = datetime.datetime.now() - relativedelta(years=upper_bound_in_years)
    category_dicts = ResponseCategory.objects.filter(response__poll=self,
                                                     response__contact__birthdate__gte=upper_bound_date,
                                                     response__contact__birthdate__lte=lower_bound_date).values(
        'category__name').annotate(
        value=Count('pk'))
    return [self._get_formatted_values_for_bar_chart(category_dict) for category_dict in category_dicts]
def responses_by_gender(self, gender):
    """Per-category counts for respondents of the given gender
    (case-insensitive match on the contact's gender field).
    Only supported for yes/no polls.  Returns [[count, category_name], ...].
    """
    assert self.is_yesno_poll()
    values_list = ['category__name']
    category_dicts = ResponseCategory.objects.filter(response__poll=self,
                                                     response__contact__gender__iexact=gender).values(
        *values_list).annotate(value=Count('pk'))
    return [self._get_formatted_values_for_bar_chart(category_dict) for category_dict in category_dicts]
def __unicode__(self):
    # "<name> <first 18 chars of question> ...(<start date or 'Not Started'>)"
    if self.start_date:
        sd = self.start_date.date()
    else:
        sd = "Not Started"
    return "%s %s ...(%s)" % (self.name, self.question[0:18], sd)
def _get_formatted_values_for_bar_chart(self, category_dict):
    # [count, category name] — the ordering the bar-chart front end expects.
    return [category_dict['value'], category_dict['category__name']]
class Category(models.Model):
    """
    A category is a 'bucket' that an incoming poll response is placed into.
    Categories have rules, which are regular expressions that a message must
    satisfy to belong to a particular category (otherwise a response will have
    None for its category). FIXME does this make sense, or should all polls
    have a default 'unknown' category?
    """
    name = models.CharField(max_length=50)
    poll = models.ForeignKey(Poll, related_name='categories')
    # Lower priority wins when choosing the auto-reply (see process_response).
    priority = models.PositiveSmallIntegerField(null=True)
    # Hex color (no '#') used when plotting this category.
    color = models.CharField(max_length=6)
    # At most one category per poll should be the default (enforced in save()).
    default = models.BooleanField(default=False)
    # Optional auto-reply text sent when a response lands in this category.
    response = models.CharField(max_length=160, null=True)
    # Responses in an error category get has_errors=True.
    error_category = models.BooleanField(default=False)

    class Meta:
        ordering = ['name']

    @classmethod
    def clear_defaults(cls, poll):
        # Unset the default flag on every category of the given poll.
        for c in Category.objects.filter(poll=poll, default=True):
            c.default = False
            c.save()

    def __unicode__(self):
        return u'%s' % self.name

    def save(self, force_insert=False, force_update=False, using=None):
        # Keep 'default' unique per poll: demote any other default category.
        if self.default and self.poll.categories.exclude(pk=self.pk).filter(default=True).exists():
            self.poll.categories.exclude(pk=self.pk).filter(default=True).update(default=False)
        super(Category, self).save()
class Response(models.Model):
    """
    Responses tie incoming messages from poll participants to a particular
    bucket that their response is associated with. Web users may also be
    able to override a particular response as belonging to a particular
    category, which shouldn't be overridden by new rules.
    """
    message = models.ForeignKey(Message, null=True, related_name='poll_responses')
    poll = models.ForeignKey(Poll, related_name='responses')
    contact = models.ForeignKey(Contact, null=True, blank=True, related_name='responses')
    date = models.DateTimeField(auto_now_add=True)
    # True when parsing/validation failed or an error category matched.
    has_errors = models.BooleanField(default=False)

    def update_categories(self, categories, user):
        """Manually set this response's categories to exactly *categories*,
        recording additions as user overrides and removing the rest."""
        for c in categories:
            if not self.categories.filter(category=c).count():
                ResponseCategory.objects.create(response=self, category=c, is_override=True, user=user)
        for rc in self.categories.all():
            if not rc.category in categories:
                rc.delete()
register(Response)
class Rule(models.Model):
    """
    A rule is a regular expression that an incoming message text might
    satisfy to belong in a particular category. A message must satisfy
    one or more rules to belong to a category.
    """
    # 'rule' values: match when the text contains ALL / at least ONE of the
    # comma-separated words in rule_string (see get_regex()).
    contains_all_of = 1
    contains_one_of = 2
    TYPE_STARTSWITH = 'sw'
    TYPE_CONTAINS = 'c'
    TYPE_REGEX = 'r'
    RULE_CHOICES = (
        (TYPE_STARTSWITH, _('Starts With')),
        (TYPE_CONTAINS, _('Contains')),
        (TYPE_REGEX, _('Regex (advanced)')))
    RULE_DICTIONARY = {
        TYPE_STARTSWITH: _('Starts With'),
        TYPE_CONTAINS: _('Contains'),
        TYPE_REGEX: _('Regex (advanced)'),
    }
    # The compiled-from pattern actually used for matching.
    regex = models.CharField(max_length=256)
    category = models.ForeignKey(Category, related_name='rules')
    rule_type = models.CharField(max_length=2, choices=RULE_CHOICES)
    # Source text the regex is generated from (comma-separated words).
    rule_string = models.CharField(max_length=256, null=True)
    rule = models.IntegerField(choices=((contains_all_of, _("contains_all_of")), (contains_one_of, _("contains_one_of")),),
                               null=True)

    def get_regex(self):
        """
        create a regular expression from the input
        """
        # Each comma-separated word is regex-escaped and matched on word
        # boundaries.
        words = self.rule_string.split(',')
        if self.rule == 1:
            # AND semantics: one zero-width lookahead per word.
            all_template = r"(?=.*\b%s\b)"
            w_regex = r""
            for word in words:
                if len(word):
                    w_regex = w_regex + all_template % re.escape(word.strip())
            return w_regex
        elif self.rule == 2:
            # OR semantics: alternation of the words.
            one_template = r"(\b%s\b)"
            w_regex = r""
            for word in words:
                if len(w_regex):
                    if len(word):
                        w_regex = w_regex + r"|" + one_template % re.escape(word.strip())
                else:
                    if len(word):
                        w_regex += one_template % re.escape(word.strip())
            return w_regex

    def save(self, *args, **kwargs):
        # Regenerate the stored regex from rule/rule_string before saving.
        if self.rule:
            self.regex = self.get_regex()
        super(Rule, self).save()

    @property
    def rule_type_friendly(self):
        # Human-readable label for rule_type.
        return Rule.RULE_DICTIONARY[self.rule_type]

    def update_regex(self):
        # Rebuild regex from the rule_type templates.  NOTE(review):
        # STARTSWITH_PATTERN_TEMPLATE / CONTAINS_PATTERN_TEMPLATE are
        # module-level constants defined outside this chunk.
        if self.rule_type == Rule.TYPE_STARTSWITH:
            self.regex = STARTSWITH_PATTERN_TEMPLATE % self.rule_string
        elif self.rule_type == Rule.TYPE_CONTAINS:
            self.regex = CONTAINS_PATTERN_TEMPLATE % self.rule_string
        elif self.rule_type == Rule.TYPE_REGEX:
            self.regex = self.rule_string
class Translation(models.Model):
    """Database-backed translation of a message string, keyed by the source
    text and a language code; gettext_db() consults this table before falling
    back to the gettext catalogs."""
    field = models.TextField(db_index=True)
    language = models.CharField(max_length=5, db_index=True,
                                choices=settings.LANGUAGES)
    value = models.TextField(blank=True)

    def __unicode__(self):
        return u'%s: %s' % (self.language, self.value)

    class Meta:
        unique_together = ('field', 'language')
def gettext_db(field, language):
    """Translate *field* into *language*.

    Prefer a Translation row from the database; fall back to the gettext
    catalog, activating the target language just for the lookup.
    """
    # Single sliced query instead of the original exists() + filter()[0]
    # pair, which hit the database twice.
    matches = Translation.objects.filter(field=field, language=language)[:1]
    if matches:
        return matches[0].value
    activate(language)
    lang_str = ugettext(field)
    deactivate()
    return lang_str
@task
def send_messages_to_contacts(poll):
    """Task: queue the poll question to every contact, translated into each
    contact's language (contacts with no language preference get English)."""
    contacts = poll.contacts
    localized_messages = {}
    for language in dict(settings.LANGUAGES).keys():
        if language == "en":
            """default to English for contacts with no language preference"""
            localized_contacts = contacts.filter(language__in=["en", ''])
        else:
            localized_contacts = contacts.filter(language=language)
        if localized_contacts.exists():
            # One message per distinct connection, queued immediately.
            messages = Message.mass_text(gettext_db(field=poll.question, language=language),
                                         Connection.objects.filter(contact__in=localized_contacts).distinct(),
                                         status='Q', batch_status='Q')
            #localized_messages[language] = [messages, localized_contacts]
            poll.messages.add(*messages.values_list('pk', flat=True))
| [
"ceduth@techoutlooks.com"
] | ceduth@techoutlooks.com |
ea4e54457be0f72de8668ce11a8e70019dea5dd3 | fb0304f76ab12de9b42e9c09f500b2d5074168b4 | /0217-ContainsDuplicate/solution.py | 50f22c79a0686c2afc6de8c6a7bf208f63ea9704 | [] | no_license | abhijeetseal/leetcode-blind-curated-75 | 8d0eb5ecad1abf9fa214b52041de856a4bd55712 | f9a46114cda6702d630cddf22622aae2f94b98d2 | refs/heads/main | 2023-08-07T02:41:01.461121 | 2021-03-02T14:00:27 | 2021-03-02T16:46:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | class Solution:
def contains_duplicate(self, nums: list[int]) -> bool:
s = set()
for n in nums:
if n in s:
return True
else:
s.add(n)
return False
if __name__ == "__main__":
    # Manual entry point: construct the solver for interactive use.
    s = Solution()
| [
"david215@berkeley.edu"
] | david215@berkeley.edu |
37dd1d148c53e4c31530bab413588754beaf49da | c367549d3d38758df81da42ce09e16ecdf44bab6 | /MWA/code/dipole_checker.py | d0b6be51f70e9cd002f6192ad1ee9a55dfa47217 | [] | no_license | bhazelton/random_stuff | 31d8dbcbd31f65229a433c379ecf2a30681c7879 | 5b1371fea6e342526e0c536779925fa761855bd1 | refs/heads/master | 2023-08-16T19:22:44.312996 | 2023-08-14T22:03:48 | 2023-08-14T22:03:48 | 7,611,769 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60,464 | py | import struct
import math
import os
import sys
import getopt
# define global variables
global CH_MIN
global CH_MAX
global LOUD
def help_message():
    """Print usage information for dipole_checker.py and exit with status 0.

    NOTE(review): the original packed the entire help text into one enormous
    single-quoted literal that was line-wrap-garbled in transit; it is
    reconstructed here as a maintainable triple-quoted string with the same
    option descriptions.
    """
    stars = '****************************************************************'
    print('\n%s' % (stars))
    print(""" Usage: dipole_checker.py evaluates the data stored in *_avg files to determine which tiles and polarizations have bad dipoles. It both outputs results to screen and stores the results in a file in the data directory.

 Mandatory argument:
 -t YYYYMMDD.(decimal_day): The starting time for the scan. The first file from each dipole and receiver directly after this time will be evaluated. (Specify receivers and dipoles by the -r and -d commands; see below)

 Optional options:
 -h: prints this help message
 -f file1 file2, etc: analyze the data in the filenames listed after this flag. Only include the file names here: paths may be specified with the -p flag, if necessary (see below).
   Special options:
   -f 0: prompt the user for file names
   -f 1: use all files in the specified data directory (This is the default) (see -p below for info on setting the data directory)
   Caveat: If either the '-r' or '-d' flag is used, then potentially only a subset of the files listed will be used.
 -r recv: only analyze data taken by receiver number recv (default: use all receivers)
 -T Tile: only output data for the tile Tile
 -d dip: only analyze data taken with dipole dip (default: use all dipoles)
 -y delay: only analyze data taken with this delay. If y==-1, then use all delays (default: use only delay 0)
 -p pathname: analyze data files located in directory pathname (default: current working directory)
 -o outpathname: final results are written to a file located in outpathname (default: current working directory)
 -i boolean: If boolean equals 1, then only consider the first file for each receiver/dipole pair after the start time (set by -t). If boolean equals 0, then consider all times. (Default: 1).
 -X : Sets the expedition number to xnum. (This is necessary for certain naming conventions). (Default: 12; ie, X12, the Feb 2010 site trip).
 -l LOUD: LOUD==0 means don't print status messages to screen. LOUD==1 means print status messages to screen
 -n name: name of the ascii file containing the saved results. (Default: bad_dipoles_dipX_recY.txt, where 'X' and 'Y' are the names of the dipoles and receivers to be considered (see the -d and -r flags), or the word 'All' if none are specified. Only include the file name here: use the -o flag to set the output directory).
 -N: Don't save the results (good for debugging, results will still be printed to screen)

Error codes:
L#: Gain for dipole is lower than typical by # decibals.
H#: Gain for dipole is higher than typical by # decibals.
D: Dipole appears to be dead (ie, no signal).
F#: Data for dipole is garbage; data appears to be a flat noise floor at # decibals.
R #1 #2: Data has bad RFI spikes. Worst spike is at frequency #1 and is a #2 decibal spike above typical.
E#: Data is fit too well at the highest frequencies by a parabola and/or 'effective potential'. # is related to the rms of the fit. (This is sometimes caused because the spectrum is too flat).
U: Error is detected, but it doesn't fit into any of the above categories.
""")
    print('%s\n' % (stars))
    sys.exit(0)
def fit_line(d, nmpts, get_xs):
    """Least-squares fit of y = m*x + b to the nmpts values in d.

    get_xs maps an element index (0..nmpts-1) to its x value.
    Returns the tuple (m, b).
    """
    # Accumulate the sums needed for the closed-form normal equations.
    sum_xy = sum_y = sum_xx = sum_x = 0.0
    n = float(nmpts)
    for idx in range(nmpts):
        x = get_xs(idx)
        y = d[idx]
        sum_xy += y * x
        sum_y += y
        sum_xx += x * x
        sum_x += x
    # Closed-form chi-squared solution for slope and intercept.
    det = (n * sum_xx - sum_x * sum_x)
    slope = (n * sum_xy - sum_x * sum_y) / det
    intercept = (sum_xx * sum_y - sum_x * sum_xy) / det
    return (slope, intercept)
def fit_line_with_gaps(d, nmpts, indices2use, get_xs):
    """Least-squares fit of y = m*x + b using only nmpts entries of d.

    d may be longer than nmpts; only the elements whose indices appear in
    indices2use (length nmpts) participate.  get_xs maps an index into d to
    its x value.  Returns the tuple (m, b).
    """
    sum_xy = sum_y = sum_xx = sum_x = 0.0
    n = float(nmpts)
    for j in range(nmpts):
        k = indices2use[j]
        x = get_xs(k)
        y = d[k]
        sum_xy += y * x
        sum_y += y
        sum_xx += x * x
        sum_x += x
    # Closed-form chi-squared solution for slope and intercept.
    det = (n * sum_xx - sum_x * sum_x)
    m = (n * sum_xy - sum_x * sum_y) / det
    b = (sum_xx * sum_y - sum_x * sum_xy) / det
    return (m, b)
def fit_effpot_with_gaps( d, nmpts, indices2use, get_xs, low_cut ): # Fit the coefficients of an 'effective potential' curve (a/f-b/f^2+c). The input d is a vector of data with length greater than nmpts. Only nmpts of the elements of d are used in the fit, those indices corresponding to the values held in indices2use (a vector of ints of length nmpts).
    # Thin wrapper: selects the 1/(f-f0) basis for fit_effpot_or_parab().
    return fit_effpot_or_parab( d, nmpts, indices2use, get_xs, low_cut, eff_pot_func )
def fit_parab_with_gaps( d, nmpts, indices2use, get_xs, low_cut ): # Fit for a parabola (a*f-b*f^2+c). The input d is a vector of data with length greater than nmpts. Only nmpts of the elements of d are used in the fit, those indices corresponding to the values held in indices2use (a vector of ints of length nmpts).
    # Thin wrapper: selects the linear-frequency basis for fit_effpot_or_parab().
    return fit_effpot_or_parab( d, nmpts, indices2use, get_xs, low_cut, parab_func )
def fit_effpot_or_parab( d, nmpts, indices2use, get_xs, low_cut, func ): #option 0: fit effective potential, option 1: fit parabola
    # Chi-squared fit of d to a*x + b*x^2 + c, where x = func(freq, freq0)
    # is either 1/(f-f0) (effective potential) or f (parabola).  Only the
    # nmpts entries of d listed in indices2use are used.  Returns
    # [a, b, c, rms] where rms is the root-mean-square residual of the fit.
    freq0 = chan2freq( low_cut-1 ) # We need to avoid 1/0 in the below...
    #print freq0, low_cut
    # Calculate the necessary sums.
    # Fk = sum of x^k, dk = sum of data * x^k.
    F1 = F2 = F3 = F4 = d0 = d1 = d2 = 0.0
    F0 = float(nmpts);
    for i in range(0,nmpts):
        index = indices2use[i]
        f_temp = get_xs(index) # get the frequency
        x_temp = func(f_temp,freq0)
        data = d[index]
        x2_temp = x_temp*x_temp
        #print 'index = ', index, 'x = ', x_temp, 'x2 = ', x2_temp, 'data = ', d[index], 'freq0 = ', freq0, 'f = ', get_xs(index)
        F1 += x_temp
        F2 += x2_temp
        F3 += x_temp*x2_temp
        F4 += x2_temp*x2_temp
        d0 += data
        d1 += data*x_temp
        d2 += data*x2_temp
    # Calculate the fit parameters using the analytic solutions to the chi-sq fit
    # NOTE(review): leftover debug print gated on low_cut==99 (Python 2 syntax).
    if( low_cut==99 ): print d0, d1, d2, d[0:3]
    # Intermediate determinants for the 3x3 normal-equation solution.
    G1 = (F3*F3-F4*F2)
    G2 = (F1*F3-F2*F2)
    G3 = (d2*F2-d1*F3)
    G4 = (F1*F3-F2*F2)
    G5 = (F1*F1-F0*F2)
    G6 = (d0*F2-d1*F1)
    H1 = (F2*G1)
    H2 = (F1*G1-G2*F3)
    H3 = (-G3*F3-d1*G1)
    H4 = (G5*G1-G2*G4)
    H5 = (G6*G1-G3*G4)
    a = (H5*H2-H3*H4)/(H1*H4)
    b = (H5*G2-G3*H4)/(G1*H4)
    c = -1.0*(H5)/(H4)
    # Calculate the rms error
    rms = 0.0
    for i in range(0,nmpts):
        index = indices2use[i]
        f_temp = get_xs(index) # get the frequency
        x_temp = func(f_temp,freq0)
        x2_temp = x_temp*x_temp
        data = d[index]
        err = data - (a*x_temp + b*x2_temp+c)
        rms += err*err
    rms = math.sqrt( rms/float(nmpts) )
    return [ a, b, c, rms ]
def eff_pot_func(freq, freq0):
    """Basis function 1/(f - f0) for the 'effective potential' fit."""
    delta = freq - freq0
    return 1.0 / delta
def parab_func(freq, freq0):
    """Basis function for the parabola fit: simply the frequency itself."""
    del freq0  # unused; kept for interface parity with eff_pot_func
    return freq
def sub_line(d, nmpts, m, b, get_xs):
    """Subtract the line y = m*x + b from the first nmpts entries of d, in place.

    get_xs maps an element index (0..nmpts-1) to its x value.  Returns d.
    """
    for idx in range(nmpts):
        d[idx] -= m * get_xs(idx) + b
    return d
def chan2freq(chn):
    """Convert a channel number (0-255) to its minimum frequency in MHz."""
    return float(chn) * 1.28
def avg_data(d, nmpts, power):
    """Return the mean of d[i]**power over the first nmpts elements.

    E.g. power=2 gives <x^2>.
    """
    total = sum(math.pow(val, power) for val in d[:nmpts])
    return total / float(nmpts)
def chan2freq_shift( chn ): # Same as above, but the channel number is shifted by CH_MIN from the input chn
    # Relies on the module-level global CH_MIN (set elsewhere in the script).
    return 1.28*float(chn + CH_MIN)
def freqrange2chns(freq_min, freq_max):
    """Return (ch_min, ch_max): the channel numbers whose frequencies lie
    within [freq_min, freq_max] MHz (channel width is 1.28 MHz)."""
    lo = int(math.ceil(freq_min / 1.28))
    hi = int(math.floor(freq_max / 1.28))
    return (lo, hi)
def pol2XY(pol):
    """Map polarization index 0 to 'X'; anything else to 'Y'."""
    return 'X' if pol == 0 else 'Y'
def closestchan2freq(freq):
    """Return the channel number whose frequency is nearest to freq (MHz).

    Ties go to the upper (ceiling) channel.
    """
    upper = int(math.ceil(freq / 1.28))
    lower = int(math.floor(freq / 1.28))
    upper_err = math.fabs(chan2freq(upper) - freq)
    lower_err = math.fabs(chan2freq(lower) - freq)
    if upper_err > lower_err:
        return int(lower)
    return int(upper)
def slot2tile(slot_or_tile, recv, Xnum, mode):
    """Convert between receiver-slot numbering and tile numbering.

    mode=0: slot_or_tile is a slot number; returns the tile number for that
            slot on receiver recv (layout is expedition dependent, hence Xnum).
    mode=1: slot_or_tile is a tile number; returns [slot, receiver].
    Any other mode falls through and returns None.
    """
    if mode == 0:
        return int((slot_or_tile - 1) * 4 + recv)
    if mode == 1:
        rx_num = int(slot_or_tile) % 4
        if rx_num == 0:
            rx_num = 4
        # BUG FIX: use floor division so the slot stays an int under Python 3
        # as well as Python 2 (plain '/' yields a float on Python 3).
        slot = (int(slot_or_tile) - rx_num) // 4 + 1
        return [slot, rx_num]
def cutfiles(start_time, end_time, recv2use, dip2use, delays2use, times, dips, recvs, delays, filenames):
    """Filter the parallel per-file lists down to files inside the time window
    that also match the requested receiver, dipole, and delay (-1 means "any").

    Returns the filtered [times, dips, recvs, delays, filenames].
    """
    num_files = len(filenames)
    new_filenames = []
    new_dips = []
    new_recvs = []
    new_delays = []
    new_times = []
    # Cycle through and keep only files matching every selection criterion.
    new_num_files = 0
    for i in range(num_files):
        time = times[i]
        receiver = recvs[i]
        dipole = dips[i]
        delay = delays[i]
        # BUG FIX: the original tested the undefined name 'dips2use' here
        # (a NameError unless an identically named global happened to exist);
        # the parameter is 'dip2use'.
        if (time_good(start_time, end_time, time) and recv_good(recv2use, receiver)
                and dip_good(dip2use, dipole) and delay_good(delays2use, delay)):
            new_num_files = new_num_files + 1
            new_filenames.append(filenames[i])
            new_dips.append(dips[i])
            new_recvs.append(recvs[i])
            new_delays.append(delays[i])
            new_times.append(times[i])
    return [new_times, new_dips, new_recvs, new_delays, new_filenames]
def time_good(start_time, end_time, time):
    """Return 1 when time lies within [start_time, end_time], else 0."""
    return 1 if start_time <= time <= end_time else 0
def recv_good(recv2use, receiver):
    """Return 1 when this receiver should be used (-1 selects all), else 0."""
    return 1 if recv2use in (-1, receiver) else 0
def dip_good(dip2use, dipole):
    """Return 1 when this dipole should be used (-1 selects all), else 0."""
    return 1 if dip2use in (-1, dipole) else 0
def delay_good(delays2use, delay):
    """Return 1 when this delay should be used (-1 selects all), else 0."""
    return 1 if delays2use in (-1, delay) else 0
def isolate_one_set(num_dips, num_recvs, num_delays, times, dips, recvs, delays, filenames):
    """Given parallel lists pre-sorted by (receiver, dipole, delay, time),
    keep only the earliest-created file for each (receiver, dipole, delay)
    combination.  Returns [times, dips, recvs, delays, filenames]."""
    kept_times = []
    kept_dips = []
    kept_recvs = []
    kept_delays = []
    kept_files = []
    # seen[recv-1][dip-1][delay-1] flips to 1 once that combination is kept.
    seen = [[[0] * num_delays for _ in range(num_dips)] for _ in range(num_recvs)]
    for i in range(len(filenames)):
        rx = recvs[i]
        dp = dips[i]
        dl = delays[i]
        if seen[rx - 1][dp - 1][dl - 1] == 0:
            seen[rx - 1][dp - 1][dl - 1] = 1
            kept_files.append(filenames[i])
            kept_dips.append(dips[i])
            kept_recvs.append(recvs[i])
            kept_delays.append(delays[i])
            kept_times.append(times[i])
    return [kept_times, kept_dips, kept_recvs, kept_delays, kept_files]
def slow_sort( times, dips, recvs, delays, filenames ): # Sorting algorithm: not the most graceful bit of code (hence, the 'slow'). No files with time stamps earlier than start_time are considered; these are all left out of the final solution.
    # Selection sort over the parallel lists, ordering by (receiver, dipole,
    # delay, time) ascending.  O(n^2); returns the reordered lists.
    global LOUD
    num_files = len(filenames)
    new_filenames = []
    new_dips = []
    new_recvs = []
    new_delays = []
    new_times = []
    already_used = [ 0 for i in range(num_files) ]
    too_early = [ 0 for i in range(num_files) ]
    # Track the overall time span purely for the status message below.
    universal_min_time = 9.99e99
    universal_max_time = -1.0
    for i in range(num_files):
        time = times[i]
        if( universal_min_time > time ): universal_min_time = time
        if( universal_max_time < time ): universal_max_time = time
    # One pass per output slot: find the smallest remaining entry.
    new_min_time = 9.99e99
    new_min_dip = 9999
    new_min_recv = 9999
    new_min_delay = 9999
    min_spot = -1
    min_spot_old = -1
    for j in range(num_files):
        if( min_spot_old != min_spot ): # new spot was found last cycle, update the minimum values.
            new_min_time = times[min_spot]
            new_min_dip = dips[min_spot]
            new_min_recv = recvs[min_spot]
            new_min_delay = delays[min_spot]
            min_spot_old = min_spot
        # Lexicographic comparison: receiver, then dipole, then delay, then time.
        if( already_used[j]==0 ):
            time = times[j]
            dip = int(dips[j])
            recv = recvs[j]
            delay = delays[j]
            if( recv < new_min_recv ):
                min_spot = j
            elif( recv == new_min_recv ):
                if( dip < new_min_dip ):
                    min_spot = j
                elif( dip == new_min_dip ):
                    if( delay < new_min_delay ):
                        min_spot = j
                    elif( delay == new_min_delay ):
                        if( time < new_min_time ):
                            min_spot = j
        # NOTE(review): min_spot of -1 on the first comparison would index the
        # last element; presumably the sentinel minima (9999) guarantee an
        # earlier assignment — confirm for empty/degenerate inputs.
        already_used[min_spot] = 1
        new_filenames.append(filenames[min_spot])
        new_dips.append(dips[min_spot])
        new_recvs.append(recvs[min_spot])
        new_delays.append(delays[min_spot])
        new_times.append(times[min_spot])
    if LOUD: print 'slow_sort: min time = %14.5f, max time = %14.5f' % (universal_min_time, universal_max_time)
    return [ new_times, new_dips, new_recvs, new_delays, new_filenames ]
def valid_file(filename):
    """Return 1 when filename looks like a dipole average file (contains
    'Dipole', '_Rx', and '_avg'), else 0."""
    required = ('Dipole', '_Rx', '_avg')
    if all(tag in filename for tag in required):
        return 1
    return 0
def calc_avg_std( num_ref_freqs, num_pols, num_z, fixed_quant, option, data, bad_plots, use_mods ): # Calc the avg and std dev of the data located in data
    """Compute per-(pol, freq) averages and standard deviations of *data*,
    skipping entries flagged non-zero in *bad_plots*.

    option==0 --> file number is fixed (= fixed_quant), average over slots
    option==1 --> slot number is fixed (= fixed_quant), average over files

    When use_mods is true and more than two good points exist, the
    'modified' statistics are returned instead: the avg/std of the set with
    the two points left out that minimize the standard deviation.

    Returns [ avg_val, std_val ], each indexed as [pol][freq].
    NOTE(review): reads module-level LOUD/prog_name and calls pol2XY();
    presumably defined elsewhere in this file.
    """
    # option==0 --> file number is fixed, average over slots
    # option==1 --> slot number is fixed, average over files
    avg_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
    std_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
    #print 'calc_avg num_pols = %d' % (num_pols)
    # Cycle through reference frequencies
    for i in range(0,num_ref_freqs):
        for pol in range(0,num_pols):
            # Calculate the avg values for each polarizations
            tot = tot2 = 0.0 # tot = sum total, tot2 = sum of square values
            nmpts = 0
            for z in range(0,num_z): # cycle through zs
                # bad_plots indexing depends on which quantity is fixed
                if( option==0 ):
                    bad_val = bad_plots[z][pol][fixed_quant]
                else:
                    bad_val = bad_plots[fixed_quant][pol][z]
                #if (pol==1): print 'pol = 1, fixed = %d, z = %d, bad_val = %d' % (fixed_quant, z, bad_val)
                if( bad_val==0 ): # only accumulate points not flagged bad
                    temp = data[z][pol][i]
                    tot += temp
                    tot2 += temp*temp
                    nmpts = nmpts+1
            #print 'not quite there i = %d, pol = %d, nmpts = %d' % (i, pol, nmpts)
            if( nmpts!=0 ):
                #print 'here! i = %d, pol = %d, nmpts = %d' % (i, pol, nmpts)
                N = float(nmpts)
                avg = tot/N
                std = math.sqrt( (tot2/N - avg*avg) )
                if( use_mods and nmpts>2 ):
                    # Calculate the 'modified' averages and standard deviations. These are the average and standard deviation of the set that includes all the data points, except for two data points. The two data points excluded are those that lead to the lowest standard deviation when left out.
                    min_std = 999e99
                    # Try every pair (j,k) of good points and leave them out.
                    for j in range(0,num_z):
                        if( option==0 ):
                            bad_val = bad_plots[j][pol][fixed_quant]
                        else:
                            bad_val = bad_plots[fixed_quant][pol][j]
                        if( bad_val==0 ):
                            for k in range(j+1,num_z):
                                if( option==0 ):
                                    bad_val = bad_plots[k][pol][fixed_quant]
                                else:
                                    bad_val = bad_plots[fixed_quant][pol][k]
                                if( bad_val==0 ):
                                    # Remove the two candidate points from the running sums
                                    temp1 = data[j][pol][i]
                                    temp2 = data[k][pol][i]
                                    temp_val1 = tot - temp1 - temp2
                                    temp_val2 = tot2 - temp1*temp1 - temp2*temp2
                                    temp_val1 /= float(N-2)
                                    #print temp_val2, temp_val1, N
                                    #print temp_val2/float(N-2), temp_val1*temp_val1
                                    # fabs guards against tiny negative values from rounding
                                    temp_val2 = math.sqrt( math.fabs( (temp_val2/float(N-2) - temp_val1*temp_val1) ) )
                                    if( temp_val2 < min_std ):
                                        min_std = temp_val2
                                        mod_avg = temp_val1
                                        mod_std = temp_val2
                                        left_out1 = j+1
                                        left_out2 = k+1
                                    #if LOUD: print 'looking at (%d,%d) for pol %s, avg = %2.3f, std = %2.3f' % (j+1,k+1,pol2XY(pol),temp_val1, temp_val2)
                    avg_val[pol][i] = mod_avg
                    std_val[pol][i] = mod_std
                    if LOUD: print '%s: Modified vals calc\'d: freq = %d, pol = %s, left out zs (%d,%d), avg = %2.3f, std = %2.3f, mod avg = %2.3f, mod std = %2.3f' % (prog_name, i, pol2XY(pol), left_out1, left_out2, avg, std, avg_val[pol][i], std_val[pol][i])
                else:
                    avg_val[pol][i] = avg
                    std_val[pol][i] = math.sqrt( (tot2/N - avg*avg) ) # this is a stddev of the data set, not an unbiased estimate of a sqrt variance (ie, divided by sqrt(N), not sqrt(N-1) )
                    if LOUD: print '%s: Non-modified vals calc\'d: freq = %d, pol = %s, nmpts = %d, avg = %2.3f, std = %2.3f' % (prog_name, i, pol2XY(pol), nmpts, avg_val[pol][i], std_val[pol][i])
    return [ avg_val, std_val ]
def calc_avg_std2( slots, pols, freqs, files, allpows, bad_plots, use_mods ): # Calc the avg and std dev of the data located in data
num_slots = len(slots)
num_pols = len(pols)
num_ref_freqs = len(freqs)
num_files = len(files)
avg_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
std_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
if( num_files==1 ):
option = 0
num_zs = num_slots
elif( num_slots==1 ):
option = 1
num_zs = num_files
else:
print 'ERROR: Input to calc_std_avg() must have either len(slots)==1 or len(files)==1. Aborting...'
for i, fitem in enumerate(freqs): # Cycle through reference frequencies
for pol, pitem in enumerate(pols): # Cycle through polarizations
# First, determine how many dipoles are still potentially good. Calculate the average value and value squared of these good dipoles.
nmpts = 0
tot = tot2 = 0.0
for z in range(0,num_zs): # cycle through slots or files
[slot,filenum] = find_slot_filenum( option, z, slots, files )
if( bad_plots[slot][pol][filenum]==0 ):
temp = allpows[slot][pol][i][filenum]
tot += temp
tot2 += temp*temp
nmpts = nmpts+1
if( nmpts!=0 ):
N = float(nmpts)
avg = tot/N
std = math.sqrt( math.fabs( (tot2/N - avg*avg) ) )
if( use_mods and nmpts>2 ):
# Calculate the 'modified' averages and standard deviations. These are the average and standard deviation of the set that includes all the data points, except for two data points. The two data points excluded are those that lead to the lowest standard deviation when left out.
min_std = 999e99
for j in range(0,num_zs): # cycle through slots or files
[slot1,filenum1] = find_slot_filenum( option, j, slots, files )
#print 'j: slot, file, num_zs', slot1, filenum1, bad_plots[slot1][pol][filenum1], num_zs
if( bad_plots[slot1][pol][filenum1]==0 ):
for k in range(j+1,num_zs):
[slot2,filenum2] = find_slot_filenum( option, k, slots, files )
#print 'k: slot, file, bad', slot2, filenum2, bad_plots[slot2][pol][filenum2]
if( bad_plots[slot2][pol][filenum2]==0 ):
temp1 = allpows[slot1][pol][i][filenum1]
temp2 = allpows[slot2][pol][i][filenum2]
temp_val1 = tot - temp1 - temp2
temp_val2 = tot2 - temp1*temp1 - temp2*temp2
temp_val1 /= float(N-2)
#print temp_val2, temp_val1, N
#print temp_val2/float(N-2), temp_val1*temp_val1
temp_val2 = math.sqrt( math.fabs( (temp_val2/float(N-2) - temp_val1*temp_val1) ) )
if( temp_val2 < min_std ):
min_std = temp_val2
mod_avg = temp_val1
mod_std = temp_val2
if( option==0 ):
left_out1 = slot1+1
left_out2 = slot2+1
else:
left_out1 = filenum1+1
left_out2 = filenum2+1
#if LOUD: print 'looking at (%d,%d) for pol %s, avg = %2.3f, std = %2.3f' % (j+1,k+1,pol2XY(pol),temp_val1, temp_val2)
avg_val[pol][i] = mod_avg
std_val[pol][i] = mod_std
if LOUD: print '%s: Modified vals calc\'d: freq = %d, pol = %s, left out zs (%d,%d), avg = %2.3f, std = %2.3f, mod avg = %2.3f, mod std = %2.3f' % (prog_name, i, pol2XY(pol), left_out1, left_out2, avg, std, avg_val[pol][i], std_val[pol][i])
else:
avg_val[pol][i] = avg
std_val[pol][i] = std # this is a stddev of the data set, not an unbiased estimate of a sqrt variance (ie, divided by sqrt(N), not sqrt(N-1) )
if LOUD: print '%s: Non-modified vals calc\'d: freq = %d, pol = %s, nmpts = %d, avg = %2.3f, std = %2.3f' % (prog_name, i, pol2XY(pol), nmpts, avg_val[pol][i], std_val[pol][i])
return [ avg_val, std_val ]
def find_slot_filenum( option, z, slots, files ):
    """Map loop index *z* onto a [ slot, filenum ] pair.

    option 0: a single file, many slots -> slot varies with z, file fixed.
    option 1: a single slot, many files -> file varies with z, slot fixed.
    Any other option value prints an error and aborts the program.
    """
    if option == 0:
        return [ slots[z], files[0] ]
    if option == 1:
        return [ slots[0], files[z] ]
    print('ERROR: invalid input to find_slot_filenum. option must be 0 or 1. Aborting...')
    sys.exit(1)
if __name__ == '__main__':
global CH_MIN
global CH_MAX
global LOUD
# Set important variables
prog_name = "dipole_checker.py"
num_chs = 256 # total number of coarse channels
spr = 8 # slots per receiver
num_pols = 2 # number of polarizations, X and Y
num_dips = 16 # number of dipoles per tile
num_rxs = 4 # number of receivers
num_delays = 32 # max possible number of delay lines
num_tiles = num_rxs*spr
compare_option = 1
print '\n%s: Commencing program\n' % (prog_name)
# Set the defaults
LOUD = 0
dipole = -1
receiver = -1
get_files_from_dir = 1
prompt_user = 0
one_set = 1
Xnum = 12
use_r = use_d = use_p = use_f = use_o = use_n = use_t = use_y = nowrite = use_T = 0
# Read in the command line arguments, if any
try:
options, extra_args = getopt.getopt(sys.argv[1:], 'hf:r:d:l:p:o:n:t:i:y:X:NT:')
except getopt.error:
if LOUD: print '%s: ERROR: Unknown commandline argument entered. Printing function usage and then exiting...\n' % (prog_name)
help_message()
sys.exit(0)
for input_opt, arg in options[:]:
if input_opt == '-h':
help_message()
elif input_opt == '-t':
use_t = 1
start_time = float(arg)
elif input_opt == '-f':
use_f = 1
if( arg == '0' ):
if LOUD: print '%s: As per specified on the command line via -f 0, the user will be prompted for files.' % (prog_name)
prompt_user = 1
elif( arg=='1' ):
if LOUD: print '%s: As per specified on the command line via -f 1, all files in the data directory will be considered.' % (prog_name)
get_files_from_dir = 1
prompt_user = 0
else:
if LOUD: print '%s: File names entered via the command line' % (prog_name)
filenames = []
filenames.append(arg)
for item in extra_args:
filenames.append(item)
prompt_user = 0
elif input_opt == '-r':
use_r = 1
receiver = int(arg)
if LOUD: print '%s: Only files involving receiver %d will be examined.' % (prog_name, receiver)
elif input_opt == '-d':
use_d = 1
dipole = int(arg)
if LOUD: print '%s: Only files involving dipole %d will be examined.' % (prog_name, dipole)
elif input_opt == '-y':
use_y = 1
delay = int(arg)
if LOUD: print '%s: Only files involving delay %d will be examined.' % (prog_name, delay)
elif input_opt == '-l':
loudt = int(arg)
if( loudt==0 ): print '%s: LOUD entered via commandline as 0. Suppressing output...' % (prog_name)
LOUD = loudt
elif input_opt == '-p':
use_p = 1
data_dir = arg
elif input_opt == '-o':
use_o = 1
out_dir = arg
elif input_opt == '-n':
use_n = 1
outfilename = arg
elif input_opt == '-i':
one_set = int(arg)
elif input_opt == '-T':
use_T = 1
use_tile_only = int(arg)
elif input_opt == '-X':
Xnum = int(arg)
elif input_opt == '-N':
nowrite = 1
else: # Actually, this line shouldn't be reached because of the 'except getopt.error' line above
print '%s: ERROR: Unknown input option %s entered on the command line. Printing function usage and then exiting...' % (prog_name, input_opt)
help_message()
# Exit if no start time is entered
if( use_t == 0 ):
print '%s: ERROR: beginning time must be entered on the command line. Printing function usage and then exiting...' % (prog_name)
help_message()
# Determine the directories to use
# (i) the data file directory...
if( use_p == 0 ):
data_dir = os.getcwd()
if LOUD: print '%s: Data files will be pulled from the current working directory, %s (use -p dirname on the command line to change this)' % (prog_name, data_dir)
else:
if LOUD: print '%s: As specified via command line argument -p, files will be pulled from the directory, %s' % (prog_name, data_dir)
# (ii) the output file directory...
if( use_o == 0 ):
out_dir = os.getcwd()
if LOUD: print '%s: Final results will be saved to the current working directory, %s (use -o dirname on the command line to change this)' % (prog_name, out_dir)
else:
if LOUD: print '%s: As specified via command line argument -o, files will be saved to the directory, %s' % (prog_name, out_dir)
# Determine the name of the output file.
if( use_n == 0 ):
if( use_d ):
dipole_str = '%02d' % (dipole)
else:
dipole_str = 'All'
if( use_r ):
receiver_str = '%1d' % (receiver)
else:
receiver_str = 'All'
if( use_y ):
if( delay == -1 ):
delay_str = 'All'
else:
delay_str = '%1d' % (delay)
else:
delay_str = '0'
if( use_T ):
outfilename = 'tile_%d_dip%s_rec%s_delay%s_t%014.5f.txt' % (use_tile_only, dipole_str, receiver_str, delay_str, start_time)
else:
outfilename = 'dip%s_rec%s_delay%s_t%014.5f.txt' % (dipole_str, receiver_str, delay_str, start_time)
g_name = 'vals_%s' % (outfilename)
b_name = 'errs_%s' % (outfilename)
if LOUD: print '%s: Output file name will be %s and %s (use -n outfilename on the command line to change the back end here)' % (prog_name, g_name, b_name)
else:
g_name = 'vals_%s' % (outfilename)
b_name = 'errs_%s' % (outfilename)
if LOUD: print '%s: As specified via command line argument -n, the output file will be named %s and %s' % (prog_name, g_name, b_name)
# If a user prompt was not requested, get all legitimate file names from the data directory, data_dir
if( get_files_from_dir ):
filenames = os.listdir(data_dir)
filenames2 = []
# Make sure that the file name is valid
for item in filenames:
if( valid_file( item ) ):
filenames2.append(item)
filenames = filenames2
# If file names not entered via command line, then either prompt the user (if prompt_user==1) or use the default (if prompt_user==-1; this option is good for debugging)
if( prompt_user == 1):
filenames = []
stay_in_loop = 1
while(stay_in_loop==1):
input_str = '%s: Please input a filename (Use the -f flag to avoid this prompt. Input \'END\' if no more file names are to be entered):\n' % (prog_name)
filename = raw_input(input_str)
if( filename != 'END' ): # If a name is entered...
# ... first check to make sure that the name is valid
if( valid_file( filename )==0 ):
print '%s: ERROR: Input file name ( = %s ) is not valid. Ignoring this input...' % (prog_name, filename)
# ... and if it is, then add it to the list of files
else:
filenames.append(filename)
else: # Otherwise, break the loop
stay_in_loop = 0
elif( prompt_user == -1): # else, just use the file listed below (good for debugging only)
filenames.append('Dipole10_Rx1_20091123.46998_avg')
# Determine the dipole number, receiver number, and time of files in the list
num_files = len(filenames)
times = []
dips = []
recvs = []
delays = []
if( Xnum==9 ):
shift = 0
else:
shift = 1
for item in filenames:
parts = item.split('_')
times.append(float(parts[2+shift]))
temp_str = parts[0].split('e')
dips.append(int(temp_str[1]))
temp_str = parts[1+shift].split('x')
recvs.append(int(temp_str[1]))
if( Xnum != 9 ):
delays.append(int(parts[1]))
else:
delays.append(-1)
# Cut all files that are created before the determined starting time (-t), are not of the correct receiver number (-r), or are not of the correct dipole number (-d)
if( use_r==1 ):
recvs2use = receiver
else:
recvs2use = -1
if( use_d==1 ):
dips2use = dipole
else:
dips2use = -1
if( use_y==1 ):
delays2use = delay
else:
delays2use = 0
time_length = 5.0 # only look for files created within time_length minutes of the start time
end_time = start_time + time_length/(60.0*24.0)
[ times, dips, recvs, delays, filenames ] = cutfiles( start_time, end_time, recvs2use, dips2use, delays2use, times, dips, recvs, delays, filenames )
# Sort the files.
[ times, dips, recvs, delays, filenames ] = slow_sort( times, dips, recvs, delays, filenames )
# If desired, isolate one set of 16 dipoles for all possible receivers
if( one_set!=0 ):
[ times, dips, recvs, delays, filenames ] = isolate_one_set( num_dips, num_rxs, num_delays, times, dips, recvs, delays, filenames )
num_files = len(filenames)
# Make sure that at least one file is chosen. Abort if not.
if(num_files==0):
if LOUD: print '%s: No valid names entered. Aborting...' % (prog_name)
sys.exit(1)
if LOUD:
print '%s: Files to be examined:' % (prog_name)
print filenames
# Define the frequencies to consider
num_ref_freqs = 3 # number of reference frequencies. Here, =3 (80, 120, and 160 MHz)
reffreq = [ 80.0, 120.0, 160.0 ] # reference frequencies in MHz
# Create the array that holds all power readings
allpows = [ [ [ [ 0 for h in range(len(filenames)) ] for i in range(num_ref_freqs) ] for j in range(num_pols) ] for k in range(spr) ]
bad2 = [ [ [ 0 for h in range(len(filenames)) ] for j in range(num_pols) ] for k in range(spr) ]
bad_props2 = [ [ [ 0 for h in range(len(filenames)) ] for j in range(num_pols) ] for k in range(spr) ]
bad_props3 = [ [ [ 0 for h in range(len(filenames)) ] for j in range(num_pols) ] for k in range(spr) ]
# The extra dimension 'i' in allms and allbs below is so that calc_avg_std() may be used on allms and allbs (this function was tailored for allpows above.
allms = [ [ [ [ 0 for h in range(len(filenames)) ] for i in range(0,1) ] for j in range(num_pols) ] for k in range(spr) ]
allbs = [ [ [ [ 0 for h in range(len(filenames)) ] for i in range(0,1) ] for j in range(num_pols) ] for k in range(spr) ]
# Cycle through files
for index, item in enumerate(filenames):
# SECTION I: Read in the data
filename = item
filenumber = int(index)
if LOUD: print '\n%s: Reading in the data from file %s' % (prog_name, filename)
read_in_data = [ [ [ 0 for i in range(num_chs) ] for j in range(num_pols) ] for k in range(spr) ]
read_from_file = '%s/%s' % (data_dir, filename)
try:
data_file = open(read_from_file, "rb") # open the file for reading
except IOError:
print '%s: Can\'t open file %s for reading. Aborting...' % (prog_name, read_from_file)
sys.exit(2)
for slot in range(0,spr): # cycle through slots
for pol in range (0,num_pols): # cycle through polarizations
for ch in range(0,num_chs): # cycle through coarse channels
try:
s = data_file.read(4) # read in a float
except IOError:
print '%s: Can\'t read in float from file %s. Aborting...' % (prog_name, filename)
sys.exit(0)
value = struct.unpack("f", s)
value = value[0]
if( value > 0.0 ):
read_in_data[slot][pol][ch] = 10.0*math.log10(value) # 1.0/(chan2freq(ch)-chan2freq(38)) - 2.0/(chan2freq(ch)-chan2freq(38))/(chan2freq(ch)-chan2freq(38)) + 5.0 #effpot
#if( slot==0 and pol==0 ): print 'ch, data = ', ch, read_in_data[slot][pol][ch]
else:
read_in_data[slot][pol][ch] = -1.0
bad = [ [ 0 for j in range(num_pols) ] for k in range(spr) ] # Boolean array for bad slots/pols: 0 = good, 1 = bad
bad_props = [ [ 0.0 for j in range(num_pols) ] for k in range(spr) ] # float array containing information about bad slots/pols
# SECTION II: Fit and subtract a line from the data, using only freq<50MHz and freq>300MHz (ie, fit and subtract the noise floor)
sub_noise_floor = 1
if( sub_noise_floor==1 ):
temp_str = 'and subtracting '
else:
temp_str = ''
if LOUD: print '\n%s: Fitting %sthe out-of-freq-range noise floor' % (prog_name, temp_str)
# The extra unnecessary dimension is so that I can use the function calc_avg_std(), created for another part of the code
# Determine the frequency bins to use in the calculation
low_only = 0 # if low_only==1, then only fit a line to freqs less than 50MHz. if low_only==0, then include freqs greater than 300MHz as well
ch50 = closestchan2freq( 50.0 ) # channel number closest to 50MHz
if( low_only==1 ):
nchans2fit = ch50
else:
ch300 = closestchan2freq( 300.0 ) # channel number closest to 300MHz
nchans2fit = ch50 + (num_chs - ch300) # Notice: we're excluding the DC component here (bin # 0)
# Create indices2use[], a list of all the elements of the data to be used in the fit (needed for fit_line_with_gaps)
indices2use = [ 0 for i in range(0,nchans2fit) ]
for i in range(1,ch50+1):
indices2use[i-1] = i
if( low_only==0 ):
for i in range(0,num_chs-ch300):
indices2use[i+ch50] = ch300+i
# Cycle through slots and polarizations, fitting lines to the noise floor, and then subsequently subtracting that line
for slot in range(0,spr): # cycle through slots
for pol in range (0,num_pols): # cycle through polarizations
# Fit a line to the extremes of this data
(m,b) = fit_line_with_gaps( read_in_data[slot][pol][0:num_chs], nchans2fit, indices2use, chan2freq )
allms[slot][pol][0][index] = m
allbs[slot][pol][0][index] = b
if LOUD: print '%s: slot = %d%s, fit noise line: slope=%3.3g, intercept=%3.3g' % (prog_name,slot+1,pol2XY(pol),m,b)
if( sub_noise_floor ):
# Subtract the fit line from the data
read_in_data[slot][pol][0:num_chs] = sub_line( read_in_data[slot][pol][0:num_chs], num_chs, m, b, chan2freq )
# Search for bad files based upon these values for the slope and y intercepts
use_mods = 1
avg_slope = [ [ 0 for j in range(1) ] for k in range(num_pols) ]
std_slope = [ [ 0 for j in range(1) ] for k in range(num_pols) ]
avg_intcp = [ [ 0 for j in range(1) ] for k in range(num_pols) ]
std_intcp = [ [ 0 for j in range(1) ] for k in range(num_pols) ]
[ avg_intcp, std_intcp ] = calc_avg_std2( range(0,spr), range(0,num_pols), range(0,1), range(index,index+1), allbs, bad2, use_mods )
crit1 = 0 # if 1, then compare the fit intercept to the other fit values.
crit2 = 1 # if 1, then look to see if the noise floor is greater than 40dB
for pol in range(0,num_pols):
# Cycle through all slots, looking for problems
for slot in range(0,spr): # cycle through slots
temp = allbs[slot][pol][0][index]
if( crit1==1 ):
avg = avg_intcp[pol][0]
std = std_intcp[pol][0]
diff = math.fabs( temp - avg )
if( (diff>std) and (diff>5.0) ):
if LOUD: print '%s: slot = %d%s has funny noise floor. fit intcp = %2.3f, avg = %2.3f, diff = %2.3f, std = %2.3f' % (prog_name,slot+1,pol2XY(pol),temp,avg,diff,std)
bad2[slot][pol][index] = 5
bad_props2[slot][pol][index] = temp - avg
if( crit2==1 ):
if( temp >= 40.0 ):
if LOUD: print '%s: slot = %d%s has funny noise floor. fit intcp = %2.3f: seems too high' % (prog_name,slot+1,pol2XY(pol),temp)
bad2[slot][pol][index] = 5
bad_props2[slot][pol][index] = temp
# SECTION IIb: Search for bad RFI
# Determine the frequency bins to use in the calculation
sat_min = closestchan2freq( 240.0 ) # min channel number for satellite band
sat_max = closestchan2freq( 275.0 ) # max channel number for satellite band
num_sats = sat_max - sat_min + 1
orb_min = closestchan2freq( 137.0 ) # min channel number for orbcom band
orb_max = closestchan2freq( 139.0 ) # max channel number for orbcom band
num_orbs = orb_max - orb_min + 1
nchans2use = num_chs - (num_sats + num_orbs) - 2 # The final -1 is to exclude the DC component and final freq
#print num_chs, nchans2use
#print sat_min, sat_max, orb_min, orb_max
# Create indices2use[], a list of all the elements of the data to be used when searching for RFI
indices2use = [ 0 for i in range(0,nchans2use) ]
i = 0
for j in range(0,num_chs):
if( (j!=0) and (j<orb_min or j>orb_max) and (j<sat_min or j>sat_max) and (j!=num_chs-1) ):
indices2use[i] = j
i = i+1
bad_spike = 5.0 # decimal jump of what is considered a bad RFI spike
real_bad_spike = 20.0
toss_threshold = 50.0
for slot in range(0,spr): # cycle through slots
for pol in range (0,num_pols): # cycle through polarizations
if(bad2[slot][pol][index]==0):
max_jump = -999999.9
num_spikes = 0
toss = 0.0
for i in range(0,nchans2use):
spot = indices2use[i]
jump = read_in_data[slot][pol][spot] - (read_in_data[slot][pol][spot-1] + read_in_data[slot][pol][spot+1])/2.0
if( jump > max_jump ):
max_jump = jump
bad_chan = spot
if( jump > bad_spike ):
num_spikes = num_spikes+1
toss = toss + jump
if( (max_jump>real_bad_spike) or (toss>toss_threshold) ):
if LOUD: print '%s: Bad spike(s) found in file %s! pol = %s, slot = %d, freq = %3.2f, jump = %2.2f, num_spikes = %d, toss = %2.2f' % (prog_name, filename, pol2XY(pol), slot+1, chan2freq(bad_chan), max_jump, num_spikes, toss)
bad2[slot][pol][index] = 6
bad_props2[slot][pol][index] = chan2freq(bad_chan)
bad_props3[slot][pol][index] = max_jump
fit_effpot = 0
if( fit_effpot==1 ):
effpot_str = 'an \'effective potential-esque\' function'
allrms_eff = [ [ 99999.99e99 for j in range(spr) ] for k in range(num_pols) ]
allrms_chs_eff = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
all_a1s = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
all_b1s = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
all_c1s = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
else:
effpot_str = ''
fit_parab = 1
if( fit_parab==1 ):
parab_str = 'a parabolic function'
allrms_par = [ [ 99999.99e99 for j in range(spr) ] for k in range(num_pols) ]
allrms_chs_par = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
all_a2s = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
all_b2s = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
all_c2s = [ [ 0.0 for j in range(spr) ] for k in range(num_pols) ]
else:
parab_str = ''
if( fit_effpot and fit_parab ):
and_str = ' and '
else:
and_str = ''
if LOUD: print '\n%s: Fitting for %s%s%s' % (prog_name,effpot_str,and_str,fit_parab)
if( fit_effpot==1 and fit_parab==1 ):
rms_cut = 5.0 # acceptable rms lower cut. If rms < rms_cut, then we've found the bad shape: mark this file as bad
kmin = 70
kmax = 80
elif( fit_effpot==1 ):
rms_cut = 5.0
kmin = 70
kmax = 80
elif( fit_parab==1 ):
rms_cut = 1.0
kmin = 80
kmax = 80
for k in range(kmin,kmax+1):
low_cut = k #closestchan2freq( low_freq ) # lowest frequency channel to use
low_only = 1
if( low_only==1 ):
nchans2fit = num_chs - low_cut
indices2use = [ i for i in range(low_cut,num_chs) ]
for slot in range(0,spr): # cycle through slots
for pol in range (0,num_pols): # cycle through polarizations
if(bad2[slot][pol][index]==0):
# Fit for an effective potential
if( fit_effpot ):
[ a1, b1, c1, rms1 ] = fit_effpot_with_gaps( read_in_data[slot][pol][0:num_chs], nchans2fit, indices2use, chan2freq, low_cut )
if( rms1 < allrms_eff[pol][slot] ):
allrms_eff[pol][slot] = rms1
allrms_chs_eff[pol][slot] = k
all_a1s[pol][slot] = a1
all_b1s[pol][slot] = b1
all_c1s[pol][slot] = c1
# Fit for a parabola
if( fit_parab ):
[ a2, b2, c2, rms2 ] = fit_parab_with_gaps( read_in_data[slot][pol][0:num_chs], nchans2fit, indices2use, chan2freq, low_cut )
if( rms2 < allrms_par[pol][slot] ):
allrms_par[pol][slot] = rms2
allrms_chs_par[pol][slot] = k
all_a2s[pol][slot] = a2
all_b2s[pol][slot] = b2
all_c2s[pol][slot] = c2
for slot in range(0,spr): # cycle through slots
for pol in range (0,num_pols): # cycle through polarizations
if(bad2[slot][pol][index]==0):
if( fit_effpot==1 and fit_parab==1 ):
value = (allrms_eff[pol][slot]*allrms_par[pol][slot])
elif( fit_effpot==1 ):
value = (allrms_eff[pol][slot])
elif( fit_parab==1 ):
value = (allrms_par[pol][slot])
if( value<rms_cut ):
if LOUD:
print '%s: Bad form at slot %d%s for file %s! ' % (prog_name,slot+1,pol2XY(pol),filename)
if( fit_effpot ):
chan1 = allrms_chs_eff[pol][slot]
print ' Eff pot: rms = %1.2f, vals=(%3.4f, %3.4f, %3.4f), freq=ch %d, %03.2fMHz ' % (allrms_eff[pol][slot],all_a1s[pol][slot],all_b1s[pol][slot],all_c1s[pol][slot],chan1,chan2freq(chan1) )
if( fit_parab ):
chan2 = allrms_chs_par[pol][slot]
print ' Parabola: rms = %1.2f, vals=(%3.4f, %3.4f, %3.4f), freq=ch %d, %03.2fMHz' % (allrms_par[pol][slot],all_a2s[pol][slot],all_b2s[pol][slot],all_c2s[pol][slot],chan2,chan2freq(chan2) )
print ' Overall: prod = %1.2f, cut = %1.2f' % ( value,rms_cut )
bad2[slot][pol][index] = 7
bad_props2[slot][pol][index] = value
# SECTION III: Calculate the power at various frequencies
if LOUD: print '\n%s: Calculating the power at various frequencies' % (prog_name)
#Determine the frequency bins associated with these three frequencies
channels = [ closestchan2freq( reffreq[i] ) for i in range(num_ref_freqs) ]
#Set the number of bins around this bin to average over
bins2avg = 2 # number of bins of each side, total bins in average is 2*bins2avg+1
totbins = 2*bins2avg+1
# Create the array to hold these power levels
refpow = [ [ [ 0 for i in range(num_ref_freqs) ] for j in range(num_pols) ] for k in range(spr) ]
# Cycle through slots and polarizations, and estimate the relevent quantities
for slot in range(0,spr): # cycle through slots
for pol in range (0,num_pols): # cycle through polarizations
for i in range(0,num_ref_freqs):
chan_temp = channels[i]
refpow[slot][pol][i] = avg_data( read_in_data[slot][pol][chan_temp-bins2avg:chan_temp+bins2avg+1], totbins, 1 )
# Add these power readings to the full data array
allpows[slot][pol][i][index] = refpow[slot][pol][i]
# Print the calculated values
for slot in range(0,spr): # cycle through slots
for pol in range (0,num_pols): # cycle through polarizations
if LOUD: print '%s: slot = %d%s, power levels = (%2.3f,%2.3f,%2.3f) at MHz = (80,120,160), respectively' % (prog_name,slot+1,pol2XY(pol),refpow[slot][pol][0],refpow[slot][pol][1],refpow[slot][pol][2])
if LOUD: print '\n%s: Looking for dead dipoles across single receivers' % (prog_name)
# Find dead dipoles
use_mod = 1
avg_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
std_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
[ avg_val, std_val ] = calc_avg_std2( range(0,spr), range(0,num_pols), reffreq, range(index,index+1), allpows, bad2, use_mods )
dead_cut = 3.0 # plots of dead_cut db or less in all three frequency channels are mostly likely dead
for pol in range(0,num_pols):
# Cycle through all slots, looking for outliers
for slot in range(0,spr): # cycle through slots
if( bad2[slot][pol][index]==0 ):
dead = 0
for i in range(0,num_ref_freqs):
temp = refpow[slot][pol][i]
diff = math.fabs( temp - avg_val[pol][i] )
if( (temp<0.2*avg_val[pol][i]) or temp<dead_cut ):
dead = dead + 1
if LOUD: print '%s: slot = %d%s may be dead. At freq = %3.3f MHz., power = %2.3f avg = %2.3f' % (prog_name,slot+1,pol2XY(pol),reffreq[i],temp,avg_val[pol][i])
if( dead == num_ref_freqs ):
bad2[slot][pol][index] = 2
if( compare_option==0 ): # In this case, we're comparing different slots in the same receiver
# SECTION IV: Make a quick recommendation based upon these values of the power
# Look for other peculiarities
[ avg_val, std_val ] = calc_avg_std2( range(0,spr), range(0,num_pols), reffreq, range(index,index+1), allpows, bad2, use_mods )
for pol in range(0,num_pols):
# Cycle through all slots, looking for outliers
for slot in range(0,spr): # cycle through slots
if( bad2[slot][pol][index] == 0 ):
low = high = some_bad = 0
low_avg = high_avg = 0.0
for i in range(0,num_ref_freqs):
temp = refpow[slot][pol][i]
avg = avg_val[pol][i]
diff = math.fabs( temp - avg )
if( (diff>std_val[pol][i]) and (diff>3.0) ):
some_bad = 1
if( temp < avg ):
low = low + 1
low_avg = low_avg + diff
elif( temp > avg ):
high = high + 1
high_avg = high_avg + diff
if LOUD: print '%s: slot = %d%s may be bad. At freq = %3.3f MHz., power = %2.3f avg = %2.3f, diff = %2.3f, std dev = %2.3f' % (prog_name,slot+1,pol2XY(pol),reffreq[i],refpow[slot][pol][i],avg_val[pol][i],diff,std_val[pol][i])
if( some_bad==1 ):
if( low==num_ref_freqs ):
bad2[slot][pol][index] = 3
bad_props2[slot][pol][index] = low_avg/float(num_ref_freqs)
elif( high==num_ref_freqs ):
bad2[slot][pol][index] = 4
bad_props2[slot][pol][index] = high_avg/float(num_ref_freqs)
else:
bad2[slot][pol][index] = 1
# SECTION V: Print results to screen and save the results.
# Determine the dipole number, receiver number, delay and time
dip_num = dips[index]
recv_num = recvs[index]
time = times[index]
delay_num = delays[index]
if LOUD: print '%s: File %s is for dipole %d, receiver %d, delay %d, time %8.5f' % (prog_name, filename, dip_num, recv_num, delay_num, time)
# Print out the values
stars = '************************************************************************'
out_txt = ''
write_to_file = '%s/%s' % (out_dir, g_name)
if LOUD: print'\n%s' % (stars)
if LOUD: print '\n%s: Output added to %s from file %s\n' % (prog_name, g_name, filename)
out_txt1 = 'time %9.5f Rx %d Dip %d' % (time, recv_num, dip_num)
for pol in range(0,num_pols):
out_txt2 = '%s%s Delay %d slot ' % (out_txt1, pol2XY(pol), delay_num )
for slot in range(0,spr):
out_txt = '%s%s%d tile %d ' % (out_txt, out_txt2, slot+1, slot2tile( slot+1, recv_num, Xnum, 0 ) )
for i in range(0,num_ref_freqs):
out_txt = '%s%2.2f ' % (out_txt, refpow[slot][pol][i] )
out_txt = '%s\n' % (out_txt)
if LOUD: print '%s' % (out_txt)
if LOUD: print'%s' % (stars)
# Save the results
if( nowrite == 0 ):
#Open the file
if(index == 0): # If this is the first file, then the outfile needs to be created
try:
outfile = open(write_to_file, 'w')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
else: # Otherwise, the outfile needs to be opened, and the new results appended to the end
try:
outfile = open(write_to_file, 'a')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
# Write the data
outfile.write(out_txt)
# Close the file
outfile.close
out_txt = ''
write_to_file = '%s/%s' % (out_dir, b_name)
if LOUD: print'\n%s' % (stars)
if LOUD: print '\n%s: Output added to %s from file %s\n' % (prog_name, b_name, filename)
out_txt1 = 'time %9.5f Rx %d Dip %d' % (time, recv_num, dip_num)
for slot in range(0,spr):
for pol in range(0,num_pols):
out_txt2 = '%s%s Delay %d slot ' % (out_txt1, pol2XY(pol), delay_num )
value = bad2[slot][pol][index]
if( value!=0 ):
out_txt = '%s%s%d tile %d ' % (out_txt, out_txt2, slot+1, slot2tile( slot+1, recv_num, Xnum, 0 ) )
if( value==1 ): # 'U' for Unknown (or, Unusual)
out_txt = '%sU' % (out_txt)
elif( value==2 ): # 'D' for Dead dipole
out_txt = '%sD' % (out_txt)
elif( value==3 ): # 'L' for Low signal
out_txt = '%sL%2.1f' % (out_txt, bad_props2[slot][pol][index])
elif( value==4 ): # 'H' for High signal
out_txt = '%sH%2.1f' % (out_txt, bad_props2[slot][pol][index])
elif( value==5 ): # 'F' for unusual noise Floor
out_txt = '%sF%2.1f' % (out_txt, bad_props2[slot][pol][index])
elif( value==6 ): # 'R' for Rfi spikes
out_txt = '%sR %3.2f %3.2f' % (out_txt, bad_props2[slot][pol][index], bad_props3[slot][pol][index])
out_txt = '%s\n' % (out_txt)
if LOUD: print '%s' % (out_txt)
if LOUD: print'%s' % (stars)
# Save the results
if( nowrite == 0 ):
#Open the file
if(index == 0): # If this is the first file, then the outfile needs to be created
try:
outfile = open(write_to_file, 'w')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
else: # Otherwise, the outfile needs to be opened, and the new results appended to the end
try:
outfile = open(write_to_file, 'a')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
# Write the data
outfile.write(out_txt)
# Close the file
outfile.close
# End, if( compare_option==0 )
# End, cycle through files
if( compare_option==1):
first_time_through = 1
if( use_T ):
start_tile = use_tile_only
stop_tile = use_tile_only+1
else:
start_tile = 1
stop_tile = num_tiles
for tilenum in range(start_tile,stop_tile):
# Determine the slot number and receiver of this tile
[ slot, recv ] = slot2tile( tilenum, -1, Xnum, 1 )
# As a check, make sure that slot2tile outputs the tile number for this slot number and receiver
tile = slot2tile( slot, recv, Xnum, 0 )
if( tile != tilenum ):
if LOUD: print '%s: ERROR: function slot2tile() does not give consistent results. tilenum=%d. output (mode 1): slot = %d, recv = %d. output (mode 0) using these values: tile = %d. Aborting...' % (prog_name, tilenum, slot, recv, tile)
sys.exit(1)
else:
if LOUD: print '%s: Working on tile number %d, slot number %d, recv number %d...' % (prog_name, tilenum, slot, recv)
rel_files = [] # Determine the relevent files for this recv
for i in range(0,num_files):
if( recvs[i]== recv ):
rel_files.append(i)
num_rel_files = len(rel_files)
if( num_rel_files != 0 ): # If at least one file contains this receiver (and thus tile), then proceed...
for delay in range(0,num_delays):
if LOUD: print '\n%s: Working on delay %d...' % (prog_name, delay)
rel_files2 = [] # Determine the relevent files for this recv
for i in range(0,num_files):
if( recvs[i]== recv and delays[i]==delay ):
rel_files2.append(i)
num_rel_files2 = len(rel_files2)
if( num_rel_files2 != 0 ): # If at least one file contains this delay, then proceed...
# SECTION IV: Make a quick recommendation based upon these values of the power
if LOUD: print '\n%s: Making quick recommendations on this tile using %d files' % (prog_name, num_rel_files2)
use_mod = 1
avg_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
std_val = [ [ 0 for j in range(num_ref_freqs) ] for k in range(num_pols) ]
# Find dead dipoles (Search was also done across slots in the same receiver above: this helps in the case when the entire tile is dead.)
[ avg_val, std_val ] = calc_avg_std2( range(slot-1,slot), range(0,num_pols), reffreq, rel_files2, allpows, bad2, use_mods )
dead_cut = 3.0 # plots of dead_cut db or less in all three frequency channels are mostly likely dead
for pol in range(0,num_pols):
for files in range(0,num_rel_files2): # cycle through relevent files (which should be 16 files, one for each dipole)
filenum = rel_files2[files]
if( bad2[slot-1][pol][filenum]==0 ):
dead = 0
for i in range(0,num_ref_freqs):
temp = allpows[slot-1][pol][i][filenum]
avg = avg_val[pol][i]
diff = math.fabs( temp - avg )
if( (temp < 0.2*avg) or temp<dead_cut ):
dead = dead + 1
if LOUD: print '%s: dipole = %d%s for tile %d may be dead. At freq = %3.3f MHz, power = %2.3f avg = %2.3f' % (prog_name,dips[filenum],pol2XY(pol),tilenum,reffreq[i],temp,avg)
if( dead == num_ref_freqs ):
bad2[slot-1][pol][filenum] = 2
# Look for other peculiarities
[ avg_val, std_val ] = calc_avg_std2( range(slot-1,slot), range(0,num_pols), reffreq, rel_files2, allpows, bad2, use_mods )
for pol in range(0,num_pols):
for files in range(0,num_rel_files2): # cycle through relevent files (which should be 16 files, one for each dipole)
filenum = rel_files2[files]
if( bad2[slot-1][pol][filenum] == 0 ):
low = high = some_bad = 0
low_avg = high_avg = 0.0
for i in range(0,num_ref_freqs):
temp = allpows[slot-1][pol][i][filenum]
avg = avg_val[pol][i]
#print 'avg = %2.2f, avg_val = %2.2f, pol = %d, files = %d, i = %d' % ( avg, avg_val[pol][i], pol, files, i )
std = std_val[pol][i]
diff = math.fabs( temp - avg )
if( (diff>std) and (diff>3.0) ):
some_bad = 1
if( temp < avg ):
low = low + 1
low_avg = low_avg + diff
elif( temp > avg ):
high = high + 1
high_avg = high_avg + diff
if LOUD: print '%s: dipole = %d%s for tile %d may be bad. At freq = %3.3f MHz, power = %2.3f avg = %2.3f, diff = %2.3f, std dev = %2.3f' % (prog_name,dips[filenum], pol2XY(pol), tilenum, reffreq[i],temp,avg,diff,std)
if( some_bad==1 ):
if( low==num_ref_freqs ):
bad2[slot-1][pol][filenum] = 3
bad_props2[slot-1][pol][filenum] = low_avg/float(num_ref_freqs)
elif( high==num_ref_freqs ):
bad2[slot-1][pol][filenum] = 4
bad_props2[slot-1][pol][filenum] = high_avg/float(num_ref_freqs)
else:
bad2[slot-1][pol][filenum] = 1
# SECTION V: Print results to screen and save the results.
# Print out the values
stars = '************************************************************************'
write_to_file = '%s/%s' % (out_dir, g_name)
if LOUD: print'\n%s' % (stars)
if LOUD: print '\n%s: Output added to %s for tile %d\n' % (prog_name, g_name, tilenum)
out_txt = 'TILE %03d (RX %02d, SLOT %d) for DELAY %02d at TIME ~%9.3f\n' % (tilenum, recv, slot, delay, times[rel_files2[num_rel_files2/2]])
for files in range(0,num_rel_files2): # cycle through relevent files (which should be 16 files, one for each dipole)
filenum = rel_files2[files]
dip_num = dips[filenum]
#print filenames[filenum], dip_num
for pol in range(0,num_pols):
out_txt = '%sdipole %02d%s ' % (out_txt, dip_num, pol2XY(pol) )
for i in range(0,num_ref_freqs):
out_txt = '%s%2.2f ' % (out_txt, allpows[slot-1][pol][i][filenum] )
out_txt = '%s\n' % (out_txt)
out_txt = '%s\n' % (out_txt)
if LOUD: print '%s' % (out_txt)
if LOUD: print'%s' % (stars)
# Save the results
if( nowrite == 0 ):
#Open the file
if(first_time_through==1): # If this is the first file, then the outfile needs to be created
try:
outfile = open(write_to_file, 'w')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
else: # Otherwise, the outfile needs to be opened, and the new results appended to the end
try:
outfile = open(write_to_file, 'a')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
# Write the data
outfile.write(out_txt)
# Close the file
outfile.close
out_txt = ''
write_to_file = '%s/%s' % (out_dir, b_name)
if LOUD: print'\n%s' % (stars)
if LOUD: print '\n%s: Output added to %s for tile %d\n' % (prog_name, b_name, tilenum)
out_txt = 'TILE %03d (RX %02d, SLOT %d) for DELAY %02d at TIME ~%9.3f\n' % (tilenum, recv, slot, delay, times[rel_files2[num_rel_files2/2]])
for files in range(0,num_rel_files2): # cycle through relevent files (which should be 16 files, one for each dipole)
filenum = rel_files2[files]
dip_num = dips[filenum]
#print filenames[filenum], dip_num
for pol in range(0,num_pols):
value = bad2[slot-1][pol][filenum]
if( value!=0 ):
out_txt = '%sdipole %02d%s ' % (out_txt, dip_num, pol2XY(pol) )
if( value==1 ): # 'U' for Unknown (or, Unusual)
out_txt = '%sU' % (out_txt)
elif( value==2 ): # 'D' for Dead dipole
out_txt = '%sD' % (out_txt)
elif( value==3 ): # 'L' for Low signal
out_txt = '%sL%2.1f' % (out_txt, bad_props2[slot-1][pol][filenum])
elif( value==4 ): # 'H' for High signal
out_txt = '%sH%2.1f' % (out_txt, bad_props2[slot-1][pol][filenum])
elif( value==5 ): # 'F' for unusual noise Floor
out_txt = '%sF%2.1f' % (out_txt, bad_props2[slot-1][pol][filenum])
elif( value==6 ): # 'R' for Rfi spikes
out_txt = '%sR %3.2f %3.2f' % (out_txt, bad_props2[slot-1][pol][filenum], bad_props3[slot-1][pol][filenum])
elif( value==7 ): # 'E' for Effective potential
out_txt = '%sE %3.2f' % (out_txt, bad_props2[slot-1][pol][filenum])
#print 'HERE! file = %s' % (filenames[filenum])
out_txt = '%s\n' % (out_txt)
out_txt = '%s\n' % (out_txt)
if LOUD: print '%s' % (out_txt)
if LOUD: print'%s' % (stars)
# Save the results
if( nowrite == 0 ):
#Open the file
if(first_time_through==1): # If this is the first file, then the outfile needs to be created
try:
outfile = open(write_to_file, 'w')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
first_time_through=0
else: # Otherwise, the outfile needs to be opened, and the new results appended to the end
try:
outfile = open(write_to_file, 'a')
except IOError:
print '%s: Can\'t open file %s for writing. Aborting...' % (prog_name, write_to_file)
sys.exit(0)
# Write the data
outfile.write(out_txt)
# Close the file
outfile.close
else: # no files contain this tile number and delay
if LOUD: print '%s: No input files contain this tile number and delay.' % (prog_name)
# End, cycle through delays
else: # no files contain this tile number
if LOUD: print '%s: No files contain tile number %d' % (prog_name, tilenum)
# End, cycle through tiles
# End, if( compare_option==1 )
print '\n%s: Program complete.\n' % (prog_name)
| [
"brynah@phys.washington.edu"
] | brynah@phys.washington.edu |
6a9ecd992fe159bad837e802c20b0d18e311357b | a25a6b73afb2c424083012a8437a2a9c1c76e660 | /ex090_DicionarioEmPython.py | 8996480e24f27d72eb822dd65360086b58908081 | [] | no_license | vanessa-santana/Exercicios-Estudo_Python | 8a7a207ac790d1d48b5dbf5524d62b64ad8b95c3 | 773c4da0d826ef673d9d6029ee4d61f0c50fb4a7 | refs/heads/main | 2023-03-04T21:18:25.014308 | 2021-02-16T17:28:12 | 2021-02-16T17:28:12 | 339,400,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | #Faça um programa que leia nome e média de um aluno, guardando também a situação em um dicionário.
# Read a student's name and grade, store them in a dictionary together with
# the resulting pass/fail status, then display the whole structure on screen.
# Dictionaries are created with {} or dict()
aluno = dict()
aluno['Nome'] = str(input('Nome: ')).title()
aluno['Média'] = float(input(f'Média de {aluno["Nome"]}: '))
# Grade >= 7 passes ('Aprovado'), 5 <= grade < 7 goes to make-up exams
# ('Recuperação'), anything below 5 fails ('Reprovado').
if aluno['Média']>=7:
    aluno['Situação'] = 'Aprovado'
elif 5 <= aluno['Média'] < 7:
    aluno['Situação'] = 'Recuperação'
else:
    aluno['Situação'] = 'Reprovado'
# Print every key/value pair collected above.
for chave, valor in aluno.items():
print(f'- {chave} é igual a {valor}') | [
"noreply@github.com"
] | vanessa-santana.noreply@github.com |
5b781b3d46bfa83acf014df60443ec9f46f72653 | 8fc3123a5329c332d873cb58f9d18681abdbd88d | /Tesy/Tesy/bin/prml/linear/ridge_regression.py | 00ac7a32c4f39d68774873137cd5e1bec54d7631 | [] | no_license | chloele33/Poly-X | 19ee5ce8eaddb7b1dd263b80e02556f1deec5895 | 3c967bf544718aa4a626bb26e73ad1d6e64f3215 | refs/heads/master | 2020-04-29T13:52:20.061387 | 2019-05-09T22:50:30 | 2019-05-09T22:50:30 | 176,180,547 | 0 | 0 | null | 2019-03-18T01:03:41 | 2019-03-18T01:03:41 | null | UTF-8 | Python | false | false | 1,093 | py | import numpy as np
from prml.linear.regression import Regression
class RidgeRegression(Regression):
    """
    Ridge regression model

    w* = argmin |t - X @ w|^2 + alpha * |w|_2^2
    """

    def __init__(self, alpha=1.):
        """
        Construct a ridge regression model.

        Parameters
        ----------
        alpha : float
            L2 regularization strength (precision of the isotropic prior).
        """
        # Coerce to float so integer alphas behave consistently.  The
        # original called float(alpha) but discarded the result.
        self.alpha = float(alpha)

    def fit(self, X, t):
        """
        maximum a posteriori estimation of parameter

        Parameters
        ----------
        X : (N, D) np.ndarray
            training data independent variable
        t : (N,) np.ndarray
            training data dependent variable
        """
        # Removed the stray np.ndarray(X) / np.ndarray(t) calls: np.ndarray's
        # first argument is a *shape*, so passing the data array raised
        # TypeError for ordinary float inputs and served no purpose.
        # Closed-form MAP solution: (alpha*I + X^T X) w = X^T t.
        eye = np.eye(np.size(X, 1))
        self.w = np.linalg.solve(self.alpha * eye + np.matmul(X.T, X), np.matmul(X.T, t))

    def predict(self, X):
        """
        make prediction given input

        Parameters
        ----------
        X : (N, D) np.ndarray
            samples to predict their output

        Returns
        -------
        (N,) np.ndarray
            prediction of each input
        """
        return np.matmul(X, self.w)
| [
"chloele@seas.upenn.edu"
] | chloele@seas.upenn.edu |
c52b322c1c1fb0464674ec1211c34b90dcd6b4b1 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/eqptdiag/entity.py | 86ae5c47a213aa5ec2540a9fb905cafd8a5403b0 | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 6,254 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class Entity(Mo):
    """
    Diag-related entity information

    Auto-generated managed-object class (see the "do not modify" header of
    this file).  Its relative name is the fixed string "diag" — there are
    no naming properties — and its parent is cobra.model.top.System.
    """
    # Class-level metadata registered with the Cobra model framework:
    # identity, RN format, labels, access masks and capability flags.
    meta = ClassMeta("cobra.model.eqptdiag.Entity")
    meta.moClassName = "eqptdiagEntity"
    meta.rnFormat = "diag"
    meta.category = MoCategory.REGULAR
    meta.label = "Equipment Diagnostics Entity"
    meta.writeAccessMask = 0x880080000000001
    meta.readAccessMask = 0x880080000000001
    meta.isDomainable = False
    meta.isReadOnly = True
    meta.isConfigurable = False
    meta.isDeletable = False
    meta.isContextRoot = False
    # Managed-object classes that may be contained by this MO, plus the
    # RN prefix used for each child class.
    meta.childClasses.add("cobra.model.health.Inst")
    meta.childClasses.add("cobra.model.fault.Counts")
    meta.childClasses.add("cobra.model.eqptdiagp.GrpTests")
    meta.childClasses.add("cobra.model.eqptdiag.Rule")
    meta.childNamesAndRnPrefix.append(("cobra.model.eqptdiagp.GrpTests", "grptests-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
    meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
    meta.childNamesAndRnPrefix.append(("cobra.model.eqptdiag.Rule", "rule-"))
    # Allowed parent class and the model classes this one inherits from.
    meta.parentClasses.add("cobra.model.top.System")
    meta.superClasses.add("cobra.model.nw.Conn")
    meta.superClasses.add("cobra.model.nw.CpEntity")
    meta.superClasses.add("cobra.model.nw.Item")
    meta.superClasses.add("cobra.model.nw.GEp")
    meta.rnPrefixes = [
        ('diag', False),
        ]
    # adminSt: administrative state; implicit, defaults to "enabled" (1).
    prop = PropMeta("str", "adminSt", "adminSt", 3670, PropCategory.REGULAR)
    prop.label = "Admin State"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 1
    prop.defaultValueStr = "enabled"
    prop._addConstant("disabled", "disabled", 2)
    prop._addConstant("enabled", "enabled", 1)
    meta.props.add("adminSt", prop)
    # childAction: standard child-action bitmask carried by cobra MOs.
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)
    # dn: distinguished name; implicit, create-only.
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)
    # lcOwn: lifecycle ownership of this MO; defaults to "local" (0).
    prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "local"
    prop._addConstant("implicit", "implicit", 4)
    prop._addConstant("local", "local", 0)
    prop._addConstant("policy", "policy", 1)
    prop._addConstant("replica", "replica", 2)
    prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
    meta.props.add("lcOwn", prop)
    # modTs: modification timestamp; defaults to "never" (0).
    prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "never"
    prop._addConstant("never", "never", 0)
    meta.props.add("modTs", prop)
    # monPolDn: DN of the monitoring policy attached to this object.
    prop = PropMeta("str", "monPolDn", "monPolDn", 14498, PropCategory.REGULAR)
    prop.label = "Monitoring policy attached to this observable object"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("monPolDn", prop)
    # name: configurable object name, 1-128 characters, create-only.
    prop = PropMeta("str", "name", "name", 3669, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    prop.range = [(1, 128)]
    meta.props.add("name", prop)
    # operErr: operational error qualifier; constants are bit-flag values.
    prop = PropMeta("str", "operErr", "operErr", 3672, PropCategory.REGULAR)
    prop.label = "Operational Errors Qualifier"
    prop.isOper = True
    prop._addConstant("feature-unsupported", "feature-unsupported", 64)
    prop._addConstant("init-err", "initialization-error", 1)
    prop._addConstant("int-err", "internal-error", 8)
    prop._addConstant("ipc-err", "ipc-error", 4)
    prop._addConstant("mem-err", "memory-error", 2)
    prop._addConstant("proto-err", "protocol-error", 32)
    prop._addConstant("sock-err", "socket-error", 16)
    meta.props.add("operErr", prop)
    # operSt: operational state; defaults to "enabled" (1).
    prop = PropMeta("str", "operSt", "operSt", 3671, PropCategory.REGULAR)
    prop.label = "Operational State"
    prop.isOper = True
    prop.defaultValue = 1
    prop.defaultValueStr = "enabled"
    prop._addConstant("disabled", "disabled", 2)
    prop._addConstant("enabled", "enabled", 1)
    prop._addConstant("failed", "failed", 4)
    prop._addConstant("initializing", "initializing", 3)
    prop._addConstant("unknown", "unknown", 0)
    meta.props.add("operSt", prop)
    # rn: relative name; implicit, create-only.
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)
    # status: created/modified/deleted bit-flags for this MO.
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        """Create the MO under *parentMoOrDn*; "diag" is a fixed RN, so no naming values are passed."""
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
615006d06bcf4d93bc93ed9798d73762df416462 | 947fa6a4a6155ffce0038b11f4d743603418ad68 | /.c9/metadata/environment/clean_code/clean_code_submissions/clean_code_assignment_004/fb_post/utils/reply_to_comment.py | bf2b3191ba7a9a31d3d72f9ff3c6587ad9dd601e | [] | no_license | bharathi151/bharathi_diyyala | bd75e10639d7d22b332d5ce677e7799402dc4984 | 99f8657d010c790a0e4e4c9d6b57f81814784eb0 | refs/heads/master | 2022-11-21T12:43:48.401239 | 2020-07-23T09:05:52 | 2020-07-23T09:05:52 | 281,903,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,914 | py | {"filter":false,"title":"reply_to_comment.py","tooltip":"/clean_code/clean_code_submissions/clean_code_assignment_004/fb_post/utils/reply_to_comment.py","undoManager":{"mark":57,"position":57,"stack":[[{"start":{"row":21,"column":45},"end":{"row":22,"column":0},"action":"insert","lines":["",""],"id":2},{"start":{"row":22,"column":0},"end":{"row":22,"column":8},"action":"insert","lines":[" "]}],[{"start":{"row":22,"column":4},"end":{"row":22,"column":8},"action":"remove","lines":[" "],"id":3},{"start":{"row":22,"column":0},"end":{"row":22,"column":4},"action":"remove","lines":[" "]}],[{"start":{"row":21,"column":8},"end":{"row":21,"column":45},"action":"remove","lines":["is_valid_reply_content(reply_content)"],"id":4}],[{"start":{"row":21,"column":4},"end":{"row":21,"column":8},"action":"remove","lines":[" "],"id":5},{"start":{"row":21,"column":0},"end":{"row":21,"column":4},"action":"remove","lines":[" "]},{"start":{"row":20,"column":53},"end":{"row":21,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":18,"column":66},"end":{"row":19,"column":0},"action":"insert","lines":["",""],"id":6},{"start":{"row":19,"column":0},"end":{"row":19,"column":4},"action":"insert","lines":[" 
"]}],[{"start":{"row":19,"column":4},"end":{"row":19,"column":41},"action":"insert","lines":["is_valid_reply_content(reply_content)"],"id":7}],[{"start":{"row":19,"column":41},"end":{"row":20,"column":0},"action":"insert","lines":["",""],"id":8},{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"remove","lines":[" "],"id":9},{"start":{"row":19,"column":41},"end":{"row":20,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":19,"column":41},"end":{"row":20,"column":0},"action":"insert","lines":["",""],"id":10},{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":8,"column":49},"end":{"row":9,"column":0},"action":"insert","lines":["",""],"id":11},{"start":{"row":9,"column":0},"end":{"row":9,"column":12},"action":"insert","lines":[" "]}],[{"start":{"row":9,"column":8},"end":{"row":9,"column":12},"action":"remove","lines":[" "],"id":12},{"start":{"row":9,"column":4},"end":{"row":9,"column":8},"action":"remove","lines":[" "]},{"start":{"row":9,"column":0},"end":{"row":9,"column":4},"action":"remove","lines":[" "]},{"start":{"row":8,"column":49},"end":{"row":9,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":8,"column":34},"end":{"row":8,"column":35},"action":"insert","lines":["\\"],"id":13}],[{"start":{"row":8,"column":35},"end":{"row":9,"column":0},"action":"insert","lines":["",""],"id":14},{"start":{"row":9,"column":0},"end":{"row":9,"column":8},"action":"insert","lines":[" "]}],[{"start":{"row":9,"column":8},"end":{"row":9,"column":12},"action":"insert","lines":[" "],"id":15}],[{"start":{"row":9,"column":12},"end":{"row":9,"column":16},"action":"insert","lines":[" "],"id":16}],[{"start":{"row":9,"column":16},"end":{"row":9,"column":20},"action":"insert","lines":[" "],"id":17}],[{"start":{"row":9,"column":20},"end":{"row":9,"column":24},"action":"insert","lines":[" 
"],"id":18}],[{"start":{"row":10,"column":48},"end":{"row":10,"column":49},"action":"remove","lines":[" "],"id":19},{"start":{"row":10,"column":44},"end":{"row":10,"column":48},"action":"remove","lines":[" "]},{"start":{"row":10,"column":40},"end":{"row":10,"column":44},"action":"remove","lines":[" "]}],[{"start":{"row":10,"column":46},"end":{"row":11,"column":0},"action":"insert","lines":["",""],"id":20},{"start":{"row":11,"column":0},"end":{"row":11,"column":40},"action":"insert","lines":[" "]}],[{"start":{"row":11,"column":36},"end":{"row":11,"column":40},"action":"remove","lines":[" "],"id":21},{"start":{"row":11,"column":32},"end":{"row":11,"column":36},"action":"remove","lines":[" "]},{"start":{"row":11,"column":28},"end":{"row":11,"column":32},"action":"remove","lines":[" "]},{"start":{"row":11,"column":24},"end":{"row":11,"column":28},"action":"remove","lines":[" "]}],[{"start":{"row":18,"column":37},"end":{"row":19,"column":0},"action":"insert","lines":["",""],"id":22},{"start":{"row":19,"column":0},"end":{"row":19,"column":8},"action":"insert","lines":[" 
"]},{"start":{"row":19,"column":8},"end":{"row":19,"column":9},"action":"insert","lines":["r"]},{"start":{"row":19,"column":9},"end":{"row":19,"column":10},"action":"insert","lines":["e"]}],[{"start":{"row":19,"column":10},"end":{"row":19,"column":11},"action":"insert","lines":["r"],"id":23},{"start":{"row":19,"column":11},"end":{"row":19,"column":12},"action":"insert","lines":["u"]}],[{"start":{"row":19,"column":11},"end":{"row":19,"column":12},"action":"remove","lines":["u"],"id":24},{"start":{"row":19,"column":10},"end":{"row":19,"column":11},"action":"remove","lines":["r"]}],[{"start":{"row":19,"column":10},"end":{"row":19,"column":11},"action":"insert","lines":["t"],"id":25},{"start":{"row":19,"column":11},"end":{"row":19,"column":12},"action":"insert","lines":["u"]},{"start":{"row":19,"column":12},"end":{"row":19,"column":13},"action":"insert","lines":["r"]},{"start":{"row":19,"column":13},"end":{"row":19,"column":14},"action":"insert","lines":["n"]}],[{"start":{"row":19,"column":14},"end":{"row":20,"column":0},"action":"insert","lines":["",""],"id":26},{"start":{"row":20,"column":0},"end":{"row":20,"column":8},"action":"insert","lines":[" "]},{"start":{"row":20,"column":4},"end":{"row":20,"column":8},"action":"remove","lines":[" "]}],[{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"remove","lines":[" "],"id":27}],[{"start":{"row":20,"column":0},"end":{"row":21,"column":0},"action":"insert","lines":["",""],"id":28}],[{"start":{"row":19,"column":8},"end":{"row":19,"column":14},"action":"remove","lines":["return"],"id":29},{"start":{"row":19,"column":4},"end":{"row":19,"column":8},"action":"remove","lines":[" "]}],[{"start":{"row":13,"column":8},"end":{"row":15,"column":29},"action":"remove","lines":["new_comment_id = comment_creation(user_id, comment_id,"," reply_content, comment)"," return new_comment_id"],"id":30}],[{"start":{"row":13,"column":4},"end":{"row":13,"column":8},"action":"remove","lines":[" 
"],"id":31},{"start":{"row":13,"column":0},"end":{"row":13,"column":4},"action":"remove","lines":[" "]}],[{"start":{"row":17,"column":0},"end":{"row":17,"column":4},"action":"remove","lines":[" "],"id":32}],[{"start":{"row":18,"column":0},"end":{"row":18,"column":4},"action":"insert","lines":[" "],"id":33}],[{"start":{"row":18,"column":4},"end":{"row":20,"column":29},"action":"insert","lines":["new_comment_id = comment_creation(user_id, comment_id,"," reply_content, comment)"," return new_comment_id"],"id":34}],[{"start":{"row":20,"column":6},"end":{"row":20,"column":7},"action":"remove","lines":[" "],"id":35}],[{"start":{"row":20,"column":6},"end":{"row":20,"column":7},"action":"remove","lines":[" "],"id":36},{"start":{"row":20,"column":5},"end":{"row":20,"column":6},"action":"remove","lines":[" "]},{"start":{"row":20,"column":4},"end":{"row":20,"column":5},"action":"remove","lines":[" "]}],[{"start":{"row":13,"column":0},"end":{"row":14,"column":0},"action":"remove","lines":["",""],"id":37},{"start":{"row":12,"column":0},"end":{"row":13,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":40},"action":"remove","lines":[" "],"id":38}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":37},"action":"insert","lines":[" "],"id":39},{"start":{"row":10,"column":37},"end":{"row":10,"column":38},"action":"insert","lines":[" "]},{"start":{"row":10,"column":38},"end":{"row":10,"column":39},"action":"insert","lines":[" "]},{"start":{"row":10,"column":39},"end":{"row":10,"column":40},"action":"insert","lines":[" "]}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":40},"action":"remove","lines":[" "],"id":40}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":37},"action":"insert","lines":[" "],"id":41},{"start":{"row":10,"column":37},"end":{"row":10,"column":38},"action":"insert","lines":[" "]},{"start":{"row":10,"column":38},"end":{"row":10,"column":39},"action":"insert","lines":[" 
"]}],[{"start":{"row":11,"column":20},"end":{"row":11,"column":24},"action":"remove","lines":[" "],"id":42}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":20},"action":"remove","lines":[" "],"id":43},{"start":{"row":11,"column":12},"end":{"row":11,"column":16},"action":"remove","lines":[" "]}],[{"start":{"row":11,"column":12},"end":{"row":11,"column":16},"action":"insert","lines":[" "],"id":44}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":20},"action":"insert","lines":[" "],"id":45}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":20},"action":"remove","lines":[" "],"id":46}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":17},"action":"insert","lines":[" "],"id":47}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":17},"action":"remove","lines":[" "],"id":48},{"start":{"row":11,"column":12},"end":{"row":11,"column":16},"action":"remove","lines":[" "]},{"start":{"row":11,"column":8},"end":{"row":11,"column":12},"action":"remove","lines":[" "]},{"start":{"row":11,"column":4},"end":{"row":11,"column":8},"action":"remove","lines":[" "]},{"start":{"row":11,"column":0},"end":{"row":11,"column":4},"action":"remove","lines":[" "]}],[{"start":{"row":10,"column":45},"end":{"row":11,"column":0},"action":"remove","lines":["",""],"id":49},{"start":{"row":10,"column":44},"end":{"row":10,"column":45},"action":"remove","lines":["'"]}],[{"start":{"row":10,"column":44},"end":{"row":10,"column":45},"action":"insert","lines":["'"],"id":50}],[{"start":{"row":10,"column":38},"end":{"row":10,"column":39},"action":"remove","lines":[" "],"id":51},{"start":{"row":10,"column":37},"end":{"row":10,"column":38},"action":"remove","lines":[" "]},{"start":{"row":10,"column":36},"end":{"row":10,"column":37},"action":"remove","lines":[" "]},{"start":{"row":10,"column":32},"end":{"row":10,"column":36},"action":"remove","lines":[" "]},{"start":{"row":10,"column":28},"end":{"row":10,"column":32},"action":"remove","lines":[" 
"]},{"start":{"row":10,"column":24},"end":{"row":10,"column":28},"action":"remove","lines":[" "]},{"start":{"row":10,"column":20},"end":{"row":10,"column":24},"action":"remove","lines":[" "]},{"start":{"row":10,"column":16},"end":{"row":10,"column":20},"action":"remove","lines":[" "]},{"start":{"row":10,"column":12},"end":{"row":10,"column":16},"action":"remove","lines":[" "]}],[{"start":{"row":10,"column":8},"end":{"row":10,"column":12},"action":"remove","lines":[" "],"id":52},{"start":{"row":10,"column":4},"end":{"row":10,"column":8},"action":"remove","lines":[" "]},{"start":{"row":10,"column":0},"end":{"row":10,"column":4},"action":"remove","lines":[" "]},{"start":{"row":9,"column":56},"end":{"row":10,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":9,"column":56},"end":{"row":9,"column":57},"action":"insert","lines":[" "],"id":53}],[{"start":{"row":9,"column":39},"end":{"row":10,"column":0},"action":"insert","lines":["",""],"id":54},{"start":{"row":10,"column":0},"end":{"row":10,"column":28},"action":"insert","lines":[" "]}],[{"start":{"row":10,"column":52},"end":{"row":11,"column":0},"action":"insert","lines":["",""],"id":55},{"start":{"row":11,"column":0},"end":{"row":11,"column":28},"action":"insert","lines":[" "]}],[{"start":{"row":11,"column":24},"end":{"row":11,"column":28},"action":"remove","lines":[" "],"id":56}],[{"start":{"row":17,"column":41},"end":{"row":17,"column":42},"action":"remove","lines":[" "],"id":57},{"start":{"row":17,"column":40},"end":{"row":17,"column":41},"action":"remove","lines":[" "]},{"start":{"row":17,"column":36},"end":{"row":17,"column":40},"action":"remove","lines":[" "]}],[{"start":{"row":17,"column":36},"end":{"row":17,"column":37},"action":"insert","lines":[" "],"id":58},{"start":{"row":17,"column":37},"end":{"row":17,"column":38},"action":"insert","lines":[" "]}],[{"start":{"row":23,"column":0},"end":{"row":23,"column":4},"action":"remove","lines":[" 
"],"id":59}]]},"ace":{"folds":[],"scrolltop":199.04411764705884,"scrollleft":0,"selection":{"start":{"row":4,"column":0},"end":{"row":38,"column":0},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":{"row":10,"state":"start","mode":"ace/mode/python"}},"timestamp":1588050830706,"hash":"97b5a1bf73781825fea117ab0236a3a13f64600e"} | [
"bharathi151273@gmail.com"
] | bharathi151273@gmail.com |
150395993458583ad6f83dfef4906e8d9512c6f0 | 0007ba97130140d0b9d608ece9879323c6dc5f85 | /11.py | 4ba1155f2341cf8426747753e3994fa4a3004dc4 | [] | no_license | Ashgomathi/ash | d0a4fb79fc8b15bb286d19afc121671a0ca8b79c | 83c879c570e8abc261069574ee671ddee042664a | refs/heads/master | 2020-06-10T10:32:58.875287 | 2019-07-27T09:07:40 | 2019-07-27T09:07:40 | 193,635,194 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | ashu,vara=map(int,input().split())
# Raise the first input number to the power of the second and print the
# result (ashu and vara are read from stdin on the preceding line).
efghz=ashu**vara
print(efghz)
| [
"noreply@github.com"
] | Ashgomathi.noreply@github.com |
7f78ff3bbfee0ec659df2d2fe6639af9fe66f59b | 72b00923d4aa11891f4a3038324c8952572cc4b2 | /python/datastruct/dd_oob/pgm06_13.txt | 68d0a171c8d350cdcfdc58f5ebe0b45790150e1e | [] | no_license | taowuwen/codec | 3698110a09a770407e8fb631e21d86ba5a885cd5 | d92933b07f21dae950160a91bb361fa187e26cd2 | refs/heads/master | 2022-03-17T07:43:55.574505 | 2022-03-10T05:20:44 | 2022-03-10T05:20:44 | 87,379,261 | 0 | 0 | null | 2019-03-25T15:40:27 | 2017-04-06T02:50:54 | C | UTF-8 | Python | false | false | 1,058 | txt | #
# This file contains the Python code from Program 6.13 of
# "Data Structures and Algorithms
# with Object-Oriented Design Patterns in Python"
# by Bruno R. Preiss.
#
# Copyright (c) 2003 by Bruno R. Preiss, P.Eng. All rights reserved.
#
# http://www.brpreiss.com/books/opus7/programs/pgm06_13.txt
#
class QueueAsArray(Queue):
    """Bounded FIFO queue over a fixed-size circular buffer.

    ``_head`` indexes the front element and ``_tail`` the most recently
    enqueued one; both wrap back to 0 at the end of ``_array`` while
    ``_count`` tracks the number of occupied slots.
    """

    def getHead(self):
        """Return the front element without removing it.

        Raises ContainerEmpty when the queue is empty.
        """
        if self._count == 0:
            raise ContainerEmpty
        return self._array[self._head]

    def enqueue(self, obj):
        """Store *obj* behind the current tail.

        Raises ContainerFull when every slot in the buffer is occupied.
        """
        if self._count == len(self._array):
            raise ContainerFull
        self._tail += 1
        if self._tail == len(self._array):
            self._tail = 0  # wrap around
        self._array[self._tail] = obj
        self._count += 1

    def dequeue(self):
        """Remove and return the front element.

        Raises ContainerEmpty when the queue is empty. The vacated slot is
        cleared so the queue drops its reference to the dequeued object.
        """
        if self._count == 0:
            raise ContainerEmpty
        result = self._array[self._head]
        self._array[self._head] = None
        self._head += 1
        if self._head == len(self._array):
            self._head = 0  # wrap around
        self._count -= 1
        return result
    # ...
| [
"taowuwen@126.com"
] | taowuwen@126.com |
f6a3a81128f6bbad745e58700064cad349e5b879 | a6cedd26db1bba3120203d976b8541dc1ebaeda2 | /utrace/proc-cpu-data.py | 8a38a74a43fd654b661255253d773084ae94e8e9 | [] | no_license | basicthinker/Sestet-Bench | 388af13de9fc71103a45ac283c46b95cd1c69689 | 139755b4eed6e452b1e38476128209a64739a257 | refs/heads/master | 2020-05-18T21:02:31.257374 | 2013-12-05T06:07:31 | 2013-12-05T06:07:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | import sys
import string
if len(sys.argv) != 2:
print "Usage: python %s CPUTraceFile" % sys.argv[0]
sys.exit(1)
cpu_file = open(sys.argv[1], 'r')
begin_time = float(cpu_file.readline())
for line in cpu_file:
segs=string.split(line, '\t')
print "%.3f\t%s" % (float(segs[0]) - begin_time, segs[1])
cpu_file.close()
| [
"jinglei.ren@gmail.com"
] | jinglei.ren@gmail.com |
74afe0fedab125884016f29fb3eace55cb7f65ef | b869ae6b05182abda4955dc654b99aa0afec04c9 | /controllerClass_2.py | e91ecb02837e29feb7911c8de96dfc956ad1c8d2 | [] | no_license | davjul99/machine-learning-with-au-bom | 242a77b64956cb4e52cfdafbb4529d083796ba14 | 0cb3db356b280a2b53f37904fd1fa0fce7ca2136 | refs/heads/master | 2021-01-16T21:30:22.712362 | 2017-08-14T10:58:51 | 2017-08-14T10:58:51 | 100,235,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,675 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Jul 16 10:16:09 2017
@author: Home
"""
from urllib.request import urlopen
import json
import os
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
import numpy as np
import matplotlib.pyplot as plt
url='http://www.bom.gov.au/fwo/IDN60801/IDN60801.94599.json'
paraLst = ['air_temp', 'aifstime_utc' ]
class dataUte(object):
    """Fetch and explore a BOM (Bureau of Meteorology) JSON observation feed.

    The feed is downloaded once in __init__ and the parsed JSON cached in
    ``self.jdata``; the get* helpers then slice numeric series out of it.
    """

    # NOTE(review): class-level mutable attributes shared by all instances;
    # neither is used by the methods below.
    data =[]
    siteData=[]
    def __init__(self, paraList, name, url):
        # paraList: observation field names of interest (e.g. 'air_temp');
        # name: human-readable site label; url: BOM JSON feed endpoint.
        self.paraList = paraList
        self.name = name
        self.url = url
        self.jdata=self.loadJson(url)  # network fetch happens here
    def loadJson(self, url):
        """Download *url* and return the decoded JSON document."""
        response = urlopen(url)
        data = response.read().decode("utf-8")
        return(json.loads(data))
    def getObsData(self):
        """Return the list of observation records from the cached feed."""
        #sdata= self.loadJson(url)
        return(self.jdata['observations']['data'])
    def getParaData(self,para):
        """Return field *para* across all observations, coerced to floats."""
        d=[]
        obs=self.getObsData()
        for ob in obs:
            d.append(float(ob[para]))
        return(d)
    def getSiteData(self,paraLst):
        """Return a list of float series, one per field name in *paraLst*."""
        d=[]
        for para in paraLst:
            d.append(self.getParaData(para))
        return(d)
    def getParamDict(dom, param):
        # NOTE(review): missing ``self`` — an instance call passes the
        # instance as ``dom``; only sensible as dataUte.getParamDict(d, k).
        return(dom[param])
    #na=np.array(getSiteData(paraLst))
    def makeDf(data, labels):
        # NOTE(review): also missing ``self``; wraps the first positional
        # argument in a DataFrame with *labels* as the index.
        return(pd.DataFrame(data, labels))
    def plotData(self, y):
        """Plot the first 218 air-temperature samples with matplotlib.

        The *y* argument is ignored — it is overwritten immediately below.
        """
        y=self.getSiteData(['air_temp'])
        x=self.getSiteData(['aifstime_utc'])  # fetched but never plotted
        for i in self.paraList:
            self.getSiteData  # NOTE(review): attribute lookup only, no call
        y=y[0][0:218]
        #x=x[0][:]
        plt.axis([0,48,0,40])
        plt.grid()
        plt.plot(y)
        plt.show()
#d=getSiteData(paraLst)
#bldata(sites)
#paraLst = ['air_temp', 'aifstime_utc' ]
# Script driver: fetch the latest Ballina observations at import time and
# set up exploratory sklearn preprocessing objects.
paraLst = ['air_temp', 'wind_spd_kmh', 'rel_hum' ]
url='http://www.bom.gov.au/fwo/IDN60801/IDN60801.94599.json'
d= dataUte(paraLst, 'Ballina', url)  # performs an HTTP GET immediately
from sklearn.preprocessing import PolynomialFeatures
from sklearn.preprocessing import Imputer
import random
obs=d.getSiteData(paraLst)  # list of per-parameter float series
obsT=obs  # NOTE(review): alias, not a copy — both names share one list
pf=PolynomialFeatures()  # created but not used below
import numpy as np
def contImp(lst):
    """Neighbour-based in-place pass over *lst*.

    Scans from the front and returns *lst* as soon as a numeric element
    (int/float/complex/bool) is seen, or once the final index is reached;
    every earlier element is overwritten with abs(previous / next).
    Returns None for an empty list (the loop body never runs).

    NOTE(review): at index 0 the "previous" element is lst[-1], i.e. the
    LAST element — presumably unintended wrap-around; confirm intent.
    """
    last = len(lst) - 1
    for idx, value in enumerate(lst):
        if isinstance(value, (int, float, complex, bool)):
            return lst
        if idx == last:
            return lst
        lst[idx] = abs(lst[idx - 1] / lst[idx + 1])
| [
"noreply@github.com"
] | davjul99.noreply@github.com |
aa7ffba3fe09826c2f34bc1b6e714b134748cd13 | e95c254d6c75109041ea70577392577f9550ee1c | /backend/lib/python3.7/site-packages/django/template/base.py | 52ec516016d0f9ecd5657a4d2ef997d9d640fcbb | [] | no_license | Eloisa-R/tracks-CO2-analytics | 94e689f39b1644d5de7e91bfd6040db11ea1195c | 9ed8de07cd9e0fc4b969c18009e1b0416e2ef12c | refs/heads/master | 2023-03-02T21:03:18.763190 | 2021-02-12T16:35:58 | 2021-02-12T16:35:58 | 336,563,805 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,688 | py | """
This is the Django template system.
How it works:
The Lexer.tokenize() method converts a template string (i.e., a string
containing markup with custom template tags) to tokens, which can be either
plain text (TokenType.TEXT), variables (TokenType.VAR), or block statements
(TokenType.BLOCK).
The Parser() class takes a list of tokens in its constructor, and its parse()
method returns a compiled template -- which is, under the hood, a list of
Node objects.
Each Node is responsible for creating some sort of output -- e.g. simple text
(TextNode), variable values in a given context (VariableNode), results of basic
logic (IfNode), results of looping (ForNode), or anything else. The core Node
types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can
define their own custom node types.
Each Node has a render() method, which takes a Context and returns a string of
the rendered node. For example, the render() method of a Variable Node returns
the variable's value as a string. The render() method of a ForNode returns the
rendered output of whatever was inside the loop, recursively.
The Template class is a convenient wrapper that takes care of template
compilation and rendering.
Usage:
The only thing you should ever use directly in this file is the Template class.
Create a compiled template object with a template_string, then call render()
with a context. In the compilation stage, the TemplateSyntaxError exception
will be raised if the template doesn't have proper syntax.
Sample code:
>>> from django import template
>>> s = '<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>'
>>> t = template.Template(s)
(t is now a compiled template, and its render() method can be called multiple
times with multiple contexts)
>>> c = template.Context({'test':True, 'varvalue': 'Hello'})
>>> t.render(c)
'<html><h1>Hello</h1></html>'
>>> c = template.Context({'test':False, 'varvalue': 'Hello'})
>>> t.render(c)
'<html></html>'
"""
import inspect
import logging
import re
from enum import Enum
from django.template.context import BaseContext
from django.utils.formats import localize
from django.utils.html import conditional_escape, escape
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import SafeData, mark_safe
from django.utils.text import (
get_text_list,
smart_split,
unescape_string_literal,
)
from django.utils.timezone import template_localtime
from django.utils.translation import gettext_lazy, pgettext_lazy
from .exceptions import TemplateSyntaxError
# template syntax constants
FILTER_SEPARATOR = "|"
FILTER_ARGUMENT_SEPARATOR = ":"
VARIABLE_ATTRIBUTE_SEPARATOR = "."
BLOCK_TAG_START = "{%"
BLOCK_TAG_END = "%}"
VARIABLE_TAG_START = "{{"
VARIABLE_TAG_END = "}}"
COMMENT_TAG_START = "{#"
COMMENT_TAG_END = "#}"
TRANSLATOR_COMMENT_MARK = "Translators"
SINGLE_BRACE_START = "{"
SINGLE_BRACE_END = "}"
# what to report as the origin for templates that come from non-loader sources
# (e.g. strings)
UNKNOWN_SOURCE = "<unknown source>"
# match a variable or block tag and capture the entire tag, including start/end
# delimiters
tag_re = _lazy_re_compile(
"(%s.*?%s|%s.*?%s|%s.*?%s)"
% (
re.escape(BLOCK_TAG_START),
re.escape(BLOCK_TAG_END),
re.escape(VARIABLE_TAG_START),
re.escape(VARIABLE_TAG_END),
re.escape(COMMENT_TAG_START),
re.escape(COMMENT_TAG_END),
)
)
logger = logging.getLogger("django.template")
class TokenType(Enum):
    # Lexer token categories: raw template text, a ``{{ var }}`` expression,
    # a ``{% tag %}`` statement, and a ``{# comment #}``. Parser.parse()
    # compares against the raw .value integers, so these must not change.
    TEXT = 0
    VAR = 1
    BLOCK = 2
    COMMENT = 3
class VariableDoesNotExist(Exception):
    """Raised when a template variable fails to resolve against a context.

    ``msg`` is a %-style format string and ``params`` the values
    interpolated into it when the exception is rendered as text.
    """

    def __init__(self, msg, params=()):
        # Deliberately no super().__init__() call, so Exception.__new__
        # leaves .args as the raw constructor arguments.
        self.msg, self.params = msg, params

    def __str__(self):
        return self.msg % self.params
class Origin:
    """Record where a template's source came from.

    ``name`` is the display name (usually a path), ``template_name`` the
    name it was requested under, and ``loader`` the loader instance that
    found it (None for templates built from plain strings).
    """

    def __init__(self, name, template_name=None, loader=None):
        self.name = name
        self.template_name = template_name
        self.loader = loader

    def __str__(self):
        return self.name

    def __eq__(self, other):
        if not isinstance(other, Origin):
            return False
        return self.name == other.name and self.loader == other.loader

    @property
    def loader_name(self):
        """Dotted "module.ClassName" of the loader, or None without one."""
        if not self.loader:
            return None
        return f"{self.loader.__module__}.{type(self.loader).__name__}"
class Template:
def __init__(self, template_string, origin=None, name=None, engine=None):
# If Template is instantiated directly rather than from an Engine and
# exactly one Django template engine is configured, use that engine.
# This is required to preserve backwards-compatibility for direct use
# e.g. Template('...').render(Context({...}))
if engine is None:
from .engine import Engine
engine = Engine.get_default()
if origin is None:
origin = Origin(UNKNOWN_SOURCE)
self.name = name
self.origin = origin
self.engine = engine
self.source = str(template_string) # May be lazy.
self.nodelist = self.compile_nodelist()
def __iter__(self):
for node in self.nodelist:
yield from node
def _render(self, context):
return self.nodelist.render(context)
def render(self, context):
"Display stage -- can be called many times"
with context.render_context.push_state(self):
if context.template is None:
with context.bind_template(self):
context.template_name = self.name
return self._render(context)
else:
return self._render(context)
def compile_nodelist(self):
"""
Parse and compile the template source into a nodelist. If debug
is True and an exception occurs during parsing, the exception is
annotated with contextual line information where it occurred in the
template source.
"""
if self.engine.debug:
lexer = DebugLexer(self.source)
else:
lexer = Lexer(self.source)
tokens = lexer.tokenize()
parser = Parser(
tokens,
self.engine.template_libraries,
self.engine.template_builtins,
self.origin,
)
try:
return parser.parse()
except Exception as e:
if self.engine.debug:
e.template_debug = self.get_exception_info(e, e.token)
raise
def get_exception_info(self, exception, token):
"""
Return a dictionary containing contextual line information of where
the exception occurred in the template. The following information is
provided:
message
The message of the exception raised.
source_lines
The lines before, after, and including the line the exception
occurred on.
line
The line number the exception occurred on.
before, during, after
The line the exception occurred on split into three parts:
1. The content before the token that raised the error.
2. The token that raised the error.
3. The content after the token that raised the error.
total
The number of lines in source_lines.
top
The line number where source_lines starts.
bottom
The line number where source_lines ends.
start
The start position of the token in the template source.
end
The end position of the token in the template source.
"""
start, end = token.position
context_lines = 10
line = 0
upto = 0
source_lines = []
before = during = after = ""
for num, next in enumerate(linebreak_iter(self.source)):
if start >= upto and end <= next:
line = num
before = escape(self.source[upto:start])
during = escape(self.source[start:end])
after = escape(self.source[end:next])
source_lines.append((num, escape(self.source[upto:next])))
upto = next
total = len(source_lines)
top = max(1, line - context_lines)
bottom = min(total, line + 1 + context_lines)
# In some rare cases exc_value.args can be empty or an invalid
# string.
try:
message = str(exception.args[0])
except (IndexError, UnicodeDecodeError):
message = "(Could not get exception message)"
return {
"message": message,
"source_lines": source_lines[top:bottom],
"before": before,
"during": during,
"after": after,
"top": top,
"bottom": bottom,
"total": total,
"line": line,
"name": self.origin.name,
"start": start,
"end": end,
}
def linebreak_iter(template_source):
    """Yield the start offset of every line in *template_source*.

    Emits 0 first, then the index just past each newline, and finally
    len(template_source) + 1 as a sentinel end offset.
    """
    yield 0
    for offset, char in enumerate(template_source):
        if char == "\n":
            yield offset + 1
    yield len(template_source) + 1
class Token:
def __init__(self, token_type, contents, position=None, lineno=None):
"""
A token representing a string from the template.
token_type
A TokenType, either .TEXT, .VAR, .BLOCK, or .COMMENT.
contents
The token source string.
position
An optional tuple containing the start and end index of the token
in the template source. This is used for traceback information
when debug is on.
lineno
The line number the token appears on in the template source.
This is used for traceback information and gettext files.
"""
self.token_type, self.contents = token_type, contents
self.lineno = lineno
self.position = position
def __str__(self):
token_name = self.token_type.name.capitalize()
return '<%s token: "%s...">' % (
token_name,
self.contents[:20].replace("\n", ""),
)
def split_contents(self):
split = []
bits = smart_split(self.contents)
for bit in bits:
# Handle translation-marked template pieces
if bit.startswith(('_("', "_('")):
sentinel = bit[2] + ")"
trans_bit = [bit]
while not bit.endswith(sentinel):
bit = next(bits)
trans_bit.append(bit)
bit = " ".join(trans_bit)
split.append(bit)
return split
class Lexer:
def __init__(self, template_string):
self.template_string = template_string
self.verbatim = False
def tokenize(self):
"""
Return a list of tokens from a given template_string.
"""
in_tag = False
lineno = 1
result = []
for bit in tag_re.split(self.template_string):
if bit:
result.append(self.create_token(bit, None, lineno, in_tag))
in_tag = not in_tag
lineno += bit.count("\n")
return result
def create_token(self, token_string, position, lineno, in_tag):
"""
Convert the given token string into a new Token object and return it.
If in_tag is True, we are processing something that matched a tag,
otherwise it should be treated as a literal string.
"""
if in_tag and token_string.startswith(BLOCK_TAG_START):
# The [2:-2] ranges below strip off *_TAG_START and *_TAG_END.
# We could do len(BLOCK_TAG_START) to be more "correct", but we've
# hard-coded the 2s here for performance. And it's not like
# the TAG_START values are going to change anytime, anyway.
block_content = token_string[2:-2].strip()
if self.verbatim and block_content == self.verbatim:
self.verbatim = False
if in_tag and not self.verbatim:
if token_string.startswith(VARIABLE_TAG_START):
return Token(
TokenType.VAR, token_string[2:-2].strip(), position, lineno
)
elif token_string.startswith(BLOCK_TAG_START):
if block_content[:9] in ("verbatim", "verbatim "):
self.verbatim = "end%s" % block_content
return Token(TokenType.BLOCK, block_content, position, lineno)
elif token_string.startswith(COMMENT_TAG_START):
content = ""
if token_string.find(TRANSLATOR_COMMENT_MARK):
content = token_string[2:-2].strip()
return Token(TokenType.COMMENT, content, position, lineno)
else:
return Token(TokenType.TEXT, token_string, position, lineno)
class DebugLexer(Lexer):
def tokenize(self):
"""
Split a template string into tokens and annotates each token with its
start and end position in the source. This is slower than the default
lexer so only use it when debug is True.
"""
lineno = 1
result = []
upto = 0
for match in tag_re.finditer(self.template_string):
start, end = match.span()
if start > upto:
token_string = self.template_string[upto:start]
result.append(
self.create_token(token_string, (upto, start), lineno, in_tag=False)
)
lineno += token_string.count("\n")
token_string = self.template_string[start:end]
result.append(
self.create_token(token_string, (start, end), lineno, in_tag=True)
)
lineno += token_string.count("\n")
upto = end
last_bit = self.template_string[upto:]
if last_bit:
result.append(
self.create_token(
last_bit, (upto, upto + len(last_bit)), lineno, in_tag=False
)
)
return result
class Parser:
def __init__(self, tokens, libraries=None, builtins=None, origin=None):
# Reverse the tokens so delete_first_token(), prepend_token(), and
# next_token() can operate at the end of the list in constant time.
self.tokens = list(reversed(tokens))
self.tags = {}
self.filters = {}
self.command_stack = []
if libraries is None:
libraries = {}
if builtins is None:
builtins = []
self.libraries = libraries
for builtin in builtins:
self.add_library(builtin)
self.origin = origin
def parse(self, parse_until=None):
"""
Iterate through the parser tokens and compiles each one into a node.
If parse_until is provided, parsing will stop once one of the
specified tokens has been reached. This is formatted as a list of
tokens, e.g. ['elif', 'else', 'endif']. If no matching token is
reached, raise an exception with the unclosed block tag details.
"""
if parse_until is None:
parse_until = []
nodelist = NodeList()
while self.tokens:
token = self.next_token()
# Use the raw values here for TokenType.* for a tiny performance boost.
if token.token_type.value == 0: # TokenType.TEXT
self.extend_nodelist(nodelist, TextNode(token.contents), token)
elif token.token_type.value == 1: # TokenType.VAR
if not token.contents:
raise self.error(
token, "Empty variable tag on line %d" % token.lineno
)
try:
filter_expression = self.compile_filter(token.contents)
except TemplateSyntaxError as e:
raise self.error(token, e)
var_node = VariableNode(filter_expression)
self.extend_nodelist(nodelist, var_node, token)
elif token.token_type.value == 2: # TokenType.BLOCK
try:
command = token.contents.split()[0]
except IndexError:
raise self.error(token, "Empty block tag on line %d" % token.lineno)
if command in parse_until:
# A matching token has been reached. Return control to
# the caller. Put the token back on the token list so the
# caller knows where it terminated.
self.prepend_token(token)
return nodelist
# Add the token to the command stack. This is used for error
# messages if further parsing fails due to an unclosed block
# tag.
self.command_stack.append((command, token))
# Get the tag callback function from the ones registered with
# the parser.
try:
compile_func = self.tags[command]
except KeyError:
self.invalid_block_tag(token, command, parse_until)
# Compile the callback into a node object and add it to
# the node list.
try:
compiled_result = compile_func(self, token)
except Exception as e:
raise self.error(token, e)
self.extend_nodelist(nodelist, compiled_result, token)
# Compile success. Remove the token from the command stack.
self.command_stack.pop()
if parse_until:
self.unclosed_block_tag(parse_until)
return nodelist
def skip_past(self, endtag):
while self.tokens:
token = self.next_token()
if token.token_type == TokenType.BLOCK and token.contents == endtag:
return
self.unclosed_block_tag([endtag])
def extend_nodelist(self, nodelist, node, token):
# Check that non-text nodes don't appear before an extends tag.
if node.must_be_first and nodelist.contains_nontext:
raise self.error(
token,
"%r must be the first tag in the template." % node,
)
if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
nodelist.contains_nontext = True
# Set origin and token here since we can't modify the node __init__()
# method.
node.token = token
node.origin = self.origin
nodelist.append(node)
def error(self, token, e):
"""
Return an exception annotated with the originating token. Since the
parser can be called recursively, check if a token is already set. This
ensures the innermost token is highlighted if an exception occurs,
e.g. a compile error within the body of an if statement.
"""
if not isinstance(e, Exception):
e = TemplateSyntaxError(e)
if not hasattr(e, "token"):
e.token = token
return e
def invalid_block_tag(self, token, command, parse_until=None):
if parse_until:
raise self.error(
token,
"Invalid block tag on line %d: '%s', expected %s. Did you "
"forget to register or load this tag?"
% (
token.lineno,
command,
get_text_list(["'%s'" % p for p in parse_until], "or"),
),
)
raise self.error(
token,
"Invalid block tag on line %d: '%s'. Did you forget to register "
"or load this tag?" % (token.lineno, command),
)
def unclosed_block_tag(self, parse_until):
command, token = self.command_stack.pop()
msg = "Unclosed tag on line %d: '%s'. Looking for one of: %s." % (
token.lineno,
command,
", ".join(parse_until),
)
raise self.error(token, msg)
def next_token(self):
return self.tokens.pop()
def prepend_token(self, token):
self.tokens.append(token)
def delete_first_token(self):
del self.tokens[-1]
def add_library(self, lib):
self.tags.update(lib.tags)
self.filters.update(lib.filters)
def compile_filter(self, token):
"""
Convenient wrapper for FilterExpression
"""
return FilterExpression(token, self)
def find_filter(self, filter_name):
if filter_name in self.filters:
return self.filters[filter_name]
else:
raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
# This only matches constant *strings* (things in quotes or marked for
# translation). Numbers are treated as variables for implementation reasons
# (so that they retain their type when passed to filters).
constant_string = r"""
(?:%(i18n_open)s%(strdq)s%(i18n_close)s|
%(i18n_open)s%(strsq)s%(i18n_close)s|
%(strdq)s|
%(strsq)s)
""" % {
"strdq": r'"[^"\\]*(?:\\.[^"\\]*)*"', # double-quoted string
"strsq": r"'[^'\\]*(?:\\.[^'\\]*)*'", # single-quoted string
"i18n_open": re.escape("_("),
"i18n_close": re.escape(")"),
}
constant_string = constant_string.replace("\n", "")
filter_raw_string = r"""
^(?P<constant>%(constant)s)|
^(?P<var>[%(var_chars)s]+|%(num)s)|
(?:\s*%(filter_sep)s\s*
(?P<filter_name>\w+)
(?:%(arg_sep)s
(?:
(?P<constant_arg>%(constant)s)|
(?P<var_arg>[%(var_chars)s]+|%(num)s)
)
)?
)""" % {
"constant": constant_string,
"num": r"[-+\.]?\d[\d\.e]*",
"var_chars": r"\w\.",
"filter_sep": re.escape(FILTER_SEPARATOR),
"arg_sep": re.escape(FILTER_ARGUMENT_SEPARATOR),
}
filter_re = _lazy_re_compile(filter_raw_string, re.VERBOSE)
class FilterExpression:
"""
Parse a variable token and its optional filters (all as a single string),
and return a list of tuples of the filter name and arguments.
Sample::
>>> token = 'variable|default:"Default value"|date:"Y-m-d"'
>>> p = Parser('')
>>> fe = FilterExpression(token, p)
>>> len(fe.filters)
2
>>> fe.var
<Variable: 'variable'>
"""
def __init__(self, token, parser):
self.token = token
matches = filter_re.finditer(token)
var_obj = None
filters = []
upto = 0
for match in matches:
start = match.start()
if upto != start:
raise TemplateSyntaxError(
"Could not parse some characters: "
"%s|%s|%s" % (token[:upto], token[upto:start], token[start:])
)
if var_obj is None:
var, constant = match["var"], match["constant"]
if constant:
try:
var_obj = Variable(constant).resolve({})
except VariableDoesNotExist:
var_obj = None
elif var is None:
raise TemplateSyntaxError(
"Could not find variable at " "start of %s." % token
)
else:
var_obj = Variable(var)
else:
filter_name = match["filter_name"]
args = []
constant_arg, var_arg = match["constant_arg"], match["var_arg"]
if constant_arg:
args.append((False, Variable(constant_arg).resolve({})))
elif var_arg:
args.append((True, Variable(var_arg)))
filter_func = parser.find_filter(filter_name)
self.args_check(filter_name, filter_func, args)
filters.append((filter_func, args))
upto = match.end()
if upto != len(token):
raise TemplateSyntaxError(
"Could not parse the remainder: '%s' "
"from '%s'" % (token[upto:], token)
)
self.filters = filters
self.var = var_obj
def resolve(self, context, ignore_failures=False):
if isinstance(self.var, Variable):
try:
obj = self.var.resolve(context)
except VariableDoesNotExist:
if ignore_failures:
obj = None
else:
string_if_invalid = context.template.engine.string_if_invalid
if string_if_invalid:
if "%s" in string_if_invalid:
return string_if_invalid % self.var
else:
return string_if_invalid
else:
obj = string_if_invalid
else:
obj = self.var
for func, args in self.filters:
arg_vals = []
for lookup, arg in args:
if not lookup:
arg_vals.append(mark_safe(arg))
else:
arg_vals.append(arg.resolve(context))
if getattr(func, "expects_localtime", False):
obj = template_localtime(obj, context.use_tz)
if getattr(func, "needs_autoescape", False):
new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
else:
new_obj = func(obj, *arg_vals)
if getattr(func, "is_safe", False) and isinstance(obj, SafeData):
obj = mark_safe(new_obj)
else:
obj = new_obj
return obj
def args_check(name, func, provided):
provided = list(provided)
# First argument, filter input, is implied.
plen = len(provided) + 1
# Check to see if a decorator is providing the real function.
func = inspect.unwrap(func)
args, _, _, defaults, _, _, _ = inspect.getfullargspec(func)
alen = len(args)
dlen = len(defaults or [])
# Not enough OR Too many
if plen < (alen - dlen) or plen > alen:
raise TemplateSyntaxError(
"%s requires %d arguments, %d provided" % (name, alen - dlen, plen)
)
return True
args_check = staticmethod(args_check)
def __str__(self):
return self.token
class Variable:
"""
A template variable, resolvable against a given context. The variable may
be a hard-coded string (if it begins and ends with single or double quote
marks)::
>>> c = {'article': {'section':'News'}}
>>> Variable('article.section').resolve(c)
'News'
>>> Variable('article').resolve(c)
{'section': 'News'}
>>> class AClass: pass
>>> c = AClass()
>>> c.article = AClass()
>>> c.article.section = 'News'
(The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
"""
def __init__(self, var):
self.var = var
self.literal = None
self.lookups = None
self.translate = False
self.message_context = None
if not isinstance(var, str):
raise TypeError("Variable must be a string or number, got %s" % type(var))
try:
# First try to treat this variable as a number.
#
# Note that this could cause an OverflowError here that we're not
# catching. Since this should only happen at compile time, that's
# probably OK.
# Try to interpret values containing a period or an 'e'/'E'
# (possibly scientific notation) as a float; otherwise, try int.
if "." in var or "e" in var.lower():
self.literal = float(var)
# "2." is invalid
if var.endswith("."):
raise ValueError
else:
self.literal = int(var)
except ValueError:
# A ValueError means that the variable isn't a number.
if var.startswith("_(") and var.endswith(")"):
# The result of the lookup should be translated at rendering
# time.
self.translate = True
var = var[2:-1]
# If it's wrapped with quotes (single or double), then
# we're also dealing with a literal.
try:
self.literal = mark_safe(unescape_string_literal(var))
except ValueError:
# Otherwise we'll set self.lookups so that resolve() knows we're
# dealing with a bonafide variable
if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + "_") > -1 or var[0] == "_":
raise TemplateSyntaxError(
"Variables and attributes may "
"not begin with underscores: '%s'" % var
)
self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))
def resolve(self, context):
"""Resolve this variable against a given context."""
if self.lookups is not None:
# We're dealing with a variable that needs to be resolved
value = self._resolve_lookup(context)
else:
# We're dealing with a literal, so it's already been "resolved"
value = self.literal
if self.translate:
is_safe = isinstance(value, SafeData)
msgid = value.replace("%", "%%")
msgid = mark_safe(msgid) if is_safe else msgid
if self.message_context:
return pgettext_lazy(self.message_context, msgid)
else:
return gettext_lazy(msgid)
return value
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.var)
def __str__(self):
return self.var
def _resolve_lookup(self, context):
"""
Perform resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead.
"""
current = context
try: # catch-all for silent variable failures
for bit in self.lookups:
try: # dictionary lookup
current = current[bit]
# ValueError/IndexError are for numpy.array lookup on
# numpy < 1.9 and 1.9+ respectively
except (TypeError, AttributeError, KeyError, ValueError, IndexError):
try: # attribute lookup
# Don't return class attributes if the class is the context:
if isinstance(current, BaseContext) and getattr(
type(current), bit
):
raise AttributeError
current = getattr(current, bit)
except (TypeError, AttributeError):
# Reraise if the exception was raised by a @property
if not isinstance(current, BaseContext) and bit in dir(current):
raise
try: # list-index lookup
current = current[int(bit)]
except (
IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError,
): # unsubscriptable object
raise VariableDoesNotExist(
"Failed lookup for key " "[%s] in %r", (bit, current)
) # missing attribute
if callable(current):
if getattr(current, "do_not_call_in_templates", False):
pass
elif getattr(current, "alters_data", False):
current = context.template.engine.string_if_invalid
else:
try: # method call (assuming no args required)
current = current()
except TypeError:
signature = inspect.signature(current)
try:
signature.bind()
except TypeError: # arguments *were* required
current = (
context.template.engine.string_if_invalid
) # invalid method call
else:
raise
except Exception as e:
template_name = getattr(context, "template_name", None) or "unknown"
logger.debug(
"Exception while resolving variable '%s' in template '%s'.",
bit,
template_name,
exc_info=True,
)
if getattr(e, "silent_variable_failure", False):
current = context.template.engine.string_if_invalid
else:
raise
return current
class Node:
    """Base class for all compiled template nodes."""

    # Set this to True for nodes that must be first in the template (although
    # they can be preceded by text nodes.)
    must_be_first = False
    # Attribute names searched by get_nodes_by_type() for child node lists.
    child_nodelists = ("nodelist",)
    # The source Token; assigned by Parser.extend_nodelist() after creation.
    token = None
    def render(self, context):
        """
        Return the node rendered as a string.
        """
        # Base implementation renders nothing (returns None); subclasses
        # override this.
        pass
    def render_annotated(self, context):
        """
        Render the node. If debug is True and an exception occurs during
        rendering, the exception is annotated with contextual line information
        where it occurred in the template. For internal usage this method is
        preferred over using the render method directly.
        """
        try:
            return self.render(context)
        except Exception as e:
            # Only annotate once (hasattr check), so the innermost failing
            # node wins when templates nest.
            if context.template.engine.debug and not hasattr(e, "template_debug"):
                e.template_debug = context.render_context.template.get_exception_info(
                    e, self.token
                )
            raise
    def __iter__(self):
        # A plain node iterates as just itself; container nodes override.
        yield self
    def get_nodes_by_type(self, nodetype):
        """
        Return a list of all nodes (within this node and its nodelist)
        of the given type
        """
        nodes = []
        if isinstance(self, nodetype):
            nodes.append(self)
        # Recurse into each child nodelist attribute, if present and non-empty.
        for attr in self.child_nodelists:
            nodelist = getattr(self, attr, None)
            if nodelist:
                nodes.extend(nodelist.get_nodes_by_type(nodetype))
        return nodes
class NodeList(list):
    """A list of compiled nodes that renders by concatenating its children."""

    # Flipped to True the first time extend_nodelist() inserts a
    # non-TextNode.
    contains_nontext = False

    def render(self, context):
        """Render every child in order and join the results as safe text."""
        rendered = [
            str(
                child.render_annotated(context)
                if isinstance(child, Node)
                else child
            )
            for child in self
        ]
        return mark_safe("".join(rendered))

    def get_nodes_by_type(self, nodetype):
        "Return a list of all nodes of the given type"
        found = []
        for child in self:
            found.extend(child.get_nodes_by_type(nodetype))
        return found
class TextNode(Node):
    """Leaf node wrapping a literal run of template text."""

    def __init__(self, s):
        self.s = s

    def __repr__(self):
        preview = self.s[:25]
        return "<%s: %r>" % (self.__class__.__name__, preview)

    def render(self, context):
        # Literal text is context-independent: it renders as itself.
        return self.s
def render_value_in_context(value, context):
    """
    Convert any value to a string to become part of a rendered template. This
    means escaping, if required, and conversion to a string. If value is a
    string, it's expected to already be translated.
    """
    # Apply the context's timezone and localization settings before
    # stringifying, so datetimes/numbers honour the active flags.
    value = template_localtime(value, use_tz=context.use_tz)
    value = localize(value, use_l10n=context.use_l10n)
    if context.autoescape:
        # Leave str and its subclasses (e.g. SafeData) untouched so
        # conditional_escape can honour their safety; convert anything
        # else to text first.
        if not issubclass(type(value), str):
            value = str(value)
        return conditional_escape(value)
    else:
        return str(value)
class VariableNode(Node):
    """Node rendering a single ``{{ variable|filters }}`` expression."""

    def __init__(self, filter_expression):
        # A FilterExpression compiled by Parser.compile_filter().
        self.filter_expression = filter_expression
    def __repr__(self):
        return "<Variable Node: %s>" % self.filter_expression
    def render(self, context):
        """Resolve the expression against *context* and stringify it."""
        try:
            output = self.filter_expression.resolve(context)
        except UnicodeDecodeError:
            # Unicode conversion can fail sometimes for reasons out of our
            # control (e.g. exception rendering). In that case, we fail
            # quietly.
            return ""
        return render_value_in_context(output, context)
# Regex for token keyword arguments: group 1 captures an optional
# "name=" prefix (the keyword, if present), group 2 the remaining
# value expression.
kwarg_re = _lazy_re_compile(r"(?:(\w+)=)?(.+)")
def token_kwargs(bits, parser, support_legacy=False):
    """
    Parse token keyword arguments and return a dictionary of the arguments
    retrieved from the ``bits`` token list.

    ``bits`` holds the remainder of the token (already split on spaces);
    every bit consumed as an argument is removed from the list in place.

    ``support_legacy`` - if True, the legacy format ``1 as foo`` is accepted
    in addition to the standard ``foo=1`` format.

    Not every remaining bit has to be a keyword argument: parsing stops and
    the dictionary collected so far is returned as soon as a bit does not
    fit the expected format.
    """
    if not bits:
        return {}

    # The first bit decides which syntax the whole run uses.
    first = kwarg_re.match(bits[0])
    uses_kwarg_syntax = first and first[1]
    if not uses_kwarg_syntax:
        # Without "foo=1" syntax, only the legacy "1 as foo" form applies.
        if not support_legacy or len(bits) < 3 or bits[1] != "as":
            return {}

    parsed = {}
    while bits:
        if uses_kwarg_syntax:
            m = kwarg_re.match(bits[0])
            if not m or not m[1]:
                return parsed
            name, raw_value = m.groups()
            del bits[:1]
        else:
            if len(bits) < 3 or bits[1] != "as":
                return parsed
            name, raw_value = bits[2], bits[0]
            del bits[:3]
        parsed[name] = parser.compile_filter(raw_value)
        # The legacy form chains further arguments with "and".
        if bits and not uses_kwarg_syntax:
            if bits[0] != "and":
                return parsed
            del bits[:1]
    return parsed
| [
"rios.rico.eloisa@gmail.com"
] | rios.rico.eloisa@gmail.com |
1cd5ab38d343741400f99a45262f8eb07239dffe | 16b959fa91342ad79e107d2507cf58c82c6febf1 | /Scribble-OSVOS/mypath.py | 7095f67a09e79e089627275c5c9c27ce22b8f2f8 | [] | no_license | lhs34/lhs-scribble | 8c2c065716d2fce09a97c6e9d4bb2cb89196cf13 | 16a0d5c41419452522ee887d760d9425235d93f7 | refs/heads/master | 2020-05-17T16:54:28.929124 | 2019-05-05T02:45:54 | 2019-05-05T02:45:54 | 183,830,335 | 0 | 0 | null | 2019-10-22T23:23:41 | 2019-04-27T23:18:46 | Python | UTF-8 | Python | false | false | 232 | py | class Path(object):
@staticmethod
def db_root_dir():
    """Root directory of the DAVIS dataset."""
    davis_root = './dataset/DAVIS'
    return davis_root
@staticmethod
def save_root_dir():
    """Directory where result files are written."""
    results_root = './results'
    return results_root
@staticmethod
def models_dir():
    """Directory where model checkpoints are stored."""
    models_root = './models'
    return models_root
| [
"laavanye.bahl@gmail.com"
] | laavanye.bahl@gmail.com |
8021c30f05e0cfe76beca6a1e334043b7716b457 | 5162d4005c6f1c992a145d5e0cf8b8400bbeb823 | /homework/3/webapps/webapps/settings.py | 0b6ba8364f8b45907a0df9288a048a28e8ac6647 | [] | no_license | yyoungrrun/Web-Application | e09bfb1c18a66a86aa89b491278fbb5b91f0eeb8 | 9b4a041c30a846ca4c3482b7c966ced7697d1872 | refs/heads/master | 2021-06-09T14:42:47.906057 | 2016-12-12T03:59:44 | 2016-12-12T03:59:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,379 | py | """
Django settings for webapps project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = '8thhyr61@7-2uo#@4h9pv%tm4*wb0e4eh#np-36jv-v54g-5#u'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'grumblr',  # project application
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'webapps.urls'

# Used by the authentication system for the private-todo-list application.
# URL to use if the authentication system requires a user to log in.
LOGIN_URL = '/grumblr/login'

# Default URL to redirect to after a user logs in.
# NOTE(review): redirecting back to the login page after a successful login
# looks unintentional — confirm this shouldn't point at the app's home view.
LOGIN_REDIRECT_URL = '/grumblr/login'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'webapps.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# Development-only SQLite database stored next to the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/

STATIC_URL = '/static/'
| [
"soap.cmu@gmail.com"
] | soap.cmu@gmail.com |
9a5ba5d34eb81fc909fbba3308e3186794cf923f | 61ddc41adef565519113c785e75b38210d152dce | /notebook/imdb_lstm_notebook.py | a13d02382bb7cf2ae46ea1c25a8eb8ecb205cf34 | [
"MIT"
] | permissive | kamujun/keras_lstm | 866c06fbded6b1b67aeb3bcd68c7bcae2c90c245 | 29b6cc33a84cb1dbd12bdb562dc3d67d626a8e8e | refs/heads/master | 2020-05-31T21:38:41.598961 | 2017-06-12T03:04:02 | 2017-06-12T03:04:54 | 94,045,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,122 | py |
# coding: utf-8
# In[2]:
from __future__ import print_function
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Embedding
from keras.layers import LSTM
from keras.datasets import imdb
from keras.utils import plot_model
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV
# Hyper-parameters for the IMDB sentiment experiment.
max_features = 20000
maxlen = 80  # cut texts after this number of words (among top max_features most common words)
batch_size = 32

# In[3]:

from datetime import datetime
print(datetime.now().strftime("%Y/%m/%d %H:%M:%S"))

# In[4]:

print('Loading data...')
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
print(len(x_train), 'train sequences')
print(len(x_test), 'test sequences')

print('Pad sequences (samples x time)')
# Truncate/pad every review to exactly `maxlen` tokens.
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
# FIX: a second, argument-less pad_sequences pass over each array was
# removed here — the rows are already a uniform `maxlen` wide, so
# re-padding to the batch maximum produced an identical array.
print('x_train shape:', x_train.shape)
print('x_test shape:', x_test.shape)
print('Build model...')
# In[5]:
# try using different optimizers and different optimizer configs
def create_model(activation='sigmoid', drop_out=0.2, recurrent_dropout=0.2):
    """Build and compile the IMDB sentiment LSTM classifier.

    An Embedding layer (max_features -> 128 dims) feeds a 128-unit LSTM
    whose single output unit (sigmoid by default) scores the review.
    Compiled with binary cross-entropy loss and the Adam optimizer.
    """
    net = Sequential()
    net.add(Embedding(max_features, 128))
    net.add(LSTM(128, dropout=drop_out, recurrent_dropout=recurrent_dropout))
    net.add(Dense(1, activation=activation))
    net.compile(
        loss='binary_crossentropy',
        optimizer='adam',
        metrics=['accuracy'],
    )
    return net
# In[6]:
print(datetime.now().strftime("%Y/%m/%d %H:%M:%S"))

# In[6]:

# (A previous, commented-out cell trained a single model directly and
# plotted accuracy/loss; it is superseded by the grid search below.)

# In[8]:

# Grid search over activation and dropout hyper-parameters.
# activations = ["softplus", "softsign", "relu", "tanh", "sigmoid", "hard_sigmoid", "linear"]
activations = ["tanh", "sigmoid"]
drop_outs = [0.2, 0.5]
recurrent_dropouts = [0.2, 0.5]
param_grid = dict(activation=activations, drop_out=drop_outs, recurrent_dropout=recurrent_dropouts)

# FIX: Keras 2 renamed the fit() argument `nb_epoch` to `epochs`.
# KerasClassifier silently drops keyword arguments that fit() does not
# accept, so with `nb_epoch=15` every grid candidate was trained for the
# default single epoch. Pass `epochs=15` so each model really trains
# for 15 epochs.
model = KerasClassifier(build_fn=create_model, epochs=15, batch_size=batch_size, verbose=0)
grid = GridSearchCV(estimator=model, param_grid=param_grid, scoring='accuracy')
grid_result = grid.fit(x_train, y_train)

print(grid_result.best_score_)
print(grid_result.best_params_)

print(datetime.now().strftime("%Y/%m/%d %H:%M:%S"))

# In[ ]:
| [
"tie303217@tisnt.tis.co.jp"
] | tie303217@tisnt.tis.co.jp |
1ddbfb75321c4a6c9628325701f965d26cc4ace3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03254/s765346849.py | 33081586b65e211540fb56775a50c4be338f79f8 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | n, x = map(int, input().split())
# NOTE(review): `n` (count) and `x` (budget) are read on an earlier line
# that is fused into the dataset metadata row above — confirm against the
# original file.
a = list(map(int, input().split()))  # per-item amounts
a.sort()
ans = 0
# Greedily take the smallest n-1 amounts while they fit in the budget.
for i in range(n-1):
    if x >= a[i]:
        x -= a[i]
        ans += 1
# The largest amount counts only if the leftover matches it exactly —
# presumably because the whole budget must be spent (TODO: confirm
# against the problem statement).
if x == a[-1]:
    ans += 1
print(ans)
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.