blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
67cf0989a0e446959fd12ab81bb45409d679494d | c3e5d162626b1a2609838d158059ba7bed94c0f7 | /demo/FileBrowser.py | d56c6eda3ff71758504bc1c7996feecdd7870b5f | [] | no_license | appo-soft/ezWxPython | 19f841e5de18cd3927ccb4bed9ade30e143e0077 | ec70907e3c37046a8b180951a33c40178059e340 | refs/heads/master | 2021-10-26T08:32:12.574629 | 2019-04-11T13:27:48 | 2019-04-11T13:27:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,010 | py | import os
import sys
import time
import ezWxPython as ew
######################################################################
# Handler
######################################################################
def onExit(event):
appWin.close()
def onClose(event):
rv = appWin.messageYesNo("Alert", "Do you want to quit ?" )
return rv
def onAbout(event):
appWin.messageBox("About", "TextFileBrowser Demo\nzdiv")
def onFileBrowse(filename):
browser = ew.getWxCtrl('file')
text = ew.getWxCtrl('text')
text.AppendText(browser.GetValue() + "\n")
text.AppendText(filename + "\n")
def onDirBrowse(filename):
browser = ew.getWxCtrl('dir')
text = ew.getWxCtrl('text')
text.AppendText(browser.GetValue() + "\n")
text.AppendText(filename + "\n")
######################################################################
# Layout
######################################################################
menu_def = {
"File" : {
"Exit" : onExit,
},
"Help" : {
"About" : onAbout
},
}
status_def = [
["Ready", -1],
]
body_def = [
[ ew.FileBrowser("Input File: ", "", "Open", handler=onFileBrowse, key='file', save=False, expand=True, proportion=1, border=1 ) ],
[ ew.FileBrowser("Output Folder: ", "", "Browse", handler=onDirBrowse, key='dir', directory=True, expand=True, proportion=1, border=0 ) ],
[ ew.Text(expand=True,proportion=1,multiline=True,readonly=True,key="text"),
{ 'expand' : True, 'proportion': 1 }
],
]
layout = {
"menu" : menu_def,
"status" : status_def,
"body" : body_def,
}
######################################################################
# Main
######################################################################
if __name__ == "__main__":
appWin = ew.WxApp("TextFileBrowser Demo", 600, 480)
appWin.makeLayout(layout)
appWin.closeHandle(onClose)
appWin.run()
| [
"noreply@github.com"
] | appo-soft.noreply@github.com |
6ebff9f00af4a85fc31739052bff0ea14033e47b | d80d822dfbfede6a113374b2cc2139112b5f68a5 | /model/interpolations/poly_interp.py | 68b30bd8cdfa51bcebfff8abae16679a69cbc956 | [
"MIT"
] | permissive | ibodumas/missing_stock_prices | dba27676f9dc6ab0146c88154e3e7478083ec829 | ce2429cf91d5bd863093d32a56c1158eb2dc6a09 | refs/heads/master | 2021-07-09T09:56:08.553189 | 2020-07-06T19:24:24 | 2020-07-06T19:24:24 | 155,148,586 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 789 | py | """
Polynomial Interpolation:
This is a generalization of linear interpolation.
"""
##
from model import data_processing
import util
from model.estimator import PolyInterpEstimator
from sklearn.model_selection import GridSearchCV
from sklearn import metrics
# Train/test splits prepared elsewhere in the project
# (model.data_processing); shapes/dtypes not visible here — TODO confirm.
x_train = data_processing.X_TRAIN
y_train = data_processing.Y_TRAIN
x_test = data_processing.X_TEST
y_test = data_processing.Y_TEST
# Candidate polynomial degrees to try for the interpolation estimator.
degrees = [2, 3, 4, 5, 6, 7, 8, 9, 10]
param_tune = {"param1": degrees}
# Cross-validated grid search over the degree hyper-parameter.
poly = GridSearchCV(
    PolyInterpEstimator(), param_tune, cv=util.CV, scoring=util.MERIC_SCORING
)
poly.fit(x_train, y_train)
util.plot_actual_predicted(x_train, y_train, poly.predict(x_train))
# NOTE(review): bare attribute access below has no effect outside an
# interactive session (nothing is printed or stored) — likely leftover
# from a notebook.
poly.best_params_
py_test_pred_y = poly.predict(x_test)
# Mean absolute error of the tuned model on the held-out test split.
test_poly_err = metrics.mean_absolute_error(py_test_pred_y, y_test)
| [
"ibodumas@gmail.com"
] | ibodumas@gmail.com |
bc1bf685a5ae217fe2edc76a6f7a828c8a350ab3 | 2bd62fd12c93025752f5fded036889e859653f58 | /智慧城市数据分析/show.py | 2c0ffe11a1cc29a28c6a41d81ac7aed1235931ab | [] | no_license | brkming/- | 5e970523986b9fea2d3eccb9c939d85dc0c10c83 | 1966fb8f61b8b0e2322452088610e7fdca83417f | refs/heads/master | 2020-05-24T13:44:50.291037 | 2019-05-18T00:52:55 | 2019-05-18T00:52:55 | 187,296,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,649 | py | import pandas as pd
import matplotlib.pyplot as plt
from pyecharts import Bar
from pyecharts import Page
from pyecharts import Pie
plt.rcParams['font.sans-serif']=['SimHei']
plt.rcParams['axes.unicode_minus']=False
city = pd.read_csv('date_deal.csv')
page = Page()
def bar_1():
    """Render a bar chart of the 10 highest-scoring smart cities.

    Reads the module-level ``city`` DataFrame and appends the chart to
    the module-level ``page``.
    """
    top10_city = city.sort_values(by=['总分'],ascending=False).head(10)
    bar_1=Bar('2016年智慧城市排名前十的城市')
    bar_1.add('智慧城市总分',top10_city['城市'],top10_city['总分'],is_more_utils = True)
    page.add(bar_1)
def bar_2():
    """Render a bar chart of every city ranked by total score (descending).

    Reads the module-level ``city`` DataFrame and appends the chart to
    the module-level ``page``.
    """
    city_all = city.sort_values(by=['总分'],ascending=False)
    bar_2=Bar('2016年智慧城市排名')
    bar_2.add('城市',city_all['城市'],city_all['总分'])
    page.add(bar_2)
def bar_3():
    """Bucket every city's total score into 10-point bands and chart the counts.

    Scores in ``[20, 90)`` are counted into seven decade buckets
    ('20-30' … '80-90'); scores outside that range are ignored, matching
    the original 7-branch elif ladder this replaces.

    Side effects: publishes the band labels / counts through the module
    globals ``x_list`` / ``y_list`` (consumed by ``pie_1``) and appends
    the chart to the module-level ``page``.
    """
    global x_list
    global y_list
    labels = ['20-30', '30-40', '40-50', '50-60', '60-70', '70-80', '80-90']
    counts = [0] * len(labels)
    for score in city['总分']:
        # Map a score directly to its decade bucket (20→0, …, 80→6)
        # instead of walking an if/elif chain.
        if 20 <= score < 90:
            counts[int(score) // 10 - 2] += 1
    x_list = labels
    y_list = counts
    chart = Bar('2016年智慧城市区间分布')
    chart.add('城市', x_list, y_list)
    page.add(chart)
def pie_1():
    """Render the score-band distribution as a pie chart.

    Relies on the module globals ``x_list`` / ``y_list`` populated by
    ``bar_3``, so ``bar_3()`` must run first (as ``main`` does).
    """
    pie_1=Pie('')
    pie_1.add('',x_list,y_list,is_label_show=True)
    page.add(pie_1)
def main():
    """Build every chart in order and render the combined page to disk."""
    # bar_3 must precede pie_1: it fills the globals pie_1 reads.
    for build_chart in (bar_1, bar_2, bar_3, pie_1):
        build_chart()
    page.render()
if __name__=='__main__':
main()
| [
"945694120@qq.com"
] | 945694120@qq.com |
e984ed448f3a0a8dc1424728498e0d9e98beb857 | 0032c98333ffc0efdb920ecca31ab224378880e5 | /rpi-tutorial/Servo2.py | a25b57ffaefc303c79cc41c4e84ef8fd55d8d646 | [] | no_license | raspibrick/install | bd1c6f9a8cb524f2ab5a2c17ad8c5463b768dffa | 96288d6ca21abd8fb993cc376e37c16473b54dd5 | refs/heads/master | 2021-01-10T05:00:39.159879 | 2019-07-25T09:46:04 | 2019-07-25T09:46:04 | 40,703,681 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 759 | py | # Servo2.py
# Two servo motors driven by PCA9685 chip
from smbus import SMBus
from PCA9685 import PWM
import time
i2c_address = 0x40
fPWM = 50
channel = 1
a = 8.5
b = 3
def setup():
    """Open I2C bus 1 and create the global PCA9685 PWM driver at `fPWM` Hz."""
    global pwm
    bus = SMBus(1) # Raspberry Pi revision 2
    pwm = PWM(bus, i2c_address)
    pwm.setFreq(fPWM)
def setDirection(direction):
    """Point the servo on the current `channel` to `direction` (degrees).

    Maps the angle linearly onto a duty cycle, duty = a/180*direction + b
    (so 0 deg -> b, 180 deg -> a + b), then waits 0.5 s for the horn to
    settle before returning.
    """
    duty = a / 180 * direction + b
    pwm.setDuty(channel, duty)
    print "direction =", direction, "-> duty =", duty
    time.sleep(0.5) # allow to settle
print "starting"
setup()
channel = 0
for direction in range(0, 91, 10):
setDirection(direction)
direction = 0
setDirection(0)
channel = 1
for direction in range(0, 91, 10):
setDirection(direction)
direction = 0
setDirection(0)
print "done"
| [
"a2015@pluess.name"
] | a2015@pluess.name |
1ade999b5b7da4ccaf7103e691a2a866f3c700f5 | 7f641dcc2b23df23a09c15f4f2389a6f601c5ff9 | /curso-aula_em_video/Ex 017 - Catetos e hipotenusa.py | d127d02434f7fdb6cf4aefaef2224968c737c6d6 | [] | no_license | euyouer/python-cev | 4a4b8ddb7153db089d868eb6242aca44fe46f312 | d261f2aeadc26b657cce0df3b12e7ce0e2125b24 | refs/heads/master | 2023-07-05T22:11:41.401838 | 2020-06-18T21:05:27 | 2020-06-18T21:05:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | import math
'''co = float(input('Informe o valor do cateto oposto: '))
ca = float(input('Informe o valor do cateto adjacente: '))
hp = sqrt(co**2 + ca**2)
print('O valor da hipotenusa é {:.1f}'.format(hp))'''
hp = math.hypot(float(input('Informe o valor do cateto oposto:')), float(input('Informe o valor do cateto adjacente: ')))
print('O valor da hipotenusa é {:.1f}'.format(hp)) | [
"tiaguinho.alima@gmail.com"
] | tiaguinho.alima@gmail.com |
18980fa6088d413877057a2e6b580e7ea1c5e7ae | a2279597faf557bc701455480d582853f3afc31c | /setup.py | 29d1832bb999e85ed845a63d676ae8670f40cba9 | [
"MIT"
] | permissive | odo22/GV-simulation | 1d92184bbcc1ae19070cb3b64aa8234733849200 | 85c19c0606a8545f6d357f9f42586c90c48b61fe | refs/heads/master | 2021-01-17T17:26:00.068955 | 2016-12-08T10:11:37 | 2016-12-08T10:11:37 | 59,563,241 | 0 | 0 | null | 2016-05-24T10:26:55 | 2016-05-24T10:21:08 | null | UTF-8 | Python | false | false | 1,228 | py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
# Utility function to read the README file.
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    Used to pull README/metadata text into setup(); the context manager
    ensures the file handle is closed promptly (the original leaked it).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()
descrip = ("Simulate light propagation in multilayer thin and/or thick "
"films using the fresnel equations and transfer matrix "
"method.")
data_files = ['README.rst','LICENSE.txt','Changes.txt','manual.pdf']
setup(
name = "tmm",
version = '0.1.4',
author = "Steven Byrnes",
author_email = "steven.byrnes@gmail.com",
description = descrip,
license = "MIT",
keywords = "optics, fresnel, reflection, absorption, photovoltaics, ellipsometry, transfer matrix method",
url = "http://pypi.python.org/pypi/tmm",
packages=['tmm'],
package_data={'tmm':data_files},
package_dir={'tmm': '.'},
long_description=read('README.rst'),
install_requires=["numpy", "scipy"],
classifiers=[
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3"],
)
| [
"odo22@cam.ac.uk"
] | odo22@cam.ac.uk |
09046e44a2b1664051f657a2d66481a3fc496673 | ae501bd68e5f1ee71ba94be7d5e43b3be16e5978 | /webserver/forms.py | bce667deb2e3370fc9d4d3cd1eb9586c92142e61 | [] | no_license | neworld0/flask-webserver | 2047082314634e96db81fa56bd3e68e89dcbe5aa | ead9983ac841f40d5d17b0257570870e919450a1 | refs/heads/master | 2023-07-10T12:46:14.753631 | 2021-08-13T07:49:46 | 2021-08-13T07:49:46 | 393,991,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,331 | py | from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, PasswordField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Length, EqualTo, Email
class QuestionForm(FlaskForm):
    """Form for posting a question: required subject and body (Korean labels/messages)."""
    subject = StringField('제목', validators=[DataRequired('제목은 필수입력 항목입니다.')])
    content = TextAreaField('내용', validators=[DataRequired('내용은 필수입력 항목입니다.')])
class AnswerForm(FlaskForm):
    """Form for posting an answer: a single required body field."""
    content = TextAreaField('내용', validators=[DataRequired('내용은 필수입력 항목입니다.')])
class UserCreateForm(FlaskForm):
    """Sign-up form: username (3-25 chars), matching password pair, valid e-mail."""
    username = StringField('사용자이름', validators=[DataRequired(), Length(min=3, max=25)])
    # password1 must equal password2; the Korean message shows on mismatch.
    password1 = PasswordField('비밀번호', validators=[
        DataRequired(), EqualTo('password2', '비밀번호가 일치하지 않습니다')])
    password2 = PasswordField('비밀번호확인', validators=[DataRequired()])
    email = EmailField('이메일', validators=[DataRequired(), Email()])
class UserLoginForm(FlaskForm):
    """Login form: username (3-25 chars) and password, both required."""
    username = StringField('사용자이름', validators=[DataRequired(), Length(min=3, max=25)])
    password = PasswordField('비밀번호', validators=[DataRequired()])
class CommentForm(FlaskForm):
    """Form for posting a comment: a single required body field."""
    content = TextAreaField('내용', validators=[DataRequired()])
| [
"neworld0@gmail.com"
] | neworld0@gmail.com |
65fd8a519e31899e163edaa90cf64ad9cdce152a | 099771f7461b834d3060f6c65cbfa5533ad3410f | /ui/ui_classes/recover_password_window.py | c6e1d46bcae241ec5b8d28ec72d939af0bf284b8 | [] | no_license | DuncanHook/aYo | cf960940cbfe7f9897804824476caebd26b1d8e5 | 0c5cabad3dea44c0c7d3a470227b75b01bb6b425 | refs/heads/main | 2023-05-12T16:08:17.962335 | 2021-06-01T05:33:04 | 2021-06-01T05:33:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,682 | py | import re
from PyQt5 import uic
from PyQt5.QtWidgets import QWidget
class RecoverPasswordWindow(QWidget):
    #recover account ui
    # Password-recovery dialog: loads recovery.ui and enables the Recover
    # button only while the typed e-mail looks valid.
    def __init__(self):
        """Load the .ui layout and wire button / text-edit signals to handlers."""
        super().__init__()
        uic.loadUi(r"ui\\ui_files\\recovery.ui", self)
        self.recoverCancelBtn.clicked.connect(self.close_window)
        self.recoverBtn.clicked.connect(self.recover_password)
        self.recoveryEmail.textEdited.connect(self.authenticate_email)
    def close_window(self):
        """Close the dialog (Cancel button)."""
        self.close()
    def recover_password(self):
        """Handle the Recover button.

        NOTE(review): currently this only closes the window — no recovery
        e-mail is actually sent; confirm whether that is intentional.
        """
        self.close()
    def authenticate_email(self):
        """Validate the e-mail field on every keystroke.

        On a regex match: clear the error label, enable and restyle the
        Recover button. Otherwise: show "invalid email" and disable it.
        NOTE(review): the pattern only accepts 2-3 letter TLDs, so e.g.
        ".info" addresses are rejected — confirm if that is acceptable.
        """
        regex = '^\S+@\w+\.\w{2,3}$'
        input = self.recoveryEmail.text()  # shadows the builtin `input`
        if re.search(regex, input):
            self.recoveryLbl.setText("")
            self.recoverBtn.setEnabled(True)
            # Enabled look: bright blue background, light text.
            self.recoverBtn.setStyleSheet(
                "QPushButton{\
                    background-color: #3282B8;\
                    color: rgb(228,240,237);\
                    border-style: outset;\
                    border-width: 2px;\
                    border-radius: 10px;\
                    border-color: black;\
                    min-width: 10em;\
                    padding: 6px;\
                }\
                \
                QPushButton:hover{\
                    border-color: lightgray;\
                    background-color: rgb(60, 158, 223);\
                }\
                \
                QPushButton:focus{\
                    border-color: lightgray;\
                    background-color: rgb(60, 158, 223);\
                }\
                \
                QPushButton:pressed {\
                    background-color: #0F4C75;\
                    border-style: inset;\
                }"
            ) #end stylesheet edit
        else:
            self.recoveryLbl.setText("invalid email")
            self.recoverBtn.setEnabled(False)
            # Disabled look: dark background, gray text (no :focus rule).
            self.recoverBtn.setStyleSheet(
                "QPushButton{\
                    background-color: #0F4C75;\
                    color: darkgray;\
                    border-style: outset;\
                    border-width: 2px;\
                    border-radius: 10px;\
                    border-color: black;\
                    min-width: 10em;\
                    padding: 6px;\
                }\
                \
                QPushButton:hover{\
                    border-color: lightgray;\
                    background-color: rgb(60, 158, 223);\
                }\
                \
                QPushButton:pressed {\
                    background-color: #0F4C75;\
                    border-style: inset;\
                }"
            )#end Stylesheet edit
"hookd@spu.edu"
] | hookd@spu.edu |
f6ad90478ac12ffabdd5d912b6ea1c37c37bbc53 | fffff17b75897754f0e3b9cefe981a530526aa62 | /mainvc.py | 2a48df39634c1ceeb85f71a4ffea5ae12d7b1202 | [] | no_license | Dannmp/python-challenge-H3- | b57614b5ca0f58ba2510ab24bb408c6456ee5b13 | 68e0adc130345db9c56d9f4ad67c2551704a9a63 | refs/heads/master | 2022-07-20T13:27:24.420886 | 2020-05-18T18:37:50 | 2020-05-18T18:37:50 | 264,596,633 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,777 | py |
import os
import csv
print ("Election Results")
print("------------------------------------------")
csvpath = os.path.join("election_data.csv")
candidate = []
khan_c = 0
correy_c = 0
li_c = 0
otooley_c = 0
votes = 0
tvotes = 0
with open(csvpath) as csvfile:
reader = csv.reader(csvfile, delimiter = ",")
header = next(reader)
votes += 1
for row in reader:
tvotes = tvotes +1
if row[2] not in candidate:
candidate.append(row[2])
if row[2] == "Khan":
khan_c = khan_c +1
if row[2] == "Correy":
correy_c = correy_c +1
if row[2] == "Li":
li_c = li_c +1
if row[2] == "O'Tooley":
otooley_c = otooley_c +1
percent_Khan = round(khan_c / tvotes *100,2)
percent_correy = round(correy_c / tvotes *100,2)
percent_li = round(li_c / tvotes *100,2)
percent_otooley = round(otooley_c / tvotes *100,2)
list = [percent_otooley, percent_correy, percent_Khan,percent_li]
#Print of votes
print(f'{str("Total Votes:")} {tvotes}')
#Print division
print("-----------------------------------------")
print(f'{"Khan:"} {float(percent_Khan)}{"%"} {"("}{khan_c}{")"}')
print(f'{"Correy:"} {percent_correy}{"%"} {"("}{correy_c}{")"}')
print(f'{"Li:"} {percent_li}{"%"} {"("}{li_c}{")"}')
print(f'{"OTooley:"} {percent_otooley}{"%"} {"("}{otooley_c}{")"}')
print("-----------------------------------------")
maxl = max(list)
if maxl == percent_Khan:
print("Winner:" + "Khan")
if maxl == percent_correy:
print("Winner:" + "Correy")
if maxl == percent_li:
print("Winner:" + "Li")
if maxl == percent_otooley:
print("Winner:" + "O'Tooley")
output_file = os.path.join("py_poll.txt")
with open(output_file,"w") as text_file:
text_file.write("Election Results")
text_file.write("\n")
text_file.write("------------------------")
text_file.write("\n")
text_file.write("Total Votes: "+ str(tvotes))
text_file.write("\n")
text_file.write("------------------------")
text_file.write("\n")
text_file.write("Khan:"+ str(percent_Khan)+"%" + "(" + str(khan_c) +")")
text_file.write("\n")
text_file.write("Correy:" + str(percent_correy)+"%" + "(" + str(correy_c) + ")")
text_file.write("\n")
text_file.write("Li:" + str(percent_li)+"%" + "("+str(li_c)+")")
text_file.write("\n")
text_file.write("OTooley:" + str(percent_otooley)+"%" + "("+str(otooley_c)+")")
text_file.write("\n")
text_file.write("------------------------")
text_file.write("\n")
text_file.write("Winner: " + "Khan")
text_file.write("\n")
text_file.write("------------------------") | [
"noreply@github.com"
] | Dannmp.noreply@github.com |
da532dda2a8eb2ec942d2fe534c334fc921c5b68 | 4fdb75b1e189e6ada10fe7b982d130418ad76267 | /Application/python_stubs/-1007583135/_md5.py | 8924c6d6738536d022f1885d2d6ef70e04eeb987 | [] | no_license | slzgxw/ITAM | 7b4e83703adab3b50262b7b67979a817d037685e | f46dfab105a0849d732d96b68fbb49f9ba42bfd1 | refs/heads/master | 2023-01-06T08:26:57.293004 | 2020-11-04T07:54:47 | 2020-11-04T07:54:47 | 309,931,253 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,472 | py | # encoding: utf-8
# module _md5
# from (built-in)
# by generator 1.147
# no doc
# no imports
# functions
def md5(*args, **kwargs): # real signature unknown
""" Return a new MD5 hash object; optionally initialized with a string. """
pass
# classes
class MD5Type(object):
# no doc
def copy(self, *args, **kwargs): # real signature unknown
""" Return a copy of the hash object. """
pass
def digest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a bytes object. """
pass
def hexdigest(self, *args, **kwargs): # real signature unknown
""" Return the digest value as a string of hexadecimal digits. """
pass
def update(self, *args, **kwargs): # real signature unknown
""" Update this hash object's state with the provided string. """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
digest_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
class __loader__(object):
"""
Meta path import for built-in modules.
All methods are either class or static methods to avoid the need to
instantiate the class.
"""
@classmethod
def create_module(cls, *args, **kwargs): # real signature unknown
""" Create a built-in module """
pass
@classmethod
def exec_module(cls, *args, **kwargs): # real signature unknown
""" Exec a built-in module """
pass
@classmethod
def find_module(cls, *args, **kwargs): # real signature unknown
"""
Find the built-in module.
If 'path' is ever specified then the search is considered a failure.
This method is deprecated. Use find_spec() instead.
"""
pass
@classmethod
def find_spec(cls, *args, **kwargs): # real signature unknown
pass
@classmethod
def get_code(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have code objects. """
pass
@classmethod
def get_source(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have source code. """
pass
@classmethod
def is_package(cls, *args, **kwargs): # real signature unknown
""" Return False as built-in modules are never packages. """
pass
@classmethod
def load_module(cls, *args, **kwargs): # real signature unknown
"""
Load the specified module into sys.modules and return it.
This method is deprecated. Use loader.exec_module instead.
"""
pass
def module_repr(module): # reliably restored by inspect
"""
Return repr for the module.
The method is deprecated. The import machinery does the job itself.
"""
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
_ORIGIN = 'built-in'
__dict__ = None # (!) real value is "mappingproxy({'__module__': '_frozen_importlib', '__doc__': 'Meta path import for built-in modules.\\n\\n All methods are either class or static methods to avoid the need to\\n instantiate the class.\\n\\n ', '_ORIGIN': 'built-in', 'module_repr': <staticmethod object at 0x001D9640>, 'find_spec': <classmethod object at 0x001D9658>, 'find_module': <classmethod object at 0x001D9670>, 'create_module': <classmethod object at 0x001D9688>, 'exec_module': <classmethod object at 0x001D96A0>, 'get_code': <classmethod object at 0x001D96E8>, 'get_source': <classmethod object at 0x001D9730>, 'is_package': <classmethod object at 0x001D9778>, 'load_module': <classmethod object at 0x001D9790>, '__dict__': <attribute '__dict__' of 'BuiltinImporter' objects>, '__weakref__': <attribute '__weakref__' of 'BuiltinImporter' objects>})"
# variables with complex values
__spec__ = None # (!) real value is "ModuleSpec(name='_md5', loader=<class '_frozen_importlib.BuiltinImporter'>, origin='built-in')"
| [
"ganxiangwei@bytedance.com"
] | ganxiangwei@bytedance.com |
cbe4762fa10973f14cbfca4bb56bc18a9d4acbe8 | 8ea6d8e975a3f09718729701df37adbe3465f6ea | /venv/bin/easy_install-3.8 | 8574b495fb8315b86691b6e1b1918d2cadc821aa | [] | no_license | Theworldissosmall/api-test-advertiseconf | f3dad6ea82ccdf1d04d4887df4a7cef292c61bac | b4b149bbead4c0ebf3c5595ac6bab994f81e5da4 | refs/heads/master | 2023-01-08T02:30:04.737017 | 2020-10-19T10:44:38 | 2020-10-19T10:44:38 | 305,350,128 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | 8 | #!/Users/tianjianfeng/PycharmProjects/api-test-tian/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.8'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.8')()
)
| [
"RPCOcGyf5W"
] | RPCOcGyf5W |
d0585631be5a98578e7397c70df0b3441eda5577 | 72d6b3ab3fc2c7014967a156de082d1c617cbf0f | /操作数据库/使用Flask连接MySQL_将话务数据入库.py | 04fe27cd42ae98af04094148fdc468a3a171760e | [] | no_license | fengmingshan/python | 19a1732591ad061a8291c7c84e6f00200c106f38 | b35dbad091c9feb47d1f0edd82e568c066f3c6e9 | refs/heads/master | 2021-06-03T08:35:50.019745 | 2021-01-19T15:12:01 | 2021-01-19T15:12:01 | 117,310,092 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,285 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 1 10:16:45 2020
@author: Administrator
"""
from flask import Flask, render_template
from flask_sqlalchemy import SQLAlchemy
import pandas as pd
import os
work_path = 'd:/_python/python/操作数据库/'
os.chdir(work_path)
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "mysql+pymysql://root:a123456@localhost:3306/eric_traffic?charset=utf8"
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config['SQLALCHEMY_COMMMIT_ON_TEARDOWN'] = True
# 建立数据库对象
db = SQLAlchemy(app)
#db = SQLAlchemy(app, use_native_unicode='utf8')
title = ['key',
'week',
'eNodeB',
'EUTRANCELLFDD',
'Acc_WirelessConnSucRate',
'Acc_ERAB_droppingrate',
'AirInterface_Traffic_Volume_UL_MBytes',
'AirInterface_Traffic_Volume_DL_MBytes',
'Int_DownlinkLatency',
'MaxnumberofUEinRRc',
'pmCellDowntimeAuto1',
'pmCellDowntimeMan1',
'Data_Coverage',
'Ava_CellAvail',
'NumofLTERedirectto3G',
'AvgNumberofULActiveUsers',
'AvgNumberofDLActiveUsers',
'DL_Util_of_PRB',
'DLactiveuesum',
'CellPDCPDLbit',
'AvgUserFellThroughput_Mbps'
]
df_eric = pd.read_csv('./爱立信0224-0301_mini.csv', header=None, names=title)
df_eric = df_eric[['key',
'week',
'eNodeB',
'EUTRANCELLFDD',
'Acc_WirelessConnSucRate',
'Acc_ERAB_droppingrate',
'AirInterface_Traffic_Volume_UL_MBytes',
'AirInterface_Traffic_Volume_DL_MBytes',
'Int_DownlinkLatency',
'MaxnumberofUEinRRc',
'AvgNumberofULActiveUsers',
'AvgNumberofDLActiveUsers',
'DL_Util_of_PRB',
'AvgUserFellThroughput_Mbps'
]]
# ORM model class mapping one row of per-cell daily Ericsson LTE traffic
# KPIs to a database table.
class Eric_day(db.Model):
    # declare the table name
    __tablename__ = 'eric_day'
    # column definitions; `key` is the primary key (presumably a
    # cell+date composite string from the CSV's `key` column — TODO confirm)
    key = db.Column(db.String(200), primary_key=True)
    week = db.Column(db.Integer)
    eNodeB = db.Column(db.String(200))
    EUTRANCELLFDD = db.Column(db.String(200))
    Acc_WirelessConnSucRate = db.Column(db.Float)
    Acc_ERAB_droppingrate = db.Column(db.Float)
    AirInterface_Traffic_Volume_UL_MBytes = db.Column(db.Float)
    AirInterface_Traffic_Volume_DL_MBytes = db.Column(db.Float)
    Int_DownlinkLatency = db.Column(db.Float)
    MaxnumberofUEinRRc = db.Column(db.Integer)
    AvgNumberofULActiveUsers = db.Column(db.Float)
    AvgNumberofDLActiveUsers = db.Column(db.Float)
    DL_Util_of_PRB = db.Column(db.Float)
    AvgUserFellThroughput_Mbps = db.Column(db.Float)
    def __repr__(self):
        # Debug-friendly representation: identifying fields plus the two
        # accessibility KPIs.
        return '<User key: {}, week: {}, eNodeB: {}, EUTRANCELLFDD: {}, Acc_WirelessConnSucRate: {}, Acc_ERAB_droppingrate: {}>'.format(
            self.key, self.week, self.eNodeB, self.EUTRANCELLFDD, self.Acc_WirelessConnSucRate, self.Acc_ERAB_droppingrate)
#db.drop_all()
db.create_all()
# =============================================================================
# 导入数据
# =============================================================================
traffic_data = [Eric_day(
key=key,
week=wk,
eNodeB=enb,
EUTRANCELLFDD=cell,
Acc_WirelessConnSucRate=accrate,
Acc_ERAB_droppingrate=drop,
AirInterface_Traffic_Volume_UL_MBytes=uth,
AirInterface_Traffic_Volume_DL_MBytes=dth,
Int_DownlinkLatency=lat,
MaxnumberofUEinRRc=mrrc,
AvgNumberofULActiveUsers=uact,
AvgNumberofDLActiveUsers=dact,
DL_Util_of_PRB=prb,
AvgUserFellThroughput_Mbps=fell
) for key,wk, enb, cell, accrate, drop, uth, dth, lat, mrrc, uact, dact, prb, fell in zip(
df_eric['key'],
df_eric['week'],
df_eric['eNodeB'],
df_eric['EUTRANCELLFDD'],
df_eric['Acc_WirelessConnSucRate'],
df_eric['Acc_ERAB_droppingrate'],
df_eric['AirInterface_Traffic_Volume_UL_MBytes'],
df_eric['AirInterface_Traffic_Volume_DL_MBytes'],
df_eric['Int_DownlinkLatency'],
df_eric['MaxnumberofUEinRRc'],
df_eric['AvgNumberofULActiveUsers'],
df_eric['AvgNumberofDLActiveUsers'],
df_eric['DL_Util_of_PRB'],
df_eric['AvgUserFellThroughput_Mbps']
)]
for item in traffic_data:
db.session.add(item)
db.session.commit()
# 原生SQL语句方式
#db.session.execute(r'insert into user values (8, "wjz", "test123")')
#db.session.execute(r'insert into user values (9, "wjz", "admin123")')
#
#db.session.commit()
# =============================================================================
# 查表
# =============================================================================
# ORM方式
btslist = Eric_day.query.order_by('eNodeB').all()
# 使用class User定义好的格式进行print
for bts in btslist:
print(bts)
# 自定义格式print
for bts in btslist:
print(bts.week, ' ', bts.eNodeB, ' ', bts.EUTRANCELLFDD, ' ', bts.Acc_WirelessConnSucRate, ' ', bts.Acc_ERAB_droppingrate)
# 原生数据库语句_推荐
item = db.session.execute('select * from user order by id asc')
# #将结果集强转为list
item = list(item)
for i in item:
print(i)
# =============================================================================
# 删除内容
# =============================================================================
# ORM方式
# User.query.filter_by(id=6).delete()
# User.query.filter_by(id=7).delete()
# User.query.filter_by(id=8).delete()
# User.query.filter_by(id=9).delete()
# db.session.commit()
#
# 原生SQL语句方式
#db.session.execute(r'delete from user where id = 7')
# db.session.commit()
# =============================================================================
# 修改内容
# =============================================================================
# ORM方式
# User.query.filter_by(id=3).update({'name':'张三'})
# User.query.filter_by(id=4).update({'name':'李四'})
# db.session.commit()
#
# 原生SQL语句方式
#db.session.execute(r'update user set name="李四" where id= 4')
#db.session.execute(r'update user set name="王二" where id= 5')
# db.session.commit()
#
#userlist1 = User.query.order_by('id').all()
| [
"fms_python@163.com"
] | fms_python@163.com |
211f00020a05944b181487ed9873e302b77af690 | 818afe5e04bcba8b8dfcd37c17a797d5a677725d | /baselines/cifar/data_utils.py | 260fe8f7100e8b11845eba45f58022f4ef5c8180 | [
"Apache-2.0"
] | permissive | nizamphoenix/uncertainty-baselines | 01614eea136603def0dc2a942c83b6fb1df589f2 | 0a7fbbac463788533cc5f26109e616971573dd2e | refs/heads/master | 2023-01-31T16:01:18.397839 | 2020-12-14T22:19:15 | 2020-12-14T22:19:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,699 | py | # coding=utf-8
# Copyright 2020 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data utilities for CIFAR-10 and CIFAR-100."""
import functools
from absl import logging
import tensorflow as tf
import tensorflow_datasets as tfds
import tensorflow_probability as tfp
import augment_utils # local file import
tfd = tfp.distributions
def normalize_convert_image(input_image, dtype):
  """Cast an image to `dtype` and standardize it with CIFAR channel statistics."""
  image = tf.image.convert_image_dtype(input_image, dtype)
  cifar_mean = tf.constant([0.4914, 0.4822, 0.4465])
  cifar_std = tf.constant([0.2023, 0.1994, 0.2010])
  return (image - cifar_mean) / cifar_std
def load_dataset(split,
                 batch_size,
                 name,
                 use_bfloat16,
                 normalize=True,
                 drop_remainder=True,
                 proportion=1.0,
                 validation_set=False,
                 validation_proportion=0.05,
                 aug_params=None):
  """Loads CIFAR dataset for training or testing.

  Args:
    split: tfds.Split.
    batch_size: The global batch size to use.
    name: A string indicates whether it is cifar10 or cifar100.
    use_bfloat16: data type, bfloat16 precision or float32.
    normalize: Whether to apply mean-std normalization on features.
    drop_remainder: bool.
    proportion: float, the proportion of dataset to be used.
    validation_set: bool, whether to split a validation set from training data.
    validation_proportion: float, the proportion of training dataset to be used
      as the validation split, if validation_set is set to True.
    aug_params: dict, data augmentation hyper parameters.

  Returns:
    Input function which returns a locally-sharded dataset batch.

  Raises:
    ValueError: if proportion or validation_proportion lies outside [0, 1].
  """
  if proportion < 0. or proportion > 1.:
    raise ValueError('proportion needs to lie in the range [0, 1]')
  if validation_proportion < 0. or validation_proportion > 1.:
    raise ValueError('validation_proportion needs to lie in the range [0, 1]')
  if use_bfloat16:
    dtype = tf.bfloat16
  else:
    dtype = tf.float32
  ds_info = tfds.builder(name).info
  image_shape = ds_info.features['image'].shape
  dataset_size = ds_info.splits['train'].num_examples
  num_classes = ds_info.features['label'].num_classes
  if aug_params is None:
    aug_params = {}
  adaptive_mixup = aug_params.get('adaptive_mixup', False)
  random_augment = aug_params.get('random_augment', False)
  # Fix: read 'augmix' with .get() like every other augmentation flag; the
  # original indexed aug_params['augmix'] inside preprocess(), which raised
  # KeyError whenever the caller supplied aug_params without that key.
  augmix = aug_params.get('augmix', False)
  mixup_alpha = aug_params.get('mixup_alpha', 0)
  ensemble_size = aug_params.get('ensemble_size', 1)
  label_smoothing = aug_params.get('label_smoothing', 0.)
  if adaptive_mixup and 'mixup_coeff' not in aug_params:
    # Hard target in the first epoch!
    aug_params['mixup_coeff'] = tf.ones([ensemble_size, num_classes])
  # One-hot labels are needed whenever labels are mixed or smoothed.
  if mixup_alpha > 0 or label_smoothing > 0:
    onehot = True
  else:
    onehot = False

  def preprocess(image, label):
    """Image preprocessing function."""
    if split == tfds.Split.TRAIN:
      image = tf.image.resize_with_crop_or_pad(
          image, image_shape[0] + 4, image_shape[1] + 4)
      image = tf.image.random_crop(image, image_shape)
      image = tf.image.random_flip_left_right(image)
      # Only random augment for now.
      if random_augment:
        count = aug_params['aug_count']
        augmenter = augment_utils.RandAugment()
        augmented = [augmenter.distort(image) for _ in range(count)]
        image = tf.stack(augmented)
    if split == tfds.Split.TRAIN and augmix:
      augmenter = augment_utils.RandAugment()
      image = _augmix(image, aug_params, augmenter, dtype)
    elif normalize:
      image = normalize_convert_image(image, dtype)
    if split == tfds.Split.TRAIN and onehot:
      label = tf.cast(label, tf.int32)
      label = tf.one_hot(label, num_classes)
    else:
      label = tf.cast(label, dtype)
    return image, label

  if proportion == 1.0:
    if validation_set:
      # Pin the dataset version so the train and validation percent slices
      # are taken from the same builder and stay complementary.
      new_name = '{}:3.*.*'.format(name)
      if split == 'validation':
        new_split = 'train[{}%:]'.format(
            int(100 * (1. - validation_proportion)))
        dataset = tfds.load(new_name, split=new_split, as_supervised=True)
      elif split == tfds.Split.TRAIN:
        new_split = 'train[:{}%]'.format(
            int(100 * (1. - validation_proportion)))
        # Fix: the computed new_split was previously ignored in favor of a
        # hard-coded 'train[:95%]' on the unversioned name, which silently
        # broke any validation_proportion other than 0.05 and could overlap
        # the validation slice.
        dataset = tfds.load(new_name, split=new_split, as_supervised=True)
      # split == tfds.Split.TEST case
      else:
        dataset = tfds.load(name, split=split, as_supervised=True)
    else:
      dataset = tfds.load(name, split=split, as_supervised=True)
  else:
    logging.warning(
        'Subset of training dataset is being used without a validation set.')
    new_name = '{}:3.*.*'.format(name)
    if split == tfds.Split.TRAIN:
      new_split = 'train[:{}%]'.format(int(100 * proportion))
    else:
      new_split = 'test[:{}%]'.format(int(100 * proportion))
    dataset = tfds.load(new_name, split=new_split, as_supervised=True)
  if split == tfds.Split.TRAIN:
    dataset = dataset.shuffle(buffer_size=dataset_size).repeat()
  dataset = dataset.map(preprocess,
                        num_parallel_calls=tf.data.experimental.AUTOTUNE)
  dataset = dataset.batch(batch_size, drop_remainder=drop_remainder)
  if mixup_alpha > 0 and split == tfds.Split.TRAIN:
    if adaptive_mixup:
      dataset = dataset.map(
          functools.partial(adaptive_mixup_aug, batch_size, aug_params),
          num_parallel_calls=8)
    else:
      dataset = dataset.map(
          functools.partial(mixup, batch_size, aug_params),
          num_parallel_calls=8)
  dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)
  return dataset
def augment_and_mix(image, depth, width, prob_coeff, augmenter, dtype):
  """Blend several randomly-augmented chains of `image` (AugMix-style)."""
  # Overall blend between the clean image and the augmentation mixture.
  blend_ratio = tf.squeeze(tfd.Beta([prob_coeff], [prob_coeff]).sample([1]))
  if width > 1:
    chain_weights = tf.squeeze(tfd.Dirichlet([prob_coeff] * width).sample([1]))
  else:
    chain_weights = tf.constant([1.])
  if depth < 0:
    # Negative depth means: draw a random chain length in [1, 3] per chain.
    chain_depths = tf.random.uniform([width],
                                     minval=1,
                                     maxval=4,
                                     dtype=tf.dtypes.int32)
  else:
    chain_depths = tf.constant([depth] * width)
  mixture = tf.cast(tf.zeros_like(image), tf.float32)
  for chain_idx in tf.range(width):
    chain_image = tf.identity(image)
    for _ in tf.range(chain_depths[chain_idx]):
      chain_image = augmenter.distort(chain_image)
    chain_image = normalize_convert_image(chain_image, dtype)
    mixture += chain_weights[chain_idx] * chain_image
  return blend_ratio * mixture + (
      1 - blend_ratio) * normalize_convert_image(image, dtype)
def _augmix(image, params, augmenter, dtype):
  """Stack the clean image with `aug_count` AugMix variants along axis 0."""
  variants = []
  for _ in range(params['aug_count']):
    variants.append(
        augment_and_mix(image, params['augmix_depth'], params['augmix_width'],
                        params['augmix_prob_coeff'], augmenter, dtype))
  clean = normalize_convert_image(image, dtype)
  return tf.stack([clean] + variants, 0)
def mixup(batch_size, aug_params, images, labels):
  """Applies Mixup regularization to a batch of images and labels.

  [1] Hongyi Zhang, Moustapha Cisse, Yann N. Dauphin, David Lopez-Paz
    Mixup: Beyond Empirical Risk Minimization.
    ICLR'18, https://arxiv.org/abs/1710.09412

  Arguments:
    batch_size: The input batch size for images and labels.
    aug_params: Dict of data augmentation hyper parameters.
    images: A batch of images of shape [batch_size, ...]
    labels: A batch of labels of shape [batch_size, num_classes]

  Returns:
    A tuple of (images, labels) with the same dimensions as the input with
    Mixup regularization applied.
  """
  augmix = aug_params.get('augmix', False)
  alpha = aug_params.get('mixup_alpha', 0.)
  aug_count = aug_params.get('aug_count', 3)
  copies = aug_count + 1  # clean image plus its augmented variants
  if augmix:
    lam = tfd.Beta(alpha, alpha).sample([batch_size, copies, 1])
    lam = tf.maximum(lam, 1. - lam)
    image_lam = tf.reshape(lam, [batch_size, copies, 1, 1, 1])
  else:
    lam = tfd.Beta(alpha, alpha).sample([batch_size, 1])
    lam = tf.maximum(lam, 1. - lam)
    image_lam = tf.reshape(lam, [batch_size, 1, 1, 1])
  # Mixup on a single batch is implemented by taking a weighted sum with the
  # same batch in reverse.
  images_mix = images * image_lam + images[::-1] * (1. - image_lam)
  if augmix:
    labels = tf.reshape(
        tf.tile(labels, [1, copies]), [batch_size, copies, -1])
    labels_mix = labels * lam + labels[::-1] * (1. - lam)
    labels_mix = tf.reshape(tf.transpose(
        labels_mix, [1, 0, 2]), [batch_size * copies, -1])
  else:
    labels_mix = labels * lam + labels[::-1] * (1. - lam)
  return images_mix, labels_mix
def adaptive_mixup_aug(batch_size, aug_params, images, labels):
  """Applies Confidence Adjusted Mixup (CAMixup) regularization.

  [1] Hongyi Zhang, Moustapha Cisse, Yann N. Dauphin, David Lopez-Paz
    Mixup: Beyond Empirical Risk Minimization.
    ICLR'18, https://arxiv.org/abs/1710.09412

  Arguments:
    batch_size: The input batch size for images and labels.
    aug_params: Dict of data augmentation hyper parameters.
    images: A batch of images of shape [batch_size, ...]
    labels: A batch of labels of shape [batch_size, num_classes]

  Returns:
    A tuple of (images, labels) with the same dimensions as the input with
    Mixup regularization applied.
  """
  augmix = aug_params['augmix']
  ensemble_size = aug_params['ensemble_size']
  # Per-class mixup coefficients, shape [ensemble_size, num_classes]
  # (see the tf.ones default set up in load_dataset).
  mixup_coeff = aug_params['mixup_coeff']
  scalar_labels = tf.argmax(labels, axis=1)
  alpha = tf.gather(mixup_coeff, scalar_labels, axis=-1)  # 4 x Batch_size
  # Need to filter out elements in alpha which equal to 0.
  greater_zero_indicator = tf.cast(alpha > 0, alpha.dtype)
  less_one_indicator = tf.cast(alpha < 1, alpha.dtype)
  valid_alpha_indicator = tf.cast(
      greater_zero_indicator * less_one_indicator, tf.bool)
  # Sample Beta(alpha, alpha) only where alpha lies in (0, 1); where alpha is
  # 0 or 1 the (degenerate) alpha value itself is used as the mixing weight.
  sampled_alpha = tf.where(valid_alpha_indicator, alpha, 0.1)
  mix_weight = tfd.Beta(sampled_alpha, sampled_alpha).sample()
  mix_weight = tf.where(valid_alpha_indicator, mix_weight, alpha)
  mix_weight = tf.reshape(mix_weight, [ensemble_size * batch_size, 1])
  mix_weight = tf.clip_by_value(mix_weight, 0, 1)
  mix_weight = tf.maximum(mix_weight, 1. - mix_weight)
  images_mix_weight = tf.reshape(mix_weight,
                                 [ensemble_size * batch_size, 1, 1, 1])
  # Mixup on a single batch is implemented by taking a weighted sum with the
  # same batch in reverse.
  if augmix:
    # Assumes images are [copies, batch, H, W, C] here and flattens the
    # augmentation axis into the batch -- TODO confirm against _augmix output.
    images_shape = tf.shape(images)
    images = tf.reshape(tf.transpose(
        images, [1, 0, 2, 3, 4]), [-1, images_shape[2],
                                   images_shape[3], images_shape[4]])
  else:
    images = tf.tile(images, [ensemble_size, 1, 1, 1])
  labels = tf.tile(labels, [ensemble_size, 1])
  images_mix = (
      images * images_mix_weight + images[::-1] * (1. - images_mix_weight))
  labels_mix = labels * mix_weight + labels[::-1] * (1. - mix_weight)
  return images_mix, labels_mix
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
631a4603a2395588b6698181b1d83999a2f36083 | 804b173c943dd537c9c61441c8aae73df311142d | /tools/listpng.py | 5d96aa1656b36d696cc1afe796c605438e14d927 | [
"Artistic-2.0",
"LicenseRef-scancode-public-domain-disclaimer",
"LicenseRef-scancode-public-domain"
] | permissive | simutrans/pak128 | 99a16eb11f64394b6ee601e570a6c760b08a5a9b | 02ff45afdd3003c258c45aeb3d28b3b25f10172e | refs/heads/master | 2023-08-16T19:34:18.099855 | 2023-02-28T20:10:17 | 2023-02-28T20:10:17 | 447,417,851 | 6 | 9 | Artistic-2.0 | 2023-08-14T22:40:54 | 2022-01-13T00:57:07 | Squirrel | UTF-8 | Python | false | false | 1,569 | py | #! /usr/bin/python
# -*- coding: utf-8 -*-
#
# Vladimír Slávik 2008-2011
# Python 2.5-3.1
#
# for Simutrans
# http://www.simutrans.com
#
# code is public domain
#
# read all dat files in all subfolders and produce list of these that have
# wrong color type (other than 24bit)
from __future__ import print_function, division
import os, glob
from socket import ntohl
from struct import unpack
badcounter = 0
allcounter = 0
# Human-readable names for the PNG "color type" byte in the IHDR chunk.
png_types = {
    0: "grayscale",
    2: "RGB",
    3: "indexed",
    4: "grey+alpha",
    6: "RGBA",
}

#-----

def pngType(color, palette):
    """Return a description of a PNG color type and bit depth.

    Unknown color-type codes fall back to their raw numeric value.
    (Fix: the original used a bare ``except:`` around a dict lookup,
    which could mask unrelated errors; ``dict.get`` expresses the
    fallback directly.)
    """
    kind = png_types.get(color, str(color))
    return " color type: %s, %d bpp" % (kind, palette)
#-----
def procFile(png):
    """Inspect one PNG header and report files that are not 8-bit RGB.

    Increments the module-level allcounter for every file seen and
    badcounter for every non-conforming one.
    """
    global badcounter, allcounter
    fh = open(png, 'rb')
    # Skip the 8-byte signature, the IHDR chunk length+type (4+4) and the
    # width+height fields (4+4) to land on the bit-depth byte.
    fh.seek(8 + (4 + 4) + (4 + 4))
    depth = unpack("B", fh.read(1))[0]  # bit depth byte
    color = unpack("B", fh.read(1))[0]  # color type byte
    if (color != 2) or (depth != 8):
        print(png)
        print(pngType(color, depth))
        badcounter = badcounter + 1
    fh.close()
    allcounter = allcounter + 1
#-----
def walkFiles(topdir):
    """Recursively scan topdir for PNG files, honoring statsignore.conf.

    Subdirectories containing a statsignore.conf file are skipped.
    """
    for png_path in glob.glob(os.path.join(topdir, "*.png")):
        procFile(png_path)
    for entry in glob.glob(os.path.join(topdir, "*")):
        if not os.path.isdir(entry):
            continue
        # glob already yields full paths; the extra join is harmless for the
        # absolute paths this script is started with (os.getcwd()).
        marker = os.path.join(topdir, entry, "statsignore.conf")
        if not os.path.exists(marker):
            walkFiles(os.path.join(topdir, entry))
#-----
badcounter = 0
allcounter = 0
walkFiles(os.getcwd())
# Fix: guard against ZeroDivisionError when the tree contains no PNG files.
if allcounter:
    print("%d of %d files bad, or %.2f%%" % (badcounter, allcounter, 100*badcounter/allcounter))
else:
    print("no PNG files found")
# EOF
| [
"vs_cz@e96eaf88-093e-0410-925f-862d53288611"
] | vs_cz@e96eaf88-093e-0410-925f-862d53288611 |
af297092d8e5a8346864f1d4a1c13d185cade044 | 3f2c49ba8e89b68175ee9d07659871c5e3a29da5 | /manage.py | db2a596d1042844dac2078a6cfdff362027137d0 | [] | no_license | Jays-T/practice-writing-code-tests | 241ba6e7df70baaf96e99479535abde21e662698 | 91d1382c37b1021d0b348f00db2fe18ff0176d43 | refs/heads/master | 2023-08-18T04:09:23.599872 | 2021-01-06T15:51:43 | 2021-01-06T15:51:43 | 279,603,889 | 0 | 0 | null | 2021-09-22T19:29:01 | 2020-07-14T14:12:23 | HTML | UTF-8 | Python | false | false | 633 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks for this project."""
    # Point Django at this project's settings unless the caller already set it.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lanthus_clark.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the requested management command (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
# Standard entry point when executed as "python manage.py <command>".
if __name__ == '__main__':
    main()
| [
"j.j.h.traas@gmail.com"
] | j.j.h.traas@gmail.com |
341214ce0a249bddd010f09c10ca7e03d99e3426 | b76615ff745c6d66803506251c3d4109faf50802 | /pyobjc-core/Examples/Scripts/wmEnable.py | df5e11a37bc3d66f12b37c05d46dbc5fd132e7d2 | [
"MIT"
] | permissive | danchr/pyobjc-git | 6ef17e472f54251e283a0801ce29e9eff9c20ac0 | 62b787fddeb381184043c7ff136f1c480755ab69 | refs/heads/master | 2021-01-04T12:24:31.581750 | 2020-02-02T20:43:02 | 2020-02-02T20:43:02 | 240,537,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,513 | py | #!/usr/bin/pythonw
"""
This is an evil undocumented SPI hack that shows how to enable GUI operation
from a console application.
BUNDLES ARE RECOMMENDED, USE THIS AT YOUR OWN RISK!!
"""
import os
import sys
import objc
from Foundation import *
def S(*fragments):
    """Join Objective-C type-encoding byte fragments into one signature."""
    return b"".join(fragments)
# Objective-C type encodings used to build the signatures below.
OSErr = objc._C_SHT  # OSErr is a 16-bit short
OUTPSN = b"o^{ProcessSerialNumber=LL}"  # out-pointer to a ProcessSerialNumber
INPSN = b"n^{ProcessSerialNumber=LL}"  # in-pointer to a ProcessSerialNumber
# (name, signature) pairs loaded from ApplicationServices in WMEnable().
FUNCTIONS = [
    # These two are public API
    ("GetCurrentProcess", S(OSErr, OUTPSN)),
    ("SetFrontProcess", S(OSErr, INPSN)),
    # This is undocumented SPI
    ("CPSSetProcessName", S(OSErr, INPSN, objc._C_CHARPTR)),
    ("CPSEnableForegroundOperation", S(OSErr, INPSN)),
]
def WMEnable(name="Python"):
    """Enable window-manager (GUI) access for a plain console process.

    Loads the public Process Manager API plus undocumented CPS SPI calls
    from ApplicationServices, names the process and brings it to the
    foreground.  Returns True on success, False otherwise.

    Fix: the original used Python 2 ``print >>sys.stderr, ...`` statements,
    which are SyntaxErrors on Python 3; they are replaced with equivalent
    ``print(..., file=sys.stderr)`` calls.
    """
    if not isinstance(name, bytes):
        name = name.encode("utf8")
    mainBundle = NSBundle.mainBundle()
    bPath = os.path.split(os.path.split(os.path.split(sys.executable)[0])[0])[0]
    if mainBundle.bundlePath() == bPath:
        # Already running from an app bundle; nothing to do.
        return True
    bndl = NSBundle.bundleWithPath_(
        objc.pathForFramework(
            "/System/Library/Frameworks/ApplicationServices.framework"
        )
    )
    if bndl is None:
        print("ApplicationServices missing", file=sys.stderr)
        return False
    d = {}
    objc.loadBundleFunctions(bndl, d, FUNCTIONS)
    for (fn, sig) in FUNCTIONS:
        if fn not in d:
            print("Missing", fn, file=sys.stderr)
            return False
    err, psn = d["GetCurrentProcess"](None)
    if err:
        print("GetCurrentProcess", (err, psn), file=sys.stderr)
        return False
    err = d["CPSSetProcessName"](psn, name)
    if err:
        print("CPSSetProcessName", (err, psn), file=sys.stderr)
        return False
    err = d["CPSEnableForegroundOperation"](psn)
    if err:
        print("CPSEnableForegroundOperation", (err, psn), file=sys.stderr)
        return False
    err = d["SetFrontProcess"](psn)
    if err:
        print("SetFrontProcess", (err, psn), file=sys.stderr)
        return False
    return True
class AppDelegate(NSObject):
    # NSApplication delegate: shows a confirmation panel once launched.
    def applicationDidFinishLaunching_(self, sender):
        # NOTE(review): AppKit is only imported inside the __main__ block
        # below; this method relies on that import having happened first.
        rval = AppKit.NSRunAlertPanel("WM Enabled", "WM was enabled!", None, None, None)
        AppKit.NSApp().terminate_(self)
if __name__ == "__main__":
    import sys
    # Name the process after this script; only start the GUI event loop if
    # the window-manager connection could be established.
    if WMEnable(os.path.basename(os.path.splitext(sys.argv[0])[0])):
        import AppKit
        app = AppKit.NSApplication.sharedApplication()
        delegate = AppDelegate.alloc().init()
        app.setDelegate_(delegate)
        app.run()
    else:
        print("WM was not enabled")
"ronaldoussoren@mac.com"
] | ronaldoussoren@mac.com |
23431939ada901e854bbd6ac06687c0c52e512f9 | 23a3c76882589d302b614da5f4be0fc626b4f3cd | /python_modules/dagster/dagster/core/definitions/trigger.py | ac2193f821092157e9e91f5367bb6b2bc68ba5d4 | [
"Apache-2.0"
] | permissive | DavidKatz-il/dagster | 3641d04d387cdbe5535ae4f9726ce7dc1981a8c3 | 7c6d16eb8b3610a21020ecb479101db622d1535f | refs/heads/master | 2022-12-20T13:08:36.462058 | 2020-09-14T18:12:12 | 2020-09-14T22:43:26 | 264,703,873 | 0 | 0 | Apache-2.0 | 2020-06-16T09:49:00 | 2020-05-17T15:56:57 | Python | UTF-8 | Python | false | false | 4,312 | py | from collections import namedtuple
from dagster import check
from dagster.core.instance import DagsterInstance
from dagster.utils.backcompat import experimental_class_warning
from .mode import DEFAULT_MODE_NAME
class TriggeredExecutionContext(namedtuple("TriggeredExecutionContext", "instance")):
    """Trigger-specific execution context.

    Passed as the first argument to a TriggeredExecutionDefinition's
    run-config, tags and should-execute callbacks.

    Attributes:
        instance (DagsterInstance): The instance configured to run the triggered execution
    """

    def __new__(cls, instance):
        experimental_class_warning("TriggeredExecutionContext")
        validated = check.inst_param(instance, "instance", DagsterInstance)
        return super(TriggeredExecutionContext, cls).__new__(cls, validated)
class TriggeredExecutionDefinition(object):
    """Define a pipeline execution that responds to a trigger

    Args:
        name (str): The name of this triggered execution to create.
        pipeline_name (str): The name of the pipeline to execute when the trigger fires.
        run_config_fn (Callable[[TriggeredExecutionContext], [Dict]]): A function that takes a
            TriggeredExecutionContext object and returns the environment configuration that
            parameterizes this execution, as a dict.
        tags_fn (Optional[Callable[[TriggeredExecutionContext], Optional[Dict[str, str]]]]): A
            function that generates tags to attach to the triggered execution. Takes a
            :py:class:`~dagster.TriggeredExecutionContext` and returns a dictionary of tags (string
            key-value pairs).
        should_execute_fn (Optional[Callable[[TriggeredExecutionContext], bool]]): A function that
            runs at trigger time to determine whether a pipeline execution should be initiated or
            skipped. Takes a :py:class:`~dagster.TriggeredExecutionContext` and returns a boolean
            (``True`` if a pipeline run should be execute). Defaults to a function that always
            returns ``True``.
        mode (Optional[str]): The mode to apply when executing this pipeline. (default: 'default')
        solid_selection (Optional[List[str]]): A list of solid subselection (including single
            solid names) to execute when the trigger fires. e.g. ``['*some_solid+', 'other_solid']``
    """

    # __slots__ keeps instances lightweight and catches attribute typos early.
    __slots__ = [
        "_name",
        "_pipeline_name",
        "_tags_fn",
        "_run_config_fn",
        "_should_execute_fn",
        "_mode",
        "_solid_selection",
    ]
    def __init__(
        self,
        name,
        pipeline_name,
        run_config_fn=None,
        tags_fn=None,
        should_execute_fn=None,
        mode="default",
        solid_selection=None,
    ):
        experimental_class_warning("TriggeredExecutionDefinition")
        self._name = check.str_param(name, "name")
        self._pipeline_name = check.str_param(pipeline_name, "pipeline_name")
        # The three callbacks default to benign no-ops: empty run config,
        # no tags, and "always execute".
        self._run_config_fn = check.opt_callable_param(
            run_config_fn, "run_config_fn", lambda _context: {}
        )
        self._tags_fn = check.opt_callable_param(tags_fn, "tags_fn", lambda _context: {})
        self._should_execute_fn = check.opt_callable_param(
            should_execute_fn, "should_execute_fn", lambda _context: True
        )
        self._mode = check.opt_str_param(mode, "mode", DEFAULT_MODE_NAME)
        self._solid_selection = check.opt_nullable_list_param(
            solid_selection, "solid_selection", of_type=str
        )
    @property
    def pipeline_name(self):
        return self._pipeline_name
    @property
    def solid_selection(self):
        return self._solid_selection
    @property
    def name(self):
        return self._name
    @property
    def mode(self):
        return self._mode
    def get_run_config(self, context):
        """Evaluate the run-config callback for this trigger firing."""
        check.inst_param(context, "context", TriggeredExecutionContext)
        return self._run_config_fn(context)
    def get_tags(self, context):
        """Evaluate the tags callback for this trigger firing."""
        check.inst_param(context, "context", TriggeredExecutionContext)
        return self._tags_fn(context)
    def should_execute(self, context):
        """Decide whether this trigger firing should launch a run."""
        check.inst_param(context, "context", TriggeredExecutionContext)
        return self._should_execute_fn(context)
"prha@elementl.com"
] | prha@elementl.com |
6b81b4966f75b5b9a29916cf00fb1c0496b9d41c | 5cff4d858edc52267126f4fca8aa2e2858037ace | /FUZZY.py | fc61c60c4c75aa93aea9abe0750f34ee87b3880d | [] | no_license | clarisahasya/FuzzyLogic | 35c863bf2cccf587bbe911f0d74c38d9e8408989 | 1fee26262eeb56aea323a5883fcea4568cb5da2e | refs/heads/master | 2022-05-21T17:26:28.259159 | 2020-04-29T05:25:05 | 2020-04-29T05:25:05 | 259,832,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,066 | py | import csv
def loadData():
    """Read influencers.csv and return its rows, skipping the header."""
    rows = []
    with open('influencers.csv') as fh:
        reader = csv.reader(fh, delimiter=',')
        for row_number, row in enumerate(reader):
            if row_number != 0:
                rows.append(row)
    return rows
def saveData(target):
    """Write each chosen influencer on its own row of chosen.csv."""
    with open('chosen.csv', mode='w', newline='') as fh:
        writer = csv.writer(fh)
        for entry in target:
            writer.writerow([entry])
    return writer
def followerCount(fol):
    """Fuzzify a follower count into [low, avg, high] membership degrees.

    Membership functions:
      low  : 1 below 20k, falling linearly to 0 between 20k and 30k
      avg  : rising 20k-30k, 1 from 30k to 55k, falling 55k-60k
      high : rising 50k-70k, 1 above 70k
    """
    low, avg, high = 0, 0, 0
    if (fol <= 20000):
        low = 1
    elif (fol > 30000) :
        low = 0
    elif (fol > 20000 and fol <= 30000):
        low = (30000 - fol)/(30000 - 20000)
    # Fix: the original guard used "and" (fol <= 20000 and fol > 60000),
    # which can never be true; "or" matches the intended "outside the avg
    # support" region.  Observable behavior is unchanged because avg is
    # already initialized to 0.
    if (fol <= 20000 or fol > 60000):
        avg = 0
    elif (fol > 20000 and fol <= 30000):
        avg = (fol - 20000)/(30000 - 20000)
    elif (fol > 30000 and fol <= 55000):
        avg = 1
    elif (fol > 55000 and fol <= 60000):
        avg = (60000 - fol)/(60000 - 55000)
    if (fol <= 50000):
        high = 0
    elif (fol > 70000) :
        high = 1
    elif (fol > 50000 and fol <= 70000):
        high = (fol - 50000)/(70000 - 50000)
    return [low, avg, high]
def engagementRate(rate):
    """Fuzzify an engagement rate (%) into [low, avg, high] memberships.

    Membership functions:
      low  : 1 below 2, falling linearly to 0 between 2 and 3.5
      avg  : rising 2-3.5, 1 from 3.5 to 6, falling 6-6.5
      high : rising 5-7, 1 above 7
    """
    low, avg, high = 0.0, 0.0, 0.0
    if (rate <= 2):
        low = 1.0
    elif (rate > 3.5) :
        low = 0.0
    elif (rate > 2 and rate <= 3.5):
        low = (3.5 - rate)/(3.5 - 2)
    # Fix: the original guard used "and" (rate <= 2 and rate > 6.5), an
    # impossible condition; "or" matches the intended "outside the avg
    # support" region.  Behavior is unchanged (avg already starts at 0.0).
    if (rate <= 2 or rate > 6.5):
        avg = 0.0
    elif (rate > 2 and rate <= 3.5):
        avg = (rate - 2)/(3.5 - 2)
    elif (rate > 3.5 and rate <= 6):
        avg = 1.0
    elif (rate > 6 and rate <= 6.5):
        avg = (6.5 - rate)/(6.5 - 6)
    if (rate <= 5):
        high = 0.0
    elif (rate > 7) :
        high = 1.0
    elif (rate > 5 and rate <= 7):
        high = (rate - 5)/(7 - 5)
    return [low, avg, high]
def inference(fol, rate):
    """Combine follower and engagement memberships into rule activations.

    Returns [nano, micro, medium] influencer-class scores, each the
    maximum over its three (follower, engagement) rule combinations.
    """
    nano = max(max(fol[0], rate[0]),   # low, low
               max(fol[0], rate[1]),   # low, average
               max(fol[1], rate[0]))   # average, low
    micro = max(max(fol[0], rate[2]),  # low, high
                max(fol[1], rate[1]),  # average, average
                max(fol[2], rate[0]))  # high, low
    medium = max(max(fol[1], rate[2]),  # average, high
                 max(fol[2], rate[1]),  # high, average
                 max(fol[2], rate[2]))  # high, high
    return [nano, micro, medium]
def sugeno(inf):
    """Weighted-average (Sugeno) defuzzification with outputs 40/60/70."""
    numerator = 40 * inf[0] + 60 * inf[1] + 70 * inf[2]
    return numerator / (inf[0] + inf[1] + inf[2])
def sorting(defuzzy, index):
    """Return index entries ordered by descending defuzzified score."""
    ranked = sorted(zip(defuzzy, index), reverse=True)
    return [name for _, name in ranked]
#MAIN PROGRAM
# Fuzzify each influencer's follower count and engagement rate, defuzzify
# with Sugeno, rank, and persist the top 20.
follower, engagement, inf, defuzzy, index = [], [], [], [], []
data = loadData()
for row in data:
    follower = followerCount(int(row[1]))
    # Fix: the original line ended in a stray "X", a SyntaxError.
    engagement = engagementRate(float(row[2]))
    inf = inference(follower, engagement)
    defuzzy.append(sugeno(inf))
    index.append(row[0])
hasil = sorting(defuzzy, index)
twenty = hasil[:20]
print('*20 Influencers Terbaik*')
for twen in twenty:
    print(twen)
saveData(twenty)
| [
"noreply@github.com"
] | clarisahasya.noreply@github.com |
a6287f09bcad512e733dd891f96edf767697e983 | c99dd9cde8127ca7c400731fa6a2684773f5872c | /OpenExtraction.py | d519e1ef840da2af9198fe494b3550a8f45ecf0e | [] | no_license | orensul/NLP4 | ad60aa5de6be4f1e9f2911ba512f4a51ce22637a | 7ae3602b2091ee2612328673086206771d82bfaa | refs/heads/master | 2021-09-04T11:56:39.581636 | 2018-01-18T14:01:51 | 2018-01-18T14:01:51 | 117,990,249 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py | import wikipedia, spacy
import PosExtractor
import DepTreeExtractor
pages = ['Donald Trump', 'Brad Pitt', 'Angelina Jolie']
class OpenExtraction(object):
    """Runs POS- and dependency-tree-based open information extraction
    over each Wikipedia page listed in ``pages``."""

    def __init__(self):
        # Load the spaCy model once: the call is loop-invariant and
        # expensive, so hoisting it avoids reloading the model for every
        # article (the original reloaded it on each iteration).
        self.nlp = spacy.load('en')
        for page in pages:
            self.tokens = []
            self.page = wikipedia.page(page).content
            self.analyzed_page = self.nlp(self.page)
            self.init_tokens()
            print()
            print("----------------- POS EXTRACTOR, page = " + page + ' -----------------')
            # (removed a dead "self.pos_extractor = []" that was immediately
            # overwritten by the assignment below)
            self.pos_extractor = PosExtractor.PosExtractor(self.tokens)
            print()
            print("----------------- DEP Tree EXTRACTOR, page = " + page + ' -----------------')
            self.dep_tree_extractor = DepTreeExtractor.DepTreeExtractor(self.tokens)

    def init_tokens(self):
        """
        reads all of the tokens into self.tokens list
        """
        for token in self.analyzed_page:
            self.tokens.append(token)
def main():
    # Instantiating OpenExtraction runs the whole pipeline in __init__.
    oe = OpenExtraction()
# Executed on import as a script entry point (no __main__ guard).
main()
| [
"orens2806@gmail.com"
] | orens2806@gmail.com |
3bb0c204b7ae251f7c2f662f3fd3cecbbef027ca | a1ad2715e306fd4e7eaeda5348e00e1a363e7884 | /13-roman-to-integer/13-roman-to-integer.py | ef4dcd9a1620823db6a532b7b336dc75a9f3d2b4 | [] | no_license | MayankMaheshwar/DS-and-Algo-solving | cef54a800b3e8a070a707f97b4f30fccaa17d5c6 | ac6ea8f880920242a55d40c747368d68cb6f7534 | refs/heads/master | 2022-12-07T07:55:08.380505 | 2022-12-05T09:32:14 | 2022-12-05T09:32:14 | 237,103,468 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | class Solution:
def romanToInt(self, s: str) -> int:
d={'I' : 1,
'V' : 5,
'X' : 10,
'L' : 50,
'C' : 100,
'D' : 500,
'M' : 1000}
if len(s)==1:
return d[s]
i=len(s)-1
am=0
while i>=0:
if i==0:
am+=d[s[i]]
return am
if d[s[i]]>d[s[i-1]]:
am+=d[s[i]]-d[s[i-1]]
i-=2
else:
am+=d[s[i]]
i-=1
return am
| [
"mayank_an@srmuniv.edu.in"
] | mayank_an@srmuniv.edu.in |
b07e7a1b3bd384b16437882c7b0294f175abceb6 | d63811f9944dead8a745a46e1382f64800c72c5e | /linuxYazKampı/sonuç/blog2/blog2/urls.py | 84f8e6e2d23940c565a20447a9eff752bf390775 | [] | no_license | Arciles/Notes | 9dd77425209b9a10a6503dcd27a5c48c9666c35b | 095e361bdb11ca72c3bff801ed4a9b938827c84a | refs/heads/master | 2020-12-11T05:46:11.416975 | 2014-11-22T12:08:37 | 2014-11-22T12:08:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
# Populate the admin autodiscovery registry (required before Django 1.7).
admin.autodiscover()
# NOTE(review): patterns() was deprecated in Django 1.8 and removed in 1.10;
# on modern Django, urlpatterns should be a plain list of url()/path() entries.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'blog2.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
| [
"salihklc91@gmail.com"
] | salihklc91@gmail.com |
8344d1a9044d83a7d0867f09887cb1d7af8c0729 | 3259ffe73a1b2f1a17f0cf0512452d47f47f441d | /Leet Code/268_missing_number.py | f3cae5afa7eb9cf57fddd9e5bb12c9d9ea15054c | [
"MIT"
] | permissive | aayushmaru18/Competitive-Programming | a9160509afe32ee3eced0b7d830c33d62ba6f146 | 0ef237a140901005371a792eea4676b5386c7c50 | refs/heads/master | 2023-05-04T22:03:01.224426 | 2021-06-01T08:37:19 | 2021-06-01T08:37:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | class Solution:
def missingNumber(self, nums: List[int]) -> int:
n = len(nums)
s = n*(n+1)//2
for i in nums:
s -= i
return s
| [
"keshav.sm@somaiya.edu"
] | keshav.sm@somaiya.edu |
bd1ad54c4df2992288af9816c42e8e4f58c11fe9 | 5a71e09fd30d3d52c599015d47afa30e444ca964 | /InputOutput.py | be1086a5115ffff123302aafc8edeb3dbcd2876d | [] | no_license | danityang/PythonBase | 07987c6b2cd441650110347b906e5b290f9478b5 | 5df07763d70ce08ae44c917efbda76396449353a | refs/heads/master | 2021-01-20T22:55:11.933896 | 2017-10-17T09:22:09 | 2017-10-17T09:22:09 | 101,829,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,621 | py | # coding=utf-8
import math
# TODO Python 3 input and output
# Use the file object's write() method; standard output can be referenced
# via sys.stdout.
# Use the str.format() function to format output values.
# To convert an output value to a string, use repr() or str().
# TODO str(): returns a human-readable representation of the value.
# TODO repr(): produces an interpreter-readable representation.
# NOTE(review): despite the "Python 3" heading, the bare "print x"
# statements below are Python 2 syntax and will not run on Python 3.
s = "hello,python3"
print str(s)
print repr(s)
print str(1 / 7)
print repr(10 * 3.25)
print repr(200 * 200)
# repr() escapes special characters inside strings
hello = 'hello, runoob\n'
hellos = repr(hello)
print hellos
print  # blank line
# repr() accepts any Python object as its argument
'''
repr((x, y, ('Google', 'Runoob')))
'''
for x in range(1, 11):
    print('{0:2d} {1:3d} {2:4d}'.format(x, x * x, x * x * x))
print  # blank line
# TODO basic usage of str.format():
print('{}网址: "{}!"'.format('百度', 'www.baidu.com'))
# The braces and the characters inside them (called format fields) are
# replaced by the arguments passed to format().
# A number inside the braces refers to the position of the corresponding
# argument in format(), as shown below
print('{0} 和 {1}'.format('Google', 'Runoob'))
print('{1} 和 {0}'.format('Google', 'Runoob'))
# If keyword arguments are used in format(), their values refer to the
# arguments with those names.
print "{int},{word}".format(int=34, word='test word')
# Positional and keyword arguments can be combined freely:
print('站点列表 {0}, {1}, 和 {other}。'.format('Google', 'Runoob', other='Taobao'))
print  # blank line
print('常量 PI 的值近似为: {!r}。'.format(math.pi))
# keep 3 digits after the decimal point
print('常量 PI 的值近似为 {0:.3f}。'.format(math.pi))
print  # blank line
# Passing an integer after ':' guarantees the field is at least that wide;
# useful when pretty-printing tables.
table = {'Google': 1, 'Runoob': 2, 'Taobao': 3}
for name, number in table.items():
    print('{0:10} ——> {1:10}'.format(name, number))
print  # blank line
table = {'Google': 1, 'Runoob': 2, 'Taobao': 3}
print('Runoob: {0[Runoob]}; Google: {0[Google]}; Taobao: {0[Taobao]}'.format(table))
print('Runoob: {Runoob}, Google: {Google}, Taobao: {Taobao}'.format(**table))
print  # blank line
# TODO old-style string formatting
# The "%" operator can also format strings.  It treats the left operand as
# a sprintf()-style format string, substitutes the right operand into it,
# and returns the formatted string.  For example:
print('常量 PI 的值近似为:%5.3f。' % math.pi)
print('值为:%0.3f' % 5)
print  # blank line
# TODO reading keyboard input
input_str = input("请输入:")
print "你输入的内容是: ", input_str
print  # blank line
# TODO reading and writing files
# open() returns a file object; basic syntax:
'''
open(filename, mode)
'''
# filename: string containing the name of the file to access.
# mode: how the file is opened -- read-only, write, append, etc.  This
#       argument is optional and defaults to read-only ('r').
# r    read-only; pointer at the start of the file (the default mode).
# rb   read-only in binary mode; pointer at the start.
# r+   read and write; pointer at the start.
# rb+  read and write in binary mode; pointer at the start.
# w    write-only; truncates the file if it exists, else creates it.
# wb   like w, in binary mode.
# w+   read and write; truncates the file if it exists, else creates it.
# wb+  like w+, in binary mode.
# a    append; pointer at the end, new content is written after the
#      existing content; creates the file if it does not exist.
# ab   like a, in binary mode.
# a+   read and append; pointer at the end; creates the file if missing.
# ab+  like a+, in binary mode.
# create/open a file on local drive H:
file_path = "H:/python_file_test.txt"
f = open(file_path, "w")
f.write("Python 是一个非常好的语言。\n是的,的确非常好!!\n")
# close the opened file
f.close()
print "Create File Success"
print  # blank line
# TODO file object methods
'''
f.read()
Reads the file's contents; call f.read(size).
'''
# size is an optional integer; if omitted or negative, the whole file is
# read and returned.
f = open(file_path, "r")
print "文件内容: ", f.read()
f.close()
print  # blank line
'''
f.readline()
f.readline() reads a single line from the file; the newline character is
'\n'.  An empty string return value means the last line has been read.
'''
f = open(file_path, "r")
print "f.readline(): ", f.readline()
f.close()
print
'''
f.readlines()
f.readlines() returns all lines contained in the file.  With the optional
sizehint argument, it reads that many bytes and splits them into lines.
'''
f = open(file_path, "r")
print "f.readlines(): ", f.readlines()
f.close()
# read the file by iterating over it
f = open(file_path, "r")
for line in f:
    print "迭代读取文件: ", line
f.close()
'''
f.write()
Writes string to the file, then returns the number of characters written.
'''
f = open(file_path, "a")
f.write("再来写几句话到里面测试测试\n")
print "写入成功"
print  # blank line
f.close()
# to write something that is not a string, convert it first:
f = open(file_path, "a")
value = 345
s = str(value)
f.write(s)
f.close()
print "写入成功"
'''
f.tell()
Returns the file object's current position, counted in bytes from the
start of the file.
'''
f = open(file_path, "a")
print "f.tell(): ", f.tell()
f.close()
'''
f.seek()
To change the file's current position, use f.seek(offset, from_what).
from_what: 0 = start of file, 1 = current position, 2 = end of file, e.g.
seek(x,0)  : move x characters from the first character of the file
seek(x,1)  : move x characters forward from the current position
seek(-x,2) : move x characters back from the end of the file
from_what defaults to 0, i.e. the start of the file.
'''
f = open(file_path, "rb+")
# NOTE(review): on Python 3 this write would need a bytes literal (b'...')
# because the file is opened in binary mode.
f.write('0123456789abcdef')
f.seek(5)  # move to the sixth byte of the file
print f.read(1)
f.seek(-3, 2)  # move to the third byte from the end
print f.read(1)
'''
f.close()
In text-mode files (those opened without 'b' in the mode), positioning is
only done relative to the start of the file.
When finished with a file, call f.close() to close it and release system
resources; trying to use the file afterwards raises an exception.
'''
# TODO pickle module
| [
"yangdi128@163.com"
] | yangdi128@163.com |
de4dad0b5f5f6051ee87079d5474ef05edfbcb32 | 94a269f91196294ff6033d40e1b71bc343738218 | /.~c9_invoke_helab1.py | 5724218f85f5d967e9db792fe3bfa4a5b71edf68 | [] | no_license | Deandraharvin3/project2-cp3-dehar42 | d9a436ea71ec2e5fb4f301fc9937f276c813f65a | 097e617344457468615a8402730ed59f8ee0aab6 | refs/heads/master | 2022-12-18T13:59:59.151267 | 2019-10-19T02:38:34 | 2019-10-19T02:38:34 | 214,926,937 | 0 | 0 | null | 2022-12-04T15:52:35 | 2019-10-14T02:04:29 | Python | UTF-8 | Python | false | false | 1,524 | py | import os
import flask, flask_socketio,psycopg2
import models, chatbot
from rfc3987 import parse
# Flask app plus the Socket.IO wrapper that powers the realtime chat events.
app = flask.Flask(__name__)
socketio = flask_socketio.SocketIO(app)
@app.route('/')
def index():
    # Serve the single-page chat client.
    return flask.render_template("index.html")
@socketio.on('connect')
def on_connect():
    """Send the stored chat history to a newly connected client."""
    print('someone connected')
    history = []
    for message in models.Message.query.all():
        history.append(message.text + '\n')
    flask_socketio.emit('update', {
        'data': 'Got your connection!',
        'previous_messages': history
    })
@socketio.on('disconnect')
def on_disconnect():
    # Log the disconnect and notify listeners on the same socket.
    print('Someone disconnected!')
    flask_socketio.emit('update', {
        'data': 'Disconnected'
    })
def query(url):
    """Broadcast the full message history; `url` flags whether the latest
    message was detected as a URL."""
    history = [record.text + '\n' for record in models.Message.query.all()]
    socketio.emit('message received', {
        'message': history,
        'isURL': url
    })
@socketio.on('new message')
def on_new_number(data):
    """Persist an incoming chat message and broadcast the updated history.

    Fix: the original sliced the ORM object (``new_message[:4]``) instead
    of the message text, so the URL/chatbot checks operated on a
    ``models.Message`` instance rather than the string the client sent.
    """
    url = False
    print(("Got an event for new number with data:"), data)
    text = data['message']
    new_message = models.Message(text)
    models.db.session.add(new_message)
    models.db.session.commit()
    if text[:4] == 'http':
        # Validate the message as an IRI (raises ValueError if malformed).
        parse(text, rule='IRI')
        url = True
    if text[:2] == '!!':
        # "!!" prefix routes the remainder of the message to the chatbot.
        chatbot.Chatbot.get_response(text[2:len(text)])
    query(url)
# Entry point: bind host/port from the environment (Cloud9-style defaults).
if __name__ == '__main__':
    socketio.run(
        app,
        host=os.getenv('IP', '0.0.0.0'),
        port=int(os.getenv('PORT', 8080)),
        debug=True
) | [
"ubuntu@ip-172-31-33-191.ec2.internal"
] | ubuntu@ip-172-31-33-191.ec2.internal |
51acc55e0eb2422ff9cb13db6dce6210559574e2 | 0418e92c92ae4aab43d65e3bebb8af3ec4e91555 | /2.Python运维/2.业务服务监控/2.1 filecmp.cmp单文件差异.py | 2944397a38906d081718a5e84beda74272e704e7 | [] | no_license | warmsirius/python-notebook | 171ba8a8743247528ef9c3da00a6460545697408 | 6343695d46a8bcd0111d021f44029b4454778879 | refs/heads/master | 2023-02-27T23:36:32.183388 | 2021-01-29T03:37:35 | 2021-01-29T03:37:35 | 304,596,790 | 0 | 0 | null | 2021-01-29T03:37:36 | 2020-10-16T10:40:13 | Python | UTF-8 | Python | false | false | 557 | py | import filecmp
if __name__ == "__main__":
    # Shallow comparison (the default): files compare equal when their
    # os.stat() signatures (type, size, mtime) match; contents are not read.
    print(filecmp.cmp(
        "/2.Python运维/2.业务服务监控/1.3 对比Nginx配置文件差异.py",
        "/Users/yuanjun/Documents/priv_repo/python-notebook/2.Python运维/2.业务服务监控/1.difflib-文件内容差异对比.md"))
    # shallow=False: compare the actual file contents byte by byte.
    print(filecmp.cmp(
        "/2.Python运维/2.业务服务监控/1.3 对比Nginx配置文件差异.py",
        "/Users/yuanjun/Documents/priv_repo/python-notebook/2.Python运维/2.业务服务监控/1.difflib-文件内容差异对比.md",
        shallow=False))
| [
"861022418@qq.com"
] | 861022418@qq.com |
ffc3e0a708efdb334677d8fcea0d1a1dc4ef2f87 | 81c8beba79c93c50df57ae9654ed23a6b5a1546f | /more/highrest/model.py | 539ab0b21c09af56c5c1161765d2bf3524b4d785 | [] | no_license | morepath/more.highrest | d80a0f3813b246ce636e63b3bf62954ac899ee2f | c15b700b647cd59f4a4dc8bb422e8eb7f9574c4d | refs/heads/master | 2021-01-23T03:21:27.649821 | 2017-03-24T16:41:56 | 2017-03-24T16:41:56 | 86,072,014 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 918 | py | class CollectionBase(object):
def clone(self, offset, limit, *args, **kw):
raise NotImplementedError()
def add(self, data):
raise NotImplementedError()
def query(self):
raise NotImplementedError()
@staticmethod
def load(request):
raise NotImplementedError()
def count(self):
raise NotImplementedError()
def previous(self):
if self.offset == 0:
return None
offset = self.offset - self.limit
if offset < 0:
offset = 0
return self.clone(offset, self.limit)
def next(self):
if self.offset + self.limit >= self.count():
return None
offset = self.offset + self.limit
return self.clone(offset, self.limit)
class ItemBase(object):
def update(self, data):
raise NotImplementedError()
def remove(self):
raise NotImplementedError()
| [
"faassen@startifact.com"
] | faassen@startifact.com |
37387eb072b3d1b56bdcaee8162e595327e1223a | a1ce390907fa4a176a8c9dcc512cc4c4c31d6172 | /slam_library.py | 7d257352e253d36cab3eda42f7869a06785285cb | [] | no_license | ntd252/lineSLAM | 62d4546e2e7a6a557836a137e02e6c05539babc0 | 64bb0422d793fd465d95d595b75edbd5fcbc82ab | refs/heads/main | 2023-08-30T06:26:54.363033 | 2021-11-01T16:04:50 | 2021-11-01T16:04:50 | 405,521,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,767 | py | #Library for line matching, includes:
# Check if 2 lines are overlap
# Check if 2 lines are close to each other
# Check if detected lines from floor segmentation result are new or already detected
# Merge 2 lines if they are detected to be the same line.
from math import sin, cos, pi, sqrt
import numpy as np
import cv2
def distance_point(point1, point2):
return sqrt((point1[0] - point2[0])**2 + (point1[1] - point2[1])**2)
def check_overlap(line1, line2):
combination = np.array([line1,
line2,
[line1[0], line1[1], line2[0], line2[1]],
[line1[0], line1[1], line2[2], line2[3]],
[line1[2], line1[3], line2[0], line2[1]],
[line1[2], line1[3], line2[2], line2[3]]])
distance = np.sqrt((combination[:,0] - combination[:,2])**2 + (combination[:,1] - combination[:,3])**2)
max = np.amax(distance)
overlap = distance[0] + distance[1] - max
endpoint = combination[np.argmax(distance)]
return (overlap >= -230), endpoint
def get_close_lines(robot):
slope = 0.57
#get current robot location
theta = robot.state[2]
x_L = robot.state[0] + robot.scanner_displacement*cos(theta)
y_L = robot.state[1] + robot.scanner_displacement*sin(theta)
line_in_state = np.column_stack((robot.state[3::2], robot.state[4::2])) #[[r, alpha],...] from state
a_matrix = np.cos(line_in_state[:,1])
b_matrix = np.sin(line_in_state[:,1])
r_state_in_cam = np.abs(x_L*a_matrix + y_L*b_matrix - line_in_state[:,0])
close_lines = np.nonzero(r_state_in_cam < 500)
#convert robot.lines list from world coordinate to camera coordinate system
x1 = cos(theta)*robot.lines[:,0] + sin(theta)*robot.lines[:,1] - x_L#x_cam1
y1 = -sin(theta)*robot.lines[:,0] + cos(theta)*robot.lines[:,1] - y_L#y_cam1
x2 = cos(theta)*robot.lines[:,2] + sin(theta)*robot.lines[:,3] - x_L#x_cam2
y2 = -sin(theta)*robot.lines[:,2] + cos(theta)*robot.lines[:,3] - y_L#y_cam2
line_list = np.column_stack((x1, y1, x2, y2)).astype(int)
y1_y2 = np.array([line_list[:,1],line_list[:,3]]) #get y1 y2 out of the line list
#upper maximum
index_max_y1_y2 = np.argmax(y1_y2, axis=0) #0 for y1, 1 for y2
index_max_y1_y2[index_max_y1_y2 == 1] += 2 #get index for y2 in line form [x1 y1 x2 y2]
index_max_y1_y2[index_max_y1_y2 == 0] += 1 #get index for y1 in line form [x1 y1 x2 y2
index_max = np.column_stack((index_max_y1_y2-1, index_max_y1_y2)) #merge index of (x, y)
max_filter = np.take_along_axis(line_list,index_max,1) #get (x,y) from line list
x_positive = np.nonzero(max_filter[:,0] > 0)
out_of_upper = np.nonzero((max_filter[:,1] - slope*max_filter[:,0]) > 0)
out_of_upper = np.intersect1d(x_positive, out_of_upper)
#negative minimum
index_max_y1_y2 = np.argmin(y1_y2, axis=0) #0 for y1, 1 for y2
index_max_y1_y2[index_max_y1_y2 == 1] += 2 #get index for y2 in line form [x1 y1 x2 y2]
index_max_y1_y2[index_max_y1_y2 == 0] += 1 #get index for y1 in line form [x1 y1 x2 y2
index_max = np.column_stack((index_max_y1_y2-1, index_max_y1_y2)) #merge index of (x, y)
max_filter = np.take_along_axis(line_list,index_max,1) #get (x,y) from line list
x_positive = np.nonzero(max_filter[:,0] > 0)
out_of_lower = np.nonzero((max_filter[:,1] + slope*max_filter[:,0]) < 0)
out_of_lower = np.intersect1d(x_positive, out_of_lower)
out_of_range = np.intersect1d(out_of_upper, out_of_lower)
point1 = line_list[:,0:2]
point2 = line_list[:,2:4]
point1_in_range_up = (point1[:,1] - slope*point1[:,0]) < 0
point1_in_range_low = (point1[:,1] + slope*point1[:,0]) > 0
point1_in_range = np.logical_and(point1_in_range_up, point1_in_range_low)
point1_positive = point1[:,0] > 0
point1_in_range_positive = np.logical_and(point1_in_range, point1_positive)
point2_in_range_up = (point2[:,1] - slope*point2[:,0]) < 0
point2_in_range_low = (point2[:,1] + slope*point2[:,0]) > 0
point2_in_range = np.logical_and(point2_in_range_up, point2_in_range_low)
point2_positive = point2[:,0] > 0
point2_in_range_positive = np.logical_and(point2_in_range, point2_positive)
point_in_range = np.array(np.nonzero(np.logical_or(point1_in_range_positive, point2_in_range_positive))) #index of line which has at least 1 point in the range
in_range_radius = np.concatenate((out_of_range, point_in_range[0]))
robot.current_close_lines = np.intersect1d(in_range_radius, close_lines)
#print("robot.current_close_line", robot.current_close_lines)
def get_observations(line_list_cam, robot,
max_r_distance, max_alpha_distance):
a = line_list_cam[:,1] - line_list_cam[:,3]
b = line_list_cam[:,2] - line_list_cam[:,0]
c = line_list_cam[:,0]*line_list_cam[:,3] - line_list_cam[:,2]*line_list_cam[:,1]
r_meas = np.divide(np.abs(c), np.sqrt(a*a+b*b))
alpha_meas = (np.arctan2(-b*c,-a*c) + pi) % (2*pi) - pi
measurement = np.column_stack((r_meas, alpha_meas))
# Compute scanner pose from robot pose.
theta = robot.state[2]
x_L = robot.state[0] + robot.scanner_displacement*cos(theta)
y_L = robot.state[1] + robot.scanner_displacement*sin(theta)
#convert line_list from camera coordinate to world coordinate system
X1 = cos(theta)*line_list_cam[:,0] - sin(theta)*line_list_cam[:,1] + x_L#x1
Y1 = sin(theta)*line_list_cam[:,0] + cos(theta)*line_list_cam[:,1] + y_L#y1
X2 = cos(theta)*line_list_cam[:,2] - sin(theta)*line_list_cam[:,3] + x_L#x2
Y2 = sin(theta)*line_list_cam[:,2] + cos(theta)*line_list_cam[:,3] + y_L#y2
line_list = np.column_stack((X1, Y1, X2, Y2)).astype(int)
A = line_list[:,1] - line_list[:,3]
B = line_list[:,2] - line_list[:,0]
C = line_list[:,0]*line_list[:,3] - line_list[:,2]*line_list[:,1]
r = np.divide(np.abs(C), np.sqrt(A*A+B*B))
alpha = (np.arctan2(-B*C,-A*C) + pi) % (2*pi) - pi
obs_inworld_list = np.column_stack((r, alpha)) #an array of observed lines in (r,alpha) form to world frame
# For every detected cylinder which has a closest matching pole in the
# cylinders that are part of the current state, put the measurement
# (distance, angle) and the corresponding cylinder index into the result list.
result = []
line_in_state = np.column_stack((robot.state[3::2], robot.state[4::2])) #[[r, alpha],...] from state
a_matrix = np.cos(line_in_state[:,1])
b_matrix = np.sin(line_in_state[:,1])
r_state_in_cam = np.abs(x_L*a_matrix + y_L*b_matrix - line_in_state[:,0])
xo_matrix = b_matrix*(x_L*b_matrix - y_L*a_matrix) + a_matrix*line_in_state[:,0]
yo_matrix = a_matrix*(y_L*a_matrix - x_L*b_matrix) + b_matrix*line_in_state[:,0]
dx_matrix = xo_matrix - x_L
dy_matrix = yo_matrix - y_L
alpha_state_in_cam = (np.arctan2(dy_matrix, dx_matrix) - theta + np.pi) % (2*pi) - pi
ralpha_state_in_cam = np.column_stack((r_state_in_cam, alpha_state_in_cam))
get_close_lines(robot)
for line_i in range(len(obs_inworld_list)):
'''
compare = np.abs(line_in_state - obs_inworld_list[line_i])
index_list = np.nonzero(np.logical_and(compare[:,0] < local_r_distance, compare[:,1] < local_alpha_distance)) #index of state lines close to observed line
'''
compare = np.abs(ralpha_state_in_cam - measurement[line_i])
index_list = np.nonzero(np.logical_and(compare[:,0] < max_r_distance, compare[:,1] < max_alpha_distance)) #index of state lines close to observed line
obs_r, obs_alpha = measurement[line_i]
#check if observed line and state lines are overlap
best_index = -1
best_distance = max_r_distance * max_r_distance
best_endpoints = np.array([-1,-1,-1,-1])
for line_index in index_list[0]:
#print("state.lines", robot.lines[line_index])
#print("state.r,alpha", robot.state[3+2*line_index:5+2*line_index])
overlap, endpoints = check_overlap(robot.lines[line_index], line_list[line_i])
if overlap:
state_r, state_alpha = ralpha_state_in_cam[line_index]
dist_2 = (state_r - obs_r)**2 + ((max_r_distance/max_alpha_distance)*(state_alpha - obs_alpha))**2
if dist_2 < best_distance:
best_distance = dist_2
best_index = line_index
best_endpoints = endpoints
result.append((measurement[line_i], line_list[line_i], best_endpoints, best_index))
return result
| [
"noreply@github.com"
] | ntd252.noreply@github.com |
7b9b373d631a67d5e8a64e9ae1deaee6da841a34 | fbb1550dc5437d672ed0137bd7711eba3290dee3 | /students/amirg/lesson06/assignment/expand_file.py | c69a8fe2642955bfc677064b997480e95fd0bba4 | [] | no_license | JavaRod/SP_Python220B_2019 | 2cc379daf5290f366cf92dc317b9cf68e450c1b3 | 5dac60f39e3909ff05b26721d602ed20f14d6be3 | refs/heads/master | 2022-12-27T00:14:03.097659 | 2020-09-27T19:31:12 | 2020-09-27T19:31:12 | 272,602,608 | 1 | 0 | null | 2020-06-16T03:41:14 | 2020-06-16T03:41:13 | null | UTF-8 | Python | false | false | 2,993 | py | '''
This file expands the exercise csv file from 10
to 1,000,000 records
'''
#pylint: disable=redefined-builtin
import csv
import uuid
import logging
import random
import time
#format for the log
LOG_FORMAT = "%(asctime)s %(filename)s: %(lineno)-3d %(levelname)s %(message)s"
#setup for formatter and log file
FORMATTER = logging.Formatter(LOG_FORMAT)
LOG_FILE = 'db.log'
#setup for file hanlder at error level
FILE_HANDLER = logging.FileHandler(LOG_FILE, mode='w')
FILE_HANDLER.setLevel(30)
FILE_HANDLER.setFormatter(FORMATTER)
#setup for console handler at debug level
CONSOLE_HANDLER = logging.StreamHandler()
CONSOLE_HANDLER.setLevel(10)
CONSOLE_HANDLER.setFormatter(FORMATTER)
#setup for logging set at debug level
LOGGER = logging.getLogger()
LOGGER.setLevel(10)
LOGGER.addHandler(FILE_HANDLER)
LOGGER.addHandler(CONSOLE_HANDLER)
#dict to convert debug input to log level
LOG_LEVEL = {'0': 51, '1': 40, '2': 30, '3': 10}
def expand_data(directory_name, input_file):
'''
This module expands the data in the csv file
'''
#logging.debug('Attempting to open %s', input_file)
try:
with open(directory_name + "/" + input_file, 'a', newline='') as file:
expanded_writer = csv.writer(file, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
#logging.debug('Successfully opened file %s', input_file)
for i in range(11, 1000001, 1):
new_list = []
rand_uuid = str(uuid.uuid4())
logging.debug('Generated random uuid')
rand_date = random_date('1/1/2010', '12/31/2020', random.random())
logging.debug('Generated random date')
rand_string = random.choice(['', 'ao'])
logging.debug('Generated random string')
new_list = [rand_uuid, i, i+1, i+2, i+3, rand_date, rand_string]
expanded_writer.writerow(new_list)
logging.debug('Successfully expanded data')
except FileNotFoundError:
logging.error('Could not open file')
def str_time_prop(start, end, format, prop):
'''
Get a time at a proportion of a range of two formatted times.
start and end should be strings specifying times formated in the
given format (strftime-style), giving an interval [start, end].
prop specifies how a proportion of the interval to be taken after
start. The returned time will be in the specified format.
'''
stime = time.mktime(time.strptime(start, format))
etime = time.mktime(time.strptime(end, format))
ptime = stime + prop * (etime - stime)
return time.strftime(format, time.localtime(ptime))
def random_date(start, end, prop):
'''
Generates a random date
'''
return str_time_prop(start, end, '%m/%d/%Y', prop)
if __name__ == "__main__":
expand_data(r'C:/Users/Amir G/SP_Python220B_2019/students/amirg/lesson06/assignment/data',
'exercise.csv')
| [
"amirgnyawali@gmail.com"
] | amirgnyawali@gmail.com |
0603a3669f5e124d892ba55d02f5e796c270385f | e46f56cc1ffa52dee7da4efc718a09405a323a4e | /COT/commands/tests/test_edit_hardware.py | 0f68e914fafb800190f595e04ffbdf89335ad945 | [
"MIT"
] | permissive | harrisonfeng/cot | b8f8a7610c73c3154f7428c576b29d9c3795a97c | 532a6263c5b0462aa290a2852d27317fcc76d576 | refs/heads/master | 2020-12-31T04:15:57.133565 | 2017-04-03T18:27:12 | 2017-04-03T18:27:12 | 56,671,341 | 0 | 0 | null | 2016-04-20T08:51:56 | 2016-04-20T08:51:55 | null | UTF-8 | Python | false | false | 86,299 | py | #!/usr/bin/env python
#
# edit_hardware.py - test cases for the COTEditHardware class
#
# December 2014, Glenn F. Matthews
# Copyright (c) 2013-2017 the COT project developers.
# See the COPYRIGHT.txt file at the top-level directory of this distribution
# and at https://github.com/glennmatthews/cot/blob/master/COPYRIGHT.txt.
#
# This file is part of the Common OVF Tool (COT) project.
# It is subject to the license terms in the LICENSE.txt file found in the
# top-level directory of this distribution and at
# https://github.com/glennmatthews/cot/blob/master/LICENSE.txt. No part
# of COT, including this file, may be copied, modified, propagated, or
# distributed except according to the terms contained in the LICENSE.txt file.
"""Unit test cases for the COT.edit_hardware.COTEditHardware class."""
import re
from COT.commands.tests.command_testcase import CommandTestCase
from COT.ui import UI
from COT.commands.edit_hardware import COTEditHardware
from COT.data_validation import InvalidInputError
from COT.platforms import IOSv, IOSXRv
class TestCOTEditHardware(CommandTestCase):
    """Test the COTEditHardware class."""

    # Canned log-record matchers below are passed to assertLogged() as
    # keyword arguments (e.g. self.assertLogged(**self.MEMORY_UNIT_GUESS)).
    # The '.*' in several 'msg' values suggests regex matching of the
    # logged message -- TODO confirm against CommandTestCase.assertLogged.

    # Expected when hardware items must be created in an OVF with none.
    NEW_HW_FROM_SCRATCH = {
        'levelname': 'NOTICE',
        'msg': "No existing items.*Will create new.*from scratch",
    }
    # Expected when a memory value is given without an explicit unit.
    MEMORY_UNIT_GUESS = {
        'levelname': 'WARNING',
        'msg': "Memory units not specified, guessing",
    }
    # Expected when an edit request matches no existing items.
    NO_ITEMS_NO_WORK = {
        'levelname': 'WARNING',
        'msg': "No items.*found. Nothing to do.",
    }
    # Expected when the OVF's NetworkSection is removed.
    REMOVING_NETWORKSECTION = {
        'levelname': "NOTICE",
        'msg': "removing NetworkSection",
    }
    # Expected when NICs must be mapped but no network names were given.
    GENERIC_NETWORK = {
        'levelname': "WARNING",
        'msg': "No network names specified, but NICs must be mapped.*",
        'args': ('VM Network',),
    }
@staticmethod
def removing_network_message(name=None):
"""Warning log message for deleting a network entry.
Args:
name (str): Name of network being deleted. Defaults to 'VM Network'.
Returns:
dict: kwargs suitable for passing into :meth:`assertLogged`
"""
if not name:
name = "VM Network"
return {
'levelname': "NOTICE",
'msg': "Removing unused network %s",
'args': [name],
}
command_class = COTEditHardware
def test_not_ready_with_no_args(self):
"""Test ready_to_run() behavior."""
self.command.package = self.input_ovf
ready, reason = self.command.ready_to_run()
self.assertEqual(ready, False)
self.assertTrue(re.search("No work requested", reason))
self.assertRaises(InvalidInputError, self.command.run)
def test_valid_args(self):
"""Verify that various valid args are accepted and stored."""
self.command.package = self.input_ovf
self.command.cpus = "1"
self.assertEqual(self.command.cpus, 1)
self.command.memory = "1GB"
self.assertEqual(self.command.memory, 1024)
self.command.memory = "2g"
self.assertEqual(self.command.memory, 2048)
self.command.memory = "256M"
self.assertEqual(self.command.memory, 256)
self.command.memory = "1024"
self.assertLogged(**self.MEMORY_UNIT_GUESS)
self.assertEqual(self.command.memory, 1024)
self.command.nics = 1
self.assertEqual(self.command.nics, 1)
self.command.serial_ports = 1
self.assertEqual(self.command.serial_ports, 1)
def test_invalid_always_args(self):
"""Verify that various values are always invalid."""
# pylint: disable=redefined-variable-type
self.command.package = self.input_ovf
with self.assertRaises(InvalidInputError):
self.command.cpus = 0
with self.assertRaises(InvalidInputError):
self.command.cpus = "a"
with self.assertRaises(InvalidInputError):
self.command.memory = 0
with self.assertRaises(InvalidInputError):
self.command.memory = "GB"
with self.assertRaises(InvalidInputError):
self.command.nics = -1
with self.assertRaises(InvalidInputError):
self.command.nics = "b"
with self.assertRaises(InvalidInputError):
self.command.serial_ports = -1
with self.assertRaises(InvalidInputError):
self.command.serial_ports = "c"
def test_valid_by_platform(self):
"""Verify that some input values' validity depends on platform."""
self.command.package = self.input_ovf
self.command.ui.default_confirm_response = False
# IOSv only supports 1 vCPU and up to 3 GB of RAM
self.set_vm_platform(IOSv)
with self.assertRaises(InvalidInputError):
self.command.cpus = 2
with self.assertRaises(InvalidInputError):
self.command.memory = "4GB"
# ...but IOSXRv supports up to 8 CPUs and 3-8 GB of RAM
self.set_vm_platform(IOSXRv)
self.command.cpus = 2
self.command.cpus = 8
with self.assertRaises(InvalidInputError):
self.command.cpus = 9
self.command.memory = "4"
self.assertLogged(**self.MEMORY_UNIT_GUESS)
self.command.memory = "8GB"
with self.assertRaises(InvalidInputError):
self.command.memory = "9GB"
    def test_set_system_type_single(self):
        """Set the VirtualSystemType to a single value."""
        self.command.package = self.input_ovf
        self.command.virtual_system_type = ['vmx-09']
        self.command.run()
        self.command.finished()
        # The existing two-value type string is replaced outright.
        self.check_diff("""
<vssd:VirtualSystemIdentifier>test</vssd:VirtualSystemIdentifier>
- <vssd:VirtualSystemType>vmx-07 vmx-08</vssd:VirtualSystemType>
+ <vssd:VirtualSystemType>vmx-09</vssd:VirtualSystemType>
</ovf:System>
""")
    def test_set_system_type_list(self):
        """Set the VirtualSystemType to a list of values."""
        self.command.package = self.input_ovf
        self.command.virtual_system_type = \
            ['vmx-07', 'vmx-08', 'vmx-09', 'Cisco:Internal:VMCloud-01']
        # 'profiles' will be ignored in this case,
        # as VirtualSystemType is not filtered by profile
        self.command.profiles = ['2CPU-2GB-1NIC']
        self.command.run()
        # TODO - catch warning logger message that should be generated
        # due to profiles being ignored.
        self.command.finished()
        # All four values end up space-separated in one element.
        self.check_diff("""
<vssd:VirtualSystemIdentifier>test</vssd:VirtualSystemIdentifier>
- <vssd:VirtualSystemType>vmx-07 vmx-08</vssd:VirtualSystemType>
+ <vssd:VirtualSystemType>vmx-07 vmx-08 vmx-09 \
Cisco:Internal:VMCloud-01</vssd:VirtualSystemType>
</ovf:System>
""")
    def test_set_system_type_no_existing(self):
        """Add a VirtualSystemType to an OVF that doesn't have any."""
        self.command.package = self.minimal_ovf
        self.command.virtual_system_type = ['vmx-07', 'vmx-08']
        self.command.run()
        self.command.finished()
        # A new System element (and the vssd namespace) must be created.
        self.check_diff(file1=self.minimal_ovf,
                        expected="""
<?xml version='1.0' encoding='utf-8'?>
-<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1">
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:vssd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_VirtualSystemSettingData">
<ovf:References />
...
<ovf:Info />
+ <ovf:System>
+ <vssd:ElementName>Virtual System Type</vssd:ElementName>
+ <vssd:InstanceID>0</vssd:InstanceID>
+ <vssd:VirtualSystemType>vmx-07 vmx-08</vssd:VirtualSystemType>
+ </ovf:System>
</ovf:VirtualHardwareSection>
""")
    def test_set_cpus_one_profile(self):
        """Change the number of CPUs under a specific profile."""
        self.command.package = self.input_ovf
        self.command.cpus = 8
        self.command.profiles = ['2CPU-2GB-1NIC']
        self.command.run()
        self.command.finished()
        # Only the CPU Item belonging to the selected profile is edited.
        self.check_diff("""
<rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>2 virtual CPU(s)</rasd:ElementName>
+ <rasd:ElementName>8 virtual CPU(s)</rasd:ElementName>
<rasd:InstanceID>1</rasd:InstanceID>
<rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>2</rasd:VirtualQuantity>
+ <rasd:VirtualQuantity>8</rasd:VirtualQuantity>
<vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
""")
    def test_set_cpus_merge_profiles(self):
        """Change # CPUs under one profile to match another profile."""
        self.command.package = self.input_ovf
        self.command.cpus = 4
        self.command.profiles = ['2CPU-2GB-1NIC']
        self.command.run()
        self.command.finished()
        # The now-identical profile-specific Item is merged into a single
        # Item listing both profiles in ovf:configuration.
        self.check_diff("""
</ovf:Item>
- <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
- <rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
- <rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>2 virtual CPU(s)</rasd:ElementName>
- <rasd:InstanceID>1</rasd:InstanceID>
- <rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>2</rasd:VirtualQuantity>
- <vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
- </ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <ovf:Item ovf:configuration="2CPU-2GB-1NIC 4CPU-4GB-3NIC">
<rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
""")
    def test_set_cpus_all_profiles(self):
        """Change value under all profiles, merging a group of Items."""
        self.command.package = self.input_ovf
        self.command.cpus = 1
        self.command.run()
        self.command.finished()
        # With every profile set to 1 CPU, the profile-specific Items
        # become redundant and are deleted.
        self.check_diff("""
</ovf:Item>
- <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
- <rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
- <rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>2 virtual CPU(s)</rasd:ElementName>
- <rasd:InstanceID>1</rasd:InstanceID>
- <rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>2</rasd:VirtualQuantity>
- <vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
- </ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
- <rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
- <rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>4 virtual CPU(s)</rasd:ElementName>
- <rasd:InstanceID>1</rasd:InstanceID>
- <rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>4</rasd:VirtualQuantity>
- <vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
- </ovf:Item>
<ovf:Item>
""")
    def test_set_cpus_no_existing(self):
        """Create a CPU definition in an OVF that doesn't have one."""
        self.command.package = self.minimal_ovf
        self.command.cpus = 1
        self.command.run()
        # A brand-new Item (and the rasd namespace) must be created.
        self.assertLogged(**self.NEW_HW_FROM_SCRATCH)
        self.command.finished()
        self.check_diff(file1=self.minimal_ovf,
                        expected="""
<?xml version='1.0' encoding='utf-8'?>
-<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1">
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData">
<ovf:References />
...
<ovf:Info />
+ <ovf:Item>
+ <rasd:ElementName>cpu</rasd:ElementName>
+ <rasd:InstanceID>1</rasd:InstanceID>
+ <rasd:ResourceType>3</rasd:ResourceType>
+ <rasd:VirtualQuantity>1</rasd:VirtualQuantity>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
    def test_set_memory_one_profile(self):
        """Set memory allocation under one profile."""
        self.command.package = self.input_ovf
        # A bare integer is assumed to be megabytes, with a warning.
        self.command.memory = 3072
        self.assertLogged(**self.MEMORY_UNIT_GUESS)
        self.command.profiles = ['2CPU-2GB-1NIC']
        self.command.run()
        self.command.finished()
        self.check_diff("""
<rasd:Description>Memory Size</rasd:Description>
- <rasd:ElementName>2048MB of memory</rasd:ElementName>
+ <rasd:ElementName>3072MB of memory</rasd:ElementName>
<rasd:InstanceID>2</rasd:InstanceID>
<rasd:ResourceType>4</rasd:ResourceType>
- <rasd:VirtualQuantity>2048</rasd:VirtualQuantity>
+ <rasd:VirtualQuantity>3072</rasd:VirtualQuantity>
</ovf:Item>
""")
    def test_set_memory_all_profiles(self):
        """Set memory allocation under all profiles, merging Items."""
        self.command.package = self.input_ovf
        self.command.memory = "3072M"
        self.command.run()
        self.command.finished()
        # All profiles converge on 3072MB, so the per-profile memory
        # Items are deleted in favor of the single shared one.
        self.check_diff("""
<rasd:Description>Memory Size</rasd:Description>
- <rasd:ElementName>1024MB of memory</rasd:ElementName>
+ <rasd:ElementName>3072MB of memory</rasd:ElementName>
<rasd:InstanceID>2</rasd:InstanceID>
<rasd:ResourceType>4</rasd:ResourceType>
- <rasd:VirtualQuantity>1024</rasd:VirtualQuantity>
- </ovf:Item>
- <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
- <rasd:AllocationUnits>byte * 2^20</rasd:AllocationUnits>
- <rasd:Description>Memory Size</rasd:Description>
- <rasd:ElementName>2048MB of memory</rasd:ElementName>
- <rasd:InstanceID>2</rasd:InstanceID>
- <rasd:ResourceType>4</rasd:ResourceType>
- <rasd:VirtualQuantity>2048</rasd:VirtualQuantity>
- </ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
- <rasd:AllocationUnits>byte * 2^20</rasd:AllocationUnits>
- <rasd:Description>Memory Size</rasd:Description>
- <rasd:ElementName>4096MB of memory</rasd:ElementName>
- <rasd:InstanceID>2</rasd:InstanceID>
- <rasd:ResourceType>4</rasd:ResourceType>
- <rasd:VirtualQuantity>4096</rasd:VirtualQuantity>
+ <rasd:VirtualQuantity>3072</rasd:VirtualQuantity>
</ovf:Item>
""")
    def test_set_memory_no_existing(self):
        """Create a RAM definition in an OVF that doesn't have one."""
        self.command.package = self.minimal_ovf
        self.command.memory = "4GB"
        self.command.run()
        # A brand-new Item (and the rasd namespace) must be created.
        self.assertLogged(**self.NEW_HW_FROM_SCRATCH)
        self.command.finished()
        self.check_diff(file1=self.minimal_ovf,
                        expected="""
<?xml version='1.0' encoding='utf-8'?>
-<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1">
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData">
<ovf:References />
...
<ovf:Info />
+ <ovf:Item>
+ <rasd:AllocationUnits>byte * 2^20</rasd:AllocationUnits>
+ <rasd:ElementName>memory</rasd:ElementName>
+ <rasd:InstanceID>1</rasd:InstanceID>
+ <rasd:ResourceType>4</rasd:ResourceType>
+ <rasd:VirtualQuantity>4096</rasd:VirtualQuantity>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
    def test_set_nic_type_one_profile(self):
        """Set NIC hardware type under a single profile."""
        self.command.package = self.input_ovf
        self.command.profiles = ['4CPU-4GB-3NIC']
        self.command.nic_type = "E1000"
        self.command.run()
        self.command.finished()
        # This requires cloning the "default" NIC under instance 11
        # to create a profile-specific version of this NIC
        self.check_diff("""
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:AddressOnParent>11</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:Description>E1000 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:InstanceID>11</rasd:InstanceID>
+ <rasd:ResourceSubType>E1000</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
<rasd:AddressOnParent>12</rasd:AddressOnParent>
...
<rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Description>E1000 ethernet adapter on "VM Network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet2</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>E1000</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
...
<rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Description>E1000 ethernet adapter on "VM Network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet3</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>E1000</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
""")
    def test_set_nic_type_all_profiles(self):
        """Change NIC hardware type under all profiles."""
        self.command.package = self.input_ovf
        # The alias 'virtio-net-pci' is canonicalized to 'virtio'.
        self.command.nic_type = "virtio-net-pci"
        self.assertEqual(self.command.nic_type, "virtio")
        self.command.run()
        self.command.finished()
        self.check_diff("""
<rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Description>virtio ethernet adapter on "VM Network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet1</rasd:ElementName>
<rasd:InstanceID>11</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>virtio</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
...
<rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Description>virtio ethernet adapter on "VM Network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet2</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>virtio</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
...
<rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Description>virtio ethernet adapter on "VM Network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet3</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>virtio</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
""")
def test_set_nic_type_no_existing(self):
"""Set NIC hardware type for an OVF with no NICs (no-op)."""
self.command.package = self.minimal_ovf
self.command.nic_type = "virtio"
self.command.run()
self.assertLogged(**self.NO_ITEMS_NO_WORK)
self.command.finished()
self.check_diff("", file1=self.minimal_ovf)
    def test_set_nic_count_add(self):
        """Add additional NICs across all profiles."""
        self.command.package = self.input_ovf
        self.command.nics = 5
        self.command.run()
        self.command.finished()
        # Formerly profile-specific NICs become common to all profiles,
        # and two brand-new NICs (Ethernet4/Ethernet5) are appended.
        self.check_diff("""
</ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <ovf:Item>
<rasd:AddressOnParent>12</rasd:AddressOnParent>
...
</ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <ovf:Item>
<rasd:AddressOnParent>13</rasd:AddressOnParent>
...
<rasd:InstanceID>13</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item>
+ <rasd:AddressOnParent>14</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:ElementName>Ethernet4</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item>
+ <rasd:AddressOnParent>15</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:ElementName>Ethernet5</rasd:ElementName>
+ <rasd:InstanceID>15</rasd:InstanceID>
<rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>""")
    def test_set_nic_count_add_smart_networks(self):
        """Add additional NICs (and implicitly networks) across all profiles.

        In this OVF, each NIC is mapped to a unique network, so COT must be
        smart enough to create additional networks as well.
        """
        self.command.package = self.csr_ovf
        self.command.nics = 6
        self.command.run()
        self.command.finished()
        # Three new GigabitEthernet4-6 networks AND NIC Items are created,
        # following the one-NIC-per-network pattern of the source OVF.
        self.check_diff("""
<ovf:Description>Data network 3</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="GigabitEthernet4">
+ <ovf:Description>Data network 4</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="GigabitEthernet5">
+ <ovf:Description>Data network 5</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="GigabitEthernet6">
+ <ovf:Description>Data network 6</ovf:Description>
</ovf:Network>
...
</ovf:Item>
+ <ovf:Item>
+ <rasd:AddressOnParent>14</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>GigabitEthernet4</rasd:Connection>
+ <rasd:Description>NIC representing GigabitEthernet4</rasd:Description>
+ <rasd:ElementName>GigabitEthernet4</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3 virtio</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item>
+ <rasd:AddressOnParent>15</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>GigabitEthernet5</rasd:Connection>
+ <rasd:Description>NIC representing GigabitEthernet5</rasd:Description>
+ <rasd:ElementName>GigabitEthernet5</rasd:ElementName>
+ <rasd:InstanceID>15</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3 virtio</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item>
+ <rasd:AddressOnParent>16</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>GigabitEthernet6</rasd:Connection>
+ <rasd:Description>NIC representing GigabitEthernet6</rasd:Description>
+ <rasd:ElementName>GigabitEthernet6</rasd:ElementName>
+ <rasd:InstanceID>16</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3 virtio</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>""", file1=self.csr_ovf)
def test_set_nic_count_named_nics_and_networks(self):
    """Add more NICs and explicitly named networks across all profiles.
    This tests a user-reported issue where COT gets confused because the
    base OVF uses the same strings for NIC and network names, but the
    desired output OVF does not.
    """
    self.command.package = self.csr_ovf
    self.command.nics = 4
    # NIC names come from a wildcard pattern; networks get explicit,
    # distinct names so NIC names and network names no longer collide.
    self.command.nic_names = ['GigabitEthernet{1}']
    self.command.nic_networks = ["Alpha", "Beta", "Delta", "Gamma"]
    self.command.run()
    self.command.finished()
    # The original networks (named after the NICs) are no longer
    # referenced by anything and so are removed, with a log message each.
    self.assertLogged(**self.removing_network_message('GigabitEthernet1'))
    self.assertLogged(**self.removing_network_message('GigabitEthernet2'))
    self.assertLogged(**self.removing_network_message('GigabitEthernet3'))
    self.check_diff("""
<ovf:Info>The list of logical networks</ovf:Info>
- <ovf:Network ovf:name="GigabitEthernet1">
- <ovf:Description>Data network 1</ovf:Description>
+ <ovf:Network ovf:name="Alpha">
+ <ovf:Description>Alpha</ovf:Description>
</ovf:Network>
- <ovf:Network ovf:name="GigabitEthernet2">
- <ovf:Description>Data network 2</ovf:Description>
+ <ovf:Network ovf:name="Beta">
+ <ovf:Description>Beta</ovf:Description>
</ovf:Network>
- <ovf:Network ovf:name="GigabitEthernet3">
- <ovf:Description>Data network 3</ovf:Description>
+ <ovf:Network ovf:name="Delta">
+ <ovf:Description>Delta</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="Gamma">
+ <ovf:Description>Gamma</ovf:Description>
</ovf:Network>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>GigabitEthernet1</rasd:Connection>
+ <rasd:Connection>Alpha</rasd:Connection>
<rasd:Description>NIC representing GigabitEthernet1</rasd:Description>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>GigabitEthernet2</rasd:Connection>
+ <rasd:Connection>Beta</rasd:Connection>
<rasd:Description>NIC representing GigabitEthernet2</rasd:Description>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>GigabitEthernet3</rasd:Connection>
+ <rasd:Connection>Delta</rasd:Connection>
<rasd:Description>NIC representing GigabitEthernet3</rasd:Description>
...
<rasd:InstanceID>13</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3 virtio</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item>
+ <rasd:AddressOnParent>14</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>Gamma</rasd:Connection>
+ <rasd:Description>NIC representing GigabitEthernet4</rasd:Description>
+ <rasd:ElementName>GigabitEthernet4</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
<rasd:ResourceSubType>VMXNET3 virtio</rasd:ResourceSubType>
""", file1=self.csr_ovf)
def test_set_nic_count_merge_profiles(self):
    """Add NICs that already exist under one profile to another."""
    self.command.package = self.input_ovf
    self.command.nics = 3
    self.command.profiles = ['2CPU-2GB-1NIC']
    self.command.run()
    self.command.finished()
    # NICs 2 and 3 already exist under profile 4CPU-4GB-3NIC; instead of
    # cloning them, the existing items simply gain the extra profile.
    self.check_diff("""
</ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <ovf:Item ovf:configuration="2CPU-2GB-1NIC 4CPU-4GB-3NIC">
<rasd:AddressOnParent>12</rasd:AddressOnParent>
...
</ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <ovf:Item ovf:configuration="2CPU-2GB-1NIC 4CPU-4GB-3NIC">
<rasd:AddressOnParent>13</rasd:AddressOnParent>
""")
def test_set_nic_count_create_new_one_profile(self):
    """Create a new NIC under a single profile."""
    self.command.package = self.input_ovf
    # A string count is intentionally given here; the test passing shows
    # the nics setter accepts it as equivalent to the integer 4.
    self.command.nics = '4'
    self.command.profiles = ['4CPU-4GB-3NIC']
    self.command.run()
    self.command.finished()
    self.check_diff("""
</ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:AddressOnParent>14</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:ElementName>Ethernet4</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
def test_set_nic_count_create_new_and_new_profile(self):
    """Create new NICs under a new profile. Test for issue #64."""
    self.command.package = self.input_ovf
    self.command.nics = '4'
    # This profile does not exist yet; the command must create it first.
    self.command.profiles = ['4CPU-4GB-4NIC']
    self.command.run()
    self.command.finished()
    # Expect: a new Configuration entry, existing NICs extended to the
    # new profile, and a fourth NIC created only under the new profile.
    self.check_diff("""
</ovf:Configuration>
+ <ovf:Configuration ovf:id="4CPU-4GB-4NIC">
+ <ovf:Label>4CPU-4GB-4NIC</ovf:Label>
+ <ovf:Description>4CPU-4GB-4NIC</ovf:Description>
+ </ovf:Configuration>
</ovf:DeploymentOptionSection>
...
</ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC 4CPU-4GB-4NIC">
<rasd:AddressOnParent>12</rasd:AddressOnParent>
...
</ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC 4CPU-4GB-4NIC">
<rasd:AddressOnParent>13</rasd:AddressOnParent>
...
<rasd:InstanceID>13</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-4NIC">
+ <rasd:AddressOnParent>14</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:ElementName>Ethernet4</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
<rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
""")
def test_set_nic_count_create_new_and_split_new_profile(self):
    """Create new NICs under a new profile splitting from unified profile.
    Another test for issue #64.
    """
    self.command.package = self.csr_ovf
    self.command.nics = '4'
    self.command.profiles = ['4CPU-4GB-4NIC']
    self.command.run()
    self.command.finished()
    # Expect: a new network and profile, plus a fourth NIC scoped only to
    # the new profile; the existing unified items are left untouched.
    self.check_diff(file1=self.csr_ovf, expected="""
</ovf:Network>
+ <ovf:Network ovf:name="GigabitEthernet4">
+ <ovf:Description>Data network 4</ovf:Description>
+ </ovf:Network>
</ovf:NetworkSection>
...
<ovf:Description>Large hardware profile (requires purchase of DRAM \
upgrade SKU) - 4 vCPUs, 8 GB RAM</ovf:Description>
+ </ovf:Configuration>
+ <ovf:Configuration ovf:id="4CPU-4GB-4NIC">
+ <ovf:Label>4CPU-4GB-4NIC</ovf:Label>
+ <ovf:Description>4CPU-4GB-4NIC</ovf:Description>
</ovf:Configuration>
...
</ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-4NIC">
+ <rasd:AddressOnParent>14</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>GigabitEthernet4</rasd:Connection>
+ <rasd:Description>NIC representing GigabitEthernet4</rasd:Description>
+ <rasd:ElementName>GigabitEthernet4</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3 virtio</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
def test_set_nic_count_delete_nics(self):
    """Lower the NIC count for one profile, removing its NICs."""
    cmd = self.command
    cmd.package = self.input_ovf
    cmd.nics = 0
    cmd.profiles = ['1CPU-1GB-1NIC']
    cmd.run()
    cmd.finished()
    # The shared NIC item isn't deleted outright; it is merely narrowed
    # to the profiles that still include it.
    expected = """
</ovf:Item>
- <ovf:Item>
+ <ovf:Item ovf:configuration="2CPU-2GB-1NIC 4CPU-4GB-3NIC">
<rasd:AddressOnParent>11</rasd:AddressOnParent>
"""
    self.check_diff(expected)
def test_set_nic_count_delete_nics_new_profile(self):
    """Set NIC count to a lower value under a newly created profile."""
    self.command.package = self.csr_ovf
    self.command.nics = 1
    # This profile does not exist in the source OVF and must be created.
    self.command.profiles = ['1CPU-4GB-1NIC']
    self.command.run()
    self.command.finished()
    # NICs 2 and 3 are excluded from the new profile, so the formerly
    # profile-agnostic items become scoped to the pre-existing profiles.
    self.check_diff(file1=self.csr_ovf, expected="""
</ovf:Configuration>
+ <ovf:Configuration ovf:id="1CPU-4GB-1NIC">
+ <ovf:Label>1CPU-4GB-1NIC</ovf:Label>
+ <ovf:Description>1CPU-4GB-1NIC</ovf:Description>
+ </ovf:Configuration>
</ovf:DeploymentOptionSection>
...
</ovf:Item>
- <ovf:Item>
+ <ovf:Item ovf:configuration="1CPU-4GB 2CPU-4GB 4CPU-4GB 4CPU-8GB">
<rasd:AddressOnParent>12</rasd:AddressOnParent>
...
</ovf:Item>
- <ovf:Item>
+ <ovf:Item ovf:configuration="1CPU-4GB 2CPU-4GB 4CPU-4GB 4CPU-8GB">
<rasd:AddressOnParent>13</rasd:AddressOnParent>
""")
def test_set_nic_count_no_existing(self):
    """Create a NIC when nothing pre-exists."""
    self.command.package = self.minimal_ovf
    self.command.nics = 2
    self.command.run()
    # With no existing hardware or networks, the command warns that it is
    # building NIC items from scratch on a generic "VM Network".
    self.assertLogged(**self.NEW_HW_FROM_SCRATCH)
    self.assertLogged(**self.GENERIC_NETWORK)
    self.command.finished()
    # The rasd namespace and a NetworkSection must be added to the
    # envelope as a side effect of creating the first NICs.
    self.check_diff(file1=self.minimal_ovf, expected="""
<?xml version='1.0' encoding='utf-8'?>
-<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1">
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData">
<ovf:References />
+ <ovf:NetworkSection>
+ <ovf:Info>Logical networks</ovf:Info>
+ <ovf:Network ovf:name="VM Network">
+ <ovf:Description>VM Network</ovf:Description>
+ </ovf:Network>
+ </ovf:NetworkSection>
<ovf:VirtualSystem ovf:id="x">
...
<ovf:Info />
+ <ovf:Item>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:ElementName>Ethernet1</rasd:ElementName>
+ <rasd:InstanceID>1</rasd:InstanceID>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:ElementName>Ethernet2</rasd:ElementName>
+ <rasd:InstanceID>2</rasd:InstanceID>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
def test_set_nic_count_zero_then_re_add(self):
    """Set NIC count to zero, then recreate the NICs."""
    # Pass 1: strip all NICs, which also removes the now-unused network
    # and the whole NetworkSection.
    self.command.package = self.v09_ovf
    self.command.nics = 0
    self.command.run()
    self.command.finished()
    self.assertLogged(**self.removing_network_message('bridged'))
    self.assertLogged(**self.REMOVING_NETWORKSECTION)
    # Pass 2: re-run against the intermediate output file and add one
    # NIC back, which is rebuilt from scratch on a generic network.
    self.command.package = self.temp_file
    self.command.nics = 1
    self.command.run()
    self.assertLogged(**self.NEW_HW_FROM_SCRATCH)
    self.assertLogged(**self.GENERIC_NETWORK)
    self.command.finished()
    # The net diff vs. the original: the recreated NIC loses the original
    # PCNet32-specific details and connects to "VM Network" instead.
    self.check_diff(file1=self.v09_ovf, expected="""
<ovf:Section xsi:type="ovf:NetworkSection_Type">
- <ovf:Info>The list of logical networks</ovf:Info>
- <ovf:Network ovf:name="bridged">
- <ovf:Description>The bridged network</ovf:Description>
+ <ovf:Info>Logical networks</ovf:Info>
+ <ovf:Network ovf:name="VM Network">
+ <ovf:Description>VM Network</ovf:Description>
</ovf:Network>
...
<ovf:Item>
- <rasd:Caption>ethernet0</rasd:Caption>
- <rasd:Description>PCNet32 ethernet adapter</rasd:Description>
+ <rasd:Caption>Ethernet1</rasd:Caption>
<rasd:InstanceId>8</rasd:InstanceId>
<rasd:ResourceType>10</rasd:ResourceType>
- <rasd:ResourceSubType>PCNet32</rasd:ResourceSubType>
- <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>bridged</rasd:Connection>
- <rasd:AddressOnParent>1</rasd:AddressOnParent>
+ <rasd:Connection>VM Network</rasd:Connection>
</ovf:Item>
""")
def test_set_nic_network_one_profile(self):
    """Create a new network and map a NIC to it under a single profile."""
    # Create a new network and map to it under one profile
    # This involves splitting the existing NIC into two items
    self.command.package = self.input_ovf
    self.command.nic_networks = ['UT']
    self.command.network_descriptions = ['Unit test network']
    self.command.profiles = ['2CPU-2GB-1NIC']
    self.command.run()
    self.command.finished()
    # The previously shared NIC item is split: a new UT-connected copy is
    # scoped to 2CPU-2GB-1NIC, while the original remains for the rest.
    self.check_diff("""
<ovf:Description>VM Network</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="UT">
+ <ovf:Description>Unit test network</ovf:Description>
</ovf:Network>
...
</ovf:Item>
+ <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
+ <rasd:AddressOnParent>11</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"</rasd:Description>
+ <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:InstanceID>11</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
""")
def test_set_nic_network_all_profiles(self):
    """Test changing NIC network mapping across all profiles."""
    self.command.package = self.input_ovf
    self.command.nic_networks = ['UT', 'UT', 'UT']
    self.command.run()
    self.command.finished()
    # "VM Network" is no longer referenced by any NIC and is removed.
    self.assertLogged(**self.removing_network_message())
    self.check_diff("""
<ovf:Info>The list of logical networks</ovf:Info>
- <ovf:Network ovf:name="VM Network">
- <ovf:Description>VM Network</ovf:Description>
+ <ovf:Network ovf:name="UT">
+ <ovf:Description>UT</ovf:Description>
</ovf:Network>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"</rasd:Description>
<rasd:ElementName>GigabitEthernet1</rasd:ElementName>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"</rasd:Description>
<rasd:ElementName>GigabitEthernet2</rasd:ElementName>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"</rasd:Description>
<rasd:ElementName>GigabitEthernet3</rasd:ElementName>
""")
def test_set_nic_network_list_expansion(self):
    """Specify fewer networks than NICs to test implicit NIC assignment.
    Also specify fewer network descriptions than networks.
    Remaining networks get the last description in the list.
    Remaining NICs get the last network in the list.
    """
    self.command.package = self.input_ovf
    # Two networks for three NICs; one description for two networks.
    self.command.nic_networks = ['UT1', 'UT2']
    self.command.network_descriptions = ['First UT']
    self.command.run()
    self.command.finished()
    self.assertLogged(**self.removing_network_message())
    # NIC 3 falls back to UT2; UT2 reuses the "First UT" description.
    self.check_diff("""
<ovf:Info>The list of logical networks</ovf:Info>
- <ovf:Network ovf:name="VM Network">
- <ovf:Description>VM Network</ovf:Description>
+ <ovf:Network ovf:name="UT1">
+ <ovf:Description>First UT</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="UT2">
+ <ovf:Description>First UT</ovf:Description>
</ovf:Network>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT1</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT1"</rasd:Description>
<rasd:ElementName>GigabitEthernet1</rasd:ElementName>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT2</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT2"</rasd:Description>
<rasd:ElementName>GigabitEthernet2</rasd:ElementName>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT2</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT2"</rasd:Description>
<rasd:ElementName>GigabitEthernet3</rasd:ElementName>
""")
def test_set_nic_network_list_pattern(self):
    """Use wildcard expansion to create multiple networks as needed."""
    self.command.package = self.input_ovf
    # "{20}" expands per-network starting at 20; "#{2}" starts at 2 for
    # the second and subsequent descriptions.
    self.command.nic_networks = ["UT_{20}_network"]
    self.command.network_descriptions = ['First network', '#{2} Network']
    self.command.run()
    self.command.finished()
    self.assertLogged(**self.removing_network_message())
    self.check_diff("""
<ovf:Info>The list of logical networks</ovf:Info>
- <ovf:Network ovf:name="VM Network">
- <ovf:Description>VM Network</ovf:Description>
+ <ovf:Network ovf:name="UT_20_network">
+ <ovf:Description>First network</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="UT_21_network">
+ <ovf:Description>#2 Network</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="UT_22_network">
+ <ovf:Description>#3 Network</ovf:Description>
</ovf:Network>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT_20_network</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT_20_network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet1</rasd:ElementName>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT_21_network</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT_21_network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet2</rasd:ElementName>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT_22_network</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT_22_network"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet3</rasd:ElementName>
""")
def test_set_network_description_only(self):
    """Rewrite network descriptions while leaving the names untouched."""
    cmd = self.command
    cmd.package = self.input_ovf
    cmd.network_descriptions = ['Network 1', 'Network 2']
    cmd.run()
    cmd.finished()
    # Only one network exists, so only the first description is applied.
    diff = """
<ovf:Network ovf:name="VM Network">
- <ovf:Description>VM Network</ovf:Description>
+ <ovf:Description>Network 1</ovf:Description>
</ovf:Network>
"""
    self.check_diff(diff)
def test_set_nic_mac_address_single_all_profiles(self):
    """Set a single MAC address on all NICs on all profiles."""
    self.command.package = self.input_ovf
    self.command.mac_addresses_list = ['10:20:30:40:50:60']
    self.command.run()
    self.command.finished()
    # A single MAC in the list is applied to every NIC as rasd:Address.
    self.check_diff("""
<ovf:Item>
+ <rasd:Address>10:20:30:40:50:60</rasd:Address>
<rasd:AddressOnParent>11</rasd:AddressOnParent>
...
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>10:20:30:40:50:60</rasd:Address>
<rasd:AddressOnParent>12</rasd:AddressOnParent>
...
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>10:20:30:40:50:60</rasd:Address>
<rasd:AddressOnParent>13</rasd:AddressOnParent>
""")
def test_set_nic_mac_addresses_list_all_profiles(self):
    """Set a sequence of MAC addresses for all profiles."""
    self.command.package = self.input_ovf
    self.command.mac_addresses_list = \
        ['10:20:30:40:50:60', '01:02:03:04:05:06', 'ab:cd:ef:00:00:00']
    self.command.run()
    self.command.finished()
    # One MAC per NIC, assigned in list order.
    self.check_diff("""
<ovf:Item>
+ <rasd:Address>10:20:30:40:50:60</rasd:Address>
<rasd:AddressOnParent>11</rasd:AddressOnParent>
...
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>01:02:03:04:05:06</rasd:Address>
<rasd:AddressOnParent>12</rasd:AddressOnParent>
...
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>ab:cd:ef:00:00:00</rasd:Address>
<rasd:AddressOnParent>13</rasd:AddressOnParent>
""")
def test_set_nic_name_list_exact(self):
    """Set a list of names identical in length to the number of NICs."""
    self.command.package = self.input_ovf
    self.command.nic_names = ['foo', 'bar', 'baz']
    self.command.run()
    self.command.finished()
    # Each NIC's ElementName is replaced in list order, one-to-one.
    self.check_diff("""
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:ElementName>foo</rasd:ElementName>
<rasd:InstanceID>11</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet2</rasd:ElementName>
+ <rasd:ElementName>bar</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet3</rasd:ElementName>
+ <rasd:ElementName>baz</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
""")
def test_set_nic_name_list_extra(self):
    """Set a list of NIC names that's longer than needed."""
    self.command.package = self.input_ovf
    self.command.nic_names = ['foo', 'bar', 'baz', 'bat']
    self.command.run()
    # The unused surplus name ('bat') triggers a warning.
    self.assertLogged(levelname="WARNING",
                      msg="not all %s values were used",
                      args=('ethernet', 'ElementName', ['bat']))
    self.command.finished()
    self.check_diff("""
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:ElementName>foo</rasd:ElementName>
<rasd:InstanceID>11</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet2</rasd:ElementName>
+ <rasd:ElementName>bar</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet3</rasd:ElementName>
+ <rasd:ElementName>baz</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
""")
def test_set_nic_name_list_short(self):
    """Set a list of NIC names that's shorter than needed."""
    self.command.package = self.input_ovf
    self.command.nic_names = ['foo', 'bar']
    self.command.run()
    self.command.finished()
    # The last name in the list is reused for the remaining NIC(s).
    self.check_diff("""
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:ElementName>foo</rasd:ElementName>
<rasd:InstanceID>11</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet2</rasd:ElementName>
+ <rasd:ElementName>bar</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet3</rasd:ElementName>
+ <rasd:ElementName>bar</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
""")
def test_set_nic_name_pattern(self):
    """Set NIC names based on a pattern."""
    self.command.package = self.input_ovf
    # "{0}" expands to 0, 1, 2, ... across the NICs.
    self.command.nic_names = ['eth{0}']
    self.command.run()
    self.command.finished()
    self.check_diff("""
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:ElementName>eth0</rasd:ElementName>
<rasd:InstanceID>11</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet2</rasd:ElementName>
+ <rasd:ElementName>eth1</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet3</rasd:ElementName>
+ <rasd:ElementName>eth2</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
""")
def test_set_nic_name_list_pattern(self):
    """Set NIC names based on a constant plus a pattern."""
    self.command.package = self.input_ovf
    # NIC 1 gets the literal 'foo'; the pattern then expands from 10.
    self.command.nic_names = ['foo', 'eth{10}']
    self.command.run()
    self.command.finished()
    self.check_diff("""
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:ElementName>foo</rasd:ElementName>
<rasd:InstanceID>11</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet2</rasd:ElementName>
+ <rasd:ElementName>eth10</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
...
<rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet3</rasd:ElementName>
+ <rasd:ElementName>eth11</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
""")
def test_deprecated_nic_type(self):
    """Exercise the legacy nic_type alias for the nic_types property."""
    cmd = self.command
    cmd.package = self.input_ovf
    # Unset by default.
    self.assertEqual(cmd.nic_type, None)
    # Setting the singular form canonicalizes the value and is mirrored
    # by the plural form as a one-element list.
    cmd.nic_type = 'e1000'
    self.assertEqual(cmd.nic_type, 'E1000')
    self.assertEqual(cmd.nic_types, ['E1000'])
    cmd.nic_types = ['e1000', 'virtio']
    self.assertEqual(cmd.nic_types, ['E1000', 'virtio'])
    # Once multiple types are set, reading the singular form raises.
    with self.assertRaises(TypeError):
        assert cmd.nic_type
def test_set_nic_kitchen_sink_all_profiles(self):
    """Test changing many NIC properties at once under all profiles."""
    self.command.package = self.input_ovf
    # Change type(s), networks, and MACs in a single invocation.
    self.command.nic_types = ['e1000', 'virtio']
    self.command.nic_networks = ['UT1', 'UT2', 'UT3']
    self.command.mac_addresses_list = \
        ['00:00:00:00:00:01', '11:22:33:44:55:66', 'fe:fd:fc:fb:fa:f9']
    self.command.run()
    self.command.finished()
    # The old "VM Network" is unreferenced afterwards and gets removed.
    self.assertLogged(**self.removing_network_message())
    self.check_diff("""
<ovf:Info>The list of logical networks</ovf:Info>
- <ovf:Network ovf:name="VM Network">
- <ovf:Description>VM Network</ovf:Description>
+ <ovf:Network ovf:name="UT1">
+ <ovf:Description>UT1</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="UT2">
+ <ovf:Description>UT2</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="UT3">
+ <ovf:Description>UT3</ovf:Description>
</ovf:Network>
...
<ovf:Item>
+ <rasd:Address>00:00:00:00:00:01</rasd:Address>
<rasd:AddressOnParent>11</rasd:AddressOnParent>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT1</rasd:Connection>
+ <rasd:Description>E1000/virtio ethernet adapter on "UT1"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet1</rasd:ElementName>
<rasd:InstanceID>11</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>E1000 virtio</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
...
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>11:22:33:44:55:66</rasd:Address>
<rasd:AddressOnParent>12</rasd:AddressOnParent>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT2</rasd:Connection>
+ <rasd:Description>E1000/virtio ethernet adapter on "UT2"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet2</rasd:ElementName>
<rasd:InstanceID>12</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>E1000 virtio</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
...
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>fe:fd:fc:fb:fa:f9</rasd:Address>
<rasd:AddressOnParent>13</rasd:AddressOnParent>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT3</rasd:Connection>
+ <rasd:Description>E1000/virtio ethernet adapter on "UT3"\
</rasd:Description>
<rasd:ElementName>GigabitEthernet3</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceSubType>E1000 virtio</rasd:ResourceSubType>
<rasd:ResourceType>10</rasd:ResourceType>
""")
def test_set_nic_kitchen_sink_one_profile(self):
    """Test changing many NIC properties at once under one profile."""
    self.command.package = self.input_ovf
    self.command.profiles = ['4CPU-4GB-3NIC']
    self.command.nics = 4
    self.command.nic_networks = ['UT']
    self.command.run()
    self.command.finished()
    # NIC 1 (previously shared by all profiles) is split so the targeted
    # profile gets a UT-connected copy; NIC 4 is created from scratch.
    self.check_diff("""
<ovf:Description>VM Network</ovf:Description>
+ </ovf:Network>
+ <ovf:Network ovf:name="UT">
+ <ovf:Description>UT</ovf:Description>
</ovf:Network>
...
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:AddressOnParent>11</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"\
</rasd:Description>
+ <rasd:ElementName>GigabitEthernet1</rasd:ElementName>
+ <rasd:InstanceID>11</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
<rasd:AddressOnParent>12</rasd:AddressOnParent>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"</rasd:Description>
<rasd:ElementName>GigabitEthernet2</rasd:ElementName>
...
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"</rasd:Description>
<rasd:ElementName>GigabitEthernet3</rasd:ElementName>
<rasd:InstanceID>13</rasd:InstanceID>
+ <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:AddressOnParent>14</rasd:AddressOnParent>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Connection>UT</rasd:Connection>
+ <rasd:Description>VMXNET3 ethernet adapter on "UT"</rasd:Description>
+ <rasd:ElementName>Ethernet4</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
<rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
""")
def test_set_nic_kitchen_sink_no_existing(self):
    """Define NIC in an OVF that previously had none."""
    self.command.package = self.minimal_ovf
    self.command.nics = 1
    self.command.nic_networks = ['testme']
    self.command.nic_types = ['virtio-net-pci', 'e1000']
    self.command.mac_addresses_list = ['12:34:56:78:9a:bc']
    self.command.run()
    # All NIC hardware is created from scratch in the minimal OVF.
    self.assertLogged(**self.NEW_HW_FROM_SCRATCH)
    self.command.finished()
    # The rasd namespace and a NetworkSection are added along with the
    # single new NIC item.
    self.check_diff(file1=self.minimal_ovf,
                    expected="""
<?xml version='1.0' encoding='utf-8'?>
-<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1">
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData">
<ovf:References />
+ <ovf:NetworkSection>
+ <ovf:Info>Logical networks</ovf:Info>
+ <ovf:Network ovf:name="testme">
+ <ovf:Description>testme</ovf:Description>
+ </ovf:Network>
+ </ovf:NetworkSection>
<ovf:VirtualSystem ovf:id="x">
...
<ovf:Info />
+ <ovf:Item>
+ <rasd:Address>12:34:56:78:9a:bc</rasd:Address>
+ <rasd:Connection>testme</rasd:Connection>
+ <rasd:ElementName>Ethernet1</rasd:ElementName>
+ <rasd:InstanceID>1</rasd:InstanceID>
+ <rasd:ResourceSubType>virtio E1000</rasd:ResourceSubType>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
def test_set_serial_count_delete_one_profile(self):
    """Drop a shared serial port from just one configuration profile."""
    cmd = self.command
    cmd.package = self.input_ovf
    cmd.profiles = ['2CPU-2GB-1NIC']
    cmd.serial_ports = 1
    cmd.run()
    cmd.finished()
    # The shared item is narrowed to the profiles that still include it
    # rather than being deleted outright.
    expected = """
</ovf:Item>
- <ovf:Item ovf:required="false">
+ <ovf:Item ovf:configuration="1CPU-1GB-1NIC 4CPU-4GB-3NIC" \
ovf:required="false">
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
"""
    self.check_diff(expected)
def test_set_serial_count_delete_all_profiles(self):
    """Remove a serial port across all profiles."""
    self.command.package = self.input_ovf
    self.command.serial_ports = 1
    self.command.run()
    self.command.finished()
    # With no profile filter, the second serial port item is deleted
    # entirely from the hardware section.
    self.check_diff("""
</ovf:Item>
- <ovf:Item ovf:required="false">
- <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Description>Serial Port acting as IOSd Aux Port\
</rasd:Description>
- <rasd:ElementName>Serial 2</rasd:ElementName>
- <rasd:InstanceID>10</rasd:InstanceID>
- <rasd:ResourceType>21</rasd:ResourceType>
- </ovf:Item>
<ovf:Item>
""")
def test_set_serial_count_create_all_profiles(self):
"""Create a serial port under all profiles."""
self.command.package = self.input_ovf
self.command.serial_ports = 3
self.command.run()
self.command.finished()
self.check_diff("""
</ovf:Item>
+ <ovf:Item ovf:required="false">
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Description>Serial Port acting as IOSd Aux Port\
</rasd:Description>
+ <rasd:ElementName>Serial 2</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
+ <rasd:ResourceType>21</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
def test_set_serial_count_no_existing(self):
"""Create a serial port in an OVF that had none."""
self.command.package = self.minimal_ovf
self.command.serial_ports = 1
self.command.run()
self.assertLogged(**self.NEW_HW_FROM_SCRATCH)
self.command.finished()
self.check_diff(file1=self.minimal_ovf,
expected="""
<?xml version='1.0' encoding='utf-8'?>
-<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1">
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData">
<ovf:References />
...
<ovf:Info />
+ <ovf:Item>
+ <rasd:ElementName>serial</rasd:ElementName>
+ <rasd:InstanceID>1</rasd:InstanceID>
+ <rasd:ResourceType>21</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
def test_set_serial_connectivity_one_port_all_profiles(self):
"""Set serial connectivity for one port under all profiles."""
self.command.package = self.input_ovf
self.command.serial_connectivity = ['telnet://localhost:22001']
self.command.run()
self.command.finished()
self.check_diff("""
<ovf:Item ovf:required="false">
+ <rasd:Address>telnet://localhost:22001</rasd:Address>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
""")
def test_set_serial_connectivity_two_ports_all_profiles(self):
"""Set serial connectivity for multiple ports across all profiles."""
self.command.package = self.input_ovf
self.command.serial_connectivity = \
['telnet://localhost:22001', 'telnet://localhost:22002']
self.command.run()
self.command.finished()
self.check_diff("""
<ovf:Item ovf:required="false">
+ <rasd:Address>telnet://localhost:22001</rasd:Address>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
...
<ovf:Item ovf:required="false">
+ <rasd:Address>telnet://localhost:22002</rasd:Address>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
""")
def test_serial_create_kitchen_sink(self):
"""Create a serial port and set connectivity in one pass."""
self.command.package = self.input_ovf
self.command.serial_ports = '3'
self.command.serial_connectivity = \
['telnet://foo:1', 'telnet://foo:2', 'telnet://foo:3']
self.command.run()
self.command.finished()
self.check_diff("""
<ovf:Item ovf:required="false">
+ <rasd:Address>telnet://foo:1</rasd:Address>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
...
<ovf:Item ovf:required="false">
+ <rasd:Address>telnet://foo:2</rasd:Address>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
...
<rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item ovf:required="false">
+ <rasd:Address>telnet://foo:3</rasd:Address>
+ <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
+ <rasd:Description>Serial Port acting as IOSd Aux Port\
</rasd:Description>
+ <rasd:ElementName>Serial 2</rasd:ElementName>
+ <rasd:InstanceID>14</rasd:InstanceID>
+ <rasd:ResourceType>21</rasd:ResourceType>
</ovf:Item>
""")
def test_serial_delete_kitchen_sink(self):
"""Delete a serial port and set connectivity in one pass."""
self.command.package = self.input_ovf
self.command.serial_ports = 1
self.command.serial_connectivity = ['telnet://bar:22']
self.command.run()
self.command.finished()
self.check_diff("""
<ovf:Item ovf:required="false">
+ <rasd:Address>telnet://bar:22</rasd:Address>
<rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
...
<rasd:InstanceID>9</rasd:InstanceID>
- <rasd:ResourceType>21</rasd:ResourceType>
- </ovf:Item>
- <ovf:Item ovf:required="false">
- <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Description>Serial Port acting as IOSd Aux Port\
</rasd:Description>
- <rasd:ElementName>Serial 2</rasd:ElementName>
- <rasd:InstanceID>10</rasd:InstanceID>
<rasd:ResourceType>21</rasd:ResourceType>
""")
def test_set_scsi_subtype_all_profiles(self):
"""Set SCSI controller subtype under all profiles."""
self.command.package = self.input_ovf
self.command.scsi_subtype = "virtio"
self.assertEqual(self.command.scsi_subtype, "virtio")
self.command.run()
self.command.finished()
self.check_diff("""
<rasd:InstanceID>3</rasd:InstanceID>
- <rasd:ResourceSubType>lsilogic</rasd:ResourceSubType>
+ <rasd:ResourceSubType>virtio</rasd:ResourceSubType>
<rasd:ResourceType>6</rasd:ResourceType>
""")
def test_clear_scsi_subtype_all_profiles(self):
"""Clear SCSI controller subtype under all profiles."""
self.command.package = self.input_ovf
# TODO: this should really be an empty list or None
self.command.scsi_subtype = ""
self.assertEqual(self.command.scsi_subtype, None)
self.assertEqual(self.command.scsi_subtypes, [])
self.command.run()
self.command.finished()
self.check_diff("""
<rasd:InstanceID>3</rasd:InstanceID>
- <rasd:ResourceSubType>lsilogic</rasd:ResourceSubType>
<rasd:ResourceType>6</rasd:ResourceType>
""")
def test_set_scsi_subtype_one_profile(self):
"""Set SCSI controller subtype under a single profile."""
self.command.package = self.input_ovf
self.command.scsi_subtypes = ['buslogic', 'lsilogic']
self.assertEqual(self.command.scsi_subtypes, ['buslogic', 'lsilogic'])
with self.assertRaises(TypeError):
assert self.command.scsi_subtype
self.command.profiles = ['4CPU-4GB-3NIC']
self.command.run()
self.command.finished()
# This requires creating a new variant of the SCSI controller
# specific to this profile
self.check_diff("""
</ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>0</rasd:Address>
+ <rasd:Description>SCSI Controller</rasd:Description>
+ <rasd:ElementName>SCSI Controller 0</rasd:ElementName>
+ <rasd:InstanceID>3</rasd:InstanceID>
+ <rasd:ResourceSubType>buslogic lsilogic</rasd:ResourceSubType>
+ <rasd:ResourceType>6</rasd:ResourceType>
+ </ovf:Item>
<ovf:Item>
""")
def test_set_scsi_subtype_no_existing(self):
"""Set SCSI controller subtype for an OVF with none (no-op)."""
self.command.package = self.minimal_ovf
self.assertEqual(self.command.scsi_subtype, None)
self.assertEqual(self.command.scsi_subtypes, None)
self.command.scsi_subtype = "virtualscsi"
self.assertEqual(self.command.scsi_subtype, "VirtualSCSI")
self.assertEqual(self.command.scsi_subtypes, ["VirtualSCSI"])
self.command.run()
self.assertLogged(**self.NO_ITEMS_NO_WORK)
self.command.finished()
self.check_diff("", file1=self.minimal_ovf)
def test_set_ide_subtype_all_profiles(self):
"""Set IDE controller subtype across all profiles."""
self.command.package = self.input_ovf
self.command.ide_subtypes = ["virtio", "piix4"]
self.assertEqual(self.command.ide_subtypes, ["virtio", "PIIX4"])
with self.assertRaises(TypeError):
assert self.command.ide_subtype
self.command.run()
self.command.finished()
# Since there is no pre-existing subtype, we just create it
# under each controller:
self.check_diff("""
<rasd:InstanceID>4</rasd:InstanceID>
+ <rasd:ResourceSubType>virtio PIIX4</rasd:ResourceSubType>
<rasd:ResourceType>5</rasd:ResourceType>
...
<rasd:InstanceID>5</rasd:InstanceID>
+ <rasd:ResourceSubType>virtio PIIX4</rasd:ResourceSubType>
<rasd:ResourceType>5</rasd:ResourceType>
""")
def test_set_ide_subtype_one_profile(self):
"""Set IDE controller subtype under a single profile."""
self.command.package = self.input_ovf
self.command.ide_subtype = "virtio"
self.assertEqual(self.command.ide_subtype, "virtio")
self.assertEqual(self.command.ide_subtypes, ["virtio"])
self.command.profiles = ['4CPU-4GB-3NIC']
self.command.run()
self.command.finished()
# Here we have to create new controllers under this profile
# while leaving the default alone
self.check_diff("""
</ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>1</rasd:Address>
+ <rasd:Description>IDE Controller</rasd:Description>
+ <rasd:ElementName>VirtualIDEController 1</rasd:ElementName>
+ <rasd:InstanceID>4</rasd:InstanceID>
+ <rasd:ResourceSubType>virtio</rasd:ResourceSubType>
+ <rasd:ResourceType>5</rasd:ResourceType>
+ </ovf:Item>
<ovf:Item>
...
<rasd:InstanceID>5</rasd:InstanceID>
+ <rasd:ResourceType>5</rasd:ResourceType>
+ </ovf:Item>
+ <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
+ <rasd:Address>0</rasd:Address>
+ <rasd:Description>IDE Controller</rasd:Description>
+ <rasd:ElementName>VirtualIDEController 0</rasd:ElementName>
+ <rasd:InstanceID>5</rasd:InstanceID>
+ <rasd:ResourceSubType>virtio</rasd:ResourceSubType>
<rasd:ResourceType>5</rasd:ResourceType>
""")
def test_set_ide_subtype_no_existing(self):
"""Set IDE controller subtype for an OVF with none (no-op)."""
self.command.package = self.minimal_ovf
self.assertEqual(self.command.ide_subtype, None)
self.assertEqual(self.command.ide_subtypes, None)
self.command.ide_subtype = "virtio"
self.command.run()
self.assertLogged(**self.NO_ITEMS_NO_WORK)
self.command.finished()
self.check_diff("", file1=self.minimal_ovf)
def test_create_profile_inherit_default(self):
"""Create a new profile that's identical to the default one."""
self.command.package = self.input_ovf
self.command.profiles = ['UT']
self.command.cpus = 1
self.command.run()
self.command.finished()
self.check_diff("""
</ovf:Configuration>
+ <ovf:Configuration ovf:id="UT">
+ <ovf:Label>UT</ovf:Label>
+ <ovf:Description>UT</ovf:Description>
+ </ovf:Configuration>
</ovf:DeploymentOptionSection>
""")
def test_create_new_profile(self):
"""Create a new profile with new values."""
self.command.package = self.input_ovf
self.command.profiles = ['UT']
self.command.cpus = 8
self.command.run()
self.command.finished()
self.check_diff("""
</ovf:Configuration>
+ <ovf:Configuration ovf:id="UT">
+ <ovf:Label>UT</ovf:Label>
+ <ovf:Description>UT</ovf:Description>
+ </ovf:Configuration>
</ovf:DeploymentOptionSection>
...
</ovf:Item>
+ <ovf:Item ovf:configuration="UT">
+ <rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
+ <rasd:Description>Number of Virtual CPUs</rasd:Description>
+ <rasd:ElementName>8 virtual CPU(s)</rasd:ElementName>
+ <rasd:InstanceID>1</rasd:InstanceID>
+ <rasd:ResourceType>3</rasd:ResourceType>
+ <rasd:VirtualQuantity>8</rasd:VirtualQuantity>
+ <vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
+ </ovf:Item>
<ovf:Item>
""")
def test_create_two_profiles(self):
"""Create two profiles at once."""
self.command.package = self.input_ovf
self.command.profiles = ['UT', 'UT2']
self.command.memory = 8192
self.assertLogged(**self.MEMORY_UNIT_GUESS)
self.command.run()
self.command.finished()
self.check_diff("""
</ovf:Configuration>
+ <ovf:Configuration ovf:id="UT">
+ <ovf:Label>UT</ovf:Label>
+ <ovf:Description>UT</ovf:Description>
+ </ovf:Configuration>
+ <ovf:Configuration ovf:id="UT2">
+ <ovf:Label>UT2</ovf:Label>
+ <ovf:Description>UT2</ovf:Description>
+ </ovf:Configuration>
</ovf:DeploymentOptionSection>
...
</ovf:Item>
+ <ovf:Item ovf:configuration="UT UT2">
+ <rasd:AllocationUnits>byte * 2^20</rasd:AllocationUnits>
+ <rasd:Description>Memory Size</rasd:Description>
+ <rasd:ElementName>8192MB of memory</rasd:ElementName>
+ <rasd:InstanceID>2</rasd:InstanceID>
+ <rasd:ResourceType>4</rasd:ResourceType>
+ <rasd:VirtualQuantity>8192</rasd:VirtualQuantity>
+ </ovf:Item>
<ovf:Item>
""")
def test_create_profile_no_existing(self):
"""Add a profile to an OVF that doesn't have any."""
self.command.package = self.minimal_ovf
self.command.profiles = ['UT']
self.command.run()
self.command.finished()
self.check_diff(file1=self.minimal_ovf,
expected="""
<ovf:References />
+ <ovf:DeploymentOptionSection>
+ <ovf:Info>Configuration Profiles</ovf:Info>
+ <ovf:Configuration ovf:id="UT">
+ <ovf:Label>UT</ovf:Label>
+ <ovf:Description>UT</ovf:Description>
+ </ovf:Configuration>
+ </ovf:DeploymentOptionSection>
<ovf:VirtualSystem ovf:id="x">
""")
def test_delete_one_profile(self):
"""Delete one configuration profile."""
self.command.package = self.input_ovf
self.command.profiles = ['1CPU-1GB-1NIC', '4CPU-4GB-3NIC']
self.command.delete_all_other_profiles = True
self.command.run()
self.command.finished()
self.check_diff("""
</ovf:Configuration>
- <ovf:Configuration ovf:id="2CPU-2GB-1NIC">
- <ovf:Label>2 vCPUs, 2 GB RAM, 1 NIC</ovf:Label>
- <ovf:Description>Minimal hardware profile - 2 vCPUs, 2 GB RAM, \
1 NIC</ovf:Description>
- </ovf:Configuration>
<ovf:Configuration ovf:default="true" ovf:id="4CPU-4GB-3NIC">
...
</ovf:Item>
- <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
- <rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
- <rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>2 virtual CPU(s)</rasd:ElementName>
- <rasd:InstanceID>1</rasd:InstanceID>
- <rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>2</rasd:VirtualQuantity>
- <vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
- </ovf:Item>
<ovf:Item ovf:configuration="4CPU-4GB-3NIC">
...
<rasd:VirtualQuantity>1024</rasd:VirtualQuantity>
- </ovf:Item>
- <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
- <rasd:AllocationUnits>byte * 2^20</rasd:AllocationUnits>
- <rasd:Description>Memory Size</rasd:Description>
- <rasd:ElementName>2048MB of memory</rasd:ElementName>
- <rasd:InstanceID>2</rasd:InstanceID>
- <rasd:ResourceType>4</rasd:ResourceType>
- <rasd:VirtualQuantity>2048</rasd:VirtualQuantity>
</ovf:Item>
""")
def test_delete_all_profiles(self):
"""Delete all configuration profiles, leaving only the default hw."""
self.command.package = self.input_ovf
self.command.delete_all_other_profiles = True
self.command.run()
self.command.finished()
self.check_diff("""
</ovf:NetworkSection>
- <ovf:DeploymentOptionSection>
- <ovf:Info>Configuration Profiles</ovf:Info>
- <ovf:Configuration ovf:id="1CPU-1GB-1NIC">
- <ovf:Label>1 vCPU, 1 GB RAM, 1 NIC</ovf:Label>
- <ovf:Description>Minimal hardware profile - 1 vCPU, 1 GB RAM, 1 NIC\
</ovf:Description>
- </ovf:Configuration>
- <ovf:Configuration ovf:id="2CPU-2GB-1NIC">
- <ovf:Label>2 vCPUs, 2 GB RAM, 1 NIC</ovf:Label>
- <ovf:Description>Minimal hardware profile - 2 vCPUs, 2 GB RAM, 1 NIC\
</ovf:Description>
- </ovf:Configuration>
- <ovf:Configuration ovf:default="true" ovf:id="4CPU-4GB-3NIC">
- <ovf:Label>4 vCPUs, 4 GB RAM, 3 NICs</ovf:Label>
- <ovf:Description>Default hardware profile - 4 vCPUs, 4 GB RAM, 3 NICs\
</ovf:Description>
- </ovf:Configuration>
- </ovf:DeploymentOptionSection>
<ovf:VirtualSystem ovf:id="test">
...
</ovf:Item>
- <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
- <rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
- <rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>2 virtual CPU(s)</rasd:ElementName>
- <rasd:InstanceID>1</rasd:InstanceID>
- <rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>2</rasd:VirtualQuantity>
- <vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
- </ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
- <rasd:AllocationUnits>hertz * 10^6</rasd:AllocationUnits>
- <rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>4 virtual CPU(s)</rasd:ElementName>
- <rasd:InstanceID>1</rasd:InstanceID>
- <rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>4</rasd:VirtualQuantity>
- <vmw:CoresPerSocket ovf:required="false">1</vmw:CoresPerSocket>
- </ovf:Item>
<ovf:Item>
...
<rasd:VirtualQuantity>1024</rasd:VirtualQuantity>
- </ovf:Item>
- <ovf:Item ovf:configuration="2CPU-2GB-1NIC">
- <rasd:AllocationUnits>byte * 2^20</rasd:AllocationUnits>
- <rasd:Description>Memory Size</rasd:Description>
- <rasd:ElementName>2048MB of memory</rasd:ElementName>
- <rasd:InstanceID>2</rasd:InstanceID>
- <rasd:ResourceType>4</rasd:ResourceType>
- <rasd:VirtualQuantity>2048</rasd:VirtualQuantity>
- </ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
- <rasd:AllocationUnits>byte * 2^20</rasd:AllocationUnits>
- <rasd:Description>Memory Size</rasd:Description>
- <rasd:ElementName>4096MB of memory</rasd:ElementName>
- <rasd:InstanceID>2</rasd:InstanceID>
- <rasd:ResourceType>4</rasd:ResourceType>
- <rasd:VirtualQuantity>4096</rasd:VirtualQuantity>
</ovf:Item>
...
</ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
- <rasd:AddressOnParent>12</rasd:AddressOnParent>
- <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet2</rasd:ElementName>
- <rasd:InstanceID>12</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
- <rasd:ResourceType>10</rasd:ResourceType>
- </ovf:Item>
- <ovf:Item ovf:configuration="4CPU-4GB-3NIC">
- <rasd:AddressOnParent>13</rasd:AddressOnParent>
- <rasd:AutomaticAllocation>true</rasd:AutomaticAllocation>
- <rasd:Connection>VM Network</rasd:Connection>
- <rasd:Description>VMXNET3 ethernet adapter on "VM Network"\
</rasd:Description>
- <rasd:ElementName>GigabitEthernet3</rasd:ElementName>
- <rasd:InstanceID>13</rasd:InstanceID>
- <rasd:ResourceSubType>VMXNET3</rasd:ResourceSubType>
- <rasd:ResourceType>10</rasd:ResourceType>
- </ovf:Item>
</ovf:VirtualHardwareSection>
""")
def test_create_delete_network_no_existing(self):
"""Create then delete a network in an OVF with none previously."""
self.command.package = self.minimal_ovf
self.command.nic_networks = ["VM Network", "Foobar"]
self.command.nics = 1
self.command.run()
self.assertLogged(**self.NEW_HW_FROM_SCRATCH)
self.assertLogged(levelname="WARNING",
msg="not all %s values were used",
args=('ethernet', 'Connection', ['Foobar']))
self.command.finished()
# network 'Foobar' is not used, so it'll be deleted
self.assertLogged(**self.removing_network_message('Foobar'))
self.check_diff(file1=self.minimal_ovf,
expected="""
<?xml version='1.0' encoding='utf-8'?>
-<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1">
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData">
<ovf:References />
+ <ovf:NetworkSection>
+ <ovf:Info>Logical networks</ovf:Info>
+ <ovf:Network ovf:name="VM Network">
+ <ovf:Description>VM Network</ovf:Description>
+ </ovf:Network>
+ </ovf:NetworkSection>
<ovf:VirtualSystem ovf:id="x">
...
<ovf:Info />
+ <ovf:Item>
+ <rasd:Connection>VM Network</rasd:Connection>
+ <rasd:ElementName>Ethernet1</rasd:ElementName>
+ <rasd:InstanceID>1</rasd:InstanceID>
+ <rasd:ResourceType>10</rasd:ResourceType>
+ </ovf:Item>
</ovf:VirtualHardwareSection>
""")
self.command.destroy()
self.command = None
self.validate_with_ovftool(self.temp_file)
# Now remove all NICs and make sure it's cleaned back up
self.command = COTEditHardware(UI())
self.command.output = self.temp_file
self.command.package = self.temp_file
self.command.nics = 0
self.command.run()
self.command.finished()
self.assertLogged(**self.removing_network_message())
self.assertLogged(**self.REMOVING_NETWORKSECTION)
self.check_diff(file1=self.temp_file, file2=self.minimal_ovf,
expected="")
def test_set_cpus_v09(self):
"""Test CPU count settings with a v0.9 OVF."""
self.command.package = self.v09_ovf
self.command.cpus = 2
self.command.run()
self.command.finished()
self.check_diff(file1=self.v09_ovf,
expected="""
<ovf:Item>
- <rasd:Caption>1 virtual CPU(s)</rasd:Caption>
+ <rasd:Caption>2 virtual CPU(s)</rasd:Caption>
<rasd:Description>Number of Virtual CPUs</rasd:Description>
...
<rasd:AllocationUnits>MegaHertz</rasd:AllocationUnits>
- <rasd:VirtualQuantity>1</rasd:VirtualQuantity>
+ <rasd:VirtualQuantity>2</rasd:VirtualQuantity>
</ovf:Item>
""")
def test_set_cpus_vmware(self):
"""Test CPU setting with a VMWare OVF."""
self.command.package = self.vmware_ovf
self.command.cpus = 4
self.command.run()
self.command.finished()
self.check_diff(file1=self.vmware_ovf,
expected="""
-<?xml version="1.0" encoding="UTF-8"?>
-<ovf:Envelope vmw:buildId="build-880146" \
xmlns="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:cim="http://schemas.dmtf.org/wbem/wscim/1/common" \
xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData" \
xmlns:vmw="http://www.vmware.com/schema/ovf" \
xmlns:vssd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_VirtualSystemSettingData" \
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+<?xml version='1.0' encoding='utf-8'?>
+<ovf:Envelope xmlns:ovf="http://schemas.dmtf.org/ovf/envelope/1" \
xmlns:rasd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_ResourceAllocationSettingData" \
xmlns:vmw="http://www.vmware.com/schema/ovf" \
xmlns:vssd="http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/\
CIM_VirtualSystemSettingData" vmw:buildId="build-880146">
<ovf:References>
...
<rasd:Description>Number of Virtual CPUs</rasd:Description>
- <rasd:ElementName>2 virtual CPU(s)</rasd:ElementName>
+ <rasd:ElementName>4 virtual CPU(s)</rasd:ElementName>
<rasd:InstanceID>1</rasd:InstanceID>
...
<rasd:ResourceType>3</rasd:ResourceType>
- <rasd:VirtualQuantity>2</rasd:VirtualQuantity>
+ <rasd:VirtualQuantity>4</rasd:VirtualQuantity>
<vmw:CoresPerSocket ovf:required="false">2</vmw:CoresPerSocket>
...
</ovf:VirtualSystem>
-</ovf:Envelope>
+</ovf:Envelope>
""") # noqa - trailing whitespace above is in base file
| [
"glenn@e-dad.net"
] | glenn@e-dad.net |
0732687f9af1a0375b4ad3b03fab82ae9f0c8c27 | c1c7151a88c1f296b1bb8db5c87f94622b0d36fb | /CarND-Advanced-Lane-Lines/LaneDetection.py | 0a67f8a6ca05c87eea57c7edb6925e1b715b300a | [
"MIT"
] | permissive | sridhar912/Self-Driving-Car-NanoDegree | 8a9b5d9dbdcc9426858215e06e242287a6374ad4 | d1e6b0231c80ab728ec805cde479c0d2fc1ca20a | refs/heads/master | 2021-01-18T22:13:59.188666 | 2017-05-19T08:34:25 | 2017-05-19T08:34:25 | 72,340,166 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,408 | py | import numpy as np
import cv2
import matplotlib.pyplot as plt
from skimage.exposure import adjust_gamma
NORMAL = 1
DARK = 2
BRIGHT = 3
class LaneDetection():
def __init__(self, cameraInfo, prespectiveInfo):
self.edge_bird_view = None
self.edge_front_view = None
self.img_undist = None
self.img_undist_warp = None
self.cameraInfo = cameraInfo
self.prespectiveInfo = prespectiveInfo
self.mtx, self.dist = cameraInfo.get_camera_parameters()
self.mtx_perp, self.mtx_perp_inv = prespectiveInfo.get_prespective_parameters()
# 1--> normal 2--> Dark 3-->bright
self.condition = NORMAL
def nonZeroCount(self, img, offset):
return cv2.countNonZero(img[offset:, :])
def check_saturation(self, white_lane, yellow_lane, white_lane_warp, yellow_lane_warp, offset=480, thresh=(500, 20000)):
count_wl = self.nonZeroCount(white_lane, offset)
count_wlw = self.nonZeroCount(white_lane_warp, offset)
count_yl = self.nonZeroCount(yellow_lane, offset)
count_ylw = self.nonZeroCount(yellow_lane_warp, offset)
if (count_wl < thresh[1] and count_wlw < thresh[1]):
if (count_wl < thresh[0] and count_wlw < thresh[0]) or (count_yl < thresh[0] and count_ylw < thresh[0]) or (
count_yl > thresh[1] or count_ylw > thresh[1]):
return DARK
else:
return NORMAL
else:
return BRIGHT
def extract_color_info(self, img, threshL=(210, 250), threshB=(200, 250)):
lab = cv2.cvtColor(img, cv2.COLOR_RGB2LAB).astype(np.float)
channelL, channelA, channelB = cv2.split(lab)
channelL_norm = np.uint8(255 * channelL / np.max(channelL))
white_lane = cv2.inRange(channelL_norm, threshL[0], threshL[1])
channelB_norm = np.uint8(255 * channelB / np.max(channelB))
yellow_lane = cv2.inRange(channelB_norm, threshB[0], threshB[1])
#plt.imshow(channelL_norm)
#plt.show()
return white_lane, yellow_lane
def extract_sobel_edge(self,img):
sobel = np.absolute(cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=3))
scaled_sobel = np.uint8(255 * sobel / np.max(sobel))
sobel_output = np.zeros_like(scaled_sobel)
sobel_output[(scaled_sobel >= 20) & (scaled_sobel <= 200)] = 255
return sobel_output
def extract_lane_information_diff_condition(self, img, condition):
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY).astype(np.float)
if condition == 2:
gray_norm = adjust_gamma(gray, 0.4)
else:
gray_norm = adjust_gamma(gray, 5)
#gray_norm = np.uint8(255 * (gray) / np.max(gray))
sobelx = np.absolute(cv2.Sobel(gray_norm, cv2.CV_64F, 1, 0, ksize=15))
sobely = np.absolute(cv2.Sobel(gray_norm, cv2.CV_64F, 0, 1, ksize=15))
scaled_sobelx = np.uint8(255 * sobelx / np.max(sobelx))
binary_outputx = np.zeros_like(scaled_sobelx)
binary_outputx[(scaled_sobelx >= 20) & (scaled_sobelx <= 200)] = 1
scaled_sobely = np.uint8(255 * sobely / np.max(sobely))
binary_outputy = np.zeros_like(scaled_sobely)
binary_outputy[(scaled_sobely >= 20) & (scaled_sobely <= 200)] = 1
# show_images(binary_outputx,binary_outputy)
absgraddir = np.arctan2((binary_outputy), (binary_outputx))
binary_output = np.zeros_like(absgraddir)
binary_output[(absgraddir >= 0.7) & (absgraddir <= 0.8)] = 1
lanes_front_view = np.uint8(255 * binary_output / np.max(binary_output))
lanes_bird_view = self.prespectiveInfo.warp_image(lanes_front_view)
return lanes_front_view, lanes_bird_view
def extract_lane_information(self, img, useEdge = True, show_images = False):
img_undist = self.cameraInfo.undistort_image(img)
img_undist_warp = self.prespectiveInfo.warp_image(img_undist)
white_lane, yellow_lane = self.extract_color_info(img_undist)
color_lane = cv2.bitwise_or(white_lane, yellow_lane)
color_lane_warped = self.prespectiveInfo.warp_image(color_lane)
white_lane_warp, yellow_lane_warp = self.extract_color_info(img_undist_warp)
color_lane_warp = cv2.bitwise_or(white_lane_warp, yellow_lane_warp)
lanes_bird_view = cv2.bitwise_or(color_lane_warp, color_lane_warped)
lanes_front_view = self.prespectiveInfo.warp_image(lanes_bird_view,inverse=True)
condition = self.check_saturation(white_lane, yellow_lane, white_lane_warp, yellow_lane_warp)
if condition != 1:
# Currently not used
#print()
lanes_front_view, lanes_bird_view = self.extract_lane_information_diff_condition(img_undist, condition)
if useEdge:
edge_front_view = self.extract_sobel_edge(lanes_front_view)
edge_bird_view = self.extract_sobel_edge(lanes_bird_view)
self.edge_bird_view = edge_bird_view
self.edge_front_view = edge_front_view
else:
self.edge_bird_view = lanes_bird_view
self.edge_front_view = lanes_front_view
self.img_undist = img_undist
if show_images:
self.show_output(img_undist,white_lane,yellow_lane)
self.show_output(img_undist_warp,white_lane_warp, yellow_lane_warp)
self.show_output(img_undist,self.edge_front_view,self.edge_bird_view,'Input','Combined Front View','Combined BirdEye View')
self.img_undist = img_undist
self.img_undist_warp = img_undist_warp
self.condition = condition
def show_output(self, img1, img2, img3, t1 = 'Input', t2 = 'White Lane', t3 = 'Yellow Lane'):
"""
Show orginal and undistorted images
:param org: The original image
:param undist: The undistorted image
:return:
"""
f, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(20, 10))
ax1.imshow(img1)
ax1.set_title(t1, fontsize=20)
ax2.imshow(img2, cmap ='gray')
ax2.set_title(t2, fontsize=20)
ax3.imshow(img3, cmap='gray')
ax3.set_title(t3, fontsize=20)
plt.show()
def get_undistored_image(self):
return self.img_undist
def get_warped_image(self):
return self.img_undist_warp
def get_lane_output(self):
return self.edge_front_view, self.edge_bird_view
| [
"sridhar912@gmail.com"
] | sridhar912@gmail.com |
b0f1368286422b9f1a1348f09be7d61a2844cbdb | b08c50ab0e01ab04abec320e09b9bfb5cffa2145 | /preprocess/utils.py | 185a8073bfa4f234eca5dc503858b7844861df3f | [] | no_license | aqua1907/tiny_imagenet | 7d2c7746e22c95057a3b52e6d59de19abcee0b98 | 235b5dcd693daa1838441525423dcf06f6deebe3 | refs/heads/master | 2020-12-07T03:20:39.988920 | 2020-01-10T23:29:13 | 2020-01-10T23:29:13 | 232,620,299 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | import cv2
def mean_preprocess(image, rMean, gMean, bMean):
# get each channel of image
(R, G, B) = cv2.split(image)
# subtract the means values from each channel
R -= rMean
G -= gMean
B -= bMean
# merge the channels back together and return the image
return cv2.merge([R, G, B])
| [
"noreply@github.com"
] | aqua1907.noreply@github.com |
23664843f002ad6c0d0c7736fa3b73fa81c7c87b | cf76311190b9bd60be5191500e39bbb1d4dd819a | /appengine/standard_python37/bigquery/main_test.py | 727f0c1533c079eebc4886b6b4b4cd6388442245 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | mahongmsncom/python-docs-samples | b277ace657d7db21c409c33554f359f3acdbcd25 | 36c631cc5bc842f3aed243b0d87eb4d3cd2cba66 | refs/heads/master | 2020-04-03T21:23:58.197400 | 2019-09-18T02:18:30 | 2019-09-18T02:18:30 | 155,573,156 | 0 | 0 | Apache-2.0 | 2018-10-31T14:41:38 | 2018-10-31T14:41:38 | null | UTF-8 | Python | false | false | 783 | py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def test_main():
import main
main.app.testing = True
client = main.app.test_client()
r = client.get('/')
assert r.status_code == 200
assert 'Query Result' in r.data.decode('utf-8')
| [
"noreply@github.com"
] | mahongmsncom.noreply@github.com |
08aeea578451052c0f6f40d15c4ad2b81aea0d04 | 4bcb8469fcb386d7ca5b9bc23f0bfce63884f845 | /test/test.py | 1b1efa5e96f2ae1cb292c2b7b2cbebbce0d407e2 | [
"MIT"
] | permissive | culdo/trading_alert | 92e5820a4db0d5e15beb3f347a9ef264261b835b | de3b85e54b0087426e1fca2960f8c764a4d7641d | refs/heads/master | 2023-08-29T23:07:52.112675 | 2021-11-12T09:29:49 | 2021-11-12T09:29:49 | 398,337,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | import tkinter
from trading_alert.trading_alert import TradingAlert
from trading_alert.util.time_tool import get_before_time
from trading_alert.util.util import load_ta
if __name__ == '__main__':
_ = TradingAlert(get_before_time(weeks=1), interval="4h")
# load previous saved session
# load_ta()
tkinter.mainloop()
| [
"wuorsut@gmail.com"
] | wuorsut@gmail.com |
27f37a64fd56abf5c84a6de0d251780d79d6574c | f889bc01147869459c0a516382e7b95221295a7b | /swagger_client/models/body_19.py | c3a0a50364961c0b3731d25640fcee6a6b0617c9 | [] | no_license | wildatheart/magento2-api-client | 249a86f5c0289743f8df5b0324ccabd76f326512 | e6a707f85b37c6c3e4ef3ff78507a7deb8f71427 | refs/heads/master | 2021-07-14T16:01:17.644472 | 2017-10-18T13:33:08 | 2017-10-18T13:33:08 | 107,412,121 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,764 | py | # coding: utf-8
"""
Magento Community
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Body19(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'product': 'CatalogDataProductInterface',
'save_options': 'bool'
}
attribute_map = {
'product': 'product',
'save_options': 'saveOptions'
}
def __init__(self, product=None, save_options=None):
"""
Body19 - a model defined in Swagger
"""
self._product = None
self._save_options = None
self.product = product
if save_options is not None:
self.save_options = save_options
@property
def product(self):
"""
Gets the product of this Body19.
:return: The product of this Body19.
:rtype: CatalogDataProductInterface
"""
return self._product
@product.setter
def product(self, product):
"""
Sets the product of this Body19.
:param product: The product of this Body19.
:type: CatalogDataProductInterface
"""
if product is None:
raise ValueError("Invalid value for `product`, must not be `None`")
self._product = product
@property
def save_options(self):
"""
Gets the save_options of this Body19.
:return: The save_options of this Body19.
:rtype: bool
"""
return self._save_options
@save_options.setter
def save_options(self, save_options):
"""
Sets the save_options of this Body19.
:param save_options: The save_options of this Body19.
:type: bool
"""
self._save_options = save_options
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, Body19):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"sander@wildatheart.eu"
] | sander@wildatheart.eu |
3a573e8d5a2f91cb94202eac0599359fdfa8e9e7 | aeb133919a97f19cccc096f0b9be2a318465767b | /CH15/practiceH1.py | cc5fdb719d4b7d8e6d7084047d6008a427884f73 | [] | no_license | Taneil-Kew/Hw | f668c48a6d994168f1dca6a0bf42986dace4b254 | dffff239a3827c5dc77052c77c21c2eb41d46b4d | refs/heads/master | 2021-09-14T14:11:47.519753 | 2018-05-15T02:21:26 | 2018-05-15T02:21:26 | 113,489,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 846 | py | class Point:
""" Point class represents and manipulates x,y coords. """
def __init__(self, x=0, y=0):
""" Create a new point at the origin """
self.x = x
self.y = y
def distance_from_origin(self):
""" Compute my distance from the origin """
return ((self.x ** 2) + (self.y ** 2)) ** 0.5
def __str__(self): # All we have done is renamed the method
return "({0}, {1})".format(self.x, self.y)
p = Point(3,4)
q = Point(10,11)
f = Point(3,4)
print(p.x,p.y,q.x,q.y)
p.x = 4
p.y = 6
q.x = 10
q.y = 11
print(p.x, p.y, q.x, q.y)
print("(x={0}, y={1})".format(p.x, p.y))
distance_squared_from_origin = p.x * p.x + p.y * p.y
dist = q.distance_from_origin()
print(dist)
def print_point(pt):
print("({0}, {1})".format(pt.x, pt.y))
print_point(p)
#print(p.to_string())
print(str(p)) | [
"daronson@kewforest.org"
] | daronson@kewforest.org |
14ce2ddf6b8c775f696eacb60d098939171aed93 | 30541f23e34b6df5c33e6a6063c72cae0612e203 | /utils/derivative.py | bdae96fa9cb03fe488d0ef426207978af8b0e82d | [] | no_license | moulin1024/windfarm-opti | fd96e64789fd1c9a7db85e7b972824372af5dfab | a5a3a90be2bec9367700f010fb38daabba149cc1 | refs/heads/master | 2020-09-05T04:59:35.885935 | 2019-11-06T12:19:45 | 2019-11-06T12:19:45 | 219,989,071 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,867 | py | import numpy as np
from jobs import Job
from abc import ABC, abstractmethod
from itertools import combinations_with_replacement
class AbstractDerivative:
def __init__(self,x,dx,axis):
self.x = x
self.dx = dx
self.axis = axis
@abstractmethod
def create_plan(self):
pass
class Dx(AbstractDerivative):
def __init__(self,x,dx,axis):
super().__init__(x,dx,axis)
self.x_prime = np.zeros(1,self.x.size)
def create_plan(self):
self.x_prime[0,self.axis[0]] = self.x[0,self.axis[0]] - self.dx
class DxDx(AbstractDerivative):
def __init__(self,x,dx,axis):
super().__init__(x,dx,axis)
self.x_prime = np.repeat([self.x],2,axis=0)
def create_plan(self):
self.x_prime[0,self.axis[0]] = self.x[self.axis[0]] - self.dx
self.x_prime[1,self.axis[0]] = self.x[self.axis[0]] + self.dx
class DxDy(AbstractDerivative):
def __init__(self,x,dx,axis):
super().__init__(x,dx,axis)
self.x_prime = np.repeat([self.x],4,axis=0)
def create_plan(self):
self.x_prime[0,[self.axis[0],self.axis[1]]] = self.x[[self.axis[0],self.axis[1]]] + [-self.dx,-self.dx]
self.x_prime[1,[self.axis[0],self.axis[1]]] = self.x[[self.axis[0],self.axis[1]]] + [-self.dx,self.dx]
self.x_prime[2,[self.axis[0],self.axis[1]]] = self.x[[self.axis[0],self.axis[1]]] + [self.dx,-self.dx]
self.x_prime[3,[self.axis[0],self.axis[1]]] = self.x[[self.axis[0],self.axis[1]]] + [self.dx,self.dx]
def create_jobs_for_hessian(base_job,iter_num,opti_wt_list):
axis_list = combinations_with_replacement(list(opti_wt_list[0]), 2)
case_list = []
for axis in axis_list:
axis = [int(i) for i in axis]
if axis[0] == axis[1]:
hessian_job = DxDx(base_job.yaw_angle,base_job.d_gamma,axis)
hessian_job.create_plan()
for i in range(2):
job_name = str(iter_num) + "-d" + str(axis[0])+"d" + str(axis[1])+"-"+str(i)
case_list.append(job_name)
job_yaw_angle = hessian_job.x_prime[i,:]
job = Job(base_job.case,job_name,job_yaw_angle,base_job.config_path)
job.create_old()
else:
hessian_job = DxDy(base_job.yaw_angle,base_job.d_gamma,axis)
hessian_job.create_plan()
for i in range(4):
case_list.append(job_name)
job_name = str(iter_num) + "-d" + str(axis[0])+"d" + str(axis[1])+"-"+str(i)
job_yaw_angle = hessian_job.x_prime[i,:]
job = Job(base_job.case,job_name,job_yaw_angle,base_job.config_path)
job.create_old()
def create_jobs_for_grad(base_job,iter_num,opti_wt_list):
axis_list = combinations_with_replacement(list(opti_wt_list[0]), 2)
case_list = []
for axis in axis_list:
axis = [int(i) for i in axis]
if axis[0] == axis[1]:
hessian_job = DxDx(base_job.yaw_angle,base_job.d_gamma,axis)
hessian_job.create_plan()
for i in range(2):
job_name = str(iter_num) + "-d" + str(axis[0])+"d" + str(axis[1])+"-"+str(i)
case_list.append(job_name)
job_yaw_angle = hessian_job.x_prime[i,:]
job = Job(base_job.case,job_name,job_yaw_angle,base_job.config_path)
job.create_old()
else:
hessian_job = DxDy(base_job.yaw_angle,base_job.d_gamma,axis)
hessian_job.create_plan()
for i in range(4):
case_list.append(job_name)
job_name = str(iter_num) + "-d" + str(axis[0])+"d" + str(axis[1])+"-"+str(i)
job_yaw_angle = hessian_job.x_prime[i,:]
job = Job(base_job.case,job_name,job_yaw_angle,base_job.config_path)
job.create_old() | [
"moulin1024@gmail.com"
] | moulin1024@gmail.com |
a0889ee9592daf41787d2076e74ea7d070e7768d | 46bc1390a15bfaa06a813bbdda55d9be04b3b8f2 | /.env/lib/python2.7/site-packages/np_utils/gen_utils.py | 83961ab05a71550457558b1e6ef1d4396deae652 | [] | no_license | ashutoshbindal/Deep_Learning_Comp | 0ba7cf559b6abd6f2443249e9974f1627dbe2f52 | 806fe96d011d3293dd5ff397c950e4307dce2563 | refs/heads/master | 2021-06-26T17:44:15.409109 | 2017-09-14T03:26:52 | 2017-09-14T03:26:52 | 103,167,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,442 | py | '''Some general utilities functions:
Conversion utilities:
intOrFloat -> converts string to either int if possible, float otherwise
floatIntStringOrNone -> try to make an integer, then a float, then a string, otherwise None
Flow control utilities:
minmax -> Return the tuple (min(...), max(...))
callFunctionIfNotNone -> Take a function of 2 arguments (a and b) and return
None if both a and b are None,
a if b is None (and vice versa),
and f(a,b) if neither is None.
minmaxIgnoreNone -> A poorly named function that takes 2 minima and
2 maixima and returns the global min and max
using callFunctionIfNotNone
'''
from builtins import map, int
import os
import types
import re
#############################
## File handling utilities ##
#############################
def makeifnotexists(d):
'''If a directory does not exist, make it
Return d (for functional use cases)'''
if not os.path.exists(d):
os.mkdir(d)
return d
##########################
## Conversion utilities ##
##########################
def intOrFloat(string):
'''Not sure if your string is formatted as an int or a float? Use intOrFloat instead!'''
try:
return int(string)
except ValueError:
return float(string)
def floatIntStringOrNone(string):
'''An even more generic version of intOrFloat'''
if string=='None':
return None
for fun in (int, float):
try:
return fun(string)
except ValueError:
pass
return string
def islistlike(x):
'''Test if something is an iterable but NOT as string'''
return hasattr(x, '__iter__') and not isinstance(x, str)
######################
## String utilities ##
######################
def string_between(s, before, after):
'''Find the string between two substrings
Bascially rolling up the pattern:
s.split(before)[1].split(after)[0]
into an actual function with proper checks.
To start from the beginning or end of the string,
pass None as before or after'''
if before is not None:
t = s.split(before)
assert len(t) > 1, '"before" argument is not in the string!'
s = t[1]
if after is not None:
t = s.split(after)
assert len(t) > 1, '"after" argument is not in the string!'
s = t[0]
return s
def multisplit(string, *delimiters):
'''Split a string at any of a number of delimeters.
With one delimeter, this is equivalent to string.split.'''
pattern = '|'.join(map(re.escape, delimiters))
return re.split(pattern, string)
def multireplace(text, *replpairs):
'''Chain multiple calls of string.replace
A "re"-based approach may be better for very long strings
and/or many replacements:
http://code.activestate.com/recipes/81330-single-pass-multiple-replace/'''
for i,o in replpairs:
text = text.replace(i,o)
return text
def multiremove(text, *removals):
'''Chain multiple calls of string.replace
where the second argument is always '' '''
for r in removals:
text = text.replace(r,'')
return text
def replace_non_printable_chars(text, repl=r''):
re.doit
TODO
############################
## Flow control utilities ##
############################
def minmax(*args,**kwds):
'''A really simple function that makes it cleaner to get the min and max
from an expression without duplication or creating a local variable.
See builtins min and max for details about arguments.'''
return min(*args,**kwds),max(*args,**kwds)
def callFunctionIfNotNone(f,a,b):
''''A really simple function to call a function only if both arguments
are not None'''
if a==None: return b
elif b==None: return a
else: return f(a,b)
def minmaxIgnoreNone(Amin,Bmin, Amax,Bmax): ## pairwiseMinMaxIgnoreNone(Amin,Bmin, Amax,Bmax):
'''Given two minima and two maxima, calculate the global minima and maxima,
ignoring values that are None
TODO: This should be renamed (maybe pairwiseMinMaxIgnoreNone?) to avoid confusion with minmax
BTW, when you do fix the name, realize that the max was computing mins instead!!!!''' #TODO
return callFunctionIfNotNone(min,Amin,Bmin),callFunctionIfNotNone(max,Amax,Bmax)
| [
"ayushkr79@gmail.com"
] | ayushkr79@gmail.com |
f4bac242f4ebcee19ff5f95406e835f40256a054 | 6118f2fa2be32a1b1d50a0965f7fa3e137b408bc | /examples/increment.py | 63228df71029b9ba4cb0701f57bd88b8a8616fee | [
"MIT"
] | permissive | eerimoq/mqttools | 66f296c3c76b4909c86d5d287e4a96b6b755bd44 | a28c86e89af0852249a5d6f33f9e67036c3eb8fe | refs/heads/master | 2021-12-27T04:35:33.868653 | 2021-12-24T12:15:01 | 2021-12-24T12:15:01 | 184,444,451 | 58 | 13 | MIT | 2021-11-30T19:04:53 | 2019-05-01T16:15:41 | Python | UTF-8 | Python | false | false | 799 | py | import asyncio
import mqttools
HOST = 'localhost'
PORT = 1883
async def main():
client = mqttools.Client(HOST, PORT)
await client.start()
print(f'Connected to {HOST}:{PORT}.')
await client.subscribe('/mqttools/incrementer/value/request')
print('Subscribed to topic /mqttools/incrementer/value/request.')
while True:
message = await client.messages.get()
if message is None:
print('Broker connection lost!')
break
count = int(message.message)
print(f'Request count: {count}')
count += 1
print(f'Response count: {count}')
client.publish(mqttools.Message('/mqttools/counter-client/value/response',
str(count).encode('ascii')))
asyncio.run(main())
| [
"erik.moqvist@gmail.com"
] | erik.moqvist@gmail.com |
3aa258624befc6c07974876a322e6f40189fb284 | 704bede77f00088c3e6f07368cc628119cd86dc0 | /edge_check_in/edge_check_in/settings.py | 37067cbb216c51fe68e1cb9cfdf04292705ede5f | [] | no_license | joeyborai/edge_symptom_checker | a080d3a27c24ec18eec315c9011dfc7c3560e70d | 2bfc6125b6fe02a6ede643459a0650618d99dfde | refs/heads/master | 2023-02-28T20:50:30.175335 | 2021-02-11T00:05:05 | 2021-02-11T00:05:05 | 292,895,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,186 | py | """
Django settings for edge_check_in project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '73v#6o4t&wynb7vp!55(%0pu^ms#vmj!d9ctg!a5cibjtqz+j('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = [
"18.217.209.252",
"edgecheckin.com"
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'edge_check_in.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'edge_check_in.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
| [
"root@ip-172-31-39-6.us-east-2.compute.internal"
] | root@ip-172-31-39-6.us-east-2.compute.internal |
52292840859bc169d9a124ff333ffb91105a651a | 97ad4c6e1c9f7d56dcb7afb86da76457a8cb136d | /tg_qso_bot/qso_sources/source.py | f858c7f248de677fd1a0bc8047450ce437af16e8 | [
"MIT"
] | permissive | exepirit/tg-qso-bot | b053cbc1849b2da630f766f1b43040e01ca9bd66 | 74ab790d8e4a3be2a66674ec15c6652444b58859 | refs/heads/master | 2023-08-25T03:58:04.369104 | 2021-10-10T06:44:19 | 2021-10-10T06:44:19 | 266,490,063 | 1 | 0 | MIT | 2021-07-02T15:06:22 | 2020-05-24T07:19:53 | Python | UTF-8 | Python | false | false | 596 | py | from abc import ABC, abstractmethod
from typing import List
from tg_qso_bot.models import Qso
class QsoSource(ABC):
"""
Storage adapter, from which QSO can be extracted.
All implemented QSO sources inherit this class.
"""
@abstractmethod
def get_qso_list(self, callsign: str, limit: int, skip: int = 0) -> List[Qso]:
"""
Get QSO list from a source.
:param callsign: Callsign.
:param limit: Requested QSO amount.
:param skip: Indent from top of list.
:return: List of QSOs.
"""
raise NotImplementedError()
| [
"in37her@protonmail.com"
] | in37her@protonmail.com |
b57adaa12ab834e6bcddbaa4760a0f5c15f933d9 | 943d59871cb8eecd65d3563cae6c7f48e49afaab | /src/arftools.py | 4777699f2edaf0cfaec59ae50dff84522ed0e837 | [] | no_license | ATidiane/PLDAC19 | b22f93490de15956d79239d5b9b8897e9d022abb | 720a688b70cf4f68b061bf317980c0e2fb864bab | refs/heads/master | 2020-04-24T23:18:47.595979 | 2019-06-06T07:04:23 | 2019-06-06T07:04:23 | 172,339,932 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,868 | py | #from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import cm
def plot_data(data, labels=None, title=None):
"""
Affiche des donnees 2D
:param data: matrice des donnees 2d
:param labels: vecteur des labels (discrets)
:return:
"""
if title is not None:
plt.title(title)
cols, marks = [
"red", "green", "blue", "orange", "black", "cyan"], [
".", "+", "*", "o", "x", "^"]
if labels is None:
plt.scatter(data[:, 0], data[:, 1], marker="x")
return
for i, l in enumerate(sorted(list(set(labels.flatten())))):
plt.scatter(data[labels == l, 0], data[labels == l, 1],
c=cols[i], marker=marks[i])
def plot_frontiere(data, f, step=20):
""" Trace un graphe de la frontiere de decision de f
:param data: donnees
:param f: fonction de decision
:param step: pas de la grille
:return:
"""
grid, x, y = make_grid(data=data, step=step)
plt.contourf(x, y, f(grid).reshape(x.shape),
colors=('gray', 'blue'), levels=[-1, 0, 1])
def make_grid(data=None, xmin=-5, xmax=5, ymin=-5, ymax=5, step=20):
""" Cree une grille sous forme de matrice 2d de la liste des points
:param data: pour calcluler les bornes du graphe
:param xmin: si pas data, alors bornes du graphe
:param xmax:
:param ymin:
:param ymax:
:param step: pas de la grille
:return: une matrice 2d contenant les points de la grille
"""
if data is not None:
xmax, xmin, ymax, ymin = np.max(data[:, 0]), np.min(
data[:, 0]), np.max(data[:, 1]), np.min(data[:, 1])
x, y = np. meshgrid(np.arange(xmin, xmax, (xmax - xmin) * 1. / step),
np.arange(ymin, ymax, (ymax - ymin) * 1. / step))
grid = np.c_[x.ravel(), y.ravel()]
return grid, x, y
def gen_arti(
centerx=1,
centery=1,
sigma=0.1,
nbex=1000,
data_type=0,
epsilon=0.02):
""" Generateur de donnees,
:param centerx: centre des gaussiennes
:param centery:
:param sigma: des gaussiennes
:param nbex: nombre d'exemples
:param data_type: 0: melange 2 gaussiennes, 1: melange 4 gaussiennes, 2:echequier
:param epsilon: bruit dans les donnees
:return: data matrice 2d des donnnes,y etiquette des donnnees
"""
if data_type == 0:
# melange de 2 gaussiennes
xpos = np.random.multivariate_normal(
[centerx, centerx], np.diag([sigma, sigma]), nbex // 2)
xneg = np.random.multivariate_normal(
[-centerx, -centerx], np.diag([sigma, sigma]), nbex // 2)
data = np.vstack((xpos, xneg))
y = np.hstack((np.ones(nbex // 2), -np.ones(nbex // 2)))
if data_type == 1:
# melange de 4 gaussiennes
xpos = np.vstack((np.random.multivariate_normal([centerx, centerx], np.diag(
[sigma, sigma]), nbex // 4), np.random.multivariate_normal([-centerx, -centerx], np.diag([sigma, sigma]), nbex // 4)))
xneg = np.vstack((np.random.multivariate_normal([-centerx, centerx], np.diag(
[sigma, sigma]), nbex // 4), np.random.multivariate_normal([centerx, -centerx], np.diag([sigma, sigma]), nbex // 4)))
data = np.vstack((xpos, xneg))
y = np.hstack((np.ones(nbex // 2), -np.ones(nbex // 2)))
if data_type == 2:
# echiquier
data = np.reshape(np.random.uniform(-4, 4, 2 * nbex), (nbex, 2))
y = np.ceil(data[:, 0]) + np.ceil(data[:, 1])
y = 2 * (y % 2) - 1
# un peu de bruit
data[:, 0] += np.random.normal(0, epsilon, nbex)
data[:, 1] += np.random.normal(0, epsilon, nbex)
# on mélange les données
idx = np.random.permutation((range(y.size)))
data = data[idx, :]
y = y[idx]
return data, y
| [
"baldeahmedtidiane36@gmail.com"
] | baldeahmedtidiane36@gmail.com |
f5803b458fcb6a984c18b2e938bc5bb953c6c0f3 | dc5aebeaea2eec9290542940b04217ff8409b10c | /gcpc.py | 0461cde340220a783ab33f63e2ba0e3e02ae726d | [] | no_license | paamb/Kattis | 5b59629d40769bbe437a28f47f8688f83258bffe | f237639d32d14b0737dc97736ea19841af168565 | refs/heads/master | 2023-09-05T19:29:48.262284 | 2021-10-21T20:51:23 | 2021-10-21T20:51:23 | 419,864,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,349 | py | num_teams, num_events = [int(x) for x in input().split()]
teams_by_index = [(0,0) for i in range(num_teams)]
better_teams = []
team_1_solved = 0
team_1_penalty = 0
def better(solved_1, penalty_1, solved_2, penalty_2):
if solved_1 > solved_2:
return True
elif solved_1 == solved_2 and penalty_1 < penalty_2:
return True
return False
for i in range(num_events):
team, penalty = [int(x) for x in input().split()]
team -= 1
team_points, team_penalty = teams_by_index[team]
team_points += 1
team_penalty += penalty
teams_by_index[team] = (team_points, team_penalty)
if team != 0:
if not better(team_points - 1, team_penalty - penalty, team_1_solved, team_1_penalty) and better(team_points, team_penalty, team_1_solved, team_1_penalty):
better_teams.append(team)
teams_by_index[team] = (team_points, team_penalty)
else:
team_1_solved += 1
team_1_penalty += penalty
new_better_teams = []
for i in range(len(better_teams)):
team = better_teams[i]
team_points, team_penalty = teams_by_index[team]
if better(team_points, team_penalty, team_1_solved, team_1_penalty):
new_better_teams.append(team)
better_teams = list(new_better_teams)
print(len(better_teams) + 1)
| [
"paamb@stud.ntnu.no"
] | paamb@stud.ntnu.no |
76ac817cfc162c86cc3bb7259d5375b3dd4816dd | 7286d8d7b5f9dbcdb9e56ddc21907dbd72e716e2 | /appCV/settings.py | 81b585f2ec5270497af6fbf1306c476320b4b705 | [] | no_license | betoFlores58/Django-Docker | 7cab955cf2e9ac58e9b52a780c0503029e071590 | dd98ea6dfb2d966abdf7b37b165e0416b17e403b | refs/heads/master | 2023-06-03T19:33:14.306423 | 2021-05-07T23:47:03 | 2021-05-07T23:47:03 | 365,376,851 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,398 | py | """
Django settings for appCV project.
Generated by 'django-admin startproject' using Django 3.0.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n_5onnt)6@)kx-_^&%*-h%&gywcp5p(^5njt)&3jy*try%qkl2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
#REDIRIGE AL HOME DESPUES DE LOGUEARTE Y DESLOGUEARTE
LOGIN_REDIRECT_URL = 'home'
LOGOUT_REDIRECT_URL = 'home'
# Application definition
INSTALLED_APPS = [
'cv',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'appCV.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'appCV.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': 'postgres',
'HOST': 'db',
'PORT': 5432
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')] | [
"albertfr@live.com.mx"
] | albertfr@live.com.mx |
04770445c65e2107f0f3d6c06809a00c996a5403 | 2000aeaca9c183dd321e03d15a5736d5dc01f528 | /software/pwgc/__init__.py | 476666f7a16eb82f82919e39092c89f2937bc55a | [] | no_license | dazhwu/granger_causality | 9054fa74663ab8b024a337ad2d66c16706aeb63d | 6543b08eb9cc9707b79eaaf3ab6a0a96bb2b34fc | refs/heads/master | 2023-03-21T08:19:34.977444 | 2019-11-16T21:16:34 | 2019-11-16T21:16:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14 | py | name = "pwgc"
| [
"Ryan@Kinnear.ca"
] | Ryan@Kinnear.ca |
444f75d7b1112c24ca4b75290fb37c63298722af | 177d0cef294fba61d0c92939cdafdf3bf455fe47 | /home/migrations/0005_auto_20200406_0855.py | 988981530d6a991e63ab0565313b6459713d5127 | [] | no_license | sahil2k19/CRM | c4ed3e568369e268844dbb55352779207cf70425 | b4e973875892b204da30e4ef9ca017a09cdeee3d | refs/heads/master | 2022-04-23T11:12:57.339911 | 2020-04-25T07:35:30 | 2020-04-25T07:35:30 | 258,713,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | # Generated by Django 3.0.2 on 2020-04-06 03:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0004_auto_20200406_0850'),
]
operations = [
migrations.RemoveField(
model_name='order',
name='tags',
),
migrations.AddField(
model_name='product',
name='tags',
field=models.ManyToManyField(to='home.Tag'),
),
]
| [
"sahilgagan227@gmail.com"
] | sahilgagan227@gmail.com |
581450ad4e5cf8d56bb5c634e2b0d022aba0848e | 5cb440606ce17acadff65b410bcde153eafe479b | /Disease-prediction-using-Machine-Learning-master/clean_code.py | 8eaf67eeb13cfa29fe4750532dbd39ac5033264f | [] | no_license | anjalipatil19/Disease-Prediction-using-Machine-Learning | d73ea093d8eaf6531eca0f01766bf6ccb7a216ab | 5098eff3ed97f86e8755780546c8fab55b64744d | refs/heads/master | 2023-08-27T21:48:24.856551 | 2021-09-24T12:33:10 | 2021-09-24T12:33:10 | 409,951,226 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,806 | py | from tkinter import *
import numpy as np
import pandas as pd
# from gui_stuff import *
# l1: master list of every symptom feature name, in the exact column order
# expected by Training.csv / Testing.csv. Each entry is both a CSV column
# and a value the GUI symptom dropdowns can select.
l1=['back_pain','constipation','abdominal_pain','diarrhoea','mild_fever','yellow_urine',
'yellowing_of_eyes','acute_liver_failure','fluid_overload','swelling_of_stomach',
'swelled_lymph_nodes','malaise','blurred_and_distorted_vision','phlegm','throat_irritation',
'redness_of_eyes','sinus_pressure','runny_nose','congestion','chest_pain','weakness_in_limbs',
'fast_heart_rate','pain_during_bowel_movements','pain_in_anal_region','bloody_stool',
'irritation_in_anus','neck_pain','dizziness','cramps','bruising','obesity','swollen_legs',
'swollen_blood_vessels','puffy_face_and_eyes','enlarged_thyroid','brittle_nails',
'swollen_extremeties','excessive_hunger','extra_marital_contacts','drying_and_tingling_lips',
'slurred_speech','knee_pain','hip_joint_pain','muscle_weakness','stiff_neck','swelling_joints',
'movement_stiffness','spinning_movements','loss_of_balance','unsteadiness',
'weakness_of_one_body_side','loss_of_smell','bladder_discomfort','foul_smell_of urine',
'continuous_feel_of_urine','passage_of_gases','internal_itching','toxic_look_(typhos)',
'depression','irritability','muscle_pain','altered_sensorium','red_spots_over_body','belly_pain',
'abnormal_menstruation','dischromic _patches','watering_from_eyes','increased_appetite','polyuria','family_history','mucoid_sputum',
'rusty_sputum','lack_of_concentration','visual_disturbances','receiving_blood_transfusion',
'receiving_unsterile_injections','coma','stomach_bleeding','distention_of_abdomen',
'history_of_alcohol_consumption','fluid_overload','blood_in_sputum','prominent_veins_on_calf',
'palpitations','painful_walking','pus_filled_pimples','blackheads','scurring','skin_peeling',
'silver_like_dusting','small_dents_in_nails','inflammatory_nails','blister','red_sore_around_nose',
'yellow_crust_ooze']
# disease: human-readable class names indexed by the integer label the
# classifiers predict (0..40).
# NOTE(review): some names here differ from the replace() mapping keys by
# whitespace (e.g. 'Diabetes' vs 'Diabetes ') -- this list is used only for
# display, so the mismatch is cosmetic, but verify against the CSV labels.
disease=['Fungal infection','Allergy','GERD','Chronic cholestasis','Drug Reaction',
'Peptic ulcer diseae','AIDS','Diabetes','Gastroenteritis','Bronchial Asthma','Hypertension',
' Migraine','Cervical spondylosis',
'Paralysis (brain hemorrhage)','Jaundice','Malaria','Chicken pox','Dengue','Typhoid','hepatitis A',
'Hepatitis B','Hepatitis C','Hepatitis D','Hepatitis E','Alcoholic hepatitis','Tuberculosis',
'Common Cold','Pneumonia','Dimorphic hemmorhoids(piles)',
'Heartattack','Varicoseveins','Hypothyroidism','Hyperthyroidism','Hypoglycemia','Osteoarthristis',
'Arthritis','(vertigo) Paroymsal Positional Vertigo','Acne','Urinary tract infection','Psoriasis',
'Impetigo']
# Binary feature vector with one 0/1 slot per symptom in l1; the prediction
# functions flip entries to 1 for the symptoms the user selects.
l2 = [0] * len(l1)
# TRAINING DATA df -- load Training.csv and integer-encode the string
# prognosis labels as classes 0..40 (original header said "TESTING", but
# this frame is what the classifiers are fitted on).
df=pd.read_csv("Training.csv")

# NOTE(review): some mapping keys carry trailing spaces ('Diabetes ',
# 'Hypertension ') -- presumably matching the CSV text exactly; confirm
# against the data file.
df.replace({'prognosis':{'Fungal infection':0,'Allergy':1,'GERD':2,'Chronic cholestasis':3,'Drug Reaction':4,
'Peptic ulcer diseae':5,'AIDS':6,'Diabetes ':7,'Gastroenteritis':8,'Bronchial Asthma':9,'Hypertension ':10,
'Migraine':11,'Cervical spondylosis':12,
'Paralysis (brain hemorrhage)':13,'Jaundice':14,'Malaria':15,'Chicken pox':16,'Dengue':17,'Typhoid':18,'hepatitis A':19,
'Hepatitis B':20,'Hepatitis C':21,'Hepatitis D':22,'Hepatitis E':23,'Alcoholic hepatitis':24,'Tuberculosis':25,
'Common Cold':26,'Pneumonia':27,'Dimorphic hemmorhoids(piles)':28,'Heart attack':29,'Varicose veins':30,'Hypothyroidism':31,
'Hyperthyroidism':32,'Hypoglycemia':33,'Osteoarthristis':34,'Arthritis':35,
'(vertigo) Paroymsal Positional Vertigo':36,'Acne':37,'Urinary tract infection':38,'Psoriasis':39,
'Impetigo':40}},inplace=True)

# print(df.head())

# Feature matrix: one 0/1 column per symptom in l1.
X= df[l1]
# Target labels (integer-encoded prognosis), kept as a one-column frame.
y = df[["prognosis"]]
np.ravel(y)  # NOTE(review): return value is discarded -- this line has no effect.
# print(y)
# TESTING DATA tr -- load Testing.csv (held-out set used only for the
# accuracy printouts) and apply the same label encoding as the training
# frame (original header said "TRAINING", but this is the evaluation data).
tr=pd.read_csv("Testing.csv")
tr.replace({'prognosis':{'Fungal infection':0,'Allergy':1,'GERD':2,'Chronic cholestasis':3,'Drug Reaction':4,
'Peptic ulcer diseae':5,'AIDS':6,'Diabetes ':7,'Gastroenteritis':8,'Bronchial Asthma':9,'Hypertension ':10,
'Migraine':11,'Cervical spondylosis':12,
'Paralysis (brain hemorrhage)':13,'Jaundice':14,'Malaria':15,'Chicken pox':16,'Dengue':17,'Typhoid':18,'hepatitis A':19,
'Hepatitis B':20,'Hepatitis C':21,'Hepatitis D':22,'Hepatitis E':23,'Alcoholic hepatitis':24,'Tuberculosis':25,
'Common Cold':26,'Pneumonia':27,'Dimorphic hemmorhoids(piles)':28,'Heart attack':29,'Varicose veins':30,'Hypothyroidism':31,
'Hyperthyroidism':32,'Hypoglycemia':33,'Osteoarthristis':34,'Arthritis':35,
'(vertigo) Paroymsal Positional Vertigo':36,'Acne':37,'Urinary tract infection':38,'Psoriasis':39,
'Impetigo':40}},inplace=True)

# Held-out feature matrix and labels for the accuracy printouts.
X_test= tr[l1]
y_test = tr[["prognosis"]]
np.ravel(y_test)  # NOTE(review): return value is discarded -- this line has no effect.
# ------------------------------------------------------------------------------------------------------
def DecisionTree():
from sklearn import tree
clf3 = tree.DecisionTreeClassifier() # empty model of the decision tree
clf3 = clf3.fit(X,y)
# calculating accuracy-------------------------------------------------------------------
from sklearn.metrics import accuracy_score
y_pred=clf3.predict(X_test)
print(accuracy_score(y_test, y_pred))
print(accuracy_score(y_test, y_pred,normalize=False))
# -----------------------------------------------------
psymptoms = [Symptom1.get(),Symptom2.get(),Symptom3.get(),Symptom4.get(),Symptom5.get()]
for k in range(0,len(l1)):
# print (k,)
for z in psymptoms:
if(z==l1[k]):
l2[k]=1
inputtest = [l2]
predict = clf3.predict(inputtest)
predicted=predict[0]
h='no'
for a in range(0,len(disease)):
if(predicted == a):
h='yes'
break
if (h=='yes'):
t1.delete("1.0", END)
t1.insert(END, disease[a])
else:
t1.delete("1.0", END)
t1.insert(END, "Not Found")
def randomforest():
from sklearn.ensemble import RandomForestClassifier
clf4 = RandomForestClassifier()
clf4 = clf4.fit(X,np.ravel(y))
# calculating accuracy-------------------------------------------------------------------
from sklearn.metrics import accuracy_score
y_pred=clf4.predict(X_test)
print(accuracy_score(y_test, y_pred))
print(accuracy_score(y_test, y_pred,normalize=False))
# -----------------------------------------------------
psymptoms = [Symptom1.get(),Symptom2.get(),Symptom3.get(),Symptom4.get(),Symptom5.get()]
for k in range(0,len(l1)):
for z in psymptoms:
if(z==l1[k]):
l2[k]=1
inputtest = [l2]
predict = clf4.predict(inputtest)
predicted=predict[0]
h='no'
for a in range(0,len(disease)):
if(predicted == a):
h='yes'
break
if (h=='yes'):
t2.delete("1.0", END)
t2.insert(END, disease[a])
else:
t2.delete("1.0", END)
t2.insert(END, "Not Found")
def NaiveBayes():
from sklearn.naive_bayes import GaussianNB
gnb = GaussianNB()
gnb=gnb.fit(X,np.ravel(y))
# calculating accuracy-------------------------------------------------------------------
from sklearn.metrics import accuracy_score
y_pred=gnb.predict(X_test)
print(accuracy_score(y_test, y_pred))
print(accuracy_score(y_test, y_pred,normalize=False))
# -----------------------------------------------------
psymptoms = [Symptom1.get(),Symptom2.get(),Symptom3.get(),Symptom4.get(),Symptom5.get()]
for k in range(0,len(l1)):
for z in psymptoms:
if(z==l1[k]):
l2[k]=1
inputtest = [l2]
predict = gnb.predict(inputtest)
predicted=predict[0]
h='no'
for a in range(0,len(disease)):
if(predicted == a):
h='yes'
break
if (h=='yes'):
t3.delete("1.0", END)
t3.insert(END, disease[a])
else:
t3.delete("1.0", END)
t3.insert(END, "Not Found")
# gui_stuff------------------------------------------------------------------------------------
root = Tk()
root.configure(background='blue')
# entry variables
Symptom1 = StringVar()
Symptom1.set(None)
Symptom2 = StringVar()
Symptom2.set(None)
Symptom3 = StringVar()
Symptom3.set(None)
Symptom4 = StringVar()
Symptom4.set(None)
Symptom5 = StringVar()
Symptom5.set(None)
Name = StringVar()
# Heading
w2 = Label(root, justify=LEFT, text="Disease Predictor using Machine Learning", fg="white", bg="blue")
w2.config(font=("Elephant", 30))
w2.grid(row=1, column=0, columnspan=2, padx=100)
w2 = Label(root, justify=LEFT, text="A Project by Yaswanth Sai Palaghat", fg="white", bg="blue")
w2.config(font=("Aharoni", 30))
w2.grid(row=2, column=0, columnspan=2, padx=100)
# labels
NameLb = Label(root, text="Name of the Patient", fg="yellow", bg="black")
NameLb.grid(row=6, column=0, pady=15, sticky=W)
S1Lb = Label(root, text="Symptom 1", fg="yellow", bg="black")
S1Lb.grid(row=7, column=0, pady=10, sticky=W)
S2Lb = Label(root, text="Symptom 2", fg="yellow", bg="black")
S2Lb.grid(row=8, column=0, pady=10, sticky=W)
S3Lb = Label(root, text="Symptom 3", fg="yellow", bg="black")
S3Lb.grid(row=9, column=0, pady=10, sticky=W)
S4Lb = Label(root, text="Symptom 4", fg="yellow", bg="black")
S4Lb.grid(row=10, column=0, pady=10, sticky=W)
S5Lb = Label(root, text="Symptom 5", fg="yellow", bg="black")
S5Lb.grid(row=11, column=0, pady=10, sticky=W)
lrLb = Label(root, text="DecisionTree", fg="white", bg="red")
lrLb.grid(row=15, column=0, pady=10,sticky=W)
destreeLb = Label(root, text="RandomForest", fg="white", bg="red")
destreeLb.grid(row=17, column=0, pady=10, sticky=W)
ranfLb = Label(root, text="NaiveBayes", fg="white", bg="red")
ranfLb.grid(row=19, column=0, pady=10, sticky=W)
# entries
OPTIONS = sorted(l1)
NameEn = Entry(root, textvariable=Name)
NameEn.grid(row=6, column=1)
S1En = OptionMenu(root, Symptom1,*OPTIONS)
S1En.grid(row=7, column=1)
S2En = OptionMenu(root, Symptom2,*OPTIONS)
S2En.grid(row=8, column=1)
S3En = OptionMenu(root, Symptom3,*OPTIONS)
S3En.grid(row=9, column=1)
S4En = OptionMenu(root, Symptom4,*OPTIONS)
S4En.grid(row=10, column=1)
S5En = OptionMenu(root, Symptom5,*OPTIONS)
S5En.grid(row=11, column=1)
dst = Button(root, text="DecisionTree", command=DecisionTree,bg="green",fg="yellow")
dst.grid(row=8, column=3,padx=10)
rnf = Button(root, text="Randomforest", command=randomforest,bg="green",fg="yellow")
rnf.grid(row=9, column=3,padx=10)
lr = Button(root, text="NaiveBayes", command=NaiveBayes,bg="green",fg="yellow")
lr.grid(row=10, column=3,padx=10)
#textfileds
t1 = Text(root, height=1, width=40,bg="orange",fg="black")
t1.grid(row=15, column=1, padx=10)
t2 = Text(root, height=1, width=40,bg="orange",fg="black")
t2.grid(row=17, column=1 , padx=10)
t3 = Text(root, height=1, width=40,bg="orange",fg="black")
t3.grid(row=19, column=1 , padx=10)
root.mainloop()
| [
"anjalipatil7757@gmail.com"
] | anjalipatil7757@gmail.com |
5c41edc54e8e9283a6870f5b3623e2c2ac088296 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/AlipayOpenPublicTopicModifyResponse.py | 6966bb50727abcc86c30bc3aab8df6044201e7f2 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 446 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayOpenPublicTopicModifyResponse(AlipayResponse):
def __init__(self):
super(AlipayOpenPublicTopicModifyResponse, self).__init__()
def parse_response_content(self, response_content):
response = super(AlipayOpenPublicTopicModifyResponse, self).parse_response_content(response_content)
| [
"liuqun.lq@alibaba-inc.com"
] | liuqun.lq@alibaba-inc.com |
71124aa4ef3291702963977a083e44d82483c956 | f1d3490858040c8917746807bbecc42a316f97fe | /pyMysql.py | e099c451ee25b90c4a02e470339371ba8e2487e1 | [
"Apache-2.0"
] | permissive | corder-ybh/gpst | 7b245fba65bcc2c9d8b8fcf9a1d9f1688abd5cef | 3e66456f14618f9ef251a26ccce82fb84b84adaa | refs/heads/master | 2021-05-09T11:19:48.395228 | 2019-02-20T10:04:04 | 2019-02-20T10:04:04 | 118,988,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,599 | py | # -*- coding: UTF-8 -*-
import tushare as ts
import matplotlib.pyplot as plt
import pandas as pd
from sqlalchemy import create_engine
import datetime
import time
import math
engine = create_engine('mysql://root:root@localhost:3306/finance?charset=utf8')
df = pd.read_sql(sql="SELECT `index`,`code`,`name`,`industry` FROM finance.stock_basics", con=engine)
#获取n天前日期
def getNdatAgo(date, n) :
t = time.strptime(date, "%Y-%m-%d")
y, m, d = t[0:3]
Date = str(datetime.datetime(y, m, d) - datetime.timedelta(n)).split()
return Date[0]
def calAvePrice(data) :
code = data['code']
index = data['index']
name = data['name']
industry = data['industry']
#获取数据
tDate = time.strftime("%Y-%m-%d", time.localtime())
nDate = getNdatAgo(tDate, 100)
caDf = ts.get_k_data(code, start=nDate, end=tDate, retry_count=5,pause=2)
if (caDf is None or caDf.empty) :
return
#计算均线
days = [5, 34, 55, 50]
for ma in days:
column_name = "MA{}".format(ma)
caDf[column_name] = caDf[['close']].rolling(window=ma).mean()
endDf = caDf.tail(10)
feature = 0
newPrice = 0
for i in endDf.index :
temp = endDf.loc[i]
newPrice = temp['close']
if (math.isnan(temp['MA5'])) :
return
elif (temp['MA5'] > temp['MA55']) :
feature += 1
else :
feature = 1
if (feature > 6) :
head = endDf.iloc[1]
tail = endDf.iloc[-1]
if(head['MA5'] < head['MA55'] and tail['MA5'] > tail['MA55']) :
res = pd.DataFrame({"index":index,"code":code, "name":name,"industry":industry, "newPrice":newPrice},index=["0"])
res.to_csv("res.csv",index=0, encoding='utf8', sep=',', mode='a', header=0)
#筛选股价在55日均线下的股票
def under55jx(data) :
code = data['code']
index = data['index']
name = data['name']
industry = data['industry']
# 获取数据
tDate = time.strftime("%Y-%m-%d", time.localtime())
nDate = getNdatAgo(tDate, 700)
caDf = ts.get_k_data(code, start=nDate, end=tDate, retry_count=5, pause=2)
if (caDf is None or caDf.empty) :
return
days = [5, 34, 55, 50]
for ma in days:
column_name = "MA{}".format(ma)
caDf[column_name] = caDf[['close']].rolling(window=ma).mean()
# 计算浮动比例
caDf["pcMA55"] = caDf.MA55.pct_change()
sum = caDf.pcMA55.sum()
print(sum)
for i in df.index :
# if (i < 1970) :
# continue
under55jx(df.loc[10])
print(df.loc[i]['index'])
| [
"wa@q.com"
] | wa@q.com |
dece7f6ce1f98a6bb3ae51e8add48daeef2acce4 | 5c2723ea0b2014473420f46d61fd6031b6a99500 | /ChatLog.py | 258d85fd4d5a80e3f367828cab9a44b062612c53 | [
"MIT"
] | permissive | Toontown-Open-Source-Initiative/Toontown-Chat-Log | 8f7adab50bce7f88339bf90c757370df666b503a | a3eaf4477f964741b72b33d76f00d178de01aff9 | refs/heads/main | 2023-08-26T16:42:56.473577 | 2021-10-21T01:06:19 | 2021-10-21T01:06:19 | 419,505,671 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 18,113 | py | from panda3d.core import *
from direct.gui.DirectGui import *
from otp.otpbase import OTPLocalizer, OTPGlobals
from toontown.speedchat.SCGlobals import speedChatStyles
from toontown.toonbase import ToontownGlobals, TTLocalizer
from otp.nametag import NametagGlobals
from otp.speedchat.ColorSpace import *
from direct.showbase.PythonUtil import makeTuple
from otp.nametag import WhisperPopup
class ChatLog(DirectButton):
notify = directNotify.newCategory('ChatLog')
def __init__(self, chatMgr, **kwargs):
self.chatMgr = chatMgr
gui = loader.loadModel('phase_3/models/gui/ChatPanel')
def findNodes(names, model = gui):
results = []
for name in names:
for nm in makeTuple(name):
node = model.find('**/%s' % nm)
if not node.isEmpty():
results.append(node)
break
return results
def scaleNodes(nodes, scale):
bgTop, bgBottom, bgLeft, bgRight, bgMiddle, bgTopLeft, bgBottomLeft, bgTopRight, bgBottomRight = nodes
bgTopLeft.setSx(aspect2d, scale)
bgTopLeft.setSz(aspect2d, scale)
bgBottomRight.setSx(aspect2d, scale)
bgBottomRight.setSz(aspect2d, scale)
bgBottomLeft.setSx(aspect2d, scale)
bgBottomLeft.setSz(aspect2d, scale)
bgTopRight.setSx(aspect2d, scale)
bgTopRight.setSz(aspect2d, scale)
bgTop.setSz(aspect2d, scale)
bgBottom.setSz(aspect2d, scale)
bgLeft.setSx(aspect2d, scale)
bgRight.setSx(aspect2d, scale)
nodes = findNodes([('top', 'top1'), 'bottom', 'left', 'right', 'middle', 'topLeft', 'bottomLeft', 'topRight',
'bottomRight'])
scaleNodes(nodes, 0.25)
args = {'parent': base.a2dBottomCenter, 'relief': None, 'geom': gui, 'geom_scale': (1, 1, 0.55),
'sortOrder': DGG.FOREGROUND_SORT_INDEX}
kwargs.update(args)
DirectButton.__init__(self, **kwargs)
self.initialiseoptions(ChatLog)
scaleNodes(nodes, 0.45)
buttonRowOffset = 0.25
centerOffset = 0.035
self.currentTab = 0
self.chatTabs = []
mainTab = DirectButton(parent=self, relief=None, geom=gui, geom_scale=(1.2, 1, 0.55), text=TTLocalizer.ChatLogTabMain,
text_scale=0.25, text_pos=(0.6, -0.3, 0.0), scale=0.15, pos=(centerOffset, 0.0, 0.09),
command=self.__toggleButton, extraArgs=[0])
whisperTab = DirectButton(parent=self, relief=None, geom=gui, geom_scale=(1.2, 1, 0.55), text=TTLocalizer.ChatLogTabWhispers,
text_scale=0.25, text_pos=(0.6, -0.3, 0.0), text_fg=(0, 0, 0, 0.5), scale=0.15,
pos=(buttonRowOffset + centerOffset, 0.0, 0.09), command=self.__toggleButton, extraArgs=[1])
globalTab = DirectButton(parent=self, relief=None, geom=gui, geom_scale=(1.2, 1, 0.55), text=TTLocalizer.ChatLogTabGlobal,
text_scale=0.25, text_pos=(0.6, -0.3, 0.0), text_fg=(0, 0, 0, 0.5), scale=0.15,
pos=((buttonRowOffset * 2) + centerOffset, 0.0, 0.09), command=self.__toggleButton, extraArgs=[2])
systemTab = DirectButton(parent=self, relief=None, geom=gui, geom_scale=(1.2, 1, 0.55), text=TTLocalizer.ChatLogTabSystem,
text_scale=0.25, text_pos=(0.6, -0.3, 0.0), text_fg=(0, 0, 0, 0.5), scale=0.15,
pos=((buttonRowOffset * 3) + centerOffset, 0.0, 0.09), command=self.__toggleButton, extraArgs=[3])
self.chatTabs.append(mainTab)
self.chatTabs.append(whisperTab)
self.chatTabs.append(globalTab)
self.chatTabs.append(systemTab)
self.logs = []
self.realLogs = []
self.currents = []
self.texts = []
self.textNodes = []
self.notificationBubbles = []
# Generate the stuff for each tab
for x in range(len(self.chatTabs)):
chatTab = self.chatTabs[x]
chatTabPos = chatTab.getPos()
chatTab.flattenStrong()
chatTab.wrtReparentTo(self.chatMgr.chatLogNode)
log = []
realLog = []
current = 0
text = TextNode('text')
text.setWordwrap(23.5)
text.setAlign(TextNode.ALeft)
text.setTextColor(0, 0, 0, 1)
text.setFont(ToontownGlobals.getToonFont())
textNode = self.attachNewNode(text, 0)
textNode.setPos(0.0, 0.0, -0.05)
textNode.setScale(0.04)
notificationBubble = DirectLabel(self, relief=None, scale=0.075, pos=chatTabPos, text='0', text_pos=(2.0, 0.0, 0.0), text_fg=(1, 0, 0, 1), text_shadow=(0, 0, 0, 1))
notificationBubble.hide()
self.logs.append(log)
self.realLogs.append(realLog)
self.currents.append(current)
self.texts.append(text)
self.textNodes.append(textNode)
self.notificationBubbles.append(notificationBubble)
self.guildHint = None
scaleNodes(nodes, 0.25)
if base.cr.wantSpeedchatPlus():
self.guildEntry = DirectEntry(self, relief=None, state=DGG.NORMAL, geom=gui, geom_scale=(1, 1, 0.085),
text_scale=0.045, text_pos=(0.0, -0.05, 0.0), pos=(0.0, 0.0, -0.575),
numLines=1, width=20.0, cursorKeys=False)
self.guildEntry.setTransparency(True)
self.guildEntry.bind(DGG.OVERFLOW, self.sendGuildChat)
self.guildEntry.bind(DGG.TYPE, self.typeCallback)
self.guildEntry.bind(DGG.ERASE, self.typeCallback)
self.guildEntry.bind(DGG.ENTER, self.enterCallback)
self.guildEntry.bind(DGG.EXIT, self.exitCallback)
self.accept('enter', self.sendGuildChat)
else:
self.guildEntry = None
# TODO: Temporary
self.resetGuildHint()
gui.removeNode()
self.autoScroll = True
self.closed = False
# Left clicking the Chat Log will drag it around the screen
self.bind(DGG.B1PRESS, self.dragStart)
self.bind(DGG.B1RELEASE, self.dragStop)
# Right clicking the Chat Log will scale it up and down
self.bind(DGG.B3PRESS, self.scaleStart)
self.bind(DGG.B3RELEASE, self.scaleStop)
# Middle mouse button will go through the allowed opacities
self.bind(DGG.B2PRESS, self.opacityStart, extraArgs=[True])
self.bind(DGG.B2RELEASE, self.opacityStart, extraArgs=[False])
self.accept('addChatHistory', self.__addChatHistory)
self.accept('SpeedChatStyleChange', self.__updateSpeedChatStyle)
self.__toggleButton(0)
self.hotkey = None
self.opacity = 0.5
def setGuildHint(self, hintText):
if not self.guildEntry:
return
self.guildEntry.set(hintText)
self.guildEntry.setCursorPosition(0)
self.guildHint = hintText
def resetGuildHint(self):
self.setGuildHint(TTLocalizer.ChatLogSendToGuild)
def sendGuildChat(self, *args):
if not self.guildEntry:
return
if self.guildHint:
return
message = self.guildEntry.get(plain=True).strip()
self.resetGuildHint()
if message:
base.talkAssistant.sendGuildTalk(message)
self.guildEntry['focus'] = 1
def typeCallback(self, *args):
message = self.guildEntry.get(plain=True)
if self.guildHint and message != self.guildHint:
message = message.replace(self.guildHint, '')
self.guildHint = None
if base.whiteList:
message = base.whiteList.processThroughAll(message)
if message:
self.guildEntry.set(message)
else:
self.resetGuildHint()
def enterCallback(self, *args):
self.chatMgr.chatInputNormal.chatEntry['backgroundFocus'] = 0
def exitCallback(self, *args):
if self.chatMgr.wantBackgroundFocus:
self.chatMgr.chatInputNormal.chatEntry['backgroundFocus'] = 1
def enableHotkey(self):
self.hotkey = base.getHotkey(ToontownGlobals.HotkeyInteraction, ToontownGlobals.HotkeyChatlog)
self.accept(self.hotkey, self.openChatlog)
def disableHotkey(self):
self.ignore(self.hotkey)
def destroy(self):
if not hasattr(self, 'logs'):
return
del self.logs
del self.texts
for textNode in self.textNodes:
textNode.removeNode()
del self.textNodes
taskMgr.remove(self.taskName('dragTask'))
taskMgr.remove(self.taskName('scaleTask'))
DirectButton.destroy(self)
self.ignoreAll()
def show(self):
if self.closed:
return
DirectButton.show(self)
node = self.chatMgr.chatLogNode
node.show()
self.__updateSpeedChatStyle()
self.computeRealLog(0, opening=True)
self.bind(DGG.ENTER, self.acceptWheelMovements)
self.bind(DGG.EXIT, self.ignoreWheelMovements)
def hide(self):
DirectButton.hide(self)
node = self.chatMgr.chatLogNode
node.hide()
self.ignoreWheelMovements()
def closeChatlog(self):
self.closed = True
self.hide()
def openChatlog(self):
if not self.closed:
return
if not base.localAvatar.tutorialAck:
return
self.closed = False
self.show()
def toggleChatLog(self):
if self.closed:
self.openChatlog()
else:
self.closeChatlog()
def scrollToCurrent(self, tab):
minimum = max(0, self.currents[tab] - 12)
self.texts[tab].setText('\n'.join(self.realLogs[tab][minimum:self.currents[tab]]))
def computeRealLog(self, tab, opening=False, forcePush=False):
oldText = self.texts[tab].getText()
self.texts[tab].setText('\n'.join(self.logs[tab]))
self.realLogs[tab] = self.texts[tab].getWordwrappedText().split('\n')
notificationBubble = self.notificationBubbles[tab]
missedNotifications = int(notificationBubble['text'])
if not opening:
if not forcePush:
self.notify.debug('forcepush: ' + str(forcePush))
if tab != self.currentTab:
missedNotifications += 1
notificationBubble.setText(str(missedNotifications))
if missedNotifications > 0 and tab != self.currentTab:
notificationBubble.show()
else:
notificationBubble.hide()
if self.autoScroll:
self.currents[tab] = len(self.realLogs[tab])
self.scrollToCurrent(tab)
else:
self.texts[tab].setText(oldText)
def __updateSpeedChatStyle(self):
color = speedChatStyles[base.localAvatar.speedChatStyleIndex][3]
h, s, v = rgb2hsv(*color)
color = hsv2rgb(h, 0.5 * s, v)
r, g, b = color
self['geom_color'] = (r, g, b, self.opacity)
for tab in self.chatTabs:
tab['geom_color'] = (r, g, b, self.opacity)
self.guildEntry['geom_color'] = (r, g, b, self.opacity)
def __addChatHistory(self, name, font, speechFont, color, chat, type=WhisperPopup.WTNormal):
tab = 0
colon = ':'
forcePush = False
if name and not font and not speechFont:
tab = 1
if not speechFont:
speechFont = OTPGlobals.getInterfaceFont()
if font == ToontownGlobals.getSuitFont():
color = 5
if not name:
if ":" in chat:
name, chat = chat.split(":", 1)
else:
name = 'System'
if not font:
font = OTPGlobals.getInterfaceFont()
if type == WhisperPopup.WTSystem:
tab = 3
if isinstance(color, int):
color = Vec4(0.8, 0.3, 0.6, 1)
elif type == WhisperPopup.WTGuild:
tab = 2
elif type == WhisperPopup.WTQuickTalker:
forcePush = True
if isinstance(color, int):
color = NametagGlobals.getArrowColor(color)
self.logs[tab].append('\x01%s\x01\x01%s\x01%s%s\x02\x02 \x01%s\x01%s\x02' % (OTPLocalizer.getPropertiesForFont(font),
OTPLocalizer.getPropertiesForColor(color),
name, colon, OTPLocalizer.getPropertiesForFont(speechFont),
chat))
while len(self.logs[tab]) > 250:
del self.logs[tab][0]
self.computeRealLog(tab, forcePush=forcePush)
def __wheel(self, amount):
oldCurrent = self.currents[self.currentTab]
minimum = min(12, len(self.realLogs[self.currentTab]))
self.currents[self.currentTab] += amount
self.autoScroll = self.currents[self.currentTab] >= len(self.realLogs[self.currentTab])
if self.autoScroll:
self.currents[self.currentTab] = len(self.realLogs[self.currentTab])
if self.currents[self.currentTab] < minimum:
self.currents[self.currentTab] = minimum
if oldCurrent != self.currents[self.currentTab]:
self.scrollToCurrent(self.currentTab)
def dragStart(self, event):
node = self.chatMgr.chatLogNode
taskMgr.remove(self.taskName('dragTask'))
vWidget2render2d = node.getPos(render2d)
vMouse2render2d = Point3(event.getMouse()[0], 0, event.getMouse()[1])
editVec = Vec3(vWidget2render2d - vMouse2render2d)
task = taskMgr.add(self.dragTask, self.taskName('dragTask'))
task.editVec = editVec
def dragTask(self, task):
node = self.chatMgr.chatLogNode
mwn = base.mouseWatcherNode
if mwn.hasMouse():
vMouse2render2d = Point3(mwn.getMouse()[0], 0, mwn.getMouse()[1])
newPos = vMouse2render2d + task.editVec
if newPos[0] < 0.5:
node.wrtReparentTo(base.a2dBottomLeft)
else:
node.wrtReparentTo(base.a2dBottomRight)
windowSizeX = base.win.getProperties().getXSize()
windowSizeY = base.win.getProperties().getYSize()
pixelPos = self.getPos(pixel2d)
nodePixelPos = node.getPos(pixel2d)
if pixelPos[0] < -100:
node.setPos(pixel2d, nodePixelPos.getX() + 10, 0, pixelPos[2])
elif pixelPos[0] > windowSizeX - windowSizeX / 7.5:
node.setPos(pixel2d, nodePixelPos.getX() - 10, 0, pixelPos[2])
elif pixelPos[2] > 100:
node.setZ(pixel2d, nodePixelPos.getZ() - 10)
elif pixelPos[2] < -windowSizeY + 50:
node.setZ(pixel2d, nodePixelPos.getZ() + 10)
else:
node.setPos(render2d, newPos)
return task.cont
def dragStop(self, event):
taskMgr.remove(self.taskName('dragTask'))
node = self.chatMgr.chatLogNode
pos = node.getPos(render2d)
self.notify.debug("chat log pos is {}".format(pos))
def scaleStart(self, event):
node = self.chatMgr.chatLogNode
taskMgr.remove(self.taskName('scaleTask'))
vWidget2render2d = node.getPos(render2d)
vMouse2render2d = Point3(event.getMouse()[0], 0, event.getMouse()[1])
editVecLen = Vec3(vWidget2render2d - vMouse2render2d).length()
task = taskMgr.add(self.scaleTask, self.taskName('scaleTask'))
task.editVecLen = editVecLen
task.refPos = vWidget2render2d
task.initScale = node.getScale()
def scaleTask(self, task):
node = self.chatMgr.chatLogNode
mwn = base.mouseWatcherNode
if mwn.hasMouse():
vMouse2render2d = Point3(mwn.getMouse()[0], 0, mwn.getMouse()[1])
newEditVecLen = Vec3(task.refPos - vMouse2render2d).length()
newScale = task.initScale * (newEditVecLen/task.editVecLen)
if newScale > 5:
newScale = 5
if newScale < 0.25:
newScale = 0.25
node.setScale(newScale)
return task.cont
def scaleStop(self, event):
taskMgr.remove(self.taskName('scaleTask'))
node = self.chatMgr.chatLogNode
scale = node.getScale(render2d)
self.notify.debug("scale is {}".format(scale))
def opacityStart(self, state, event):
if state:
taskMgr.doMethodLater(0.1, self.updateOpacity, self.uniqueName('opacity'))
else:
taskMgr.remove(self.uniqueName('opacity'))
def updateOpacity(self, task):
value = 0.05
opacity = self.opacity - value
if opacity > 0.9:
opacity = 0.1
elif opacity < 0.1:
opacity = 0.9
self.opacity = opacity
self.__updateSpeedChatStyle()
return task.again
def __toggleButton(self, index):
self.currentTab = index
for x in range(len(self.chatTabs)):
self.chatTabs[x]['text_fg'] = (0, 0, 0, 0.5)
self.textNodes[x].hide()
self.chatTabs[index]['text_fg'] = (0, 0, 0, 1)
self.textNodes[index].show()
self.scrollToCurrent(index)
notificationBubble = self.notificationBubbles[index]
notificationBubble.setText('0')
notificationBubble.hide()
if index == 2:
self.guildEntry.show()
else:
self.guildEntry.hide()
def acceptWheelMovements(self, bind):
self.accept('wheel_up-up', self.__wheel, [-1])
self.accept('wheel_down-up', self.__wheel, [1])
def ignoreWheelMovements(self, bind = None):
self.ignore('wheel_up-up')
self.ignore('wheel_down-up')
| [
"11221724+Benjamin8693@users.noreply.github.com"
] | 11221724+Benjamin8693@users.noreply.github.com |
7d3e2e4a8668d1420c8fc1c24a2439ce0dd761f5 | d1a22d10c5538dda6a8de7e41b957a3883795e8f | /twitterbot1.py | 534881e8ad8697cbc267a6dbb07f34fed57a9d5e | [] | no_license | kattelasaikrishna/twitter-bot-followers | d063a160b15e3d8c3f2f797731aebcd92e80c74b | 7cd8dd12ffacbefc8137284caee4cd4e27f4b530 | refs/heads/main | 2023-08-27T20:05:29.282593 | 2021-10-14T18:21:08 | 2021-10-14T18:21:08 | 412,338,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 691 | py | # twitter-bot-followers
import tweepy
import time
consumer_key= "QCdRNHn1n47hzce1jjVAgrpa5"
consumer_secret= "dy723xpmvozMy37Jy8NdBLNnIxM70idmOepW7vzB1Xb20xEr3K"
access_token= "1442682448265957380-HN8ihm9D6lAp00ZGzRCNE6NCvsjNOs"
access_token_secret= "5OpsxEVnbXOlJ6ZKrgoGXMcfHb2mY9sm5BxMjxZZMtwM2"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
print("autheicated")
public_tweets = api.home_timeline()
for tweet in public_tweets:
print(tweet.text)
while True:
u= user.followers_count
api.update_profile(name=f"saikrishna {u} followers")
print(f"saikrishna {u} followers")
time.sleep(60)
| [
"noreply@github.com"
] | kattelasaikrishna.noreply@github.com |
c0501cb9929bcf12f787d370a8a1d9c9a0509d34 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/4/usersdata/133/2972/submittedfiles/swamee.py | 8bc3612585f4c89c2b305dc9bb6a96027440b5e3 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
#COMECE SEU CÓDIGO AQUI
f=input('Digite o valor de f:')
l=input('Digite o valor de l:')
q=input('Digite o valor de q:')
deltah=input('Digite o valor de deltah:')
v=input('Digite o valor de v:')
D = ((8*f*l*(q*q))/(((math.pi)**2)*9.81*deltah))**0.2
Rey = (4*q)/((math.pi)*D*v)
k = (0.25)/((math.log10((0.000002)/(3.7*D) + (5.74)/((Rey)**0.9)))**0.5)
print('%.4f, %.4f, %.4f' %D %Rey %k)
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
f64afa58f5a6f08a1523e16c3916b11fca083f7b | ff7ec5ee4110ba5a753215fca0c27c19083f29ef | /utils/sort_utils.py | 3b55af3d9535f96c8cf96063d0244626fd50db54 | [] | no_license | rlheureu/arctic | b1419405269666cd51e990746d4fcd814d6e322f | 394da68484be746560f65297bc5e30e32bed7676 | refs/heads/master | 2021-05-14T18:26:46.089795 | 2017-08-17T22:10:30 | 2017-08-17T22:10:30 | 116,071,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | '''
Created on May 31, 2017
@author: shanrandhawa
'''
def get_score_available_and_recommended(part):
if hasattr(part, "compatible") and not part.compatible: return -1
if part.recommended and part.available: return 2 + int(part.sort_order) if part.sort_order else 2
elif part.available and len(part.prices) > 0: return 1 + int(part.sort_order) if part.sort_order else 1
else: return int(part.sort_order) if part.sort_order else 0
def sort_by_available_and_recommended(a,b):
"""
unavailable parts receive the lowest score
parts that are recommended and available receive the highest score
"""
return get_score_available_and_recommended(a) - get_score_available_and_recommended(b)
| [
"shanrandhawa@calspl.com"
] | shanrandhawa@calspl.com |
f367cebe1363d802e922e8687971491e981ea2d1 | 91d2e9cf2c51a8084916b88641fbebc30e888c15 | /ai_vision_for_endoscopy/eval_figures.py | acd6e644fa71d5e41ffc9d2793a5b26d54159d54 | [] | no_license | juingzhou/ai-vision-for-endoscopy | 531685c91a8074b67a38272223bc9b4487557d0b | ef2db47da1cbcc193d37f230d254de803f2c40a6 | refs/heads/master | 2023-05-30T01:36:05.225395 | 2019-06-01T00:34:06 | 2019-06-01T00:34:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,784 | py | #!/usr/bin/env python3
# filename: eval_figures.py
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from pylab import savefig
from sklearn import metrics
from sklearn.metrics import confusion_matrix
import itertools
import model_evaluation as m_eval
def make_acc_loss_plots(histories, eval_fig_path, plot_run_name):
# Want to plot accuracy and loss over training epochs
plot_these = [['acc','val_acc','accuracy','Acc'],
['loss','val_loss','loss','Loss']]
# [(history key), (history key), (plot label), (filename affix)]
for plot_this in plot_these:
plt.figure()
for i in range(len(histories)):
plt.plot(histories[i].history[plot_this[0]],
linestyle='solid') # accuracy
for i in range(len(histories)):
plt.plot(histories[i].history[plot_this[1]],
linestyle='dashed') # loss
plt.title('{} model {}'.format(
plot_run_name, plot_this[2] ) )
plt.ylabel(plot_this[2])
plt.xlabel('epoch')
legend_labels = []
for i in range(len(histories)):
legend_labels.append(f'train fold {i}')
for i in range(len(histories)):
legend_labels.append(f'validation f{i}')
if plot_this[0]=='acc':
plt.legend(legend_labels, loc='lower right')
if plot_this[0]=='loss':
plt.legend(legend_labels, loc='upper right')
savefig(eval_fig_path +
'{}_{}.png'.format(plot_run_name, plot_this[3]),
dpi=300, bbox_inches='tight')
plt.close()
def make_roc_plot(test_set, eval_fig_path, plot_run_name):
roc_data = metrics.roc_curve(test_set['abnormality'],
test_set['abnormality_pred'])
fpr, tpr, thrsh = roc_data
roc_auc = metrics.auc(fpr, tpr)
plt.figure()
lw = 2
plt.plot(fpr, tpr, color='darkorange',
lw=lw, label=f'ROC curve (area = {roc_auc:0.2f})')
plt.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title(f'{plot_run_name} ROC curve')
plt.legend(loc='lower right')
savefig(eval_fig_path
+f'{plot_run_name}_ROC_Curve.png',
dpi=300, bbox_inches='tight')
plt.close()
def pick_thresh_make_figures(evaluations, test_w_reckoning_choices,
eval_path, eval_fig_path, plot_run_name):
# Pick threshold for a specific set of reckonings.
# (use thresh=0.5 and another good value, with FN=0 and FP minimized, if
# thresh must be different from 0.5 to achieve that result)
thresh = m_eval.pick_threshold(evaluations, eval_path, plot_run_name)
evaluations_chosen = \
evaluations.loc[evaluations['Score Threshold'] == thresh]
test_w_reckonings = test_w_reckoning_choices[['abnormality',
'abnormality_pred',
f'{thresh:0.3f}']]
# CM fig
make_eval_metric_figures(test_w_reckonings, thresh,
eval_fig_path, plot_run_name)
if thresh != 0.5:
# Repeat with thresh=0.5
thresh = 0.5
evaluations_compare = \
evaluations.loc[evaluations['Score Threshold'] == thresh]
dataframes = [evaluations_chosen, evaluations_compare]
evaluations_chosen = pd.concat(dataframes)
test_w_reckonings = test_w_reckoning_choices[['abnormality',
'abnormality_pred',
f'{thresh:0.3f}']]
# CM fig
make_eval_metric_figures(test_w_reckonings, thresh,
eval_fig_path, plot_run_name)
evaluations_chosen.to_csv(eval_path +
f'{plot_run_name}_eval_' +
f'thresholds_chosen.csv', index=None)
def make_eval_metric_figures(test_w_reckonings, thresh,
eval_fig_path, plot_run_name):
# code from:
# https://scikit-learn.org/
# stable/auto_examples/model_selection/plot_confusion_matrix.html
# #sphx-glr-auto-examples-model-selection-plot-confusion-matrix-py
# Should have class_names as input:
class_names = ['Normal','Abnormal']
#print(__doc__)
df = test_w_reckonings # will turn column of this dataframe to list
y_test = df.loc[:,['abnormality']].T.values.tolist()[0] # labels
y_pred = df.loc[:,[f'{thresh:0.3f}']].T.values.tolist()[0] # reckn's
# Compute confusion matrix
cnf_matrix = confusion_matrix(y_test, y_pred)
np.set_printoptions(precision=2)
# Plot non-normalized confusion matrix
plt.figure()
plot_confusion_matrix(cnf_matrix, classes=class_names,
title=f'Confusion matrix (T={thresh:0.3f})')
savefig(eval_fig_path
+f'{plot_run_name}_Confusion_Matrix_T_{thresh:0.3f}.png',
dpi=150, bbox_inches='tight')
plt.close()
# Plot normalized confusion matrix
plt.figure()
plot_confusion_matrix(cnf_matrix, classes=class_names,
normalize=True,
title=f'Normalized confusion matrix ' +
f'(T={thresh:0.3f})')
savefig(eval_fig_path +
f'{plot_run_name}_Confusion_Matrix_norm_' +
f'T_{thresh:0.3f}.png',
dpi=150, bbox_inches='tight')
plt.close()
# Save data file
test_w_reckonings.to_csv(eval_fig_path +
f'{plot_run_name}_CM_data_' +
f'T_{thresh:0.3f}.csv', index=None)
def plot_confusion_matrix(cm, classes, normalize=False,
                          title='Confusion matrix', cmap=plt.cm.Blues):
    """
    Draw the confusion matrix *cm* on the current matplotlib figure,
    labelling axes with *classes* and annotating every cell with its
    count (or its row-normalized fraction when `normalize=True`).
    """
    if normalize:
        # Divide each row by its total so cells become per-class rates.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]

    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()

    tick_positions = np.arange(len(classes))
    plt.xticks(tick_positions, classes, rotation=45)
    plt.yticks(tick_positions, classes)

    cell_fmt = '.2f' if normalize else 'd'
    # Cells darker than half the maximum get white text for contrast.
    contrast_cutoff = cm.max() / 2.
    for row in range(cm.shape[0]):
        for col in range(cm.shape[1]):
            plt.text(col, row, format(cm[row, col], cell_fmt),
                     horizontalalignment='center',
                     color='white' if cm[row, col] > contrast_cutoff else 'black')

    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.tight_layout()
| [
"awforrester@gmail.com"
] | awforrester@gmail.com |
9831a9513edc6f1d40569d4822b3082a26726a88 | 00eda813fa8c1d035848b9dce20aff017bf01c2b | /Ask13.py | 5d3b7c4744242d054a9c9519c707ed5463e38559 | [] | no_license | GiorgosPad/Python | 9ae0ad5aa06f4a2c3e4960b9de94206479791dd8 | dd259317c79c8e04f9e8d55542eab2fccd7262bf | refs/heads/master | 2021-01-10T17:43:26.129197 | 2016-03-10T18:49:49 | 2016-03-10T18:49:49 | 53,590,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,236 | py | from PIL import Image
import ImageDraw
img = Image.open("myimage.jpg")
picture = img.load()
print img.size
print picture[10,100]
x = size.width()
y = size.height()
k = 0
colors = []*(x*y - 1)
times = []*(x*y - 1)
for i in range(1,x):
for j in range (1,y):
if i==1 and j==1:
colors[k]=picture[i,j]
k = k+1
else:
flag = False
for z in range(0,k-1):
if colors[k]==picture[i,z]:
flag = True
if flag == False:
colors[k]=picture[i,j]
k=k+1
s = len(colors)
for k in range(0,s):
for i in range(1,x):
for j in range(1,y):
if colors[k]==picture[i,j]:
times[k]= times[k] + 1
for i in range(s-1,0,-1):
for j in range(i):
if times[j]<times[j+1]:
temp = times[j]
times[j] = times[j+1]
times[j+1] = temp
temp2 = colors[j]
colors[j]= colors[j+1]
colors[j+1]=temp2
for i in range[0,4]:
print colors[i]
import Image
import ImageDraw
im = Image.new("P", (400, 400), 0)
im.putpalette([
colors[0],
colors[1],
colors[2],
colors[3],
colors[4]
])
| [
"Giorgos_23@outlook.com"
] | Giorgos_23@outlook.com |
6735448576686f80c49127dcb9fbf02d875ceb77 | d9101a2f77dcdb6241063f2716f059114e18e351 | /env/bin/python-config | 8b74cd2efd9d9d5a9ca6f23cd8fa6e9c44445bb3 | [] | no_license | CameronSima/phantom-social-media-django-rest | 7071a76514808a42fc8f1dd316f6f396cded72f2 | 687f47a73cc3044a3ea52b510029558840dfea8c | refs/heads/master | 2022-12-15T15:13:02.997289 | 2019-03-05T00:52:41 | 2019-03-05T00:52:41 | 165,405,029 | 0 | 0 | null | 2022-12-08T01:37:10 | 2019-01-12T15:46:20 | Python | UTF-8 | Python | false | false | 2,364 | #!/home/cameron/dev/reddit-clone-django-rest/env/bin/python
import sys
import getopt
import sysconfig

# python-config: report build/link settings for embedding or extending
# this Python installation.  Option set grows with the Python version.
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'help']

if sys.version_info >= (3, 2):
    # insert(-1, ...) keeps '--help' as the last listed option.
    valid_opts.insert(-1, 'extension-suffix')
    valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
    valid_opts.append('configdir')


def exit_with_usage(code=1):
    # Print a usage line listing every supported --option, then exit
    # with the given status (0 when invoked via --help).
    sys.stderr.write("Usage: {0} [{1}]\n".format(
        sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
    sys.exit(code)

try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()

# At least one option is required.
if not opts:
    exit_with_usage()

pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var

# Only the flag names matter; none of these options take values.
opt_flags = [flag for (flag, val) in opts]

if '--help' in opt_flags:
    exit_with_usage(code=0)

for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        abiflags = getattr(sys, 'abiflags', '')
        libs = ['-lpython' + pyver + abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))
    elif opt == '--extension-suffix':
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        if ext_suffix is None:
            # Older builds expose the suffix under the legacy 'SO' name.
            ext_suffix = sysconfig.get_config_var('SO')
        print(ext_suffix)
    elif opt == '--abiflags':
        if not getattr(sys, 'abiflags', None):
            exit_with_usage()
        print(sys.abiflags)
    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
| [
"cjsima@gmail.com"
] | cjsima@gmail.com | |
f6b013861f1b1b8359806f39c88e8cf6de2dd45d | ce2af69ee31e79424ec35b4bbaa437158942dbc3 | /undersampled-imbalance-cnn/imbalance-cnn/train_eval_single_file | b41aa88b490788ec61a7faa601832a32995bdd49 | [] | no_license | smallsq14/Tensorflow | 5cc9733368bd01bba7c977382055f19d91196b3f | e5c228e656c689a1ddfb2401d8a026e1ea2a402f | refs/heads/master | 2021-01-19T20:38:13.143440 | 2017-03-12T15:39:40 | 2017-03-12T15:39:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,158 | #! /usr/bin/env python
import tensorflow as tf
import numpy as np
import os
import time
import datetime
import random
import data_helpers_single_file
import mysql.connector
from random import randint
from sklearn.metrics import precision_recall_fscore_support
from sklearn.metrics import confusion_matrix
from text_cnn import TextCNN
from tensorflow.contrib import learn
# Parameters
# ==================================================
# Model Hyperparameters
tf.flags.DEFINE_integer("embedding_dim", 128, "Dimensionality of character embedding (default: 128)")
tf.flags.DEFINE_string("filter_sizes", "3,4,5", "Comma-separated filter sizes (default: '3,4,5')")
tf.flags.DEFINE_integer("num_filters", 128, "Number of filters per filter size (default: 128)")
tf.flags.DEFINE_float("dropout_keep_prob", 0.5, "Dropout keep probability (default: 0.5)")
tf.flags.DEFINE_float("l2_reg_lambda", 0.0, "L2 regularizaion lambda (default: 0.0)")
# Training parameters
tf.flags.DEFINE_integer("batch_size", 64, "Batch Size (default: 64)")
tf.flags.DEFINE_integer("num_epochs", 200, "Number of training epochs (default: 200)")
tf.flags.DEFINE_integer("evaluate_every", 100, "Evaluate model on dev set after this many steps (default: 100)")
tf.flags.DEFINE_integer("checkpoint_every", 100, "Save model after this many steps (default: 100)")
# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
print("\nParameters:")
for attr, value in sorted(FLAGS.__flags.items()):
print("{}={}".format(attr.upper(), value))
print("")
cnx = mysql.connector.connect(user='datauser',password='datauser', database='tensorflow')
cursor = cnx.cursor()
# Data Preparation
# ==================================================
# Load data
# ---- Run configuration -------------------------------------------------
# `t` selects one of six imbalance scenarios.  Each scenario fixes the
# minority-class size and which class is the minority; scenarios t < 4
# also replace the fixed shuffle seed with a random one.
random_seed = 10
run_number = 1
t = 0
rand_seed = randint(0, 9)
print("Loading data...")
imbalance_size = 5000
pos_or_neg = "positive"
if t == 0:
    imbalance_size = 1500
    pos_or_neg = "positive"
    random_seed = rand_seed
if t == 1:
    imbalance_size = 1500
    pos_or_neg = "negative"
    random_seed = rand_seed
if t == 2:
    imbalance_size = 2500
    pos_or_neg = "positive"
    random_seed = rand_seed
if t == 3:
    imbalance_size = 2500
    pos_or_neg = "negative"
    random_seed = rand_seed
if t == 4:
    imbalance_size = 3500
    pos_or_neg = "positive"
if t == 5:
    imbalance_size = 3500
    pos_or_neg = "negative"
# Bug fix: the results file and DB field were formatted with an undefined
# name `p` (NameError at runtime); run_number is the run identifier
# intended here.  The shared tag is built once instead of twice.
run_tag = 'sf_' + str(imbalance_size) + '_' + pos_or_neg + '_results_run_' + str(run_number)
outfile = open(run_tag + '.txt', 'w')
dbfieldname = run_tag
outfile.write("Data Resutls for {} {}".format(imbalance_size, pos_or_neg))
x_text, y = data_helpers_single_file.load_data_and_labels(imbalance_size, pos_or_neg)
# Build vocabulary
max_document_length = max([len(x.split(" ")) for x in x_text])
vocab_processor = learn.preprocessing.VocabularyProcessor(max_document_length)
x = np.array(list(vocab_processor.fit_transform(x_text)))
# Randomly shuffle data
np.random.seed(random_seed)
shuffle_indices = np.random.permutation(np.arange(len(y)))
#x_raw_shuffled = x_text[shuffle_indices]
x_shuffled = x[shuffle_indices]
for x in range(0, 2):
print("The x shuffled is:%s", x_shuffled)
y_shuffled = y[shuffle_indices]
# Split train/test set
# TODO: This is very crude, should use cross-validation
x_train, x_dev = x_shuffled[:-1000], x_shuffled[-1000:]
y_train, y_dev = y_shuffled[:-1000], y_shuffled[-1000:]
#x_raw = x_raw_shuffled[:-1000]
print("Writing out x dev")
np.save('dev_x.txt',x_dev)
print("Writing out y dev")
np.save('dev_y.txt',y_dev)
#Get count of positive negative in train set
pos_value = np.array([0,1])
neg_value = np.array([1,0])
list_positive_instances = []
list_negative_instances = []
list_positive_balanced = []
list_negative_balanced = []
runonceneg = 0
runoncepos = 0
for x in range(0, len(x_train)):
if (y_train[x]==pos_value).all():
print("Positive Label")
list_positive_instances.append(x_train[x])
else:
print("Negative label")
list_negative_instances.append(x_train[x])
#print("This is the new array:%s",x_train[1])
print("The count of positive labels in test: %s",len(list_positive_instances))
print("The count of negative labels in test: %s",len(list_negative_instances))
if len(list_positive_instances) > len(list_negative_instances):
print("Oversampling the negative instances")
outfile.write("Oversampling the negative instances")
for x in range(0,len(list_positive_instances)):
list_negative_balanced.append(list_negative_instances[random.randint(0,len(list_negative_instances)-1)])
print("Negative size now: {}".format(len(list_negative_balanced)))
outfile.write("Negative size now: {}".format(len(list_negative_balanced)))
list_negative_instances = list_negative_balanced
else:
print("Oversampling the positive instances")
outfile.write("Oversampling the positive instances")
for x in range(0,len(list_negative_instances)):
list_positive_balanced.append(list_positive_instances[random.randint(0,len(list_positive_instances)-1)])
print("Positive size now: %s",len(list_positive_balanced))
outfile.write("Positive size now: {}".format(len(list_positive_balanced)))
list_positive_instances = list_positive_balanced
#Regenerate the labels
positive_labels = [[0,1] for _ in list_positive_instances]
negative_labels = [[1,0] for _ in list_negative_instances]
print("Length of positive labels:%s",positive_labels)
print("Length of negative labels:%s",negative_labels)
p_length = len(positive_labels)
n_length = len(negative_labels)
y_t = np.concatenate([positive_labels, negative_labels],0)
x_t = np.array(list_positive_instances + list_negative_instances)
#for x in range(0,len(x_t)):
# print("Instances:%s",x_t[x])
#x_t = [data_helpers.clean_str(sent) for sent in x_t]
# Build vocabulary
#max_document_length = max([len(x.split(" ")) for x in x_t])
#vocab_processor = learn.preprocessing.VocabularyProcessor(max_document_length)
#x = np.array(list(vocab_processor.fit_transform(x_t)))
np.random.seed(10)
shuffle_indices = np.random.permutation(np.arange(len(y_t)))
x_train = x_t[shuffle_indices]
y_train = y_t[shuffle_indices]
print("Overall Length:%s", len(y_train))
outfile.write("Overall Length:{}".format(len(y_train)))
print("Vocabulary Size: {:d}".format(len(vocab_processor.vocabulary_)))
#print("Train/Dev split: {:d}/{:d}".format(len(y_train), len(y_dev)))
# Training
# ==================================================
with tf.Graph().as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=FLAGS.allow_soft_placement,
log_device_placement=FLAGS.log_device_placement)
sess = tf.Session(config=session_conf)
with sess.as_default():
cnn = TextCNN(
sequence_length=x_train.shape[1],
num_classes=2,
vocab_size=len(vocab_processor.vocabulary_),
embedding_size=FLAGS.embedding_dim,
filter_sizes=list(map(int, FLAGS.filter_sizes.split(","))),
num_filters=FLAGS.num_filters,
l2_reg_lambda=FLAGS.l2_reg_lambda)
# Define Training procedure
global_step = tf.Variable(0, name="global_step", trainable=False)
optimizer = tf.train.AdamOptimizer(1e-3)
grads_and_vars = optimizer.compute_gradients(cnn.loss)
train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)
# Keep track of gradient values and sparsity (optional)
grad_summaries = []
for g, v in grads_and_vars:
if g is not None:
grad_hist_summary = tf.histogram_summary("{}/grad/hist".format(v.name), g)
sparsity_summary = tf.scalar_summary("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
grad_summaries.append(grad_hist_summary)
grad_summaries.append(sparsity_summary)
grad_summaries_merged = tf.merge_summary(grad_summaries)
# Output directory for models and summaries
timestamp = str(int(time.time()))
out_dir = os.path.abspath(os.path.join(os.path.curdir, "runs", timestamp))
print("Writing to {}\n".format(out_dir))
# Summaries for loss and accuracy
loss_summary = tf.scalar_summary("loss", cnn.loss)
acc_summary = tf.scalar_summary("accuracy", cnn.accuracy)
# Train Summaries
train_summary_op = tf.merge_summary([loss_summary, acc_summary, grad_summaries_merged])
train_summary_dir = os.path.join(out_dir, "summaries", "train")
train_summary_writer = tf.train.SummaryWriter(train_summary_dir, sess.graph)
# Dev summaries
dev_summary_op = tf.merge_summary([loss_summary, acc_summary])
dev_summary_dir = os.path.join(out_dir, "summaries", "dev")
dev_summary_writer = tf.train.SummaryWriter(dev_summary_dir, sess.graph)
# Checkpoint directory. Tensorflow assumes this directory already exists so we need to create it
checkpoint_dir = os.path.abspath(os.path.join(out_dir, "checkpoints"))
checkpoint_prefix = os.path.join(checkpoint_dir, "model")
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
saver = tf.train.Saver(tf.all_variables())
# Write vocabulary
vocab_processor.save(os.path.join(out_dir, "vocab"))
# Initialize all variables
sess.run(tf.initialize_all_variables())
def train_step(x_batch, y_batch):
"""
A single training step
"""
feed_dict = {
cnn.input_x: x_batch,
cnn.input_y: y_batch,
cnn.dropout_keep_prob: FLAGS.dropout_keep_prob
}
_, step, summaries, loss, accuracy = sess.run(
[train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
train_summary_writer.add_summary(summaries, step)
def dev_step(x_batch, y_batch, writer=None):
"""
Evaluates model on a dev set
"""
feed_dict = {
cnn.input_x: x_batch,
cnn.input_y: y_batch,
cnn.dropout_keep_prob: 1.0
}
step, summaries, loss, accuracy = sess.run(
[global_step, dev_summary_op, cnn.loss, cnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
#if writer:
# writer.add_summary(summaries, step)
# Generate batches
batches = data_helpers_single_file.batch_iter(
list(zip(x_train, y_train)), FLAGS.batch_size, FLAGS.num_epochs)
# Training loop. For each batch...
for batch in batches:
x_batch, y_batch = zip(*batch)
train_step(x_batch, y_batch)
current_step = tf.train.global_step(sess, global_step)
if current_step % FLAGS.evaluate_every == 0:
print("\nEvaluation:")
dev_step(x_dev, y_dev, writer=dev_summary_writer)
print("")
if current_step % FLAGS.checkpoint_every == 0:
path = saver.save(sess, checkpoint_prefix, global_step=current_step)
print("Saved model checkpoint to {}\n".format(path))
print("\n Beginning Dev data load")
#Begin Evaluation of Dev
x_raw = np.load("dev_x.txt.npy")
y_test = np.load("dev_y.txt.npy")
y_test = np.argmax(y_test, axis=1)
# Map data into vocabulary
vocab_path = os.path.join(checkpoint_dir, "..", "vocab")
vocab_processor = learn.preprocessing.VocabularyProcessor.restore(vocab_path)
x_test = np.load("dev_x.txt.npy")
print("\nEvaluating...\n")
# Evaluation
# ==================================================
checkpoint_file = tf.train.latest_checkpoint(checkpoint_dir)
graph = tf.Graph()
with graph.as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=FLAGS.allow_soft_placement,
log_device_placement=FLAGS.log_device_placement)
sess = tf.Session(config=session_conf)
with sess.as_default():
# Load the saved meta graph and restore variables
saver = tf.train.import_meta_graph("{}.meta".format(checkpoint_file))
saver.restore(sess, checkpoint_file)
# Get the placeholders from the graph by name
input_x = graph.get_operation_by_name("input_x").outputs[0]
# input_y = graph.get_operation_by_name("input_y").outputs[0]
dropout_keep_prob = graph.get_operation_by_name("dropout_keep_prob").outputs[0]
# Tensors we want to evaluate
predictions = graph.get_operation_by_name("output/predictions").outputs[0]
# Generate batches for one epoch
batches = data_helpers_single_file.batch_iter(list(x_test), FLAGS.batch_size, 1, shuffle=False)
# Collect the predictions here
all_predictions = []
#print ("Number of batches: %s",len(batches))
for x_test_batch in batches:
batch_predictions = sess.run(predictions, {input_x: x_test_batch, dropout_keep_prob: 1.0})
all_predictions = np.concatenate([all_predictions, batch_predictions])
# Print accuracy if y_test is defined
if y_test is not None:
print("***************************************")
print("***********Results*********************")
#print("y_test: %s",y_test)
#print("x_test: %s",x_test)
correct_predictions = float(sum(all_predictions == y_test))
print("Total number of test examples: {}".format(len(y_test)))
print("All predictions%S",len(all_predictions))
print("y test: %s",len(y_test))
print("x_test: %s",len(x_test))
print("Incorrect Predictions %s", float(sum(all_predictions != y_test)))
print("Correct Predictions %s", len(y_test) - float(sum(all_predictions != y_test)))
print("Accuracy: {:g}".format(correct_predictions/float(len(y_test))))
print("Precision, Recall, Fscore")
outfile.write("\nTotal number of test examples: {}".format(len(y_test)))
outfile.write("\nAll predictions {}".format(len(all_predictions)))
outfile.write("\ny test: {}".format(len(y_test)))
outfile.write("\nx_test: {}".format(len(x_test)))
outfile.write("\nIncorrect Predictions {}".format(float(sum(all_predictions != y_test))))
outfile.write("\nCorrect Predictions {}".format(len(y_test) - float(sum(all_predictions != y_test))))
outfile.write("\nAccuracy: {:g}".format(correct_predictions / float(len(y_test))))
outfile.write("\nPrecision, Recall, Fscore")
#outfile.write(precision_recall_fscore_support(y_test, all_predictions, average='micro'))
outfile.write(np.array2string(confusion_matrix(y_test, all_predictions),separator=','))
#outfile.write(confusion_matrix(y_test, all_predictions))
outfile.close()
# Record this run's confusion-matrix counts and accuracy in MySQL.
try:
    c_matrix = confusion_matrix(y_test, all_predictions)
    data_insert = {
        'name': dbfieldname,
        'imbalance': str(imbalance_size),
        # Bug fix: was `pos_or_negative`, an undefined name (NameError).
        'positive_or_negative': pos_or_neg,
        'train_negative': p_length,
        'train_positive': n_length,
        'true_negative': c_matrix[0][0],
        'false_positive': c_matrix[0][1],
        'false_negative': c_matrix[1][0],
        'true_positive': c_matrix[1][1],
        'accuracy': (correct_predictions / float(len(y_test))),
        'incorrect': (float(sum(all_predictions != y_test))),
        'correct': (len(y_test) - float(sum(all_predictions != y_test))),
        'notes': ''
    }
    # Bug fix: mysql.connector cannot bind a dict parameter to positional
    # %s placeholders; named %(key)s placeholders match the dict above.
    sqlInsert = ('INSERT INTO cnn_runs VALUES ('
                 '%(name)s, %(imbalance)s, %(positive_or_negative)s, '
                 '%(train_negative)s, %(train_positive)s, '
                 '%(true_negative)s, %(false_positive)s, '
                 '%(false_negative)s, %(true_positive)s, '
                 '%(accuracy)s, %(incorrect)s, %(correct)s, %(notes)s)')
    print('\n')
    print(sqlInsert)
    cursor.execute(sqlInsert, data_insert)
except Exception as err:
    # Was a bare `except:` that hid the reason for every failure.
    print("error inserting: {}".format(err))
cursor.close()
cnx.close() | [
"trsonderm@gmail.com"
] | trsonderm@gmail.com | |
9a6213180a3b7206990f12a3342fc9c1ae19a54d | 01733042e84a768b77f64ec24118d0242b2f13b8 | /uhd_restpy/testplatform/sessions/ixnetwork/quicktest/traffictest_9709f3566877e5d5fb6ae115268058c6.py | 4aff724fb98589bc548a121034ccba351b540266 | [
"MIT"
] | permissive | slieberth/ixnetwork_restpy | e95673905854bc57e56177911cb3853c7e4c5e26 | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | refs/heads/master | 2023-01-04T06:57:17.513612 | 2020-10-16T22:30:55 | 2020-10-16T22:30:55 | 311,959,027 | 0 | 0 | NOASSERTION | 2020-11-11T12:15:34 | 2020-11-11T12:06:00 | null | UTF-8 | Python | false | false | 13,531 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class TrafficTest(Base):
"""
The TrafficTest class encapsulates a list of trafficTest resources that are managed by the user.
A list of resources can be retrieved from the server using the TrafficTest.find() method.
The list can be managed by using the TrafficTest.add() and TrafficTest.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'trafficTest'
_SDM_ATT_MAP = {
'ForceApplyQTConfig': 'forceApplyQTConfig',
'InputParameters': 'inputParameters',
'Mode': 'mode',
'Name': 'name',
}
def __init__(self, parent):
super(TrafficTest, self).__init__(parent)
@property
def Results(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.quicktest.results_23583c0cce1dabf7b75fe7d2ae18cfc4.Results): An instance of the Results class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.quicktest.results_23583c0cce1dabf7b75fe7d2ae18cfc4 import Results
return Results(self)._select()
@property
def TestConfig(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.quicktest.testconfig_e903cacf836ab6df3c51be95da4a21df.TestConfig): An instance of the TestConfig class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.quicktest.testconfig_e903cacf836ab6df3c51be95da4a21df import TestConfig
return TestConfig(self)._select()
@property
def TrafficSelection(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.quicktest.trafficselection_22c0efed3052ed5002942a33e331fb3b.TrafficSelection): An instance of the TrafficSelection class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.quicktest.trafficselection_22c0efed3052ed5002942a33e331fb3b import TrafficSelection
return TrafficSelection(self)
@property
def ForceApplyQTConfig(self):
"""
Returns
-------
- bool: Apply QT config
"""
return self._get_attribute(self._SDM_ATT_MAP['ForceApplyQTConfig'])
@ForceApplyQTConfig.setter
def ForceApplyQTConfig(self, value):
self._set_attribute(self._SDM_ATT_MAP['ForceApplyQTConfig'], value)
@property
def InputParameters(self):
"""
Returns
-------
- str: Input Parameters
"""
return self._get_attribute(self._SDM_ATT_MAP['InputParameters'])
@InputParameters.setter
def InputParameters(self, value):
self._set_attribute(self._SDM_ATT_MAP['InputParameters'], value)
@property
def Mode(self):
"""
Returns
-------
- str(existingMode | newMode): Test mode
"""
return self._get_attribute(self._SDM_ATT_MAP['Mode'])
@Mode.setter
def Mode(self, value):
self._set_attribute(self._SDM_ATT_MAP['Mode'], value)
@property
def Name(self):
"""
Returns
-------
- str: Test name
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
def update(self, ForceApplyQTConfig=None, InputParameters=None, Mode=None, Name=None):
"""Updates trafficTest resource on the server.
Args
----
- ForceApplyQTConfig (bool): Apply QT config
- InputParameters (str): Input Parameters
- Mode (str(existingMode | newMode)): Test mode
- Name (str): Test name
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, ForceApplyQTConfig=None, InputParameters=None, Mode=None, Name=None):
"""Adds a new trafficTest resource on the server and adds it to the container.
Args
----
- ForceApplyQTConfig (bool): Apply QT config
- InputParameters (str): Input Parameters
- Mode (str(existingMode | newMode)): Test mode
- Name (str): Test name
Returns
-------
- self: This instance with all currently retrieved trafficTest resources using find and the newly added trafficTest resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained trafficTest resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, ForceApplyQTConfig=None, InputParameters=None, Mode=None, Name=None):
"""Finds and retrieves trafficTest resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve trafficTest resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all trafficTest resources from the server.
Args
----
- ForceApplyQTConfig (bool): Apply QT config
- InputParameters (str): Input Parameters
- Mode (str(existingMode | newMode)): Test mode
- Name (str): Test name
Returns
-------
- self: This instance with matching trafficTest resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of trafficTest data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the trafficTest resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def Apply(self):
"""Executes the apply operation on the server.
Applies the specified Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('apply', payload=payload, response_object=None)
def ApplyAsync(self):
"""Executes the applyAsync operation on the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyAsync', payload=payload, response_object=None)
def ApplyAsyncResult(self):
"""Executes the applyAsyncResult operation on the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyAsyncResult', payload=payload, response_object=None)
def ApplyITWizardConfiguration(self):
"""Executes the applyITWizardConfiguration operation on the server.
Applies the specified Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)
def GenerateReport(self):
"""Executes the generateReport operation on the server.
Generate a PDF report for the last succesfull test run.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('generateReport', payload=payload, response_object=None)
def Run(self, *args, **kwargs):
"""Executes the run operation on the server.
Starts the specified Quick Test and waits for its execution to finish.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
run(InputParameters=string)list
-------------------------------
- InputParameters (str): The input arguments of the test.
- Returns list(str): This method is synchronous and returns the result of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('run', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
"""Executes the start operation on the server.
Starts the specified Quick Test.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
start(InputParameters=string)
-----------------------------
- InputParameters (str): The input arguments of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self):
"""Executes the stop operation on the server.
Stops the currently running Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('stop', payload=payload, response_object=None)
def WaitForTest(self):
"""Executes the waitForTest operation on the server.
Waits for the execution of the specified Quick Test to be completed.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('waitForTest', payload=payload, response_object=None)
| [
"andy.balogh@keysight.com"
] | andy.balogh@keysight.com |
403e19661bd40b6bf2149564ce5505a7bad6c5a9 | ad030b54da9ef8c82be636a3e51ae071d569ae53 | /.ipynb_checkpoints/orange.py | 81932c57c8d04d1f8870b21de464a5e3b00a6919 | [
"MIT"
] | permissive | AJOlowokere/anjolaCSC102 | ea24a412e4a224077d03fd53b57aeadcdd03328e | 7ede591800da60c42d432ce05e7a8c3d76cdf8bf | refs/heads/main | 2023-06-02T16:57:56.056785 | 2021-06-17T13:53:11 | 2021-06-17T13:53:11 | 355,935,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | class Orange:
priceOfOranges = 5
stock = 30
def __init__(self, quantityToBuy):
self.quantityToBuy = quantityToBuy
def OrangeSelling(self):
if int(self.quantityToBuy) > Orange.stock:
print("We do not have enough oranges, Please select a lesser quantity.")
else:
Receipt = int(self.quantityToBuy) * Orange.priceOfOranges
Orange.stock = Orange.stock - int(self.quantityToBuy)
print (f"Your amount to pay is {int(self.quantityToBuy) * Orange.priceOfOranges} and we have {Orange.stock} oranges left.")
Buyer1 = Orange(input("Please input quantity to buy:"))
Buyer1.OrangeSelling() | [
"runse.aj@gmail.com"
] | runse.aj@gmail.com |
432c2709f3d512368d6cc34f6797b5bbe1277563 | c031e924f143fbdc2e6304d784311057baadc66a | /src/fatorial.py | 3d28127f9b7ea26e965a6c8b735f84edbabb38bc | [] | no_license | gutocosta/tcr | f6e98101a76fef03739ede7addf9ce64b6a57d62 | 038316d28054c444358ad61f87b13953c833db7b | refs/heads/master | 2022-11-26T20:47:30.906332 | 2020-08-03T14:22:08 | 2020-08-03T14:22:08 | 284,720,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | def fatorial(x):
fatorial = 1
while (x > 1):
fatorial = x * fatorial
x = x - 1
return fatorial | [
"augusto.costa@instruct.com.br"
] | augusto.costa@instruct.com.br |
71126f5026b405247e66d1ea3f443391361106a4 | 1026404f494e3504d66bb9b2a8715fa841b39e47 | /testsuite/test_userinput.py | 676d34348643d12ae72548dcfc7e82cf9900a526 | [] | no_license | onicho/PRM | 1bdc2ba8653c0638e66f85f2bfa2e973a16a63d5 | c6a253e71814b45974a1129c2e4ea9a890d81cb9 | refs/heads/master | 2020-12-25T14:13:38.060686 | 2016-09-19T00:27:15 | 2016-09-19T00:27:15 | 66,206,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,749 | py | from unittest import TestCase
from unittest.mock import patch
from presentation.input import *
# REFERENCE:
# patch decorator and side-effect test idea were sourced at
# https://docs.python.org/3/library/unittest.mock-examples.html
class TestInput(TestCase):
def test_valid_epic(self):
self.assertTrue(valid_epic("TSCO"))
self.assertFalse(valid_epic("6575096859"))
def test_get_tickers(self):
with patch('builtins.input', side_effect = ['BP', 'LLOY', '0']):
self.assertEqual(get_tickers(), ['BP', 'LLOY'])
# can only accept 10 elements even if more than 10 were given
with patch('builtins.input', side_effect=['BP', 'LLOY', 'RBS', 'BRBY',
'ERM', 'CGL', 'NG', 'III',
'VOD', 'BA', 'TSCO']):
self.assertEqual(get_tickers(), ['BP', 'LLOY', 'RBS', 'BRBY',
'ERM', 'CGL', 'NG', 'III',
'VOD', 'BA']) # no TSCO
with patch('builtins.input', side_effect=['9068958', 'LLOY', '0']):
self.assertEqual(get_tickers(), ['LLOY'])
with patch('builtins.input', side_effect=['0']):
self.assertEqual(get_tickers(), [])
def test_valid_date(self):
self.assertTrue(valid_date("2009-08-11"))
self.assertTrue(valid_date("2016-09-19"))
self.assertTrue(valid_date("2020-12-31"))
self.assertFalse(valid_date("6575096859"))
self.assertFalse(valid_date("02.05.16"))
self.assertFalse(valid_date("2 Sen 2015"))
def test_get_period(self):
with patch('builtins.input', side_effect=['2009-01-01', '2015-04-12']):
self.assertEqual(get_period(), ['2009-01-01', '2015-04-12'])
with patch('builtins.input', side_effect=['2016-10-31', '2013-02-28',
'2013-02-28', '2013-03-04',
'2014', '2014-07-08']):
self.assertEqual(get_period(), ['2013-02-28', '2014-07-08'])
def test_get_rfr(self):
with patch('builtins.input', side_effect=['100', '5000',
'47392754.579843759847574983',
'2.75']):
self.assertEqual(get_rfr(), 2.75)
with patch('builtins.input', side_effect=['100.01', 'hi', '2.75']):
self.assertEqual(get_rfr(), 2.75)
with patch('builtins.input', side_effect=['99.999999999999', 'hi',
'1.5']):
self.assertEqual(get_rfr(), 99.999999999999)
| [
"olya.nicholls@hotmail.com"
] | olya.nicholls@hotmail.com |
b489ba00e68e62b4a7ec2f01ce32fef0419aa217 | 4741f9fc1c9cbfa001ac68d668922dca110312f3 | /sign_dict.py | 7b62628a40a15df5a522312c23e00a4c46031c02 | [] | no_license | tywallace/ProjectX | 25f4365c40b7ab50e6001d1f4c2ece426afbedc0 | a2b47f2513948e74a286e2394c4ab073f0575962 | refs/heads/master | 2020-05-15T11:02:37.266665 | 2014-06-16T18:37:08 | 2014-06-16T18:37:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | import csv
from street_cleaning import if_cleaning
signs_list = ["signs1.CSV","signs2.CSV","signs3.CSV","signs4.CSV","signs5.CSV","signs6.CSV","signs7.CSV","signs8.CSV"]
s = {}
for x in signs_list:
reader= csv.reader(open(x,"r"))
for row in reader:
description = if_cleaning(row[5])
if description != None:
if row[1] not in s:
s[row[1]] = description
with open("signs_final.CSV", 'wb') as outfile:
csv_writer = csv.writer(outfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
for k,v in s.items():
csv_writer.writerow([k,v[0],v[1],v[2],v[3]])
| [
"tylerwallace@gmail.com"
] | tylerwallace@gmail.com |
2a0a61de42d1fb62393a26f6a5301d80a5b9877c | 8d64c464de5ea84cad92321f2723bd1075173b13 | /Month2-ImageClassification/hu_moments/hu_moments.py | 932735ea0468c0e0a53bb52bd7d50bd9976ae3fc | [] | no_license | azhugg/pyimagesearchguru | 5ebba994b471ba1ab2ad828f579b0b2101b1c630 | 6ec56b2a48d9b9b37673ea803f9e946f203abe10 | refs/heads/master | 2020-12-02T15:07:14.250129 | 2015-11-30T02:40:52 | 2015-11-30T02:40:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 915 | py | # -- coding: utf-8 --
__author__ = 'amryfitra'
import cv2
# load the input image and convert it to grayscale
image = cv2.imread("more_shapes_example.png")
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# compute the hu moments feature vector for the entire image and show it
moments = cv2.HuMoments(cv2.moments(image)).flatten()
print("Original Moments: {}".format(moments))
cv2.imshow("Image", image)
(_, cnts, _) = cv2.findContours(image.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# loop for each contour
for (i, c) in enumerate(cnts):
# extract the ROI from the image and compute the Hu Moments feature
# vector for the ROI
(x, y, w, h) = cv2.boundingRect(c)
roi = image[y:y+h, x:x+w]
moments = cv2.HuMoments(cv2.moments(roi)).flatten()
# show the moments and ROI
print("MOMENTS FOR PLANE #{}: {}".format(i + 1, moments))
cv2.imshow("ROI", roi)
cv2.waitKey(0)
| [
"amryfitra@gmail.com"
] | amryfitra@gmail.com |
25a07a25c379411ae4de9b98384409b13c9c112b | 3a3e4dd5f5e0219029167c5340fad71b3247395a | /BlockChain/blockchain.py | f1c9f65631eb67cfd269ec5851b42250f79a109d | [] | no_license | Junvn/InterestingPythonCode | b3452e1f095cc651dffc0c87960fe18c11309669 | 37b8330e05aa5e49150b76118847b3ec4a6d812f | refs/heads/master | 2020-03-14T16:15:18.146271 | 2018-08-05T04:17:31 | 2018-08-05T04:17:31 | 131,694,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,201 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# author:janvn
# datetime:18-8-4 上午9:27
# software:PyCharm
import json
import hashlib
from time import time
from flask import Flask,jsonify,request
from uuid import uuid4
from textwrap import dedent
from urllib.parse import urlparse
import requests
class BlockChain(object):
'''
负责管理链
'''
def __init__(self):
'''
初始构造函数
:param self:
:return:
'''
self.chain=[] #存储区块链
self.current_transactions=[] #保存交易
#创建一个创世区块
self.new_block(previous_hash=1,proof=100)
self.nodes = set()
def new_block(self,proof,previous_hash=None):
'''
创建新的区块并且增加其到链
:param proof:<int> The proof given by the Proof of work algorithm
:param previous_hash:(Optional) <str> Hash of previous Block
:return:<dict> New Block
'''
block = {
'index':len(self.chain),
'timestamp':time(),
'transactions':self.current_transactions,
'proof':proof,
'previous_hash':previous_hash or self.hash(self.chain[-1])
}
# Reset the current list of transaction
self.current_transactions = []
self.chain.append(block)
return block
def new_transaction(self,sender,recipient,amount):
'''
增加新的交易到交易列表中
:param sender: <str> Address of the Sender
:param recipient: <str> Addresss of the Recipient
:param amount: <int> Amount
:return: <int> The index of the Block that will hold this transaction
'''
self.current_transactions.append({
'sender':sender,
'recipient':recipient,
'amount':amount
})
return self.last_block['index'] + 1
@staticmethod
def hash(block):
'''
Creates a SHA-256 hash of a Block
Hashes a Block 对一个块进行哈希
:param block:<dict> Block
:return:<str>
'''
#We must make sure that the Dictionary is Ordered,or we'll have inconsistent hashes
block_string = json.dumps(block,sort_keys=True).encode()
return hashlib.sha256(block_string).hexdigest()
@property
def last_block(self):
'''
返回链中的最后一个块
:return:
'''
return self.chain[-1]
def proof_of_work(self,last_proof):
'''
:param last_proof:
:return:
'''
proof = 0
while self.valid_proof(last_proof,proof) is False:
proof += 1
return proof
@staticmethod
def valid_proof(last_proof,proof):
'''
:param last_proof:
:param proof:
:return:
'''
guess = f'{last_proof}{proof}'.encode()
guess_hash = hashlib.sha256(guess).hexdigest()
return guess_hash[:4] == "0000"
def register_node(self,address):
'''
新增节点到节点列表中
:param address:
:return:
'''
parsed_url = urlparse(address)
self.nodes.add(parsed_url.netloc)
def valid_chain(self,chain):
last_block = chain[0]
current_index = 1
while current_index < len(chain):
block = chain[current_index]
print(f'{last_block}')
print(f'{block}')
print('\n------------------\n')
if block['previous_hash'] != self.hash(last_block):
return False
if not self.valid_proof(last_block['proof'],block['proof']):
return False
last_block = block
current_index += 1
return True
def resolve_conflicts(self):
'''
一致性算法
:return:
'''
neighbours = self.nodes
new_chain = None
max_length = len(self.chain)
for node in neighbours:
response = requests.get(f'http://{node}/chain')
if response.status_code == 200:
length = response.json()['length']
chain = response.json()['chain']
if length > max_length and self.valid_chain(chain):
max_length = length
new_chain = chain
if new_chain:
self.chain = new_chain
return True
return False
# Instantiate our Node
app = Flask(__name__)
#Generate a globally unique address for this node
node_indentifier = str(uuid4()).replace('-','')
# Instantiate the Blockchain
blockchain = BlockChain()
@app.route('/mine',methods=['GET'])
def mine():
'''
挖矿
:return:
'''
last_block = blockchain.last_block
last_proof = last_block['proof']
proof = blockchain.proof_of_work(last_proof)
blockchain.new_transaction(
sender='0',
recipient=node_indentifier,
amount=1,
)
previous_hash = blockchain.hash(last_block)
block = blockchain.new_block(proof,previous_hash)
response = {
'message':'New Block Forged',
'index':block['index'],
'transaction':block['transactions'],
'proof':block['proof'],
'previous_hash':block['previous_hash'],
}
return jsonify(response),200
#return "We'll mine a new Block"
@app.route("/transactions/new",methods=['POST'])
def new_transaction():
values = request.get_json()
required = ['sender','recipient','amount']
if not all(k in values for k in required):
return 'Missing values',400
#create a new transaction
index = blockchain.new_transaction(values['sender'],values['recipient'],values['amount'])
response = {'message':f'Transaction will be added to Block {index}'}
return jsonify(response),201
@app.route('/chain',methods=['GET'])
def full_chain():
response = {
'chain':blockchain.chain,
'length':len(blockchain.chain),
}
return jsonify(response),200
@app.route('/nodes/register',methods=['POST'])
def register_nodes():
values = request.get_json()
nodes = values.get('nodes')
if nodes is None:
return 'Error: Please supply a valid list of nodes', 400
for node in nodes:
blockchain.register_node(node)
response = {
'message':'New nodes have been added',
'total_nodes':list(blockchain.nodes),
}
return jsonify(response),201
@app.route('/nodes/resolve',methods=['GET'])
def consensus():
replaced = blockchain.resolve_conflicts()
if replaced:
response = {
'message':'Our chain was replaced',
'new_chain':blockchain.chain
}
else:
response = {
'message':'Our chain is authoritative',
'chain':blockchain.chain
}
return jsonify(response),200
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('-p', '--port', default=5000, type=int, help='port to listen on')
args = parser.parse_args()
port = args.port
app.run(host='0.0.0.0', port=port)
| [
"wpzhan1989@163.com"
] | wpzhan1989@163.com |
9ddd32c317555ce13ffcf02f77fdeaef759cb0a9 | 495a68ff51a720622f341f646ba03f2202a59c21 | /reversePolish.py | 3930a475a0165d51226c9b6ce10941a1fc8fd69c | [
"MIT"
] | permissive | ezquire/python-challenges | 4d82d63ca7fdac6f687bde4404101ba2e28a7b37 | c953633eb211bb315eca4ed54b7bf837588dc36f | refs/heads/master | 2020-07-13T09:09:05.314900 | 2019-11-03T17:20:03 | 2019-11-03T17:20:03 | 205,053,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 666 | py | ops = {
'+': lambda a,b: a+b,
'-': lambda a,b: a-b,
'*': lambda a,b: a*b,
'/': lambda a,b: a/b
}
def calc(exp):
stack = []
for i in exp:
if int(i):
if len(stack) > 1:
raise TypeError("Invalid RPN Expression")
stack.append(int(i))
elif i in ops.keys():
if len(stack) != 2:
raise TypeError("Invalid RPN Expression")
a = stack.pop()
b = stack.pop()
stack.append(ops[i](b,a))
return stack[0]
rpn = "3,4,-"
# rpn = '1,2,3'
# rpn = "3,4,+,+,2,*,1,+"
# rpn = "1988"
exp = rpn.split(',')
print(calc(exp)) | [
"tyler.gearing@gmail.com"
] | tyler.gearing@gmail.com |
3c64f805f30de925235ac240afe5eba8b75fd3ff | d48c8b6515021d2b072e6df6b44ca9554301270a | /gridExample.py | cbf6e534aacc3ee11ceabd2ab52dba3d3c7a7087 | [] | no_license | chadchabot/Altro | 97273d817d4caaf9bdf32f4d5ec97b86cac2809a | 07f52b8813f954b31f45a660311d8fb54664b248 | refs/heads/master | 2020-05-29T18:21:12.694144 | 2012-04-20T16:15:33 | 2012-04-20T16:15:33 | 4,079,767 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,774 | py | #! /usr/bin/env python
import time
from Tkinter import *
import tkMessageBox
#from Tkinter.ScrolledText import ScrolledText
textArea = None
querySelect = None
eb1 = None
eb2 = None
eb3 = None
eb4 = None
eb5 = None
def windowState():
global queryWindow
if queryWindow.state() == "withdrawn":
print "it is withdrawn!"
elif queryWindow.state() == "normal":
print "it is visible!"
def closeWindow():
global queryWindow
print "Closing window"
queryWindow.withdraw()
def submitQuery( option ):
global textArea
global querySelect
# textArea.config( state=NORMAL )
# option = querySelect.get()
if option == 1:
if len( eb1.get() ) != 0 :
sqlcmd = "SELECT * FROM bibrec WHERE author=" + eb1.get() + " ORDER BY author, title;"
else:
sqlcmd = "nothing entered"
elif option == 2:
sqlcmd = "SELECT COUNT (year) FROM bibrec WHERE year >=" + eb2.get() +";"
elif option == 3:
# sqlcmd = eb3.get()
sqlcmd = "SELECT author, COUNT(title) FROM bibrec GROUP BY author;"
elif option == 4:
sqlcmd = eb4.get()
else:
sqlcmd = eb5.get()
textArea.insert( END, "you chose " + str( option ) + " " + sqlcmd + "\n\\------------------------------------------------------------------------------/\n" )
textArea.see( END )
# textArea.config( state=DISABLED )
def clearPanel():
global textArea
# textArea.config( state=NORMAL )
textArea.delete( 1.0, END )
# textArea.config( state=DISABLED )
"""
def changeFocus():
global eb1, eb2, eb3, eb4, eb5
global querySelect
value = querySelect.get()
if value == 1:
eb1.focus_set()
elif value == 2:
eb2.focus_set()
elif value == 3:
eb3.focus_set()
elif value == 4:
eb4.focus_set()
elif value == 5:
eb5.focus_set()
"""
def changeFocus( entryBox ):
entryBox.focus_set()
def helpWindow():
userResponse = tkMessageBox.showinfo(
"About db queries",
"The database is structured as follows:\n\nTable name:\n\tbibrec\nColumns:\n\trec_id\t| primary key\n\tauthor\t| author name\n\ttitle\t\t| publication title\n\tpubinfo\t| publisher information\n\tcallnum\t| library reference #\n\tyear\t| year of publishing\n\txml\t\t| raw xml of MARC21 record" )
def enterPressed( variable ):
global querySelect
submitQuery( querySelect.get() )
def clearField( event ):
# print "clearField called"
global queryWindow, eb1
# print "return: event.widget is",event.widget
# print "focus is:", queryWindow.focus_get()
eb1.delete( 0, END )
queryWindow = Tk()
queryWindow.resizable(0,0)
queryWindow.title( "Query Menu" )
# four radio buttons
querySelect = IntVar()
# NOTE: add back 'takefocus=0' if the radiobuttons aren't part of the tab sequence
# how to have radiobuttons show their highlightcolor or highlightbackground when they have focus?
rb1 = Radiobutton( queryWindow, text="1", value=1, variable=querySelect, command=lambda: changeFocus( eb1 ), takefocus=0 )
rb2 = Radiobutton( queryWindow, text="2", value=2, variable=querySelect, command=lambda: changeFocus( eb2 ), takefocus=0 )
rb3 = Radiobutton( queryWindow, text="3", value=3, variable=querySelect, command=lambda: changeFocus( eb3 ), takefocus=0 )
rb4 = Radiobutton( queryWindow, text="4", value=4, variable=querySelect, command=lambda: changeFocus( eb4 ), takefocus=0 )
rb5 = Radiobutton( queryWindow, text="5", value=5, variable=querySelect, command=lambda: changeFocus( eb5 ), takefocus=0 )
querySelect.set( 1 )
eb1 = Entry( queryWindow, bg="white" )
eb2 = Entry( queryWindow, bg="white" )
eb3 = Entry( queryWindow, bg="white" )
eb4 = Entry( queryWindow, bg="white" )
eb5Text = StringVar()
eb5Text.set( "SELECT " )
eb5 = Entry( queryWindow, bg="white", textvariable=eb5Text )
#eb5.see(END)
lb1 = Label( queryWindow, text="Find all books by author:" )
lb2 = Label( queryWindow, text="How many books were published since the year: " )
lb3 = Label( queryWindow, text="List authors and # of books: " )
lb4 = Label( queryWindow, text="Canned 4" )
lb5 = Label( queryWindow, text="Enter any SQL command:" )
helpButton = Button( queryWindow, text="Help", state=NORMAL, command=helpWindow, takefocus=0 )
helpButton.grid( row=0, column=2, sticky=N+S+E+W)
rb1.grid( row=1, column=0 )
lb1.grid( row=1, column=1 )
eb1.grid( row=1, column=2 )
rb2.grid( row=2, column=0 )
lb2.grid( row=2, column=1 )
eb2.grid( row=2, column=2 )
rb3.grid( row=3, column=0 )
lb3.grid( row=3, column=1 )
eb3.grid( row=3, column=2 )
rb4.grid( row=4, column=0 )
lb4.grid( row=4, column=1 )
eb4.grid( row=4, column=2 )
rb5.grid( row=5, column=0 )
lb5.grid( row=5, column=1 )
eb5.grid( row=5, column=2 )
submitButton = Button( queryWindow, text="Submit SQL query", state=NORMAL, command=lambda: submitQuery( querySelect.get() ), takefocus=0 )
submitButton.grid( row=6, column=0, columnspan=3, sticky=N+S+E+W, pady=5)
queryWindow.bind( '<Return>', enterPressed )
queryWindow.bind( '<Key-Escape>', clearField )
queryWindow.protocol( "WM_DELETE_WINDOW", closeWindow )
scroller = Scrollbar( queryWindow, orient = VERTICAL )
textArea = Text( queryWindow, width=80, height=30, wrap=WORD, borderwidth=5, relief=GROOVE, takefocus=0, yscrollcommand=scroller.set )
textArea.grid( row=7, column=0, columnspan=3 )
scroller.config( command=textArea.yview )
scroller.grid( row=7, column=3, sticky=N+S+E+W )
clearButton = Button( queryWindow, text="Clear result window", state=NORMAL, command=clearPanel, takefocus=0 )
clearButton.grid( row=8, column=0, columnspan=3, padx=5, pady=5 )
eb1.focus_set()
print "about to disappear!"
#queryWindow.withdraw()
windowState()
time.sleep(3)
queryWindow.deiconify()
windowState()
mainloop() | [
"chabotc@FECS.UOGUELPH.CA@f29819a3-0c5e-4657-b260-b16cc275aa0b"
] | chabotc@FECS.UOGUELPH.CA@f29819a3-0c5e-4657-b260-b16cc275aa0b |
9d5268559b9f20871c0835f6a0a9edd415c007c8 | 280019d1106e6dd887f1c0fe020bcd433790d8e1 | /capture_tag/templatetags/capture_tags.py | 0b5e7b45174cd019e0a95bd44634aada96d9f16d | [
"Apache-2.0"
] | permissive | edoburu/django-capture-tag | 41af5dea34ec791791e03a95e2e52b88dd8c3ea8 | f63533dd1a5ce3926c36e5795a3767ab4d7eb6fc | refs/heads/master | 2023-08-14T05:05:00.023501 | 2021-11-16T22:04:50 | 2021-11-16T22:04:50 | 56,684,352 | 19 | 4 | Apache-2.0 | 2023-07-11T08:20:29 | 2016-04-20T12:19:46 | Python | UTF-8 | Python | false | false | 2,445 | py | from django.template import Library, Node, TemplateSyntaxError
register = Library()
@register.tag(name="capture")
def do_capture(parser, token):
"""
Capture the contents of a tag output.
Usage:
.. code-block:: html+django
{% capture %}..{% endcapture %} # output in {{ capture }}
{% capture silent %}..{% endcapture %} # output in {{ capture }} only
{% capture as varname %}..{% endcapture %} # output in {{ varname }}
{% capture as varname silent %}..{% endcapture %} # output in {{ varname }} only
For example:
.. code-block:: html+django
{# Allow templates to override the page title/description #}
<meta name="description" content="{% capture as meta_description %}{% block meta-description %}{% endblock %}{% endcapture %}" />
<title>{% capture as meta_title %}{% block meta-title %}Untitled{% endblock %}{% endcapture %}</title>
{# copy the values to the Social Media meta tags #}
<meta property="og:description" content="{% block og-description %}{{ meta_description }}{% endblock %}" />
<meta name="twitter:title" content="{% block twitter-title %}{{ meta_title }}{% endblock %}" />
"""
bits = token.split_contents()
# tokens
t_as = "as"
t_silent = "silent"
var = "capture"
silent = False
num_bits = len(bits)
if len(bits) > 4:
raise TemplateSyntaxError("'capture' node supports '[as variable] [silent]' parameters.")
elif num_bits == 4:
t_name, t_as, var, t_silent = bits
silent = True
elif num_bits == 3:
t_name, t_as, var = bits
elif num_bits == 2:
t_name, t_silent = bits
silent = True
else:
var = "capture"
silent = False
if t_silent != "silent" or t_as != "as":
raise TemplateSyntaxError("'capture' node expects 'as variable' or 'silent' syntax.")
nodelist = parser.parse(("endcapture",))
parser.delete_first_token()
return CaptureNode(nodelist, var, silent)
class CaptureNode(Node):
def __init__(self, nodelist, varname, silent):
self.nodelist = nodelist
self.varname = varname
self.silent = silent
def render(self, context):
output = self.nodelist.render(context)
context[self.varname] = output
if self.silent:
return ""
else:
return output
| [
"vdboor@edoburu.nl"
] | vdboor@edoburu.nl |
7f50743de0d5d46bbfdc8495e78fe260fec55f25 | 564d6a4d305a8ac6a7e01c761831fb2081c02d0f | /sdk/communication/azure-communication-chat/tests/test_chat_thread_client_e2e_async.py | aea980942a7dfffee1131dd715df6326a604aeac | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] | permissive | paultaiton/azure-sdk-for-python | 69af4d889bac8012b38f5b7e8108707be679b472 | d435a1a25fd6097454b7fdfbbdefd53e05029160 | refs/heads/master | 2023-01-30T16:15:10.647335 | 2020-11-14T01:09:50 | 2020-11-14T01:09:50 | 283,343,691 | 0 | 0 | MIT | 2020-07-28T22:43:43 | 2020-07-28T22:43:43 | null | UTF-8 | Python | false | false | 11,317 | py | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
import asyncio
import os
from datetime import datetime
from msrest.serialization import TZ_UTC
from azure.communication.administration import CommunicationIdentityClient
from azure.communication.chat.aio import (
ChatClient,
CommunicationUserCredential
)
from azure.communication.chat import (
ChatThreadMember,
ChatMessagePriority
)
from azure.communication.administration._shared.utils import parse_connection_str
from azure_devtools.scenario_tests import RecordingProcessor
from helper import URIIdentityReplacer
from chat_e2e_helper import ChatURIReplacer
from _shared.asynctestcase import AsyncCommunicationTestCase
from _shared.testcase import BodyReplacerProcessor, ResponseReplacerProcessor
class ChatThreadClientTestAsync(AsyncCommunicationTestCase):
def setUp(self):
super(ChatThreadClientTestAsync, self).setUp()
self.recording_processors.extend([
BodyReplacerProcessor(keys=["id", "token", "senderId", "chatMessageId", "nextLink", "members", "multipleStatus", "value"]),
URIIdentityReplacer(),
ResponseReplacerProcessor(keys=[self._resource_name]),
ChatURIReplacer()])
endpoint, _ = parse_connection_str(self.connection_str)
self.endpoint = endpoint
self.identity_client = CommunicationIdentityClient.from_connection_string(self.connection_str)
# create user
self.user = self.identity_client.create_user()
token_response = self.identity_client.issue_token(self.user, scopes=["chat"])
self.token = token_response.token
# create another user
self.new_user = self.identity_client.create_user()
# create ChatClient
self.chat_client = ChatClient(self.endpoint, CommunicationUserCredential(self.token))
def tearDown(self):
super(ChatThreadClientTestAsync, self).tearDown()
# delete created users
if not self.is_playback():
self.identity_client.delete_user(self.user)
self.identity_client.delete_user(self.new_user)
async def _create_thread(self):
# create chat thread
topic = "test topic"
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
members = [ChatThreadMember(
user=self.user,
display_name='name',
share_history_time=share_history_time
)]
self.chat_thread_client = await self.chat_client.create_chat_thread(topic, members)
self.thread_id = self.chat_thread_client.thread_id
async def _send_message(self):
# send a message
priority = ChatMessagePriority.NORMAL
content = 'hello world'
sender_display_name = 'sender name'
create_message_result = await self.chat_thread_client.send_message(
content,
priority=priority,
sender_display_name=sender_display_name)
self.message_id = create_message_result.id
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_update_thread(self):
async with self.chat_client:
await self._create_thread()
topic = "update topic"
async with self.chat_thread_client:
await self.chat_thread_client.update_thread(topic=topic)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_send_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
priority = ChatMessagePriority.NORMAL
content = 'hello world'
sender_display_name = 'sender name'
create_message_result = await self.chat_thread_client.send_message(
content,
priority=priority,
sender_display_name=sender_display_name)
self.assertTrue(create_message_result.id)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_get_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self._send_message()
message = await self.chat_thread_client.get_message(self.message_id)
assert message.id == self.message_id
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_list_messages(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self._send_message()
if self.is_live:
await asyncio.sleep(2)
chat_messages = self.chat_thread_client.list_messages(results_per_page=1)
items = []
async for item in chat_messages:
items.append(item)
assert len(items) > 0
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_update_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self._send_message()
content = "updated message content"
await self.chat_thread_client.update_message(self.message_id, content=content)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_delete_message(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self._send_message()
await self.chat_thread_client.delete_message(self.message_id)
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_list_members(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
chat_thread_members = self.chat_thread_client.list_members()
items = []
async for item in chat_thread_members:
items.append(item)
assert len(items) == 1
# delete chat threads
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_add_members(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
new_member = ChatThreadMember(
user=self.new_user,
display_name='name',
share_history_time=share_history_time)
members = [new_member]
await self.chat_thread_client.add_members(members)
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_remove_member(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
# add member first
share_history_time = datetime.utcnow()
share_history_time = share_history_time.replace(tzinfo=TZ_UTC)
new_member = ChatThreadMember(
user=self.new_user,
display_name='name',
share_history_time=share_history_time)
members = [new_member]
await self.chat_thread_client.add_members(members)
# test remove member
await self.chat_thread_client.remove_member(self.new_user)
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_send_typing_notification(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self.chat_thread_client.send_typing_notification()
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_send_read_receipt(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self._send_message()
await self.chat_thread_client.send_read_receipt(self.message_id)
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
@pytest.mark.live_test_only
@AsyncCommunicationTestCase.await_prepared_test
async def test_list_read_receipts(self):
async with self.chat_client:
await self._create_thread()
async with self.chat_thread_client:
await self._send_message()
# send read receipts first
await self.chat_thread_client.send_read_receipt(self.message_id)
if self.is_live:
await asyncio.sleep(2)
# list read receipts
read_receipts = self.chat_thread_client.list_read_receipts()
items = []
async for item in read_receipts:
items.append(item)
assert len(items) > 0
if not self.is_playback():
await self.chat_client.delete_chat_thread(self.thread_id)
| [
"noreply@github.com"
] | paultaiton.noreply@github.com |
07c86867c6a6240b881b7799c91f53d202d3a79c | e5e2b7da41fda915cb849f031a0223e2ac354066 | /sdk/python/pulumi_azure_native/media/v20200201preview/_enums.py | 60f247261361ebfece18ad62df164c9d945509e3 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | johnbirdau/pulumi-azure-native | b7d3bdddeb7c4b319a7e43a892ddc6e25e3bfb25 | d676cc331caa0694d8be99cb90b93fa231e3c705 | refs/heads/master | 2023-05-06T06:48:05.040357 | 2021-06-01T20:42:38 | 2021-06-01T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from enum import Enum
__all__ = [
'MediaGraphRtspTransport',
]
class MediaGraphRtspTransport(str, Enum):
"""
Underlying RTSP transport. This can be used to enable or disable HTTP tunneling.
"""
HTTP = "Http"
TCP = "Tcp"
| [
"noreply@github.com"
] | johnbirdau.noreply@github.com |
af9714236266fa219b968befa664596fe35ee78b | 29f4ad2ff57cead763e890369911283b9c069f70 | /setup.py | 7eb6540d314d73875b199adb38e14891d615e66a | [
"ISC"
] | permissive | kasun/BeeSQL | d793de31f7adab92eaca7fb561ad2179463e2d3d | 919799b72ebb24f8c26dee1fac536ec058021d6c | refs/heads/master | 2020-05-07T06:02:29.929075 | 2016-11-09T14:47:01 | 2016-11-09T14:47:01 | 3,487,689 | 0 | 1 | null | 2012-06-15T16:28:04 | 2012-02-19T19:37:12 | Python | UTF-8 | Python | false | false | 283 | py | #!/usr/bin/env python
from distutils.core import setup
setup(name='BeeSQL',
version='0.1',
description='Pythonic SQL library',
author='Kasun Herath',
author_email='kasunh01@gmail.com',
packages=[
'beesql',
'beesql.backends',
],
)
| [
"kasunh01@gmail.com"
] | kasunh01@gmail.com |
3fb8ee32b13c910cd2c318de55d4b41a938a264a | 2f8e16eefe77bb57d001d1ed63ba591bbb72707c | /rabbitmq-demo/prepared/fanout/new-task-fanout-solution.py | 38fb6cfe5be9ed5caa2bb92f2709d497d76c16d0 | [] | no_license | zwilling/webtech-dafot | d3e11e17d70fcb773082b89b0eb18395cd3e31be | d323355c85fe900a11f46b38e17584d95b0d0def | refs/heads/master | 2021-01-02T08:18:54.577332 | 2015-01-26T19:13:18 | 2015-01-26T19:13:18 | 25,841,218 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | #!/usr/bin/env python
#####################################
# PRODUCER
# Creates new tasks for the workers
#####################################
import sys
# read message from argument
message = ' '.join(sys.argv[1:]) or "Hello World!"
# send message with RabbitMQ:
import pika # RabbitMQ client-lib
# connection to RabbitMQ server
connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
# open channel
channel = connection.channel()
# create exchange
channel.exchange_declare(exchange="exchange-name", type="fanout")
# send
channel.basic_publish(exchange="exchange-name", routing_key="", body=message)
print " Sent %r" % (message)
| [
"zwilling@kbsg.rwth-aachen.de"
] | zwilling@kbsg.rwth-aachen.de |
872530536f3f6682b50288fc140a34b61bc5ebd4 | 566754f63c0d665af01bdad8814873468f8be888 | /python/learn/numpy/boolean.py | c40817a9576b6a15190233b89e8ef2a9284a34e1 | [
"MIT"
] | permissive | qrsforever/workspace | 7f7b0363649b73e96526745f85a22e70b1c749c9 | 53c7ce7ca7da62c9fbb3d991ae9e4e34d07ece5f | refs/heads/master | 2022-05-04T18:58:41.562544 | 2020-05-25T04:07:00 | 2020-05-25T04:07:00 | 82,469,335 | 2 | 0 | MIT | 2022-04-12T21:54:15 | 2017-02-19T15:36:43 | Jupyter Notebook | UTF-8 | Python | false | false | 1,015 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import numpy as np
import numpy.random as rand
def test1():
"""
& | 布尔运算
"""
arr = np.zeros((16, 16)) + 3
# 从第4行(列)到倒数第4行(列)
arr[4:-4, 4:-4] = 6
arr[7:-7, 7:-7] = 9
# print(arr)
index1 = arr > 2
index2 = arr < 6
compound_index = index1 & index2
compound_index = (arr > 3) & (arr < 9)
arr2 = np.copy(arr, order='K')
arr2[compound_index] = 0
print(arr2)
compound_index = (arr == 9) | (index1 & index2)
arr3 = np.copy(arr)
arr3[compound_index] = 0
print(arr3)
def test2():
"""
随机处理数据
"""
# 返回高斯分布(0, 1)的一个样本
arr = rand.randn(100)
print(arr)
# 采集数值大于0.2的子集
index = arr > 0.2
res = arr[index]
# 子集中的数据平方减2
res = res ** 2 - 2
# 放回去
arr[index] = res
print(arr)
def main():
test1()
test2()
if __name__ == "__main__":
main()
| [
"lidong8@le.com"
] | lidong8@le.com |
b4179c234188cdc8e646f84d49efdfc1d9146070 | 8e010f1f11aa5d98d1c6caee2b3c4ceca3ba59bc | /Codes/Organising data/getdata3.py | d34332a0073679325e0a498a2185ee970b540203 | [] | no_license | rheaanan/kanoe-forests-in-india- | 43f840fbc126beb1e98e4421878f036bc1d58cf7 | 3f6fd655edc18dfa4ceeb1a322789d55284f4f9a | refs/heads/master | 2020-09-02T03:20:00.280122 | 2019-11-02T08:11:24 | 2019-11-02T08:11:24 | 219,122,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | import csv
f2 = open('2005.csv')
csv_f2 = csv.reader(f2)
l = []
m = []
for i in csv_f2:
l.append(i)
m = l[0]
l = l[1: ]
nf = open("2005try.csv",'w')
csv_w = csv.writer(nf,delimiter = ",")
ll = []
for i in l:
for k in range(2,33):
ll.append(i[0])
ll.append(i[1])
ll.append(m[k])
ll.append(i[k])
csv_w.writerow(ll)
ll = []
| [
"rheaanand@Rheas-MacBook-Pro.local"
] | rheaanand@Rheas-MacBook-Pro.local |
340d4560fceeb7f8f5ce6df9db6f28fa1f292720 | 2d4240a03bfa47386677a78250df220c55a7bf6c | /PythonCookbookLearning/chapter8/8.7.3.py | 52c8d9a319d86872ad3ba20726dd89ad240dfb57 | [] | no_license | Falonie/Notes | c7976e9e7514e5d7cddf918c3c54442a89532aab | 38e980cb5170a696626085b72795a096679e972b | refs/heads/master | 2022-02-13T11:20:39.613115 | 2019-09-02T01:07:27 | 2019-09-02T01:07:27 | 99,218,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | class Base(object):
def __init__(self):
print('Base.__init__')
class A(Base):
def __init__(self):
super().__init__()
print('A.__init__')
class B(Base):
def __init__(self):
super().__init__()
print('B.__init__')
class C(A, B):
def __init__(self):
super().__init__()
print('C.__init__')
if __name__ == '__main__':
c = C()
print(C.mro())
print(C.__mro__) | [
"541002901@qq.com"
] | 541002901@qq.com |
3ff96620869445cfdc07ee398dba4ae201a83cae | ce1acf913f54d30c47376cf50f2517a9a41b4878 | /space/integration/apinf.py | b9114687ef97ab7a9de8bdeff8d958e404f25ca7 | [
"MIT"
] | permissive | mauriciovieira/openapi-space | b5c2f210adaae646c3ef7b26d5a45cc70614edc5 | cfa512ee731d53c509ab6fae06013d62048cf764 | refs/heads/master | 2021-01-01T18:05:15.626369 | 2017-07-21T10:53:05 | 2017-07-21T10:53:05 | 98,243,827 | 0 | 0 | null | 2017-07-24T23:37:26 | 2017-07-24T23:37:26 | null | UTF-8 | Python | false | false | 1,168 | py | import requests
from space.app import config
def login(username, password):
payload = {"username": username, "password": password}
r = requests.post("%s/login" % (config["apinf_base_url"]), data=payload)
response = r.json()
if ("data" in response and response.get("status", "error") == "success"):
data = response["data"]
if "userId" in data and "authToken" in data:
return (data["userId"], data["authToken"])
return (None, None)
def check_token(userID, authToken):
headers = {"X-User-Id": userID, "X-Auth-Token": authToken}
r = requests.get("%s/users" % (config["apinf_base_url"]), headers=headers)
response = r.json()
if ("data" in response and response.get("status", "error") == "success" and
len(response["data"]) > 0):
user = response["data"][0]
if "username" not in user or "emails" not in user:
return None
email = ""
for emailObj in user["emails"]:
if emailObj.get("verified", False):
email = emailObj.get("address", "")
break
return (user["username"], email)
return (None, None)
| [
"tulir@maunium.net"
] | tulir@maunium.net |
5f8c921450528de7b0c8a839d1c9565eba62d8da | c0b1449573bfc776d137b4388a2c33094538d6af | /combine.py | 9e1c4f401ab6a7a16f576388ca7b8a527cb4c4eb | [] | no_license | lynth29/pytest | a9889dc61740c1533349abb46ede96161946266c | 0656ac89ba8120090dea7d6a2f19c0534eb1a907 | refs/heads/main | 2023-06-14T05:40:06.991554 | 2021-07-13T04:53:34 | 2021-07-13T04:53:34 | 385,473,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Test one Function at a time"""
from sentiment import extract_sentiment
from process import text_contain_word
import pytest
testdata = ["I think today will be a great day",
"I do no think this will turn out well"]
@pytest.mark.parametrize('sample', testdata)
def test_extract_sentiment(sample):
sentiment = extract_sentiment(sample)
assert sentiment > 0
testdata2 = [
('There is a duck in this text',True),
('There is nothing here',False)
]
@pytest.mark.parametrize('sample, expected_output', testdata2)
def test_text_contains(sample, expected_output):
word = 'duck'
assert text_contain_word(word, sample) == expected_output
| [
"ly.nguyen@cmetric.vn"
] | ly.nguyen@cmetric.vn |
f0fa5314e57b2ec10f82632087e4cfd3728f1cff | efcd320a480fae023d5d81f16a33aaa617cb6a68 | /backend/flaskr/test.py | cb92e066454de59859f2525f671cf241c6c19980 | [
"MIT"
] | permissive | DitrusNight/My-CS-Plan | 47c77e795978c42f3851e1581fd3b74d2f4ccf0d | 622bbc34a389ff42cc348c0183d7bdc519c2ce0c | refs/heads/main | 2023-08-24T09:16:47.884440 | 2021-10-23T21:23:03 | 2021-10-23T21:23:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | i = [
[3, 4, 5],
[6, 7, 8]
]
print([a for b in i for a in b]) | [
"james@harrell365.com"
] | james@harrell365.com |
98a4f5409336a01c4f7a38567ca2dfcdf5371cbc | c28783b279c89ea98967064304eb3d883940b193 | /src/di_replication/repl_read_top_row/repl_read_top_row.py | b024ae54582d3a03c023813d070c989e91a29ca5 | [
"MIT"
] | permissive | thhapke/di_replication | c7784f7c60dee527c5498e99f66d390e94db8645 | f23e48d60c0d76603eec5071ea57d0646a44389e | refs/heads/master | 2023-02-14T14:22:54.876677 | 2021-01-14T11:40:55 | 2021-01-14T11:40:55 | 277,468,734 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,994 | py | import sdi_utils.gensolution as gs
import sdi_utils.set_logging as slog
import sdi_utils.textfield_parser as tfp
import sdi_utils.tprogress as tp
import subprocess
import logging
import os
import random
from datetime import datetime, timezone
import pandas as pd
try:
api
except NameError:
class api:
queue = list()
class Message:
def __init__(self, body=None, attributes=""):
self.body = body
self.attributes = attributes
def send(port, msg):
if port == outports[1]['name']:
api.queue.append(msg)
class config:
## Meta data
config_params = dict()
version = '0.0.1'
tags = {'sdi_utils': ''}
operator_name = 'repl_read_top_row'
operator_description = "Read Top Row"
operator_description_long = "Read top row without constraint."
add_readme = dict()
add_readme["References"] = ""
debug_mode = True
config_params['debug_mode'] = {'title': 'Debug mode',
'description': 'Sending debug level information to log port',
'type': 'boolean'}
def process(msg):
att = dict(msg.attributes)
att['operator'] = 'repl_read_top_row'
logger, log_stream = slog.set_logging(att['operator'], loglevel=api.config.debug_mode)
sql = 'SELECT TOP 1 * FROM {table}'.format(table=att['replication_table'])
logger.info('SQL statement: {}'.format(sql))
att['sql'] = sql
api.send(outports[1]['name'], api.Message(attributes=att,body=sql))
log = log_stream.getvalue()
if len(log) > 0 :
api.send(outports[0]['name'], log )
inports = [{'name': 'data', 'type': 'message', "description": "Input data"}]
outports = [{'name': 'log', 'type': 'string', "description": "Logging data"}, \
{'name': 'msg', 'type': 'message', "description": "msg with sql statement"}]
#api.set_port_callback(inports[0]['name'], process)
def test_operator():
msg = api.Message(attributes={'packageid':4711,'replication_table':'repl_table','base_table':'repl_table','latency':30,\
'append_mode' : 'I', 'data_outcome':True},body='')
process(msg)
for msg in api.queue :
print(msg.attributes)
print(msg.body)
if __name__ == '__main__':
test_operator()
if True:
subprocess.run(["rm", '-r','../../../solution/operators/sdi_replication_' + api.config.version])
gs.gensolution(os.path.realpath(__file__), api.config, inports, outports)
solution_name = api.config.operator_name + '_' + api.config.version
subprocess.run(["vctl", "solution", "bundle",'../../../solution/operators/sdi_replication_' + api.config.version, \
"-t", solution_name])
subprocess.run(["mv", solution_name + '.zip', '../../../solution/operators'])
| [
"53856509+thhapke@users.noreply.github.com"
] | 53856509+thhapke@users.noreply.github.com |
e7cb9d3d626b68c0e92c3fbeda2d26d8d2812576 | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/operations/_protected_item_operation_results_operations.py | 686c9dc622990d21ec56f8dc1e20569d14958b9b | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 6,058 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ProtectedItemOperationResultsOperations(object):
"""ProtectedItemOperationResultsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservicesbackup.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
vault_name, # type: str
resource_group_name, # type: str
fabric_name, # type: str
container_name, # type: str
protected_item_name, # type: str
operation_id, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.ProtectedItemResource"]
"""Fetches the result of any operation on the backup item.
:param vault_name: The name of the recovery services vault.
:type vault_name: str
:param resource_group_name: The name of the resource group where the recovery services vault is
present.
:type resource_group_name: str
:param fabric_name: Fabric name associated with the backup item.
:type fabric_name: str
:param container_name: Container name associated with the backup item.
:type container_name: str
:param protected_item_name: Backup item name whose details are to be fetched.
:type protected_item_name: str
:param operation_id: OperationID which represents the operation whose result needs to be
fetched.
:type operation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ProtectedItemResource, or the result of cls(response)
:rtype: ~azure.mgmt.recoveryservicesbackup.models.ProtectedItemResource or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ProtectedItemResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'vaultName': self._serialize.url("vault_name", vault_name, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'),
'containerName': self._serialize.url("container_name", container_name, 'str'),
'protectedItemName': self._serialize.url("protected_item_name", protected_item_name, 'str'),
'operationId': self._serialize.url("operation_id", operation_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ProtectedItemResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupFabrics/{fabricName}/protectionContainers/{containerName}/protectedItems/{protectedItemName}/operationResults/{operationId}'} # type: ignore
| [
"noreply@github.com"
] | catchsrinivas.noreply@github.com |
27b3b039651d93a67daa087336ee7269dcfdd2a0 | cf772451375bbaaccb7021e5d444cd25fcdf98fb | /libro/ejemplo2_7.py | 57dd634df265e99a94c676eefb360521694bc9fc | [] | no_license | ArturoBarrios9000/CYPEnriqueBC | cc0703db2fbd644d259e2b5f1a8e0ab526cd7c84 | b0b33a391ad4b52c33a4f6f93f38701683cb8d1a | refs/heads/master | 2020-07-23T18:47:06.408629 | 2019-11-12T23:52:04 | 2019-11-12T23:52:04 | 207,672,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 283 | py | NUM = int(input("Ingrese un numero entero positivo:"))
N = int(input("Ingresa otro numero entero positivo:"))
VAL=0
if NUM == 1:
VAL = 100 * N
elif NUM == 2:
VAL = 100 ** N
elif NUM ==3:
VAL = 100/N
else:
VAL = 0
print(VAL)
print("Eso es to esto eso es todo amigos")
| [
"arturobarrios9000@gmail.com"
] | arturobarrios9000@gmail.com |
aad043d9e84be1226cb1339efd2d7b18e5728634 | ce524923d9eafd54ae35fb087ff7f5e6ce610615 | /Kaggle17/Instacart/20170805 Sub Retrain Model without Median and with original features 0.1 LR.py | e563914ccfdabb2a587f947f96b283706a01b6ab | [] | no_license | jindal2309/KaggleCompetition | 36c58a2a9b9623ac5389527b347c21c0f836b227 | 4122f94762ea263338d88c3c56b60ed1131f15a2 | refs/heads/master | 2020-08-04T10:39:26.513037 | 2017-11-30T11:41:17 | 2017-11-30T11:41:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,870 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Aug 4 19:43:37 2017
@author: Jiashen Liu
Purpose: 去掉median特征,同时join会订单原有特征看看!
之前在做的时候一直在忽略原生特征
"""
from functions_in import *
import pandas as pd
import numpy as np
import time
DIR = 'data/'
W2C = pd.read_csv('data/WORD2VEC_Feat.csv')
ostreak = pd.read_csv(DIR+'order_streaks.csv')
priors, train, orders, products, aisles, departments, sample_submission = load_data(DIR)
product_name = pd.read_csv(DIR+'product_name_pca_2comp.csv')
print('Begin')
print(time.ctime())
## detail表里记录这以前所有订单的信息
priors_orders_detail = orders.merge(right=priors, how='inner', on='order_id')
#新特征
# _user_buy_product_times: 用户是第几次购买该商品
priors_orders_detail.loc[:,'_user_buy_product_times'] = priors_orders_detail.groupby(['user_id', 'product_id']).cumcount() + 1
"""
特征组1:产品组。主要看:在之前的订单记录中,产品的一些特性
_prod_tot_cnts: 产品被买的次数
_prod_reorder_tot_cnts:产品被回购的总次数
_prod_buy_first_time_total_cnt:产品被首次购买的次数
_prod_buy_second_time_total_cnt:产品被二次购买的次数
_prod_mean_cart_order:产品被放入购物篮顺序的均值
_prod_std_cart_order:产品被放入购物篮顺序的标准差
_prod_median_cart_order:产品被放入顺序的中位数
_prod_reorder_prob:不好理解,看特征重要性再说
_prod_reorder_ratio:回购率
_prod_reorder_times:产品被回购的次数??不好理解,看重要性
_prod_dow_*,_prod_hod_*,'_prod_days_since',:三个大类指标分别去衡量产品被订购的时间和日期,以及产品
被上次购买的信息
"""
agg_dict = {'user_id':{'_prod_tot_cnts':'count'},
'reordered':{'_prod_reorder_tot_cnts':'sum'},
'_user_buy_product_times': {'_prod_buy_first_time_total_cnt':lambda x: sum(x==1),
'_prod_buy_second_time_total_cnt':lambda x: sum(x==2)},
'add_to_cart_order':{'_prod_mean_cart_order':'mean',
'_prod_std_cart_order':'std'},
'order_dow':{'_prod_mean_dow':'mean',
'_prod_std_dow':'std'},
'order_hour_of_day':{'_prod_mean_hod':'mean',
'_prod_std_hod':'std'},
'days_since_prior_order':{
'_prod_sum_days_since_prior_order':'sum',
'_prod_mean_days_since_prior_order': 'mean',
'_prod_std_days_since_prior_order':'std'
}}
prd = ka_add_groupby_features_1_vs_n(priors_orders_detail, ['product_id'], agg_dict)
prd['_prod_reorder_prob'] = prd._prod_buy_second_time_total_cnt / prd._prod_buy_first_time_total_cnt
prd['_prod_reorder_ratio'] = prd._prod_reorder_tot_cnts / prd._prod_tot_cnts
#prd['_prod_reorder_times'] = 1 + prd._prod_reorder_tot_cnts / prd._prod_buy_first_time_total_cnt
prd = prd.merge(products,on='product_id',how='left')
prd = prd.merge(departments,on='department_id',how='left')
prd = prd.merge(aisles,on='aisle_id',how='left')
del prd['department']
del prd['aisle']
print('product done')
print(time.ctime())
priors_orders_detail = priors_orders_detail.merge(prd,on='product_id',how='left')
"""
特征组2: 用户组,统计一些用户的信息
_user_total_orders: 用户的总订单数
_user_sum_days_since_prior_order: 距离上次购买时间(和),这个只能在orders表里面计算,priors_orders_detail不是在order level上面unique
_user_mean_days_since_prior_order: 距离上次购买时间(均值)
_user_std_days_since_prior_order:距离上次买的时间的标准差
_user_median_days_since_prior_order:距离上次买的时间的中位数
_dow,_hod:购买时间特征
# _user_reorder_ratio: reorder的总次数 / 第一单后买后的总次数
# _user_total_products: 用户购买的总商品数
# _user_distinct_products: 用户购买的unique商品数
_user_average_basket: 购物蓝的大小
"""
agg_dict_2 = {'order_number':{'_user_total_orders':'max'},
'days_since_prior_order':{'_user_sum_days_since_prior_order':'sum',
'_user_mean_days_since_prior_order': 'mean',
'_user_std_days_since_prior_order':'std'},
'order_dow':{'_user_mean_dow':'mean',
'_user_std_dow':'std'},
'order_hour_of_day':{'_user_mean_hod':'mean',
'_user_std_hod':'std'}
}
users = ka_add_groupby_features_1_vs_n(orders[orders.eval_set == 'prior'], ['user_id'], agg_dict_2)
#用户相关的特征重新写成以下格式,时间缩短为不到20秒
us = pd.concat([
priors_orders_detail.groupby('user_id')['product_id'].count().rename('_user_total_products'),
priors_orders_detail.groupby('user_id')['product_id'].nunique().rename('_user_distinct_products'),
(priors_orders_detail.groupby('user_id')['reordered'].sum() /
priors_orders_detail[priors_orders_detail['order_number'] > 1].groupby('user_id')['order_number'].count()).rename('_user_reorder_ratio')
], axis=1).reset_index()
users = users.merge(us, how='inner')
users['_user_average_basket'] = users._user_total_products / users._user_total_orders
us = orders[orders.eval_set != "prior"][['user_id', 'order_id', 'eval_set', 'days_since_prior_order']]
us.rename(index=str, columns={'days_since_prior_order': 'time_since_last_order'}, inplace=True)
users = users.merge(us, how='inner')
print('User Done')
print(time.ctime())
"""
0730: 加四个关于department和aisle的特征
从数据上看,department和aisle是类似于商品类别的数据,可能会非常有用。因此想知道:
这个department/aisle是否是用户最喜欢的?
这个department/aisle是否是用户最常订购的?
如果好用,可以再加最近订购的参数,类似一样的。
在最后的数据部分,由user_id和department_id/aisle_id一起join,也可以把这些作为一个特征输入进去。看看内存。
0731加入部门和aisle的recency特征
0801加入部门和aisle的dow/hour of day特征
"""
agg_dict_dept = {'user_id':{'_user_dept_total_orders':'count'},
'reordered':{'_user_dept_total_reorders':'sum'},
'days_since_prior_order':{'_user_dept_sum_days_since_prior_order':'sum',
'_user_dept_mean_days_since_prior_order': 'mean',
'_user_dept_std_days_since_prior_order':'std'},
'order_dow':{'_user_dpet_mean_dow':'mean',
'_user_dept_std_dow':'std'},
'order_hour_of_day':{'_user_dept_mean_hod':'mean',
'_user_dept_std_hod':'std'}
}
agg_dict_ais = {'user_id':{'_user_ais_total_orders':'count'},
'reordered':{'_user_ais_total_reorders':'sum'},
'days_since_prior_order':{'_user_ais_sum_days_since_prior_order':'sum',
'_user_ais_mean_days_since_prior_order': 'mean',
'_use_aisr_std_days_since_prior_order':'std'},
'order_dow':{'_user_ais_mean_dow':'mean',
'_user_ais_std_dow':'std'},
'order_hour_of_day':{'_user_ais_mean_hod':'mean',
'_user_ais_std_hod':'std'}
}
user_dept_data = ka_add_groupby_features_1_vs_n(priors_orders_detail,
group_columns_list=['user_id', 'department_id'],
agg_dict=agg_dict_dept)
user_ais_data = ka_add_groupby_features_1_vs_n(priors_orders_detail,
group_columns_list=['user_id', 'aisle_id'],
agg_dict=agg_dict_ais)
user_dept_data['_user_dept_reorder_rate'] = user_dept_data['_user_dept_total_reorders']/user_dept_data['_user_dept_total_orders']
user_ais_data['_user_ais_reorder_rate'] = user_ais_data['_user_ais_total_reorders']/user_ais_data['_user_ais_total_orders']
print('User Ais Dept Done')
print(time.ctime())
"""
特征组3:用户和产品交互特征
0731: 加入:用户与产品的订购recency特征
"""
agg_dict_4 = {'order_number':{'_up_order_count': 'count',
'_up_first_order_number': 'min',
'_up_last_order_number':'max'},
'add_to_cart_order':{'_up_average_cart_position': 'mean',
'_up_cart_position_std':'std'},
'order_dow':{'_user_prd_order_mean_day':'mean',
'_user_prd_order_std_day':'std'},
'order_hour_of_day':{'_order_hod_mean':'mean',
'_order_hod_std':'std'},
'reordered':{'_total_time_of_reorder':'sum'},
'days_since_prior_order':{'_user_prd_sum_days_since_prior_order':'sum',
'_user_prd_mean_days_since_prior_order': 'mean',
'_use_prd_std_days_since_prior_order':'std'}}
data = ka_add_groupby_features_1_vs_n(df=priors_orders_detail,
group_columns_list=['user_id', 'product_id'],
agg_dict=agg_dict_4)
data = data.merge(prd, how='inner', on='product_id').merge(users, how='inner', on='user_id')
print('Data Done 1')
print(time.ctime())
# 该商品购买次数 / 总的订单数
# 最近一次购买商品 - 最后一次购买该商品
# 该商品购买次数 / 第一次购买该商品到最后一次购买商品的的订单数
data['_up_order_rate'] = data._up_order_count / data._user_total_orders
data['_up_order_since_last_order'] = data._user_total_orders - data._up_last_order_number
data['_up_order_rate_since_first_order'] = data._up_order_count / (data._user_total_orders - data._up_first_order_number + 1)
data['_usr_prd_reorder_rate'] = data._total_time_of_reorder/data._up_order_count
data['_usr_prd_buy_rate'] = data._up_order_count/data._user_total_products
# add user_id to train set
train = train.merge(right=orders[['order_id', 'user_id']], how='left', on='order_id')
data = data.merge(train[['user_id', 'product_id', 'reordered']], on=['user_id', 'product_id'], how='left')
data['reordered'] = data['reordered'].fillna(0)
data = data.merge(W2C,on='product_id',how='left')
data = data.merge(ostreak,on=['user_id','product_id'],how='left')
print('Data Done 2')
print(time.ctime())
data = data.merge(orders[['order_id','order_number','order_dow','order_hour_of_day','days_since_prior_order']],on='order_id',how='left')
del train, prd, users,priors_orders_detail, orders
del products,us,aisles,departments,priors,ostreak,W2C
del agg_dict,agg_dict_2,agg_dict_4,agg_dict_ais,agg_dict_dept
data = data.merge(user_dept_data,on=['user_id','department_id'],how='left')
del user_dept_data
data = data.merge(user_ais_data,on=['user_id','aisle_id'],how='left')
del user_ais_data
print('Data Done 3')
print(time.ctime())
#data = data.merge(dept,on='department_id',how='left').merge(ais,on='aisle_id',how='left')
#del dept,ais
#data = data.merge(product_name,on='product_id',how='left')
#del product_name
print('Data Done Final')
print(time.ctime())
train = data[data['eval_set']=='train']
test = data[data['eval_set']=='test']
del data
print('Ends')
print(time.ctime())
col = list(train.columns)
col.remove('reordered')
col.remove('eval_set')
col.remove('aisle_id')
col.remove('department_id')
import lightgbm as lgb
dtrain = lgb.Dataset(train[col],train['reordered'])
lgb_params = {
'learning_rate': 0.1,
'boosting_type': 'gbdt',
'objective': 'binary',
'metric': {'binary_logloss'},
'num_leaves': 120,
'feature_fraction': 0.95,
'bagging_fraction': 0.76,
'bagging_freq': 5,
'max_bin':500
}
print('Start training')
print(time.ctime())
model = lgb.train(lgb_params, dtrain, 700)
print('Start predicting')
print(time.ctime())
pred = model.predict(test[col],num_iteration=700)
print('Prediction Done')
print(time.ctime())
print(time.ctime())
test['reordered'] = pred
sub = sub_file(test,0.20,sample_submission)
test_set2 = test[['user_id','product_id','order_id','reordered']]
test_set2.to_csv('save_test_for_exp_without_median_76_Feats_0.1LR0805.csv',index=False)
sub.to_csv('submission/20170805_without_median_with_original_700Rounds_lgb_0.1LR.csv',index=False)
FI = get_feature_importance_lgb(model)
FI.to_csv('Feature_Importance_sub_model_without_median_Features.csv',index=False) | [
"jiashen.liu@dpa.nl"
] | jiashen.liu@dpa.nl |
fccdf002d3624704682080cfcfad0a8da497660d | 2be8a9f06d4003d12c0a727fb83d284c31a53050 | /HoudiniHotBox17.0/lib/mantrasurface.py | 14fd36488357f5ba0316020a601a4c3c65b4e2c7 | [] | no_license | LiuLiangFx/SmileHotBOX | 7551d9578b2defe612950cb8e3bffdb85024cede | 8bd8eac69b3c2a9824b9aa4488ca77789bea8d85 | refs/heads/master | 2021-01-01T10:22:26.959731 | 2020-02-09T03:16:32 | 2020-02-09T03:16:32 | 239,236,801 | 0 | 0 | null | 2020-02-09T02:47:18 | 2020-02-09T02:47:18 | null | UTF-8 | Python | false | false | 138 | py | import hou
class mantrasurface:
def run(self):
node = hou.node("/shop")
node.createNode("mantrasurface")
| [
"change52092@yahoo.com"
] | change52092@yahoo.com |
086a8df0c8339b236cf7ca37ad68644942a570e2 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_2203.py | d82b9273211c6488b6c4ed00ac417bea5f7bc2fb | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | # I want the actual file name that is returned by a PHP script
urllib.urlretrieve(URL, directory + "\\" + filename + "." + extension)
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
30e1793b5512dded78a7e5ff8f44e7320b2d13fa | 1990b774037a3f576bcee96acfbefba27ecb6537 | /hlsparser.py | a1994743c8aed00b4018984c2b4b570bc66491d2 | [] | no_license | tiagolascasas/Vivado-HLS-Report-Parser | b34475dbf5699e91d1f5f7719954659902c1c2db | 852818a0cf40d7db1d9a97b21d3b054a6c326a13 | refs/heads/master | 2023-04-29T12:58:17.832314 | 2023-03-16T00:15:21 | 2023-03-16T00:15:21 | 241,089,339 | 10 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,480 | py | #!/usr/bin/python
import sys
import csv
import xml.etree.ElementTree as ET
from os import path
usage = '''
Usage:
\thlsparser <path to report> <name of input code> <optimizations>
\t - <path to report>: Relative path to the csynth.xml file (including the file)
\t Alternatively, use -d to assume default location
\t - <name of the input code>: Benchmark name, with commas if it has spaces
\t - <optimizations>: Optimizations performed, with commas if it has spaces
'''
def main(argv):
if len(argv) != 4:
print(usage)
return 1
report = {}
if argv[1] == "-d":
argv[1] = "solution1/syn/report/csynth.xml"
try:
root = ET.parse(argv[1]).getroot()
except OSError:
print("Unable to read specified report file \"" + argv[1] + "\", aborting...")
return 1
print(root)
user_assign = root.find('UserAssignments')
perf_estim = root.find('PerformanceEstimates')
area_estim = root.find('AreaEstimates/Resources')
report['input'] = argv[2]
report['optimizations'] = argv[3]
report['part'] = user_assign.find('Part').text
report['target_clock'] = user_assign.find('TargetClockPeriod').text
report['estim_clock'] = perf_estim.find('SummaryOfTimingAnalysis/EstimatedClockPeriod').text
report['lat_worst'] = perf_estim.find('SummaryOfOverallLatency/Worst-caseLatency').text
report['lat_avg'] = perf_estim.find('SummaryOfOverallLatency/Average-caseLatency').text
report['lat_best'] = perf_estim.find('SummaryOfOverallLatency/Best-caseLatency').text
report['FF'] = area_estim.find('FF').text
report['LUT'] = area_estim.find('LUT').text
report['BRAM'] = area_estim.find('BRAM_18K').text
report['DSP'] = area_estim.find('DSP48E').text
fieldnames = report.keys()
if path.exists('reports.csv'):
print("reports.csv found in current directory, adding...")
with open('reports.csv', 'a', newline='') as output:
writer = csv.DictWriter(output, fieldnames=fieldnames)
writer.writerow(report)
else:
with open('reports.csv', 'w', newline='') as output:
print("reports.csv not found, creating...")
writer = csv.DictWriter(output, fieldnames=fieldnames)
writer.writeheader()
writer.writerow(report)
print("Report for \"" + report['input'] + "\" successfully added to reports.csv")
return 0
if __name__ == "__main__":
main(sys.argv) | [
"tiagolascasas@hotmail.com"
] | tiagolascasas@hotmail.com |
6fe289c397e7616ad6104da5198b94d05879047a | 7b29f5a1a83750399b911d65726848daa46238f8 | /python/numbers_and_math/ex10.py | 644cd17382583411d5576e5fdf199b259fa1aaa0 | [] | no_license | eubnara/study | f9d82c22c9e454001fafe52d825469d6b42d5452 | 55d78985a291dad4166f8108d0181d26b2f7a9e0 | refs/heads/master | 2022-06-14T21:27:16.977421 | 2022-03-05T13:24:41 | 2022-03-05T13:24:41 | 80,288,541 | 8 | 2 | null | 2022-06-11T08:35:48 | 2017-01-28T14:23:48 | C | UTF-8 | Python | false | false | 337 | py | tabby_cat = "\tI'm tabbed in."
persian_cat = "I'm split\non a line."
backshlash_cat = "I'm \\ a \\ cat."
fat_cat = """
I'll do a list:
\t* Cat food
\t* Fishies
\t* Catnip\n\t* Grass
"""
print tabby_cat
print persian_cat
print backshlash_cat
print fat_cat
while True:
for i in ["/", "-", "|", "\\", "|"]:
print "%s\r" % i,
| [
"eubnara@gmail.com"
] | eubnara@gmail.com |
692f257e675d170e264656d73e5feac372dccbe2 | 405ea0d125bf7d33ea4fb889cd71acca723faab3 | /crazyflie_scripts/write_mem.py | 6ffc95f1b74ea0d1aba54d2f32631428901305e2 | [] | no_license | isobee/LUND-Robot-control-ROS | e54e8bfdbc7a9d6150f261f0669a1db2196e366e | 8327139c96aae0d56d7b1d1bf379213279d656d4 | refs/heads/master | 2020-05-20T23:42:11.588540 | 2019-07-09T08:52:40 | 2019-07-09T08:52:40 | 185,808,118 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,624 | py | import logging
import time
import cflib.crtp # noqa
from cflib.crazyflie import Crazyflie
from cflib.crazyflie.mem import LighthouseBsGeometry
from cflib.crazyflie.mem import MemoryElement
from cflib.crazyflie.syncCrazyflie import SyncCrazyflie
# Only output errors from the logging framework
logging.basicConfig(level=logging.ERROR)
class WriteMem:
def __init__(self, uri, bs1, bs2):
self.data_written = False
with SyncCrazyflie(uri, cf=Crazyflie(rw_cache='./cache')) as scf:
mems = scf.cf.mem.get_mems(MemoryElement.TYPE_LH)
count = len(mems)
if count != 1:
raise Exception('Unexpected nr of memories found:', count)
mems[0].geometry_data = [bs1, bs2]
print('Writing data')
mems[0].write_data(self._data_written)
while not self.data_written:
time.sleep(1)
def _data_written(self, mem, addr):
self.data_written = True
print('Data written')
if __name__ == '__main__':
# URI to the Crazyflie to connect to
uri = 'radio://0/80/2M'
# Initialize the low-level drivers (don't list the debug drivers)
cflib.crtp.init_drivers(enable_debug_driver=False)
bs1 = LighthouseBsGeometry()
bs1.origin = [1.0, 2.0, 3.0]
bs1.rotation_matrix = [
[4.0, 5.0, 6.0],
[7.0, 8.0, 9.0],
[10.0, 11.0, 12.0],
]
bs2 = LighthouseBsGeometry()
bs2.origin = [21.0, 22.0, 23.0]
bs2.rotation_matrix = [
[24.0, 25.0, 26.0],
[27.0, 28.0, 29.0],
[30.0, 31.0, 32.0],
]
WriteMem(uri, bs1, bs2)
| [
"ilees3628@gmail.com"
] | ilees3628@gmail.com |
5896ac6c8fcff5b1f132c38fcc1f61503a03fe48 | b319b30ba9816e7fb49e079354caf225927b4d5e | /TapHopSoNguyen.py | 40f7d56a62e719326afbb84d50c415e65e684fbe | [] | no_license | thanhchauns2/Python_PTIT | 2067cdc5b7a3c64dde418f8038e95ae756c90fde | d5a4eb9d3c0bcb2148258a148edd69791488a167 | refs/heads/main | 2023-08-17T19:23:26.860695 | 2021-10-11T11:16:17 | 2021-10-11T11:16:17 | 415,882,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | n = input()
a = sorted(list(map(int,input().split())))
b = sorted(list(map(int,input().split())))
tmp =[]
A = sorted(set(a))
B = sorted(set(b))
for i in A:
if i in B:
print(i,end= ' ')
print()
for i in A:
if i not in B:
print(i,end= ' ')
print()
for i in B:
if i not in A:
print(i,end= ' ')
print()
| [
"noreply@github.com"
] | thanhchauns2.noreply@github.com |
1dd36aeb507e434e6765a47f3ae36ae627f2c171 | 7d3422fea5cf801320aa5a12d8cc85f0936186a1 | /HiggsAnalysis/HiggsToZZ4Leptons/python/preselection/hTozzTo4leptonsPreselection_data_hzzskim_cff.py | 5ee6fa17f73f583f6478cd3d0f845447293ab9f2 | [] | no_license | rehamMaly/MonoHiggs-ZZ-4lanalysis-2017_NEW | f37ac9571078dedd581e5ffd2000ec7befc3d40b | b43f0f66dc98a514d5a09b1637f2f10987a064db | refs/heads/master | 2020-04-02T10:46:02.563372 | 2018-10-23T15:50:13 | 2018-10-23T15:50:13 | 154,353,964 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54,912 | py | import FWCore.ParameterSet.Config as cms
##
SimpleCounter = cms.EDAnalyzer("SimpleCounter")
# Generic MC Truth analysis
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsGenSequence_cff import *
##
# Filter to select 2e2mu events
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMCGenFilter_cfi import *
import HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMCGenFilter_cfi
hTozzTo4leptonsMCGenFilter2e2mu = HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMCGenFilter_cfi.hTozzTo4leptonsMCGenFilter.clone()
hTozzTo4leptonsMCGenFilter2e2mu.HZZ4LeptonsMCFilterLeptonFlavour=cms.int32(3)
# ParticleListDrawer
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMCGenParticleListDrawer_cfi import *
hTozzTo4leptonsMCGenParticleListDrawer2e2mu = HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMCGenParticleListDrawer_cfi.hTozzTo4leptonsMCGenParticleListDrawer.clone()
# Save MC truth:
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMCDumper_cfi import *
import HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMCDumper_cfi
# CP producer:
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCP_cfi import *
hTozzTo4leptonsMCCP=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCP_cfi.hTozzTo4leptonsCP.clone()
hTozzTo4leptonsMCCP.RECOcollName = cms.InputTag("hTozzTo4leptonsMCDumper")
# PF muons
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsPFtoRECOMuon_cfi import *
# PF photons
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsPFfsrPhoton_cfi import *
hTozzTo4leptonsHLTInfo = cms.EDProducer("HZZ4LeptonsHLTInfo",
TriggerResultsTag = cms.InputTag("TriggerResults","","REDIGI311X")
)
# HZZ Skim
from Configuration.Skimming.PDWG_HZZSkim_cff import *
#zzdiMuonSequence = cms.Sequence( goodHzzMuons * diHzzMuons )
#zzdiElectronSequence = cms.Sequence( goodHzzElectrons * diHzzElectrons )
#zzeleMuSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * crossHzzLeptons )
#diHzzSkimleptonsMerger = cms.EDProducer("CandViewMerger",
# src = cms.VInputTag( "diHzzMuons", "diHzzElectrons","crossHzzLeptons")
#)
#diHzzSkimleptonsFilter = cms.EDFilter("CandViewCountFilter",
# src = cms.InputTag("diHzzSkimleptonsMerger"),
# minNumber = cms.uint32(1)
#)
zz4eSequence = cms.Sequence( goodHzzElectrons * hzzKinDiElectrons * hzzMassDiElectrons )
zz2e2mSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * hzzKinDiElectrons * hzzMassDiMuons )
zz2eemSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * hzzKinDiElectrons * hzzMassCrossLeptons )
zz2m2eSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * hzzKinDiMuons * hzzKinDiElectrons )
zz4mSequence = cms.Sequence( goodHzzMuons * hzzKinDiMuons * hzzMassDiMuons )
zz2memSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * hzzKinDiMuons * hzzMassCrossLeptons )
zzem2eSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * hzzKinCrossLeptons * hzzMassDiElectrons )
zzem2mSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * hzzKinCrossLeptons * hzzMassDiMuons )
zzememSequence = cms.Sequence( goodHzzMuons * goodHzzElectrons * hzzKinCrossLeptons * hzzMassCrossLeptons )
diHzzSkimleptonsMerger = cms.EDProducer("CandViewMerger",
src = cms.VInputTag( "hzzKinDiElectrons","hzzMassDiElectrons","hzzMassDiMuons","hzzMassCrossLeptons","hzzKinDiMuons","hzzKinCrossLeptons")
)
diHzzSkimleptonsFilter = cms.EDFilter("CandViewCountFilter",
src = cms.InputTag("diHzzSkimleptonsMerger"),
minNumber = cms.uint32(1)
)
# Use Early Skim and change input collections
useSkimEarlyData='false'
if useSkimEarlyData == 'true':
# electrons
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsElectronSequences_cff import *
ELECTRON_BASE_CUT=("(pt > 5 &&" +
" fbrem > 0 &&" +
" eSuperClusterOverP < 3 &&" +
" hcalOverEcal < 0.15 &&" +
" abs(deltaPhiSuperClusterTrackAtVtx) < 0.10 &&" +
" abs(deltaEtaSuperClusterTrackAtVtx) < 0.02 &&" +
" (( isEB && sigmaIetaIeta < 0.015) ||" +
" (!isEB && sigmaIetaIeta < 0.035)) )");
hTozzTo4leptonsElectronSelector = cms.EDFilter("GsfElectronRefSelector",
src = cms.InputTag("gsfElectrons"),
cut = cms.string(ELECTRON_BASE_CUT),
)
hTozzTo4leptonsElectronSequence=cms.Sequence(hTozzTo4leptonsElectronIdSequence + hTozzTo4leptonsElectronSelector)
# muons
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMuonSelector_cfi import *
TM_ARBITRATION = "numberOfMatches('SegmentAndTrackArbitration')>0";
MUON_BASE_CUT="(isGlobalMuon || (isTrackerMuon && "+TM_ARBITRATION+"))"
hTozzTo4leptonsMuonSelector = cms.EDFilter("MuonRefSelector",
src = cms.InputTag("muons"),
cut = cms.string(MUON_BASE_CUT),
)
else:
# Electron relaxed selection
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsElectronSequences_cff import *
hTozzTo4leptonsElectronSelector=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsElectronSelector_cfi.hTozzTo4leptonsElectronSelector.clone()
hTozzTo4leptonsElectronSelector.electronEtaMax=cms.double(2.5)
hTozzTo4leptonsElectronSelector.electronPtMin=cms.double(3.)
hTozzTo4leptonsElectronSelector.useEleID=cms.bool(False)
#hTozzTo4leptonsElectronSequence=cms.Sequence(hTozzTo4leptonsElectronIdSequence + hTozzTo4leptonsElectronSelector)
hTozzTo4leptonsElectronSequence=cms.Sequence(hTozzTo4leptonsElectronSelector)
# Muon ghost cleaning
from MuonAnalysis.MuonAssociators.muonCleanerBySegments_cfi import *
# Muon relaxed selection
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMuonSelector_cfi import *
hTozzTo4leptonsMuonSelector=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMuonSelector_cfi.hTozzTo4leptonsMuonSelector.clone()
hTozzTo4leptonsMuonSelector.muonCollection = cms.InputTag("cleanMuonsBySegments")
hTozzTo4leptonsMuonSelector.isGlobalMuon=cms.bool(False)
hTozzTo4leptonsMuonSelector.isTrackerMuon=cms.bool(True)
hTozzTo4leptonsMuonSelector.muonPtMin=cms.double(3.)
hTozzTo4leptonsMuonSelector.muonEtaMax=cms.double(2.5)
# hTozzTo4leptonsMuonSelector.muonCollection = cms.InputTag("hTozzTo4leptonsPFtoRECOMuon")
hTozzTo4leptonsMuonSequence=cms.Sequence(hTozzTo4leptonsMuonSelector)
#*******************
#2 Leptons No Presel
#*******************
# zToEE
from HiggsAnalysis.HiggsToZZ4Leptons.zToEE_cfi import *
zToEE.decay = cms.string('hTozzTo4leptonsElectronSelector@+ hTozzTo4leptonsElectronSelector@-')
zToEE.cut = cms.string('mass > 0')
# zToMuMu
from HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi import *
zToMuMu.decay=cms.string('hTozzTo4leptonsMuonSelector@+ hTozzTo4leptonsMuonSelector@-')
zToMuMu.cut = cms.string('mass > 0')
# zToMuMu_SS, zToEE_SS and zToCrossLeptons
from HiggsAnalysis.HiggsToZZ4Leptons.zToMuMuss_cfi import *
zToMuMussplus.decay = cms.string('hTozzTo4leptonsMuonSelector@+ hTozzTo4leptonsMuonSelector@+')
zToMuMussplus.cut = cms.string('mass > 0 && (daughter(0).charge>0 && daughter(1).charge>0)')
zToMuMussminus.decay = cms.string('hTozzTo4leptonsMuonSelector@- hTozzTo4leptonsMuonSelector@-')
zToMuMussminus.cut = cms.string('mass > 0 && (daughter(0).charge<0 && daughter(1).charge<0)')
zToMuMussmerge.src = cms.VInputTag( "zToMuMussplus", "zToMuMussminus")
zToMuMuss=cms.Sequence(zToMuMussplus+zToMuMussminus+zToMuMussmerge)
from HiggsAnalysis.HiggsToZZ4Leptons.zToEEss_cfi import *
zToEEssplus.decay = cms.string('hTozzTo4leptonsElectronSelector@+ hTozzTo4leptonsElectronSelector@+')
zToEEssplus.cut = cms.string('mass > 0 && (daughter(0).charge>0 && daughter(1).charge>0)')
zToEEssminus.decay = cms.string('hTozzTo4leptonsElectronSelector@- hTozzTo4leptonsElectronSelector@-')
zToEEssminus.cut = cms.string('mass > 0 && (daughter(0).charge<0 && daughter(1).charge<0)')
zToEEssmerge.src = cms.VInputTag( "zToEEssplus", "zToEEssminus")
zToEEss=cms.Sequence(zToEEssplus+zToEEssminus+zToEEssmerge)
from HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi import *
zToCrossLeptons.decay = cms.string("hTozzTo4leptonsMuonSelector hTozzTo4leptonsElectronSelector")
zToCrossLeptons.checkCharge = cms.bool(False)
zToCrossLeptons.cut = cms.string('mass > 0')
dileptons = cms.EDProducer("CandViewMerger",
src = cms.VInputTag( "zToEE", "zToMuMu","zToEEssmerge","zToMuMussmerge","zToCrossLeptons")
)
#*******************
#4 Leptons No Presel
#*******************
# hTozzToEEMuMu
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi import *
hTozzTo4leptons=HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi.zToMuMu.clone()
hTozzTo4leptons.decay = cms.string('zToEE zToMuMu')
hTozzTo4leptons.cut = cms.string('mass > 0')
# hTozzToMuMuMuMu
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi import *
hTozzTo4leptonsMMMM=HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi.zToMuMu.clone()
hTozzTo4leptonsMMMM.decay = cms.string('zToMuMu zToMuMu')
hTozzTo4leptonsMMMM.cut = cms.string('mass > 0')
# hTozzToEEEE
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi import *
hTozzTo4leptonsEEEE=HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi.zToMuMu.clone()
hTozzTo4leptonsEEEE.decay = cms.string('zToEE zToEE')
hTozzTo4leptonsEEEE.cut = cms.string('mass > 0')
# other 4 leptons combinations with SS Z and Opposite Flavour Z
quadLeptons4Mu=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
quadLeptons4Mu.decay = cms.string('zToMuMussmerge zToMuMussmerge')
quadLeptons4Mu.cut = cms.string('mass > 0')
quadLeptons2Mu2E=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
quadLeptons2Mu2E.decay = cms.string('zToMuMussmerge zToEEssmerge')
quadLeptons2Mu2E.cut = cms.string('mass > 0')
quadLeptons4E=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
quadLeptons4E.decay = cms.string('zToEEssmerge zToEEssmerge')
quadLeptons4E.cut = cms.string('mass > 0')
#one Z SS and on Z OS
quadLeptonsSSOSele=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
quadLeptonsSSOSele.decay = cms.string('zToEE zToEEssmerge')
quadLeptonsSSOSele.cut = cms.string('mass > 0')
quadLeptonsSSOSmu=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
quadLeptonsSSOSmu.decay = cms.string('zToMuMu zToMuMussmerge')
quadLeptonsSSOSmu.cut = cms.string('mass > 0')
quadLeptonsSSOSmuele=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
quadLeptonsSSOSmuele.decay = cms.string('zToMuMu zToEEssmerge')
quadLeptonsSSOSmuele.cut = cms.string('mass > 0')
quadLeptonsSSOSelemu=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
quadLeptonsSSOSelemu.decay = cms.string('zToEE zToMuMussmerge')
quadLeptonsSSOSelemu.cut = cms.string('mass > 0')
#3Mu+1E, 3E+1Mu
quadLeptons3Mu1E1Z =HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi.hTozzTo4leptons.clone()
quadLeptons3Mu1E1Z.decay = cms.string('zToMuMu zToCrossLeptons')
quadLeptons3Mu1E1Z.checkCharge = cms.bool(False)
quadLeptons3Mu1E1Z.cut = cms.string('mass > 0')
quadLeptons3Mu1E0Z =HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi.hTozzTo4leptons.clone()
quadLeptons3Mu1E0Z.decay = cms.string('zToMuMussmerge zToCrossLeptons')
quadLeptons3Mu1E0Z.checkCharge = cms.bool(False)
quadLeptons3Mu1E0Z.cut = cms.string('mass > 0')
quadLeptons3E1Mu1Z =HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi.hTozzTo4leptons.clone()
quadLeptons3E1Mu1Z.decay = cms.string('zToEE zToCrossLeptons')
quadLeptons3E1Mu1Z.checkCharge = cms.bool(False)
quadLeptons3E1Mu1Z.cut = cms.string('mass > 0')
quadLeptons3E1Mu0Z =HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi.hTozzTo4leptons.clone()
quadLeptons3E1Mu0Z.decay = cms.string('zToEEssmerge zToCrossLeptons')
quadLeptons3E1Mu0Z.checkCharge = cms.bool(False)
quadLeptons3E1Mu0Z.cut = cms.string('mass > 0')
#ME + ME
quadLeptonsCrossZ=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi.hTozzTo4leptons.clone()
quadLeptonsCrossZ.decay = cms.string('zToCrossLeptons zToCrossLeptons')
quadLeptonsCrossZ.checkCharge = cms.bool(False)
quadLeptonsCrossZ.cut = cms.string('mass > 0')
#*******************
#3 Leptons No Presel
#*******************
triLeptonsMuMuMu=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
triLeptonsMuMuMu.decay = cms.string('hTozzTo4leptonsMuonSelector hTozzTo4leptonsMuonSelector hTozzTo4leptonsMuonSelector')
triLeptonsMuMuMu.cut = cms.string("mass > 0");
triLeptonsMuMuE=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
triLeptonsMuMuE.decay = cms.string('hTozzTo4leptonsMuonSelector hTozzTo4leptonsMuonSelector hTozzTo4leptonsElectronSelector')
triLeptonsMuMuE.cut = cms.string("mass > 0");
triLeptonsMuEE=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
triLeptonsMuEE.decay = cms.string('hTozzTo4leptonsMuonSelector hTozzTo4leptonsElectronSelector hTozzTo4leptonsElectronSelector')
triLeptonsMuEE.cut = cms.string("mass > 0");
triLeptonsEEE=HiggsAnalysis.HiggsToZZ4Leptons.zToCrossLeptons_cfi.zToCrossLeptons.clone()
triLeptonsEEE.decay = cms.string('hTozzTo4leptonsElectronSelector hTozzTo4leptonsElectronSelector hTozzTo4leptonsElectronSelector')
triLeptonsEEE.cut = cms.string("mass > 0");
#*************************
#All Combination No Presel
#*************************
allLLLL = cms.EDProducer("CandViewMerger",
src = cms.VInputTag("quadLeptons4Mu","quadLeptons2Mu2E","quadLeptons4E","quadLeptonsSSOSele", "quadLeptonsSSOSmu", "quadLeptonsSSOSmuele", "quadLeptonsSSOSelemu", "quadLeptons3Mu1E1Z","quadLeptons3Mu1E0Z","quadLeptons3E1Mu1Z","quadLeptons3E1Mu0Z", "hTozzTo4leptonsEEEE", "hTozzTo4leptonsMMMM", "hTozzTo4leptons", "quadLeptonsCrossZ" )
)
# Veto electron and muons for isolation
#vetoMuons = cms.EDFilter("MuonRefSelector",
# src = cms.InputTag("muons"),
# cut = cms.string("isGlobalMuon && isTrackerMuon && pt>5")
#)
vetoMuons = cms.EDFilter("MuonRefSelector",
src = cms.InputTag("muons"),
cut = cms.string("(isGlobalMuon || isTrackerMuon) && pt>1.")
)
vetoElectrons = cms.EDFilter("GsfElectronRefSelector",
src = cms.InputTag("gsfElectrons"),
cut = cms.string("pt>7 && gsfTrack().trackerExpectedHitsInner().numberOfHits<2")
)
# Electron Id
from RecoEgamma.ElectronIdentification.cutsInCategoriesElectronIdentificationV06_DataTuning_cfi import *
eidVeryLoose.src = "hTozzTo4leptonsElectronSelector"
eidLoose.src = "hTozzTo4leptonsElectronSelector"
eidMedium.src = "hTozzTo4leptonsElectronSelector"
eidTight.src = "hTozzTo4leptonsElectronSelector"
from RecoEgamma.ElectronIdentification.cutsInCategoriesHZZElectronIdentificationV06_cfi import *
eidHZZVeryLoose.src = "hTozzTo4leptonsElectronSelector"
eidHZZLoose.src = "hTozzTo4leptonsElectronSelector"
eidHZZMedium.src = "hTozzTo4leptonsElectronSelector"
eidHZZHyperTight1.src = "hTozzTo4leptonsElectronSelector"
# MVA Electron ID
from EGamma.EGammaAnalysisTools.electronIdMVAProducer_cfi import *
# mvaTrigV0.electronTag = cms.InputTag("hTozzTo4leptonsElectronSelector")
# mvaNonTrigV0.electronTag = cms.InputTag("hTozzTo4leptonsElectronSelector")
mvaTrigV0.electronTag = cms.InputTag("gsfElectrons")
mvaNonTrigV0.electronTag = cms.InputTag("gsfElectrons")
# Electron Regression
from EGamma.EGammaAnalysisTools.electronRegressionEnergyProducer_cfi import *
# Electron loose isolation
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsElectronIsolationProducerEgamma_cfi import *
hTozzTo4leptonsElectronIsolationProducerEgamma.threshold = cms.double(99999.)
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsElectronIsolationEgammaSequences_cff import *
hTozzTo4leptonsElectronIsolationSequenceEgamma=cms.Sequence(hTozzTo4leptonsElectronIsolationDepositSequence + hTozzTo4leptonsElectronIsolationProducerEgamma)
# Electron PF isolation
from CommonTools.ParticleFlow.PFBRECO_cff import *
from CommonTools.ParticleFlow.Isolation.pfElectronIsolation_cff import *
elPFIsoDepositCharged.src = cms.InputTag("hTozzTo4leptonsElectronSelector")
elPFIsoDepositChargedAll.src = cms.InputTag("hTozzTo4leptonsElectronSelector")
elPFIsoDepositNeutral.src = cms.InputTag("hTozzTo4leptonsElectronSelector")
elPFIsoDepositGamma.src = cms.InputTag("hTozzTo4leptonsElectronSelector")
elPFIsoDepositPU.src = cms.InputTag("hTozzTo4leptonsElectronSelector")
# Muon loose isolation
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMuonIsolationSequences_cff import *
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMuonIsolationProducerMu_cfi import *
hTozzTo4leptonsMuonIsolationProducerMu.threshold=cms.double(99999.)
# Muon PF isolation
from CommonTools.ParticleFlow.PFBRECO_cff import *
from CommonTools.ParticleFlow.Isolation.pfMuonIsolation_cff import *
muPFIsoDepositCharged.src = cms.InputTag("hTozzTo4leptonsMuonSelector")
muPFIsoDepositChargedAll.src = cms.InputTag("hTozzTo4leptonsMuonSelector")
muPFIsoDepositNeutral.src = cms.InputTag("hTozzTo4leptonsMuonSelector")
muPFIsoDepositGamma.src = cms.InputTag("hTozzTo4leptonsMuonSelector")
muPFIsoDepositPU.src = cms.InputTag("hTozzTo4leptonsMuonSelector")
# Photon PF
# from CommonTools.ParticleFlow.PFBRECO_cff import *
from CommonTools.ParticleFlow.Isolation.pfPhotonIsolation_cff import *
phPFIsoDepositCharged.src = cms.InputTag("hTozzTo4leptonsPFfsrPhoton")
phPFIsoDepositChargedAll.src = cms.InputTag("hTozzTo4leptonsPFfsrPhoton")
phPFIsoDepositNeutral.src = cms.InputTag("hTozzTo4leptonsPFfsrPhoton")
phPFIsoDepositGamma.src = cms.InputTag("hTozzTo4leptonsPFfsrPhoton")
phPFIsoDepositPU.src = cms.InputTag("hTozzTo4leptonsPFfsrPhoton")
#from RecoParticleFlow.PFProducer.photonPFIsolationValues_cff import *
#import RecoParticleFlow.PFProducer.photonPFIsolationValues_cff
## phPFIsoValueCharged03PFId.deposits.vetos = cms.vstring('Threshold(0.2)')
## phPFIsoValueNeutral03PFId.deposits.vetos = cms.vstring('Threshold(0.5)')
## phPFIsoValueGamma03PFId.deposits.vetos = cms.vstring('Threshold(0.5)')
## phPFIsoValuePU03PFId.deposits.vetos = cms.vstring('Threshold(0.2)')
# Common preselection
# from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCommonPreselectionSequences_cff import *
# zToEE loose isolated
from HiggsAnalysis.HiggsToZZ4Leptons.zToEE_cfi import *
import HiggsAnalysis.HiggsToZZ4Leptons.zToEE_cfi
zToEELooseIsol = HiggsAnalysis.HiggsToZZ4Leptons.zToEE_cfi.zToEE.clone()
zToEELooseIsol.decay = ('hTozzTo4leptonsElectronSelector@+ hTozzTo4leptonsElectronSelector@-')
zToEELooseIsol.cut = cms.string('mass > 0')
# zToMuMu loose isolated
from HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi import *
import HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi
zToMuMuLooseIsol = HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi.zToMuMu.clone()
zToMuMuLooseIsol.decay = ('hTozzTo4leptonsMuonSelector@+ hTozzTo4leptonsMuonSelector@-')
zToMuMuLooseIsol.cut = cms.string('mass > 0')
# hTozzToEEMuMu loose isolated
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi import *
import HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi
hTozzTo4leptonsLooseIsol=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi.hTozzTo4leptons.clone()
hTozzTo4leptonsLooseIsol.decay = ('zToEELooseIsol zToMuMuLooseIsol')
hTozzTo4leptonsLooseIsol.cut = cms.string('mass > 0')
# hTozzToMuMuMuMu loose isolated
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi import *
hTozzTo4leptonsMMMMLooseIsol=HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi.zToMuMu.clone()
hTozzTo4leptonsMMMMLooseIsol.decay = cms.string('zToMuMuLooseIsol zToMuMuLooseIsol')
hTozzTo4leptonsMMMMLooseIsol.cut = cms.string('mass > 0')
# hTozzToEEEE loose isolated
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptons_cfi import *
hTozzTo4leptonsEEEELooseIsol=HiggsAnalysis.HiggsToZZ4Leptons.zToMuMu_cfi.zToMuMu.clone()
hTozzTo4leptonsEEEELooseIsol.decay = cms.string('zToEELooseIsol zToEELooseIsol')
hTozzTo4leptonsEEEELooseIsol.cut = cms.string('mass > 0')
# 2e2mu best candidate producer
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsBestCandidateProducer_cfi import *
# 4mu best candidate producer
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsBestCandidateProducer_cfi import *
hTozzTo4leptonsBestCandidateProducerMMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsBestCandidateProducer_cfi.hTozzTo4leptonsBestCandidateProducer.clone()
hTozzTo4leptonsBestCandidateProducerMMMM.decaychannel = cms.string('4mu')
hTozzTo4leptonsBestCandidateProducerMMMM.RECOcollName = cms.VInputTag(cms.InputTag("hTozzTo4leptonsMMMMLooseIsol"))
hTozzTo4leptonsBestCandidateProducerMMMM.decayChain = cms.string('hToZZTo4LeptonsBestCandidate')
# 4e best candidate producer
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsBestCandidateProducer_cfi import *
hTozzTo4leptonsBestCandidateProducerEEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsBestCandidateProducer_cfi.hTozzTo4leptonsBestCandidateProducer.clone()
hTozzTo4leptonsBestCandidateProducerEEEE.decaychannel = cms.string('4e')
hTozzTo4leptonsBestCandidateProducerEEEE.RECOcollName = cms.VInputTag(cms.InputTag("hTozzTo4leptonsEEEELooseIsol"))
hTozzTo4leptonsBestCandidateProducerEEEE.decayChain = cms.string('hToZZTo4LeptonsBestCandidate')
# CP producer 2e2mu, 4mu, 4e
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCP_cfi import *
hTozzTo4leptonsCP.RECOcollName = cms.InputTag("hTozzTo4leptonsLooseIsol")
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCP_cfi import *
hTozzTo4leptonsCPMMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCP_cfi.hTozzTo4leptonsCP.clone()
hTozzTo4leptonsCPMMMM.RECOcollName= cms.InputTag("hTozzTo4leptonsMMMMLooseIsol")
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCP_cfi import *
hTozzTo4leptonsCPEEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCP_cfi.hTozzTo4leptonsCP.clone()
hTozzTo4leptonsCPEEEE.RECOcollName = cms.InputTag("hTozzTo4leptonsEEEELooseIsol")
# 3D IP KF
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducerKF=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
hTozzTo4leptonsIpToVtxProducerKF.VertexLabel = cms.InputTag("offlinePrimaryVertices")
# Deterministic annealing - default now
# from RecoVertex.PrimaryVertexProducer.OfflinePrimaryVerticesDA_cfi import *
# 3D IP DA
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducer.VertexLabel = cms.InputTag("offlinePrimaryVertices")
# 2D IP DA
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsTipLipToVtxProducer_cfi import *
hTozzTo4leptonsTipLipToVtxProducer.VertexLabel = cms.InputTag("offlinePrimaryVertices")
## # Geometrical Discriminator
## from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsGeomDiscrimProducer_cfi import *
## hTozzTo4leptonsGeomDiscrimProducer.RECOcollName = cms.InputTag("hTozzTo4leptonsLooseIsol")
## hTozzTo4leptonsGeomDiscrimProducerMMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsGeomDiscrimProducer_cfi.hTozzTo4leptonsGeomDiscrimProducer.clone()
## hTozzTo4leptonsGeomDiscrimProducerMMMM.RECOcollName=cms.InputTag("hTozzTo4leptonsMMMMLooseIsol")
## hTozzTo4leptonsGeomDiscrimProducerEEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsGeomDiscrimProducer_cfi.hTozzTo4leptonsGeomDiscrimProducer.clone()
## hTozzTo4leptonsGeomDiscrimProducerEEEE.RECOcollName=cms.InputTag("hTozzTo4leptonsEEEELooseIsol")
# Constrained fit: input 4l:2e2mu
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi import *
hTozzTo4leptonsConstraintFitProducer.RECOcollName = cms.InputTag("hTozzTo4leptonsLooseIsol")
hTozzTo4leptonsConstraintFitProducer.VertexLabel = cms.InputTag("offlinePrimaryVertices")
# 4mu
hTozzTo4leptonsConstraintFitProducerMMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi.hTozzTo4leptonsConstraintFitProducer.clone()
hTozzTo4leptonsConstraintFitProducerMMMM.RECOcollName =cms.InputTag("hTozzTo4leptonsMMMMLooseIsol")
hTozzTo4leptonsConstraintFitProducerMMMM.VertexLabel = cms.InputTag("offlinePrimaryVertices")
# 4e
hTozzTo4leptonsConstraintFitProducerEEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi.hTozzTo4leptonsConstraintFitProducer.clone()
hTozzTo4leptonsConstraintFitProducerEEEE.RECOcollName =cms.InputTag("hTozzTo4leptonsEEEELooseIsol")
hTozzTo4leptonsConstraintFitProducerEEEE.VertexLabel = cms.InputTag("offlinePrimaryVertices")
# 3D IP with GDvertexFitter
## from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
## hTozzTo4leptonsIpToVtxProducerGD=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
## hTozzTo4leptonsIpToVtxProducerGD.VertexLabel = cms.InputTag("hTozzTo4leptonsGeomDiscrimProducer:GDFitVertex")
## from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
## hTozzTo4leptonsIpToVtxProducerGDMMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
## hTozzTo4leptonsIpToVtxProducerGDMMMM.VertexLabel = cms.InputTag("hTozzTo4leptonsGeomDiscrimProducerMMMM:GDFitVertex")
## from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
## hTozzTo4leptonsIpToVtxProducerGDEEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
## hTozzTo4leptonsIpToVtxProducerGDEEEE.VertexLabel = cms.InputTag("hTozzTo4leptonsGeomDiscrimProducerEEEE:GDFitVertex")
# 3D IP with Standard vertexFitter
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducerStd=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
hTozzTo4leptonsIpToVtxProducerStd.VertexLabel = cms.InputTag("hTozzTo4leptonsConstraintFitProducer:StandardFitVertex")
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducerStdMMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
hTozzTo4leptonsIpToVtxProducerStdMMMM.VertexLabel = cms.InputTag("hTozzTo4leptonsConstraintFitProducerMMMM:StandardFitVertex")
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducerStdEEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
hTozzTo4leptonsIpToVtxProducerStdEEEE.VertexLabel = cms.InputTag("hTozzTo4leptonsConstraintFitProducerEEEE:StandardFitVertex")
# 3D IP with Kinematic vertexFitter
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducerKin=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
hTozzTo4leptonsIpToVtxProducerKin.VertexLabel = cms.InputTag("hTozzTo4leptonsConstraintFitProducer:KinematicFitVertex")
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducerKinMMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
hTozzTo4leptonsIpToVtxProducerKinMMMM.VertexLabel = cms.InputTag("hTozzTo4leptonsConstraintFitProducerMMMM:KinematicFitVertex")
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi import *
hTozzTo4leptonsIpToVtxProducerKinEEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsIpToVtxProducer_cfi.hTozzTo4leptonsIpToVtxProducer.clone()
hTozzTo4leptonsIpToVtxProducerKinEEEE.VertexLabel = cms.InputTag("hTozzTo4leptonsConstraintFitProducerEEEE:KinematicFitVertex")
# Matching sequence
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsMatchingSequence_cff import *
# COMMON ROOT TREE
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCommonRootTree_cfi import *
hTozzTo4leptonsCommonRootTreePresel=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsCommonRootTree_cfi.hTozzTo4leptonsCommonRootTree.clone()
hTozzTo4leptonsCommonRootTreePresel.decaychannel = cms.string('2e2mu')
hTozzTo4leptonsCommonRootTreePresel.rootFileName = cms.untracked.string('roottree_leptons.root')
# hlt
hTozzTo4leptonsCommonRootTreePresel.fillHLTinfo = cms.untracked.bool(True)
hTozzTo4leptonsCommonRootTreePresel.HLTAnalysisinst = cms.string('hTozzTo4leptonsHLTAnalysisData')
# Data DoubleElectron and Muon
# MC 3_11_2
hTozzTo4leptonsCommonRootTreePresel.flagHLTnames=cms.VInputTag(cms.InputTag("flagHLTL1DoubleMuOpen"),cms.InputTag("flagHLTL2DoubleMu0"),cms.InputTag("flagHLTL2DoubleMu20NoVertexv1"),cms.InputTag("flagHLTDoubleMu0"),cms.InputTag("flagHLTDoubleMu0Quarkoniumv1"),cms.InputTag("flagHLTDoubleMu3v2"),cms.InputTag("flagHLTDoubleMu5v1"),cms.InputTag("flagHLTEle10SWL1Rv2"),cms.InputTag("flagHLTEle12SWTighterEleIdL1Rv2"),cms.InputTag("flagHLTEle17SWL1Rv2"),cms.InputTag("flagHLTEle17SWIsolL1Rv2"),cms.InputTag("flagHLTEle17SWTighterEleIdIsolL1Rv3"),cms.InputTag("flagHLTEle17SWTightCaloEleIdEle8HEL1Rv2"),cms.InputTag("flagHLTEle22SWL1Rv2"),cms.InputTag("flagHLTEle22SWTighterCaloIdIsolL1Rv2"),cms.InputTag("flagHLTEle22SWTighterEleIdL1Rv3"),cms.InputTag("flagHLTEle32SWTighterEleIdL1Rv2"),cms.InputTag("flagHLTPhoton20IsolCleanedL1Rv1"),cms.InputTag("flagHLTDoubleEle17SWL1Rv1"),cms.InputTag("flagHLTaccept"))
# skimEarlyData
if useSkimEarlyData == 'true':
hTozzTo4leptonsCommonRootTreePresel.useSkimEarlyData = cms.untracked.bool(True)
else:
hTozzTo4leptonsCommonRootTreePresel.useSkimEarlyData = cms.untracked.bool(False)
hTozzTo4leptonsCommonRootTreePresel.SkimEarlyDataAnalysisinst = cms.string('hTozzTo4leptonsSkimEarlyDataAnalysis')
hTozzTo4leptonsCommonRootTreePresel.flagSkimEarlyDatanames=cms.VInputTag(cms.InputTag("flagSkimhighEnergyMuons"),cms.InputTag("flagSkimhighEnergyElectrons"),cms.InputTag("flagSkimrecoWMNfromPf"),cms.InputTag("flagSkimrecoWMNfromTc"),cms.InputTag("flagSkimrecoWENfromPf"),cms.InputTag("flagSkimrecoWENfromTc"),cms.InputTag("flagSkimdiMuonsJPsi"),cms.InputTag("flagSkimdiMuonsZ"),cms.InputTag("flagSkimdiElectronsZ"),cms.InputTag("flagSkimtriLeptonsMuMuMu"),cms.InputTag("flagSkimtriLeptonsMuMuEl"),cms.InputTag("flagSkimtriLeptonsMuElEl"),cms.InputTag("flagSkimtriLeptonsElElEl"),cms.InputTag("flagSkimquadLeptons4Mu"),cms.InputTag("flagSkimquadLeptons2Mu2El"),cms.InputTag("flagSkimquadLeptons4El"))
# presel
hTozzTo4leptonsCommonRootTreePresel.flaginst = cms.string('hTozzTo4leptonsCommonPreselection')
hTozzTo4leptonsCommonRootTreePresel.flagtags = cms.vstring('PreselAtleast2Ele','PreselAtleast2Mu','PreselAtleast1ZEE','PreselAtleast1ZMuMu','PreselAtleast1H','PreselLoose2IsolEle','PreselLoose2IsolMu')
# MC truth
hTozzTo4leptonsCommonRootTreePresel.fillMCTruth = cms.untracked.bool(False)
hTozzTo4leptonsCommonRootTreePresel.MCcollName = cms.InputTag("hTozzTo4leptonsMCDumper")
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameBest2e2mu= cms.VInputTag(cms.InputTag("hTozzTo4leptonsBestCandidateProducer:hToZZTo4LeptonsBestCandidateMother"), cms.InputTag("hTozzTo4leptonsBestCandidateProducer:hToZZTo4LeptonsBestCandidateBoson0"), cms.InputTag("hTozzTo4leptonsBestCandidateProducer:hToZZTo4LeptonsBestCandidateBoson1"))
#hTozzTo4leptonsCommonRootTreePresel.RECOcollName=cms.VInputTag(cms.InputTag("hTozzTo4leptonsLooseIsol"),cms.InputTag("zToMuMuLooseIsol"), cms.InputTag("zToEELooseIsol"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameBest4mu= cms.VInputTag(cms.InputTag("hTozzTo4leptonsBestCandidateProducerMMMM:hToZZTo4LeptonsBestCandidateMother"), cms.InputTag("hTozzTo4leptonsBestCandidateProducerMMMM:hToZZTo4LeptonsBestCandidateBoson0"), cms.InputTag("hTozzTo4leptonsBestCandidateProducerMMMM:hToZZTo4LeptonsBestCandidateBoson1"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameBest4e= cms.VInputTag(cms.InputTag("hTozzTo4leptonsBestCandidateProducerEEEE:hToZZTo4LeptonsBestCandidateMother"), cms.InputTag("hTozzTo4leptonsBestCandidateProducerEEEE:hToZZTo4LeptonsBestCandidateBoson0"), cms.InputTag("hTozzTo4leptonsBestCandidateProducerEEEE:hToZZTo4LeptonsBestCandidateBoson1"))
hTozzTo4leptonsCommonRootTreePresel.useAdditionalRECO = cms.untracked.bool(True)
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameZ =cms.VInputTag(cms.InputTag("zToMuMu"), cms.InputTag("zToEE"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameZss = cms.VInputTag(cms.InputTag("zToMuMussmerge"),cms.InputTag("zToEEssmerge"),cms.InputTag("zToCrossLeptons"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameDiLep = cms.InputTag("dileptons")
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameEEMM = cms.VInputTag(cms.InputTag("hTozzTo4leptonsLooseIsol"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameMMMM = cms.VInputTag(cms.InputTag("hTozzTo4leptonsMMMMLooseIsol"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameEEEE = cms.VInputTag(cms.InputTag("hTozzTo4leptonsEEEELooseIsol"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameLLL = cms.VInputTag(cms.InputTag("triLeptonsMuMuMu"),cms.InputTag("triLeptonsMuMuE"),cms.InputTag("triLeptonsMuEE"),cms.InputTag("triLeptonsEEE"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameLLLLss = cms.VInputTag(cms.InputTag("quadLeptons4Mu"),cms.InputTag("quadLeptons2Mu2E"),cms.InputTag("quadLeptons4E"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameLLLLssos = cms.VInputTag(cms.InputTag("quadLeptonsSSOSele"),cms.InputTag("quadLeptonsSSOSmu"),cms.InputTag("quadLeptonsSSOSelemu"),cms.InputTag("quadLeptonsSSOSmuele"))
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameLLLl = cms.VInputTag(cms.InputTag("quadLeptons3Mu1E0Z"),cms.InputTag("quadLeptons3Mu1E1Z"),cms.InputTag(
"quadLeptons3E1Mu0Z"),cms.InputTag("quadLeptons3E1Mu1Z") )
hTozzTo4leptonsCommonRootTreePresel.RECOcollNameLLLL = cms.InputTag("allLLLL")
hTozzTo4leptonsCommonRootTreePresel.MuonsLabel = cms.InputTag("hTozzTo4leptonsMuonSelector")
hTozzTo4leptonsCommonRootTreePresel.MuonsMapLabel = cms.InputTag("hTozzTo4leptonsMuonSelector")
hTozzTo4leptonsCommonRootTreePresel.MuonsTkMapLabel = cms.InputTag("muIsoFromDepsTkOptimized")
hTozzTo4leptonsCommonRootTreePresel.MuonsEcalMapLabel = cms.InputTag("muIsoFromDepsEcalOptimized")
hTozzTo4leptonsCommonRootTreePresel.MuonsHcalMapLabel = cms.InputTag("muIsoFromDepsHcalOptimized")
hTozzTo4leptonsCommonRootTreePresel.MuonsLabelVert = cms.InputTag("hTozzTo4leptonsMuonSelector")
hTozzTo4leptonsCommonRootTreePresel.MuonsMapLabelVert = cms.InputTag("hTozzTo4leptonsIpToVtxProducer:VertexMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsMapLabelVertValue = cms.InputTag("hTozzTo4leptonsIpToVtxProducer:VertexValueMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsMapLabelVertError = cms.InputTag("hTozzTo4leptonsIpToVtxProducer:VertexErrorMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsSTIPMapLabelVert = cms.InputTag("hTozzTo4leptonsTipLipToVtxProducer:TipMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsSLIPMapLabelVert = cms.InputTag("hTozzTo4leptonsTipLipToVtxProducer:LipMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsSTIPMapLabelVertValue = cms.InputTag("hTozzTo4leptonsTipLipToVtxProducer:TipValueMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsSLIPMapLabelVertValue = cms.InputTag("hTozzTo4leptonsTipLipToVtxProducer:LipValueMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsSTIPMapLabelVertError = cms.InputTag("hTozzTo4leptonsTipLipToVtxProducer:TipErrorMuMap")
hTozzTo4leptonsCommonRootTreePresel.MuonsSLIPMapLabelVertError = cms.InputTag("hTozzTo4leptonsTipLipToVtxProducer:LipErrorMuMap")
hTozzTo4leptonsCommonRootTreePresel.ElectronsEgmLabel = cms.InputTag("hTozzTo4leptonsElectronSelector")
hTozzTo4leptonsCommonRootTreePresel.ElectronsEgmTkMapLabel = cms.InputTag("eleIsoFromDepsTkOptimized")
hTozzTo4leptonsCommonRootTreePresel.ElectronsEgmEcalMapLabel = cms.InputTag("eleIsoFromDepsEcalFromHitsByCrystalOptimized")
hTozzTo4leptonsCommonRootTreePresel.ElectronsEgmHcalMapLabel = cms.InputTag("eleIsoFromDepsHcalFromTowersOptimized")
hTozzTo4leptonsCommonRootTreePresel.ElectronsLabelVert = cms.InputTag("hTozzTo4leptonsElectronSelector")
hTozzTo4leptonsCommonRootTreePresel.ElectronsMapLabelVert = cms.InputTag("hTozzTo4leptonsIpToVtxProducer:VertexEleMap")
hTozzTo4leptonsCommonRootTreePresel.ElectronsMapLabelVertValue = cms.InputTag("hTozzTo4leptonsIpToVtxProducer:VertexValueEleMap")
hTozzTo4leptonsCommonRootTreePresel.ElectronsMapLabelVertError = cms.InputTag("hTozzTo4leptonsIpToVtxProducer:VertexErrorEleMap")
# CP variables
hTozzTo4leptonsCommonRootTreePresel.MCCP_PhiLabel = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPPhi")
hTozzTo4leptonsCommonRootTreePresel.MCCP_Phi1Label = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPPhi1")
hTozzTo4leptonsCommonRootTreePresel.MCCP_Phi2Label = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPPhi2")
hTozzTo4leptonsCommonRootTreePresel.MCCP_phi1RFLabel = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPphi1RF")
hTozzTo4leptonsCommonRootTreePresel.MCCP_phi2RFLabel = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPphi2RF")
hTozzTo4leptonsCommonRootTreePresel.MCCP_cosThetaStarLabel = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPcosThetaStar")
hTozzTo4leptonsCommonRootTreePresel.MCCP_cosTheta1Label = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPcosTheta1")
hTozzTo4leptonsCommonRootTreePresel.MCCP_cosTheta2Label = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPcosTheta2")
hTozzTo4leptonsCommonRootTreePresel.MCCP_MELALabel = cms.InputTag("hTozzTo4leptonsMCCP:hToZZTo4LeptonsCPMELA")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_PhiLabel = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPPhi")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_Phi1Label = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPPhi1")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_Phi2Label = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPPhi2")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_phi1RFLabel = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPphi1RF")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_phi2RFLabel = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPphi2RF")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_cosThetaStarLabel = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPcosThetaStar")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_cosTheta1Label = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPcosTheta1")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_cosTheta2Label = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPcosTheta2")
hTozzTo4leptonsCommonRootTreePresel.CP2e2mu_MELALabel = cms.InputTag("hTozzTo4leptonsCP:hToZZTo4LeptonsCPMELA")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_PhiLabel = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPPhi")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_Phi1Label = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPPhi1")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_Phi2Label = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPPhi2")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_phi1RFLabel = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPphi1RF")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_phi2RFLabel = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPphi2RF")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_cosThetaStarLabel = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPcosThetaStar")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_cosTheta1Label = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPcosTheta1")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_cosTheta2Label = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPcosTheta2")
hTozzTo4leptonsCommonRootTreePresel.CP4mu_MELALabel = cms.InputTag("hTozzTo4leptonsCPMMMM:hToZZTo4LeptonsCPMELA")
hTozzTo4leptonsCommonRootTreePresel.CP4e_PhiLabel = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPPhi")
hTozzTo4leptonsCommonRootTreePresel.CP4e_Phi1Label = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPPhi1")
hTozzTo4leptonsCommonRootTreePresel.CP4e_Phi2Label = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPPhi2")
hTozzTo4leptonsCommonRootTreePresel.CP4e_phi1RFLabel = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPphi1RF")
hTozzTo4leptonsCommonRootTreePresel.CP4e_phi2RFLabel = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPphi2RF")
hTozzTo4leptonsCommonRootTreePresel.CP4e_cosThetaStarLabel = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPcosThetaStar")
hTozzTo4leptonsCommonRootTreePresel.CP4e_cosTheta1Label = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPcosTheta1")
hTozzTo4leptonsCommonRootTreePresel.CP4e_cosTheta2Label = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPcosTheta2")
hTozzTo4leptonsCommonRootTreePresel.CP4e_MELALabel = cms.InputTag("hTozzTo4leptonsCPEEEE:hToZZTo4LeptonsCPMELA")
######Conversion
from HiggsAnalysis.HiggsToZZ4Leptons.ConvValueMapProd_cfi import *
#### FastJet corrections
# from RecoJets.JetProducers.kt4PFJets_cfi import *
# import RecoJets.JetProducers.kt4PFJets_cfi
# kt6corPFJets=RecoJets.JetProducers.kt4PFJets_cfi.kt4PFJets.clone()
# kt6corPFJets.rParam = cms.double(0.6)
# kt6corPFJets.doRhoFastjet = cms.bool(True)
# kt6corPFJets.Rho_EtaMax = cms.double(2.5)
# kt6corPFJets.Ghost_EtaMax = cms.double(2.5)
from RecoJets.Configuration.RecoPFJets_cff import *
kt6PFJetsCentral = kt6PFJets.clone(
Ghost_EtaMax = cms.double(2.5),
Rho_EtaMax = cms.double(2.5),
)
#PFJet ID
from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsPFJetSelector_cfi import *
#PFJet Energy Corrections
from JetMETCorrections.Configuration.DefaultJEC_cff import *
from JetMETCorrections.Configuration.JetCorrectionServices_cff import *
from JetMETCorrections.Configuration.JetCorrectionServicesAllAlgos_cff import *
ak5PFJetsCorrection = ak5PFJetsL1FastL2L3.clone()
ak5PFJetsCorrection.src = cms.InputTag('hTozzTo4leptonsPFJetSelector')
ak5PFJetsCorrectionData = ak5PFJetsL1FastL2L3Residual.clone()
ak5PFJetsCorrectionData.src = cms.InputTag('hTozzTo4leptonsPFJetSelector')
from CMGTools.External.pujetidsequence_cff import puJetId
from CMGTools.External.pujetidsequence_cff import puJetMva
recoPuJetIdMC = puJetId.clone(
jets = cms.InputTag("ak5PFJetsCorrection"),
)
recoPuJetMvaMC = puJetMva.clone(
jets = cms.InputTag("ak5PFJetsCorrection"),
jetids = cms.InputTag("recoPuJetIdMC"),
)
recoPuJetIdMCsequence=cms.Sequence(recoPuJetIdMC * recoPuJetMvaMC)
recoPuJetIdData = puJetId.clone(
jets = cms.InputTag("ak5PFJetsCorrectionData"),
)
recoPuJetMvaData = puJetMva.clone(
jets = cms.InputTag("ak5PFJetsCorrectionData"),
jetids = cms.InputTag("recoPuJetIdData"),
)
recoPuJetIdDatasequence=cms.Sequence(recoPuJetIdData * recoPuJetMvaData)
# Constrained fit: input 2l
#from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducerLeptons_cfi import *
hTozzTo4leptonsConstraintFitProducerDiLeptons=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi.hTozzTo4leptonsConstraintFitProducer.clone()
hTozzTo4leptonsConstraintFitProducerDiLeptons.RECOcollName = cms.InputTag("dileptons")
hTozzTo4leptonsConstraintFitProducerDiLeptons.nParticles = cms.uint32(2)
# Constrained fit: input 3l: MMM
# from HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducerLeptons_cfi import *
hTozzTo4leptonsConstraintFitProducerTriLeptonsMMM=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi.hTozzTo4leptonsConstraintFitProducer.clone()
hTozzTo4leptonsConstraintFitProducerTriLeptonsMMM.VertexLabel = cms.InputTag("offlinePrimaryVertices")
hTozzTo4leptonsConstraintFitProducerTriLeptonsMMM.RECOcollName =cms.InputTag("triLeptonsMuMuMu")
hTozzTo4leptonsConstraintFitProducerTriLeptonsMMM.nParticles = cms.uint32(3)
# MME
hTozzTo4leptonsConstraintFitProducerTriLeptonsMME=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi.hTozzTo4leptonsConstraintFitProducer.clone()
hTozzTo4leptonsConstraintFitProducerTriLeptonsMME.VertexLabel = cms.InputTag("offlinePrimaryVertices")
hTozzTo4leptonsConstraintFitProducerTriLeptonsMME.RECOcollName =cms.InputTag("triLeptonsMuMuE")
hTozzTo4leptonsConstraintFitProducerTriLeptonsMME.nParticles = cms.uint32(3)
# EEE
hTozzTo4leptonsConstraintFitProducerTriLeptonsEEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi.hTozzTo4leptonsConstraintFitProducer.clone()
hTozzTo4leptonsConstraintFitProducerTriLeptonsEEE.VertexLabel = cms.InputTag("offlinePrimaryVertices")
hTozzTo4leptonsConstraintFitProducerTriLeptonsEEE.RECOcollName =cms.InputTag("triLeptonsEEE")
hTozzTo4leptonsConstraintFitProducerTriLeptonsEEE.nParticles = cms.uint32(3)
# EEM
hTozzTo4leptonsConstraintFitProducerTriLeptonsMEE=HiggsAnalysis.HiggsToZZ4Leptons.hTozzTo4leptonsConstraintFitProducer_cfi.hTozzTo4leptonsConstraintFitProducer.clone()
hTozzTo4leptonsConstraintFitProducerTriLeptonsMEE.VertexLabel = cms.InputTag("offlinePrimaryVertices")
hTozzTo4leptonsConstraintFitProducerTriLeptonsMEE.RECOcollName =cms.InputTag("triLeptonsMuEE")
hTozzTo4leptonsConstraintFitProducerTriLeptonsMEE.nParticles = cms.uint32(3)
from PhysicsTools.PatAlgos.triggerLayer1.triggerProducer_cfi import *
patTrigger.processName=cms.string( "REDIGI311X")
muonTriggerMatchHLT = cms.EDProducer( 'PATTriggerMatcherDRDPtLessByR',
src = cms.InputTag( 'hTozzTo4leptonsMuonSelector' ),
matched = cms.InputTag( 'patTrigger' ),
matchedCuts = cms.string( 'path( "HLT_DoubleMu5_v*" ) || path( "HLT_DoubleMu7_v*" ) ' ),
andOr = cms.bool( False ),
filterIdsEnum = cms.vstring( '*' ),
filterIds = cms.vint32( 0 ),
filterLabels = cms.vstring( '*' ),
pathNames = cms.vstring( '*' ),
collectionTags = cms.vstring( 'hltL3MuonCandidates' ),
maxDPtRel = cms.double( 1. ),
maxDeltaR = cms.double( 0.2 ),
resolveAmbiguities = cms.bool( True ),
resolveByMatchQuality = cms.bool( False )
)
muonTriggerMatchHLTasym = cms.EDProducer( 'PATTriggerMatcherDRDPtLessByR',
src = cms.InputTag( 'hTozzTo4leptonsMuonSelector' ),
matched = cms.InputTag( 'patTrigger' ),
matchedCuts = cms.string( 'path( "HLT_Mu13_Mu8_v*" ) || path( "HLT_Mu17_Mu8_v*" )' ),
andOr = cms.bool( False ),
filterIdsEnum = cms.vstring( '*' ),
filterIds = cms.vint32( 0 ),
filterLabels = cms.vstring( '*' ),
pathNames = cms.vstring( '*' ),
collectionTags = cms.vstring( 'hltL3MuonCandidates' ),
maxDPtRel = cms.double( 1. ),
maxDeltaR = cms.double( 0.2 ),
resolveAmbiguities = cms.bool( True ),
resolveByMatchQuality = cms.bool( False )
)
electronTriggerMatchHLT = cms.EDProducer( 'PATTriggerMatcherDRDPtLessByR',
src = cms.InputTag( 'hTozzTo4leptonsElectronSelector' ),
matched = cms.InputTag( 'patTrigger' ),
matchedCuts = cms.string( 'path( "HLT_Ele17_CaloIdL_CaloIsoVL_Ele8_CaloIdL_CaloIsoVL_v*" ) || path( "HLT_Ele17_CaloIdT_TrkIdVL_CaloIsoVL_TrkIsoVL_Ele8_CaloIdT_TrkIdVL_CaloIsoVL_TrkIsoVL_v*" ) || path( "HLT_Ele17_SW_TightCaloEleId_Ele8HE_L1R_v*" )' ),
andOr = cms.bool( False ),
filterIdsEnum = cms.vstring( '*' ),
filterIds = cms.vint32( 0 ),
filterLabels = cms.vstring( '*' ),
pathNames = cms.vstring( '*' ),
collectionTags = cms.vstring( '*' ),
maxDPtRel = cms.double( 1. ),
maxDeltaR = cms.double( 0.2 ),
resolveAmbiguities = cms.bool( True ),
resolveByMatchQuality = cms.bool( False )
)
#from PhysicsTools.PatAlgos.triggerLayer1.triggerMatcher_cfi import *
#cleanMuonTriggerMatchHLTDoubleIsoMu3.src= cms.InputTag( "hTozzTo4leptonsMuonSelector" )
#cleanMuonTriggerMatchHLTDoubleIsoMu3.matchedCuts = cms.string( 'path( "HLT_DoubleMu3_v*" )' )
#from PhysicsTools.PatAlgos.triggerLayer1.triggerEventProducer_cfi import *
#patTriggerEvent.patTriggerMatches = cms.VInputTag("muonTriggerMatchHLT")
#patTriggerEvent.processName= cms.string( 'REDIGI311X' ) # default; change only, if you know exactly, what you are doing!
hTozzTo4leptonsSelectionSequenceData = cms.Sequence(
SimpleCounter +
# GenSequence +
# hTozzTo4leptonsMCGenFilter2e2mu +
# hTozzTo4leptonsMCGenParticleListDrawer2e2mu +
# hTozzTo4leptonsMCDumper +
# hTozzTo4leptonsMCCP +
hTozzTo4leptonsPFtoRECOMuon +
hTozzTo4leptonsPFfsrPhoton +
# zzdiMuonSequence +
# zzdiElectronSequence +
# zzeleMuSequence +
# diHzzSkimleptonsMerger +
# diHzzSkimleptonsFilter +
zz4eSequence +
zz2e2mSequence +
zz2eemSequence +
zz2m2eSequence +
zz4mSequence +
zz2memSequence +
zzem2eSequence +
zzem2mSequence +
diHzzSkimleptonsMerger +
diHzzSkimleptonsFilter +
hTozzTo4leptonsHLTInfo +
hTozzTo4leptonsElectronSelector +
mvaTrigV0 +
mvaNonTrigV0 +
muonCleanerBySegments +
hTozzTo4leptonsMuonSelector +
zToEE +
zToMuMu +
hTozzTo4leptons +
hTozzTo4leptonsMMMM +
hTozzTo4leptonsEEEE +
# additional collection
zToEEss +
zToMuMuss +
zToCrossLeptons +
dileptons +
triLeptonsMuMuMu +
triLeptonsMuMuE +
triLeptonsMuEE +
triLeptonsEEE +
quadLeptons4Mu +
quadLeptons2Mu2E +
quadLeptons4E +
quadLeptons3Mu1E0Z +
quadLeptons3Mu1E1Z +
quadLeptons3E1Mu0Z +
quadLeptons3E1Mu1Z +
quadLeptonsCrossZ +
quadLeptonsSSOSmu +
quadLeptonsSSOSele +
quadLeptonsSSOSmuele +
quadLeptonsSSOSelemu +
allLLLL +
vetoMuons +
vetoElectrons +
hTozzTo4leptonsElectronIsolationSequenceEgamma +
eidVeryLoose +
eidLoose +
eidMedium +
eidTight +
eidHZZVeryLoose +
eidHZZLoose +
eidHZZMedium +
eidHZZHyperTight1 +
hTozzTo4leptonsMuonIsolationSequence +
hTozzTo4leptonsMuonIsolationProducerMu +
# PF isolation for electrons and muons
pfParticleSelectionSequence +
pfElectronIsolationSequence +
pfMuonIsolationSequence +
pfPhotonIsolationSequence +
#
zToEELooseIsol +
zToMuMuLooseIsol +
hTozzTo4leptonsLooseIsol +
hTozzTo4leptonsMMMMLooseIsol +
hTozzTo4leptonsEEEELooseIsol +
hTozzTo4leptonsBestCandidateProducer +
hTozzTo4leptonsBestCandidateProducerMMMM +
hTozzTo4leptonsBestCandidateProducerEEEE +
hTozzTo4leptonsCP +
hTozzTo4leptonsCPMMMM +
hTozzTo4leptonsCPEEEE +
hTozzTo4leptonsIpToVtxProducerKF +
# offlinePrimaryVerticesDA +
hTozzTo4leptonsIpToVtxProducer +
hTozzTo4leptonsTipLipToVtxProducer +
## hTozzTo4leptonsGeomDiscrimProducer +
## hTozzTo4leptonsGeomDiscrimProducerMMMM +
## hTozzTo4leptonsGeomDiscrimProducerEEEE +
hTozzTo4leptonsConstraintFitProducer +
hTozzTo4leptonsConstraintFitProducerMMMM +
hTozzTo4leptonsConstraintFitProducerEEEE +
## hTozzTo4leptonsIpToVtxProducerGD +
## hTozzTo4leptonsIpToVtxProducerGDMMMM +
## hTozzTo4leptonsIpToVtxProducerGDEEEE +
hTozzTo4leptonsIpToVtxProducerStd +
hTozzTo4leptonsIpToVtxProducerStdMMMM +
hTozzTo4leptonsIpToVtxProducerStdEEEE +
hTozzTo4leptonsIpToVtxProducerKin +
hTozzTo4leptonsIpToVtxProducerKinMMMM +
hTozzTo4leptonsIpToVtxProducerKinEEEE +
# hTozzTo4leptonsConstraintFitProducerDiLeptons +
# hTozzTo4leptonsConstraintFitProducerTriLeptonsMMM +
# hTozzTo4leptonsConstraintFitProducerTriLeptonsMME +
# hTozzTo4leptonsConstraintFitProducerTriLeptonsEEE +
# hTozzTo4leptonsConstraintFitProducerTriLeptonsMEE +
ConvValueMapProd +
# kt6corPFJets +
kt6PFJetsCentral +
hTozzTo4leptonsPFJetSelector +
ak5PFJetsCorrection +
ak5PFJetsCorrectionData +
recoPuJetIdMCsequence +
recoPuJetIdDatasequence +
patTrigger +
muonTriggerMatchHLT +
muonTriggerMatchHLTasym +
electronTriggerMatchHLT +
electronRegressionEnergyProducer
# hTozzTo4leptonsCommonRootTreePresel
)
| [
"reham.aly@cern.ch"
] | reham.aly@cern.ch |
84ea742bc4b8ec566d853382b64ee95b8bd16a72 | 80528a28ccf8a13318fa5ba6682db5b7a6104641 | /venv/bin/chardetect | 042a0fff94b93be0f466e8014236fece1328c3eb | [] | no_license | kiki211/RESTAPI2020 | 18359b3d4b9ec90d0f4dabc1fc3d90582ed7fc44 | 6a3089affdaa9b69e0c41b6d6698ce7f72355ef4 | refs/heads/master | 2020-12-28T08:56:20.885714 | 2020-02-09T16:18:11 | 2020-02-09T16:18:11 | 238,253,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | #!/Users/abichevo/PycharmProjects/APIautomation/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
    # Strip setuptools launcher suffixes (-script.py / -script.pyw / .exe)
    # from argv[0] so usage/help output shows the plain command name.
    launcher = sys.argv[0]
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', launcher)
    # Propagate chardetect's return value as the process exit status.
    sys.exit(main())
| [
"alexbichevoy211@gmail.com"
] | alexbichevoy211@gmail.com | |
c208e65a004c73d0f48559a896158d1e95398217 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/EDataServer/SourceAutocompletePrivate.py | 1de46992f4fdcbcee66390bb6885068da4e6eb42 | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 4,505 | py | # encoding: utf-8
# module gi.repository.EDataServer
# from /usr/lib64/girepository-1.0/EDataServer-1.2.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.Gio as __gi_repository_Gio
import gi.repository.GObject as __gi_repository_GObject
import gi.repository.Soup as __gi_repository_Soup
import gobject as __gobject
class SourceAutocompletePrivate(__gi.Struct):
    # no doc
    # NOTE(review): machine-generated introspection stub ("by generator
    # 1.147") mirroring the opaque ESourceAutocompletePrivate struct from
    # EDataServer-1.2.typelib. Every method is a placeholder with
    # "real signature unknown" and a `pass` body — it exists only for IDE
    # completion and carries no runtime behavior; do not hand-edit.

    def __delattr__(self, *args, **kwargs): # real signature unknown
        """ Implement delattr(self, name). """
        pass

    def __dir__(self, *args, **kwargs): # real signature unknown
        """ Default dir() implementation. """
        pass

    def __eq__(self, *args, **kwargs): # real signature unknown
        """ Return self==value. """
        pass

    def __format__(self, *args, **kwargs): # real signature unknown
        """ Default object formatter. """
        pass

    def __getattribute__(self, *args, **kwargs): # real signature unknown
        """ Return getattr(self, name). """
        pass

    def __ge__(self, *args, **kwargs): # real signature unknown
        """ Return self>=value. """
        pass

    def __gt__(self, *args, **kwargs): # real signature unknown
        """ Return self>value. """
        pass

    def __hash__(self, *args, **kwargs): # real signature unknown
        """ Return hash(self). """
        pass

    def __init_subclass__(self, *args, **kwargs): # real signature unknown
        """
        This method is called when a class is subclassed.
        The default implementation does nothing. It may be
        overridden to extend subclasses.
        """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    def __le__(self, *args, **kwargs): # real signature unknown
        """ Return self<=value. """
        pass

    def __lt__(self, *args, **kwargs): # real signature unknown
        """ Return self<value. """
        pass

    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass

    def __ne__(self, *args, **kwargs): # real signature unknown
        """ Return self!=value. """
        pass

    def __reduce_ex__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass

    def __reduce__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass

    def __repr__(self, *args, **kwargs): # real signature unknown
        """ Return repr(self). """
        pass

    def __setattr__(self, *args, **kwargs): # real signature unknown
        """ Implement setattr(self, name, value). """
        pass

    def __sizeof__(self, *args, **kwargs): # real signature unknown
        """ Size of object in memory, in bytes. """
        pass

    def __str__(self, *args, **kwargs): # real signature unknown
        """ Return str(self). """
        pass

    def __subclasshook__(self, *args, **kwargs): # real signature unknown
        """
        Abstract classes can override this to customize issubclass().
        This is invoked early on by abc.ABCMeta.__subclasscheck__().
        It should return True, False or NotImplemented. If it returns
        NotImplemented, the normal algorithm is used. Otherwise, it
        overrides the normal algorithm (and the outcome is cached).
        """
        pass

    def __weakref__(self, *args, **kwargs): # real signature unknown
        # NOTE(review): generator artifact — __weakref__ is normally a
        # descriptor, not a method; harmless in a non-executing stub.
        pass

    # Class-level metadata captured by the stub generator; the "(!)"
    # comments record real runtime values that could not be rendered as
    # Python literals.
    __class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
    __dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(SourceAutocompletePrivate), '__module__': 'gi.repository.EDataServer', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'SourceAutocompletePrivate' objects>, '__weakref__': <attribute '__weakref__' of 'SourceAutocompletePrivate' objects>, '__doc__': None})"
    __gtype__ = None # (!) real value is '<GType void (4)>'
    # NOTE(review): StructInfo is not importable outside the generator's
    # context, so this line would raise NameError at class creation.
    __info__ = StructInfo(SourceAutocompletePrivate)
| [
"ttys3@outlook.com"
] | ttys3@outlook.com |
755362365f434df4bbc6b2722e75b77297a7e724 | f89bd618c03a98bced67481be15c5827119c5acf | /listt.py | 54a840edd871d8b9aaf8515493cc122bf4b98733 | [] | no_license | Nagalakshme/python | 7af27ee03b23276ee0d73664fec467823f272e91 | 6c283f38f12e07b9a6d7c2383ad3f5858af2ff6c | refs/heads/master | 2020-07-11T22:38:08.457182 | 2019-12-26T12:25:43 | 2019-12-26T12:25:43 | 204,658,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | values='sherlock'
# Demonstrates nested-list indexing: my_list[1] is the inner list,
# my_list[1][1] is the string bound to `values`, and [1:3] slices
# two characters out of it.
my_list = ['holy', ['hhh', values, 46, 74], 25, 2, [723, 5, 2], 6, 76, 8]
inner_word = my_list[1][1]
print(inner_word[1:3])
| [
"noreply@github.com"
] | Nagalakshme.noreply@github.com |
2a5b864a3ebcf588854fa0df2b2e9e32ddbee910 | 3fd6e85c36a7e9e4f9ddec163a55f3602ccfb98c | /hw/gimbal/firmware/site_scons/site_tools/arm_none_eabi.py | 1ed9fbfb9debd2019a680a3816a74982f6a83789 | [
"Apache-2.0"
] | permissive | SiChiTong/mjmech | acc5da4ac6edd9f1446cc13e471aedeea3e1c419 | a71f35e6ad6bc9c1530a0a33d68c45d073390b79 | refs/heads/master | 2020-03-20T03:44:13.276650 | 2018-05-06T02:59:55 | 2018-05-06T03:04:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,177 | py | # Copyright 2015 Josh Pieper, jjp@pobox.com. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Suffix of each GNU toolchain binary, keyed by the SCons construction
# variable it populates; generate() prepends the 'arm-none-eabi-'
# cross-compiler prefix to every value.
TOOLS = {
    'CC': 'gcc',
    'CXX': 'g++',
    'LD': 'gcc',
    'AR': 'ar',
    'AS': 'gcc',
    'OBJCOPY': 'objcopy',
    'OBJDUMP': 'objdump',
}
def generate(env, **kwargs):
    """Configure *env* for cross-compiling with the arm-none-eabi toolchain.

    Loads the stock GNU tool modules into the environment, then points
    every construction variable listed in TOOLS at its
    'arm-none-eabi-' prefixed binary.

    :param env: SCons construction environment to mutate in place.
    :param kwargs: ignored; accepted for the SCons tool ``generate`` API.
    """
    # Let's assume that the host version of the compiler is here and
    # available.
    gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas']
    for tool in gnu_tools:
        env.Tool(tool)

    # .items() instead of .iteritems(): dict.iteritems() was removed in
    # Python 3; .items() behaves identically for this loop on Python 2 and 3.
    for key, value in TOOLS.items():
        env[key] = 'arm-none-eabi-' + value

    # Assemble through the compiler driver; -c stops before linking.
    env.Append(ASFLAGS=['-c'])
    env['PROGSUFFIX'] = '.elf'
def exists(env):
    """SCons tool-existence hook: unconditionally report the tool usable.

    No detection of the arm-none-eabi-* binaries is attempted; if they
    are missing from PATH the build fails later at compile time instead.
    """
    return 1
| [
"jjp@pobox.com"
] | jjp@pobox.com |
f6541ce58f56975601c779e9cc33e306459c362e | 6e6e521aaf8c08d5f3187a572f4e4077b2589f6a | /python_fileops.py | e75755c3b7eec21376e311b1337c95af97614e59 | [] | no_license | prahate/python-learn | 50aaa947fb9ae08937a8f012291742108cdbdc61 | 32196d50e93dfd5baae021ec015d4f62874b1669 | refs/heads/master | 2021-06-17T13:34:34.605607 | 2018-11-30T11:06:20 | 2018-11-30T11:06:20 | 96,204,179 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,031 | py | # We will learn about opening and editing files from python program
# open file using open function
# other argument to open function specifies mode, r = reading, r+ = read and write, a = append to file and so on.
# The other way to open a file is to use a context manager (the "with" statement)
# The advantage of using a context manager is that we don't need to close the file; it is closed automatically
# f_handle = open('test.txt', 'r')
#with open('test.txt', 'r') as f_handle:
#print(f_handle.read())
# Instead of reading whole file contents using read we can read line by line using readlines or readline functions
#f_contents = f_handle.readlines()
#print(f_contents, end='')
#f_contents = f_handle.readline()
#print(f_contents, end='')
#for line in f_handle:
# print(line, end='')
# Copy test.txt line by line into test_copy.txt using write().
# The nested `with` blocks close both files automatically, even on error.
# NOTE(review): assumes test.txt exists in the current working directory;
# open() raises FileNotFoundError otherwise — confirm before running.
with open('test.txt', 'r') as rf:
    with open('test_copy.txt', 'w') as wf:
        for line in rf:
            wf.write(line)
# print(f_handle.name)
# print(f_handle.mode)
# print(f_handle.read())
# f_handle.close()
| [
"patyarahate@gmail.com"
] | patyarahate@gmail.com |
4dc0846a6f6da1dbe74a6a1beef46bc096075bc4 | 30b9c43c370af62c3076571d61e45480f4b98e2f | /classify/urls.py | 4de4f0e8498cacb276cc04f22ce7ea597126d998 | [] | no_license | Below0/mit_cf | 6cafb9926301c65bf4cceeb62353c3224ec5de8a | d51fdfe330dec2827bd0d8bf2d56436dbe63a599 | refs/heads/master | 2023-08-03T09:58:08.941252 | 2020-07-20T13:14:56 | 2020-07-20T13:14:56 | 281,120,287 | 0 | 0 | null | 2021-09-22T19:27:37 | 2020-07-20T13:13:07 | Python | UTF-8 | Python | false | false | 132 | py | from django.urls import path
from . import views
urlpatterns = [
path('<str:file_path>', views.classify_req, name='request'),
] | [
"harvey.lee@kakaocorp.com"
] | harvey.lee@kakaocorp.com |
c2da498755dbaea47f89c6ed9de0ae6ac15b2631 | 21e495f2ec7f27c04c583cc0d2ff621e4f21a390 | /serverRequests/menus.py | 489b7974c99faf7e87f7faf3c70562a949d2aadd | [] | no_license | Zetrocker/Groupme-Analytics | 527abf4b734863373c9f18cab574d2a981739bb9 | ce75600b2bbfbdca17d4be47ab296ca10ede19f3 | refs/heads/master | 2016-09-05T22:20:46.668776 | 2015-08-23T00:34:37 | 2015-08-23T00:34:37 | 41,010,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | __author__ = 'Zetrocker'
import sys
def selectGroupMenu(groups):
    """Print a numbered menu of group names and prompt for a selection.

    Returns the chosen group (an element of *groups*), or None when the
    input is invalid or the user picks the Quit option.
    """
    total = len(groups)
    exitOption = total + 1
    menuItem = 0
    for x in groups:
        menuItem += 1
        print(repr(menuItem) + ')', dict(x).get(u'name'))
    # FIX: the original used implicit string concatenation (`')' 'Quit'`),
    # printing e.g. "5)Quit" with no space; use the same print(a, b) form
    # as the entries above.
    print(repr(exitOption) + ')', 'Quit')
    try:
        choice = int(input("Select Group:"))
        if not (1 <= choice <= exitOption):
            raise ValueError()
    except ValueError:
        print("Invalid Option")
    else:
        # FIX: choosing Quit previously fell through to the indexing code
        # and raised IndexError (groups[total]); return None instead.
        if choice == exitOption:
            return None
        selectedGroup = choice - 1
        print(groups[selectedGroup][u'name'], 'selected.')
        theGroup = groups[selectedGroup]
        return theGroup
"michaelcortezster@gmail.com"
] | michaelcortezster@gmail.com |
d79f9355049b93e48b152703f0d5f95c98de641e | cdc4e9c81b6887bff2b70e3f3893b93d6a824e41 | /misc/djauth/djauthapp/apps.py | ced13345572f53ca66c6484f4822613969d35721 | [] | no_license | eawasthi/CodingDojo | e5c363184add26d2b7fb8b442ca15c657599feab | bfbba677cf957b7b45e99465079d5c00bc6c3f6f | refs/heads/master | 2021-01-23T05:25:17.152128 | 2017-06-26T18:36:32 | 2017-06-26T18:36:32 | 86,299,143 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class DjauthappConfig(AppConfig):
    """Django application configuration for the `djauthapp` app."""
    # Dotted module path Django uses to locate this application.
    name = 'djauthapp'
| [
"ekta.awasthi@hotmail.com"
] | ekta.awasthi@hotmail.com |
94466248bb258a43fb02fa6107f0dbaeec9875b2 | ecf3d8db156521c82883a4d72bc4740be4fad73e | /theseus-zj/metrics.py | b841db8109d0d6b6673bda51477e6d1be64a372f | [] | no_license | FNLPprojects/Model-Compress | 2ae32f897eacaf6b6c24e76a5b3005cf57813f5e | 41753e51a2f2a73096dd29ddd76c609645424b03 | refs/heads/master | 2023-06-28T20:31:29.115616 | 2020-05-25T01:24:59 | 2020-05-25T01:24:59 | 248,321,549 | 9 | 4 | null | 2023-06-12T21:28:35 | 2020-03-18T19:16:02 | Python | UTF-8 | Python | false | false | 2,758 | py | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
    from scipy.stats import pearsonr, spearmanr
    from sklearn.metrics import matthews_corrcoef, f1_score
    _has_sklearn = True
except (AttributeError, ImportError):
    # scipy/scikit-learn are optional; the metric helpers below are only
    # defined when both are importable.
    _has_sklearn = False


def is_sklearn_available():
    """Return True if scipy and scikit-learn were successfully imported."""
    return _has_sklearn


if _has_sklearn:

    def simple_accuracy(preds, labels):
        """Fraction of predictions equal to the labels (element-wise mean)."""
        return (preds == labels).mean()

    def acc_and_f1(preds, labels):
        """Accuracy, binary F1, and their average (MRPC / QQP)."""
        acc = simple_accuracy(preds, labels)
        f1 = f1_score(y_true=labels, y_pred=preds)
        return {
            "acc": acc,
            "f1": f1,
            "acc_and_f1": (acc + f1) / 2,
        }

    def pearson_and_spearman(preds, labels):
        """Pearson and Spearman correlations and their average (STS-B)."""
        pearson_corr = pearsonr(preds, labels)[0]
        spearman_corr = spearmanr(preds, labels)[0]
        return {
            "pearson": pearson_corr,
            "spearmanr": spearman_corr,
            "corr": (pearson_corr + spearman_corr) / 2,
        }

    # Task groups that share a metric; collapses the original chain of
    # identical elif branches.
    _ACC_TASKS = ("sst-2", "mnli", "mnli-mm", "qnli", "rte", "wnli", "hans")
    _ACC_F1_TASKS = ("mrpc", "qqp")

    def compute_metrics(task_name, preds, labels):
        """Compute the GLUE metric dict for *task_name*.

        Raises KeyError for an unknown task name, as before.
        """
        assert len(preds) == len(labels)
        if task_name == "cola":
            return {"mcc": matthews_corrcoef(labels, preds)}
        if task_name in _ACC_TASKS:
            return {"acc": simple_accuracy(preds, labels)}
        if task_name in _ACC_F1_TASKS:
            return acc_and_f1(preds, labels)
        if task_name == "sts-b":
            return pearson_and_spearman(preds, labels)
        raise KeyError(task_name)
"noreply@github.com"
] | FNLPprojects.noreply@github.com |
e62bb40c823f97a4d88b9ee4884e3edb00c40a0d | f4b60f5e49baf60976987946c20a8ebca4880602 | /lib64/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/dhcp/clientaddr.py | 134e0b93623bfb9ff57a9db85dbee07038399b49 | [] | no_license | cqbomb/qytang_aci | 12e508d54d9f774b537c33563762e694783d6ba8 | a7fab9d6cda7fadcc995672e55c0ef7e7187696e | refs/heads/master | 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 | Python | UTF-8 | Python | false | false | 3,567 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ClientAddr(Mo):
    """
    The DHCP address received for the client.
    """

    # Auto-generated class metadata describing this managed object (MO)
    # within the Cisco ACI (cobra) object model.
    meta = ClassMeta("cobra.model.dhcp.ClientAddr")

    meta.moClassName = "dhcpClientAddr"
    # Relative name embeds the naming property `address` (bracketed).
    meta.rnFormat = "addr-[%(address)s]"
    meta.category = MoCategory.REGULAR
    meta.label = "Client Address"
    meta.writeAccessMask = 0x8008020040001
    meta.readAccessMask = 0x8008020040001
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = True
    meta.isContextRoot = False

    # This MO lives under a dhcp:ClientIf parent and extends dhcp:Addr.
    meta.parentClasses.add("cobra.model.dhcp.ClientIf")

    meta.superClasses.add("cobra.model.dhcp.Addr")

    meta.rnPrefixes = [
        ('addr-', True),
    ]

    # `address`: the naming property (create-only, part of the DN/RN).
    prop = PropMeta("str", "address", "address", 6133, PropCategory.REGULAR)
    prop.label = "Address"
    prop.isConfig = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    prop.isNaming = True
    meta.props.add("address", prop)

    # `childAction`: implicit bookkeeping property for child MO operations.
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)

    # `dn`: the distinguished name, assigned by the system.
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)

    # `modTs`: last-modification timestamp; defaults to "never".
    prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "never"
    prop._addConstant("never", "never", 0)
    meta.props.add("modTs", prop)

    # `rn`: the relative name component of the DN.
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)

    # `status`: create/modify/delete status flags used by the MIT.
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)

    # Register `address` as the naming property; its value is delimited
    # (bracketed) when rendered into the RN.
    meta.namingProps.append(getattr(meta.props, "address"))

    getattr(meta.props, "address").needDelimiter = True

    def __init__(self, parentMoOrDn, address, markDirty=True, **creationProps):
        # `address` is the sole naming value required to build the RN/DN.
        namingVals = [address]
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"collinsctk@qytang.com"
] | collinsctk@qytang.com |
bb06143569e306a794a30eff304f5dbca4838171 | fb3b57ba265012b50ccb13dda6806cd3014805e5 | /Main.py | 1f429778ac39cd75c6a53f85e017781058903785 | [] | no_license | kunalvats/Squad-Parking | 3027ac2e2b78360805472b5e38307dd94040a3b1 | cb748a6205c7f2cea6cfcf2d7b7809e78967b122 | refs/heads/master | 2020-12-04T02:14:30.673964 | 2020-01-03T11:01:25 | 2020-01-03T11:01:25 | 231,567,499 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | from ParkingLot import ParkingLot
import fileinput
# Dispatch each input line (stdin or files given on the command line) to the
# matching ParkingLot operation by substring match on the command keyword.
# NOTE(review): a 'Create_parking_lot' line must come first, otherwise the
# later branches raise NameError because `parkingLot` is unbound.
for line in fileinput.input():
    '''here reading command one by one'''
    if 'Create_parking_lot' in line:
        # Expected form: "Create_parking_lot <capacity>"
        data = line.split()
        parkingLot=ParkingLot(int(data[1]))
    if 'Park ' in line:
        # Expected form: "Park <reg_no> driver_age <age>" — data[1] is the
        # registration number and data[3] the age (TODO confirm format).
        data = line.split()
        slotflag = parkingLot.isSlotAvailable()
        if slotflag == True:
            slotNumber=parkingLot.getAvailableSlot()
            parkingLot.parkThePark(slotNumber,data[1],data[3])
        else:
            print("Sorry, parking lot is full")
    if 'Leave' in line:
        # Expected form: "Leave <slot_number>"
        data = line.split()
        parkingLot.leaveTheCar(int(data[1]))
    if 'Status' in line:
        parkingLot.showParkingStatus()
    if 'Slot_numbers_for_driver_of_age' in line:
        data = line.split()
        parkingLot.Slot_numbers_for_driver_of_age(data[1])
    if 'Vehicle_registration_number_for_driver_of_age' in line:
        data = line.split()
        parkingLot.Vehicle_registration_number_for_driver_of_age(data[1])
    if 'Slot_number_for_car_with_number' in line:
        data = line.split()
        parkingLot.getSlotNumberForRegistrationNumber(data[1])
| [
"kunalvats2000@gmail.com"
] | kunalvats2000@gmail.com |
e39d268ad37d8b57644b7f0f74b578c24e1d5f8d | 686bcedf19657706d6d7c6d928e01ac4218ef096 | /FIS.py | 1f2b4139963d32bfb822639ef793e565cca73a87 | [] | no_license | AceArthur/FIS | 577586f83a5ecd68bb09b7366e598c945df288cf | fc950ce11ae3ef6119908d48fbf5b08ac891dc6b | refs/heads/master | 2020-12-25T15:08:39.466030 | 2016-09-11T13:54:30 | 2016-09-11T13:54:30 | 67,931,963 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py | def getMembership(mf,degree,x):
a,b,c,d = mf[degree]
if x<=a or x>=d:
return 0.0
else:
if a == b:
return 1.0 if a<=x<=c else (d-x)/(d-c)
if c == d:
return 1.0 if b<=x<=d else (x-a)/(b-a)
else:
if a<=x<=b:
return 1.0 if a == b else (x-a)/(b-a)
elif c<=x<=d:
return 1.0 if b == c else (d-x)/(d-c)
else:
return 1.0
def FIS(get_speed, get_dist):
    """Weighted-average fuzzy inference over speed and closeness.

    Each input's membership in every rulebase label weights that rule's
    crisp output; the result is the weighted mean speed adjustment (mph).
    """
    mf_speed = {'LOW': (0,0,10,20),'MEDIUM':(15,25,35,45),'HIGH':(40,50,60,60)}
    mf_closeness = {'LOW':(1.0,1.5,2.0,2.0) ,'MEDIUM':(0.5,0.75,1.0,1.25),'HIGH':(0,0,0.25,0.75)}
    rulebase = [['HIGH',-10],['MEDIUM',5],['LOW',10]]
    sum_w, sum_v = 0, 0
    # Accumulate membership weights and weighted rule outputs for both
    # input variables against the shared rulebase.
    for membership_fn, crisp_value in [[mf_speed, get_speed], [mf_closeness, get_dist]]:
        for label, rule_output in rulebase:
            weight = getMembership(membership_fn, label, crisp_value)
            sum_w += weight
            sum_v += weight * rule_output
    return sum_v / sum_w
# Interactive driver: prompt for the crisp inputs and print the fuzzy
# recommendation. NOTE(review): raw_input and the print statement below are
# Python 2 only; "Speend" is a typo in the prompt (runtime string, left as-is).
get_speed = int(raw_input("Your Speend? (0-60 mph)"))
get_dist = float(raw_input("Your Distance to Target? (0-2.0 miles)"))
print "Adjust Speed by:", FIS(get_speed,get_dist), "mph."
| [
"noreply@github.com"
] | AceArthur.noreply@github.com |
b8d9f5ee64570bdf0c15ab55e124cd7e677cb144 | be429a1e5e4903616a4532c1bf238df20fea75c0 | /6.14/127.单词接龙.py | 3f21e478f21ed4a7dfac01e5e07586b2c193a049 | [] | no_license | pythonnewbird/LeetCodeSolution | ccc8cc17df4cea3109d84b0c347ae91c1bc33a28 | 2447f760f08fb3879c5f03d8650e30ff74115d3d | refs/heads/master | 2020-03-19T05:06:06.681429 | 2018-07-01T12:39:09 | 2018-07-01T12:39:09 | 135,899,944 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | class Solution(object):
def ladderLength(self, beginWord, endWord, wordList):
"""
:type beginWord: str
:type endWord: str
:type wordList: List[str]
:rtype: int
"""
wdset=set(wordList)
if endWord not in wdset:
return 0
lts="abcdefghijklmnopqrstuvwxyz"
dist=float("inf")
q=[beginWord]
seen={beginWord:0}
graph={beginWord:set()}
while q:
cur=q.pop(0)
d=seen[cur]
if d>=dist:
break
for i in range(len(cur)):
for lt in lts:
if lt!=cur[i]:
new=cur[:i]+lt+cur[i+1:]
if new in wdset and (new not in seen or d+1==seen[new]):
if cur in graph:
graph[cur].add(new)
else:
graph[cur]=set([new])
if new==endWord:
dist=d+1
if new not in seen:
seen[new]=d+1
q.append(new)
if dist!=float('inf') :
return dist+1
else:
return 0 | [
"21637007@zju.edu.cn"
] | 21637007@zju.edu.cn |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.