blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a2d8df1ae71f9e5f45610def83ccc3dd5bf31873 | ca2b25b0d9e42c3a79d13880dd36c6c638449eb1 | /geowind_crawler/geowind_crawler/urls.py | aee1e5df3a85ab368f6a64dd6cbac79cd65466c3 | [] | no_license | cnyangkui/distributed-crawler | f39c5f24f5feea6510613701979531749b9dbcbb | f25848bcea3259b4bd9191cf3cf103260f46eaa2 | refs/heads/master | 2022-04-27T12:52:13.916825 | 2020-04-27T05:00:04 | 2020-04-27T05:00:04 | 110,426,850 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,029 | py | from django.conf.urls import include, url
from django.contrib import admin
from crawlermanage import views
# URL routing table for the crawler-management console.
# FIX: removed a duplicated, unreachable route for 'crawlermanage/ecommercedata/'
# (Django resolves the first match, so the second entry was dead code).
# The commented-out entries below are kept from the older string-based view style.
urlpatterns = [
    # Examples:
    # url(r'^$', 'geowind_crawler.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^admin/', include(admin.site.urls)),
    url(r'^crawlermanage/$',views.login, name='login'),
    url(r'^crawlermanage/login/$',views.login, name='login'),
    url(r'^crawlermanage/index/$',views.index, name='index'),
    url(r'^crawlermanage/tasks/$',views.tasks, name='tasks'),
    url(r'^crawlermanage/edittask/$',views.edittask, name='edittask'),
    url(r'^crawlermanage/newsdata/$',views.newsdata, name='newsdata'),
    #url(r'^crawlermanage/newsdetail/$',views.newsdetail, name='newsdetail'),
    url(r'^crawlermanage/ecommercedata/$',views.ecommercedata, name='ecommercedata'),
    url(r'^crawlermanage/layout/$',views.layout, name='layout'),
    url(r'^crawlermanage/taskdetail/$',views.taskdetail, name='taskdetail'),
    url(r'^crawlermanage/testarticles/$',views.testarticles, name='testarticles'),
    url(r'^crawlermanage/testlist/$',views.testlist, name='testlist'),
    url(r'^crawlermanage/extractarticle/$',views.extractarticle, name='extractarticle'),
    url(r'^crawlermanage/processlist/$',views.processlist, name='processlist'),
    url(r'^crawlermanage/machinelist/$', views.machinelist, name='machinelist'),
    url(r'^crawlermanage/deleteip/$', views.deleteip, name='deleteip'),
    url(r'^crawlermanage/addip/$', views.addip, name='addip'),
    url(r'^crawlermanage/charts/$', views.charts, name='charts'),
    url(r'^crawlermanage/testsingle/$', views.testsingle, name='testsingle'),
    url(r'^crawlermanage/introduce/$', views.introduce, name='introduce'),
    url(r'^crawlermanage/blogdata/$', views.blogdata, name='blogdata'),
    #url(r'^crawlermanage/blogdetail/$', views.blogdetail, name='blogdetail'),
    url(r'^crawlermanage/extractsinger/$', views.extractsinger, name='extractsinger'),
    url(r'^crawlermanage/extractmultiple/$', views.extractmultiple, name='extractmultiple'),
    url(r'^crawlermanage/temparticle/$', views.temparticle, name='temparticle'),
    url(r'^crawlermanage/editprocess/$', views.editprocess, name='editprocess'),
    url(r'^crawlermanage/settings/$', views.settings, name='settings'),
    url(r'^crawlermanage/domainautocomplete/$', views.domain_autocomplete, name='domainautocomplete'),
    url(r'^crawlermanage/debug/$', views.debug, name='debug'),
    url(r'^crawlermanage/export/$', views.export, name='export'),
    # url(r'^crawlermanage/$','crawlermanage.views.login'),
    # url(r'^crawlermanage/login/$','crawlermanage.views.login'),
    # url(r'^crawlermanage/index/$','crawlermanage.views.index'),
    # url(r'^crawlermanage/tasks/$','crawlermanage.views.tasks'),
    # url(r'^crawlermanage/taskdata/$','crawlermanage.views.taskdata'),
    # url(r'^crawlermanage/layout/$','crawlermanage.views.layout'),
]
| [
"1371826591@qq.com"
] | 1371826591@qq.com |
bd53b023c31aef6c6d112017587896a658352547 | 9941c8c68aaeee8cb35bc94ab404b0f693b8dfb2 | /Old Classes/Computer Science/148/csc148/A2/game_view.py | 33337e613eb0a54f665fe085f4dcdca872ca770d | [] | no_license | axelthorstein/university-projects | c861c0c1e308e26df7bc4b461eb09d0fc4504900 | 3638233c0c480c7bec802b0778e346583bfbe533 | refs/heads/master | 2020-05-23T09:06:39.650891 | 2017-04-08T10:52:24 | 2017-04-08T10:52:24 | 80,433,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,525 | py | class GameView:
'''
A game view for a two-player, sequential move, zero-sum,
perfect-information game.
'''
def __init__(self, state, strategy):
'''(GameView, GameState.__class__,
Strategy.__class__) -> NoneType
Create GameView self for game described by state, where
computer uses given strategy.
'''
player = input('Type c if you wish the computer to play first ')
if player == 'c':
p = 'p2'
else:
p = 'p1'
self.state = state(p, interactive=True)
self.strategy = strategy(interactive=True)
    def play(self):
        ''' (GameView) -> NoneType

        Play a game.

        Alternates turns until the state reports the game is over:
        p1 (the human) enters moves, which are re-prompted until legal;
        p2 (the computer) asks its strategy for a move. Afterwards the
        winner (or a tie) is announced.
        '''
        print(self.state.instructions)
        print(self.state)
        print()
        while not self.state.over:
            if self.state.next_player == 'p1':
                # Human turn: keep asking until the move is legal.
                m = self.state.get_move()
                while not m in self.state.possible_next_moves():
                    # The move was illegal.
                    print('Illegal move: {}\nPlease try again.\n'.format(m))
                    print(self.state.instructions)
                    print(self.state)
                    m = self.state.get_move()
                print('You choose: {}'.format(m))
            else:
                # The computer makes a move.
                m = self.strategy.suggest_move(self.state)
                print('The computer chooses: {}'.format(m))
            # States are immutable here: apply_move returns a new state.
            self.state = self.state.apply_move(m)
            print('New game state: \n', str(self.state))
            print()
        if self.state.winner('p2'):
            # p2, the computer, wins
            print('Beat ya!')
        elif self.state.winner('p1'):
            # p1, the human challenger, wins
            print('Congrats -- you won!!')
        else:
            print('We tied...')
if __name__ == '__main__':
    # Let the user pick a game and a computer strategy, then run one game.
    from subtract_square_state import SubtractSquareState
    from tippy_game_state import TippyGameState
    game_state = ({'s': SubtractSquareState, 't': TippyGameState})
    from strategy_random import StrategyRandom
    from strategy_minimax import StrategyMinimax
    strategy = ({'r': StrategyRandom, 'm': StrategyMinimax})
    g = ''
    # Re-prompt until a valid game key ('s' or 't') is entered.
    while not g in game_state.keys():
        g = input('s to play Subtract Square, t to play Tippy: ')
    s = ''
    # Re-prompt until a valid strategy key ('r' or 'm') is entered.
    while not s in strategy.keys():
        s = input('r for random strategy for computer, \
m for minimax strategy: ')
    GameView(game_state[g], strategy[s]).play()
| [
"axelthorstein@gmail.com"
] | axelthorstein@gmail.com |
0242ad91656a9be579908b441d9b94af3542b343 | ef821468b081ef2a0b81bf08596a2c81e1c1ef1a | /Programming Basics/Nested_Loops-LAB/Cinema_Tickets.py | 7dae726c936e27321845167e1b72d8edcf1c7c38 | [] | no_license | Ivaylo-Atanasov93/The-Learning-Process | 71db22cd79f6d961b9852f140f4285ef7820dd80 | 354844e2c686335345f6a54b3af86b78541ed3f3 | refs/heads/master | 2023-03-30T20:59:34.304207 | 2021-03-29T15:23:05 | 2021-03-29T15:23:05 | 294,181,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,362 | py | movie = ''
# Console ticket-sales tracker. For each movie: read the hall capacity,
# then ticket types line by line until 'End' (next movie) or 'Finish'
# (end of report). Prints per-movie occupancy and overall percentages.
free_seats = 0
ticket_type = ''
sold_seats = 0
student = 0
standard = 0
kids = 0
total_tickets = 0
flag = False
while not flag:
    movie = input()
    if movie == 'Finish':
        break
    free_seats = int(input())
    while ticket_type != 'End':
        ticket_type = input()
        if ticket_type == 'student':
            student += 1
            sold_seats += 1
            total_tickets += 1
        elif ticket_type == 'standard':
            standard += 1
            sold_seats += 1
            total_tickets += 1
        elif ticket_type == 'kid':
            kids += 1
            sold_seats += 1
            total_tickets += 1
        elif ticket_type == 'End':
            # Movie closed early: report occupancy and move on.
            print(f'{movie} - {(sold_seats / free_seats) * 100:.2f}% full.')
        elif ticket_type == 'Finish':
            # Whole report finished: report and signal the outer loop to stop.
            print(f'{movie} - {(sold_seats / free_seats) * 100:.2f}% full.')
            flag = True
            break
        if sold_seats == free_seats:
            # Sold out: report and move to the next movie.
            print(f'{movie} - {(sold_seats / free_seats) * 100:.2f}% full.')
            break
    # Reset per-movie counters before the next movie.
    sold_seats = 0
    ticket_type = ''
    if flag:
        break
# NOTE(review): if 'Finish' arrives before any ticket is sold,
# total_tickets is 0 and these lines divide by zero — confirm the
# input contract guarantees at least one ticket.
print(f'Total tickets: {total_tickets}')
print(f'{(student / total_tickets) * 100:.2f}% student tickets.')
print(f'{(standard / total_tickets) * 100:.2f}% standard tickets.')
print(f'{(kids / total_tickets) * 100:.2f}% kids tickets.')
| [
"ivailo.atanasov93@gmail.com"
] | ivailo.atanasov93@gmail.com |
a4b0d01b37ea699dd70c82b1abc50879a359bc1e | a37a966cebcf5e1dff7bad9184e73d8c9288fe24 | /scripts_older_versions/start_http_sneaky | 44035aa4866dcbbf9289e5ff003f113703f6ccf3 | [] | no_license | mehran-47/httpComponent | 3b54b926dfd6cd3e8ae009d41114346a313730f0 | 65bec3151d771bd00bfef273a046d7ca45dbbbf4 | refs/heads/master | 2021-01-15T15:51:59.731037 | 2016-03-22T19:00:18 | 2016-03-22T19:00:18 | 34,924,324 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,837 | #!/usr/bin/env python3
import os, sys, netifaces as ni, shelve as sh, threading, mimetypes
from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
from subprocess import call
#Setting global variables for the component to function properly
component_dir = '/opt/httpComponent'
interface_to_host_in = 'eth0'
#Making server multithreaded
class MultiThreadedServer(ThreadingMixIn, HTTPServer):
    # ThreadingMixIn makes HTTPServer handle each request in its own
    # thread, so one slow client does not block the others.
    pass
#HTTP server class
class WelcomeHandler(BaseHTTPRequestHandler):
    """Serves the in-memory demo page at '/', static files found under
    component_dir, and a plain-text 404 body for anything else."""

    error_message_format = '<h1>Har har</h1>'

    def do_GET(self):
        # FIX: guess the MIME type once (was computed twice per request),
        # use `is not None`, and drop the unused `mime` lookup table.
        guessed = mimetypes.guess_type(self.path)[0]
        requested_type = guessed if guessed is not None else 'text/html'
        try:
            if self.path == '/':
                self.send_response(200)
                self.send_header("Content-type", requested_type)
                self.end_headers()
                self.wfile.write(bytes(htmlpage, 'UTF-8'))
                return
            elif os.path.isfile(component_dir + self.path):
                self.send_response(200)
                self.send_header("Content-type", requested_type)
                self.end_headers()
                # FIX: context manager guarantees the file is closed even
                # if the write fails (the old open/close pair leaked it).
                with open(component_dir + self.path, 'rb') as fp:
                    self.wfile.write(fp.read())
                return
            else:
                self.send_response(404, notfound)
                self.send_header("Content-type", 'text/html')
                self.end_headers()
                self.wfile.write(bytes(notfound, 'UTF-8'))
                return
        except BrokenPipeError:
            # Client disconnected mid-response; nothing useful to do.
            print('Failed to complete request')
if __name__ == '__main__':
    # Requires an AMF-assigned component name; refuse to run standalone.
    if not os.environ.get('SA_AMF_COMPONENT_NAME'):
        print("No component name found, quitting")
        sys.exit()
    else:
        # Bind to the first IPv4 address of the configured interface.
        ip = ni.ifaddresses(interface_to_host_in)[2][0]['addr']
        port = 8080
    #sneaky!
    os.environ['SAFHTTPATT'] = 'safCsi=AmfDemo,safSi=AmfDemo,safApp=AmfDemo1'
    CSI_name = os.environ.get('SAFHTTPATT')
    component_name = str(os.environ.get('SA_AMF_COMPONENT_NAME'))
    #Getting environment variables to show in the web page and to store in the temporary DB.
    #DB has nothing to do with monitoring, kept for debugging purposes/ease of starting/ending http
    envVarStrings = ""
    for key in os.environ: envVarStrings+=str(key)+":"+ str(os.environ[key]) +"\n"
    with open('/opt/SA_stats/osaf_envs', 'w') as sf: sf.write(envVarStrings)
    with open('/opt/SA_stats/pid', 'w') as sf: sf.write(str(os.getpid()))
    try:
        # Record this process in the shared shelve DB (debugging aid only).
        db = sh.open('/opt/SA_stats/compDB.db', writeback=True)
        if not 'components' in db: db['components'] = {}
        db['PIDs'] = db['PIDs']+[os.getpid()] if 'PIDs' in db else [os.getpid()]
        db['osaf_envs'] = envVarStrings
        db['components'][component_name] = {'CSI':CSI_name, 'PID':os.getpid(), 'component':component_name, 'HAState':'Active'}
    finally:
        db.close()
    #Creating the main HTML page
    htmlpage = '<html><head><title>SAF Web</title></head><body><p>Component-Web page</p></body></html>'
    notfound = "File not found"
    #Sending trace for the monitoring engine. A form of general instrumentation: command "/opt/httpComponent/send_trace.o <PID> <1>" or "/opt/httpComponent/send_trace.o <PID> <2>"
    #The 1 signifies 'component instatiation' trace creation, 0 would signify 'component termination'.
    call([component_dir+'/send_trace_sneaky.o', str(os.getpid()), '1'])
    #passive monitoring
    #call('amfpm --start -p'.split(' ')+ [str(os.getpid()), component_name])
    # Blocks forever serving HTTP requests on (ip, port).
    httpserver = MultiThreadedServer((ip, port), WelcomeHandler)
    httpserver.serve_forever()
"root@flap-vnode5.encs.private"
] | root@flap-vnode5.encs.private | |
015941bc8acaa405c2226776ad64ba3b9a7bc2ea | 2d2807d27a97cdfd864e001bd465b48ef9a9c2b7 | /src/base_control.py | daa07c9a5c6d053dd64aacf69269ce371b2e39b3 | [] | no_license | ToanLe147/cob_pushing | 1d021ecca7fe1ff2c809e444e17efd099f963d62 | dee1bd5ef71ca736565f860ac39e38a0fdb127d9 | refs/heads/master | 2020-06-11T06:49:13.806571 | 2019-10-27T15:53:47 | 2019-10-27T15:53:47 | 193,881,221 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,003 | py | #!/usr/bin/env python
# license removed for brevity
import rospy
import actionlib
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
def movebase_client():
    """Send a single move_base goal (0.5 m forward in the 'map' frame)
    and block until the action completes.

    Returns the action result on success; on failure logs an error,
    requests node shutdown and returns None.
    """
    action_client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
    action_client.wait_for_server()

    goal = MoveBaseGoal()
    goal.target_pose.header.frame_id = "map"
    goal.target_pose.header.stamp = rospy.Time.now()
    goal.target_pose.pose.position.x = 0.5
    goal.target_pose.pose.orientation.w = 1.0

    action_client.send_goal(goal)
    if action_client.wait_for_result():
        return action_client.get_result()
    rospy.logerr("Action server not available!")
    rospy.signal_shutdown("Action server not available!")
if __name__ == '__main__':
    try:
        # Start the ROS node, then fire one navigation goal.
        rospy.init_node('movebase_client_py')
        result = movebase_client()
        if result:
            rospy.loginfo("Goal execution done!")
    except rospy.ROSInterruptException:
        # Raised when the node is shut down (e.g. Ctrl-C) mid-wait.
        rospy.loginfo("Navigation test finished.")
| [
"duc.le@student.tut.fi"
] | duc.le@student.tut.fi |
6b9966fd76928a69a7d63ecd8c2b9856b2bfa4c9 | a46825af0830a0f84f426547fba1b1f45fb97b3f | /backend/apps/area/urls.py | ce30e18f5bcb945d04fae1d297b2a0b9a011ea19 | [] | no_license | szshysj/Digital_marketing_web | 47544c7b9e0c425a78b0d51195ac245fdaef0503 | 86b31f261158b4c8d130c64ae7e573b8316c8bc4 | refs/heads/master | 2020-08-29T21:26:23.402279 | 2020-03-18T07:55:27 | 2020-03-18T07:55:27 | 218,178,158 | 0 | 0 | null | 2019-12-05T10:16:02 | 2019-10-29T01:26:07 | Vue | UTF-8 | Python | false | false | 313 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Time : 2019/10/14 23:28
# @Author : 孔祥旭
# @Email : d90159@163.com / 351469076@qq.com
from tornado.web import url
from apps.area.handler import GetAreaHandler
urlpatten = [
    # Returns the list of all regions available for ad placement.
    url('/get/area/', GetAreaHandler)
]
| [
"d90159@163.com"
] | d90159@163.com |
622fc633771e548c466af246c6db465108474d96 | 6e4d4f85938a1e4abdfb54f163bbf59f7700bce3 | /miniML/anomaly.py | 5271103717db1279a76a8fac970a9be8fe6632c3 | [] | no_license | yonas-g/miniML | b0602c783e5357b64593d81558d2352cf2b14cbf | 26f6ca634c6400e752f43e21beb41699b3c2fdbf | refs/heads/main | 2023-01-23T08:54:00.423360 | 2020-11-20T12:52:13 | 2020-11-20T12:52:13 | 313,107,992 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,357 | py | import numpy as np
class Gaussian:
    """Base class for Gaussian anomaly detectors.

    Subclasses provide fit() and predict_probability(); this class adds
    threshold handling: a fixed epsilon, or one tuned on a labelled
    validation set by maximising F1 in selectThreshold().
    """

    def __init__(self, epsilon=None):
        # Probability threshold below which a sample is flagged anomalous.
        self.epsilon = epsilon
        self.mu = None
        # Variance vector; covariance matrix in the Multivariate subclass.
        self.sigma2 = None
        # Optional labelled validation data used to tune epsilon.
        self.X_val = None
        self.y_val = None

    def predict(self, X):
        """Return 1 for anomalous samples and 0 for normal ones.

        Raises Exception when no epsilon is set and no validation data
        is available to derive one.
        """
        # FIX: compare to None with `is`/`is not` (PEP 8) and drop the
        # unreachable `return` that followed the raise.
        if self.epsilon is None and (self.X_val is None and self.y_val is None):
            raise Exception(
                'Epsilon Not Set. Initialize with epsilon or fit with X_val and y_val to find optimal value based on the highest F1 score using training data')
        if self.epsilon is None and (self.X_val is not None and self.y_val is not None):
            self.epsilon, _ = self.selectThreshold()
        p = self.predict_probability(X)
        # 1 anomaly, 0 not
        return (p < self.epsilon).astype(int)

    def selectThreshold(self):
        """Scan 1000 candidate thresholds over the validation probabilities
        and return (best_epsilon, best_F1)."""
        bestF1 = 0
        bestEpsilon = 0
        p_val = self.predict_probability(self.X_val)
        stepsize = (np.max(p_val) - np.min(p_val)) / 1000
        for epsilon in np.arange(np.min(p_val), np.max(p_val), stepsize):
            predictions = (p_val < epsilon).astype(int)
            tp = np.sum((predictions == 1) & (self.y_val.flatten() == 1))
            fp = np.sum((predictions == 1) & (self.y_val.flatten() == 0))
            fn = np.sum((predictions == 0) & (self.y_val.flatten() == 1))
            prec = tp / (tp + fp)
            rec = tp / (tp + fn)
            F1 = (2 * prec * rec) / (prec + rec)
            # A NaN F1 (e.g. no predicted positives) fails this comparison,
            # so such thresholds are silently skipped.
            if F1 > bestF1:
                bestF1 = F1
                bestEpsilon = epsilon
        return bestEpsilon, bestF1

    @property
    def params(self):
        """Snapshot of the fitted parameters and current threshold."""
        return {
            'Mean': self.mu,
            'Sigma2': self.sigma2,
            'Epsilon': self.epsilon
        }
class NormalGaussian(Gaussian):
    '''
    Normal Gaussian Distribution for anomaly detection.

    Models each feature as an independent univariate Gaussian; the sample
    probability is the product of the per-feature densities.
    '''

    def __init__(self, epsilon=None):
        Gaussian.__init__(self, epsilon)

    def fit(self, X, X_val=None, y_val=None):
        """Estimate per-feature mean and variance from X; stash optional
        validation data for later epsilon selection."""
        self.X_val = X_val
        self.y_val = y_val
        X = np.array(X)
        m, n = X.shape
        self.mu = np.sum(X, axis=0)/m
        self.sigma2 = np.var(X, axis=0)

    def predict_probability(self, X):
        """Return the product of per-feature Gaussian densities per sample."""
        # we can use Multivariate gaussian by setting sigma2 diagonal matrix
        X = (X - self.mu)**2
        # FIX: the exponent is -(x-mu)^2 / (2*sigma^2). The previous code
        # computed np.exp(-X/2*self.sigma2), which by operator precedence
        # is (-X/2)*sigma^2 — wrong whenever sigma^2 != 1.
        p = 1/(np.sqrt(2*np.pi)*np.sqrt(self.sigma2))*np.exp(-X/(2*self.sigma2))
        return np.prod(p, axis=1)
class MultivariateGaussian(Gaussian):
    '''
    Multivariate Gaussian distribution for anomaly detection.

    Computes the probability density of examples under a multivariate
    normal parameterised by the fitted mean vector and covariance matrix.
    '''

    def __init__(self, epsilon=None):
        Gaussian.__init__(self, epsilon)

    def fit(self, X, X_val=None, y_val=None):
        # Validation data is coerced with np.array even when absent,
        # matching the historical behaviour of this class.
        self.X_val = np.array(X_val)
        self.y_val = np.array(y_val)
        samples = np.array(X)
        n_samples = samples.shape[0]
        self.mu = samples.sum(axis=0) / n_samples
        centered = samples - self.mu
        # Maximum-likelihood covariance estimate (divides by N, not N-1).
        self.sigma2 = centered.T.dot(centered) / n_samples

    def predict_probability(self, X):
        """Density of each row of X under N(mu, sigma2)."""
        centered = X - self.mu
        n_features = centered.shape[1]
        norm_const = 1 / ((2 * np.pi) ** (n_features / 2) * np.linalg.det(self.sigma2) ** 0.5)
        # Quadratic form (x-mu)^T Sigma^-1 (x-mu), one value per sample;
        # pinv keeps this defined for singular covariance matrices.
        mahalanobis = np.sum(centered * centered.dot(np.linalg.pinv(self.sigma2)), axis=1)
        return norm_const * np.exp(-0.5 * mahalanobis)
| [
"yonasgebeyaw1990@gmail.com"
] | yonasgebeyaw1990@gmail.com |
2d322f049fa8f8f91dfb80709a634df823f3de47 | 26f6313772161851b3b28b32a4f8d255499b3974 | /Python/RelativeRanks.py | d749965e66f0cfbeff330cd167f3bbe034cf128d | [] | no_license | here0009/LeetCode | 693e634a3096d929e5c842c5c5b989fa388e0fcd | f96a2273c6831a8035e1adacfa452f73c599ae16 | refs/heads/master | 2023-06-30T19:07:23.645941 | 2021-07-31T03:38:51 | 2021-07-31T03:38:51 | 266,287,834 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,254 | py | """
Given scores of N athletes, find their relative ranks and the people with the top three highest scores, who will be awarded medals: "Gold Medal", "Silver Medal" and "Bronze Medal".
Example 1:
Input: [5, 4, 3, 2, 1]
Output: ["Gold Medal", "Silver Medal", "Bronze Medal", "4", "5"]
Explanation: The first three athletes got the top three highest scores, so they got "Gold Medal", "Silver Medal" and "Bronze Medal".
For the left two athletes, you just need to output their relative ranks according to their scores.
Note:
N is a positive integer and won't exceed 10,000.
All the scores of athletes are guaranteed to be unique.
"""
class Solution:
    def findRelativeRanks(self, scores):
        """
        :type nums: List[int]
        :rtype: List[str]
        """
        # Labels for the top three scores; everyone else gets their
        # 1-based rank as a string. Scores are guaranteed unique.
        top_labels = ("Gold Medal", "Silver Medal", "Bronze Medal")
        rank_of = {}
        for position, value in enumerate(sorted(scores, reverse=True)):
            rank_of[value] = top_labels[position] if position <= 2 else str(position + 1)
        # Map each athlete's score back to its rank label, in input order.
        return [rank_of[value] for value in scores]
# Quick manual check: expect the three medals followed by "4" and "5".
s = Solution()
scores = [5, 4, 3, 2, 1]
print(s.findRelativeRanks(scores))
"here0009@163.com"
] | here0009@163.com |
a5d7d1e55d35d5dc95e02c6e501613df521f4fb6 | 3873b03ac81354d4ed24e94df5fa8429e726bbd2 | /titles/101. 对称二叉树.py | 9236ac01422c19b4ad9190406e21e379f2b5f6e8 | [] | no_license | lichangg/myleet | 27032f115597481b6c0f3bbe3b83e80b34c76365 | 3d5a96d896ede3ea979783b8053487fe44e38969 | refs/heads/master | 2023-03-21T15:50:14.128422 | 2021-03-16T09:58:07 | 2021-03-16T09:58:07 | 286,616,721 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,428 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from utils.util_funcs import Tree
# 这个测试用例[1,2,2,None,3,None,3]过不了,本地能过
#
class Solution:
    # Kept for backward compatibility with code that read Solution.FLAG.
    # FIX: the old implementation mutated this *class* attribute, so one
    # failed check permanently poisoned every later call on any instance.
    FLAG = True

    def isSymmetric(self, root) -> bool:
        """Return True iff the tree is a mirror of itself around its root.

        Level-order traversal that keeps None children as placeholders,
        so node *positions* are compared — this fixes inputs such as
        [1,2,2,None,3,None,3], which the old version answered wrongly
        because it stopped as soon as any slot in a level was None.
        """
        if root is None:
            return True
        level = [root.left, root.right]
        while any(node is not None for node in level):
            # A level (with None placeholders) must read as a palindrome.
            values = [node.val if node is not None else None for node in level]
            if values != values[::-1]:
                return False
            next_level = []
            for node in level:
                if node is not None:
                    next_level.append(node.left)
                    next_level.append(node.right)
            level = next_level
        return True
# 二刷,层序遍历
class Solution:
    def isSymmetric(self, root) -> bool:
        """Breadth-first mirror check: each level, with None placeholders
        for missing children, must read the same forwards and backwards."""
        level = [root]
        while level:
            values = []
            children = []
            for node in level:
                if not node:
                    values.append(None)
                    continue
                values.append(node.val)
                children.append(node.left)
                children.append(node.right)
            if values != list(reversed(values)):
                return False
            level = children
        return True
# Manual check against the known tricky case; builds the tree with the
# project helper and prints the (expected False) result.
t=Tree()
[t.add(i)for i in [1,2,2,None,3,None,3]]
a=Solution().isSymmetric(t.root)
print(a)
"lcg@ichunt.com"
] | lcg@ichunt.com |
4a80e21bb002a0991086c9f39aecff2089d6f44a | 7bf802a429d48f8baa717eb3e5667dabb1322cfb | /MI/Backstage/migrations/0001_initial.py | 863e0cebdf182f467d9b7310ee6b984173f137fb | [
"Apache-2.0"
] | permissive | ITgaoS/MI | 89476d0bcb620445982f13bc54a0e2d80c44141e | d35d53fa2c18ae0f5764f0568dbb01bca35050f3 | refs/heads/master | 2020-09-05T03:57:34.226630 | 2020-01-03T07:12:25 | 2020-01-03T07:12:25 | 219,975,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,360 | py | # Generated by Django 2.1.8 on 2019-11-06 23:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates Commodity and
    # CommodityType, then links Commodity.type to CommodityType.
    # Generated code — avoid hand-editing the operations.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Commodity',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=32)),
                ('price', models.FloatField()),
                ('color', models.CharField(max_length=32)),
                ('specification', models.TextField()),
                ('version', models.CharField(max_length=32)),
            ],
        ),
        migrations.CreateModel(
            name='CommodityType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(max_length=32)),
                ('picture', models.ImageField(default='backstage/images/11.jpeg', upload_to='backstage/images')),
            ],
        ),
        migrations.AddField(
            model_name='commodity',
            name='type',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Backstage.CommodityType'),
        ),
    ]
| [
"603128825@qq.com.com"
] | 603128825@qq.com.com |
2a79d20a208ebc9d6169732dc02a90889ef23ecb | a711f0bfb815bf5d991bf542675c59b4a343fe5d | /books_shop/asgi.py | 2d88c9966a2d7a9c3e491d00786f9378c7c4785b | [] | no_license | Bogdankozlovskiy/python2 | 9375b3bbfd96afe0ee577b5c708dae8b17756199 | 8c1797f8d46b76e53ac2d96c4ef08baa93da4666 | refs/heads/master | 2023-06-05T07:02:00.931751 | 2021-06-26T07:20:20 | 2021-06-26T07:20:20 | 368,925,281 | 1 | 1 | null | 2021-06-12T10:16:35 | 2021-05-19T15:58:07 | Python | UTF-8 | Python | false | false | 397 | py | """
ASGI config for books_shop project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings before building the ASGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'books_shop.settings')
application = get_asgi_application()
| [
"bogdan@mail.ru"
] | bogdan@mail.ru |
2d53d171c294de09a46bc84af32cc4b446dbd71f | 141bb6f52b864ca5ccc3b0d68e4f647d3417ff6a | /First/products/admin.py | bd6b71fd1dcd6692b668c6f7a0b1c020eed36f22 | [] | no_license | chintamanip99/EcommDjangoProject | 6b5830bd99376419bb72bc648503244fbfe04e8b | aea4cd9b7978032e653938d082ccd38d2c268250 | refs/heads/master | 2023-08-14T08:36:01.143313 | 2020-07-24T11:34:20 | 2020-07-24T11:34:20 | 282,202,034 | 0 | 0 | null | 2021-09-22T19:29:26 | 2020-07-24T11:27:17 | JavaScript | UTF-8 | Python | false | false | 855 | py | from django.contrib import admin
# Register your models here.
from products.models import Product,Category
from profiles.models import SellerProfile
class ProductModelAdmin(admin.ModelAdmin):
    # Admin configuration for Product: columns shown, filters, search,
    # and inline-editable fields in the changelist.
    list_display=[
        'seller',
        'id',
        'title',
        'summary',
        'price',
        'new',
        'image',
        'items_available'
    ]
    list_display_links=[
        'id'
        # 'price',
    ]
    list_filter=[
        'price',
        'new'
    ]
    search_fields=[
        'title',
        'price'
    ]
    list_editable=[
        'price','items_available','summary','title'
    ]
    # NOTE(review): ModelAdmin does not use an inner Meta class — this
    # block appears to have no effect; confirm and consider removing.
    class Meta:
        model=Product

    def get_queryset(self,request):
        # Sellers only see their own products; superusers see everything.
        # NOTE(review): loads every SellerProfile to test membership, and
        # returns None when the user is neither seller nor superuser —
        # the admin changelist would then fail; confirm this is intended.
        if(request.user in [i.user for i in SellerProfile.objects.all()]):
            queryset=self.model.objects.filter(seller=request.user)
            return queryset
        if(request.user.is_superuser):
            return Product.objects.all()
admin.site.register(Category)
admin.site.register(Product,ProductModelAdmin)
| [
"chintamanip99@gmail.com"
] | chintamanip99@gmail.com |
fadaa9ffe0d877405c6c4d80ddc02990514fde2d | 401ffbd116ca7fcb1288609b1b00cd0faa702cff | /setup.py | 3067c2147d74e53baaca7e5dfd40c69c4f1acc90 | [
"MIT"
] | permissive | gabrielcoutod/TruthTable | 893fc88bfc78fe0aea19a7785b38d834fbc6325d | 146f2926c0586654ceb788867857bbc010e42634 | refs/heads/master | 2023-06-14T03:34:18.516799 | 2021-07-08T20:24:22 | 2021-07-08T20:24:22 | 284,828,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | from setuptools import setup
# Package metadata for TruthTable; `tabulate` is the only runtime dependency.
# NOTE(review): no `version` or `packages` arguments are declared — confirm
# whether this is intentional for this project.
setup(name="TruthTable",
      author="Gabriel Couto Domingues",
      license="MIT",
      install_requires=[
          "tabulate",
      ],
      zip_safe=False)
| [
"50423926+gabrielcoutod@users.noreply.github.com"
] | 50423926+gabrielcoutod@users.noreply.github.com |
d7cf184777adc0f7980c16fcc2f6eabb750712be | 6c14069181f313e84eeb524dd495e3882156ef50 | /samples/basic/crud/models/cisco-ios-xr/Cisco-IOS-XR-mpls-ldp-oper/nc-read-xr-mpls-ldp-oper-10-ydk.py | 1e872ab64c188f2aa654c40dc1d1d96d25dd113a | [
"Apache-2.0"
] | permissive | decolnz/ydk-py-samples | dde0fd64fd4df12a215588766a0f1fb8baf07fcd | 7fa3f53c4d458c3332d372fb2fe3c46c5e036f07 | refs/heads/master | 2021-01-19T03:24:19.877929 | 2017-04-04T17:16:46 | 2017-04-04T17:16:46 | 87,310,389 | 1 | 0 | null | 2017-04-05T13:06:57 | 2017-04-05T13:06:57 | null | UTF-8 | Python | false | false | 2,714 | py | #!/usr/bin/env python
#
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Read all data for model Cisco-IOS-XR-mpls-ldp-oper.
usage: nc-read-xr-mpls-ldp-oper-10-ydk.py [-h] [-v] device
positional arguments:
device NETCONF device (ssh://user:password@host:port)
optional arguments:
-h, --help show this help message and exit
-v, --verbose print debugging messages
"""
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_mpls_ldp_oper \
as xr_mpls_ldp_oper
import logging
def process_mpls_ldp(mpls_ldp):
    """Process data in mpls_ldp object."""
    # Intentionally a stub: this vendor sample leaves the actual
    # processing of the retrieved operational data to the user.
    pass
if __name__ == "__main__":
    """Execute main program."""
    parser = ArgumentParser()
    parser.add_argument("-v", "--verbose", help="print debugging messages",
                        action="store_true")
    parser.add_argument("device",
                        help="NETCONF device (ssh://user:password@host:port)")
    args = parser.parse_args()
    device = urlparse(args.device)

    # log debug messages if verbose argument specified
    if args.verbose:
        logger = logging.getLogger("ydk")
        logger.setLevel(logging.DEBUG)
        handler = logging.StreamHandler()
        formatter = logging.Formatter(("%(asctime)s - %(name)s - "
                                      "%(levelname)s - %(message)s"))
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # create NETCONF provider
    provider = NetconfServiceProvider(address=device.hostname,
                                      port=device.port,
                                      username=device.username,
                                      password=device.password,
                                      protocol=device.scheme)
    # create CRUD service
    crud = CRUDService()

    mpls_ldp = xr_mpls_ldp_oper.MplsLdp()  # create object

    # read data from NETCONF device
    # NOTE: the actual device read is shipped commented out in this sample,
    # so process_mpls_ldp receives an empty model object by default.
    # mpls_ldp = crud.read(provider, mpls_ldp)
    process_mpls_ldp(mpls_ldp)  # process object data

    provider.close()
    exit()
# End of script
| [
"saalvare@cisco.com"
] | saalvare@cisco.com |
a406e2a620162230e1e7cc273a3998b61cf94a92 | 74926d4145b9cd91bd040a7887d6baef838865d3 | /autoencoder/metrics.py | 3ad28ecd5bf3da0615cf6a06bd1ac56acd7e6403 | [
"MIT"
] | permissive | Elaine0/Anomaly-Detection | 3837b602c6c8ba12fb2df7170292ebded893bbe0 | 45ab34235fd865006292a6645bbf2fc8bed9e959 | refs/heads/master | 2023-06-16T16:27:12.675954 | 2021-07-13T09:06:19 | 2021-07-13T09:06:19 | 282,931,372 | 0 | 0 | null | 2020-07-27T15:02:17 | 2020-07-27T15:02:17 | null | UTF-8 | Python | false | false | 455 | py | import tensorflow as tf
from tensorflow import keras
import keras.backend as K
def ssim_metric(dynamic_range):
    """Build a Keras metric returning the batch-mean SSIM for images
    whose pixel values span the given dynamic range."""
    def ssim(y_true, y_pred):
        scores = tf.image.ssim(y_true, y_pred, dynamic_range)
        return K.mean(scores, axis=-1)
    return ssim
def mssim_metric(dynamic_range):
def mssim(imgs_true, imgs_pred):
return K.mean(
tf.image.ssim_multiscale(imgs_true, imgs_pred, dynamic_range), axis=-1
)
return mssim
| [
"google-dl-platform@googlegroups.com"
] | google-dl-platform@googlegroups.com |
612b38333a8e5eeb879809135ecae672302314f9 | af363dc8d95289b8d2a1edeeb91980507ce4bc00 | /override_switch.py | 1e366cb2db287ce248774893d28aa5a1a2177949 | [] | no_license | d-EScape/Domoticz_iDetect | ef554a4801f8dd1910a73a66b11a48788debd525 | 200c6d444023c7acec469fdc6627fb7fbc2e9fa0 | refs/heads/master | 2023-04-11T04:33:04.540888 | 2023-03-31T19:03:12 | 2023-03-31T19:03:12 | 132,782,890 | 27 | 15 | null | 2023-03-20T16:56:50 | 2018-05-09T16:15:06 | Python | UTF-8 | Python | false | false | 1,076 | py | import helpers.data_helper as data_helper
from datetime import datetime
class override_switch():
    """A manual presence-override with an expiry policy.

    mode is one of: "No" (overriding disallowed), "Next" (cleared as soon
    as presence is detected), "Forever" (never expires), or a number of
    hours the override should stay active.
    """

    def __init__(self, mode):
        self.start_time = datetime.now()
        self.active = False
        self.allow = True
        self.indefinitely = False
        self.reset_on_presence = True
        self.duration = None
        if mode == "No":
            self.allow = False
        elif mode == "Next":
            # Matches the defaults; kept explicit for readability.
            self.reset_on_presence = True
            self.indefinitely = False
        elif mode == "Forever":
            self.indefinitely = True
        else:
            # Numeric mode: hours, stored as seconds.
            self.duration = int(mode) * 3600
            self.reset_on_presence = False

    def set_active(self):
        """Start the override; returns False when overriding is disallowed."""
        if not self.allow:
            return False
        self.start_time = datetime.now()
        self.active = True
        return True

    def set_inactive(self):
        """Cancel the override."""
        self.active = False

    def has_expired(self, anyone_home=False):
        """Return True when an active override should now be cleared."""
        if not self.active or self.indefinitely:
            return False
        if self.reset_on_presence and anyone_home:
            return True
        if self.duration is not None:
            if data_helper.time_since_last(self.start_time) > self.duration:
                return True
        return False
"noreply@github.com"
] | d-EScape.noreply@github.com |
c5b06264124b13023d9bd904c223f6a20f2da8ab | d49cfe38764aa35992ba5cf65655a6a45d9487c8 | /旋转图像.py | 1660cfd80a5903f2b51d956378ca51aa57d5f90a | [] | no_license | getabear/leetcode | fc0797f664ab4052aa2635341f4bbe40b74ec2b8 | 4af6608166f2e4cdfcfb0bbb92133b4a0f90ea34 | refs/heads/master | 2021-07-15T11:05:23.049235 | 2020-11-15T12:57:14 | 2020-11-15T12:57:14 | 224,601,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | from typing import List
class Solution:
    def rotate(self, matrix: List[List[int]]) -> None:
        """Rotate the square matrix 90 degrees clockwise, in place."""
        if not matrix:
            return
        # Clockwise rotation == reverse the row order, then transpose.
        matrix[:] = [list(column) for column in zip(*matrix[::-1])]
        return
# Quick manual check: rotates the 3x3 sample matrix in place.
a=Solution()
matrix=[[1,2,3],[4,5,6],[7,8,9]]
a.rotate(matrix)
"1874178998@qq.com"
] | 1874178998@qq.com |
b9cab699677f8504464d445ef66d36c5a040e876 | 83240f3388f9257aea9cf953887bffe953ce5261 | /Deployment Files/WeatherWear/Combos/urls.py | be39736a28e6f3692c70af96ddc02dcc536cc696 | [
"MIT"
] | permissive | PhilbertLou/ClothingForecast | d99848cfd4969f07ae88e4512926bb8a93fbe9da | 7d419970dbc89b109d13b27ddbd29ce2a0ab7d9a | refs/heads/master | 2023-02-14T20:17:35.262922 | 2021-01-10T17:01:54 | 2021-01-10T17:01:54 | 277,201,290 | 3 | 1 | null | 2021-01-07T18:42:20 | 2020-07-04T23:39:56 | Python | UTF-8 | Python | false | false | 490 | py | from django.urls import path
from . import views
# URL routes for the Combos app.  The catch-all <str:username> route must
# stay last so the fixed paths above keep taking precedence.
urlpatterns = [
    path("", views.index, name="index"),
    path("remove", views.remove, name="remove"),
    path("getbackup", views.getbackup, name="getbackup"),
    path("trainnew", views.trainnew, name="trainnew"),
    path("howtouse", views.howtouse, name="howtouse"),
    path("clothes", views.clothes, name="clothes"),
    path("loading", views.loading, name="loading"),
    path("<str:username>", views.index, name="usercombo"),
]
| [
"LouPInnovations@users.noreply.github.com"
] | LouPInnovations@users.noreply.github.com |
a0183552d2c2238c81eaf547ec1d7907d11abf96 | 1a3d5e722caf70547246143492a827e468a0cf91 | /Pruebas/setJson.py | f694eec1227ddf02c44c53ae538f6e91a9ab9bc1 | [
"Apache-2.0"
] | permissive | jezlo/WLSTWeblogic | 029f9a527dbf5f9f738f7cdc5fed6078253fa1bf | 13c6be6a5d118b4167fd5a5490fa23bb9cecc92c | refs/heads/master | 2023-01-01T16:27:47.887254 | 2020-10-09T13:56:51 | 2020-10-09T13:56:51 | 294,572,184 | 0 | 0 | Apache-2.0 | 2020-09-21T15:50:58 | 2020-09-11T02:21:24 | Python | UTF-8 | Python | false | false | 261 | py | import json
# Build a small JSON document describing per-data-source settings and write
# it to archivJson.txt.
dsName = 'SoaDS'
max = 100  # NOTE(review): shadows the builtin max(); name kept for compatibility
ds = {}
ds[dsName] = []
data = {}
data['dataSources'] = []
# BUGFIX: the original nested the ds[dsName].append(...) call inside a set
# literal, which appended {None} (list.append returns None) and made the
# document unserializable by json.dump.  Build the structure explicitly.
ds[dsName].append({
    'max': max
})
data['dataSources'].append(ds)
with open('archivJson.txt','w') as outfile:
    json.dump(data, outfile, indent=4)
"70851273+jezlo@users.noreply.github.com"
] | 70851273+jezlo@users.noreply.github.com |
289513a80d6bfd990eb6d718ac68af324e5f8126 | 556c734c65b2e84b2b4e276c70f195f970a37749 | /test/functional/interface_bitcoin_cli.py | 8ba9344de53286c8224e692ab8a7c6876fe9c3cf | [
"MIT"
] | permissive | cryptococo82/StreamitCoin | 49547a6a5f41b71ae4686a51e543adf819d392be | 3f43d820c319e8204ed5c19e37fe829c377d1000 | refs/heads/master | 2021-02-18T07:16:03.882842 | 2019-09-06T09:13:22 | 2019-09-06T09:13:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,910 | py | #!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test streamitcoin-cli"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_process_error, get_auth_cookie
import time
class TestBitcoinCli(BitcoinTestFramework):
    """Compare streamitcoin-cli output against the equivalent RPC responses."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def run_test(self):
        """Main test logic"""
        self.log.info("Sleeping 30 seconds...")
        time.sleep(30)

        # Typo fixed in the log message below ("gewalletinfo" -> "getwalletinfo").
        self.log.info("Compare responses from getwalletinfo RPC and `streamitcoin-cli getwalletinfo`")
        cli_response = self.nodes[0].cli.getwalletinfo()
        rpc_response = self.nodes[0].getwalletinfo()
        assert_equal(cli_response, rpc_response)

        self.log.info("Compare responses from getblockchaininfo RPC and `streamitcoin-cli getblockchaininfo`")
        cli_response = self.nodes[0].cli.getblockchaininfo()
        rpc_response = self.nodes[0].getblockchaininfo()
        assert_equal(cli_response, rpc_response)

        # Read the auth cookie so a missing/corrupt cookie fails loudly here
        # (the values themselves are not needed below).
        user, password = get_auth_cookie(self.nodes[0].datadir)

        self.log.info("Compare responses from `streamitcoin-cli -getinfo` and the RPCs data is retrieved from.")
        cli_get_info = self.nodes[0].cli('getinfo').send_cli()
        wallet_info = self.nodes[0].getwalletinfo()
        network_info = self.nodes[0].getnetworkinfo()
        blockchain_info = self.nodes[0].getblockchaininfo()

        assert_equal(cli_get_info['version'], network_info['version'])
        assert_equal(cli_get_info['protocolversion'], network_info['protocolversion'])
        assert_equal(cli_get_info['walletversion'], wallet_info['walletversion'])
        # (The original asserted 'balance' twice; the duplicate was removed.)
        assert_equal(cli_get_info['balance'], wallet_info['balance'])
        assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
        assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
        assert_equal(cli_get_info['connections'], network_info['connections'])
        assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
        assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
        assert_equal(cli_get_info['testnet'], blockchain_info['chain'] == "test")
        assert_equal(cli_get_info['keypoololdest'], wallet_info['keypoololdest'])
        assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
        assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
        assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
        # unlocked_until is not tested because the wallet is not encrypted
if __name__ == '__main__':
    # Run the functional test directly (framework handles setup/teardown).
    TestBitcoinCli().main()
| [
"root@U18.local"
] | root@U18.local |
a75b1a8ccba08575ad724e87e9037d56ca43cf47 | e5f063b7198f301d6632f67a8efbe0eb262aef67 | /27/app.py | 39b56fb34a6e7a437a07e3d122ed632292241e41 | [] | no_license | ldh243/leetcode | 61eb2a219b23afff26f6c634183efd73029e863b | d297a8117c5e6dcb3f83bee0b9bdf67ecae14c3c | refs/heads/master | 2021-01-02T21:18:31.710207 | 2020-02-26T09:32:32 | 2020-02-26T09:32:32 | 239,806,044 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 212 | py | class Solution:
def removeElement(self, nums: List[int], val: int) -> int:
for i in range(len(nums)-1, -1, -1):
if nums[i] == val:
del nums[i]
return len(nums) | [
"ldh243"
] | ldh243 |
e97231aa59386188f10462edf9ebb223d62915b0 | 7d99c16d3222dd09d2358dac17d693deb7ed8dfd | /mwk_converters/mwk_to_sqlite3.py | a662445947c9bf94dfd56abd7f356d3172ba54b1 | [] | no_license | afcarl/mw_data_analysis_helpers | 55c287daa06ef398e25ee9a8ecb290fc1f58c4dc | 88e8eaae3b26f2ce7c482585414340c8e59f6ed2 | refs/heads/master | 2020-03-17T14:56:10.483526 | 2011-07-26T17:50:01 | 2011-07-26T17:50:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,031 | py | #!/usr/bin/env python
import logging, os, sys
import sqlite3
import json
#import mwk
import mworks.data as mwk
def mwk_to_sqlite3(inFile, outFile, blacklist=None):
    """Convert an MWorks .mwk event file into a sqlite3 database.

    Creates two tables in *outFile*:
      * codec  (code int, name text)            -- event-code -> name mapping
      * events (code int, time int, value text) -- one row per event, value
        serialized as JSON

    Parameters
    ----------
    inFile : str
        Path of the source .mwk file.
    outFile : str
        Path of the sqlite3 database to create (tables must not exist yet).
    blacklist : iterable of str, optional
        Event *names* (looked up through the codec) to skip.  Defaults to
        skipping nothing.  (A None sentinel replaces the original mutable
        default argument.)
    """
    if blacklist is None:
        blacklist = []
    skip_names = set(blacklist)  # O(1) membership test in the event loop

    m = mwk.MWKFile(inFile)
    m.open()
    # The first four codes are system events whose names are missing in the
    # raw codec; patch them to their canonical names.
    codec = m.codec
    codec[0], codec[1], codec[2], codec[3] = ('#codec', '#systemEvent', '#components', '#termination')
    evs = m.get_events()
    # open sqlite3 database
    logging.debug("opening sqlite3 database: %s" % outFile)
    conn = sqlite3.connect(outFile)
    c = conn.cursor()
    c.execute('''create table events
                (code int, time int, value text)''')
    c.execute('''create table codec
                (code int, name text)''')
    # add codec to database
    # (.items() works on both Python 2 and 3; the original .iteritems() is
    # Python-2-only.  The unused revCodec reverse mapping was removed.)
    for (k, v) in codec.items():
        c.execute('''insert into codec values (?,?)''', (k, v))
    # add events to database
    logging.debug("adding events to db")
    for e in evs:
        if codec[e.code] in skip_names:
            continue
        c.execute('''insert into events
                    values(?,?,?)''', (e.code, e.time, json.dumps(e.value)))
    logging.debug("cleaning up")
    # close database connection
    conn.commit()
    c.close()
    # close mworks file
    m.close()
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # System events that should not be copied into the events table.
    eventsBlacklist = ['#announceCurrentState','#codec', '#systemEvent', '#components', '#termination'] # not implemented
    # parse command line arguments
    logging.debug("Parsing command line arguments")
    if len(sys.argv) == 3:
        inFile = sys.argv[1]
        outFile = sys.argv[2]
    elif len(sys.argv) == 2:
        inFile = sys.argv[1]
        # default output name: input basename with a .sqlite3 extension
        outFile = '%s.sqlite3' % os.path.splitext(os.path.basename(inFile))[0]
    else:
        # Parenthesized so this line is valid on both Python 2 and Python 3
        # (the original bare print statement is a SyntaxError on Python 3).
        print("Usage: %s input_mwk_file (output_sqlite3_file)" % __file__)
        sys.exit(1)
    # open up and read mwks file
    logging.debug("opening and reading mwks file: %s" % inFile)
    mwk_to_sqlite3(inFile, outFile, eventsBlacklist)
    # exit nicely
sys.exit(0) | [
"graham@rowland.harvard.edu"
] | graham@rowland.harvard.edu |
40847418a13cc8b05a63cc63c66a8ab1c809abb4 | 95c96243c5a956b1b87a6a0bb76f2f04bcb36111 | /submission2/model.py | 8dff76a74a2b85980adeeda977eed30855328afa | [
"MIT"
] | permissive | carthach/mediaeval-2019-moodtheme-detection | a6127dffa617ae3bba054b05e56b5db1feedadc9 | 4356ea159a2df50230b3feacdadfefe061463aa8 | refs/heads/master | 2022-12-22T16:07:40.593388 | 2020-09-04T14:24:23 | 2020-09-04T14:24:23 | 292,867,267 | 0 | 0 | MIT | 2020-09-04T14:23:22 | 2020-09-04T14:23:22 | null | UTF-8 | Python | false | false | 1,772 | py | import torch
import torch.nn as nn
import torchvision
from self_attention import AttentionModule
NUM_CLASSES=56  # number of mood/theme tags predicted by both heads
HIDDEN_SIZE = 256  # embedding width produced by the CNN backbone
class MusicSelfAttModel(nn.Module):
    """Two-headed tagger: a self-attention head and an averaging classifier
    head, both fed by a shared MobileNetV2 feature extractor."""
    def __init__(self):
        super(MusicSelfAttModel, self).__init__()
        # Shared CNN backbone; moved to GPU at construction time.
        self.mirex = MobileNetV2(56).cuda()
        # Attention head: self-attention over the 16 chunk embeddings.
        self.att_model = nn.Sequential(
            AttentionModule(),
            nn.Dropout(0.2),
            nn.Linear(HIDDEN_SIZE, NUM_CLASSES),
            nn.Sigmoid()
        )
        # Classifier head applied per chunk; forward() averages its scores.
        self.classifier = nn.Sequential(
            nn.Dropout(0.2),
            nn.Linear(256,256),
            nn.Dropout(0.2),
            nn.Linear(256, NUM_CLASSES),
            nn.Sigmoid())
    def forward(self,x):
        # Reshape to (batch, 96, 16, 256): presumably a 96-bin spectrogram
        # split into 16 time chunks of width 256 -- TODO confirm with caller.
        x = x.view(-1, 96, 16, 256) # 16*256=4096 input
        x = x.permute(0,2,1,3)
        # Fold the 16 chunks into the batch dimension: (batch*16, 96, 256).
        x = x.contiguous().view(-1,96,256)
        x = x.unsqueeze(1)  # add a channel dimension for the CNN
        x = self.mirex(x)   # -> (batch*16, 256) chunk embeddings
        # Attention head sees the chunk sequence per clip.
        att = x.view(-1, 16, 256)
        att = self.att_model(att)
        # Classifier head scores each chunk, then averages over the 16 chunks.
        clf = x.view(-1,256)
        clf = self.classifier(clf)
        clf = clf.view(-1,16,56)
        clf = clf.mean(dim=1)
        return att,clf
class MobileNetV2(nn.Module):
    """ImageNet-pretrained MobileNetV2 adapted to 1-channel input, producing
    a 256-d embedding per input."""
    def __init__(self, num_classes):
        # NOTE(review): num_classes is currently unused -- the head always
        # outputs 256 features; confirm before relying on the parameter.
        super().__init__()
        # Learned 1->3 channel expansion so the pretrained RGB weights apply.
        self.bw2col = nn.Sequential(
            nn.BatchNorm2d(1),
            nn.Conv2d(1, 10, 1, padding=0), nn.ReLU(),
            nn.Conv2d(10, 3, 1, padding=0), nn.ReLU())
        self.mv2 = torchvision.models.mobilenet_v2(pretrained=True)
        # Reduce the 1280-channel MobileNetV2 feature map to 256 channels.
        self.out_conv = nn.Sequential(
            nn.Conv2d(1280, 512, 3), nn.ReLU(),
            nn.Conv2d(512, 256, 1), nn.ReLU())
    def forward(self, x):
        x = self.bw2col(x)
        x = self.mv2.features(x)
        x = self.out_conv(x)
        # Global max-pool over both spatial dims -> (batch, 256).
        x = x.max(dim=-1)[0].max(dim=-1)[0]
return x | [
"manoj.sukhavasi1@gmail.com"
] | manoj.sukhavasi1@gmail.com |
19334f96e7f619caf8706d98468a4b1aa53b2084 | 00533734c03664f2c7949b10a7eef9278f213520 | /Drill-11/brick.py | 3421735f43c5c39eb722394cdd22753bc5dda36f | [] | no_license | jjaing01/2019_2DGP_Class | 4ab02aa823d4828a4a439389965aa4f91ba0cb30 | 2d25fcc3c753b868c59661f5b54d1c9768f080e3 | refs/heads/master | 2022-03-13T15:45:09.361849 | 2019-12-02T09:56:29 | 2019-12-02T09:56:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 778 | py | import random
from pico2d import *
import game_world
import game_framework
class Brick:
    """Horizontally patrolling brick obstacle bouncing between x=0 and x=1600."""
    image = None  # class-level sprite shared by all bricks, loaded lazily

    def __init__(self):
        if Brick.image is None:
            Brick.image = load_image('brick180x40.png')
        self.x, self.y = 1600 // 2, 200
        self.speed = 200  # horizontal speed, pixels per second
        self.dir = 1      # +1 = moving right, -1 = moving left

    def get_bb(self):
        # Axis-aligned bounding box (left, bottom, right, top) of the 180x40 sprite.
        return self.x - 90, self.y - 20, self.x + 90, self.y + 20

    def draw(self):
        self.image.draw(self.x, self.y, 180, 40)
        draw_rectangle(*self.get_bb())

    def update(self):
        self.x += self.speed * game_framework.frame_time * self.dir
        # BUGFIX: pico2d's clamp() returns the bounded value; the original
        # discarded the result, so the brick could drift off-screen before
        # the direction flip below took effect.
        self.x = clamp(0, self.x, 1600)
        if self.x < 0:
            self.dir = 1
        elif self.x >= 1600:
            self.dir = -1
"jjaing01@naver.com"
] | jjaing01@naver.com |
30d71a0f811024388f46fa160a7fb991a7429ec3 | 76e9afdf16eabcc9e1a3facd308e56362112efc4 | /20210222_ls/auto_chmod.py | 84ae38d02246d5ec3b8c66e17a9fcebb764dc397 | [] | no_license | rerejii/pwb_work_2021 | c65c5e787ad98b7d847cb63ebadc24a02f001e90 | 8ecfb2a98d9d396ed505ecc939e384cf6400412d | refs/heads/main | 2023-03-30T10:43:18.115386 | 2021-03-24T05:38:41 | 2021-03-24T05:38:41 | 350,954,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | import time
import sys
import os
# args = sys.argv
# Directory whose permissions are continuously enforced.
path = '/nas-homes/krlabmember/hayakawa/binary/20210115'
# Re-apply rwxr-xr-x every 10 seconds so the shared folder stays accessible.
while True:
    os.chmod(path, 0o755)
    time.sleep(10)
"hayakawa.shinya.kochi@gmail.com"
] | hayakawa.shinya.kochi@gmail.com |
eacbc6324159f2ece21dd843e590d734abac44c0 | 78886c8b581eaa7d9e2ed860e0b56d58e5b9f961 | /try/trash.py | 219e53761f169ea97822d43268961e8fecbb397c | [] | no_license | glenrendes/spplot | 79c605ea7412bb6cba69105b5cfc2570fbf2bd5e | d8bf73dcda73e7516589aea0a7b4db8c7470077a | refs/heads/master | 2022-02-16T17:41:47.627665 | 2019-08-13T20:23:38 | 2019-08-13T20:23:38 | 200,277,916 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 943 | py |
def addScrollingFigure(figure, frame):
global canvas, mplCanvas, interior, interior_id, cwid
# set up a canvas with scrollbars
canvas = Canvas(frame)
canvas.grid(row=1, column=1, sticky=Tkconstants.NSEW)
xScrollbar = Scrollbar(frame, orient=Tkconstants.HORIZONTAL)
yScrollbar = Scrollbar(frame)
xScrollbar.grid(row=2, column=1, sticky=Tkconstants.EW)
yScrollbar.grid(row=1, column=2, sticky=Tkconstants.NS)
canvas.config(xscrollcommand=xScrollbar.set)
xScrollbar.config(command=canvas.xview)
canvas.config(yscrollcommand=yScrollbar.set)
yScrollbar.config(command=canvas.yview)
# plug in the figure
figAgg = FigureCanvasTkAgg(figure, canvas)
mplCanvas = figAgg.get_tk_widget()
# and connect figure with scrolling region
cwid = canvas.create_window(0, 0, window=mplCanvas, anchor=Tkconstants.NW)
printBboxes("Init")
changeSize(figure, 1)
| [
"glenda@reasoningtechnology.com"
] | glenda@reasoningtechnology.com |
0be0809de6687f820c4485f1fe5237f50ae76e8d | 84fbd767f34a95f600f813795c2fd3d827371a78 | /12-7 log_generator.py | c58a7d691a2030fc1fca6dcabdc2ee044a712ed7 | [] | no_license | Orisun/Industrial-machine-learning | 1f2492f24a46ddd9c7b5eca192a83ced3c6a3b4a | 06e0d480dc89e8b4d1f45fe81567910309bdf744 | refs/heads/master | 2023-07-13T04:02:44.778913 | 2021-08-20T02:21:49 | 2021-08-20T02:21:49 | 295,440,169 | 28 | 11 | null | null | null | null | UTF-8 | Python | false | false | 4,349 | py | from rpc_pb2 import Request, Feature
from log import Show, Click
from log_sender import LOG_DELIMITER
from bytebuffer import ByteBuffer
from log_collector import MAX_UDP_DATA_LEN
import simplejson
from datetime import datetime
# Base log paths; the actual files on disk carry a ".YYYYMMDD" date suffix.
REQUEST_LOG_FILE = "/path/to/request.log"
FEATURE_LOG_FILE = "/path/to/feature.log"
SHOW_LOG_FILE = "/path/to/show.log"
CLICK_LOG_FILE = "/path/to/click.log"
def byte_log_generator(log_file):
    """Yield raw binary records from today's dated log file.

    The file on disk is ``<log_file>.YYYYMMDD`` and contains records joined
    by LOG_DELIMITER.  The file is scanned through a fixed-size ByteBuffer;
    each yielded value is one record's bytes with the delimiter stripped.
    (Python 2 code: note the use of xrange.)
    """
    postfix = datetime.now().strftime("%Y%m%d")
    with open(log_file + "." + postfix, "rb") as f_in:
        delimiter_len = len(LOG_DELIMITER)
        bf = ByteBuffer.allocate(MAX_UDP_DATA_LEN)
        while True:
            curr_position = f_in.tell()
            n = 0
            # retry up to 10 times, trying to fill the buffer completely
            for _ in xrange(10):
                n += bf.read_from_file(f_in)
                if bf.get_remaining() == 0:
                    break
            if n <= 0:
                break
            bf.flip() # switch the buffer from write to read mode (position back to 0)
            idx = 0
            target = LOG_DELIMITER[idx] # which delimiter character we are matching
            bf.mark() # remember this position; reset() returns here
            begin = 0 # start of the next record after the previous delimiter
            length = 0 # bytes consumed since the last delimiter
            while True:
                if bf.get_remaining() == 0:
                    break
                b = bf.get_bytes(1)[0] # read the buffer one byte at a time
                length += 1
                if b == target:
                    idx += 1
                    if idx == delimiter_len: # a complete LOG_DELIMITER was matched
                        begin = bf.get_position() # where the next scan resumes
                        bf.reset() # jump back to the start of this record
                        idx = 0
                        bytes = bf.get_bytes(length - delimiter_len)
                        yield bytes
                        bf.set_position(begin) # move explicitly past the delimiter
                        bf.mark()
                        length = 0
                        target = LOG_DELIMITER[idx] # next character to look for
                else:
                    if idx > 0: # partial match failed: restart the delimiter search
                        idx = 0
                        target = LOG_DELIMITER[idx]
            f_in.seek(curr_position + begin)
            bf.clear() # position back to 0
def request_generator():
    """Yield Request protobuf messages decoded from today's request log.

    Records that fail protobuf parsing are silently skipped.
    """
    for payload in byte_log_generator(REQUEST_LOG_FILE):
        msg = Request()
        try:
            # protobuf deserialization
            msg.ParseFromString(payload)
        except:
            continue
        yield msg
def feature_generator():
    """Yield Feature protobuf messages decoded from today's feature log.

    Records that fail protobuf parsing are silently skipped.
    """
    for payload in byte_log_generator(FEATURE_LOG_FILE):
        msg = Feature()
        try:
            # protobuf deserialization
            msg.ParseFromString(payload)
        except:
            continue
        yield msg
def text_log_generator(log_file):
    """Yield whitespace-stripped lines from today's dated text log file."""
    dated_path = "%s.%s" % (log_file, datetime.now().strftime("%Y%m%d"))
    with open(dated_path) as handle:
        for raw_line in handle:
            yield raw_line.strip()
def show_generator():
    """Yield Show records parsed from today's show log.

    Rows missing any of traceid/uid/itemid/show_time are discarded.
    """
    for line in text_log_generator(SHOW_LOG_FILE):
        # JSON deserialization
        record = simplejson.loads(line)
        traceid = record.get("traceid", "")
        uid = record.get("uid", 0)
        itemid = record.get("itemid", 0)
        show_time = record.get("show_time", 0)
        position = record.get("position", 0)
        if not (traceid and uid and itemid and show_time):
            continue
        show = Show()
        show.traceid = traceid
        show.uid = uid
        show.itemid = itemid
        show.show_time = show_time
        show.position = position
        yield show
def click_generator():
    """Yield Click records parsed from today's click log.

    Rows missing any of traceid/uid/itemid/click_time are discarded.
    """
    for text in text_log_generator(CLICK_LOG_FILE):
        # JSON deserialization
        dic = simplejson.loads(text)
        traceid = dic.get("traceid", "")
        uid = dic.get("uid", 0)
        itemid = dic.get("itemid", 0)
        click_time = dic.get("click_time", 0)
        # keep only complete records
        if traceid and uid and itemid and click_time:
            click = Click()
            click.traceid = traceid
            click.uid = uid
            click.itemid = itemid
            click.click_time = click_time
yield click | [
"zhangchaoyang@taou.com"
] | zhangchaoyang@taou.com |
c9a332aacd3871c13e7ca00a6830c77eb3c8ce73 | 76590cac337c7761091281f782b8f0306aae8198 | /Spider-QQMUSIC.py | a3691ee9a026af59cf916623fbd7ec667c1b61ea | [] | no_license | RavenHuo/Spider-QQMUSIC | ec6c388a7835bdfa21914077790b030bf9f2070f | b8330defbdd4d651931696cd7735fec26024ab8f | refs/heads/master | 2020-04-15T17:08:45.188800 | 2019-01-09T13:02:04 | 2019-01-09T13:02:04 | 164,862,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,951 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pymysql
import requests
from urllib.parse import urlencode
import json
import time
# Connect to the local MySQL database (schema: web_test).
conn=pymysql.connect(host='127.0.0.1',port=3306,user='root',passwd='1234',db='web_test', use_unicode=True, charset="utf8")
cur=conn.cursor()
# Common HTTP headers: mimic a real browser; QQ Music requires the Referer.
header = {'Accept': '*/*',
          'Accept-Language': 'en-US,en;q=0.8',
          'Cache-Control': 'max-age=0',
          'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0',
          'Connection': 'keep-alive',
          'Referer': 'https://y.qq.com/n/yqq/playlist/1478611135.html'
          }
def get_music():
    """Fetch one page of QQ Music playlists (category 6) and crawl each playlist."""
    parameter = {'picmid': '1',
                 'rnd': '0.25099454148518685',
                 'g_tk': '5381',
                 'jsonpCallback': 'getPlaylist',
                 'loginUin': '0',
                 'hostUin': '0',
                 'format': 'jsonp',
                 'inCharset': 'utf8',
                 'outCharset': 'utf-8',
                 'notice': '0',
                 'platform': 'yqq',
                 'needNewCode': '0',
                 'categoryId': '6',
                 'sortId': '0',
                 'sin': '0',
                 'ein': '100', }
    # sin = start index, ein = end index of the playlist page
    url = 'https://c.y.qq.com/splcloud/fcgi-bin/fcg_get_diss_by_tag.fcg?' + urlencode(parameter)
    # pre-built URL for items 30-59, kept for reference (unused)
    url2='https://c.y.qq.com/splcloud/fcgi-bin/fcg_get_diss_by_tag.fcg?picmid=1&rnd=0.039497698907906664&g_tk=5381&jsonpCallback=getPlaylist&loginUin=0&hostUin=0&format=jsonp&inCharset=utf8&outCharset=utf-8&notice=0&platform=yqq&needNewCode=0&categoryId=6&sortId=5&sin=30&ein=59'#30-59
    print(url)
    html=requests.get(url,headers=header)
    html.encoding='utf-8'
    # strip the "getPlaylist(...)" JSONP wrapper before parsing
    data=json.loads(html.text[12:-1])
    if data and 'data' in data:
        for music_list in data.get('data').get('list'):
            print(music_list)
            dissid=music_list.get('dissid')
            if dissid:
                get_song_mid(dissid)
def get_song_mid(dissid):
    """Fetch the track list of playlist *dissid* and crawl every song in it."""
    parameter = {'type':'1',
                 'json':'1',
                 'utf8':'1',
                 'onlysong':'0',
                 'disstid':str(dissid),
                 'format':'jsonp',
                 'g_tk':'5381',
                 'jsonpCallback':'playlistinfoCallback',
                 'loginUin':'0',
                 'hostUin':'0',
                 'format':'jsonp',
                 'inCharset':'utf8',
                 'outCharset':'utf-8',
                 'notice':'0',
                 'platform':'yqq',
                 'needNewCode':'0' }
    url='https://c.y.qq.com/qzone/fcg-bin/fcg_ucc_getcdinfo_byids_cp.fcg?'+urlencode(parameter)
    print(url)
    html=requests.get(url,headers=header)
    html.encoding='utf-8'
    # strip the "playlistinfoCallback(...)" JSONP wrapper before parsing
    data=json.loads(html.text[21:-1])
    if data and 'cdlist' in data:
        for song_list in data.get('cdlist'):
            for song_inf in song_list.get('songlist'):
                # song identifier, used to build the song detail request
                song_mid=song_inf.get('songmid')
                get_song_inf(song_mid)
def get_song_inf(song_mid):
    """Fetch one song's detail page, assemble its metadata and insert it into MySQL."""
    time.sleep(0.3)
    parameter = {'songmid':str(song_mid),
                 'tpl':'yqq_song_detail',
                 'format':'jsonp',
                 'callback':'getOneSongInfoCallback',
                 'g_tk':'5381',
                 'jsonpCallback':'getOneSongInfoCallback',
                 'loginUin':'0',
                 'hostUin':'0',
                 'format':'jsonp',
                 'inCharset':'utf8',
                 'outCharset':'utf-8',
                 'notice':'0',
                 'platform':'yqq',
                 'needNewCode':'0'}
    url1='https://c.y.qq.com/v8/fcg-bin/fcg_play_single_song.fcg?'+urlencode(parameter)
    try:
        html = requests.get(url1, headers=header)
        if html.status_code==200:
            html.encoding = 'utf-8'
            # strip the "getOneSongInfoCallback(...)" JSONP wrapper
            data1 = json.loads(html.text[23:-1])
            if data1 and 'data' in data1:
                song_inf = data1.get('data')[0]
                # song page URL
                song_mid = song_inf.get('mid')
                song_url = str('https://y.qq.com/n/yqq/song/' + str(song_mid) + '.html')
                print(song_url)
                # song title
                song_name = judge(song_inf.get('name'))
                print(song_name)
                # singer name ([0] takes the first singer dict, then its 'name')
                singer_name = judge(song_inf.get('singer')[0].get('name'))
                print(singer_name)
                # song duration, formatted as "minutes : seconds"
                if song_inf.get('interval'):
                    song_interval = song_inf.get('interval')
                    song_time_seconds = song_interval % 60
                    song_time_minutes = song_interval // 60 # integer division
                    song_time = str(song_time_minutes) + " : " + str(song_time_seconds)
                else:
                    song_time = 'None'
                # album id, needed for the genre/language/release-date lookup
                song_albummid = song_inf.get('album').get('mid')
                print(song_albummid)
                if song_albummid:
                    song_inf2 = get_song_inf2(song_albummid)
                    # genre
                    song_genre = song_inf2[0]
                    # language
                    song_language = song_inf2[1]
                    # release date
                    song_public_time = song_inf2[2]
                    print(song_genre)
                    print(song_language)
                    print(song_public_time)
                    try:
                        result = cur.execute(
                            'insert into popular_song(song_name,singer_name,song_url,song_time,song_genre,song_language,song_public_time)values(%s,%s,%s,%s,%s,%s,%s)',(song_name, singer_name, song_url, song_time, song_genre, song_language, song_public_time))
                        # commit the insert
                        conn.commit()
                        print(result)
                    except Exception as e:
                        print(e)
        else:
            print('connection error')
    except Exception as e:
        print(e)
def get_song_inf2(ablummid):
    """Fetch album info; return [genre, language, release_date] as strings
    (or None implicitly when the lookup fails)."""
    song_inf2_list=[]
    parameter = {'albummid':str(ablummid),
                 'jsonpCallback':'getAlbumInfoCallback',
                 'loginUin':'0',
                 'hostUin':'0',
                 'format':'jsonp',
                 'inCharset':'utf8',
                 'outCharset':'utf-8',
                 'notice':'0',
                 'platform':'yqq',
                 'needNewCode':'0'}
    url2 = 'https://c.y.qq.com/v8/fcg-bin/fcg_v8_album_info_cp.fcg?'+urlencode(parameter)
    # second endpoint: album page carrying genre, language and release date
    try:
        html = requests.get(url2, headers=header)
        if html.status_code == 200:
            html.encoding = 'utf-8'
            print(html.text)
            # strip the "getAlbumInfoCallback(...)" JSONP wrapper
            data2 = json.loads(html.text[22:-1])
            print(data2)
            if data2 and 'data' in data2:
                song_inf2 = data2.get('data')
                # genre
                song_genre = judge(song_inf2.get('genre'))
                song_inf2_list.append(song_genre)
                # language
                song_language = judge(song_inf2.get('lan'))
                song_inf2_list.append(song_language)
                # release date
                song_public_time = judge(song_inf2.get('aDate'))
                song_inf2_list.append(song_public_time)
                return song_inf2_list
            else:
                print('error')
    except Exception as e:
        print(e)
def judge(thing):
    """Return str(thing) for truthy values, the literal string 'None' otherwise."""
    return str(thing) if thing else 'None'
if __name__ == '__main__':
    # Entry point: crawl the first page of playlists (and all their songs).
    get_music()
| [
"965195920@qq.com"
] | 965195920@qq.com |
c55176b22a1057a5b10440b3fa3b8bacd8b730c6 | b00533b8405bbdb6254073f2146ceda13a754ee9 | /markup/__init__.py | bd4dc4d716ab5236ae0701ffd41612f00f7cc5c1 | [] | no_license | eea/flis.flip | d349abab0c36c8690a2d0dc2b7b55b4ed61b3866 | f7d2e803f70036a188c364c2e7b8b1403ee278b2 | refs/heads/master | 2023-08-10T04:29:34.838000 | 2017-02-09T13:44:20 | 2017-02-09T13:44:25 | 22,872,036 | 0 | 0 | null | 2022-07-06T19:24:10 | 2014-08-12T09:28:43 | Python | UTF-8 | Python | false | false | 22 | py | from .markup import *
| [
"catardra@60810c31-268d-49e0-9aa7-0fbff4fae3ae"
] | catardra@60810c31-268d-49e0-9aa7-0fbff4fae3ae |
7cd8898e0e3005975525306f1622e0d54d94136b | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /140_gui/pyqt_pyside/examples/PyQt5/Chapter13_Running Python Scripts on Android and iOS/demoMultipleSelection.py | 96a2d86b0e8e60e19544ee49d3e46b244fb8b9fb | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 573 | py | import android
# Pop up an SL4A multi-choice dialog and echo the chosen food items.
app = android.Android()
app.dialogCreateAlert("Select your food items")
app.dialogSetMultiChoiceItems(['Pizza', 'Burger', 'Hot Dog'])
app.dialogSetPositiveButtonText('Done')
app.dialogShow()
app.dialogGetResponse()
response = app.dialogGetSelectedItems()
print(response)
# response[1] holds the list of selected item indexes
selectedResult=response[1]
n=len(selectedResult)
print("You have selected following food items: ")
for i in range(0, n):
    if selectedResult[i]==0:
        print("Pizza")
    elif selectedResult[i]==1:
        print("Burger")
    elif selectedResult[i]==2:
        print("Hot Dog")
| [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
31a75b61ce6024e48f23d5ebe9bf1ad590a9b156 | 085e475b8dd0ee75796db6fc8c6a39ca4d20e279 | /Tumher/wsgi.py | 1815169c60bcd988479ee1ba521790d5718d4922 | [] | no_license | taweesinw94/TumHer | 53683fe89016f3c95cb9af9d597fa46c1c1ac113 | dc37625c8048008d2939a839b062dd1a94f6e52d | refs/heads/master | 2021-01-11T09:12:37.715825 | 2016-12-24T02:06:24 | 2016-12-24T02:06:24 | 77,259,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | """
WSGI config for Tumher project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Select the Django settings module before the WSGI handler is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Tumher.settings")
application = get_wsgi_application()
| [
"taweesinw94@gmail.com"
] | taweesinw94@gmail.com |
505db938905e54b798a6e16385ae0dd94d09df86 | cc327d50377f91c95d9f40fd5198c170ae77af4c | /bank-api/bankAPI.py | 8918f2e1acc67ec1fa6cfba9732477f5cc92792f | [] | no_license | Ashakibp/python-data-demo | 68eab74b5f7f84cdda66e7c028b829c977d5ea39 | a2db8431251ed719df47a1b50dbcbbf0a52ba2f9 | refs/heads/master | 2020-04-05T13:34:49.344775 | 2017-09-05T21:36:09 | 2017-09-05T21:36:09 | 94,897,387 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,511 | py | import copy
import json
import time
from CollectionModule import mongo_db_handler
from bottle import run, response, post, request
from bson import objectid
from cleanData import clean_data
class Bank_Api(object):
def __init__(self):
self.users = mongo_db_handler.collection_manager("banks", "users")
self.banks = mongo_db_handler.collection_manager("banks", "banks")
self.transactions = mongo_db_handler.collection_manager("banks", "transactions")
self.branches = mongo_db_handler.collection_manager("banks", "branches")
self.data_cleaner = clean_data()
def login(self, username, password):
username_query = self.users.find_query({"username": username})
if len(username_query) == 1 and username_query[0]["password"] == password:
logged_in_user = username_query[0]
return_dict = {}
return_dict["save_variables"] = [{"user_id":str(logged_in_user["_id"]), "balance": logged_in_user["balance"]}]
return [return_dict]
return_dict = {}
return_dict["go_to_block"] = "69eeeb9a-fff9-445c-a16c-f34d9f3f0d65"
return [return_dict]
def refresh(self, user_id):
login_check = self.users.find_query({"_id":user_id})
if len(login_check) == 1:
login = login_check[0]
return login
else:
return_dict = {}
return_dict["text"] = "Invalid login try again"
return [return_dict]
def get_balance(self, user_id):
user = self.refresh(user_id)
return user["balance"]
def find_branches(self, user_id):
try:
login_check = self.users.find_query({"_id": user_id})
if login_check is not []:
logged_in = login_check[0]
bank_id = objectid.ObjectId(logged_in["bank_id"])
valid_bank = self.banks.find_query({"_id": bank_id})
valid_bank = valid_bank[0]
branches = valid_bank["branches"]
bran_list = []
for branch in branches:
branch = objectid.ObjectId(branch)
bran_list.append(self.branches.find_query({"_id":branch})[0])
branch_card = self.data_cleaner.generate_branch_card(bran_list)
return [branch_card]
return_dict = {}
return_dict["text"] = "Invalid login try again"
return [return_dict]
except:
return_dict = {}
return_dict["text"] = "Error finding branches, please try again later."
return [return_dict]
def get_bank(self, text):
pass
def add_transaction_to_db(self, sender_obj, receiver_obj, amount):
query = {
"sender": sender_obj,
"receiver": receiver_obj,
"amount": amount,
"timestamp": time.time()
}
self.transactions.add_query(query)
trans_obj = self.transactions.find_query(query)[0]
return trans_obj["_id"]
def make_transaction(self, user_id, other_username, amount):
logged_in_user = self.refresh(user_id)
if logged_in_user is not None:
other_user = self.users.find_query({"username": other_username})
if len(other_user) == 1:
other_user_obj = other_user[0]
if not self.get_balance(user_id) - amount >= 0:
return False
else:
existing_balance = self.get_balance(user_id) - amount
transfer_balance = other_user_obj["balance"] + amount
self.users.update_query(
{"_id": logged_in_user["_id"]},
{"$set": {"balance": existing_balance
}})
self.users.update_query(
{"_id": other_user_obj["_id"]},
{"$set": {"balance": transfer_balance}}
)
trans_one = logged_in_user["transactions"]
trans_two = other_user_obj["transactions"]
trans_id = self.add_transaction_to_db(logged_in_user["_id"], other_user_obj["_id"], amount)
trans_one.append(trans_id)
trans_two.append(trans_id)
self.users.update_query(
{"_id": logged_in_user["_id"]},
{"$set": {"transactions": trans_one
}})
self.users.update_query(
{"_id": other_user_obj["_id"]},
{"$set": {"transactions": trans_two}}
)
return_text = {}
return_text["return_text"] = "Transaction Successful"
return [return_text]
return_text = {}
return_text["return_text"] = "Error with transaction"
return [return_text]
def get_transactions(self, user_id, number):
if number == 0:
number = 10
user_obj = self.users.find_query({"_id": user_id})[0]
trans_list = copy.deepcopy(user_obj["transactions"])
gal_card = self.data_cleaner.get_transactions(trans_list, number)
return gal_card
# Default response type for all endpoints; one shared API facade instance.
response.content_type = 'application/json'
bank_obj = Bank_Api()
@post("/login")
def do_login():
    # Authenticate with credentials from the JSON body; always returns JSON.
    login_data = bank_obj.login(request.json.get('username'), request.json.get('password'))
    response.content_type = 'application/json'
    return json.dumps(login_data)
@post("/transaction")
def do_transaction():
    # Transfer `amount` from the posted user_id to other_username.
    user_id = objectid.ObjectId(request.json.get('user_id'))
    other_username = request.json.get('other_username')
    amount = int(request.json.get('amount'))
    response.content_type = 'application/json'
    trans_response = bank_obj.make_transaction(user_id, other_username, amount)
    return json.dumps(trans_response)
@post("/getTransactions")
def get_trans():
    # Return the user's recent transactions as a gallery card.
    user_id = objectid.ObjectId(request.json.get('user_id'))
    amount = int(request.json.get('amount'))
    response.content_type = 'application/json'
    trans_data = bank_obj.get_transactions(user_id, amount)
    return json.dumps(trans_data)
@post("/getBranches")
def get_branches():
    # Return the branch list for the user's bank.
    user_id = objectid.ObjectId(request.json.get('user_id'))
    response.content_type = 'application/json'
    return json.dumps(bank_obj.find_branches(user_id))
# Start the bottle development server (blocking call).
run(host='localhost', port=8080, debug=True)
| [
"aaronshakib@gmail.com"
] | aaronshakib@gmail.com |
a3976a033b050da9d584f2ee555049bc57e48660 | 5fe194b477ba8af8acc846db2dfc961ad2a57013 | /.metadata/.plugins/org.eclipse.core.resources/.history/5/c0e3511f98fb001611e7bcc2e467e756 | 55afe0616ecc9457ecdb14d257b97cdcb7536b33 | [] | no_license | robotanica/ExcavatorROS | 6a129e302f0a288d198e3f720a78610e6333f7d3 | db0bdf0f9988ebf910f832e22f46f679e936cdcb | refs/heads/master | 2023-03-17T23:49:58.762691 | 2017-09-29T16:31:33 | 2017-09-29T16:31:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,974 | #!/usr/bin/python
'''
Created on Feb 21, 2017
@author: yutak
'''
import rospy
import sensor_msgs.msg as smsg
import exp_excavator.msg as cmsg
class JoyMsgManager:
    """ROS node merging left/right joystick axes into one JointValues message.

    Right stick: vertical axis -> boom, horizontal -> bucket.
    Left stick:  vertical axis -> arm,  horizontal -> swing.
    Every joystick callback republishes the combined state on 'joy_values'.
    """
    def __init__(self):
        rospy.init_node('joy_msg_manager', anonymous=True)
        # When the ~test param is set, echo every received axis to the ROS log.
        self.test = rospy.get_param('~test', False)
        self.joy_val_msg = cmsg.JointValues()
        self.joy_val_msg.boom = 0.0
        self.joy_val_msg.arm = 0.0
        self.joy_val_msg.bucket = 0.0
        self.joy_val_msg.swing = 0.0
        self.sub_spd_com_bucket = rospy.Subscriber('joy_right', smsg.Joy,
                                                   self.cb_joy_right)
        self.sub_joy_left = rospy.Subscriber('joy_left', smsg.Joy,
                                             self.cb_joy_left)
        self.pub_joy_values = rospy.Publisher('joy_values', cmsg.JointValues,
                                              queue_size= 10)
    def cb_joy_right(self, joy):
        # Right stick: axes[1] drives the boom, axes[0] the bucket.
        self.joy_val_msg.boom = joy.axes[1]
        self.joy_val_msg.bucket = joy.axes[0]
        if self.test:
            rospy.loginfo('Boom Joystick Value:%f' %self.joy_val_msg.boom)
            rospy.loginfo('Bucket Joystick Value:%f'
                          %self.joy_val_msg.bucket)
        self.pub_joy_values.publish(self.joy_val_msg)
    def cb_joy_left(self, joy):
        # Left stick: axes[1] drives the arm, axes[0] the swing.
        self.joy_val_msg.arm = joy.axes[1]
        self.joy_val_msg.swing = joy.axes[0]
        if self.test:
            rospy.loginfo('Arm Joystick Value:%f' %self.joy_val_msg.arm)
            rospy.loginfo('Swing Joystick Value:%f'
                          %self.joy_val_msg.swing)
        self.pub_joy_values.publish(self.joy_val_msg)
if __name__ == '__main__':
jm = JoyMsgManager()
try:
rospy.spin()
except rospy.ROSInterruptException:
pass | [
"filippos.sotiropoulos@gmail.com"
] | filippos.sotiropoulos@gmail.com | |
9cb57d9c86c424ff0dcc694594968783f674234d | 540f54d74659c27248c07d8deea41e46627220f0 | /Metrics.py | 90dcfd30d3b1496c20291ac1ff07ea933c3c274a | [] | no_license | K7chyp/msft_time_series | 37977b8aef328fa18657e8f093884b2ea124b0b8 | 7472900ed3a369a23c17aec5f120128c55f4f0d2 | refs/heads/main | 2023-03-08T00:01:56.446023 | 2021-02-27T06:58:58 | 2021-02-27T06:58:58 | 341,129,921 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,418 | py | class Metrics:
def __init__(self, dataframe):
self.dataframe = dataframe.copy()
def get_min_max(self, column):
return min(self.dataframe[column].to_list()), max(
self.dataframe[column].to_list()
)
def get_average(self, column):
sum_ = 0
for value in self.dataframe[column]:
sum_ = value
return sum_ / len(self.dataframe)
def more_less_avg(self, column):
avg = self.get_average(column)
more_than_avg = 0
less_than_avg = 0
for value in self.dataframe[column].to_list():
if value >= avg:
more_than_avg += 1
else:
less_than_avg += 1
return more_than_avg, less_than_avg
def get_mode(self, column):
mode_ = max(
set(self.dataframe[column].to_list()),
key=self.dataframe[column].to_list().count,
)
return mode_
def get_stat(self, column):
min_, max_ = self.get_min_max(column)
avg_ = self.get_average(column)
mta, lta = self.more_less_avg(column)
mode_ = self.get_mode(column)
return print(
f"Minimum {min_}",
f"Maximum {max_}",
f"Average {avg_}",
f"Mode {mode_} ",
f"More than average {mta}",
f"Less than averange {lta} ",
sep="\n",
)
| [
"nekitos199@gmail.com"
] | nekitos199@gmail.com |
44260eaf54611020edd327e194c75925b182f899 | d1a5e15463623d75560659481e8277a516a8e280 | /Website_Project/mysite/account/views.py | c6f37f13125fb41f2acb3bc18dc0c8c6497fd11f | [] | no_license | LeeSungRyul/KD_AI | fac54863d6eb49fa8fe96dbb700e9279d2a1f0fb | 560e4c2e777ab5f5ee6a2a31f17372f58ea483ef | refs/heads/master | 2023-06-27T00:06:47.433793 | 2021-07-27T14:59:25 | 2021-07-27T14:59:25 | 339,369,278 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,931 | py | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.hashers import make_password, check_password
from .models import Account
# Create your views here.
def register(request):
    """Render the sign-up form (GET) or create a new Account (POST)."""
    if request.method == "GET":
        return render(request, 'register.html')
    elif request.method == "POST":
        # Collect all expected form fields in one pass.
        fields = {name: request.POST.get(name, None)
                  for name in ('userID', 'userPW', 're_password',
                               'userMail', 'userPhone')}
        if not all(fields.values()):
            return render(request, 'register.html',
                          {'error': "All values must be entered."})
        if fields['userPW'] != fields['re_password']:
            return render(request, 'register.html',
                          {'error': 'Confirm password does not match.'})
        # Store only the hashed password, never the plaintext.
        new_account = Account(userID=fields['userID'],
                              userPW=make_password(fields['userPW']),
                              userMail=fields['userMail'],
                              userPhone=fields['userPhone'])
        new_account.save()
        return redirect('/login/')
# A GET of /register responds with register.html: return render(request, 'register.html')
# res_data maps to {{error}} in the template: when a validation branch fires, its message is shown.
def login(request):
    """Render the login form (GET) or authenticate a user (POST).

    POST is dispatched on the submit button pressed: '_login' attempts
    authentication, '_register' forwards to the sign-up page.
    """
    response_data = {}
    if request.method == "GET":
        return render(request, 'login.html')
    elif request.method == "POST":
        if '_login' in request.POST:
            login_userID = request.POST.get('userID', None)
            login_userPW = request.POST.get('userPW', None)
            # Either field missing -> reject before touching the database.
            if not (login_userID and login_userPW):
                response_data['error'] = "All values must be entered."
            else:
                try:
                    account = Account.objects.get(userID=login_userID)
                except Account.DoesNotExist:
                    # BUG FIX: an unknown userID used to raise an uncaught
                    # DoesNotExist (HTTP 500) instead of showing an error.
                    account = None
                if account is not None and check_password(login_userPW, account.userPW):
                    # Remember the authenticated user in the session.
                    request.session['account'] = account.userID
                    return redirect('/')
                else:
                    response_data['error'] = "Invalid username or password."
            return render(request, 'login.html', response_data)
        elif '_register' in request.POST:
            return redirect('/login/register/')
        # BUG FIX: a POST without a recognised submit button used to return
        # None; re-render the form instead.
        return render(request, 'login.html', response_data)
| [
"airtrack03@naver.com"
] | airtrack03@naver.com |
f417555ac3d50a2aa083acc38a78616e9b07d129 | 1345dd950ba5a54eae276e99302641eb03ff7c76 | /DMKD/datasets/email/process1.py | 8f20fd7e9e9c8f8ca69d57b046944f8316acbb06 | [
"MIT"
] | permissive | wanglili-dartmouth/temporal_hyper | 8c5ad59dd0c6abd554484edd44bbea09258dd4d5 | 1b4e74d1d80be5e5d8e9911704bf86b44ee617c0 | refs/heads/main | 2023-06-09T01:50:25.147880 | 2021-06-25T07:28:38 | 2021-06-25T07:28:38 | 347,167,588 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import networkx as nx
graph = nx.read_weighted_edgelist("edge.tsv", delimiter="\t", nodetype=int,create_using=nx.Graph())
graph=nx.convert_node_labels_to_integers(graph,first_label=0)
nx.write_edgelist(graph, "edgelist.tsv", delimiter="\t", data=["weight"])
| [
"noreply@github.com"
] | wanglili-dartmouth.noreply@github.com |
c6b57c21c3f21174c1571ed4ba6a70d04d2df687 | 973d983a0cc3a107cf535f7b7fd4dd04072cf22a | /reinforcement_learning/exercise_04/scripts/exercise-04_test.py | 7a7dd38a96a701e00b94965151ca1dba4ceda02b | [] | no_license | victorvg17/uni-freiburg | d822e6cf8869b7126b7e235fd1e27830eb81aff1 | a2b3b5e7d8d4c422b7bf45c9a4fbd0616ab61fac | refs/heads/master | 2020-08-30T18:18:46.793648 | 2020-01-11T14:28:07 | 2020-01-11T14:28:07 | 218,455,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,614 | py | import unittest
import numpy as np
import sys
from blackjack import BlackjackEnv
from off_policy_mc import create_random_policy, mc_control_importance_sampling
class TestMCControl(unittest.TestCase):
def test_q_values(self):
np.random.seed(0)
env = BlackjackEnv(test=True)
expected_q_values = {
(18, 10, False): [-0.23533037475345092, -0.65069513406157],
(20, 6, False): [0.6990585070611964, -0.8814504881450475],
(19, 9, False): [0.23174294060370004, -0.74],
(12, 9, False): [-0.5431985294117646, -0.29656419529837275],
(17, 8, False): [-0.4034582132564843, -0.4707282246549266],
(20, 9, True): [0.7628571428571427, 0.09944751381215464],
(17, 4, False): [-0.12105751391465681, -0.5326237852845899],
(13, 4, False): [-0.2312764955252001, -0.29011786038077975],
(17, 1, False): [-0.6282051282051277, -0.6655389076848715],
(13, 3, False): [-0.26743075453677173, -0.2716210343328985],
(16, 6, False): [-0.10835322195704067, -0.4610136452241714],
(20, 2, False): [0.6376912378303203, -0.8531152105812742],
(20, 9, False): [0.7585848074921976, -0.8680203045685262],
(21, 4, False): [0.8698830409356734, -1.0],
(16, 1, False): [-0.7971721111652841, -0.6776007497656986],
(15, 10, False): [-0.5712454852615625, -0.5446418205038894],
(14, 5, False): [-0.13802816901408452, -0.3296193129062211],
(14, 9, False): [-0.565416285452881, -0.4146797568957452],
(13, 6, True): [-0.2627118644067797, 0.2666666666666668],
(18, 10, True): [-0.1964996022275259, -0.1846153846153846],
(18, 5, False): [0.2162293488824098, -0.6132542037586541],
(21, 8, True): [0.922656960873521, 0.19824561403508784],
(18, 3, False): [0.16515944788196105, -0.6347826086956525],
(17, 3, False): [-0.13083213083213088, -0.5655934646804432],
(20, 3, False): [0.6458835687220116, -0.8849165815457946],
(19, 10, False): [-0.015288999378495947, -0.7419515847267275],
(18, 1, False): [-0.36386386386386316, -0.6866096866096864],
(14, 1, False): [-0.7548566142460688, -0.5900226757369622],
(18, 9, False): [-0.2109337203676826, -0.5946843853820601],
(14, 3, False): [-0.265155020823693, -0.3708165997322623],
(16, 5, False): [-0.20906567992599415, -0.3809971777986831],
(21, 5, True): [0.891840607210625, 0.3579545454545456],
(20, 10, False): [0.4393263157894733, -0.8543532020124465],
(15, 9, False): [-0.5498449268941082, -0.526289434151226],
(16, 7, False): [-0.4891454965357966, -0.40610104861773155],
(17, 10, False): [-0.47040094339622607, -0.6040100250626568],
(19, 8, False): [0.5706627680311898, -0.7149829184968268],
(17, 9, False): [-0.4056378404204498, -0.545539033457249],
(13, 7, False): [-0.4789838337182441, -0.2375162831089877],
(17, 5, False): [-0.02606177606177609, -0.5548141086749286],
(13, 10, False): [-0.5757291788684543, -0.45740615868734547],
(14, 4, False): [-0.21376146788990846, -0.31591448931116406],
(12, 10, False): [-0.5766489421880333, -0.4186740077999534],
(14, 10, False): [-0.5757015821688396, -0.5145094426531549],
(16, 10, False): [-0.5621724630776569, -0.5581314477802275],
(15, 2, False): [-0.3027101515847493, -0.4245939675174012],
(16, 3, False): [-0.2693409742120339, -0.46125797629899834],
(16, 2, False): [-0.2568509057129591, -0.4964166268514091],
(16, 4, False): [-0.21896383186705776, -0.48490566037735827],
(21, 2, True): [0.8876739562624256, 0.32078853046595013],
(19, 1, False): [-0.1036889332003989, -0.7973231357552581],
(19, 4, False): [0.4097258147956546, -0.7285067873303166],
(21, 8, False): [0.9245723172628312, -1.0],
(13, 8, False): [-0.524061810154526, -0.3537757437070942],
(16, 8, False): [-0.5093139482053626, -0.4488497307880566],
(15, 1, False): [-0.7456647398843936, -0.5942549371633743],
(12, 5, False): [-0.14081996434937621, -0.1990846681922192],
(15, 4, False): [-0.23978201634877452, -0.38447319778188516],
(13, 6, False): [-0.15426997245179058, -0.18335619570187484],
(21, 10, False): [0.8933922397233937, -1.0],
(19, 3, False): [0.4270570418980312, -0.7368961973278526],
(21, 1, False): [0.6351865955826352, -1.0],
(20, 1, False): [0.15662650602409645, -0.8789297658862895],
(20, 5, False): [0.6695088676671217, -0.8277925531914894],
(18, 7, False): [0.3904576436222005, -0.5871954132823696],
(20, 7, False): [0.7727583846680343, -0.8603089321692415],
(21, 6, False): [0.8976631748589844, -1.0],
(21, 10, True): [0.8835616438356135, 0.061950993989828944],
(13, 10, True): [-0.5957219251336902, -0.03703703703703699],
(12, 4, False): [-0.19615912208504802, -0.23269316659222872],
(20, 4, False): [0.6774526678141133, -0.8362611866092168],
(15, 10, True): [-0.5518913676042678, -0.19183673469387752],
(15, 3, False): [-0.2550790067720088, -0.41751152073732706],
(18, 2, False): [0.10782442748091597, -0.6102418207681367],
(18, 4, False): [0.16543937162493852, -0.5975723622782455],
(21, 4, True): [0.9018181818181826, 0.28173913043478266],
(12, 3, False): [-0.2545931758530182, -0.26359447004608316],
(13, 5, False): [-0.1932692307692308, -0.2750572082379863],
(13, 1, False): [-0.7839059674502722, -0.5131052865393172],
(16, 9, False): [-0.5247614720581559, -0.5062761506276147],
(17, 9, True): [-0.42248062015503873, -0.18709677419354848],
(21, 5, False): [0.8986852281515859, -1.0],
(14, 2, False): [-0.27932960893854786, -0.39484777517564384],
(18, 6, True): [0.22683706070287538, 0.19620253164556958],
(15, 8, False): [-0.49580615097856545, -0.431539187913126],
(15, 5, False): [-0.15183246073298437, -0.35169300225733613],
(21, 9, False): [0.9377076411960139, -1.0],
(12, 1, False): [-0.7730684326710832, -0.4712245781047172],
(15, 6, False): [-0.15188679245283032, -0.353219696969697],
(12, 8, False): [-0.515682656826568, -0.31906799809795494],
(21, 7, False): [0.9144951140065153, -1.0],
(21, 2, False): [0.8893344025661581, -1.0],
(18, 6, False): [0.27338826951042144, -0.621114948199309],
(20, 8, True): [0.7645259938837922, 0.15555555555555556],
(12, 8, True): [-0.4285714285714286, 0.411764705882353],
(12, 6, False): [-0.13891362422083722, -0.1832167832167831],
(19, 6, False): [0.47623713865752093, -0.723625557206537],
(19, 2, False): [0.37239979705733056, -0.7525150905432583],
(19, 7, True): [0.6631578947368428, 0.25595238095238093],
(20, 8, False): [0.7776617954070979, -0.8442622950819666],
(17, 6, False): [0.04052165812761995, -0.5332403533240367],
(14, 3, True): [-0.14716981132075468, -0.009433962264150898],
(19, 7, False): [0.6104999999999992, -0.7406483790523696],
(21, 3, False): [0.8782201405152218, -1.0],
(16, 1, True): [-0.7762237762237766, -0.3028169014084505],
(21, 1, True): [0.6742909423604755, -0.0976095617529882],
(12, 7, False): [-0.44228157537347185, -0.1881818181818183],
(15, 7, False): [-0.4949026876737715, -0.3318603623508622],
(12, 10, True): [-0.5485327313769756, -0.1541666666666666],
(17, 7, False): [-0.0702936928261917, -0.4908235294117645],
(14, 6, False): [-0.11781076066790352, -0.28493150684931506],
(16, 8, True): [-0.43772241992882555, -0.10967741935483877],
(15, 6, True): [-0.15942028985507253, 0.17361111111111116],
(14, 7, False): [-0.46685210941121913, -0.2968897266729508],
(12, 2, False): [-0.30228471001757445, -0.26256458431188295],
(17, 2, False): [-0.20066256507335523, -0.5998142989786454],
(13, 2, False): [-0.2899628252788114, -0.35277516462841],
(19, 5, False): [0.4169215086646282, -0.7076845806127565],
(19, 8, True): [0.5273224043715847, 0.1807228915662651],
(19, 1, True): [-0.10169491525423732, -0.2840236686390534],
(20, 4, True): [0.626903553299492, 0.19170984455958548],
(13, 4, True): [-0.2096774193548386, 0.2905982905982906],
(17, 10, True): [-0.4914145543744891, -0.3162393162393163],
(20, 1, True): [0.09164420485175204, -0.13089005235602094],
(14, 4, True): [-0.11740890688259109, 0.22321428571428564],
(13, 3, True): [-0.25345622119815664, 0.05434782608695648],
(20, 6, True): [0.6878612716763011, 0.2857142857142856],
(12, 5, True): [-0.19999999999999996, 0.2727272727272728],
(19, 10, True): [0.008559201141226814, -0.17101449275362335],
(16, 5, True): [-0.1184210526315789, 0.1486486486486487],
(18, 8, False): [0.10637254901960781, -0.6134939759036155],
(14, 8, False): [-0.5028546332894172, -0.3722763096893826],
(14, 7, True): [-0.4942528735632186, -0.06153846153846153],
(19, 9, True): [0.25867507886435326, -0.10447761194029857],
(16, 9, True): [-0.5245283018867921, -0.14383561643835613],
(21, 9, True): [0.9414455626715449, 0.10370370370370366],
(13, 9, True): [-0.4615384615384616, 0.19148936170212763],
(12, 6, True): [-0.20312499999999994, 0.1864406779661018],
(21, 7, True): [0.9118457300275489, 0.252808988764045],
(19, 5, True): [0.5297805642633239, 0.054216867469879554],
(18, 1, True): [-0.36176470588235293, -0.42592592592592615],
(21, 3, True): [0.8816169393647741, 0.2359767891682784],
(15, 2, True): [-0.2845528455284551, -0.04065040650406501],
(20, 3, True): [0.7316384180790965, 0.14942528735632185],
(18, 7, True): [0.43181818181818166, 0.11695906432748544],
(15, 7, True): [-0.47985347985347976, 0.06896551724137931],
(12, 4, True): [-0.10091743119266058, 0.18181818181818177],
(18, 8, True): [0.05014749262536869, 0.1079136690647482],
(17, 2, True): [-0.1891891891891892, -0.1259842519685039],
(17, 3, True): [-0.0899280575539568, 0.043209876543209895],
(16, 10, True): [-0.58287795992714, -0.27560521415269995],
(20, 10, True): [0.42847173761339813, -0.02462380300957595],
(16, 2, True): [-0.362549800796813, -0.07575757575757579],
(13, 9, False): [-0.50587211831231, -0.38563829787234005],
(14, 1, True): [-0.8295964125560541, -0.1869158878504673],
(18, 9, True): [-0.13504823151125409, -0.11764705882352944],
(20, 5, True): [0.6820652173913053, 0.17708333333333337],
(15, 5, True): [-0.25196850393700787, 0.027777777777777794],
(20, 7, True): [0.7968337730870713, 0.1851851851851852],
(16, 7, True): [-0.5053003533568905, -0.05673758865248227],
(13, 7, True): [-0.4891774891774891, -0.017241379310344848],
(12, 7, True): [-0.5419847328244272, 0.37333333333333335],
(14, 10, True): [-0.547877591312932, -0.07954545454545457],
(16, 3, True): [-0.19999999999999996, 0.027586206896551748],
(15, 8, True): [-0.5502008032128513, -0.07913669064748201],
(20, 2, True): [0.6198979591836736, 0.2848101265822785],
(19, 6, True): [0.5433526011560694, 0.2336956521739131],
(21, 6, True): [0.8909090909090909, 0.29304029304029283],
(14, 9, True): [-0.5, -0.07272727272727275],
(19, 4, True): [0.38855421686747016, 0.2530864197530865],
(18, 2, True): [0.12871287128712872, 0.10457516339869276],
(14, 2, True): [-0.29059829059829034, 0.025641025641025612],
(15, 4, True): [-0.19215686274509802, -0.06086956521739138],
(18, 4, True): [0.16279069767441848, 0.08284023668639053],
(13, 1, True): [-0.7543859649122808, -0.3719008264462809],
(18, 3, True): [0.08433734939759038, 0.20394736842105263],
(16, 6, True): [-0.2666666666666665, -0.014598540145985398],
(19, 3, True): [0.38601823708206706, 0.03428571428571428],
(15, 9, True): [-0.6296296296296297, -0.12403100775193798],
(13, 5, True): [-0.31225296442687717, 0.06060606060606062],
(15, 1, True): [-0.7534246575342467, -0.47368421052631565],
(17, 6, True): [-0.03859649122807019, 0.23333333333333334],
(14, 6, True): [-0.146341463414634, 0.18584070796460178],
(12, 1, True): [-0.7723577235772359, -0.2857142857142858],
(15, 3, True): [-0.21481481481481488, 0.08088235294117646],
(18, 5, True): [0.23262839879154074, 0.02366863905325443],
(14, 8, True): [-0.561181434599156, -0.25833333333333325],
(13, 8, True): [-0.6306306306306306, 0.11678832116788318],
(13, 2, True): [-0.33333333333333326, 0.14999999999999988],
(17, 5, True): [-0.043771043771043766, 0.04411764705882353],
(12, 9, True): [-0.5238095238095237, -0.08333333333333337],
(17, 1, True): [-0.6195652173913049, -0.3984962406015038],
(12, 3, True): [-0.2982456140350876, -0.0888888888888889],
(16, 4, True): [-0.1357142857142857, -0.04216867469879521],
(19, 2, True): [0.457865168539326, 0.1381578947368422],
(17, 4, True): [-0.20312500000000003, -0.07142857142857147],
(17, 8, True): [-0.43050847457627134, -0.12592592592592594],
(17, 7, True): [-0.11224489795918369, 0.12142857142857143],
(12, 2, True): [-0.30097087378640786, 0.028169014084507],
(14, 5, True): [-0.23320158102766794, 0.19587628865979384]
}
random_policy = create_random_policy(2)
Q, _ = mc_control_importance_sampling(env, num_episodes=500, behavior_policy=random_policy)
self.assert_float_dict_almost_equal(expected_q_values, Q, decimal=2)
def assert_float_dict_almost_equal(self, a, b, decimal=6):
for key_pair in zip(sorted(a), sorted(b)):
self.assertTupleEqual(key_pair[0], key_pair[1])
np.testing.assert_array_almost_equal(a[key_pair[0]], b[key_pair[1]], decimal=decimal)
# Run the MC-control regression test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"victorvg17"
] | victorvg17 |
3460ddd041bc68e7eac46db824581cbe4f2bc789 | 5e553a5ad555ed6bf364373196a1c46b60b5f530 | /src/graphrnn/plot.py | 6811ea1d33f7e0fb405bbce524a972601f3ce535 | [
"MIT"
] | permissive | satyakisikdar/infinity-mirror | 8a7a0079648c523cf670f836a351b0317c15a870 | 555bcc7f4c481001991d53f3b90b03c1201a5c40 | refs/heads/master | 2022-12-08T20:05:56.242257 | 2022-06-22T16:45:44 | 2022-06-22T16:45:44 | 222,779,125 | 5 | 1 | MIT | 2022-12-08T04:32:00 | 2019-11-19T20:02:32 | GAP | UTF-8 | Python | false | false | 1,446 | py | import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
# Global seaborn styling shared by both figures.
sns.set()
sns.set_style("ticks")
sns.set_context("poster",font_scale=1.28,rc={"lines.linewidth": 3})
### plot robustness result
# MMD scores for each generator at increasing noise levels (x axis).
noise = np.array([0,0.2,0.4,0.6,0.8,1.0])
MLP_degree = np.array([0.3440, 0.1365, 0.0663, 0.0430, 0.0214, 0.0201])
RNN_degree = np.array([0.5, 0.5, 0.5, 0.5, 0.5, 0.5])
BA_degree = np.array([0.0892,0.3558,1.1754,1.5914,1.7037,1.7502])
Gnp_degree = np.array([1.7115,1.5536,0.5529,0.1433,0.0725,0.0503])
MLP_clustering = np.array([0.0096, 0.0056, 0.0027, 0.0020, 0.0012, 0.0028])
RNN_clustering = np.array([0.5, 0.5, 0.5, 0.5, 0.5, 0.5])
BA_clustering = np.array([0.0255,0.0881,0.3433,0.4237,0.6041,0.7851])
Gnp_clustering = np.array([0.7683,0.1849,0.1081,0.0146,0.0210,0.0329])
# Figure 1: degree-distribution MMD vs noise (RNN series intentionally omitted).
plt.plot(noise,Gnp_degree)
plt.plot(noise,BA_degree)
plt.plot(noise, MLP_degree)
# plt.plot(noise, RNN_degree)
# plt.rc('text', usetex=True)
plt.legend(['E-R','B-A','GraphRNN'])
plt.xlabel('Noise level')
plt.ylabel('MMD degree')
plt.tight_layout()
plt.savefig('figures_paper/robustness_degree.png',dpi=300)
plt.close()
# Figure 2: clustering-coefficient MMD vs noise, same generators.
plt.plot(noise,Gnp_clustering)
plt.plot(noise,BA_clustering)
plt.plot(noise, MLP_clustering)
# plt.plot(noise, RNN_clustering)
plt.legend(['E-R','B-A','GraphRNN'])
plt.xlabel('Noise level')
plt.ylabel('MMD clustering')
plt.tight_layout()
plt.savefig('figures_paper/robustness_clustering.png',dpi=300)
plt.close()
| [
"daniel.gonzalez.cedre@gmail.com"
] | daniel.gonzalez.cedre@gmail.com |
1b980b390397a91f71589aee42039063fefcad4f | 94cdb5f62411e069de814dcc0c1b5f228a14e6d3 | /latex_linter/bin/wheel | b9504e4d3d18fb8065c7e70d3479c8a91b0f8566 | [
"MIT"
] | permissive | digorithm/latex-linter | d9025f87921bf43145ee0509b7482b28e7ca999e | d6b63002cdcecf291e2abc7a399e0d7af4bd9038 | refs/heads/master | 2020-12-03T00:39:02.182990 | 2017-11-03T21:12:46 | 2017-11-03T21:12:46 | 96,052,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | #!/home/rodrigo/Dropbox/Projects/latex_linter/latex_linter/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
# Setuptools console-script shim: strip the -script.pyw/.exe suffix pip may
# append on Windows, then delegate to wheel's CLI entry point.
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"rod.dearaujo@gmail.com"
] | rod.dearaujo@gmail.com | |
571dae82366587514b3f374054e07b3fbbdcd956 | e46aa8e200022383706b9160ba13b87558fa8e43 | /bot.py | 9bd69b4e639fa2e5108582e924d33e6d9e4c1d4c | [] | no_license | woody-kawagoe/labot | ef49f1558aa3348027e39ba52764bca7166717c6 | 46650d3941d131c4273cdb66aabd7ddfb148fa54 | refs/heads/master | 2021-01-20T15:27:18.693711 | 2018-09-03T04:42:56 | 2018-09-03T04:42:56 | 82,815,907 | 0 | 1 | null | 2018-09-03T04:42:57 | 2017-02-22T14:48:23 | Python | UTF-8 | Python | false | false | 2,577 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from gmailapi import GmailApi
import json
import sys
import base64
from slackclient import SlackClient
from consts import (
SLACK_TOKEN,
DIR_PATH,
USERNAME,
CHANNEL,
QUERY,
EMOJI
)
def get_mail(query="is:unread"):
    """Fetch the newest Gmail message matching *query* and mark it as read.

    Returns the parsed mail dict, or False when nothing matches.
    """
    # BUG FIX: close the credentials file promptly instead of leaking the
    # handle (the original never closed it).
    with open(DIR_PATH + "client_secret.json") as f:
        auth_info = json.load(f)
    user = 'me'
    api = GmailApi(auth_info)
    # The first run triggers an interactive OAuth authorisation.
    maillist = api.getMailList(user, query)
    if maillist['resultSizeEstimate'] > 0:
        mail_id = maillist["messages"][0]['id']
        content = api.getMailContent(user, mail_id)
        mail = parse_mail(content)
        api.doMailAsRead(user, mail_id)
        return mail
    else:
        return False
def parse_mail(content):
    """Flatten a Gmail API message resource into a simple dict.

    Extracts the (possibly nested) base64url-encoded body plus the
    From/To/Subject/Date headers and the snippet.
    """
    payload = content['payload']
    # The body may be nested one or two 'parts' levels deep.
    if 'parts' in payload:
        first_part = payload['parts'][0]
        inner = first_part['parts'][0] if 'parts' in first_part else first_part
        raw_body = inner['body']['data']
    else:
        raw_body = payload['body']['data']
    mail = {}
    mail['body'] = base64.urlsafe_b64decode(raw_body).decode('utf-8')
    mail['snippet'] = content['snippet']
    # Map the interesting RFC-822 headers onto lower-case keys.
    wanted = {'From': 'from', 'To': 'to', 'Subject': 'subject', 'Date': 'date'}
    for header in payload['headers']:
        key = wanted.get(header['name'])
        if key is not None:
            mail[key] = header['value']
    return mail
def send_slack(title, text, channel):
    """Post *text* under *title* to the Slack *channel* as one attachment."""
    sc = SlackClient(SLACK_TOKEN)
    # 'fallback' is the plain-text version shown by clients that cannot
    # render attachments.
    attachments = [{
        'title': title,
        'fallback': text,
        'icon_emoji': EMOJI,
        'text': text
    }]
    sc.api_call(
        "chat.postMessage",
        channel=channel,
        username=USERNAME,
        attachments=attachments,
        icon_emoji=EMOJI
    )
if __name__ == "__main__":
    argvs = sys.argv
    # Optional CLI override: `bot.py <gmail-query> <slack-channel>`.
    if len(argvs) == 3:
        query = [argvs[1]]
        channel = argvs[2]
    else:
        # Defaults come from consts.py.
        query = QUERY
        channel = CHANNEL
    for q in query:
        print(q)
        mail = get_mail(q)
        if mail:
            # A matching unread mail was found; forward it to Slack.
            print("メール受信")
            print(mail['date'])
            print(mail['subject'])
            print(channel)
            text = mail['date'] + '\n' + mail['body']
            send_slack(mail['subject'], text, channel)
        else:
            # No unread mail matched this query.
            print("未読メールなし")
| [
"hip.hop.zip.please@gmail.com"
] | hip.hop.zip.please@gmail.com |
f5a4439458cda5d2ef8db7d69ac279073c1f397f | 8e4adb3ec9bde9f20bc039bc23ec2020bff0e6f2 | /env/bin/jsonschema | d011658af5c832c31a9703c9f95832518b136571 | [] | no_license | clwest/NFT | b2fdd25eb0314579f524fbfb8e7e71ccea61757d | 6f983abe8e7c443e181b0de82405202bdd55a7ab | refs/heads/master | 2023-05-15T10:21:26.763071 | 2021-06-08T21:03:33 | 2021-06-08T21:03:33 | 375,144,515 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | #!/Users/chris/Desktop/NFTS/env/bin/python3.9
# -*- coding: utf-8 -*-
import re
import sys
from jsonschema.cli import main
# Setuptools console-script shim: strip the -script.pyw/.exe suffix pip may
# append on Windows, then delegate to jsonschema's CLI entry point.
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"westremarketing@gmail.com"
] | westremarketing@gmail.com | |
f0f5c2160c8363d9c70237254997e4364943498a | 7ff3e73b630fd2cad2f09bd82f3b45c27fbc99ba | /update.py | 04f08aae593c1820bae4c701423019a5cfc5dd14 | [] | no_license | Carmezim/IMDbSentimentAnalysis | 4d651d02686162a8f6eee20376cb0d813c18f78a | e03317b55568d0b1966230403528d9bcde25e11b | refs/heads/master | 2020-07-04T05:20:20.844819 | 2016-09-27T17:51:41 | 2016-09-27T17:51:41 | 67,655,327 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,204 | py | import pickle
import sqlite3
import numpy as np
import os
#import HashingVectorizer from local dir
from vectorizer import vect
# BUG FIX: the original text read "from vectorizer import vectdef" followed
# by "update_model(...):" -- the "def" keyword had been glued onto the import
# line, leaving both a bogus import name and a SyntaxError. Restored to the
# evidently intended "import vect" + "def update_model".
def update_model(db_path, model, batch_size=10000):
    """Incrementally retrain *model* on all reviews stored in the SQLite db.

    Rows are streamed from table ``review_db`` in chunks of *batch_size*
    (review text, label) pairs; labels are assumed to be 0/1.

    BUG FIX: the loop now calls ``model.partial_fit`` instead of updating
    the global ``clf``, so the *model* argument is actually honoured.
    """
    conn = sqlite3.connect(db_path)
    c = conn.cursor()
    c.execute('SELECT * from review_db')
    results = c.fetchmany(batch_size)
    classes = np.array([0, 1])  # hoisted: the label set never changes
    while results:
        data = np.array(results)
        X = data[:, 0]
        y = data[:, 1].astype(int)
        X_train = vect.transform(X)
        model.partial_fit(X_train, y, classes=classes)
        results = c.fetchmany(batch_size)
    conn.close()
    return None
# Load the previously trained classifier and update it in place with any
# reviews accumulated in the local SQLite database.
cur_dir = os.path.dirname(__file__)
clf = pickle.load(open(os.path.join(cur_dir,
                 'pkl_objects',
                 'classifier.pkl'), 'rb'))
db = os.path.join(cur_dir, 'reviews.sqlite')
update_model(db_path=db, model=clf, batch_size=10000)
# NOTE(review): the original comment said to "uncomment" the dump below,
# but it is NOT commented out -- classifier.pkl is overwritten on every run.
pickle.dump(clf, open(os.path.join(cur_dir,
            'pkl_objects', 'classifier.pkl'), 'wb')
            , protocol=4)
| [
"carmezim.filho@gmail.com"
] | carmezim.filho@gmail.com |
95dbbcbc3d2648664f5686ae072b43c1300de367 | a19cd0781a008cc02ecfd445e4cbfa4730c96da4 | /GameMain.py | abdb70d3b71c285af321a3c119aa0de8cb017b7f | [] | no_license | mattx245/Binary-search-game | 3011a11fb5e3a1a9f569c9bc7dbb8b736ea7f28b | c1f4e95aa948ac9953eb3f490a8c1f9faa227073 | refs/heads/main | 2023-02-15T05:01:18.487932 | 2021-01-13T00:54:38 | 2021-01-13T00:54:38 | 328,335,425 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,403 | py | # starts and ends the game
def start_game():
    """Prompt loop: start a round of the binary-search game, or quit.

    BUG FIX: the original recursed into ``start_game()`` after every round
    even though the ``while True`` loop already repeats, growing the call
    stack without bound over long sessions.
    """
    while True:
        val = input("Start binary search game? (y =yes, n = no): ").lower().strip()
        if val == 'y':
            binary_search()
        elif val == 'n':
            print("Thanks for playing")
            exit()
        else:
            print("Please enter a valid input")
# generates the array and the goal number
def array_gen():
    """Prompt for inclusive start/end bounds and return the integer range."""
    array_start = int(input("Please enter number for array start: "))
    array_end = int(input("Please enter number for array end: "))
    # +1 so the end value is included in the searchable range.
    array = list(range(array_start, array_end + 1))
    return array
# the actual binary search
def binary_search():
    """Interactively guess the user's number by binary search.

    The user answers y (found) / h (higher) / l (lower) after each probe;
    the search narrows until the number is found or the range is exhausted.
    """
    bs_array = array_gen()
    print("Think of a number within the array")
    start = 0
    end = len(bs_array) - 1
    while start <= end:
        mid = start + (end - start) // 2  # overflow-safe midpoint
        mid_val = bs_array[mid]
        print(f"Is this your number: {mid_val}?")
        high_low = input("Type y for yes, h for higher, or l for lower: ").lower().strip()
        if high_low == 'y':
            print(f"Target value {mid_val} found.")
            break
        elif high_low == "l":
            end = mid - 1
        elif high_low == "h":
            start = mid + 1
        else:
            # BUG FIX: corrected the "enster" typo in the prompt.
            print("Please enter valid input")
    else:
        # Robustness: previously the function ended silently when the range
        # was exhausted without a match.
        print("No number left in the array - were the answers consistent?")
# Entry point: launch the interactive prompt loop.
if __name__ == "__main__":
    start_game()
| [
"noreply@github.com"
] | mattx245.noreply@github.com |
ab0ad8a9c5ce560fc863e92be7fd87385d712a92 | 974fe767ebe0745667a1e9240d43f963f342e02b | /first_module.py | 3e609c70001750529bce8175450a1e5c47368021 | [] | no_license | pachai22/Thoughtworks-Learning | 7cd58cbd46788c36fa5268b97fbfae7dcc9cbd08 | c2aa32b94a64139c58eca2870459386919572345 | refs/heads/master | 2022-12-08T06:10:07.848843 | 2020-09-01T11:14:36 | 2020-09-01T11:14:36 | 267,534,401 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | def my_name(name):
return "Hi,This is "+name+"."
class var():
    # NOTE(review): __new__'s first parameter is really the class (cls), so
    # the assignments below set attributes on the *class* itself (shared by
    # all callers), and the return value is a plain string -- calling
    # var(a, b) never yields a var instance.
    def __new__(self,a,b):
        self.a=a
        self.b=b
        return '''x : %s
    y : %s'''%(self.a,self.b)
| [
"pachai4477@gmail.com"
] | pachai4477@gmail.com |
ff03ba9dd490b0cd9ea5efa4a2b9514102b6fa31 | 943e4508f2a430c6990df9b64bacecf5ed826a2c | /heikin-tuki.py | f3f152777d3ba42ab5c891cd30fefa4a5e2fab1d | [] | no_license | Tadataka-Matsumoto/test_machine_learning | 123daf25ed584121bc38a158408a8358d88b1752 | 1162173cbe204ad089a7326944537a166fb192cd | refs/heads/main | 2023-01-22T17:46:16.801768 | 2020-11-18T11:09:13 | 2020-11-18T11:09:13 | 313,475,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | import matplotlib.pyplot as plt
import pandas as pd
# Load the 10-year temperature CSV ('月' = month, '気温' = temperature).
df = pd.read_csv("../../kion10y.csv", encoding="utf-8")
# Compute the mean temperature per month (group sum / group count).
g = df.groupby(['月'])["気温"]
gg = g.sum() / g.count()
# Print the monthly averages, then plot and save the figure.
print(gg)
gg.plot()
plt.savefig("tenki-heikin-tuki.png")
plt.show() | [
"tdtk1538.gmail.com"
] | tdtk1538.gmail.com |
eb4b4ae21dcafc7f0e93713e30706b98ea8bd5c6 | f2099249a4bf74eeebf52bca8b8c4c1339a74af1 | /exam/views.py | 400838db5ac75ebfb9455d41a8af721f979917c2 | [] | no_license | kushwanth1541/onlinetest | 948db1bfcc870bdcc607ea36cce32298443eb79d | d102573e67d8534ac09aa6bcb222574fa336eb52 | refs/heads/master | 2022-02-12T21:35:05.167451 | 2019-07-10T14:49:33 | 2019-07-10T14:49:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,143 | py | from django.shortcuts import render
from .forms import AddSubjectFrom,AddExamForm,AddQuestionForm
from django.views.generic import ListView
# Create your views here.
def exam(request):
    # Static page: render the exam overview template.
    return render(request,'exam.html',{})
def question(request):
    # Static page: render the question overview template.
    return render(request,'question.html',{})
def subject(request):
    # Static page: render the subject overview template.
    return render(request,'subject.html',{})
def addExam(request):
    """Show the exam-creation form; persist it on a valid POST."""
    is_post = request.method == 'POST'
    # Bound form for POST (keeps submitted data and errors), empty otherwise.
    form = AddExamForm(request.POST) if is_post else AddExamForm()
    if is_post and form.is_valid():
        form.save()
    return render(request,'add_exam.html',{'form':form})
def addQuestion(request):
    # POST: bind the submitted data and save the question when valid.
    if request.method == 'POST':
        form = AddQuestionForm(request.POST)
        if form.is_valid():
            form.save()
    # GET (or any other method): present an empty form.
    else:
        form = AddQuestionForm()
    # Invalid POSTs fall through with the bound form so errors are shown.
    return render(request,'add_question.html',{'form':form})
def addSubject(request):
    """Show the subject-creation form; save it and confirm on a valid POST."""
    # BUG FIX: initialise msg up front -- the original left it unbound when
    # a POST failed validation, raising NameError at render time.
    msg = ''
    if request.method == 'POST':
        form = AddSubjectFrom(request.POST)
        if form.is_valid():
            form.save()
            msg = 'Saved.'
            form = AddSubjectFrom()
    else:
        form = AddSubjectFrom()
    # BUG FIX: render() was given two positional dicts; the second one was
    # interpreted as the content_type argument. Merge into one context dict.
    return render(request,'add_subject.html',{'form':form, 'msg':msg})
def ranks(request):
    # Placeholder view -- presumably meant to show exam rankings; TODO implement.
    pass
def marks(request):
    # Placeholder view -- presumably meant to show marks; TODO implement.
    pass
| [
"thapabikash48@gmail.com"
] | thapabikash48@gmail.com |
c5d2f1ddcd047f4b8538569b9e1d096d0c8dca9d | 13ce77f9beb41aee1d23f51d18c690776ed02d6d | /buscar_reco.py | 3bba563a38d16d3c3be73347ff9ee65eeb296c2e | [] | no_license | brunocaracini/SpotiPy | cec35e12b54e3537fd2cd04f24bfbebdf72c3c47 | 17d4236203a0ad9ff44b27fb6ea40ddf27400e3e | refs/heads/master | 2023-08-19T06:39:58.365677 | 2021-09-20T13:30:48 | 2021-09-20T13:30:48 | 216,396,489 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,180 | py | import sys
import spotipy
import json
import os
''' shows recommendations for the given artist
'''
from spotipy.oauth2 import SpotifyClientCredentials
client_credentials_manager = SpotifyClientCredentials()
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
sp.trace=False
def get_artist(name):
results = sp.search(q='artist:' + name, type='artist')
items = results['artists']['items']
if len(items) > 0:
return items[0]
else:
return None
def show_recommendations_for_artist(artist):
artists = []
results = sp.recommendations(seed_artists = [artist['id']])
for track in results['tracks']:
if track['artists'][0]['name'] not in artists and track['artists'][0]['name'] is not artist:
artists.append(track['artists'][0]['name'])
return artists
with open('artista_recomendaciones.txt') as json_file:
artista_recomendaciones = json.load(json_file)
artista = artista_recomendaciones
name = artista
artist = get_artist(name)
artistas = show_recommendations_for_artist(artist)
with open('reco.txt', 'w') as outfile:
json.dump(artistas, outfile)
| [
"noreply@github.com"
] | brunocaracini.noreply@github.com |
182becdc79777ba8ac099a3d45657e88a44d6a9e | 6f664430050c9ccb22f079f43feccdbd76c4cd44 | /接口测试/AES_demo.py | fb8998b90f9db5558c61f687581f8212a6e94964 | [] | no_license | xiaomo001/python- | 398779a734506498d8aaf6fcd38371bd38115c97 | cebaba7f873aec930dfe69b2517556bd410b848a | refs/heads/master | 2020-09-21T14:52:07.291268 | 2019-11-29T09:28:49 | 2019-11-29T09:28:49 | 224,822,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,520 | py | import base64
from Crypto.Cipher import AES
'''
采用AES对称加密算法
'''
# str不是16的倍数那就补足为16的倍数
def add_to_16(value):
while len(value) % 16 != 0:
value += '\0'
return str.encode(value) # 返回bytes
#加密方法
def encrypt_oracle(encrypt_text):
# 秘钥
key = '*4&4^3%2$7#6@9!8'
# 初始化加密器
aes = AES.new(add_to_16(key), AES.MODE_ECB)
#先进行aes加密
encrypt_aes = aes.encrypt(add_to_16(encrypt_text))
#用base64转成字符串形式
encrypted_text = str(base64.encodebytes(encrypt_aes), encoding='utf-8') # 执行加密并转码返回bytes
print(encrypted_text)
#解密方法
def decrypt_oralce(decrypt_text):
# 秘钥
key = '*4&4^3%2$7#6@9!8'
# 初始化加密器
aes = AES.new(add_to_16(key), AES.MODE_ECB)
#优先逆向解密base64成bytes
base64_decrypted = base64.decodebytes(decrypt_text.encode(encoding='utf-8'))
#执行解密密并转码返回str
decrypted_text = str(aes.decrypt(base64_decrypted),encoding='utf-8').replace('\0','')
print(decrypted_text)
if __name__ == '__main__':
en_text = '{"status":1,"data":{"miniSwitch":1,"miniPage":[],"tray":[],"createDeskIcon":[],"popup":[],"Win10Toast":[],"trayPopup":[]}}'
de_text = "uaOp2Yi1ADdATF9HNAp8sZTInslYcL8wMkASNQ3rHrN/3/qxDPIqNx+s6aIYdvoKldkJNntkrITj6LpBeGfzM9vgGdcMGvKp/KKJGH5QGEf2Db/pRLkhN7Sm/dkg7+NGwzR1BFFwiX47TSVLWRkMekUu4xNuxedhOYBWudl/fgw="
encrypt_oracle(en_text)
decrypt_oralce(de_text) | [
"1060524911@qq.com"
] | 1060524911@qq.com |
c934430ae21a8a8433b889331bf4f89673a630c6 | 54bf627aca5a0b1278bc309ec07d9dff2a7bf99f | /setup.py | 2dfe393caf4e3953a94170da4d0436d888140143 | [
"MIT"
] | permissive | mtvbrianking/py-cli-clock | d1417bb30394b814f8a6ece2f3ae2d34ff42c010 | 193df446e0475a3a5567ce9ecf7324f6ecd9d216 | refs/heads/master | 2020-04-27T11:54:40.086237 | 2019-03-13T13:11:45 | 2019-03-13T13:11:45 | 174,313,870 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | #!/usr/bin/env python3
from setuptools import setup
from py_clock.utils import get_file_contents
from os import path
readme = path.abspath(path.join(path.dirname(__file__), "README.md"))
setup(
name = 'py-cli-clock',
version = '0.1.0',
author = "Brian Matovu",
author_email = "mtvbrianking@gmail.com",
description = ("This is a demo project trying to learn Python basics; "
"Project structure, documentation, automated tests, continuous integration."),
long_description = get_file_contents(readme),
license = "MIT",
packages = ['py_clock'],
keywords = "Python CLI Clock",
url = "https://github.com/mtvbrianking/py-cli-clock",
entry_points = {
'console_scripts': [
'py_clock = py_clock.__main__:main'
]
})
| [
"mtvbrianking@gmail.com"
] | mtvbrianking@gmail.com |
63ca94a8fed10d5cf28fdd7a594f71b39028dd5c | fe27cb911565496e2f2f9aa0b400d968d497db7d | /models/invoice.py | a45ce551127d557bd2c449d11e33022f2426e666 | [] | no_license | aliomattux/mage2odoo | e300c1e0097d4b0f40258f8903298edd29ac8b98 | ec49fb6296eab17b21354b7cedc47dd57fc9f563 | refs/heads/master | 2020-05-21T04:34:26.257437 | 2017-02-24T07:29:37 | 2017-02-24T07:29:37 | 27,386,601 | 21 | 22 | null | 2017-02-24T07:29:10 | 2014-12-01T15:51:50 | Python | UTF-8 | Python | false | false | 277 | py | from openerp.osv import osv, fields
class AccountInvoice(osv.osv):
_inherit = 'account.invoice'
_columns = {
'mage_export_error': fields.boolean('Magento Export Error', copy=False),
'external_id': fields.integer('Magento Invoice ID', copy=False, select=True),
}
| [
"kyle.waid@gcotech.com"
] | kyle.waid@gcotech.com |
f8720cb40162973a04d2461826c550fb6a66e68e | 9672e0b45f72261c069aa8140a01e861b8f8db45 | /query/migrations/0005_auto_20150526_1736.py | 970fbb78131ebda45f4baae6af990612482e6cf4 | [] | no_license | KeleiAzz/SCRC_server | 40882c0d5804b0488dd31f4d4db353616d318e48 | 669d45e4d5059cfc766a2a0852d23522d2af7d84 | refs/heads/master | 2020-04-06T10:18:40.817298 | 2016-12-04T06:07:53 | 2016-12-04T06:07:53 | 35,839,109 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,782 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('query', '0004_evidence'),
]
operations = [
migrations.AlterField(
model_name='evidence',
name='h1',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h10',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h11',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h12',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h13',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h14',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h15',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h16',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h17',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h18',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h19',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h2',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h20',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h21',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h22',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h23',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h3',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h4',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h5',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h6',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h7',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h8',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='evidence',
name='h9',
field=models.IntegerField(blank=True, null=True),
preserve_default=True,
),
]
| [
"kgong@ncsu.edu"
] | kgong@ncsu.edu |
0d5d76ef5b074b1347fd493018487bfa5c02aa7b | 0019ea5621577ab9a9a694e3ef91d913e981a28e | /dataset_builder/feature_extractor/test_sub2vec_model_creator.py | ee9c0388664fcada20f99ab062229fd46de862ec | [] | no_license | sungc1/fake-news-framework_Py3 | 676710b3bf7b8feb4c237ffed7d1d280f4967890 | e3552b5bc2a30dbd52ad893ce8dd29aa2242f864 | refs/heads/main | 2023-01-19T23:42:13.294446 | 2020-12-01T18:38:31 | 2020-12-01T18:38:31 | 428,178,049 | 1 | 0 | null | 2021-11-15T08:18:23 | 2021-11-15T08:18:23 | null | UTF-8 | Python | false | false | 9,808 | py | import random
from unittest import TestCase
import networkx as nx
import pandas as pd
import numpy as np
from DB.schema_definition import DB, AuthorConnection, Author, Post
from dataset_builder.feature_extractor.sub2vec_model_creator import Sub2VecModelCreator
from dataset_builder.sub2vec_feature_generator import Sub2VecFeatureGenerator
class TestSub2VecModelCreator(TestCase):
@classmethod
def setUpClass(cls):
""" get_some_resource() is slow, to avoid calling it for each test use setUpClass()
and store the result as class variable
"""
super(TestSub2VecModelCreator, cls).setUpClass()
cls._db = DB()
cls._db.setUp()
cls.sub2vec_model_creator = Sub2VecModelCreator(cls._db)
cls.sub2vec_feature_generator = Sub2VecFeatureGenerator(cls._db, **{'authors': [], 'posts': {}})
edges = [(0, 4), (2, 0), (1, 3), (3, 1), (0, 1), (1, 2), (4, 0), (4, 3), (2, 3), (3, 0)]
cls.connected_undirected_graph = cls.create_undirected_graph(5, edges, 'connected_undirected_graph')
cls.unconnected_directed_graph = cls.connected_directed_graph(7, edges, 'unconnected_directed_graph')
cls.connected_directed_graph = cls.connected_directed_graph(5, edges, 'connected_directed_graph')
cls.unconnected_undirected_graph = cls.create_undirected_graph(7, edges, 'unconnected_undirected_graph')
cls.add_graph_to_db(cls.connected_undirected_graph)
cls.add_graph_to_db(cls.unconnected_directed_graph)
cls.add_graph_to_db(cls.connected_directed_graph)
cls.add_graph_to_db(cls.unconnected_undirected_graph)
@classmethod
def add_graph_to_db(cls, graph):
post = Post(post_id=str(graph.graph['name']), domain='flickr', post_osn_id=str(graph.graph['name']))
post.post_type = 'labels'
author_connections = []
for edge in graph.edges():
author_connections.append(AuthorConnection(source_author_guid=edge[0], destination_author_guid=edge[1],
connection_type=graph.graph['name']))
authors = []
for node in graph.nodes():
authors.append(Author(name=str(node), domain=str(graph.graph['name']), author_guid=str(node)))
cls._db.addPosts([post])
cls._db.addPosts(author_connections)
cls._db.addPosts(authors)
@classmethod
def create_undirected_graph(cls, nodes_count, edges, graph_name):
graph = nx.Graph()
return cls.build_graph(edges, graph, graph_name, nodes_count)
@classmethod
def connected_directed_graph(cls, nodes_count, edges, graph_name):
graph = nx.DiGraph()
return cls.build_graph(edges, graph, graph_name, nodes_count)
@classmethod
def build_graph(cls, edges, graph, graph_name, nodes_count):
graph.add_nodes_from(range(nodes_count))
graph.add_edges_from(edges)
# nx.set_node_attributes(graph, {}, 'label')
nx.set_node_attributes(graph, values={}, name='label')
graph.graph['name'] = graph_name
return graph
def setUp(self):
random.seed(900)
def assertArrayEquals(self, actual_vector, expected_vector):
for actual_value, expected_value in zip(actual_vector, expected_vector):
self.assertAlmostEqual(actual_value, expected_value, places=7)
def test_generate_structural_embedding_for_connected_undirected_graph(self):
args = {'dimensions': 128,
'window': 2,
'walkLength': 1000,
'iterations': 20,
'alpha': 0.5,
'dm': 1,
'wl_iterations': 2,
'randomWalkCount': 10}
embeddings = self.sub2vec_model_creator.graph_structural_embedding([self.connected_undirected_graph], **args)
self.assertEqual(len(embeddings), 1)
self.assertEqual(len(embeddings[0]), 128)
actual_vector = np.array((embeddings[0]))
self.assertTrue(any(actual_vector))
def test_generate_structural_embedding_for_unconnected_undirected_graph(self):
args = {'dimensions': 138,
'window': 2,
'walkLength': 100,
'iterations': 20,
'alpha': 0.5,
'dm': 1,
'randomWalkCount': 10}
embeddings = self.sub2vec_model_creator.graph_structural_embedding([self.unconnected_undirected_graph], **args)
self.assertEqual(len(embeddings), 1)
self.assertEqual(len(embeddings[0]), 138)
actual_vector = np.array((embeddings[0]))
self.assertTrue(any(actual_vector))
def test_generate_structural_embedding_for_connected_directed_graph(self):
args = {'dimensions': 138,
'window': 2,
'walkLength': 30,
'iterations': 20,
'alpha': 0.5,
'dm': 1,
'randomWalkCount': 10}
embeddings = self.sub2vec_model_creator.graph_structural_embedding([self.connected_directed_graph], **args)
self.assertEqual(len(embeddings), 1)
self.assertEqual(len(embeddings[0]), 138)
actual_vector = np.array((embeddings[0]))
self.assertTrue(any(actual_vector))
def test_generate_structural_embedding_for_unconnected_directed_graph(self):
args = {'dimensions': 138,
'window': 2,
'walkLength': 40,
'iterations': 20,
'alpha': 0.5,
'dm': 1,
'randomWalkCount': 10}
embeddings = self.sub2vec_model_creator.graph_structural_embedding([self.unconnected_directed_graph], **args)
self.assertEqual(len(embeddings), 1)
self.assertEqual(len(embeddings[0]), 138)
actual_vector = np.array((embeddings[0]))
self.assertTrue(any(actual_vector))
def test_generate_structural_embedding_for_4_graphs(self):
args = {'dimensions': 118,
'window': 2,
'walkLength': 40,
'iterations': 20,
'alpha': 0.5,
'dm': 1,
'randomWalkCount': 10}
graphs = [self.unconnected_directed_graph, self.connected_undirected_graph,
self.unconnected_undirected_graph, self.connected_directed_graph]
embeddings = self.sub2vec_model_creator.graph_structural_embedding(graphs, **args)
self.assertEqual(len(embeddings), 4)
self.assertEqual(len(embeddings[0]), 118)
self.assertEqual(len(embeddings[1]), 118)
self.assertEqual(len(embeddings[2]), 118)
self.assertEqual(len(embeddings[3]), 118)
self.assertTrue(any(np.array((embeddings[0]))))
self.assertTrue(any(np.array((embeddings[1]))))
self.assertTrue(any(np.array((embeddings[2]))))
self.assertTrue(any(np.array((embeddings[3]))))
def test_generate_author_features_from_sub2vec(self):
dimensions = 118
args = {'dimensions': dimensions,
'window': 2,
'walkLength': 40,
'iterations': 20,
'alpha': 0.5,
'dm': 1,
'randomWalkCount': 10}
graphs = [self.unconnected_directed_graph, self.connected_undirected_graph,
self.unconnected_undirected_graph, self.connected_directed_graph]
embeddings = self.sub2vec_model_creator.graph_structural_embedding(graphs, **args)
authors_features = self.sub2vec_model_creator.convert_embedding_to_author_features(graphs, embeddings)
self.assertEqual(len(authors_features), 4 * dimensions)
for graph, embedding in zip(graphs, embeddings):
actual = [f.attribute_value for f in authors_features if f.author_guid == graph.graph['name']]
self.assertArrayEquals(actual, embedding)
def test_load_graphs(self):
graphs = self.sub2vec_model_creator.load_graphs()
expected_graphs = [self.unconnected_directed_graph, self.connected_undirected_graph,
self.unconnected_undirected_graph, self.connected_directed_graph]
expected_graph_map = {expected_graph.graph['name']: expected_graph for expected_graph in expected_graphs}
for actual_graph in graphs:
expected_graph = expected_graph_map[actual_graph.graph['name']]
self.assertNodes(actual_graph, expected_graph)
self.assertEdges(actual_graph, expected_graph)
pass
def test_execute(self):
graphs = self.sub2vec_model_creator.load_graphs()
self.sub2vec_model_creator.execute()
embedding_table_name = self.sub2vec_model_creator._table_name
df = pd.read_sql_table(embedding_table_name, self._db.engine)
self.assertTupleEqual(df.shape, (len(graphs), self.sub2vec_model_creator._num_of_dimensions + 1))
pass
def test_sub2vec_feature_generator(self):
self.sub2vec_model_creator.execute()
self.sub2vec_feature_generator.execute()
graphs = [self.unconnected_directed_graph, self.connected_undirected_graph,
self.unconnected_undirected_graph, self.connected_directed_graph]
for graph in graphs:
actual_dimensions_count = len(self._db.get_author_features_by_author_guid(graph.graph['name']))
self.assertEqual(actual_dimensions_count, self.sub2vec_model_creator._num_of_dimensions)
def assertEdges(self, actual_graph, expected_graph):
edges = [(int(v), int(u)) for v, u in actual_graph.edges()]
self.assertListEqual(list(sorted(expected_graph.edges())), list(sorted(edges)))
def assertNodes(self, actual_graph, expected_graph):
self.assertListEqual(list(expected_graph.nodes()), list(sorted(map(int, actual_graph.nodes()))))
| [
"aviade@post.bgu.ac.il"
] | aviade@post.bgu.ac.il |
1049caf450a5e4ca1eedc4aec81d6fe28ca216eb | bca6e5728aa041d348482e4265fd2c6f1f4a67d3 | /ucsmsdk/mometa/storage/StorageSasPort.py | 6a17becbc5ebeaadbdf5cee82300824e1e91d16c | [
"Apache-2.0"
] | permissive | psterdale/ucsmsdk | fc7c519ea1a43c5e77a015e3605bc2acfe3c917a | 821b805c18ad7652a79d4f581f4695558f17e943 | refs/heads/master | 2020-12-03T08:11:54.382427 | 2017-06-28T12:46:52 | 2017-06-28T12:46:52 | 95,667,187 | 1 | 0 | null | 2017-06-28T12:33:27 | 2017-06-28T12:33:27 | null | UTF-8 | Python | false | false | 3,873 | py | """This module contains the general information for StorageSasPort ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class StorageSasPortConsts:
LC_ALLOCATED = "allocated"
LC_AVAILABLE = "available"
LC_DEALLOCATED = "deallocated"
LC_REPURPOSED = "repurposed"
LINK_SPEED_1_5_GBPS = "1-5-gbps"
LINK_SPEED_12_GBPS = "12-gbps"
LINK_SPEED_3_GBPS = "3-gbps"
LINK_SPEED_6_GBPS = "6-gbps"
LINK_SPEED_DISABLED = "disabled"
LINK_SPEED_DOWN = "down"
LINK_SPEED_HOST_POWER_OFF = "host-power-off"
LINK_SPEED_UNKNOWN = "unknown"
LINK_SPEED_UNSUPPORTED_DEVICE = "unsupported-device"
class StorageSasPort(ManagedObject):
"""This is StorageSasPort class."""
consts = StorageSasPortConsts()
naming_props = set([u'id'])
mo_meta = MoMeta("StorageSasPort", "storageSasPort", "sas-port-[id]", VersionMeta.Version312b, "InputOutput", 0x3f, [], ["read-only"], [u'storageEnclosureLocalDiskConfig', u'storageLocalDisk'], [], ["Get"])
prop_meta = {
"address": MoPropertyMeta("address", "address", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"endpoint": MoPropertyMeta("endpoint", "endpoint", "uint", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version312b, MoPropertyMeta.NAMING, 0x8, None, None, None, [], ["0-4294967295"]),
"lc": MoPropertyMeta("lc", "lc", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["allocated", "available", "deallocated", "repurposed"], []),
"link_descr": MoPropertyMeta("link_descr", "linkDescr", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"link_speed": MoPropertyMeta("link_speed", "linkSpeed", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["1-5-gbps", "12-gbps", "3-gbps", "6-gbps", "disabled", "down", "host-power-off", "unknown", "unsupported-device"], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version312b, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
}
prop_map = {
"address": "address",
"childAction": "child_action",
"dn": "dn",
"endpoint": "endpoint",
"id": "id",
"lc": "lc",
"linkDescr": "link_descr",
"linkSpeed": "link_speed",
"rn": "rn",
"sacl": "sacl",
"status": "status",
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.address = None
self.child_action = None
self.endpoint = None
self.lc = None
self.link_descr = None
self.link_speed = None
self.sacl = None
self.status = None
ManagedObject.__init__(self, "StorageSasPort", parent_mo_or_dn, **kwargs)
| [
"vijayvikrant84@gmail.com"
] | vijayvikrant84@gmail.com |
ba2e7739688235d8fefeb869845ec30a64e144fb | b6ddf1cc2d3917dfbe513036eaff66bcdbd2b77b | /qa/rpc-tests/pegging.py | e21208dabd563aca66d0102d66106e19e2f36148 | [
"MIT"
] | permissive | vicsn/elements | 114f3ed296f5bbecdc7e34d2b0fe725682ed1b28 | e7f04f35ba22ef9833fb2077eb8c2c9b246f4fd7 | refs/heads/master | 2021-08-26T08:37:44.779204 | 2017-11-22T16:16:11 | 2017-11-22T16:16:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,663 | py | #!/usr/bin/env python2
from test_framework.authproxy import AuthServiceProxy, JSONRPCException
import os
import random
import sys
import time
import subprocess
import shutil
if len(sys.argv) != 3:
print("paths to bitcoind and sidechain daemon must be included as arguments")
sys.exit(0)
print(sys.argv[1])
print(sys.argv[2])
# Sync mempool, make a block, sync blocks
def sync_all(sidechain, sidechain2):
timeout = 20
while len(sidechain.getrawmempool()) != len(sidechain2.getrawmempool()):
time.sleep(1)
timeout -= 1
if timeout == 0:
raise Exception("Peg-in has failed to propagate.")
block = sidechain2.generate(1)
while sidechain.getblockcount() != sidechain2.getblockcount():
time.sleep(1)
timeout -= 1
if timeout == 0:
raise Exception("Blocks are not propagating.")
return block
fedpeg_key="cPxqWyf1HDGpGFH1dnfjz8HbiWxvwG8WXyetbuAiw4thKXUdXLpR"
fedpeg_pubkey="512103dff4923d778550cc13ce0d887d737553b4b58f4e8e886507fc39f5e447b2186451ae"
bitcoin_datadir="/tmp/"+''.join(random.choice('0123456789ABCDEF') for i in range(5))
bitcoin_pass=''.join(random.choice('0123456789ABCDEF') for i in range(10))
sidechain_datadir="/tmp/"+''.join(random.choice('0123456789ABCDEF') for i in range(5))
sidechain_pass=''.join(random.choice('0123456789ABCDEF') for i in range(10))
sidechain2_datadir="/tmp/"+''.join(random.choice('0123456789ABCDEF') for i in range(5))
sidechain2_pass=''.join(random.choice('0123456789ABCDEF') for i in range(10))
bitcoin_port = 8000 + os.getpid()%999
sidechain_port = bitcoin_port + 1
sidechain2_port = bitcoin_port + 2
sidechain1_p2p_port = bitcoin_port + 3
sidechain2_p2p_port = bitcoin_port + 4
os.makedirs(bitcoin_datadir)
os.makedirs(sidechain_datadir)
os.makedirs(sidechain2_datadir)
with open(os.path.join(bitcoin_datadir, "bitcoin.conf"), 'w') as f:
f.write("regtest=1\n")
f.write("rpcuser=bitcoinrpc\n")
f.write("rpcpassword="+bitcoin_pass+"\n")
f.write("rpcport="+str(bitcoin_port)+"\n")
f.write("discover=0\n")
f.write("listen=0\n")
f.write("testnet=0\n")
f.write("txindex=1\n")
f.write("daemon=1\n")
f.write("listen=0\n")
with open(os.path.join(sidechain_datadir, "elements.conf"), 'w') as f:
f.write("regtest=1\n")
f.write("rpcuser=sidechainrpc\n")
f.write("rpcpassword="+sidechain_pass+"\n")
f.write("rpcport="+str(sidechain_port)+"\n")
f.write("discover=0\n")
f.write("testnet=0\n")
f.write("txindex=1\n")
f.write("fedpegscript="+fedpeg_pubkey+"\n")
f.write("daemon=1\n")
f.write("mainchainrpchost=127.0.0.1\n")
f.write("mainchainrpcport="+str(bitcoin_port)+"\n")
f.write("mainchainrpcuser=bitcoinrpc\n")
f.write("mainchainrpcpassword="+bitcoin_pass+"\n")
f.write("validatepegin=1\n")
f.write("validatepegout=0\n")
f.write("port="+str(sidechain1_p2p_port)+"\n")
f.write("connect=localhost:"+str(sidechain2_p2p_port)+"\n")
f.write("listen=1\n")
with open(os.path.join(sidechain2_datadir, "elements.conf"), 'w') as f:
f.write("regtest=1\n")
f.write("rpcuser=sidechainrpc2\n")
f.write("rpcpassword="+sidechain2_pass+"\n")
f.write("rpcport="+str(sidechain2_port)+"\n")
f.write("discover=0\n")
f.write("testnet=0\n")
f.write("txindex=1\n")
f.write("fedpegscript="+fedpeg_pubkey+"\n")
f.write("daemon=1\n")
f.write("mainchainrpchost=127.0.0.1\n")
f.write("mainchainrpcport="+str(bitcoin_port)+"\n")
f.write("mainchainrpcuser=bitcoinrpc\n")
f.write("mainchainrpcpassword="+bitcoin_pass+"\n")
f.write("validatepegin=1\n")
f.write("validatepegout=0\n")
f.write("port="+str(sidechain2_p2p_port)+"\n")
f.write("connect=localhost:"+str(sidechain1_p2p_port)+"\n")
f.write("listen=1\n")
try:
# Default is 8, meaning 8+2 confirms for wallet acceptance normally
# this will require 10+2.
sidechain_args = " -peginconfirmationdepth=10 "
# Start daemons
print("Starting daemons at "+bitcoin_datadir+", "+sidechain_datadir+" and "+sidechain2_datadir)
bitcoindstart = sys.argv[1]+"/bitcoind -datadir="+bitcoin_datadir
subprocess.Popen(bitcoindstart.split(), stdout=subprocess.PIPE)
sidechainstart = sys.argv[2]+"/elementsd -datadir="+sidechain_datadir + sidechain_args
subprocess.Popen(sidechainstart.split(), stdout=subprocess.PIPE)
sidechain2start = sys.argv[2]+"/elementsd -datadir="+sidechain2_datadir + sidechain_args
subprocess.Popen(sidechain2start.split(), stdout=subprocess.PIPE)
print("Daemons started")
time.sleep(3)
bitcoin = AuthServiceProxy("http://bitcoinrpc:"+bitcoin_pass+"@127.0.0.1:"+str(bitcoin_port))
sidechain = AuthServiceProxy("http://sidechainrpc:"+sidechain_pass+"@127.0.0.1:"+str(sidechain_port))
sidechain2 = AuthServiceProxy("http://sidechainrpc2:"+sidechain2_pass+"@127.0.0.1:"+str(sidechain2_port))
print("Daemons started, making blocks to get funds")
bitcoin.generate(101)
sidechain.generate(101)
addr = bitcoin.getnewaddress()
addrs = sidechain.getpeginaddress()
txid1 = bitcoin.sendtoaddress(addrs["mainchain_address"], 24)
# 10+2 confirms required to get into mempool and confirm
bitcoin.generate(11)
time.sleep(2)
proof = bitcoin.gettxoutproof([txid1])
raw = bitcoin.getrawtransaction(txid1)
print("Attempting peg-in")
try:
pegtxid = sidechain.claimpegin(raw, proof)
raise Exception("Peg-in should not mature enough yet, need another block.")
except JSONRPCException as e:
assert("Peg-in Bitcoin transaction needs more confirmations to be sent." in e.error["message"])
pass
# Should fail due to non-matching wallet address
try:
pegtxid = sidechain.claimpegin(raw, proof, sidechain.getnewaddress())
raise Exception("Peg-in with non-matching claim_script should fail.")
except JSONRPCException as e:
assert("Given claim_script does not match the given Bitcoin transaction." in e.error["message"])
pass
# 12 confirms allows in mempool
bitcoin.generate(1)
# Should succeed via wallet lookup for address match, and when given
pegtxid1 = sidechain.claimpegin(raw, proof)
# Will invalidate the block that confirms this transaction later
blockhash = sync_all(sidechain, sidechain2)
sidechain.generate(5)
tx1 = sidechain.gettransaction(pegtxid1)
if "confirmations" in tx1 and tx1["confirmations"] == 6:
print("Peg-in is confirmed: Success!")
else:
raise Exception("Peg-in confirmation has failed.")
# Look at pegin fields
decoded = sidechain.decoderawtransaction(tx1["hex"])
assert decoded["vin"][0]["is_pegin"] == True
assert len(decoded["vin"][0]["pegin_witness"]) > 0
# Quick reorg checks of pegs
sidechain.invalidateblock(blockhash[0])
if sidechain.gettransaction(pegtxid1)["confirmations"] != 0:
raise Exception("Peg-in didn't unconfirm after invalidateblock call.")
# Re-enters block
sidechain.generate(1)
if sidechain.gettransaction(pegtxid1)["confirmations"] != 1:
raise Exception("Peg-in should have one confirm on side block.")
sidechain.reconsiderblock(blockhash[0])
if sidechain.gettransaction(pegtxid1)["confirmations"] != 6:
raise Exception("Peg-in should be back to 6 confirms.")
# Do many claims in mempool
n_claims = 100
print("Flooding mempool with many small claims")
pegtxs = []
sidechain.generate(101)
for i in range(n_claims):
addrs = sidechain.getpeginaddress()
txid = bitcoin.sendtoaddress(addrs["mainchain_address"], 1)
bitcoin.generate(12)
proof = bitcoin.gettxoutproof([txid])
raw = bitcoin.getrawtransaction(txid)
pegtxs += [sidechain.claimpegin(raw, proof)]
sync_all(sidechain, sidechain2)
sidechain2.generate(1)
for pegtxid in pegtxs:
tx = sidechain.gettransaction(pegtxid)
if "confirmations" not in tx or tx["confirmations"] == 0:
raise Exception("Peg-in confirmation has failed.")
print("Success!")
except JSONRPCException as e:
print("Pegging testing failed, aborting:")
print(e.error)
except Exception as e:
print("Pegging testing failed, aborting:")
print(e)
print("Stopping daemons and cleaning up")
bitcoin.stop()
sidechain.stop()
time.sleep(5)
shutil.rmtree(sidechain_datadir)
shutil.rmtree(bitcoin_datadir)
| [
"gsanders87@gmail.com"
] | gsanders87@gmail.com |
15b8f14577f7590bcf869f4042ed8f7b22461db8 | 5e2ee1516c0e87f93e4feb59bf2b3c8f20f391de | /python_input.py | cb971024488f7b42ef3506b3c077e7bf94327cfa | [] | no_license | danardekani/python_input | b4e092fc87c64773fff71409645f75af9b715f88 | 74ccc5b78985caf2053d675b5e022225c75bc047 | refs/heads/master | 2020-12-11T17:28:24.389869 | 2020-01-14T18:56:41 | 2020-01-14T18:56:41 | 233,912,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | age = input('Enter your age: ')
print("You have live for {} seconds. this corresponds to {} years".format(int(age) * 365 * 24 * 60 * 60, age))
| [
"ardekanicapital@gmail.com"
] | ardekanicapital@gmail.com |
c34a9570ddb19f0ba395a45c52431351d60fc762 | 67bf8cefe00577487b00165783543eec611286e9 | /salmonberry/label.py | e4aa458715939728c970bd2e767ca60b8363adba | [
"MIT"
] | permissive | radomirbosak/salmonberry | 8cb3294a87ab5d145545823f20e86ef8f0b64809 | 8075d5a13e0b79605581b1cce82b99c0d0c8c300 | refs/heads/master | 2020-03-16T01:44:53.948099 | 2018-06-07T13:10:29 | 2018-06-07T13:10:29 | 132,447,434 | 2 | 0 | MIT | 2018-05-09T16:32:57 | 2018-05-07T10:54:03 | Python | UTF-8 | Python | false | false | 3,012 | py | import logging
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.feature_extraction.text import TfidfVectorizer
from .common import load_yaml, save_yaml, load_cache, load_labels, save_labels
def get_unlabeled(cache, labels):
labeled_ids = set(entry['id'] for entry in labels)
return [entry for entry in cache if entry['id'] not in labeled_ids]
def ask_labels(entry):
print('Title: ' + entry['title'])
answer = input('Labels: ')
return answer.split()
def label(cache_filename, labels_filename):
# load cache
cache = load_cache(cache_filename)
labeled = load_labels(labels_filename)
# get unlabeled entries
unlabeled = get_unlabeled(cache, labeled)
# ask for label(s)
print('For each article enter space-separated list of labels.')
num_new_labels = 0
try:
for entry in unlabeled:
labels = ask_labels(entry)
labeled.append({'id': entry['id'], 'labels': labels})
num_new_labels += 1
except EOFError:
logging.debug('Labeling interrupted')
# write down labels
logging.debug('Writing %d labels', num_new_labels)
save_labels(labeled, labels_filename)
def predict_labels(cache_filename, labels_filename):
    """Train one logistic-regression classifier per label on article titles,
    then read a sentence from stdin and print the predicted probability of
    each label, sorted descending.

    NOTE(review): the module defines a LOG logger but this function (like
    label()) logs through the root ``logging`` module — confirm which is
    intended.
    """
    # load files
    cache = load_cache(cache_filename)
    labeled = load_labels(labels_filename)
    cache_id_map = {article['id']: article for article in cache}
    # find all labels
    all_labels = set()
    for article in labeled:
        all_labels.update(article['labels'])
    all_labels = sorted(all_labels)
    logging.debug('Found these labels: %s', ', '.join(all_labels))
    # 1. learn models from labeled articles
    # 1.1 prepare x-s: TF-IDF features of the titles of the labeled articles
    logging.debug('preparing xs')
    titles = []
    for labeled_article in labeled:
        article = cache_id_map[labeled_article['id']]
        titles.append(article['title'])
    vectorizer = TfidfVectorizer(max_features=1000)
    xs = vectorizer.fit_transform(titles)
    # 1.2 prepare y-s for each label (one binary one-vs-rest classifier per label)
    logging.debug('training classifiers')
    models = {}
    scores = []
    for label in all_labels:
        ys = [label in labeled_article['labels'] for labeled_article in labeled]
        # NOTE(review): np.int was removed in NumPy >= 1.24; this needs
        # plain int (or np.int_) on modern NumPy — confirm pinned version.
        ys = np.array(ys, dtype=np.int)
        classifier = LogisticRegression(C=10)
        classifier.fit(xs, ys)
        # Score on the training set itself — an optimistic estimate.
        scores.append(classifier.score(xs, ys))
        models[label] = classifier
    logging.debug('score: %.2f +- %.2f', np.average(scores), np.std(scores))
    # 2. get user input
    sentence = input('Sentence: ')
    # 3. tokenize it and predict its labels
    features = vectorizer.transform([sentence])
    probs = []
    for label in all_labels:
        probs.append(
            (label, models[label].predict_proba(features)[0][1])
        )
    probs.sort(key=lambda x: x[1], reverse=True)
    colwidth = max(len(label) for label in all_labels)
    for label, pred in probs:
        print('{:>{colwidth}}: {:5.2f}'.format(label, 100 * pred, colwidth=colwidth))
| [
"radomir.bosak@gmail.com"
] | radomir.bosak@gmail.com |
4b655608f8398692c28ca98e39291340429ff692 | ba949e02c0f4a7ea0395a80bdc31ed3e5f5fcd54 | /problems/greedy/Solution621.py | bbd433a5a095fd0e364fc666dd13252734d0de78 | [
"MIT"
] | permissive | akaliutau/cs-problems-python | 6bc0a74064f6e9687fe58b13763da1fdf2e1f626 | 9b1bd8e3932be62135a38a77f955ded9a766b654 | refs/heads/master | 2023-05-11T22:19:06.711001 | 2021-06-04T11:14:42 | 2021-06-04T11:14:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,141 | py | """ Given a characters array tasks, representing the tasks a CPU needs to do,
where each letter represents a different task. Tasks could be done in any
order. Each task is done in one unit of time. For each unit of time, the CPU
could complete either one task or just be idle.
However, there is a non-negative integer n that represents the cooldown
period between two same tasks (the same letter in the array), that is that
there must be at least n units of time between any two same tasks.
Return the least number of units of times that the CPU will take to finish
all the given tasks
IDEA:
The total number of CPU intervals we need consists of busy and idle slots.
Number of busy slots is defined by the number of tasks to execute:
len(tasks). The problem is to compute a number of idle slots.
Maximum possible number of idle slots is defined by the frequency of the most
frequent task: idle_time <= (f_max - 1) * n.
Example:
ABCA, cooling=2
|AAAA|B |C |
cooling
just calculate #empty + tot_len of letters
"""
class Solution621:
    """Stub for LeetCode 621 (Task Scheduler); the idea is described in the
    module docstring above but no implementation has been written yet."""
    pass
| [
"aliaksei.kaliutau@gmail.com"
] | aliaksei.kaliutau@gmail.com |
2020b5d2f31c423e941cd4e1a18ce87939fafe5b | 465a528a42dc4bfddfbab6005dde0c816b78fb11 | /scripts/ner_tagger.py | 1bdb7567361caec190bf6ab2dd8303da0f86315c | [] | no_license | probablygary/text_cat | 39e75917a91542be213f5eb1c1b709ac3f5fd4fd | 4fa5de3f010b9aa6217a33518d8edfde2132e34b | refs/heads/master | 2020-03-23T22:12:09.768021 | 2018-07-30T06:29:19 | 2018-07-30T06:29:19 | 142,157,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,663 | py | import pandas as pd
import os
from math import ceil
#
# ─── ENVIRONMENT SETUP ──────────────────────────────────────────────────────────
#
# Data Directory, change as necessary
data_dir = './data/kaggle_test/'
# Read in data file
print('Loading dataset...')
ner_untagged = pd.DataFrame()
ner_tagged = pd.DataFrame()
def check_dir(path, file):
    """Return True if directory *path* directly contains a regular file named *file*.

    Parameters
    ----------
    path : str or os.PathLike
        Directory to scan (non-recursive).
    file : str
        File name to look for; compared as ``str(file)``, matching the
        original behavior.

    Notes
    -----
    The original version kept a flag, scanned every entry even after a match,
    and called ``close()`` on the iterator inside its own ``with`` block
    (a redundant double close).  ``any()`` short-circuits on the first match
    and the context manager handles cleanup.
    """
    target = str(file)
    with os.scandir(path) as entries:
        return any(entry.is_file() and entry.name == target for entry in entries)
#
# ─── PREPROCESSING ──────────────────────────────────────────────────────────────
#
def ner_tag(untagged):
    """Interactively annotate named entities for each sentence in *untagged*.

    For every row the sentence, its tokens and the category menu are printed;
    the user can repeatedly 'd'efine an entity (start offset, end offset,
    tag index), move to the 'n'ext sentence, or 'q'uit.  Progress is written
    to ``ner_tagged_TEMP.json`` after every sentence.

    Returns a DataFrame with columns post_id / sent_id / sentence / entities.

    NOTE(review): reads the module-level globals ``ner_tagged``,
    ``ner_untagged`` and ``data_dir`` for the progress display and the temp
    file path, and uses ``DataFrame.append``, which was removed in pandas 2.0
    (``pd.concat`` is the replacement) — confirm the pinned pandas version.
    """
    print('Processing data...')
    # spaCy-style NER categories; the menu index typed by the user maps to
    # the insertion order of this dict.
    ner_cats = {
        'PERSON': 'People, including fictional.',
        'NORP': 'Nationalities or religious or political groups.',
        'FACILITY': 'Buildings, airports, highways, bridges, etc.',
        'ORG': 'Companies, agencies, institutions, etc.',
        'GPE': 'Countries, cities, states.',
        'LOC': 'Non-GPE locations, mountain ranges, bodies of water.',
        'PRODUCT': 'Objects, vehicles, foods, etc. (Not services.)',
        'EVENT': 'Named hurricanes, battles, wars, sports events, etc.',
        'WORK_OF_ART': 'Titles of books, songs, etc.',
        'LAW': 'Named documents made into laws.',
        'LANGUAGE': 'Any named language.',
        'DATE': 'Absolute or relative dates or periods.',
        'TIME': 'Times smaller than a day.',
        'PERCENT': 'Percentage, including "%".',
        'MONEY': 'Monetary values, including unit.',
        'QUANTITY': 'Measurements, as of weight or distance.',
        'ORDINAL': '"first", "second", etc.',
        'CARDINAL': 'Numerals that do not fall under another type.'
    }
    options = {'d': 'define', 'n': 'next sentence', 'q': 'save and quit'}
    tagged = pd.DataFrame()
    for i, word_list in enumerate(untagged['words']):
        inp = ''
        # NOTE(review): starts with one empty inner list, so the stored
        # entity list always has a leading [] — confirm downstream consumers
        # expect that.
        entities = [[]]
        # Print UI
        print('\n\n\n{}\n{}\t{:>5}/{:<5}\t{:>4}% Done\n\n{}\n'.format(
            '-' * 50, untagged.loc[i, 'post_id'],
            len(ner_tagged) + len(tagged),
            len(ner_untagged) + len(ner_tagged),
            ceil((len(ner_tagged) + len(tagged)) * 100 /
                 (len(ner_untagged) + len(ner_tagged))),
            untagged.loc[i, 'sentence']))
        for token in word_list:
            print('{:<15}\tstart: {:>3}\tend: {:>3}\ttag: {}'.format(
                token['token'], token['start'], token['end'], token['tag']))
        print('\n')
        for j, ent in enumerate(ner_cats):
            print('{:<2}\t{:10}\t{}'.format(j, ent, ner_cats[ent]))
        for option in options:
            print('{:<2}\t{}'.format(option, options[option]))
        # User Input
        while inp not in options:
            inp = input('-->')
        while inp != 'n':
            if inp == 'd':
                print('Define Named Entity:')
                start = input('start: ')
                # while start not in [
                #     str(k)
                #     for k in range(0, len(untagged.loc[i, 'sentence']))
                # ]:
                #     start = input('start: ')
                start = int(start)
                end = input('end: ')
                # while end not in [
                #     str(k)
                #     for k in range(0, len(untagged.loc[i, 'sentence']))
                # ] or int(end) < int(start):
                #     end = input('end: ')
                while int(end) < int(start):
                    end = input('end: ')
                tag = input('tag: ')
                while tag not in [str(num) for num in range(0, len(ner_cats))]:
                    tag = input('tag: ')
                entities += [[int(start), int(end), list(ner_cats)[int(tag)]]]
            elif inp == 'q':
                break
            inp = input('-->')
        if inp == 'q':
            # Quit: return what has been tagged so far without saving this row.
            break
        tagged = tagged.append(
            {
                'post_id': untagged.loc[i, 'post_id'],
                'sent_id': untagged.loc[i, 'sent_id'],
                'sentence': untagged.loc[i, 'sentence'],
                'entities': entities
            },
            ignore_index=True)
        # Checkpoint after every sentence so a crash loses at most one row.
        tagged.to_json(
            path_or_buf=data_dir + 'ner_tagged_TEMP.json', orient='records')
    return tagged
# Resume an existing tagging session if ner_tagged.json is present, otherwise
# start from scratch; in both cases the interactive tagger runs and the
# result is written back to ner_tagged.json.
if check_dir(data_dir, 'ner_tagged.json'):
    ner_tagged = pd.read_json(
        path_or_buf=(data_dir + 'ner_tagged.json'), orient='records')
    ner_untagged = pd.read_json(
        path_or_buf=(data_dir + 'ner_untagged.json'), orient='records')
    # Only keep sentences that have not been tagged in a previous session.
    untagged_ids = pd.Index(ner_untagged['sent_id']).difference(
        pd.Index(ner_tagged['sent_id']))
    print('{} record(s) loaded.'.format(str(len(ner_untagged))))
    ner_untagged = ner_untagged[ner_untagged['sent_id'].isin(
        untagged_ids)].reset_index()
    # NOTE(review): DataFrame.append was removed in pandas 2.0 — confirm
    # pinned pandas version or migrate to pd.concat.
    ner_tagged = ner_tagged.append(ner_tag(ner_untagged), ignore_index=True)
    ner_tagged.to_json(path_or_buf=(data_dir + 'ner_tagged.json'))
    print('Tagging complete!')
else:
    ner_untagged = pd.read_json(
        path_or_buf=(data_dir + 'ner_untagged.json'), orient='records')
    print('{} record(s) loaded.'.format(str(len(ner_untagged))))
    ner_tagged = ner_tagged.append(ner_tag(ner_untagged), ignore_index=True)
    ner_tagged.to_json(path_or_buf=(data_dir + 'ner_tagged.json'))
    print('Tagging complete!')
| [
"gary.ng.jz@gmail.com"
] | gary.ng.jz@gmail.com |
18ed09fbe425c6e23a807270720e517c0825097d | 9dbe69085827de6c24e6315ee49c7f462ae9aa1c | /www/manage.py | b24a3a2e84ea3548cd50a6485a8d6ab59a903d4a | [] | no_license | Ggzzhh/LearnPython | f15d5c65a25f48fb8a9a43934227a455e195c541 | 397bf4e44a5595954c459883878c00cf4ee60307 | refs/heads/master | 2021-01-20T14:15:15.518408 | 2017-07-23T08:28:56 | 2017-07-23T08:28:56 | 88,743,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 812 | py | #!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings module before any Django import.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "learning_log.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    # Dispatch to Django's command-line interface (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
| [
"471992509@qq.com"
] | 471992509@qq.com |
6e330310f58089a7617c96a3fe90a35b6e094a35 | d0ec5898ac10ccbed7405d2b8dead64c4e5fc61f | /agent/managers/test_manager.py | cea53865114359b0422619a4e460de3df6c9359a | [
"Apache-2.0"
] | permissive | ingdestino/x-mano | ff87e326bcb32d3af9ea256897b0f0e0240e6293 | 0983664df59123a4472880a71400f00966c171c9 | refs/heads/master | 2020-03-21T22:33:35.657707 | 2017-06-23T14:07:21 | 2017-06-23T14:07:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,496 | py | # Copyright 2017 Giovanni Baggio Create Net / FBK (http://create-net.fbk.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Simple agent manager that tests the communication with the
federator manager
"""
import logging
import traceback
from common.rabbit_endpoint import RabbitEndpoint
from common.manager_abstract import ManagerAbstract
LOG = logging.getLogger(__name__)
class AgentManager(ManagerAbstract):
    """Test agent manager that exercises the RabbitMQ round-trip with the
    federator manager: it answers two scripted messages and shuts down on
    'bye' (see the message sequence in process_message)."""

    # Start a rabbitmq endpoint, consume messages on <domain-name>-to_domain
    # and publish on the exchange of the other two queues
    def __init__(self, rabbit_data):
        super(AgentManager, self).__init__(rabbit_data)
        # NOTE(review): bare super(...) expression below has no effect —
        # looks like a leftover; confirm and remove.
        super(AgentManager, self)
        queues_dict = {self._from_domain_queue: 'publish',
                       self._to_domain_queue: 'consume',
                       self._monitor_queue: 'publish'}
        self._domain_endpoint = RabbitEndpoint(self._username,
                                               self._password,
                                               self._rabbit_ip,
                                               self._rabbit_port,
                                               self._username + '-exchange',
                                               queues_dict,
                                               self.process_message)
        self._domain_endpoint.start()

    # when the manager is initialized, this function is executed
    # on a separated thread.
    # This test manager receive two messages from the federator manager,
    # it replies, and finally it shutdowns when the 'bye' message is received
    def process_message(self, queue, message):
        """Callback invoked by RabbitEndpoint for every consumed message."""
        try:
            if queue == self._to_domain_queue:
                if message['header'] == 'first message of sequence':
                    LOG.info('first message received')
                    self._domain_endpoint.send(self._from_domain_queue,
                                               {'header': 'data'})
                    self._domain_endpoint.send(self._from_domain_queue,
                                               {'header': 'data2'})
                    self._domain_endpoint.send(self._from_domain_queue,
                                               {'header': 'ACK1'})
                if message['header'] == 'second message of sequence':
                    LOG.info('second message received')
                    self._domain_endpoint.send(self._from_domain_queue,
                                               {'header': 'ACK2'})
                if message['header'] == 'bye':
                    self.terminate()
        except Exception:
            # NOTE(review): traceback.print_exc() already prints and returns
            # None, so this print() emits an extra 'None' line — confirm.
            print(traceback.print_exc())

    def process_command(self, command):
        """Placeholder: commands are accepted but ignored by this test manager."""
        try:
            pass
        except Exception:
            pass

    def stop_endpoint(self):
        """Request shutdown of the rabbit endpoint (delegates to terminate)."""
        self.terminate()

    def is_endpoint_stopped(self):
        """Return whether the underlying rabbit endpoint has stopped."""
        return self._domain_endpoint.is_endpoint_stopped()

    def terminate(self):
        """Join the endpoint thread, ending message consumption."""
        self._domain_endpoint.join()
| [
"g.baggio@create-net.org"
] | g.baggio@create-net.org |
46938072fdd88d92a0daec7ee0a0b4f408d355c2 | 1498148e5d0af365cd7fd16197174174a7fa9800 | /t000766_2.py | d782134c3d7d1053b4073e2e8647e4a5474ab4d1 | [] | no_license | feiyanshiren/myAcm | 59a2b80fe7e02787defcb152eee3eae26135322a | 00c7082d5143ddf87aeeafbdb6ce29da46dc8a12 | refs/heads/master | 2023-09-01T12:12:19.866447 | 2023-09-01T09:09:56 | 2023-09-01T09:09:56 | 148,560,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,681 | py | import time
# Find every palindromic number of up to 6 digits whose square AND cube are
# also palindromes, then print them five per line followed by the elapsed
# time.  Candidates are generated from digit templates ("x"/"y" are the free
# inner digits of the palindrome) instead of scanning all integers.
time1 = time.time()
h01 = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
h1 = ["1", "2", "3", "4", "5", "6", "7", "8", "9"]
h2 = ["11", "22", "33", "44",
      "55", "66", "77", "88", "99"]
# Templates: "x" is the middle digit(s), "y" the next inner pair.
hx3 = ["1x1", "2x2", "3x3", "4x4", "5x5",
       "6x6", "7x7", "8x8", "9x9"]
hx4 = ["1xx1", "2xx2", "3xx3", "4xx4", "5xx5",
       "6xx6", "7xx7", "8xx8", "9xx9"]
hx5 = ["1xyx1", "2xyx2", "3xyx3", "4xyx4", "5xyx5",
       "6xyx6", "7xyx7", "8xyx8", "9xyx9"]
hx6 = ["1xyyx1", "2xyyx2", "3xyyx3", "4xyyx4", "5xyyx5",
       "6xyyx6", "7xyyx7", "8xyyx8", "9xyyx9"]
h3 = []
h4 = []
h5 = []
h6 = []
hy5 = []
hy6 = []
# Substitute every digit for "x" in the 3- and 4-digit templates.
for hx3_in in hx3:
    for h in h01:
        s = hx3_in.replace("x", h)
        h3.append(s)
for hx4_in in hx4:
    for h in h01:
        s = hx4_in.replace("x", h)
        h4.append(s)
# 5- and 6-digit templates need two passes: first "x", then "y".
for hx5_in in hx5:
    for h in h01:
        s = hx5_in.replace("x", h)
        hy5.append(s)
for hx6_in in hx6:
    for h in h01:
        s = hx6_in.replace("x", h)
        hy6.append(s)
for hy5_in in hy5:
    for h in h01:
        s = hy5_in.replace("y", h)
        h5.append(s)
for hy6_in in hy6:
    for h in h01:
        s = hy6_in.replace("y", h)
        h6.append(s)
# All palindromic candidates from 1 to 6 digits.
h = h1 + h2 + h3 + h4 + h5 + h6
hh = []
# Keep only candidates whose square and cube read the same backwards.
for i in h:
    d = str(int(i) ** 2)
    k = str(int(i) ** 3)
    dd = d[::-1]
    kk = k[::-1]
    if d == dd and k == kk:
        hh.append(i)
# Group the results five per output line.
hhh = []
ss = ""
k = 0
for h in hh:
    if k == 5:
        hhh.append(ss.strip())
        ss = h + " "
        k = 1
    else:
        ss = ss + h + " "
        k = k + 1
hhh.append(ss.strip())
for i in hhh:
    print(i)
print(time.time() - time1)
| [
"feiyanshiren@163.com"
] | feiyanshiren@163.com |
125499835c23d08274653ccb5eb0ae8c9dc55854 | 96bc7e5000e75075e7422a5324d4acec1645cee0 | /edge.py | 73e2a98b3591102945cd9a05b83d9f1709ca3ef9 | [] | no_license | anirudt/simplecv | 0cf66c47ff2ab8c8a37b7a4e23ddcb2d0967f269 | 6802a983e1dec0f082104fc9c12bfaff0b42300a | refs/heads/master | 2021-01-20T21:29:16.902414 | 2014-12-05T20:33:02 | 2014-12-05T20:33:02 | 27,535,533 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 722 | py | '''
This code script explains the Canny Edge Detection
implementation. Also, a circle detection is attempted
'''
# Python 2 / SimpleCV script: grab a frame from the camera, save its Canny
# edge map, detect blobs on the edge image, then look for circular blobs in
# the original frame and outline the last one found.
from SimpleCV import *
cam = Camera()
disp = Display()
img = cam.getImage()
# Canny edge detection with a low first threshold (t1=10).
cat = img.edges(t1=10)
cat.save("Edge.png")
blobs = cat.findBlobs()
blobs.draw()
cat.save("Edge-blob.png")
print "Change incorporated"
# Fill (width=-1) the last/largest blob semi-transparently and save it.
blobs[-1].draw(color=Color.PUCE,width=-1,alpha=128)
cat.save("labelled.png")
if blobs: # if blobs are found
    circles = blobs.filter([b.isCircle(0.2) for b in blobs]) # filter out only circle shaped blobs
    if circles:
        img.drawCircle((circles[-1].x, circles[-1].y), circles[-1].radius(),SimpleCV.Color.BLUE,3) # draw the circle on the main image
        img.save("circled.png")
        print "Done"
| [
"anirudt@gmail.com"
] | anirudt@gmail.com |
2929a14ff94a078d7b7ee03be4455bbc2add8fc4 | 06baa13d54d62f72ff906de24e93a2e70864c236 | /pi.py | 7fcaea163ceb273dd759cc3b69b254c19fab7b5d | [] | no_license | LucasMallmann/algoritmo-genetico-formacao-equipes | 49b3452904cc62541368a38d0c414ec45443aad6 | 5bcd4830e26c5530ef67f3e860459f2c6dfa24ce | refs/heads/master | 2020-03-27T19:37:57.547026 | 2018-11-15T12:14:00 | 2018-11-15T12:14:00 | 147,002,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,456 | py | from random import random, sample, choice, uniform
from math import floor
import numpy as np
from numpy import array, dot, mean
from numpy.linalg import pinv
from tqdm import tqdm
from pprint import PrettyPrinter
from sheets.spreadsheet import get_client
pp = PrettyPrinter()
def create_individual(individual_size: int):
    """Build one individual: group ids 0..total_groups-1 cycled over the
    genome and then shuffled, so each gene assigns a person to a group.

    Relies on the module-level ``total_groups`` global.
    """
    genes = [position % total_groups for position in range(individual_size)]
    np.random.shuffle(genes)
    return genes
def create_population(individual_size: int, population_size: int):
    """Generate a population of *population_size* random individuals."""
    population = []
    for _ in range(population_size):
        population.append(create_individual(individual_size))
    return population
def crossover(parent_1, parent_2):
    """
    Build one child from two parents by copying half of the gene positions
    from parent_1 and the rest from parent_2 (positions chosen at random;
    genes are not necessarily kept linked).

    Relies on the module-level ``individual_size`` global.
    """
    all_loci = list(range(individual_size))
    # 50% of the loci come from the first parent.
    from_first = sample(all_loci, floor(0.5 * individual_size))
    # Every locus not taken from parent_1 comes from parent_2.
    from_second = [locus for locus in all_loci if locus not in from_first]
    child = {}
    for locus in from_first:
        child[locus] = parent_1['individual'][locus]
    for locus in from_second:
        child[locus] = parent_2['individual'][locus]
    return [child[locus] for locus in all_loci]
def my_crossover(parent_1, parent_2, random_pos):
    """One-point crossover: split both parents at *random_pos* and swap the
    tails, returning the two resulting children."""
    genes_1 = parent_1['individual']
    genes_2 = parent_2['individual']
    head_1, tail_1 = genes_1[:random_pos], genes_1[random_pos:]
    head_2, tail_2 = genes_2[:random_pos], genes_2[random_pos:]
    return head_1 + tail_2, head_2 + tail_1
def mutate(individual):
    """
    Mutate an individual by swapping the genes at two random loci.
    """
    # Relies on the module-level `individual_size` global.
    loci = [i for i in range(0, individual_size)]
    position_1 = sample(loci, 1)[0]
    position_2 = sample(loci, 1)[0]
    # If both draws land on the same locus no swap happens and the
    # individual is returned unchanged.
    if position_2 != position_1:
        aux = individual[position_1]
        individual[position_1] = individual[position_2]
        individual[position_2] = aux
    # Mutation is in place; the same list is returned for convenience.
    return individual
def correct_individual(individual):
    '''
    Repair an individual so every group has exactly `persons_by_group`
    members (uses the module-level `total_groups` and `persons_by_group`
    globals).
    Ex: with 3 students per group,
    groups = [0 ,0 ,0 ,0, 1, 1, 2, 2, 2]
    becomes:
    [0, 0, 0, 1, 1, 1, 2, 2, 2]
    '''
    for group_number in range(total_groups):
        group_count = list(individual).count(group_number)
        if group_count > persons_by_group:
            # Too many members: move random members of this group into a
            # higher-numbered group (which is corrected on a later pass).
            while group_count != persons_by_group:
                rand_idx, rand_number = choice(
                    list(enumerate(individual)))
                if rand_number == group_number:
                    bigger = [x for x in individual if x > group_number]
                    if bigger:
                        individual[rand_idx] = choice(bigger)
                    else:
                        individual[rand_idx] = group_number + 1
                    group_count = list(individual).count(group_number)
        if group_count < persons_by_group:
            # Too few members: pull random members from higher-numbered groups.
            while group_count != persons_by_group:
                rand_idx, rand_number = choice(
                    list(enumerate(individual)))
                if rand_number > group_number:
                    individual[rand_idx] = group_number
                    group_count = list(individual).count(group_number)
    return individual
def check_termination_condition(best_individual):
    """
    Decide whether evolution should stop: either the current best individual
    is good enough (sum_result <= 6) or the generation budget is spent
    (module-level `generation_count` reached `max_generations`).
    """
    good_enough = best_individual.get('sum_result') <= 6
    budget_spent = generation_count == max_generations
    return good_enough or budget_spent
def get_offspring_new_population(current_population):
    """
    Produce the offspring portion of the next generation: roulette-selected
    parents are crossed over at a single random split point, children are
    repaired to valid group sizes, a fraction of them is mutated, and the
    best (population_size - amount_survived) descendants are returned.

    Relies on the module-level globals `individual_size`, `crossover_rate`,
    `mutation_rate`, `population_size`, `amount_survived` and `parameters`.
    """
    # One split point is drawn per generation and shared by all pairs.
    split_random_position = np.random.randint(1, individual_size)
    descendants_crossover = []
    amount_to_crossover = int(np.floor(crossover_rate * population_size))
    amount_to_mutate = int(np.floor(mutation_rate * population_size))
    for _ in range(amount_to_crossover):
        parent_1 = select_parent_roulette(current_population)
        parent_2 = select_parent_roulette(current_population)
        child_1, child_2 = my_crossover(parent_1,
                                        parent_2,
                                        split_random_position)
        child_1 = correct_individual(child_1)
        child_2 = correct_individual(child_2)
        descendants_crossover.append(child_1)
        descendants_crossover.append(child_2)
    # NOTE(review): mutate() works in place, so the mutated individuals are
    # also the crossover children — the concatenation below duplicates them.
    individuals_to_mutate = sample(descendants_crossover,
                                   amount_to_mutate)
    descendants_mutated = [mutate(ind) for ind in individuals_to_mutate]
    descendants = descendants_crossover + descendants_mutated
    # Score the new descendants and keep the best ones.
    descendants = calc_population_fitness(descendants, parameters)
    descendants = sorted(descendants, key=lambda i: i['sum_result'])
    total_to_select = population_size - amount_survived
    return descendants[:total_to_select]
def get_fitness(individual, parameters: np.ndarray) -> dict:
    '''
    Compute the fitness of one individual.

    Each row of *parameters* is one skill; the per-group sums of every skill
    are compared pairwise, so `result` is the total imbalance between groups
    (0 means perfectly balanced) and fitness is 1 / result**2.
    '''
    # sum_of_params_by_group[p][g] = sum of skill p over the members of group g
    sum_of_params_by_group = np.zeros(
        (len(parameters), total_groups))
    for param_idx, param_line in enumerate(parameters):
        for person_idx, group_number in enumerate(individual):
            sum_of_params_by_group[param_idx][group_number] += param_line[person_idx]
    result = 0
    for line in sum_of_params_by_group:
        for k in range(total_groups):
            for j in range(k, total_groups):
                result += abs(line[k] - line[j])
    # NOTE(review): if the groups are perfectly balanced result == 0 and the
    # division below raises ZeroDivisionError — confirm whether that case
    # can occur with the data used.
    fitness = 1 / (result ** 2)
    return {'individual': individual, 'sum_result': result, 'fitness': fitness}
def calc_population_fitness(
        population: np.ndarray,
        parameters: np.ndarray):
    """Score every individual with get_fitness and normalise the fitness
    values so they sum to 1 (turning them into selection probabilities)."""
    scored = []
    for individual in population:
        scored.append(get_fitness(individual, parameters))
    total = sum(entry.get('fitness') for entry in scored)
    for entry in scored:
        entry['fitness'] = entry['fitness'] / total
    return scored
def get_new_generation(current_population, descendants):
    """Elitism step: keep the best `amount_survived` individuals (lowest
    sum_result, module-level global) and append the new descendants."""
    ranked = sorted(current_population, key=lambda ind: ind['sum_result'])
    elite = ranked[:amount_survived]
    return elite + descendants
def select_parent_roulette(population):
    """
    Roulette-wheel selection: pick an individual with probability
    proportional to its (normalised) fitness.
    """
    threshold = uniform(0, 1)
    accumulated = 0
    for candidate in population:
        accumulated += candidate.get('fitness')
        if accumulated > threshold:
            return candidate
TP = [100, 50, 20, 150]
NG = [50, 100, 150, 200, 250, 300]
TC = [0.6, 0.7, 0.8, 0.9]
TM = [0.1, 0.05, 0.15, 0.20]
IG = [0.1, 0, 0.2, 0.3]
total_groups = 4
individual_size = 200
probability_of_individual_mutating = 0.1
# import gspread
client = get_client(
['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
)
# worksheet = client.open('Resultados_TG').sheet1
sheet = client.open_by_key("1gV3IhvOE_nLZpkbqdrz8kzJWe62gNZ88ExNndQ9SU3Q")
worksheet = sheet.worksheet("data2")
# Parâmetros genéticos
# population_size = 100
# max_generations = 100
# mutation_rate = 0.5
# crossover_rate = 0.7
# ig = 0.1
# persons_by_group = int(individual_size / total_groups)
# parameters = np.random.random_integers(
# 1, 5, (3, total_groups * persons_by_group))
# pp.pprint(parameters)
# best_individuals_stash = [create_individual(individual_size)]
# initial_population = create_population(individual_size, population_size)
# termination = False
# generation_count = 0
# print(f'Parameters (Skills) = \n{parameters}')
# print('-' * 80)
# initial_population = calc_population_fitness(initial_population, parameters)
# current_population = initial_population
# [print(ind) for ind in initial_population]
# initial_best_ind = sorted(current_population, key=lambda i: i['sum_result'])[0]
# print(f'Initial best individual - {initial_best_ind}')
# print('-' * 50)
ag = 1
for population_size in TP:
for max_generations in NG:
for crossover_rate in TC:
for mutation_rate in TM:
for ig in IG:
print('Genetic Parameters')
print(f'AG number = {ag}')
print(f'Population Size - {population_size}')
print(f'Max Generations - {max_generations}')
print(f'Crossover Rate - {crossover_rate}')
print(f'Mutation Rate - {mutation_rate}')
print(f'Generation Increment (survival) - {ig}')
print('\n')
persons_by_group = int(individual_size / total_groups)
parameters = np.random.random_integers(
1, 5, (3, total_groups * persons_by_group))
best_individuals_stash = [create_individual(individual_size)]
initial_population = create_population(individual_size, population_size)
termination = False
generation_count = 0
# print(f'Parameters (Skills) = \n{parameters}')
pp.pprint(f'Parameters - {parameters}')
print('-' * 80)
initial_population = calc_population_fitness(initial_population, parameters)
current_population = initial_population
[print(ind) for ind in initial_population]
initial_best_ind = sorted(current_population, key=lambda i: i['sum_result'])[0]
print('\n')
print(f'Initial best individual - {initial_best_ind}')
print('-' * 50)
amount_survived = int(np.floor(ig * population_size))
while termination is False:
descendants = get_offspring_new_population(current_population)
new_generation = get_new_generation(current_population, descendants)
gen_fit = sum([n['fitness'] for n in new_generation])
new_individuals = [ind['individual'] for ind in new_generation]
current_population = calc_population_fitness(new_individuals, parameters)
# [pp.pprint(ind) for ind in current_population]
best_individual = sorted(
current_population, key=lambda i: i['sum_result'])[0]
termination = check_termination_condition(best_individual)
generation_count += 1
print(f'Best Individual = {best_individual}')
print('\n')
line = [
ag, population_size, max_generations, crossover_rate, mutation_rate, ig,
str(initial_best_ind['individual']), str(initial_best_ind['sum_result']),
str(best_individual['individual']), str(best_individual['sum_result']),
]
worksheet.append_row(line)
ag += 1
print('\n')
print('\n')
| [
"lucasmallmann76@gmail.com"
] | lucasmallmann76@gmail.com |
923176b05b13547f26b54b29c28090ef780edb2a | 017a57c810ad08ecff84652a252656afa3173e17 | /odin/utils/shape_calculation.py | d2740e55a36f13457b42b10d7a8e3a26cad4ac51 | [
"MIT"
] | permissive | SmartArduino/odin | 0189e0b71ccac311887f0fda6bafb96ca9c53a88 | 1706c91c2fbafd23018ce98bf87b3928935b2466 | refs/heads/master | 2021-01-17T08:22:48.073115 | 2017-03-04T12:15:03 | 2017-03-04T12:15:03 | 83,890,654 | 0 | 0 | null | 2017-03-04T12:07:20 | 2017-03-04T12:07:20 | null | UTF-8 | Python | false | false | 7,382 | py | from __future__ import print_function, division, absolute_import
from math import ceil
import numpy as np
# ===========================================================================
# Shape calculation for Pooling
# Contain code from theano: theano/tensor/signal/pool.py
# Copyright (c) 2008--2016, Theano Development Team
# ===========================================================================
def get_pool_output_shape(imgshape, ws, ignore_border=False,
                          strides=None, pad=None):
    """
    Parameters
    ----------
    imgshape : tuple, list, or similar of integer or scalar Theano variable
        order: (samples, pool_dim1, pool_dim2, pool_dim3, ..., input_depth)
        (i.e tensorflow-NHWC format)
    ws : list or tuple of N ints
        Downsample factor over rows and column.
        ws indicates the pool region size.
    ignore_border : bool
        If ws doesn't divide imgshape, do we include an extra row/col/slice
        of partial downsampling (False) or ignore it (True).
    strides : list or tuple of N ints or None
        Stride size, which is the number of shifts over rows/cols/slices to get the
        next pool region. If stride is None, it is considered equal to ws
        (no overlap on pooling regions).
    pad : tuple of N ints or None
        For each downsampling dimension, this specifies the number of zeros to
        add as padding on both sides. For 2D and (pad_h, pad_w), pad_h specifies the
        size of the top and bottom margins, pad_w specifies the size of the left and
        right margins. No padding is added if pad is None.
        May also be the string 'valid' or 'same' (case-insensitive substring
        match), mirroring TensorFlow padding modes.

    Returns
    -------
    tuple in NHWC order: (samples, out_dim1, ..., out_dimN, input_depth).
    """
    # convert tensorflow shape (NHWC) to theano shape (NCHW) so the
    # computation below can follow Theano's pool.py logic verbatim.
    imgshape = (imgshape[0], imgshape[-1]) + tuple(imgshape[1:-1])
    ndim = len(ws)
    # check valid pad (list or tuple of int)
    if isinstance(pad, str):
        if 'valid' in pad.lower():
            pad = (0,) * ndim
        elif 'same' in pad.lower():
            # TensorFlow 'SAME': output dim is ceil(input / stride),
            # independent of the window size.
            out_shape = tuple([int(ceil(float(i) / float(j)))
                               for i, j in zip(imgshape[-ndim:], strides)])
            return (imgshape[0],) + imgshape[2:-ndim] + out_shape + (imgshape[1],)

    def compute_out(v, downsample, stride):
        # Output length of one pooled axis of size v (Theano semantics).
        if ignore_border:
            if downsample == stride:
                return v // stride
            else:
                out = (v - downsample) // stride + 1
                return np.maximum(out, 0)
        else:
            if stride >= downsample:
                return (v - 1) // stride + 1
            else:
                return max(0, (v - 1 - downsample + stride) // stride) + 1
    # ====== check input arguments ====== #
    if len(imgshape) < ndim:
        raise TypeError('imgshape must have at least {} dimensions'.format(ndim))
    if strides is None:
        strides = ws
    if pad is None:
        pad = (0,) * ndim
    patch_shape = tuple(imgshape[-ndim + i] + pad[i] * 2
                        for i in range(ndim))
    out_shape = [compute_out(patch_shape[i], ws[i], strides[i])
                 for i in range(ndim)]
    rval = tuple(imgshape[:-ndim]) + tuple(out_shape)
    # convert theano shape back to tensorflow shape
    rval = (rval[0],) + rval[2:] + (rval[1],)
    return rval
# ===========================================================================
# Shape calculation for Convolution
# Contain code from theano: theano/tensor/nnet/abstract_conv.py
# Copyright (c) 2008--2016, Theano Development Team
# ===========================================================================
def __get_conv_shape_1axis(image_shape, kernel_shape, border_mode,
                           subsample, dilation=1):
    """Output length of a convolution along one axis.

    *border_mode* may be 'valid', 'full', 'half'/'same', or an explicit
    non-negative integer padding.  Returns None if any argument is None.
    Follows Theano's abstract_conv logic, with an extra even-kernel
    adjustment for 'half'/'same' to match TensorFlow's SAME padding.
    """
    if None in [image_shape, kernel_shape, border_mode,
                subsample, dilation]:
        return None
    # Implicit dilated kernel shape
    dil_kernel_shape = (kernel_shape - 1) * dilation + 1
    if isinstance(border_mode, str):
        border_mode = border_mode.lower()
    if border_mode == "half" or border_mode == "same":
        pad = dil_kernel_shape // 2
    elif border_mode == "full":
        pad = dil_kernel_shape - 1
    elif border_mode == "valid":
        pad = 0
    else:
        # Numeric border_mode is the explicit padding amount.
        pad = border_mode
        if pad < 0:
            raise ValueError("border_mode must be >= 0")
    # In case of symbolic shape, we want to build the smallest graph
    # (image_shape + 2 * pad - dil_kernel_shape) // subsample + 1
    if pad == 0:
        out_shp = (image_shape - dil_kernel_shape)
    else:
        out_shp = (image_shape + 2 * pad - dil_kernel_shape)
    if subsample != 1:
        out_shp = out_shp // subsample
    out_shp = out_shp + 1
    # ====== get exact same border_mode for theano ====== #
    # Even kernels: Theano's 'half' differs from TF 'SAME'; force the
    # TF result ceil(image / subsample).
    if (border_mode == 'half' or border_mode == 'same') and \
            kernel_shape % 2 == 0:
        out_shp = (image_shape + subsample - 1) // subsample
    return out_shp
def get_conv_output_shape(image_shape, kernel_shape,
                          border_mode, subsample,
                          filter_dilation=None):
    """
    This function compute the output shape of convolution operation.
    original code: abstract_conv.py (theano)

    Parameters
    ----------
    image_shape: tuple of int (symbolic or numeric) corresponding to the input
        order: (samples, conv_dim1, conv_dim2, conv_dim3, ..., input_depth)
        (i.e tensorflow-NHWC format)
    kernel_shape: tuple of int (symbolic or numeric) corresponding to the
        order: (kernel_dim1, kernel_dim2, kernel_dim3, ..., input_depth, out_depth)
        (i.e tensorflow-NHWC format)
    border_mode: string, int (symbolic or numeric) or tuple of int (symbolic
        or numeric). If it is a string, it must be 'valid', 'half' or 'full'.
        If it is a tuple, its two (or three) elements respectively correspond
        to the padding on height and width (and possibly depth) axis.
    subsample: tuple of int (symbolic or numeric). Its two or three elements
        respectively correspond to the subsampling on height and width (and
        possibly depth) axis.
    filter_dilation: tuple of int (symbolic or numeric). Its two elements
        correspond respectively to the dilation on height and width axis.

    Returns
    -------
    output_shape: tuple of int corresponding to the output image shape in
        NHWC order: (batch size, out_dim1, ..., out_dimN, output channels).
        None where undefined.
    """
    # ====== convert tensorflow shape to theano shape ====== #
    # NHWC -> NCHW for the image; (k1..kN, in, out) -> (out, in, k1..kN)
    # for the kernel, so the Theano formulas below apply unchanged.
    image_shape = (image_shape[0], image_shape[-1]) + tuple(image_shape[1:-1])
    kernel_shape = (kernel_shape[-1], kernel_shape[-2]) + tuple(kernel_shape[:-2])
    # ====== infer shape ====== #
    bsize, imshp = image_shape[0], image_shape[2:]
    nkern, kshp = kernel_shape[0], kernel_shape[2:]
    if filter_dilation is None:
        filter_dilation = np.ones(len(subsample), dtype='int')
    if isinstance(border_mode, tuple):
        # Per-axis explicit padding.
        out_shp = tuple(__get_conv_shape_1axis(
            imshp[i], kshp[i], border_mode[i],
            subsample[i], filter_dilation[i]) for i in range(len(subsample)))
    else:
        # Single mode/padding shared by every spatial axis.
        out_shp = tuple(__get_conv_shape_1axis(
            imshp[i], kshp[i], border_mode,
            subsample[i], filter_dilation[i]) for i in range(len(subsample)))
    # ====== convert theano to tensorflow shape ====== #
    return (bsize, ) + out_shp + (nkern,)
| [
"nickartin13@gmail.com"
] | nickartin13@gmail.com |
6e913cc30b992987c3cb468fa015c85901ed5254 | 132cb9feeed6afd6863028cfc13dd0aaea2511a9 | /profiles_project/settings.py | bfa591a4b15682324686dbd351956df8ebe27295 | [
"MIT"
] | permissive | georgeuy/profiles-rest-api | 67cd194ee06dfa2f9d58fdfbdf5c3aaa698d8717 | 704ee0e2c12d12088a8fc3182833a088b996673f | refs/heads/main | 2023-03-02T22:37:43.202162 | 2021-02-14T22:11:29 | 2021-02-14T22:11:29 | 338,348,051 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,209 | py | """
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 3.1.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
# Project root: two levels up from this settings file.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from the environment before any real deployment.
SECRET_KEY = 'i80yy+88o6uo@kkb@+vtqcgcfb9_u2l5_0=$u_9n8^+cq4c-d_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
# Django contrib apps plus Django REST Framework (with token auth) and the
# project's own profiles_api app.
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
    'profiles_api',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'profiles_project.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'profiles_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Development database: a local SQLite file in the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'es'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# Swap in the project's custom user model for authentication.
AUTH_USER_MODEL = 'profiles_api.UserProfile'
| [
"poliovei@gmail.com"
] | poliovei@gmail.com |
cbfbfd1fd74118ce0a9b86232a1ef846dbfb2334 | 7f1d85c9652064577695be413d57f8db2176eade | /base_model.py | 39122a05ebc957bb13f3b2b46f2fc63b7f07b4f2 | [] | no_license | hssaka7/Deploying-ML-Model-on-Flask | f610620563713e515bbff93d3708bed445f3d865 | a634ab84e8da11f83673cbc9abdad3b1e682bc28 | refs/heads/master | 2021-01-16T14:10:21.981171 | 2020-03-06T04:51:33 | 2020-03-06T04:51:33 | 243,148,851 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,211 | py | import pandas as pd
from sklearn.datasets import load_wine
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score  # kept for compatibility with other scripts
import os
import pickle

# Load the bundled wine dataset and assemble it into a labelled DataFrame.
wine_dataset = load_wine()
all_data = wine_dataset['data']
features = wine_dataset['feature_names']
target = wine_dataset['target']
df = pd.DataFrame(data=all_data, columns=features)
df['target'] = target

# Hold out 30% of the rows for evaluation; the fixed seed keeps the split
# reproducible across runs.
test_size = 0.3
x_train, x_test, y_train, y_test = train_test_split(df.drop('target', axis=1), target,
                                                    test_size=test_size, random_state=0)

# Fit a decision-tree classifier and report its held-out accuracy.
md = DecisionTreeClassifier().fit(x_train, y_train)
print("accuracy is {0}".format(md.score(x_test, y_test)))

# Persist the fitted model.  Create the target directory if it does not
# exist and close the file handle deterministically — the original code
# leaked both the write and read handles (`open(...)` without `with`) and
# failed outright when the `model/` directory was missing.
file_destination = "model/classifier_dump.sav"
os.makedirs(os.path.dirname(file_destination), exist_ok=True)
with open(file_destination, 'wb') as f:
    pickle.dump(md, f)

# Round-trip check: reload the pickled model and confirm it scores the same.
with open(file_destination, 'rb') as f:
    model_loaded = pickle.load(f)
accuracy = model_loaded.score(x_test, y_test)
print(accuracy)
# the accuracy matches
| [
"hssaka7@gmail.com"
] | hssaka7@gmail.com |
1cddce9ac55f4cb9dc7b25929fe0d5bd49a3126b | c937095566ba711452cd7723f27ee133c88defb1 | /FirstApp/__init__.py | ac643589d8417a80b6004e869cd5dd21dc0bc8c7 | [] | no_license | sum252/Team-1 | 36e679b3004eb43a38520313207da0e43638ff80 | 6450f6b2d5c52f9031d12a852907ccdda785fe46 | refs/heads/master | 2021-03-06T02:55:59.806986 | 2020-04-06T22:53:19 | 2020-04-06T22:53:19 | 246,175,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,587 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jan 28 16:44:22 2020
@author: Valeria
NOT TESTED
This code write locally csv
upload in FTP
download In FTP
no mail
files used
http://127.0.0.1:5000/index
http://127.0.0.1:5000/newactivation
http://127.0.0.1:5000/submit
"""
from flask import Flask, request,flash,render_template
from flask_mail import Mail, Message #send mail
from ftplib import FTP
from time import sleep
from datetime import datetime
from datetime import timedelta
import csv
import os
import shutil
#...flash ...IMPROVEMENT in animation
# Flask application object and the session secret required by flash().
app=Flask(__name__)
app.secret_key='secretkey'
# FTP endpoint and credentials for the APIC file drop.
# NOTE(review): credentials and the Flask secret key are hard-coded in plain
# text in source control — move them to environment variables or an external
# config file before sharing/deploying this code.
serverName='43.249.199.51'
username='sparkwbb'
password='+@)rKAeG}6!bNnp>'
FTPdirectoryIn='/43.249.199.51/Incoming' #Incoming or Incoming_ss
FTPdirectoryOut='/43.249.199.51/Outgoing'
# The mail configuration below is disabled: it is a bare triple-quoted string
# literal, not executed code, so ``mail = Mail(app)`` never runs.  No other
# binding of ``mail`` is visible in this file, so the send_mail_* helpers
# will raise NameError at ``mail.send(...)`` — caught by their broad
# ``except Exception`` blocks and returned as the error string.
'''
app.config.update(
DEBUG=True,
MAIL_SERVER='smtp.gmail.com',
MAIL_PORT=465,
MAIL_USE_SSL=True,
MAIL_USE_TLS=False,
MAIL_USERNAME = 'projectsender20@gmail.com',
MAIL_PASSWORD = '123abc!@#'
)
mail = Mail(app)
'''
def send_mail_good():
    """Send a short 'RES2 received' notification email.

    Returns a success string, or the exception text if sending failed.
    """
    try:
        notify_addr = "projectsender20@gmail.com"
        msg = Message("RES2 received",
                      sender=notify_addr,
                      recipients=[notify_addr])
        msg.body = "Hello!"
        mail.send(msg)
    except Exception as e:
        return str(e)
    return 'Mail sent!>good news'
def check_reply(Account):
    """Return the first data row of Outgoing/WNN<Account>_RES2.csv.

    The CSV's header row is skipped; the returned value is the list of
    fields from the first record after the header.
    """
    reply_name = 'WNN' + str(Account) + '_RES2.csv'
    # folder where the RES2.csv is saved
    reply_path = os.path.join('Outgoing', reply_name)
    with open(reply_path, mode='r') as fh:
        reader = csv.reader(fh)
        next(reader)  # discard the header row
        records = [record for record in reader]
    return records[0]
def send_mail_good_activation(Account,filePathOut,fileNameOut,res2info):
    """Email the RES2 reply summary for a customer account.

    Re-reads the original request from Incoming/WNN<Account>.csv to recover
    the SIM number, plan and request type, then mails a summary with the
    RES2 file (``filePathOut``/``fileNameOut``) attached.  ``res2info`` is
    the parsed first data row of the RES2 CSV (see check_reply); index 7 is
    the status and index 8 the error description.
    Returns None on success, or the exception text if sending failed.
    """
    # retrieve Inconming information
    fileName = 'WNN'+str(Account)+'.csv'
    filePathIn=os.path.join('Incoming',fileName)
    with open(filePathIn, mode='r') as csv_file:
        csv_reader=csv.reader(csv_file)
        header=next(csv_reader) #go in second line, also next(csv_reader)
        datalist=[row for row in csv_reader]
        data=datalist[0]
    # Fields of the original request row used in the message.
    SIMNo=data[8]
    ProductOfferID=data[9]
    ReqType=data[0]
    try:
        #send your message with credentials specified above
        msg = Message("Reply - Customer Account No: " + str(Account)+" - Request type: "+ ReqType + " - Status: "+ str(res2info[7]) ,
        sender="projectsender20@gmail.com",
        recipients=["projectsender20@gmail.com"])
        with app.open_resource(filePathOut) as fp:
            msg.attach(fileNameOut,'text/csv',fp.read())
        # A 'Complete' status omits the error-description line; any other
        # status appends res2info[8] as the error description.
        if str(res2info[7]) == 'Complete':
            msg.body = "\n\n\n Replay form Spark, Customer Account No: {}\n\n\n\
Mobile Number: {}\n SIMNo: {}\n External Batch Id: {}\n Siebel Order Number: {}\n\n\n\
Request type: {}\n Spark Plan: {}\n\n\n\
The Status is: {}\n\n\n\n"\
.format(Account,res2info[6],SIMNo, res2info[3],res2info[2],ReqType, ProductOfferID, res2info[7])
#        elif res2info[8]=="": #not details are present
#            msg.body = "\n\n\n Replay form Spark, Customer Account No: {}\n\n\n\
#            Mobile Number: {}\n SIMNo: {}\n External Batch Id: {}\n Siebel Order Number: {}\n\n\n\
#            Request type: {}\n Spark Plan: {}\n\n\n\
#            The Status is: {}\n\n\n\n"\
#            .format(Account,res2info[6],SIMNo, res2info[3],res2info[2],ReqType, ProductOfferID, res2info[7])
        else:
            msg.body = "\n\n\n Replay form Spark, Customer Account No: {}\n\n\n\
Mobile Number: {}\n SIMNo: {}\n External Batch Id: {}\n Siebel Order Number: {}\n\n\n\
Request type: {}\n Spark Plan: {}\n\n\n\
The Status is: {}\n\n\n\n Error Description is: {}\n\n\n\n"\
.format(Account,res2info[6],SIMNo, res2info[3],res2info[2],ReqType, ProductOfferID, res2info[7], res2info[8])
        mail.send(msg)
        # tell the script to report if your message was sent or which errors need to be fixed
        print('Sent')
    except Exception as e:
        return(str(e))
def send_mail_incoming(Account, filePathIn, fileNameIn):
    """Email the incoming request CSV for the given customer account.

    Attaches the file at ``filePathIn`` under the name ``fileNameIn``.
    Returns 1 on success, or the exception text if anything went wrong.
    """
    try:
        notify_addr = "projectsender20@gmail.com"
        msg = Message("Incoming file Customer Account No " + str(Account),
                      sender=notify_addr,
                      recipients=[notify_addr])
        with app.open_resource(filePathIn) as fp:
            msg.attach(fileNameIn, 'text/csv', fp.read())
        mail.send(msg)
        # report success on stdout
        print('Sent')
        return 1
    except Exception as e:
        return str(e)
def send_mail_bad(Account):
    """Email a failure notice for the given customer account.

    Returns a status string on success, or the exception text on failure.
    """
    try:
        notify_addr = "projectsender20@gmail.com"
        msg = Message("Reply for Customer Account No " + str(Account) + " IS NOT CREATED",
                      sender=notify_addr,
                      recipients=[notify_addr])
        msg.body = "Some problem arise in processing the request for Customer Account No " + str(Account) + " TRY AGAIN!"
        mail.send(msg)
    except Exception as e:
        return str(e)
    return 'Mail sent!>Time expired! The file _RES2.csv is NOT present within xx minutes'
@app.route('/')
@app.route('/index')
@app.route('/index.html')
def index():
    """Serve the landing page for the root and both index-style URLs."""
    landing_page = render_template('index.html')
    return landing_page
@app.route('/newactivation', methods=['POST','GET'])
@app.route('/newactivation.html', methods=['POST','GET'])
@app.route('/newactivationwtf', methods=['POST','GET'])
@app.route('/newactivationwtf.html', methods=['POST','GET'])
@app.route('/newactivationflask', methods=['POST','GET'])
@app.route('/newactivationflask.html', methods=['POST','GET'])
#insert data in form
def form_example():
    """Handle the 'new activation' form.

    GET renders the empty form.  POST validates the submitted fields,
    flash()-ing per-field error messages on failure; on success it builds a
    WNN<account>.csv request from the CSVTemplate example, saves it locally
    (Incoming/ and a Waiting/ copy), uploads it to the APIC FTP drop, and
    renders the submit confirmation page.
    """
    error = None
    CustomerAccountNo=""
    SIMNo=""
    ELID=""
    if request.method=='POST':
        CustomerAccountNo = request.form.get('CustomerAccountNo')#can be None
        SIMNo = request.form.get('SIMNo')
        ELID = request.form.get('ELID')
        ProductOfferID = request.form.get('ProductOfferID')
        # Presence checks: None means the field was absent from the POST,
        # "" means it was submitted empty; both flash per-field messages.
        if CustomerAccountNo == None or ELID == None or SIMNo == None or ProductOfferID == None:
            error = 'Invalid input'
            flash('Please fill all the fields', category='AllFields')
            if CustomerAccountNo == None:
                flash('Please fill this data ', category='CustomerAccountNosms')
            if SIMNo == None:
                flash('Please fill this data ', category='SIMNosms')
            if ELID == None:
                flash('Please fill this data ', category='ELIDsms')
            if ProductOfferID == None:
                flash('Please select an option', category='ProductOfferIDsms')
        if CustomerAccountNo == "" or SIMNo == "" or ELID == "" or ProductOfferID == "" :
            error = 'Invalid input'
            flash('Please fill all the fields', category='AllFields')
            if CustomerAccountNo == "":
                flash('Please fill this data ', category='CustomerAccountNosms')
            if SIMNo == "":
                flash('Please fill this data ', category='SIMNosms')
            if ELID == "":
                flash('Please fill this data ', category='ELIDsms')
            if ProductOfferID == "":
                flash('Please select an option', category='ProductOfferIDsms')
        # Format checks: account number must be all digits, SIM exactly 17
        # digits, ELID exactly 9 digits.
        if len(CustomerAccountNo) != sum(c.isdigit() for c in CustomerAccountNo) and CustomerAccountNo != "":
            error = 'Invalid input'
            flash('Inserted data has {} digits and {} others characters'\
                  .format(sum(c.isdigit() for c in CustomerAccountNo),len(CustomerAccountNo)-sum(c.isdigit() for c in CustomerAccountNo)), category='CustomerAccountNosms')
            flash('Customer Accoun No should have just digits', category='CustomerAccountNosms')
        if [len(SIMNo),sum(c.isdigit() for c in SIMNo)] != [17,17] and SIMNo != "":
            error = 'Invalid input'
            flash('Error details in SIM No', category='SIMNosms')
            flash('Inserted data has {} digits and {} others characters'\
                  .format(sum(c.isdigit() for c in SIMNo),len(SIMNo)-sum(c.isdigit() for c in SIMNo)), category='SIMNosms')
            flash('SIM No should have 17 digits', category='SIMNosms')
        if [len(ELID),sum(c.isdigit() for c in ELID)] != [9,9] and ELID != "":
            error = 'Invalid input'
            flash('Inserted data has {} digits and {} others characters'\
                  .format(sum(c.isdigit() for c in ELID),len(ELID)-sum(c.isdigit() for c in ELID)), category='ELIDsms')
            flash('ELID should have 9 digits', category='ELIDsms')
        # Re-render the form with all accumulated flash messages on any
        # validation failure.
        if error != None:
            return render_template('newactivation.html', error=error)
        # Keep only the first whitespace-separated token of the selected
        # plan (the offer ID).
        temp = ProductOfferID.split()
        ProductOfferID = temp[0]
        #wirte the file.csv, TO DO: if not make ERROR message
        fileNameTemplate = 'WNNDDMMYY_Activate_EXAMPLE.csv'
        filePathTemplate=os.path.join('CSVTemplate',fileNameTemplate) #to handle windows and linux same way
        with open(filePathTemplate, mode='r') as csv_file:
            csv_reader=csv.reader(csv_file)
            header=next(csv_reader) #go in second line, also next(csv_reader)
            datalist=[row for row in csv_reader]
            data=datalist[0]
        # Patch the template row with the submitted values; the indices
        # match the template's column layout.
        data[3]='WNN'+str(CustomerAccountNo)
        data[8]=str(SIMNo)
        data[9]=str(ProductOfferID)
        data[30]=str(ELID)
        fileName = 'WNN'+str(CustomerAccountNo)+'.csv'
        filePathIn=os.path.join('Incoming',fileName) #to handle windows and linux same way
        filePathWaiting=os.path.join('Waiting',fileName)
        #write .csv file locally TO DO: if not make ERROR message, if yes make an OK message
        #INSERT: if the file is not in waiting folder. If the file is in waiting folder, not write it and send a message to wait
        with open(filePathIn, mode='w') as csv_file:
            csv_writer=csv.writer(csv_file,lineterminator='\r')
            csv_writer.writerow(header)
            csv_writer.writerow(data)
        shutil.copy(filePathIn, filePathWaiting)
        #connect to the FTP server and write file, TO DO: if not make ERROR message, if yes make an OK message ?
        ftp = FTP(serverName)
        ftp.login(username,password)
        ftp.cwd(FTPdirectoryIn)
        #open the local 'WNN'+str(CustomerAccountNo)+'.csv'
        with open(filePathIn, "rb") as f:
            ftp.storbinary('STOR ' + fileName, f,1024)
        ftp.cwd('/..')
        ftp.quit()
        return render_template('submit.html',error=error)
    return render_template('newactivation.html', error=error)
# Run the Flask development server when executed directly.
# NOTE(review): debug=True enables the interactive debugger/auto-reload —
# development only, never in production.
if __name__== '__main__':
    app.run(debug=True)
| [
"noreply@github.com"
] | sum252.noreply@github.com |
75b7e371ba1b7dc06443cf9194ff8dbb81af325f | a5dbb808da2ea6e959d9f9b2f1d3ad4a14626d57 | /Lesson 8 (Functions)/2.py | 446139e92dde0aac17ac77be360be09795463ecd | [] | no_license | SanyaBoroda4/Hillel_Homeworks | 667e4c3b14fd8250617217b709c9e4dd7d8fdfd9 | 45747360d662162cc96af3a81c78f887da065148 | refs/heads/master | 2023-07-12T02:34:43.712706 | 2021-08-17T18:43:42 | 2021-08-17T18:43:42 | 388,894,473 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | def sum(x, y):
return x + y
print(sum(4, 5))
def bio(name, age):
    """Print a one-line greeting for the given name and age."""
    greeting = f"Hello {name}! You are {age} old."
    print(greeting)
bio("Alex", 333)
def fun1(c=2, b=3):
    """Print the sum of the two arguments (defaults: 2 and 3)."""
    total = c + b
    print(total)
fun1() | [
"sorokinalex1992@gmail.com"
] | sorokinalex1992@gmail.com |
b9cb66b4a8ea043376fb60607e748e7460ec98fc | 5dddbca48998df417f0a9f027d13b2e55aa5a457 | /lab5/push-T16-apic-json-w-detailed-comments.py | 50298a26a179755aca2ab88efda85bf478791a21 | [] | no_license | lumoscloud-student/devops2daycourse | 827a3180abc9c591e3dae6f2706009d87d433efa | 782118eb45df5687b154ad23359ab049f146d521 | refs/heads/master | 2020-12-24T16:23:51.123343 | 2016-03-17T07:43:02 | 2016-03-17T07:45:07 | 37,833,702 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89,952 | py | #!/usr/bin/python
# This file was meant to be run in the Python Interactive Interpreter
# While you can run this file from CLI just as a single python file by using the CLI command 'python ./push-T16-apic-json-w-detailed-comments.py',
# try running it from the Python Interactive Interpreter first, then delete your tenant and try it from the CLI using the above command, as well
# As you first run through this using the Python Interactive Interpreter, take your time and only copy one line at a time
# This isn't a rush to get the lab done, it's a learning experience, so take your time and learn (and re-learn!)
# Try things twice if you need/want to -- see what happens!
# Begin by opening PuTTY to your ubuntu dev bare VM, and typing 'python' and hit enter
# Now you are in the Python Interactive Interpreter
# Next continue by copying only ONE line at a time, and pasting it into the Interpreter
# If you copy too many lines at once, you will miss your chance to input data that Python is asking you to input, and the program simply won't work at all
# You do NOT need to paste any line that begins with a '#' -- Those are comments, and Python ignores them completely
# Those are just for you and me to read
# BTW, commenting the heck out of your Python scripts is a FANTASTIC way not to come back to a program
# you wrote 6 months ago and not have a CLUE what you were thinking or trying to accomplish
# GET IN THE PRACTICE OF COMMENTING A LOT WHEN BEGINNING TO WRITE CODE!!!
# Go ahead, start copying and pasting ONE line at a time.
# Import the 'sys' python module and ask the user which aci pod, then store that information in the variable 'acipod'
import sys
acipod = raw_input('Which ACI Pod # are you trying to push data to? ')
print acipod
# Use the value from the acipod variable to populate the last digit of the third octet - which correlates to the aci pod #
apic = '10.29.10{0}.24'.format(acipod)
print apic
# Ask the user what their admin username is, then store that information in the variable 'username'
username = raw_input('What is your student admin username (e.g. student1, student2, etc)? ')
print username
# Import the 'getpass' python module so we can ask the user secret info and not broadcast it to the screen
# Ask the user what their admin password, then store that information in the variable 'password'
from getpass import getpass
password = getpass('What is your student password? ')
print password
# I said we obtained the password in secret, I didn't say we encrypted it
# This is obviously stored in clear text
# There are much more advanced ways of encrypting it, but we won't go into them in this class
# Using what we have now gathered from the user, populate our standard auth JSON file, using variables
auth = {'aaaUser': {'attributes': {'name': username, 'pwd': password } } }
print auth
# Notice how the data inside the JSON has automatically been expanded to include the values of the variables 'username' and 'password'?
# Also notice how the order is messed up (e.g. 'pwd' comes before 'name')?
# This is due to the way Python dictionarys or 'dict' data types work
# We'll talk a little more about them in the next lab
# Import the python module 'requests' for making HTTP(S) GETs and POSTs which we will use to send RESTful API calls
import requests
# Import the python module 'json' so that we can use REST to send and receive JSON data and know how to handle it
import json
# Create a 'requests' session, and store that session object as variable 's'
s = requests.Session()
print s
# Send an HTTPS POST to authenticate using the session that we just created and stored as variable 's',
# and send the 'auth' variable which is our JSON auth data
#
# Using the variable 's' with a '.' and another word is simply calling a object/method in a class from the requests python module,
# but as an extension of an already-created session
r = s.post('https://{0}/api/mo/aaaLogin.json'.format(apic), data=json.dumps(auth), verify=False)
print r
# The variable 'r' now is packed with a lot of good information
# Find out what the HTTP Status is, and pack that info in the variable 'status'
status = r.status_code
# Print to stdout (screen) what we just learned and stored in 'status'
print status
# What is the difference between printing 'r' to screen and printing 'status' (which was really 'r.status_code') to screen?
# The answer? The variable data type
# To find out what the two different data types are, enter these next two commands one at a time
type(r)
# Interactive walkthrough: inspect the requests.Response object 'r' returned by the
# earlier authentication POST to the APIC (done above this section in the file).
# We see that 'r' is a 'Class' data type (and what Class it is)
type(status)
# And we see that 'status' or really 'r.status_code' is an 'int' or integer, which is one of the values stored in the returned information from that 'r' Class
# This is VERY, VERY important information to know for any future coding we might want to do inside this Python program, as we need to know how to query variables and work properly with what data type a variable is
# NOTE: a bare 'type(...)' expression only echoes its result in an interactive REPL;
# when this file is run as a script these lines are evaluated but print nothing.
# Let's get more info out of our 'r' variable. We need to get the 'cookies' or 'token' out of 'r' and store it in the variable 'cookies'
# We will send back to the APIC these cookies every subsequent time we send an HTTP POST, so that we do not have to keep authenticating
cookies = r.cookies
# Print to stdout (screen) what we just learned and stored in 'cookies'
print cookies
# Ah, another Class stored inside that same 'r' variable. Seems 'r' has quite a LOT of info in it!
# 'r' is basically what your Web Browser would get back if you browsed to a web page
# Only not only what you the human user would see, but what the browser would see and need to know and use to make the page work properly
# In fact far more information than you would see even if you did a 'View Page Source'
# We don't need the information about the HTTP Headers that we received, but I included this anyhow just to demo extra info that 'r' is holding on to
headers = r.headers
# Print to stdout (screen) what we just learned and stored in 'headers'
print headers
# That's a lot of information just in the headers alone. Let's look at just a very small subset of that info
print headers['content-type']
type(headers)
# Grab the full text response (HTML Body) that we got when we tried to authenticate
# If we failed to auth, and got back a status of something other than '200', this will hopefully help us know what went wrong
text = r.text
# Print to stdout (screen) what we just learned and stored in 'text'
print text
# Um, What??
# That is very NON-Pretty JSON
# Let's Pretty it up a bit, shall we?
from pprint import pprint
# This is the same as importing 'pprint', but instead of importing the entire Python module, we only import what we need,
# and thus not only simplify how we write our code, but also consume less memory from the host running the Python program
pprint(text)
# That didn't seem to help much at all, I wonder why not?
# If we take a look at the data type of 'text', it might give us more of a clue
type(text)
# Ah, we see that it is 'Unicode' (very similar to text or a 'string', but a much more specific text/string type)
# pprint only indents *structured* data; a single string gets printed as one long blob either way.
# Let's see if we can turn it into another data type
# Let's see if we can extract the JSON out of it in a Python 'dict' or Dictionary format
jsondata = r.json()
type(jsondata)
# Now let's try to Pretty Print that
pprint(jsondata)
# Now, while there was still a TON of information on that page, it is MUCH cleaner and easier to read, especially if you back your PuTTY font size way off
# That could be useful to us later on, but for now, let's press on, shall we?
# Now here is where we have a all of the JSON data that at one time was captured from the API Inspector
# This includes 14 separate API calls that create everything in the Fabric Access Policies
# We wiped our ACI Fabric clean, so now we need to do a bunch of HTTPS REST POSTs to push all that data back to ACI
# These 15 JSON file include ONLY the Fabric Access Policies
# They included NONE of the Tenant configuration
# That will be the last thing in the file
#
# Notice that there are essentially two lines to each REST API call
# The first line packs all the JSON data into the variable 'jsonddata'
# The second line performs an HTTP POST and sends the data using the requests session that we've already set up
# Notice that every time we run the second line and POST data, we first state the HTTP Method (GET, POST),
# then as an argument we pass the URL in, then we pass the auth token/cookie, then we pass in the JSON data,
# then finally tell the python requests module that we don't care if the APIC HTTP certificate is self-signed or not
#
# Go ahead, run these one at a time
# Make sure you have your APIC WebUI open to Fabric Access Policies, and have everything you see below expanded, so you can see it created in real-time
# This first one creates the VMM Domain
acipodvcenter = '10.29.10{0}.45'.format(acipod)
jsondata = {"vmmDomP":{"attributes":{"dn":"uni/vmmp-VMware/dom-T16-vCenter","enfPref":"hw","mcastAddr":"0.0.0.0","mode":"default","name":"T16-vCenter","ownerKey":"","ownerTag":""},"children":[{"vmmRsDefaultStpIfPol":{"attributes":{"tnStpIfPolName":"default"}}},{"vmmRsDefaultFwPol":{"attributes":{"tnNwsFwPolName":"default"}}},{"vmmRsDefaultLldpIfPol":{"attributes":{"tnLldpIfPolName":"default"}}},{"vmmCtrlrP":{"attributes":{"dvsVersion":"5.5","hostOrIp":acipodvcenter,"inventoryTrigSt":"untriggered","mode":"default","msftConfigIssues":"","name":"T16-VCSA","port":"0","rootContName":"TenanT16","scope":"vm","statsMode":"disabled"},"children":[{"vmmRsAcc":{"attributes":{"tDn":"uni/vmmp-VMware/dom-T16-vCenter/usracc-T16-admin"}}}]}},{"infraRsVlanNs":{"attributes":{"tDn":"uni/infra/vlanns-[T16-VMM-VLAN-Pool]-dynamic"}}},{"vmmRsDefaultCdpIfPol":{"attributes":{"tnCdpIfPolName":"default"}}},{"vmmRsDefaultLacpLagPol":{"attributes":{"tnLacpLagPolName":"default"}}},{"vmmRsDefaultL2InstPol":{"attributes":{"tnL2InstPolName":"default"}}},{"vmmUsrAccP":{"attributes":{"descr":"","name":"T16-admin","ownerKey":"","ownerTag":"","usr":"root"}}}]}}
r = s.post('https://{0}/api/node/mo/uni/vmmp-VMware/dom-T161-vCenter.json'.format(apic), cookies=cookies, data=json.dumps(jsondata), verify=False)
print r.status_code
print r.text
# Push the 'T16-CDP-disable' Interface Policy (CDP turned off) -- created up front in case a link needs it
jsondata = {"cdpIfPol":{"attributes":{"adminSt":"disabled","descr":"","dn":"uni/infra/cdpIfP-T16-CDP-disable","name":"T16-CDP-disable","ownerKey":"","ownerTag":""}}}
# Same POST recipe as every call in this file: address the MO's dn, carry the auth cookie,
# serialize the payload, and skip TLS verification for the APIC's self-signed certificate
url = 'https://%s/api/node/mo/uni/infra/cdpIfP-T16-CDP-disable.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the 'T16-CDP-enable' Interface Policy (CDP on) for the ESXi vSwitch uplinks from the blades to the FIs
jsondata = {"cdpIfPol":{"attributes":{"adminSt":"enabled","descr":"","dn":"uni/infra/cdpIfP-T16-CDP-enable","name":"T16-CDP-enable","ownerKey":"","ownerTag":""}}}
url = 'https://%s/api/node/mo/uni/infra/cdpIfP-T16-CDP-enable.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the 'T16-LLDP-disable' Interface Policy (LLDP off both directions) -- kept on hand just in case
jsondata = {"lldpIfPol":{"attributes":{"adminRxSt":"disabled","adminTxSt":"disabled","descr":"","dn":"uni/infra/lldpIfP-T16-LLDP-disable","name":"T16-LLDP-disable","ownerKey":"","ownerTag":""}}}
url = 'https://%s/api/node/mo/uni/infra/lldpIfP-T16-LLDP-disable.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the 'T16-LLDP-enable' Interface Policy (LLDP on both directions) for the vPC from the Leafs down to the FIs
jsondata = {"lldpIfPol":{"attributes":{"adminRxSt":"enabled","adminTxSt":"enabled","descr":"","dn":"uni/infra/lldpIfP-T16-LLDP-enable","name":"T16-LLDP-enable","ownerKey":"","ownerTag":""}}}
url = 'https://%s/api/node/mo/uni/infra/lldpIfP-T16-LLDP-enable.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the 'T16-LACP-Active' port-channel policy for the vPC from the Leafs down to the FIs
jsondata = {"lacpLagPol":{"attributes":{"ctrl":"fast-sel-hot-stdby,graceful-conv,susp-individual","descr":"","dn":"uni/infra/lacplagp-T16-LACP-Active","maxLinks":"16","minLinks":"1","mode":"active","name":"T16-LACP-Active","ownerKey":"","ownerTag":""}}}
url = 'https://%s/api/node/mo/uni/infra/lacplagp-T16-LACP-Active.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the 'T16-LACP-MacPinning' port-channel policy for the ESXi vSwitch uplinks from the blades to the FIs
jsondata = {"lacpLagPol":{"attributes":{"ctrl":"fast-sel-hot-stdby,graceful-conv,susp-individual","descr":"","dn":"uni/infra/lacplagp-T16-LACP-MacPinning","maxLinks":"16","minLinks":"1","mode":"mac-pin","name":"T16-LACP-MacPinning","ownerKey":"","ownerTag":""}}}
url = 'https://%s/api/node/mo/uni/infra/lacplagp-T16-LACP-MacPinning.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the vPC Interface Policy Group for the bundle from both Leafs down to FI-A
# (ties together the LLDP-enable, CDP-enable and LACP-Active policies plus the T16-VMM-AEP)
# NOTE(review): this FI-A group uses 'T16-CDP-enable' while the FI-B group below uses
# 'T16-CDP-disable' -- confirm the asymmetry is intentional
jsondata = {"infraAccBndlGrp":{"attributes":{"descr":"","dn":"uni/infra/funcprof/accbundle-T16-vPC-FI-A-PG","lagT":"node","name":"T16-vPC-FI-A-PG","ownerKey":"","ownerTag":""},"children":[{"infraRsMonIfInfraPol":{"attributes":{"tnMonInfraPolName":""}}},{"infraRsLldpIfPol":{"attributes":{"tnLldpIfPolName":"T16-LLDP-enable"}}},{"infraRsStpIfPol":{"attributes":{"tnStpIfPolName":""}}},{"infraRsL2IfPol":{"attributes":{"tnL2IfPolName":""}}},{"infraRsCdpIfPol":{"attributes":{"tnCdpIfPolName":"T16-CDP-enable"}}},{"infraRsMcpIfPol":{"attributes":{"tnMcpIfPolName":""}}},{"infraRsAttEntP":{"attributes":{"tDn":"uni/infra/attentp-T16-VMM-AEP"}}},{"infraRsLacpPol":{"attributes":{"tnLacpLagPolName":"T16-LACP-Active"}}},{"infraRsStormctrlIfPol":{"attributes":{"tnStormctrlIfPolName":""}}},{"infraRsHIfPol":{"attributes":{"tnFabricHIfPolName":""}}}]}}
url = 'https://%s/api/node/mo/uni/infra/funcprof/accbundle-T16-vPC-FI-A-PG.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the vPC Interface Policy Group for the bundle from both Leafs down to FI-B
jsondata = {"infraAccBndlGrp":{"attributes":{"descr":"","dn":"uni/infra/funcprof/accbundle-T16-vPC-FI-B-PG","lagT":"node","name":"T16-vPC-FI-B-PG","ownerKey":"","ownerTag":""},"children":[{"infraRsMonIfInfraPol":{"attributes":{"tnMonInfraPolName":""}}},{"infraRsLldpIfPol":{"attributes":{"tnLldpIfPolName":"T16-LLDP-enable"}}},{"infraRsStpIfPol":{"attributes":{"tnStpIfPolName":""}}},{"infraRsL2IfPol":{"attributes":{"tnL2IfPolName":""}}},{"infraRsCdpIfPol":{"attributes":{"tnCdpIfPolName":"T16-CDP-disable"}}},{"infraRsMcpIfPol":{"attributes":{"tnMcpIfPolName":""}}},{"infraRsAttEntP":{"attributes":{"tDn":"uni/infra/attentp-T16-VMM-AEP"}}},{"infraRsLacpPol":{"attributes":{"tnLacpLagPolName":"T16-LACP-Active"}}},{"infraRsStormctrlIfPol":{"attributes":{"tnStormctrlIfPolName":""}}},{"infraRsHIfPol":{"attributes":{"tnFabricHIfPolName":""}}}]}}
url = 'https://%s/api/node/mo/uni/infra/funcprof/accbundle-T16-vPC-FI-B-PG.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the Interface Profile + Interface Selector for the vPC down to FI-A
# (later mapped to both Leaf Switch Nodes by the Switch Profile below)
# NOTE(review): the selector is named 'T16-FI-A-Port11' but the port block actually
# selects eth1/15 (fromPort/toPort "15") -- confirm which port is intended
jsondata = {"infraAccPortP":{"attributes":{"descr":"","dn":"uni/infra/accportprof-T16-L101-L102-FI-A_ifselector","name":"T16-L101-L102-FI-A_ifselector","ownerKey":"","ownerTag":""},"children":[{"infraHPortS":{"attributes":{"descr":"","name":"T16-FI-A-Port11","ownerKey":"","ownerTag":"","type":"range"},"children":[{"infraRsAccBaseGrp":{"attributes":{"fexId":"101","tDn":"uni/infra/funcprof/accbundle-T16-vPC-FI-A-PG"}}},{"infraPortBlk":{"attributes":{"descr":"","fromCard":"1","fromPort":"15","name":"block2","toCard":"1","toPort":"15"}}}]}}]}}
url = 'https://%s/api/node/mo/uni/infra/accportprof-T16-L101-L102-FI-A_ifselector.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the Interface Profile + Interface Selector for the vPC down to FI-B
# NOTE(review): the selector is named 'T16-FI-B-Port12' but the port block actually
# selects eth1/16 (fromPort/toPort "16") -- confirm which port is intended
jsondata = {"infraAccPortP":{"attributes":{"descr":"","dn":"uni/infra/accportprof-T16-L101-L102-FI-B_ifselector","name":"T16-L101-L102-FI-B_ifselector","ownerKey":"","ownerTag":""},"children":[{"infraHPortS":{"attributes":{"descr":"","name":"T16-FI-B-Port12","ownerKey":"","ownerTag":"","type":"range"},"children":[{"infraRsAccBaseGrp":{"attributes":{"fexId":"101","tDn":"uni/infra/funcprof/accbundle-T16-vPC-FI-B-PG"}}},{"infraPortBlk":{"attributes":{"descr":"","fromCard":"1","fromPort":"16","name":"block2","toCard":"1","toPort":"16"}}}]}}]}}
url = 'https://%s/api/node/mo/uni/infra/accportprof-T16-L101-L102-FI-B_ifselector.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Push the Dynamic VLAN Pool (vlan-2180 through vlan-2189) that the VMM domain draws from
jsondata = {"fvnsVlanInstP":{"attributes":{"allocMode":"dynamic","descr":"","dn":"uni/infra/vlanns-[T16-VMM-VLAN-Pool]-dynamic","name":"T16-VMM-VLAN-Pool","ownerKey":"","ownerTag":""},"children":[{"fvnsEncapBlk":{"attributes":{"allocMode":"inherit","descr":"","from":"vlan-2180","name":"","to":"vlan-2189"}}}]}}
url = 'https://%s/api/node/mo/uni/infra/vlanns-[T16-VMM-VLAN-Pool]-dynamic.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# This one creates the Switch Profile for the switch block range of Node 101 to Node 102 and maps the both Interface Profiles down to FI-A and FI-B, respectively
jsondata = {"infraNodeP":{"attributes":{"descr":"","dn":"uni/infra/nprof-T16-L101-L102-FI-A-and-B_swprof","name":"T16-L101-L102-FI-A-and-B_swprof","ownerKey":"","ownerTag":""},"children":[{"infraLeafS":{"attributes":{"descr":"","name":"T16-L101-L102-FI-A-and-B_swsel","ownerKey":"","ownerTag":"","type":"range"},"children":[{"infraNodeBlk":{"attributes":{"descr":"","from_":"101","name":"0e4c30acf5d779c5","to_":"102"}}}]}},{"infraRsAccPortP":{"attributes":{"tDn":"uni/infra/accportprof-T16-L101-L102-FI-A_ifselector"}}},{"infraRsAccPortP":{"attributes":{"tDn":"uni/infra/accportprof-T16-L101-L102-FI-B_ifselector"}}}]}}
r = s.post('https://{0}/api/node/mo/uni/infra/nprof-T16-L101-L102-FI-B-SP.json'.format(apic), cookies=cookies, data=json.dumps(jsondata), verify=False)
print r.status_code
print r.text
# Push the AEP: maps the VMM domain (vCenter) to it and carries the vSwitch Override Policy
# so the DVS/vSwitch side uses CDP-enable, LLDP-disable and LACP MAC Pinning
# (this POST targets the parent 'uni/infra' and lets the payload dn place the AEP)
jsondata = {"infraAttEntityP":{"attributes":{"descr":"","dn":"uni/infra/attentp-T16-VMM-AEP","name":"T16-VMM-AEP","ownerKey":"","ownerTag":""},"children":[{"infraRsDomP":{"attributes":{"tDn":"uni/vmmp-VMware/dom-T16-vCenter"}}},{"infraAttPolicyGroup":{"attributes":{"descr":"","name":""},"children":[{"infraRsOverrideCdpIfPol":{"attributes":{"tnCdpIfPolName":"T16-CDP-enable"}}},{"infraRsOverrideLacpPol":{"attributes":{"tnLacpLagPolName":"T16-LACP-MacPinning"}}},{"infraRsOverrideLldpIfPol":{"attributes":{"tnLldpIfPolName":"T16-LLDP-disable"}}}]}}]}}
url = 'https://%s/api/node/mo/uni/infra.json' % apic
payload = json.dumps(jsondata)
r = s.post(url, cookies=cookies, data=payload, verify=False)
print(r.status_code)
print(r.text)
# Before you do anything else, switch over to the Tenant tab, and MAKE SURE your Tenant is DELETED!
# This next one is the big'n
# No seriously, you have no idea how big this seemingly simple JSON data next line really is
# In fact, please take a moment to see how big it is
# Put your cursor right before the first curly brace, and SHIFT+END or whatever, but select and copy that whole line
# Next, open another Sublime Text tab with CTRL+N, and paste the whole thing
# Finally run JSON Pretty Print
# See at the bottom right, how many lines it is?
# 5,486 lines is what I count
# This is what an entire tenant (one WITHOUT a ton of information in it) contains
# This tenant only has a few BDs, Subnets, EPGs, VRFs, etc
# However, it does have a few L4-7 Service Graphs created and deployed, with full parameters
# Go ahead, run it, and then explore ALL of what was created in the APIC
# Make sure you explore all of the L4-7 stuff, including expanding the EPGs and seeing the 'L4-7 Parameters' that are all populated there
# (Not all EPGs have L4-7 Params, so check them all)
# BTW, this one will take a LONG time to paste (like upwards of over a minute), and it is actually entirely possible that it will fail
# simply due to copy/paste buffer overflow
# If this happens, then now would be a good time to close the file, SSH into your Ubuntu Linux box,
# make sure it is git cloned to your /home/student directory, and run it using the CLI command
# 'python ./push-T6-apic-json-w-detailed-comments.py' (without any quotes)
jsondata = {"fvTenant":{"attributes":{"descr":"","dn":"uni/tn-TenanT16","name":"TenanT16","ownerKey":"","ownerTag":""},"children":[{"vnsAbsFuncProfContr":{"attributes":{"descr":"","name":"","ownerKey":"","ownerTag":""},"children":[{"vnsAbsFuncProfGrp":{"attributes":{"descr":"","name":"t6f5","ownerKey":"","ownerTag":""},"children":[{"vnsAbsFuncProf":{"attributes":{"descr":"","name":"t6f5HA1ArmDyn","ownerKey":"","ownerTag":""},"children":[{"vnsRsProfToMFunc":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server"}}},{"vnsAbsDevCfg":{"attributes":{"descr":"","name":"devConfig","ownerKey":"","ownerTag":""},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"LocalTraffic","locked":"no","name":"LTM","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Monitor","locked":"no","name":"Monitor1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SendText","locked":"no","mandatory":"no","name":"SendText","validation":"","value":"GET 
\\r\\n"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"ReceiveText","locked":"no","mandatory":"no","name":"ReceiveText","validation":"","value":""}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Type","locked":"no","mandatory":"yes","name":"Type","validation":"","value":"HTTP"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"FailByAttempts","locked":"no","mandatory":"yes","name":"FailByAttempts","validation":"","value":"3"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"FrequencySeconds","locked":"no","mandatory":"yes","name":"FrequencySeconds","validation":"","value":"5"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Pool","locked":"no","name":"Pool1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"PoolType","locked":"no","mandatory":"no","name":"PoolType","validation":"","value":"DYNAMIC"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"LBMethod","locked":"no","mandatory":"yes","name":"LBMethod","validation":"","value":"ROUND_ROBIN"}}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"PoolMonitor","locked":"no","name":"PoolMonitor1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsCfgRel":{"attributes":{"cardinality":"unspecified","key":"PoolMonitorRel","locked":"no","mandatory":"no","name":"PoolMonitorRel","targetName":"LTM/Monitor1"}}}]}}]}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Network","locked":"no","name":"Network1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"InternalSelfIP","locked":"no","name":"InternalFloat","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","man
datory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"40.40.60.100"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"ADC1","key":"InternalSelfIP","locked":"no","name":"Internal1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"40.40.60.101"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"ADC2","key":"InternalSelfIP","locked":"no","name":"Internal2","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"40.40.60.102"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}}]}}]}},{"vnsAbsFuncCfg":{"attributes":{"descr":"","name":"funcConfig","ownerKey":"","ownerTag":""},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecif
ied","devCtxLbl":"","key":"Listener","locked":"no","name":"Listener1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"DestinationNetmask","locked":"no","mandatory":"yes","name":"DestinationNetmask","validation":"","value":"255.255.255.255"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"DestinationPort","locked":"no","mandatory":"yes","name":"DestinationPort","validation":"","value":"80"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Protocol","locked":"no","mandatory":"yes","name":"Protocol","validation":"","value":"TCP"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"NetworkRelation","locked":"no","name":"NetworkRelation1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsCfgRel":{"attributes":{"cardinality":"unspecified","key":"NetworkRel","locked":"no","mandatory":"no","name":"NetworkRel","targetName":"Network1"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Pool","locked":"no","name":"Pool1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"EPGDestinationPort","locked":"no","mandatory":"no","name":"EPGDestinationPort","validation":"","value":"80"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"EPGConnectionLimit","locked":"no","mandatory":"no","name":"EPGConnectionLimit","validation":"","value":"1000"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"EPGRatio","locked":"no","mandatory":"no","name":"EPGRatio","validation":"","value":"1"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"EPGConnectionRateLimit","locked":"no","mandatory":"no","name":"EPGConnectionRateLimit","validation":"","value":"1000"}}},{"vnsAbsCfgRel":{"attributes":{"cardinality":"unspecified","key":"PoolRel","locked":"no","mandatory":"no","name":"PoolRel",
"targetName":"LTM/Pool1"}}}]}}]}}]}},{"vnsAbsFuncProf":{"attributes":{"descr":"","name":"t6f5HA2ArmStatic","ownerKey":"","ownerTag":""},"children":[{"vnsRsProfToMFunc":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server"}}},{"vnsAbsDevCfg":{"attributes":{"descr":"","name":"devConfig","ownerKey":"","ownerTag":""},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"LocalTraffic","locked":"no","name":"LTM","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Monitor","locked":"no","name":"Monitor1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SendText","locked":"no","mandatory":"no","name":"SendText","validation":"","value":"GET \/r\/n"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Type","locked":"no","mandatory":"yes","name":"Type","validation":"","value":"HTTP"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"FrequencySeconds","locked":"no","mandatory":"yes","name":"FrequencySeconds","validation":"","value":"5"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"FailByAttempts","locked":"no","mandatory":"yes","name":"FailByAttempts","validation":"","value":"3"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Pool","locked":"no","name":"Pool1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"PoolType","locked":"no","mandatory":"no","name":"PoolType","validation":"","value":"STATIC"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"LBMethod","locked":"no","mandatory":"yes","name":"LBMethod","validation":"","value":"ROUND_ROBIN"}}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"PoolMonitor","locked":"no","name":"PoolMonitor1","p
rofileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsCfgRel":{"attributes":{"cardinality":"unspecified","key":"PoolMonitorRel","locked":"no","mandatory":"no","name":"PoolMonitorRel","targetName":"LTM/Monitor1"}}}]}}]}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Network","locked":"no","name":"Network1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"ExternalSelfIP","locked":"no","name":"ExternalFloat","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"10.10.60.100"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"ADC2","key":"ExternalSelfIP","locked":"no","name":"External2","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"10.10.60.102"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"ADC1","key":"ExternalSelfIP","locked":"no","name":"External1","profileBehaviorShared":"no","scoped
By":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"10.10.60.101"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"InternalSelfIP","locked":"no","name":"InternalFloat","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"20.20.60.100"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"ADC1","key":"InternalSelfIP","locked":"no","name":"Internal1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"20.20.60.101"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"un
specified","devCtxLbl":"ADC2","key":"InternalSelfIP","locked":"no","name":"Internal2","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"yes","name":"SelfIPAddress","validation":"","value":"20.20.60.102"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"yes","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}}]}}]}}]}},{"vnsAbsFuncCfg":{"attributes":{"descr":"","name":"funcConfig","ownerKey":"","ownerTag":""},"children":[{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Listener","locked":"no","name":"Listener1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"DestinationNetmask","locked":"no","mandatory":"yes","name":"DestinationNetmask","validation":"","value":"255.255.255.255"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"Protocol","locked":"no","mandatory":"yes","name":"Protocol","validation":"","value":"TCP"}}},{"vnsAbsParam":{"attributes":{"cardinality":"unspecified","key":"DestinationPort","locked":"no","mandatory":"yes","name":"DestinationPort","validation":"","value":"80"}}}]}},{"vnsAbsFolder":{"attributes":{"cardinality":"unspecified","devCtxLbl":"","key":"Pool","locked":"no","name":"Pool1","profileBehaviorShared":"no","scopedBy":"epg"},"children":[{"vnsAbsCfgRel":{"attributes":{"cardinality":"unspecified","key":"PoolRel","locked":"no","mandatory":"no","name":"PoolRel","targetName":"LTM/Pool1"}}}]}}]}}]}}]}}]}},{"vzBrCP":{"attributes":{"descr":"","name":"T16-AllowPing","ownerKey":"","ownerTag":"","prio":"unspecified","scope":"tenant"},"children":[{"vzSubj":{"attributes":{"consMatchT":"AtleastO
ne","descr":"","name":"T16-AllowICMP","prio":"unspecified","provMatchT":"AtleastOne","revFltPorts":"yes"},"children":[{"vzRsSubjFiltAtt":{"attributes":{"tnVzFilterName":"T16-AllowPing"}}}]}}]}},{"vzBrCP":{"attributes":{"descr":"","name":"Test2","ownerKey":"","ownerTag":"","prio":"unspecified","scope":"context"},"children":[{"vzSubj":{"attributes":{"consMatchT":"AtleastOne","descr":"","name":"Subject","prio":"unspecified","provMatchT":"AtleastOne","revFltPorts":"yes"},"children":[{"vzRsSubjFiltAtt":{"attributes":{"tnVzFilterName":"default"}}}]}}]}},{"vzBrCP":{"attributes":{"descr":"","name":"maybe1armdynamic","ownerKey":"","ownerTag":"","prio":"unspecified","scope":"context"},"children":[{"vzSubj":{"attributes":{"consMatchT":"AtleastOne","descr":"","name":"Subject","prio":"unspecified","provMatchT":"AtleastOne","revFltPorts":"yes"},"children":[{"vzRsSubjFiltAtt":{"attributes":{"tnVzFilterName":"default"}}},{"vzRsSubjGraphAtt":{"attributes":{"tnVnsAbsGraphName":"t6f5-WebGraph1Arm"}}}]}}]}},{"vzBrCP":{"attributes":{"descr":"","name":"Test","ownerKey":"","ownerTag":"","prio":"unspecified","scope":"context"},"children":[{"vzSubj":{"attributes":{"consMatchT":"AtleastOne","descr":"","name":"Subject","prio":"unspecified","provMatchT":"AtleastOne","revFltPorts":"yes"},"children":[{"vzRsSubjFiltAtt":{"attributes":{"tnVzFilterName":"default"}}}]}}]}},{"vzBrCP":{"attributes":{"descr":"","name":"T16-AllowSSH","ownerKey":"","ownerTag":"","prio":"unspecified","scope":"tenant"},"children":[{"vzSubj":{"attributes":{"consMatchT":"AtleastOne","descr":"","name":"AllowRemoteSSH","prio":"unspecified","provMatchT":"AtleastOne","revFltPorts":"yes"},"children":[{"vzRsSubjFiltAtt":{"attributes":{"tnVzFilterName":"T16-sshFilterIdentity"}}}]}}]}},{"vzBrCP":{"attributes":{"descr":"","name":"T16Web2ArmContract","ownerKey":"","ownerTag":"","prio":"unspecified","scope":"context"},"children":[{"vzSubj":{"attributes":{"consMatchT":"AtleastOne","descr":"","name":"Subject","prio":"unspecified","provMa
tchT":"AtleastOne","revFltPorts":"yes"},"children":[{"vzRsSubjFiltAtt":{"attributes":{"tnVzFilterName":"default"}}}]}}]}},{"vnsLDevCtx":{"attributes":{"ctrctNameOrLbl":"Test","descr":"","graphNameOrLbl":"T16WebContract","name":"","nodeNameOrLbl":"ADC"},"children":[{"vnsRsLDevCtxToLDev":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5"}}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"internal","descr":"","name":""},"children":[{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/lIf-internal"}}}]}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"external","descr":"","name":""},"children":[{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/lIf-external"}}}]}}]}},{"vnsLDevCtx":{"attributes":{"ctrctNameOrLbl":"T16Web2ArmContract","descr":"","graphNameOrLbl":"T16WebContract","name":"","nodeNameOrLbl":"ADC"},"children":[{"vnsRsLDevCtxToLDev":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5"}}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"internal","descr":"","name":""},"children":[{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/lIf-internal"}}}]}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"external","descr":"","name":""},"children":[{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/lIf-external"}}}]}}]}},{"vnsLDevCtx":{"attributes":{"ctrctNameOrLbl":"maybe1armdynamic","descr":"","graphNameOrLbl":"t6f5-WebGraph1Arm","name":"","nodeNameOrLbl":"ADC"},"children":[{"vnsRsLDevCtxToLDev":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5"}}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"internal","descr":"","name":""},"children":[{"vnsRsLIfCtxToBD":{"attributes":{"tDn":"uni/tn-TenanT16/BD-T16-F5"}}},{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/lIf-internal"}}}]}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"external","descr":"","name":""},"children":[{"vnsRsLIfCtxToBD":{"attributes":{"tDn":"uni/tn-TenanT16/BD-T16-F5"}}},{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-Ten
anT16/lDevVip-t6f5/lIf-external"}}}]}}]}},{"vnsLDevCtx":{"attributes":{"ctrctNameOrLbl":"Test2","descr":"","graphNameOrLbl":"T16WebContract","name":"","nodeNameOrLbl":"ADC"},"children":[{"vnsRsLDevCtxToLDev":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5"}}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"internal","descr":"","name":""},"children":[{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/lIf-internal"}}}]}},{"vnsLIfCtx":{"attributes":{"connNameOrLbl":"external","descr":"","name":""},"children":[{"vnsRsLIfCtxToLIf":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/lIf-external"}}}]}}]}},{"vnsAbsGraph":{"attributes":{"descr":"","name":"T16WebContract","ownerKey":"","ownerTag":"","uiTemplateType":"ONE_NODE_ADC_TWO_ARM"},"children":[{"vnsAbsTermNodeCon":{"attributes":{"descr":"","name":"T1","ownerKey":"","ownerTag":""},"children":[{"vnsAbsTermConn":{"attributes":{"attNotify":"no","descr":"","name":"1","ownerKey":"","ownerTag":""}}},{"vnsInTerm":{"attributes":{"descr":"","name":"input-terminal"}}},{"vnsOutTerm":{"attributes":{"descr":"","name":"output-terminal"}}}]}},{"vnsAbsTermNodeProv":{"attributes":{"descr":"","name":"T2","ownerKey":"","ownerTag":""},"children":[{"vnsAbsTermConn":{"attributes":{"attNotify":"no","descr":"","name":"1","ownerKey":"","ownerTag":""}}},{"vnsInTerm":{"attributes":{"descr":"","name":"input-terminal"}}},{"vnsOutTerm":{"attributes":{"descr":"","name":"output-terminal"}}}]}},{"vnsAbsConnection":{"attributes":{"adjType":"L2","connType":"external","descr":"","name":"C1","ownerKey":"","ownerTag":"","unicastRoute":"yes"},"children":[{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-T16WebContract/AbsTermNodeCon-T1/AbsTConn"}}},{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-T16WebContract/AbsNode-ADC/AbsFConn-external"}}}]}},{"vnsAbsConnection":{"attributes":{"adjType":"L2","connType":"external","descr":"","name":"C2","ownerKey":"","ownerTag":"","unicastRoute":"yes"}
,"children":[{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-T16WebContract/AbsNode-ADC/AbsFConn-internal"}}},{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-T16WebContract/AbsTermNodeProv-T2/AbsTConn"}}}]}},{"vnsAbsNode":{"attributes":{"descr":"","funcType":"GoTo","name":"ADC","ownerKey":"","ownerTag":"","shareEncap":"no"},"children":[{"vnsAbsFuncConn":{"attributes":{"attNotify":"no","descr":"","name":"external","ownerKey":"","ownerTag":""},"children":[{"vnsRsMConnAtt":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server/mConn-external"}}}]}},{"vnsAbsFuncConn":{"attributes":{"attNotify":"no","descr":"","name":"internal","ownerKey":"","ownerTag":""},"children":[{"vnsRsMConnAtt":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server/mConn-internal"}}}]}},{"vnsRsNodeToAbsFuncProf":{"attributes":{"tDn":"uni/tn-TenanT16/absFuncProfContr/absFuncProfGrp-t6f5/absFuncProf-t6f5HA2ArmStatic"}}},{"vnsRsNodeToMFunc":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server"}}}]}}]}},{"vnsAbsGraph":{"attributes":{"descr":"","name":"t6f5-WebGraph1Arm","ownerKey":"","ownerTag":"","uiTemplateType":"ONE_NODE_ADC_ONE_ARM"},"children":[{"vnsAbsTermNodeCon":{"attributes":{"descr":"","name":"T1","ownerKey":"","ownerTag":""},"children":[{"vnsAbsTermConn":{"attributes":{"attNotify":"no","descr":"","name":"1","ownerKey":"","ownerTag":""}}},{"vnsInTerm":{"attributes":{"descr":"","name":"input-terminal"}}},{"vnsOutTerm":{"attributes":{"descr":"","name":"output-terminal"}}}]}},{"vnsAbsTermNodeProv":{"attributes":{"descr":"","name":"T2","ownerKey":"","ownerTag":""},"children":[{"vnsAbsTermConn":{"attributes":{"attNotify":"no","descr":"","name":"1","ownerKey":"","ownerTag":""}}},{"vnsInTerm":{"attributes":{"descr":"","name":"input-terminal"}}},{"vnsOutTerm":{"attributes":{"descr":"","name":"output-terminal"}}}]}},{"vnsAbsConnection":{"attributes":{"adjType":"L3","connType":"external","descr"
:"","name":"C1","ownerKey":"","ownerTag":"","unicastRoute":"yes"},"children":[{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-t6f5-WebGraph1Arm/AbsNode-ADC/AbsFConn-external"}}},{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-t6f5-WebGraph1Arm/AbsTermNodeCon-T1/AbsTConn"}}}]}},{"vnsAbsConnection":{"attributes":{"adjType":"L3","connType":"external","descr":"","name":"C2","ownerKey":"","ownerTag":"","unicastRoute":"yes"},"children":[{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-t6f5-WebGraph1Arm/AbsNode-ADC/AbsFConn-internal"}}},{"vnsRsAbsConnectionConns":{"attributes":{"tDn":"uni/tn-TenanT16/AbsGraph-t6f5-WebGraph1Arm/AbsTermNodeProv-T2/AbsTConn"}}}]}},{"vnsAbsNode":{"attributes":{"descr":"","funcType":"GoTo","name":"ADC","ownerKey":"","ownerTag":"","shareEncap":"no"},"children":[{"vnsAbsFuncConn":{"attributes":{"attNotify":"no","descr":"","name":"external","ownerKey":"","ownerTag":""},"children":[{"vnsRsMConnAtt":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server/mConn-external"}}}]}},{"vnsAbsFuncConn":{"attributes":{"attNotify":"yes","descr":"","name":"internal","ownerKey":"","ownerTag":""},"children":[{"vnsRsMConnAtt":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server/mConn-internal"}}}]}},{"vnsRsNodeToAbsFuncProf":{"attributes":{"tDn":"uni/tn-TenanT16/absFuncProfContr/absFuncProfGrp-t6f5/absFuncProf-t6f5HA1ArmDyn"}}},{"vnsRsNodeToMFunc":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mFunc-Virtual-Server"}}}]}}]}},{"drawCont":{"attributes":{},"children":[{"drawInst":{"attributes":{"info":"{'epg-T16-Win7-EPG':{'x':727,'y':200},'ctrct_provider-Test2':{'x':466,'y':-90},'ctrct_provider-Test':{'x':458,'y':114},'ctrct_provider-T16-AllowPing':{'x':453,'y':214},'ctrct_provider-T16-AllowSSH':{'x':461,'y':13},'ctrct_provider-T16Web2ArmContract':{'x':453,'y':329},'epg-T16-Ubuntu-EPG':{'x':223,'y':83}}","oDn":"uni/tn-TenanT16/ap-T16-MyApp"}}}]}},{"f
vCtx":{"attributes":{"descr":"","knwMcastAct":"permit","name":"T16-Private","ownerKey":"","ownerTag":"","pcEnfPref":"enforced"},"children":[{"fvRsCtxToExtRouteTagPol":{"attributes":{"tnL3extRouteTagPolName":""}}},{"fvRsBgpCtxPol":{"attributes":{"tnBgpCtxPolName":""}}},{"vzAny":{"attributes":{"descr":"","matchT":"AtleastOne","name":""}}},{"fvRsOspfCtxPol":{"attributes":{"tnOspfCtxPolName":""}}},{"fvRsCtxToEpRet":{"attributes":{"tnFvEpRetPolName":""}}}]}},{"fvBD":{"attributes":{"arpFlood":"no","descr":"","epMoveDetectMode":"","limitIpLearnToSubnets":"no","llAddr":"::","mac":"00:22:BD:F8:19:FF","multiDstPktAct":"bd-flood","name":"T16-F5","ownerKey":"","ownerTag":"","unicastRoute":"yes","unkMacUcastAct":"proxy","unkMcastAct":"flood"},"children":[{"fvRsBDToNdP":{"attributes":{"tnNdIfPolName":""}}},{"fvRsCtx":{"attributes":{"tnFvCtxName":"T16-Private"}}},{"fvRsIgmpsn":{"attributes":{"tnIgmpSnoopPolName":""}}},{"fvSubnet":{"attributes":{"ctrl":"","descr":"","ip":"40.40.60.1/24","name":"","preferred":"no","scope":"private"}}},{"fvRsBdToEpRet":{"attributes":{"resolveAct":"resolve","tnFvEpRetPolName":""}}}]}},{"fvBD":{"attributes":{"arpFlood":"no","descr":"","epMoveDetectMode":"","limitIpLearnToSubnets":"no","llAddr":"::","mac":"00:22:BD:F8:19:FF","multiDstPktAct":"bd-flood","name":"T16-BD2","ownerKey":"","ownerTag":"","unicastRoute":"yes","unkMacUcastAct":"proxy","unkMcastAct":"flood"},"children":[{"fvRsBDToNdP":{"attributes":{"tnNdIfPolName":""}}},{"fvRsCtx":{"attributes":{"tnFvCtxName":"T16-Private"}}},{"fvRsIgmpsn":{"attributes":{"tnIgmpSnoopPolName":""}}},{"fvSubnet":{"attributes":{"ctrl":"","descr":"","ip":"20.20.60.1/24","name":"","preferred":"no","scope":"private"}}},{"fvRsBdToEpRet":{"attributes":{"resolveAct":"resolve","tnFvEpRetPolName":""}}}]}},{"fvBD":{"attributes":{"arpFlood":"no","descr":"","epMoveDetectMode":"","limitIpLearnToSubnets":"no","llAddr":"::","mac":"00:22:BD:F8:19:FF","multiDstPktAct":"bd-flood","name":"T16-BD1","ownerKey":"","ownerTag":"","unicastR
oute":"yes","unkMacUcastAct":"proxy","unkMcastAct":"flood"},"children":[{"fvRsBDToNdP":{"attributes":{"tnNdIfPolName":""}}},{"fvRsCtx":{"attributes":{"tnFvCtxName":"T16-Private"}}},{"fvRsIgmpsn":{"attributes":{"tnIgmpSnoopPolName":""}}},{"fvSubnet":{"attributes":{"ctrl":"","descr":"","ip":"30.30.60.1/24","name":"","preferred":"no","scope":"private"}}},{"fvSubnet":{"attributes":{"ctrl":"","descr":"","ip":"10.10.60.1/24","name":"","preferred":"no","scope":"private"}}},{"fvRsBdToEpRet":{"attributes":{"resolveAct":"resolve","tnFvEpRetPolName":""}}}]}},{"vzFilter":{"attributes":{"descr":"","name":"T16-sshFilterIdentity","ownerKey":"","ownerTag":""},"children":[{"vzEntry":{"attributes":{"applyToFrag":"no","arpOpc":"unspecified","dFromPort":"22","dToPort":"22","descr":"","etherT":"ip","icmpv4T":"unspecified","icmpv6T":"unspecified","name":"sshFilter","prot":"tcp","sFromPort":"unspecified","sToPort":"unspecified","stateful":"no","tcpRules":""}}}]}},{"vzFilter":{"attributes":{"descr":"","name":"T16-AllowPing","ownerKey":"","ownerTag":""},"children":[{"vzEntry":{"attributes":{"applyToFrag":"no","arpOpc":"unspecified","dFromPort":"unspecified","dToPort":"unspecified","descr":"","etherT":"ip","icmpv4T":"unspecified","icmpv6T":"unspecified","name":"AllowPing","prot":"icmp","sFromPort":"unspecified","sToPort":"unspecified","stateful":"no","tcpRules":""}}}]}},{"fvRsTenantMonPol":{"attributes":{"tnMonEPGPolName":""}}},{"fvAp":{"attributes":{"descr":"","name":"T16-MyApp","ownerKey":"","ownerTag":"","prio":"unspecified"},"children":[{"fvAEPg":{"attributes":{"descr":"","matchT":"AtleastOne","name":"T16-Win7-EPG","prio":"unspecified"},"children":[{"fvRsCons":{"attributes":{"prio":"unspecified","tnVzBrCPName":"T16Web2ArmContract"}}},{"fvRsCons":{"attributes":{"prio":"unspecified","tnVzBrCPName":"T16-AllowSSH"}}},{"fvRsCons":{"attributes":{"prio":"unspecified","tnVzBrCPName":"T16-AllowPing"}}},{"fvRsCons":{"attributes":{"prio":"unspecified","tnVzBrCPName":"Test"}}},{"fvRsCons":{"attribut
es":{"prio":"unspecified","tnVzBrCPName":"Test2"}}},{"fvRsCons":{"attributes":{"prio":"unspecified","tnVzBrCPName":"maybe1armdynamic"}}},{"fvRsDomAtt":{"attributes":{"encap":"unknown","instrImedcy":"lazy","resImedcy":"lazy","tDn":"uni/vmmp-VMware/dom-T16-vCenter"}}},{"fvRsCustQosPol":{"attributes":{"tnQosCustomPolName":""}}},{"fvRsBd":{"attributes":{"tnFvBDName":"T16-BD1"}}}]}},{"fvAEPg":{"attributes":{"descr":"","matchT":"AtleastOne","name":"T16-Ubuntu-EPG","prio":"unspecified"},"children":[{"fvRsDomAtt":{"attributes":{"encap":"unknown","instrImedcy":"immediate","resImedcy":"immediate","tDn":"uni/vmmp-VMware/dom-T16-vCenter"}}},{"fvRsCustQosPol":{"attributes":{"tnQosCustomPolName":""}}},{"fvRsBd":{"attributes":{"tnFvBDName":"T16-BD2"}}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"LocalTraffic","locked":"no","name":"LTM","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Monitor","locked":"no","name":"Monitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SendText","locked":"no","mandatory":"no","name":"SendText","validation":"","value":"GET 
\/r\/n"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FailByAttempts","locked":"no","mandatory":"no","name":"FailByAttempts","validation":"","value":"3"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Type","locked":"no","mandatory":"no","name":"Type","validation":"","value":"HTTP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FrequencySeconds","locked":"no","mandatory":"no","name":"FrequencySeconds","validation":"","value":"5"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Pool","locked":"no","name":"Pool1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"LBMethod","locked":"no","mandatory":"no","name":"LBMethod","validation":"","value":"ROUND_ROBIN"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"PoolType","locked":"no","mandatory":"no","name":"PoolType","validation":"","value":"STATIC"}}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"PoolMonitor","locked":"no","name":"PoolMonitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"PoolMonitorRel","locked":"no","mandatory":"no","name":"PoolMonitorRel","targetName":"LTM/Monitor1"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Member","locked":"no","name":"Member","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Port","locked":"no","mandatory":"no","name":"Port","validation":"","value":"80"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"IPAddress","locked":"no","mandatory":"no"
,"name":"IPAddress","validation":"","value":"20.20.60.21"}}}]}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Network","locked":"no","name":"Network1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"InternalFloat","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.100"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"ADC2","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"Internal2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.102"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"ADC2","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","n
ame":"External2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.102"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"ADC1","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"External1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.101"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"ADC1","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"Internal1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddr
ess","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.101"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"ExternalFloat","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.100"}}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Pool","locked":"no","name":"Pool1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"PoolRel","locked":"no","mandatory":"no","name":"PoolRel","targetName":"LTM/Pool1"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"NetworkRelation","locked":"no","name":"NetworkRelation1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"NetworkRel","locked":"no","mandatory":"no","name":"NetworkRel","targetName":"Network1"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"Listener","locked":"no","name":"Listener1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Destinatio
nPort","locked":"no","mandatory":"no","name":"DestinationPort","validation":"","value":"80"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Protocol","locked":"no","mandatory":"no","name":"Protocol","validation":"","value":"TCP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationNetmask","locked":"no","mandatory":"no","name":"DestinationNetmask","validation":"","value":"255.255.255.255"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationIPAddress","locked":"no","mandatory":"no","name":"DestinationIPAddress","validation":"","value":"40.40.60.110"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Listener","locked":"no","name":"Listener1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationPort","locked":"no","mandatory":"no","name":"DestinationPort","validation":"","value":"8080"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Protocol","locked":"no","mandatory":"no","name":"Protocol","validation":"","value":"TCP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationNetmask","locked":"no","mandatory":"no","name":"DestinationNetmask","validation":"","value":"255.255.255.255"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationIPAddress","locked":"no","mandatory":"no","name":"DestinationIPAddress","validation":"","value":"10.10.60.112"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Network","locked":"no","name":"Network1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"ADC2","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","na
me":"Internal2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.102"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"ExternalFloat","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.100"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"ADC2","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"External2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAd
dress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.102"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"ADC1","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"Internal1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.101"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"InternalFloat","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.100"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"ADC1","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"External1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating",
"validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.101"}}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"LocalTraffic","locked":"no","name":"LTM","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Pool","locked":"no","name":"Pool2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"LBMethod","locked":"no","mandatory":"no","name":"LBMethod","validation":"","value":"ROUND_ROBIN"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"PoolType","locked":"no","mandatory":"no","name":"PoolType","validation":"","value":"STATIC"}}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Member","locked":"no","name":"Member","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Port","locked":"no","mandatory":"no","name":"Port","validation":"","value":"80"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"IPAddress","locked":"no","mandatory":"no","name":"IPAddress","validation":"","value":"20.20.60.22"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"PoolMonitor","locked":"no","name":"PoolMonitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attrib
utes":{"cardinality":"unspecified","key":"PoolMonitorRel","locked":"no","mandatory":"no","name":"PoolMonitorRel","targetName":"LTM/Monitor1"}}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Monitor","locked":"no","name":"Monitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SendText","locked":"no","mandatory":"no","name":"SendText","validation":"","value":"GET \/r\/n"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FailByAttempts","locked":"no","mandatory":"no","name":"FailByAttempts","validation":"","value":"3"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Type","locked":"no","mandatory":"no","name":"Type","validation":"","value":"HTTP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FrequencySeconds","locked":"no","mandatory":"no","name":"FrequencySeconds","validation":"","value":"5"}}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"Network","locked":"no","name":"Network1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"ADC2","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"InternalSelfIP","locked":"no","name":"Internal2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no"
,"name":"SelfIPAddress","validation":"","value":"40.40.60.102"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"Route","locked":"no","name":"Route1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationNetmask","locked":"no","mandatory":"no","name":"DestinationNetmask","validation":"","value":"0.0.0.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"NextHopIPAddress","locked":"no","mandatory":"no","name":"NextHopIPAddress","validation":"","value":"40.40.60.1"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationIPAddress","locked":"no","mandatory":"no","name":"DestinationIPAddress","validation":"","value":"0.0.0.0"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"ADC1","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"InternalSelfIP","locked":"no","name":"Internal1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"40.40.60.101"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"InternalSelfIP","locked":"no","name":"InternalFloat","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory
":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"40.40.60.100"}}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Network","locked":"no","name":"Network1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"InternalFloat","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.100"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"ADC2","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"External2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validat
ion":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.102"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"ExternalFloat","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"YES"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.100"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"ADC1","graphNameOrLbl":"T16WebContract","key":"ExternalSelfIP","locked":"no","name":"External1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"10.10.60.101"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"ADC2","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"Internal2","nodeNameOrLbl":"ADC","scoped
By":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.102"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"ADC1","graphNameOrLbl":"T16WebContract","key":"InternalSelfIP","locked":"no","name":"Internal1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Floating","locked":"no","mandatory":"no","name":"Floating","validation":"","value":"NO"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPNetmask","locked":"no","mandatory":"no","name":"SelfIPNetmask","validation":"","value":"255.255.255.0"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SelfIPAddress","locked":"no","mandatory":"no","name":"SelfIPAddress","validation":"","value":"20.20.60.101"}}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Listener","locked":"no","name":"Listener1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationPort","locked":"no","mandatory":"no","name":"DestinationPort","validation":"","value":"80"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Protocol","locked":"no","mandatory":"no","name":"Protocol","validation":"","value":"TCP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationNetmask","locked":"no","mandatory":"no","nam
e":"DestinationNetmask","validation":"","value":"255.255.255.255"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationIPAddress","locked":"no","mandatory":"no","name":"DestinationIPAddress","validation":"","value":"10.10.60.111"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"Pool","locked":"no","name":"Pool1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"PoolRel","locked":"no","mandatory":"no","name":"PoolRel","targetName":"LTM/Pool1"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"EPGDestinationPort","locked":"no","mandatory":"no","name":"EPGDestinationPort","validation":"","value":"80"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"EPGRatio","locked":"no","mandatory":"no","name":"EPGRatio","validation":"","value":"1"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"EPGConnectionLimit","locked":"no","mandatory":"no","name":"EPGConnectionLimit","validation":"","value":"1000"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"EPGConnectionRateLimit","locked":"no","mandatory":"no","name":"EPGConnectionRateLimit","validation":"","value":"1000"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"LocalTraffic","locked":"no","name":"LTM","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Monitor","locked":"no","name":"Monitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"SendText","locked":"no","mandatory":"no","name":"SendText","validation":"","value":"GET 
\/r\/n"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FailByAttempts","locked":"no","mandatory":"no","name":"FailByAttempts","validation":"","value":"3"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Type","locked":"no","mandatory":"no","name":"Type","validation":"","value":"HTTP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FrequencySeconds","locked":"no","mandatory":"no","name":"FrequencySeconds","validation":"","value":"5"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Pool","locked":"no","name":"Pool1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"LBMethod","locked":"no","mandatory":"no","name":"LBMethod","validation":"","value":"ROUND_ROBIN"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"PoolType","locked":"no","mandatory":"no","name":"PoolType","validation":"","value":"STATIC"}}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"PoolMonitor","locked":"no","name":"PoolMonitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"PoolMonitorRel","locked":"no","mandatory":"no","name":"PoolMonitorRel","targetName":"LTM/Monitor1"}}}]}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"LocalTraffic","locked":"no","name":"LTM","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"Monitor","locked":"no","name":"Monitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attribut
es":{"cardinality":"unspecified","key":"SendText","locked":"no","mandatory":"no","name":"SendText","validation":"","value":"GET \\r\\n"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FailByAttempts","locked":"no","mandatory":"no","name":"FailByAttempts","validation":"","value":"3"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Type","locked":"no","mandatory":"no","name":"Type","validation":"","value":"HTTP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"FrequencySeconds","locked":"no","mandatory":"no","name":"FrequencySeconds","validation":"","value":"5"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"Pool","locked":"no","name":"Pool1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"LBMethod","locked":"no","mandatory":"no","name":"LBMethod","validation":"","value":"ROUND_ROBIN"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"PoolType","locked":"no","mandatory":"no","name":"PoolType","validation":"","value":"DYNAMIC"}}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"maybe1armdynamic","devCtxLbl":"","graphNameOrLbl":"t6f5-WebGraph1Arm","key":"PoolMonitor","locked":"no","name":"PoolMonitor1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"PoolMonitorRel","locked":"no","mandatory":"no","name":"PoolMonitorRel","targetName":"LTM/Monitor1"}}}]}}]}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"Test2","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Pool","locked":"no","name":"Pool2","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"PoolRel","locked":"no","mandatory":"no","name":"PoolRel","targetName
":"LTM/Pool2"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"NetworkRelation","locked":"no","name":"NetworkRelation","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"NetworkRel","locked":"no","mandatory":"no","name":"NetworkRel","targetName":"Network1"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Listener","locked":"no","name":"Listener1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationPort","locked":"no","mandatory":"no","name":"DestinationPort","validation":"","value":"80"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"Protocol","locked":"no","mandatory":"no","name":"Protocol","validation":"","value":"TCP"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationNetmask","locked":"no","mandatory":"no","name":"DestinationNetmask","validation":"","value":"255.255.255.255"}}},{"vnsParamInst":{"attributes":{"cardinality":"unspecified","key":"DestinationIPAddress","locked":"no","mandatory":"no","name":"DestinationIPAddress","validation":"","value":"10.10.60.110"}}}]}},{"vnsFolderInst":{"attributes":{"cardinality":"unspecified","ctrctNameOrLbl":"T16Web2ArmContract","devCtxLbl":"","graphNameOrLbl":"T16WebContract","key":"Pool","locked":"no","name":"Pool1","nodeNameOrLbl":"ADC","scopedBy":"epg"},"children":[{"vnsCfgRelInst":{"attributes":{"cardinality":"unspecified","key":"PoolRel","locked":"no","mandatory":"no","name":"PoolRel","targetName":"LTM/Pool1"}}}]}},{"fvRsProv":{"attributes":{"matchT":"AtleastOne","prio":"unspecified","tnVzBrCPName":"T16Web2ArmContract"}}},{"fvRsProv":{"attributes":{"matchT":"AtleastOne","prio":"unspecified","tnVzBrCPName":"Test"
}}},{"fvRsProv":{"attributes":{"matchT":"AtleastOne","prio":"unspecified","tnVzBrCPName":"Test2"}}},{"fvRsProv":{"attributes":{"matchT":"AtleastOne","prio":"unspecified","tnVzBrCPName":"maybe1armdynamic"}}},{"fvRsProv":{"attributes":{"matchT":"AtleastOne","prio":"unspecified","tnVzBrCPName":"T16-AllowSSH"}}},{"fvRsProv":{"attributes":{"matchT":"AtleastOne","prio":"unspecified","tnVzBrCPName":"T16-AllowPing"}}}]}}]}},{"vnsLDevVip":{"attributes":{"contextAware":"single-Context","devtype":"VIRTUAL","funcType":"GoTo","mode":"legacy-Mode","name":"t6f5"},"children":[{"vnsRsMDevAtt":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1"}}},{"vnsCCred":{"attributes":{"name":"username","value":"admin"}}},{"vnsCCredSecret":{"attributes":{"name":"password"}}},{"vnsRsALDevToDomP":{"attributes":{"tDn":"uni/vmmp-VMware/dom-T16-vCenter"}}},{"vnsCMgmt":{"attributes":{"host":"10.29.103.176","name":"","port":"443"}}},{"vnsCDev":{"attributes":{"devCtxLbl":"ADC2","name":"t6f5_Device_2","vcenterName":"T16-VCSA","vmName":"BIG-IP VE 11.6.0.0.0.401-02"},"children":[{"vnsCCred":{"attributes":{"name":"username","value":"admin"}}},{"vnsCCredSecret":{"attributes":{"name":"password"}}},{"vnsCMgmt":{"attributes":{"host":"10.29.103.226","name":"","port":"443"}}},{"vnsCIf":{"attributes":{"name":"1_1","vnicName":"Network adapter 2"}}},{"vnsCIf":{"attributes":{"name":"1_2","vnicName":"Network adapter 
3"}}},{"vnsDevFolder":{"attributes":{"key":"HostConfig","name":"HostConfig"},"children":[{"vnsDevParam":{"attributes":{"key":"NTPServer","name":"NTPServer","value":"10.29.103.15"}}},{"vnsDevParam":{"attributes":{"key":"HostName","name":"HostName","value":"t6f502.lumoscloud.com"}}}]}},{"vnsDevFolder":{"attributes":{"key":"HighAvailability","name":"HighAvailability"},"children":[{"vnsDevParam":{"attributes":{"key":"SelfIPAddress","name":"SelfIPAddress","value":"3.3.3.2"}}},{"vnsDevParam":{"attributes":{"key":"Interface","name":"Interface","value":"1_3"}}},{"vnsDevParam":{"attributes":{"key":"VLAN","name":"VLAN","value":"3000"}}},{"vnsDevParam":{"attributes":{"key":"SelfIPNetmask","name":"SelfIPNetmask","value":"255.255.255.0"}}}]}}]}},{"vnsCDev":{"attributes":{"devCtxLbl":"ADC1","name":"t6f5_Device_1","vcenterName":"T16-VCSA","vmName":"BIG-IP VE 11.6.0.0.0.401-01"},"children":[{"vnsCCred":{"attributes":{"name":"username","value":"admin"}}},{"vnsCCredSecret":{"attributes":{"name":"password"}}},{"vnsCMgmt":{"attributes":{"host":"10.29.103.176","name":"","port":"443"}}},{"vnsCIf":{"attributes":{"name":"1_1","vnicName":"Network adapter 2"}}},{"vnsCIf":{"attributes":{"name":"1_2","vnicName":"Network adapter 
3"}}},{"vnsDevFolder":{"attributes":{"key":"HostConfig","name":"HostConfig"},"children":[{"vnsDevParam":{"attributes":{"key":"NTPServer","name":"NTPServer","value":"10.29.103.15"}}},{"vnsDevParam":{"attributes":{"key":"HostName","name":"HostName","value":"t6f501.lumoscloud.com"}}}]}},{"vnsDevFolder":{"attributes":{"key":"HighAvailability","name":"HighAvailability"},"children":[{"vnsDevParam":{"attributes":{"key":"SelfIPAddress","name":"SelfIPAddress","value":"3.3.3.1"}}},{"vnsDevParam":{"attributes":{"key":"Interface","name":"Interface","value":"1_3"}}},{"vnsDevParam":{"attributes":{"key":"VLAN","name":"VLAN","value":"3000"}}},{"vnsDevParam":{"attributes":{"key":"SelfIPNetmask","name":"SelfIPNetmask","value":"255.255.255.0"}}}]}}]}},{"vnsLIf":{"attributes":{"name":"external"},"children":[{"vnsRsMetaIf":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mIfLbl-external"}}},{"vnsRsCIfAtt":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/cDev-t6f5_Device_2/cIf-[1_1]"}}},{"vnsRsCIfAtt":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/cDev-t6f5_Device_1/cIf-[1_1]"}}}]}},{"vnsLIf":{"attributes":{"name":"internal"},"children":[{"vnsRsMetaIf":{"attributes":{"tDn":"uni/infra/mDev-F5-BIGIP-1.1.1/mIfLbl-internal"}}},{"vnsRsCIfAtt":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/cDev-t6f5_Device_2/cIf-[1_2]"}}},{"vnsRsCIfAtt":{"attributes":{"tDn":"uni/tn-TenanT16/lDevVip-t6f5/cDev-t6f5_Device_1/cIf-[1_2]"}}}]}}]}}]}}
# Push the assembled tenant configuration (jsondata, built above) to the
# APIC REST endpoint, reusing the authenticated session cookies.
# NOTE(review): the URL ends in '.json.json' -- looks like an accidental
# duplication of the suffix; confirm against the APIC REST API before use.
r = s.post('https://{0}/api/node/mo/uni/tn-Tenant1.json.json'.format(apic), cookies=cookies, data=json.dumps(jsondata), verify=False)
# Python 2 print statements: echo the HTTP status and response body.
print r.status_code
print r.text
# That's it - you're done!
| [
"student@lumoscloud.com"
] | student@lumoscloud.com |
a3cee10d2c3fa7bcdffc20880585935069d651fc | 4910c0f3d03935fc8ee03f1e9dc20dfdb2c7c04b | /Codigos estudiantes por lenguaje/PY/Bryann Valderrama/Algoritmos de Busqueda/DifferencePairSearch.py | 6c885a6c2d463ae002f1c7a54ec826b5b9e9f0a1 | [] | no_license | roca12/gpccodes | ab15eeedc0cadc0735651262887b44f1c2e65b93 | aa034a3014c6fb879ec5392c51f9714bdc5b50c2 | refs/heads/master | 2023-02-01T13:49:27.563662 | 2023-01-19T22:50:58 | 2023-01-19T22:50:58 | 270,723,328 | 3 | 5 | null | null | null | null | UTF-8 | Python | false | false | 775 | py | '''Dado un arreglo y un numero n, buscar si existe un par cuya
diferencia es n.
- Complejidad Tiempo: O (n logn)
'''
from sys import stdin, stdout
# Short aliases for fast, competitive-programming-style console I/O.
rl = stdin.readline
wr = stdout.write
def findPair(arr, n):
    """Print every pair in ``arr`` whose difference equals ``n``.

    Uses a two-pointer scan, which assumes ``arr`` is sorted in ascending
    order (the sample input "1 2 3 4 5 6 7" is) -- TODO confirm callers
    always pass sorted data.

    Writes one 'Par encontrado' line per pair via the module-level ``wr``
    alias, or a single 'Par no encontrado' line when nothing matches.

    Returns:
        bool: True when at least one pair was found, False otherwise.
        (Fixed: the original printed 'Par no encontrado' unconditionally,
        even after reporting matches, and always returned False.)
    """
    size = len(arr)
    i, j = 0, 1
    found = False
    while i < size and j < size:
        if i != j and arr[j] - arr[i] == n:
            wr(f'Par encontrado: {arr[i]} - {arr[j]}\n')
            found = True
            # return True  # uncomment to stop at the first pair
            i += 1  # keep scanning so every pair is reported
            j += 1
        elif arr[j] - arr[i] < n:
            j += 1  # difference too small: grow it by advancing j
        else:
            i += 1  # difference too large: shrink it by advancing i
    if not found:
        wr('Par no encontrado\n')
    return found
# Driver: read one line of space-separated integers (the array) and one
# line holding n, then report every pair whose difference is n.
arr = list(map(int, rl().split())) # 1 2 3 4 5 6 7
n = int(rl()) # 5
findPair(arr, n) # 1 -6 | 2 - 7
| [
"noreply@github.com"
] | roca12.noreply@github.com |
272e900e5d172bdcb323eae8cf7b8e508f4f9db3 | 78a2c1ed432ff68f7ce3a0e21873a1a29f9cb6f7 | /dev/nogipx/python/morris-drafts/main_programm/v04/modules/group_manager/data_types/User.py | c98a6ce14c884f6cc1e4f6c1578ca062fe9ef80f | [] | no_license | nogipx/codevault | e28a4d7090df07307faa0adcb7fa5c0ee06e656a | 1621a32dae1d401466f7452be04bb65a0359c947 | refs/heads/master | 2020-03-10T07:18:50.606112 | 2019-10-23T21:18:13 | 2019-10-23T21:18:13 | 129,260,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,069 | py | class User:
def __init__(self, **kwargs):
for key in kwargs:
self.__setattr__(key, kwargs.get(key))
self.session_thread = None
def get_name(self):
return "{} {}".format(self.first_name, self.last_name)
def get_info(self):
return self
def configure(self, **kwargs):
for arg in kwargs:
if not self.__dict__.get(arg):
self.__setattr__(arg, kwargs.get(arg))
def parse_configure(self, config):
config = config.strip('[]{}').split(',')
conf = {}
for setting in config:
setting = setting.split(':')
key = setting[0].strip('{}{}{}'.format("'", '"', ' '))
value = setting[1].strip('{}{}{}'.format("'", '"', ' '))
conf.update({key: value})
self.configure(**conf)
def __str__(self):
user = '-' * 20 + '\n'
for key in self.__dict__:
user += '{} = {}'.format(key, self.__dict__.get(key))
user += '\n'
user += '-' * 20 + '\n'
return user
| [
"mtkn@tutanota.com"
] | mtkn@tutanota.com |
105a9ebf514dbcc1174edad6acb38c9311af38e6 | a36df6357fce551e791c729a8e08a1de231c372a | /train.py | 2a474e50cfcba84e00e5f245d377aaa59a1e4dd5 | [] | no_license | lisadunlap/rl_rationalizations | dc8bbfb3578cac075c003428da30457aa15988e4 | 966c00890aa87f90339fe4e478f7adbab65ffcf4 | refs/heads/master | 2020-12-24T00:19:51.029164 | 2020-02-06T16:38:12 | 2020-02-06T16:38:12 | 237,321,430 | 0 | 0 | null | 2020-02-07T06:50:42 | 2020-01-30T22:52:22 | Jupyter Notebook | UTF-8 | Python | false | false | 3,293 | py | import torch
import torch.nn.functional as F
import torch.optim as optim
from envs import create_atari_env
from model import ActorCritic
def ensure_shared_grads(model, shared_model):
    """Point the shared model's gradients at this worker's local gradients.

    If the shared model already has a gradient on any parameter (some worker
    got there first), leave everything untouched and bail out immediately.
    Assignment goes through the private ``_grad`` slot -- NOTE(review):
    presumably so the shared parameter aliases the local gradient tensor;
    confirm against the torch version in use.
    """
    local_params = model.parameters()
    shared_params = shared_model.parameters()
    for local_param, shared_param in zip(local_params, shared_params):
        if shared_param.grad is not None:
            # Shared gradients already populated; nothing to do.
            return
        shared_param._grad = local_param.grad
def train(rank, args, shared_model, counter, lock, optimizer=None):
    """Run one asynchronous actor-critic worker: roll out, score, push grads.

    Args:
        rank: index of this worker; offsets the RNG seed so workers differ.
        args: hyper-parameter namespace (seed, env_name, lr, num_steps,
            max_episode_length, gamma, gae_lambda, entropy_coef,
            value_loss_coef, max_grad_norm).
        shared_model: globally shared ActorCritic updated by all workers.
        counter: shared step counter, incremented under ``lock``.
        lock: lock guarding ``counter``.
        optimizer: optional optimizer; when None a local Adam over the
            *shared* model's parameters is created.

    Never returns: loops forever collecting rollouts and applying updates.
    """
    # Per-worker seeding so each process explores a different trajectory.
    torch.manual_seed(args.seed + rank)

    env = create_atari_env(args.env_name)
    env.seed(args.seed + rank)

    model = ActorCritic(env.observation_space.shape[0], env.action_space)

    if optimizer is None:
        optimizer = optim.Adam(shared_model.parameters(), lr=args.lr)

    model.train()

    state = env.reset()
    state = torch.from_numpy(state)
    done = True

    episode_length = 0
    while True:
        # Sync with the shared model
        model.load_state_dict(shared_model.state_dict())
        if done:
            # Fresh episode: zero the recurrent state (hx/cx, width 256 --
            # presumably the model's LSTM; confirm in ActorCritic).
            cx = torch.zeros(1, 256)
            hx = torch.zeros(1, 256)
        else:
            # Same episode, but cut the autograd graph at the rollout edge.
            cx = cx.detach()
            hx = hx.detach()

        values = []
        log_probs = []
        rewards = []
        entropies = []

        # Collect a rollout of up to num_steps transitions.
        for step in range(args.num_steps):
            episode_length += 1
            value, logit, (hx, cx) = model((state.unsqueeze(0),
                                            (hx, cx)))
            prob = F.softmax(logit, dim=-1)
            log_prob = F.log_softmax(logit, dim=-1)
            # Policy entropy, later added to the loss as an exploration bonus.
            entropy = -(log_prob * prob).sum(1, keepdim=True)
            entropies.append(entropy)

            # Sample an action; detach so the sample is a constant in the graph.
            action = prob.multinomial(num_samples=1).detach()
            log_prob = log_prob.gather(1, action)

            state, reward, done, _ = env.step(action.numpy())
            # Force episode end at the configured length cap.
            done = done or episode_length >= args.max_episode_length
            # Clip rewards to [-1, 1].
            reward = max(min(reward, 1), -1)

            with lock:
                counter.value += 1

            if done:
                episode_length = 0
                state = env.reset()

            state = torch.from_numpy(state)
            values.append(value)
            log_probs.append(log_prob)
            rewards.append(reward)

            if done:
                break

        # Bootstrap the return with the critic's estimate unless the episode ended.
        R = torch.zeros(1, 1)
        if not done:
            value, _, _ = model((state.unsqueeze(0), (hx, cx)))
            R = value.detach()

        values.append(R)
        policy_loss = 0
        value_loss = 0
        gae = torch.zeros(1, 1)
        # Walk the rollout backwards, accumulating the discounted return R
        # and the generalized advantage estimate.
        for i in reversed(range(len(rewards))):
            R = args.gamma * R + rewards[i]
            advantage = R - values[i]
            value_loss = value_loss + 0.5 * advantage.pow(2)

            # Generalized Advantage Estimation
            delta_t = rewards[i] + args.gamma * \
                values[i + 1] - values[i]
            gae = gae * args.gamma * args.gae_lambda + delta_t

            policy_loss = policy_loss - \
                log_probs[i] * gae.detach() - args.entropy_coef * entropies[i]

        optimizer.zero_grad()

        (policy_loss + args.value_loss_coef * value_loss).backward()
        # Guard against exploding gradients before publishing them.
        torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)

        # Hand the local gradients to the shared model, then step.
        ensure_shared_grads(model, shared_model)
        optimizer.step()
"lisabdunlap@berkeley.edu"
] | lisabdunlap@berkeley.edu |
42b324800f137db7574882d411956b2974e5288c | 725d8e6e1f1bd2a8e906085dde9b99eb10aaa4a7 | /Util.py | 1926c7e0cdee2f0879d28a4b718ea336e717c61f | [] | no_license | fabriciop1/wavelets_pibic | c669e2b7e8988645ab1ab10b1b34779d394a7567 | e3c02d48dc778f979b1423eb13429ae2df58be4b | refs/heads/master | 2021-01-12T05:25:30.016713 | 2017-01-03T14:54:41 | 2017-01-03T14:54:41 | 77,926,247 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | # -*- coding: cp1252 -*-
import numpy as np
import math
class Util:
    """Evaluation helpers for classifier experiments (currently 1-NN only)."""

    def getAccuracy(self, testSet, predictions):
        """Return the percentage of samples whose prediction matches the label.

        The true label lives in the last column of ``testSet`` (indexed as
        ``[row, -1]``, so a 2-D numpy-style array is expected -- TODO confirm
        with callers); ``predictions`` is a parallel sequence of predicted
        labels.  The raw hit count is also kept on ``self.acertos``.
        """
        self.acertos = sum(
            1 for row in range(len(testSet))
            if testSet[row, -1] == predictions[row]
        )
        return (self.acertos / float(len(testSet))) * 100.0
| [
"noreply@github.com"
] | fabriciop1.noreply@github.com |
fb88d2050af08871cb648c62636bdca364d084a4 | 973bc8d8fb6beb5a115b60b2b97a898baaed11fb | /python_flask_app/python_flask__app/app/config/mongodb_connection/database_connection.py | dae7695544abe8072a329c8419832969d807e73e | [] | no_license | fjbernardez/dataSec | 343f901f14a079dacb8e9fdcaeb030f49878d448 | 573274095a99454286315416a976bc258adb80ea | refs/heads/main | 2023-05-01T15:00:15.789327 | 2021-05-25T01:36:00 | 2021-05-25T01:36:00 | 370,528,626 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | from flask_pymongo import PyMongo
# MongoDB connection: a module-level Flask-PyMongo handle created without an
# app object, so it must be bound later (presumably via
# mongo_connection.init_app(app) in the application factory -- TODO confirm).
mongo_connection = PyMongo()
| [
"bernardez.f@gmail.com"
] | bernardez.f@gmail.com |
804b09be82a5890f8223579c5cca30c08fbd1e24 | 7ae32748fb910d2542e35c57543fc89f98cd2b1d | /tests/runtime/runtime.py | abd1a564bef4b7fae1348ad52b7bc9b326046667 | [
"Apache-2.0"
] | permissive | sanjaymsh/dtfabric | 451c87d987f438fccfbb999079d2f55d01650b68 | 9e216f90b70d8a3074b2125033e0773e3e482355 | refs/heads/master | 2022-12-19T09:13:02.370724 | 2020-09-27T05:11:25 | 2020-09-27T05:11:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,589 | py | # -*- coding: utf-8 -*-
"""Tests for the run-time object."""
from __future__ import unicode_literals
import unittest
from dtfabric.runtime import runtime
from tests import test_lib
class StructureValuesClassFactoryTest(test_lib.BaseTestCase):
  """Structure values class factory tests.

  Exercises the private helpers and the public CreateClass entry point of
  runtime.StructureValuesClassFactory against the 'point3d' definition from
  the shared structure.yaml test data.
  """

  # Tests intentionally poke private helpers (_CreateClassTemplate, ...).
  # pylint: disable=protected-access

  # Skipped automatically when the shared test data file is unavailable.
  @test_lib.skipUnlessHasTestFile(['structure.yaml'])
  def testCreateClassTemplate(self):
    """Tests the _CreateClassTemplate function."""
    definitions_file = self._GetTestFilePath(['structure.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('point3d')
    class_template = runtime.StructureValuesClassFactory._CreateClassTemplate(
        data_type_definition)
    self.assertIsNotNone(class_template)

    # TODO: implement error conditions.

  def testIsIdentifier(self):
    """Tests the _IsIdentifier function."""
    # Accepted: letter/underscore start, alphanumerics afterwards.
    result = runtime.StructureValuesClassFactory._IsIdentifier('valid')
    self.assertTrue(result)

    result = runtime.StructureValuesClassFactory._IsIdentifier('_valid')
    self.assertTrue(result)

    result = runtime.StructureValuesClassFactory._IsIdentifier('valid1')
    self.assertTrue(result)

    # Rejected: empty, leading digit, or a character such as '-'.
    result = runtime.StructureValuesClassFactory._IsIdentifier('')
    self.assertFalse(result)

    result = runtime.StructureValuesClassFactory._IsIdentifier('0invalid')
    self.assertFalse(result)

    result = runtime.StructureValuesClassFactory._IsIdentifier('in-valid')
    self.assertFalse(result)

  def testValidateDataTypeDefinition(self):
    """Tests the _ValidateDataTypeDefinition function."""
    definitions_file = self._GetTestFilePath(['structure.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('point3d')
    # Validation raises on failure; no exception means the definition passed.
    runtime.StructureValuesClassFactory._ValidateDataTypeDefinition(
        data_type_definition)

    # TODO: implement error conditions.

  def testCreateClass(self):
    """Tests the CreateClass function."""
    definitions_file = self._GetTestFilePath(['structure.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('point3d')
    structure_values_class = runtime.StructureValuesClassFactory.CreateClass(
        data_type_definition)
    self.assertIsNotNone(structure_values_class)
if __name__ == '__main__':
unittest.main()
| [
"joachim.metz@gmail.com"
] | joachim.metz@gmail.com |
7b964f61670ccb5db77a15f8f6c355bc59266f51 | e8f88fa5c7ca0263be5958d85a36b855976d4b0f | /LAB_EXAM_QUESTIONS/Solutions/string_apps/string_operation.py | 5f8da9ca2b6b403bbd4ca01c630c4963429c11e9 | [] | no_license | sxb42660/MachineLearning_Fall2019 | 67bb471e79608b17a57ac1fabc9f6de1e455a015 | b256a6961d30918611ecbda6961d5938b1291864 | refs/heads/master | 2022-07-13T10:50:46.646541 | 2020-05-15T18:59:19 | 2020-05-15T18:59:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,143 | py | '''
Question1 :
Write a program to find a longest substring without repeating characters from a given string input from the console.
Sample Input: ‘ababcdxa’
Sample Output: abcdx
'''
class StringOperations:
    """Finds the longest substring without repeating characters."""

    def longest_substring(self, input_string):
        """Print (and return) the longest repeat-free substring of ``input_string``.

        ``char_list`` acts as a sliding window holding the current run of
        distinct characters; when a duplicate arrives, :meth:`clean_list`
        records the run as a candidate and drops everything up to and
        including the earlier occurrence.

        Fixed: the result is now *returned* as well -- previously the method
        always returned None, forcing callers to scrape stdout.
        """
        temp_string = ""
        longest_substring = ""
        char_list = []
        for a in input_string:
            if a in char_list:
                longest_substring = self.clean_list(char_list, a, temp_string, longest_substring)
                char_list.append(a)
            if a not in char_list:
                char_list.append(a)
        # Account for the final window, which never hit a duplicate.
        for a in char_list:
            temp_string += a
        if len(longest_substring) < len(temp_string):
            longest_substring = temp_string
        print(longest_substring)
        return longest_substring

    def clean_list(self, input_list, char, temp_string, longest_substring):
        """Record the current window as a candidate, then trim ``input_list``
        past the earlier occurrence of ``char``.

        Returns the longer of ``longest_substring`` and the candidate built
        from ``temp_string`` plus the window's characters.
        """
        for a in input_list:
            temp_string += a
        # Drop everything up to and including the previous occurrence.
        for i in range(input_list.index(char) + 1):
            del input_list[0]
        if len(longest_substring) < len(temp_string):
            longest_substring = temp_string
        return longest_substring
| [
"sivakumar.umkc.fall2019@gmail.com"
] | sivakumar.umkc.fall2019@gmail.com |
44d9e96b59bac03ee55adecaccc7b5ed8e844b24 | be91b231d3fd8c340c2d083db5af626da42ee0f1 | /dsc/migrations/0042_auto_20170117_1159.py | 7be1afd56c0c9ae4be05367a3a7fd729e758d66e | [] | no_license | tango-controls/dsc | 0966468447b74bf3ec56a4763e40fe3897b3f325 | 961dff5d21663491a39a7dc0e49b1626d74218e1 | refs/heads/master | 2022-09-09T03:00:22.208646 | 2020-12-30T12:28:19 | 2020-12-30T12:28:19 | 95,218,117 | 3 | 6 | null | 2020-12-30T12:28:20 | 2017-06-23T12:26:21 | Python | UTF-8 | Python | false | false | 650 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Django migration (presumably auto-generated): alters the
    DeviceServerAddModel.ds_status field to the choice set listed below."""

    dependencies = [
        ('dsc', '0041_deviceserveraddmodel_repository_tag'),
    ]

    operations = [
        migrations.AlterField(
            model_name='deviceserveraddmodel',
            name='ds_status',
            # NOTE(review): bytes defaults/choices (b'new', ...) suggest this
            # file was generated under Python 2 -- worth confirming behaviour
            # on a Python 3 upgrade.
            field=models.CharField(default=b'new', max_length=20, verbose_name=b'Information status', choices=[(b'new', b'New'), (b'verified', b'Verified'), (b'updated', b'Updated'), (b'certified', b'Certified'), (b'archived', b'Archived'), (b'deleted', b'Deleted')]),
        ),
    ]
| [
"piotr.goryl@3-controls.com"
] | piotr.goryl@3-controls.com |
c35db6386f83d6038856651b1a5d0577fc8afc98 | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-dcs/huaweicloudsdkdcs/v2/model/list_bigkey_scan_tasks_response.py | a6bf9b01d77cf1b7325ac90267c21449802d43a3 | [
"Apache-2.0"
] | permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,639 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListBigkeyScanTasksResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'instance_id': 'str',
'count': 'int',
'records': 'list[RecordsResponse]'
}
attribute_map = {
'instance_id': 'instance_id',
'count': 'count',
'records': 'records'
}
def __init__(self, instance_id=None, count=None, records=None):
"""ListBigkeyScanTasksResponse - a model defined in huaweicloud sdk"""
super(ListBigkeyScanTasksResponse, self).__init__()
self._instance_id = None
self._count = None
self._records = None
self.discriminator = None
if instance_id is not None:
self.instance_id = instance_id
if count is not None:
self.count = count
if records is not None:
self.records = records
@property
def instance_id(self):
"""Gets the instance_id of this ListBigkeyScanTasksResponse.
实例ID
:return: The instance_id of this ListBigkeyScanTasksResponse.
:rtype: str
"""
return self._instance_id
@instance_id.setter
def instance_id(self, instance_id):
"""Sets the instance_id of this ListBigkeyScanTasksResponse.
实例ID
:param instance_id: The instance_id of this ListBigkeyScanTasksResponse.
:type: str
"""
self._instance_id = instance_id
@property
def count(self):
"""Gets the count of this ListBigkeyScanTasksResponse.
总数
:return: The count of this ListBigkeyScanTasksResponse.
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this ListBigkeyScanTasksResponse.
总数
:param count: The count of this ListBigkeyScanTasksResponse.
:type: int
"""
self._count = count
@property
def records(self):
"""Gets the records of this ListBigkeyScanTasksResponse.
大key分析记录列表
:return: The records of this ListBigkeyScanTasksResponse.
:rtype: list[RecordsResponse]
"""
return self._records
@records.setter
def records(self, records):
"""Sets the records of this ListBigkeyScanTasksResponse.
大key分析记录列表
:param records: The records of this ListBigkeyScanTasksResponse.
:type: list[RecordsResponse]
"""
self._records = records
def to_dict(self):
    """Returns the model properties as a dict.

    Recursively serializes nested models: every attribute listed in
    ``openapi_types`` is read from the instance and, when it (or its
    list/dict members) exposes ``to_dict``, converted in turn.
    Attributes named in ``sensitive_list`` are masked as "****".
    """
    result = {}
    for attr, _ in six.iteritems(self.openapi_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            # Convert model elements; plain values pass through unchanged.
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            # Convert model values inside dicts while keeping the keys.
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            if attr in self.sensitive_list:
                result[attr] = "****"
            else:
                result[attr] = value
    return result
def to_str(self):
    """Returns the string representation of the model (JSON)."""
    import simplejson as json
    if six.PY2:
        # Python 2 only: force UTF-8 as the default encoding so that
        # non-ASCII field values serialize without UnicodeDecodeError.
        import sys
        reload(sys)
        sys.setdefaultencoding("utf-8")
    return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
    """For `print` — delegate to the JSON representation."""
    return self.to_str()

def __eq__(self, other):
    """Returns true if both objects are equal."""
    if not isinstance(other, ListBigkeyScanTasksResponse):
        return False
    # Equal when every attribute (the private backing fields) matches.
    return self.__dict__ == other.__dict__

def __ne__(self, other):
    """Returns true if both objects are not equal."""
    return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
28024678822c3c384b69ade29d4df52fa9476c8e | bd12128da5e5d52151be22c3bc7fdaef7acd4b3b | /src/indikator/migrations/0007_indikatorplotkonfig_indikatorid.py | 6282d2e2d43aa260372dfe72790001883925e103 | [] | no_license | RBHSMA/TechTrader | a6ed9da0875a902fa26dc51eddc63955485e03e5 | 65465c5ceb6d95f9d333b3399ccd988034b475ba | refs/heads/main | 2023-04-14T21:12:02.970836 | 2021-03-30T23:27:04 | 2021-03-30T23:27:04 | 334,465,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | # Generated by Django 3.1.6 on 2021-03-29 13:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds the ``indikatorId`` integer column
    to ``IndikatorPlotKonfig``, backfilling existing rows with 1."""

    dependencies = [
        ('indikator', '0006_indikatorplotkonfig'),
    ]

    operations = [
        migrations.AddField(
            model_name='indikatorplotkonfig',
            name='indikatorId',
            # default=1 exists only to populate pre-existing rows;
            # preserve_default=False drops it from the schema afterwards.
            field=models.IntegerField(default=1),
            preserve_default=False,
        ),
    ]
| [
"1710317@stud.hs-mannheim.de"
] | 1710317@stud.hs-mannheim.de |
def34686da19a1a290c023a270663c50243cc2df | 5f8ac36e9b716e2908f3187c727d9e4a3d661816 | /Emmanuel/Emmanuel.py | 6ad3a42e543a5d357e6c7ceda1e7a2f9958cd75c | [] | no_license | geediegram/parsel_tongue | bad2eb950bf7ce713a01a4c67b39aebda05811e9 | a6cbe859c175676bbda138a30593d90327ef6c4a | refs/heads/master | 2023-08-22T13:48:15.523463 | 2021-09-30T14:01:26 | 2021-09-30T14:01:26 | 397,195,882 | 0 | 0 | null | 2021-08-17T13:59:57 | 2021-08-17T09:44:21 | Python | UTF-8 | Python | false | false | 18 | py | print("Emmnauelt") | [
"jospag4christ@yahoo.com"
] | jospag4christ@yahoo.com |
2991fb12f691bcef8ca69850744936449667f7c6 | cdfde3d248e7bc217a78cb87e214b12e5f55ccf2 | /Hypatheon-1.2/client/client_messages.py | 5ec5c6038858740054a3d19dcaa4de83c6ba9748 | [
"LicenseRef-scancode-us-govt-public-domain",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | TCNorris/pvslib | 0af3efce7f58bd5d943c680602913fe012ca457b | e58918cee86853d965797d24f270db6cb63736fe | refs/heads/master | 2020-11-25T01:36:21.941251 | 2020-08-06T05:45:32 | 2020-08-06T05:45:32 | 285,403,040 | 1 | 0 | null | 2020-08-05T21:00:19 | 2020-08-05T21:00:18 | null | UTF-8 | Python | false | false | 13,074 | py |
# Copyright (c) 2012 United States Government as represented by
# the National Aeronautics and Space Administration. No copyright
# is claimed in the United States under Title 17, U.S.Code. All Other
# Rights Reserved.
#
# The software in this package has been released as open-source software
# under the NASA Open Source Agreement. See the accompanying file
# NASA_Open_Source_Agreement.pdf for a full description of the terms.
display_messages = \
{
'edit_prefs' :
"Edit the preference values below, then select the Save button. "
"New preferences will take effect the next time you "
"start Hypatheon. Select Restore Defaults if you need to "
"recover the original values, then select Save." ,
'internal_error' :
"Unfortunately, an internal error has been detected. "
"If you would be kind enough to report the problem, "
"the developers can provide corrections in a future version. "
"If this event is followed by unusual behavior, "
"it would be wise to "
"restart Hypatheon before proceeding with your work. "
"Sorry for any inconvenience." ,
'problem_reporting' :
"Problem reporting procedure: \n"
"\n - Contact Ben Di Vito (b.divito@nasa.gov).\n"
"\n - Provide a description of the events preceding the mishap.\n "
"\n - If relevant, also send this auto-generated error report file: " ,
'reporting_thanks' :
"Thank you very much for taking the time to report this problem." ,
'index_libraries' :
"Hypatheon can index any of the PVS libraries listed below. "
"Select them individually or in sets using the buttons provided. "
"'New' selects libraries not yet indexed "
"in the database, while 'Indexed' selects the opposite (shown in blue). "
"After library selection, you may begin a background "
"indexing job, which reports its progress in a separate window. "
"For moderate to large selections, this operation can take "
"many minutes to complete." ,
'lib_convention_reminder' :
"Reminder: Please be sure your libraries adhere to the conventions "
"described in Help item PVS Library Conventions.",
'library_selection_note' :
"Note: Libraries shown in red have been indexed already "
"in another database. Those shown in orange are found on the "
"exclude list (preference setting); "
"these must be selected explicitly to be indexed.",
'index_grouping' :
"Hypatheon can index a sequence of PVS libraries listed in a suitable "
"manifest file (names having the form *.all), provided all libraries "
"are located in the same directory. Library names are extracted as "
"the first identifier found on each line beginning with an alphabetic "
"character. Indexing will be conducted in the manifest-listed order. "
"After manifest file selection, you may begin a background "
"indexing job, which reports its progress in a separate window. "
"For moderate to large selections, this operation can take "
"many minutes to complete." ,
'indexing_in_progress' :
"An indexing process for the selected libraries has been launched "
"and will run in the background until completed. "
"Progress will be reported below, updated every second." ,
'schedule_indexing' :
"Periodic (re)indexing can be performed in batch mode using the "
"'cron' service. If you would like to update your index regularly "
"(e.g., nightly), simply add a scheduling entry to your cron table, "
"which typically can be done using a command such as 'crontab -e'."
"\n\n"
"A candidate crontab entry was generated from your library selection. "
"The generated entry shown below will cause "
"cron to run the indexer daily at 3:15 AM. "
"Copy the table entry to the "
"clipboard using the Copy button below, then paste it into "
"your crontab using a text editor. If desired, edit further "
"to modify the schedule or library list. " ,
'indexing_command' :
"Alternatively, you may copy only the command portion of the "
"crontab entry above for use directly on the command line." ,
'crontab_added' :
"A crontab entry has been created. You should check it by issuing "
"the shell command 'crontab -l'. You may edit this entry anytime "
"using the command 'crontab -e' to change the schedule or to add "
"or subtract profiles." ,
'delete_libraries' :
"To delete any of the previously indexed PVS libraries, "
"select from those listed below. Data in the remaining libraries "
"will not be impacted. This process should take "
"only a few seconds." ,
'vacuum_database' :
"You can choose to 'vacuum' the relevant databases after the libraries "
"have been deleted. This action will reclaim free space and compact the "
"database files, requiring some additional time to complete." ,
'lib_deps_refresh' :
"When the Display button is selected, "
"the library dependency file will be "
"displayed in a separate window, provided it exists. "
"The report can be created or refreshed by selecting the Refresh button, "
"which might require a few seconds to scan all of the PVS files." ,
'clear_database' :
"Clearing a database will remove all of its stored "
"information on PVS libraries, although the database itself will remain. "
"Deleting a database will destroy the library information as well as "
"the database file. "
"Before invoking these actions, make sure the database file "
"is not needed by another client instance "
"or an active indexing process." ,
'delete_superseded' :
"Database files for the following collection names and version strings "
"are older than the currently active database files:" ,
'create_database' :
"Creating a database will add a new database file whose name "
"is derived from the collection name entered below. Collection names "
"must be alphanumeric strings "
"(no embedded blanks or punctuation characters). "
"An optional version number or string may be given. " ,
'create_database_copy' :
"\n"
"The new database file can be created empty or copied from one of the "
"existing databases listed above. Copying is a useful approach when "
"adding to a fixed set of libraries, such as the PVS core "
"{prelude, bitvectors, finite_sets}. " ,
'create_database_empty' :
"The new database file will be created empty. " ,
'invoke_shell_command_file' :
"Enter a command string below. Include '%s' wherever you want the path "
"name to be inserted. Omitting %s causes the file contents to be "
"piped to the command. " ,
'invoke_shell_command_direc' :
"Enter a command string below. It will be preceded by a 'cd' "
"to the target directory. "
"You may then use '.' to refer to the directory. " ,
'invoke_shell_command_paths' :
"Enter a command string below. Include '%s' wherever you want the list "
"of path names to be inserted. " ,
'edit_path_list' :
"Select path names from either list to move to the other. Both "
"lists will be saved so hidden paths can be retrieved later." ,
'explain_terminal' :
"If menu item Open Terminal Window on Paths is selected, "
"the new terminal will be passed a list of path names as "
"shell parameters. This means they will be available as "
"positional parameters $1, $2, etc., or through the special "
"parameter '$*'. Shell commands such as 'ls -l $*' should be "
"expanded accordingly."
"\n\n"
"If any path name contains the space character, the shell "
"will likely parse the path as two or more words. If this happens, "
"using the 'eval' command should correct the problem. For "
"example, enter the command as 'eval ls -l $*'. " ,
'invoke_prover_command_custom' :
'Edit the prover command string below to create the desired command. '
'Include "%s" wherever you want the name above to be inserted. Omit '
'double quotes if not needed for the intended prover command.' ,
'invoke_prover_command_full' :
'Edit the name string above to include theory parameters, if needed, '
'or remove the [--] characters. '
'Edit the prover command string below to create the desired command. '
'Include "%s" wherever you want the name above to be inserted. Omit '
'double quotes if not needed for the intended prover command.' ,
### Following not yet implemented
# 'wait_for_import' :
# ('Adding IMPORTING statement',
# "The theory you requested is not currently on the import chain. Please "
# "wait while Hypatheon inserts an IMPORTING statement into your .pvs "
# "file. PVS will be taking commands "
# "from Hypatheon Client during this process. "
# "User input can interfere, so please wait "
# "for the client to notify you when it is safe to resume the proof.") ,
}
dialog_messages = \
{
'confirm_carry_over_plus_db' :
('Hypatheon Start-up',
"Current version %s of Hypatheon is new. "
"Previous version %s was also detected. "
"Would you like to carry over files for the previous databases "
"and preferences?" ) ,
'confirm_carry_over_no_db' :
('Hypatheon Start-up',
"Current version %s of Hypatheon is new. "
"Previous version %s was also detected. "
"Hypatheon's database schema has changed since that version, "
"so previous database files cannot be carried over. "
"You will need to recreate any private databases and re-index "
"the relevant libraries. "
"Would you like to carry over the previous preferences? "
"" ) ,
'confirm_indexing_cancel' :
('Indexing files',
"Cancellation will terminate the indexing process currently underway "
"and abort the remainder of the indexing job. "
"Do you wish to proceed with cancellation?" ) ,
'confirm_db_removal' :
('Database removal',
"You have asked to remove all information in library collection '%s'. "
"Be sure this is what you intend before proceeding. "
"This operation cannot be undone. You will need to reindex "
"the libraries to recreate this data.") , ## % collec_name
'confirm_ss_removal' :
('Database removal',
"You have asked to delete %s superseded database files. "
"Be sure this is what you intend before proceeding. "
"This operation cannot be undone.") , ## % num_files
'ask_restart_client' :
('Restart client process',
"Restarting the Hypatheon Client process will destroy the current "
"process instance along with most query information displayed in the "
"client's window. A new client process will be started in its place, "
"then the latest query will be rerun. "
"Nothing else associated with the PVS Emacs instance will be changed "
"by this action.") ,
}
error_messages = \
{
'db_server_down' :
('Hypatheon Client start-up',
"A connection to the Hypatheon database server was not established. "
"Try starting Hypatheon Client again." ) ,
'missing_font_family' :
('Hypatheon Client start-up',
"None of the font families from the group %s could be found. "
"There might be something wrong with your "
"Tk installation. ") , ## % family_list
'bad_preferences' :
('Hypatheon Client preferences',
"While reading the preferences file, an error was detected "
"for preference value '%s'. "
"Select 'Preferences' from the Edit menu to correct "
"the problem. In the meantime, "
"a default value will be used.") , ## % preference_load_error
'carry_over_prefs_conflict' :
('Preference carry-over conflict',
"While attemtping to carry over the user-preferences file from "
"the previous version, a conflict was detected. This means "
"that a value you had previously changed was also changed in "
"the new preferences file. You will need to reenter your "
"preference settings using the Edit menu."),
'lib_deletion_error' :
('Library deletion',
"An error was encountered while deleting the selected libraries. "
"View the Indexing Error Log or see file "
"<hypatheon>/log/index_err_log for details.") ,
}
# Warning pop-ups: maps a message key to a (window title, body text) pair.
warning_messages = \
{
  'not_in_checker' :
    ('Submit prover command',
     "No proof is currently in progress.") ,

  'database_error' :
    ('Database access',
     "An error occurred while accessing the database. "
     "This could be benign, possibly caused by a temporary database lock. "
     "Try again and see if it clears up. "
     "If the error recurs, it is probably due to a bug "
     "and should be reported.") ,
}
| [
"cesaramh@gmail.com"
] | cesaramh@gmail.com |
2829f3d66dbe204a86b0b5d8275b5ef1bf8f68ef | 78dad420f791ac163cfceedcb9ddcfe48f0277d4 | /regex_match_vs_search.py | 99abc52ecec201b13d0de96f690e20397a159614 | [] | no_license | rahul94jh/Regex_python_prac | 57d366e7ee82314205964c1d1dc7857b06d3eae1 | 9e0440a1f28c6708230524a23b80e78d616b2b9c | refs/heads/master | 2022-08-30T06:32:46.353165 | 2020-06-01T07:29:41 | 2020-06-01T07:29:41 | 266,967,246 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,588 | py | import re
def match_pattern(text, patterns):
    """Match *patterns* at the beginning of *text*.

    :param text: string to test.
    :param patterns: regular expression (string or compiled pattern).
    :return: the ``re.Match`` object on success, otherwise the string
        ``"Not found!"``.
    """
    # Run the regex once instead of twice (the original matched, threw the
    # result away, then matched again just to return it).
    result = re.match(patterns, text)
    if result:
        return result
    return "Not found!"
def find_pattern(text, patterns):
    """Search for *patterns* anywhere in *text*.

    :param text: string to scan.
    :param patterns: regular expression (string or compiled pattern).
    :return: the ``re.Match`` object on success, otherwise the string
        ``"No Match Found!"``.
    """
    # Run the regex once instead of twice (the original searched, discarded
    # the result, then searched again just to return it).
    result = re.search(patterns, text)
    if result:
        return result
    return "No Match Found!"
print(find_pattern("aabbcc", "b+")) # match bb
print(match_pattern("aabbcc", "b+")) # no match
line = "Cats are smarter than dogs"
searchObj = re.search(r"(.*) are (.*?) .*", line, re.M | re.I)
if searchObj:
print("searchObj.group() : ", searchObj.group())
print("searchObj.group(1) : ", searchObj.group(1))
print("searchObj.group(2) : ", searchObj.group(2))
else:
print("Nothing found!!")
# Demonstrate re.match with groups on the same sentence.
line = "Cats are smarter than dogs"
matchObj = re.match(r"(.*) are (.*?) .*", line, re.M | re.I)
# Bug fix: the guard previously tested the unrelated `searchObj` left over
# from the search demo above; test the object we just created instead.
if matchObj:
    print("matchObj.group() : ", matchObj.group())
    print("matchObj.group(1) : ", matchObj.group(1))
    print("matchObj.group(2) : ", matchObj.group(2))
else:
    print("Nothing found!!")
"""Python offers two different primitive operations based on regular expressions: match checks for a match
only at the beginning of the string, while search checks for a match anywhere in the string"""
line = "Cats are smarter than dogs"
matchObj = re.match(r"dogs", line, re.M | re.I)
if matchObj:
print("match --> matchObj.group() : ", matchObj.group())
else:
print("No match!!")
searchObj = re.search(r"dogs", line, re.M | re.I)
if searchObj:
print("search --> searchObj.group() : ", searchObj.group())
else:
print("Nothing found!!")
| [
"rahul1994jh@gmail.com"
] | rahul1994jh@gmail.com |
e615392a194d413bc82cfe041ef4879dba4681ca | a5eb4c49ce2d7a2e931f6517e4168d40d29c2b6d | /9/part1.py | cd629f77520d594fe4b5e642f5a43da71fcf98ad | [] | no_license | saldixon75/advent-of-code-2020 | 3a196894aaf9437168db17635455e6b3738a01c5 | a43bc0659c3a3b215b39313febff948d21d7ef63 | refs/heads/master | 2023-02-05T23:41:45.257005 | 2021-01-02T23:31:06 | 2021-01-02T23:31:06 | 320,781,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 768 | py | import sys;
import re;
# Load the puzzle input, one number per line.
with open("input.txt") as f:
    lines = f.read().splitlines();

# Length of the sliding window (the XMAS "preamble").
buffer_size = 25;
def is_valid(num, buffer):
    """Return True if *num* is the sum of two distinct values in *buffer*.

    :param num: the target sum.
    :param buffer: any iterable of ints (the sliding window).

    Improvements over the original:
    - uses the length of the window itself instead of the module-level
      ``buffer_size`` global (callers always pass exactly ``buffer_size``
      values, so behaviour is unchanged);
    - materialises *buffer* with ``list()`` so that a Python 3 ``map``
      object (which the caller passes) works — the original indexed the
      iterable directly and would raise TypeError on Python 3;
    - drops the stray ``print("hooray")`` debug output.
    """
    values = list(buffer)
    for i, a in enumerate(values):
        for b in values[i + 1:]:
            # The puzzle requires two *different* numbers.
            if a != b and a + b == num:
                return True
    return False
# Slide a window of the previous `buffer_size` values over the input and
# report the first number that is not a sum of two distinct window members.
for i in range(buffer_size+1,len(lines)):
    num = int(lines[i-1]);
    buffer = map(int, lines[i-buffer_size-1:i-1]);
    # NOTE(review): on Python 3 `map` returns a lazy iterator; materialise
    # with list() if is_valid indexes it — this script appears Py2-era.
    if (not is_valid(num, buffer)):
        print("ROGUE ENTRY FOUND!! i=" + str(i) + " number=" + str(num));
        break;
| [
"sally.dixon@ft.com"
] | sally.dixon@ft.com |
1d2c136d8bc4c0826cad19d6f8be571d357f017f | d5cdff2800b98401722b0385c133e862fee41b07 | /python0613_001.py | 358b29bf218d2dbc4d5da5b5c60fd7c0d0b14131 | [] | no_license | zhangrhc/zhangrh | 5a8d56b107eb47cb7aab52e85ac5b4b0161ae0da | cb5892ce684e301cf270e5105bbd39f4d40ddd75 | refs/heads/master | 2021-01-17T17:45:02.037473 | 2017-07-06T02:38:30 | 2017-07-06T02:38:30 | 95,534,034 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,418 | py | __author__ = 'Administrator'
#encoding:utf-8
from selenium import webdriver
from bs4 import BeautifulSoup
from pandas import DataFrame
import time
#手动添加路径
# Path to the chromedriver binary (added manually, not taken from PATH).
path = "D:\python2.7\Scripts\chromedriver.exe"
driver = webdriver.Chrome(executable_path=path)
url = "https://www.huomao.com/channel/lol"
# Open the channel page.
driver.get(url)
# Click "load more" six times, pausing one second each time so the page
# can fetch and render the next batch of streams.
for i in range (6):
    driver.find_element_by_id("getmore").click()
    time.sleep(1)
# Parse the fully expanded page.
soup = BeautifulSoup(driver.page_source,"html.parser")
page_all = soup.find("div",attrs={"id":"channellist"})
pages = page_all.find_all("div",attrs={"class":"list-smallbox"})
name =[]
title =[]
watching =[]
# Extract title, streamer name and viewer count from each stream box.
for page in pages:
    tag = False
    try:
        this_title = page.find("div",attrs={"class":"title-box"}).find("em").string.strip()
        temp = page.find_all("p")
        this_name = temp[1].find("span").string.strip()
        this_watching = temp[1].find_all("span")[1].string.strip()
        tag = True
        if tag:
            title.append(this_title)
            name.append(this_name)
            watching.append(this_watching)
    except:
        # NOTE(review): the bare except silently drops malformed entries;
        # catching AttributeError/IndexError explicitly would be safer.
        continue
result = DataFrame({
    "主播名":name,
    "节目名":title,
    "在线观看人数":watching
})
# Export to Excel (the file is created if it does not exist).
result.to_excel("E:\\resultLol.xlsx",sheet_name = "Sheet1")
"314714885@qq.com"
] | 314714885@qq.com |
971b6145646fc3cdcc9c52b61c3bfcc2385ff973 | bcc086541d3fcafd51c1e106cb7bb22b48ca13db | /gitdjan/func.py | 38adeac8fc1af92eb7bfbe5433cccb28e5919b92 | [] | no_license | yusufpolat/Gitdjan | 8755f93b7ffcc33c7f88cdc4f437665fa52bd153 | 50725fc74b9a988a7c390b4d449c45f05275196b | refs/heads/master | 2021-01-18T04:02:09.621110 | 2016-06-24T06:35:33 | 2016-06-24T06:35:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | #-*- coding:utf-8 -*-
from re import escape
def name_check(name):
    """Validate *name* and normalise it into a slug.

    A name is valid when it contains only ASCII letters, digits,
    hyphens, underscores and spaces.  Valid names are returned
    lower-cased with spaces converted to hyphens; invalid names
    yield ``False``.  The empty string is valid and returned as "".

    The original implementation round-tripped the string through
    ``re.escape`` to detect disallowed characters; that check is
    version-dependent — on Python >= 3.7 ``re.escape`` no longer
    escapes characters such as ``!``, so invalid names slipped
    through.  An explicit character class makes the rule stable.
    """
    # \Z anchors at the very end (works on both Python 2 and 3).
    if re.match(r"[A-Za-z0-9_\- ]*\Z", name):
        return name.replace(" ", "-").lower()
    return False
| [
"ahmtkotan@gmail.com"
] | ahmtkotan@gmail.com |
719282a375266b3538f83a706dc40b3d8f819f52 | b488c537e1e59838c52ba9cd7dbeba19eca9454d | /src/networks/main.py | 7ff48679db0a94aee697f56cc68bcc33843569ff | [] | no_license | JuneKyu/torch-multi-class-deepsvdd-for-text | 73e61357cbfa34756bf2bbcb03552738cfca2e8a | 54fb48021af7bf7b55b75a3c4160646264447d7c | refs/heads/main | 2023-06-13T23:23:36.329996 | 2021-07-08T08:36:54 | 2021-07-08T08:36:54 | 377,061,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,424 | py | from .mnist_LeNet import MNIST_LeNet, MNIST_LeNet_Autoencoder
from .cifar10_LeNet import CIFAR10_LeNet, CIFAR10_LeNet_Autoencoder
from .cifar10_LeNet_elu import CIFAR10_LeNet_ELU, CIFAR10_LeNet_ELU_Autoencoder
from .news20_LinearNet import News20_LinearNet, News20_LinearNet_Autoencoder
def build_network(net_name):
    """Instantiate and return the encoder network registered under *net_name*.

    Raises AssertionError when *net_name* is not one of the implemented
    network identifiers.
    """
    implemented_networks = ('mnist_LeNet', 'cifar10_LeNet', 'cifar10_LeNet_ELU', 'news20_LinearNet')
    assert net_name in implemented_networks

    # Dispatch table instead of a chain of if-statements; the assert above
    # guarantees the lookup cannot fail.
    constructors = {
        'mnist_LeNet': MNIST_LeNet,
        'cifar10_LeNet': CIFAR10_LeNet,
        'cifar10_LeNet_ELU': CIFAR10_LeNet_ELU,
        'news20_LinearNet': News20_LinearNet,
    }
    return constructors[net_name]()
def build_autoencoder(net_name):
    """Instantiate and return the autoencoder matching *net_name*.

    Raises AssertionError when *net_name* is not one of the implemented
    network identifiers.
    """
    implemented_networks = ('mnist_LeNet', 'cifar10_LeNet', 'cifar10_LeNet_ELU', 'news20_LinearNet')
    assert net_name in implemented_networks

    # Dispatch table instead of a chain of if-statements; the assert above
    # guarantees the lookup cannot fail.
    constructors = {
        'mnist_LeNet': MNIST_LeNet_Autoencoder,
        'cifar10_LeNet': CIFAR10_LeNet_Autoencoder,
        'cifar10_LeNet_ELU': CIFAR10_LeNet_ELU_Autoencoder,
        'news20_LinearNet': News20_LinearNet_Autoencoder,
    }
    return constructors[net_name]()
| [
"IDbluefish@gmail.com"
] | IDbluefish@gmail.com |
62ec98c11c353c33219211cd44afe625c192ecf4 | 09cead98874a64d55b9e5c84b369d3523c890442 | /py200620_python1_chen/py200703_06/quiz3_chen.py | 037893db6af7902ecea02b99d135e9937ab38ff1 | [] | no_license | edu-athensoft/stem1401python_student | f12b404d749286036a090e941c0268381ce558f8 | baad017d4cef2994855b008a756758d7b5e119ec | refs/heads/master | 2021-08-29T15:01:45.875136 | 2021-08-24T23:03:51 | 2021-08-24T23:03:51 | 210,029,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | """
quiz 2.
date: 2020-07-03
student name: QiJun Chen
"""
"""
q8.
your answer:
"""
"""
q7.
your answer:
"""
"""
q6.
your answer:
a
"""
"""
q5.
your answer:
"""
"""
q4.
your answer:
"""
"""
q3.
your answer: -
a = 1 ; b = 2
"""
a = 1
b = 2
a = 1 ; b = 2
# a = 1 b = 2
"""
q2.
your answer:b
b, e, f, g
"""
"""
q1.
your answer:d
c and d
"""
| [
"lada314@gmail.com"
] | lada314@gmail.com |
066a76d2c53f6a0d62e7c286f609782fba3a1fe4 | 9e538305f9263d86e780a4a3f205c972f658f54d | /src/order/views/__init__.py | 9f843456ab734d5c67008a631cf6b7c7a12202e2 | [] | no_license | tanjibpa/mednet | bb188582b0d90407015622b34f0291557acb1919 | 19a7535d583077fec7b7030c298fceb4c4df3207 | refs/heads/main | 2023-05-26T07:44:27.615506 | 2021-06-10T06:30:19 | 2021-06-10T06:30:19 | 355,774,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | from order.views.supplier import *
from order.views.pharmaceutical import *
from order.views.retailer import *
| [
"ikram.tanjib@gmail.com"
] | ikram.tanjib@gmail.com |
087e2051e3fb1e882581d5a8018773a3c8f43a32 | 590edb81b3248d549c7dcdf169ee4267f96fb784 | /scripts/twitter_related/remind_round_tweets/remind_round_tweets.py | e88bc2e47b2011dc7ca07e79bc7f83632699bbe3 | [] | no_license | emadgh/twizhoosh | f6225ad9e149c5e41687bb7f320ddd166f1ca08d | b20eec57abf10f4ce3cd40c05050e9aa2c3230e0 | refs/heads/master | 2021-01-15T07:57:05.306385 | 2015-01-12T19:47:54 | 2015-01-12T19:56:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | from core.scripts.twitter_related.base import BaseTimelineScript
from scripts.twitter_related.dedicate_round_tweets.dedicate_round_tweets import is_round
class RemindRoundTweets(BaseTimelineScript):
    """Timeline script that DMs a user when their next tweet will carry a
    "round" status number (as decided by the imported ``is_round``)."""

    def on_timeline_update(self, data):
        # The user's next tweet index is one past the current status count.
        next_tweet = data['user']['statuses_count'] + 1
        if is_round(next_tweet):
            # Persian: "Your next tweet will be tweet #N, a round one."
            text = 'توییت بعدی شما توییت {0}ام است، که توییتی رند است.'.format(next_tweet)
            self.twitter.send_direct_message(text=text, user_id=data['user']['id'])
"soheil.behnezhad@gmail.com"
] | soheil.behnezhad@gmail.com |
059bc80940745f205442ee9d27d24a4f4ccfa4d6 | d94c84979ffeab0746dcf6e5deb0fee098dc7052 | /set/setOps.py | 73cd51522440e896369087e4f4afa7d008cef7ba | [] | no_license | jambellops/hRank | 51435e62f98cde6f000dd44e76ba9f9285d1421d | fa76c6ba37a78df8392a47ce042e7ed78ee628be | refs/heads/master | 2020-04-06T15:36:09.724255 | 2018-11-14T17:25:40 | 2018-11-14T17:25:40 | 157,585,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | '''
Task
You have a non-empty set s, and you have to execute N commands given in N lines.
The commands will be pop, remove and discard.
Input Format
The first line contains integer n, the number of elements in the set .
The second line contains n space separated elements of set s. All of the
elements are non-negative integers, less than or equal to 9.
The third line contains integer N, the number of commands.
The next N lines contains either pop, remove and/or discard commands followed
by their associated value.
'''
n = int(input()) # size of s (read but not otherwise used)
s = set(map(int, input().split()))
N = int(input()) # number of commands
# Each command is "pop", or "remove x" / "discard x"; dispatch it to the
# corresponding set method via getattr.
for i in range(N):
    line = input().split(maxsplit=-1)
    if len(line)>1:
        # Command with an integer argument: remove/discard.
        action = line[0]
        value = int(line[1])
        getattr(s, action)(value)
    else:
        # Bare command: pop.
        action = line[0]
        getattr(s, action)()
print(sum(s))
| [
"noreply@github.com"
] | jambellops.noreply@github.com |
56453b9676ce59b3625fe2ac8a989d0e9ebc73d6 | 364e9b13de43ded742754be1850d4dbb50a87b14 | /dbexplorer/client/preference.py | d6cadda3a4731d5b495daa754b2b5e3c15fe508e | [] | no_license | SLamasse/ClioDB | 42587dd67b7aa8cb571022cc7feccd2a799d4985 | bc3688af237cc939f8d0424ee6558f12ee88a598 | refs/heads/main | 2023-01-13T10:36:00.857403 | 2020-11-21T09:20:06 | 2020-11-21T09:20:06 | 314,774,547 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,937 | py | # -*- coding: utf-8 -*-
"""
.. module:: preference
:synopsis: Module d'affichage et de configuration des préférences de l'application
.. codeauthor:: pireh, amérique du nord, laurent frobert
"""
from PySide import QtGui
from PySide import QtCore
import i18n
import os
import shutil
class PreferenceDialog(QtGui.QDialog):
    """Preferences dialog (Python 2 / PySide): language selection, font
    size and the configuration-directory location.  Most changes take
    effect on the next application start."""

    def __init__(self,mainWindow):
        QtGui.QDialog.__init__(self)
        self.mainWindow = mainWindow
        layout = QtGui.QGridLayout()
        self.setLayout(layout)
        self.setWindowTitle(_(u"Préférences"))
        labelLangue=QtGui.QLabel(_(u"Choix de la Langue"))
        self.langueZone = self.getLanguePanel()
        labelFont = QtGui.QLabel(_(u"Taille police : "))
        self.fontZone = QtGui.QLineEdit()
        self.fontZone.setText(str(self.readFontSizefromconfig()))
        labelConfig = QtGui.QLabel(_(u"Dossier de paramétrage"))
        self.configZone = self.getConfigPathPanel()
        self.includeConfig = QtGui.QCheckBox(_(u"Inclure le fichier de base de données"))
        okButton = QtGui.QPushButton(_(u"Appliquer au prochain redémarage"))
        cancelButton = QtGui.QPushButton(_(u"Annuler"))
        okButton.clicked.connect(self.okbuttonclicked)
        cancelButton.clicked.connect(self.cancelbuttonclicked)
        # Grid layout: label rows span both columns, except the font row
        # which places the label and its edit box side by side.
        layout.addWidget(labelLangue,0,0,1,2)
        layout.addWidget(self.langueZone,1,0,1,2)
        layout.addWidget(labelFont,2,0)
        layout.addWidget(self.fontZone,2,1)
        layout.addWidget(labelConfig,3,0,1,2)
        layout.addWidget(self.configZone,4,0,1,2)
        layout.addWidget(self.includeConfig,5,0,1,2)
        layout.addWidget(okButton,6,0,1,2)
        layout.addWidget(cancelButton,7,0,1,2)

    def readFontSizefromconfig(self):
        """Read the saved font size from the server config directory,
        falling back to 12 when the file is missing or unreadable."""
        from dbexplorer.server import service as serviceServer
        try :
            f = open(os.path.join(serviceServer.getpath(),'current_font_size'), 'r')
            size = f.read()
            f.close()
        except:
            size = 12
        return size

    def okbuttonclicked(self):
        """Persist the chosen config path, language and font size, then
        close the dialog."""
        # Save the new config directory only when it actually changed.
        if self.configZone.selectFilePath.text() != self.mainWindow.service.getConfigPath():
            print 'nouveau rep : ' ,self.configZone.selectFilePath.text()
            try:
                f = open("./configpath",'w')
                f.write(self.configZone.selectFilePath.text())
                f.close()
                # Copy the old config file over to the new directory?
                if self.includeConfig.isChecked() and not os.path.exists(os.path.join(self.configZone.selectFilePath.text(),'storage.sqlite')): # todo: the config file must not already exist at the destination
                    os.makedirs(os.path.join(self.configZone.selectFilePath.text(),'backup'))
                    srcfile = os.path.join(self.mainWindow.service.getConfigPath(),'storage.sqlite')
                    dstdir = os.path.join(self.configZone.selectFilePath.text(),'storage.sqlite')
                    shutil.copy(srcfile, dstdir)
            except:
                print 'error configpath'
        # Save whichever language radio button is checked.
        for i in range(len(self.langueZone.radios)):
            if self.langueZone.radios[i].isChecked():
                (codelangue,langue) = self.langueZone.langues[i]
                i18n.saveLang(codelangue,self.mainWindow.service.getConfigPath())
        # Font size: persist and re-apply the stylesheet immediately.
        if str(self.readFontSizefromconfig()) != self.fontZone.text():
            self.saveFontSize(self.fontZone.text())
            from dbexplorer.client.main import applyStylesheet
            applyStylesheet(self.fontZone.text())
            self.mainWindow.textSizeHasChanged(int(self.fontZone.text()))
        self.close()

    def saveFontSize(self,size):
        """Write the chosen font size into the config directory; failures
        are silently ignored (the default is used next time)."""
        try :
            configpath = self.mainWindow.service.getConfigPath()
            f = open(os.path.join(configpath,'current_font_size'), 'w')
            f.write(size)
            f.close()
        except:
            pass

    def cancelbuttonclicked(self):
        """Discard all changes and close the dialog."""
        self.close()

    def getLanguePanel(self):
        """Build the language panel: one radio button per available
        language, with the current language pre-selected."""
        langues = i18n.getAvailableLangue()
        w = QtGui.QWidget()
        # Radios and the matching (code, label) pairs are stashed on the
        # widget so okbuttonclicked() can map the checked radio back.
        w.radios=[]
        w.langues = langues
        layout = QtGui.QGridLayout()
        w.setLayout(layout)
        for (codelangue,langue) in langues:
            c = QtGui.QRadioButton(langue)
            if codelangue == i18n.currentLang :
                c.setChecked(True)
            w.radios.append(c)
            layout.addWidget(c)
        return w

    def getConfigPathPanel(self):
        """Build the config-path panel: a line edit holding the current
        config directory plus a directory-chooser button."""
        w = QtGui.QWidget()
        w.selectFilePath = QtGui.QLineEdit(self.mainWindow.service.getConfigPath())
        openFileNameButton = QtGui.QPushButton(_(u"Choisir un répertoire"))
        openFileNameButton.clicked.connect(self.openDir)
        layout = QtGui.QGridLayout()
        w.setLayout(layout)
        layout.addWidget(w.selectFilePath)
        layout.addWidget(openFileNameButton)
        return w

    def openDir(self):
        """Let the user pick a directory and copy the choice into the
        config-path line edit."""
        # NOTE(review): `options` is created but never passed to the dialog.
        options = QtGui.QFileDialog.Options()
        fileName = QtGui.QFileDialog.getExistingDirectory( dir=self.configZone.selectFilePath.text(), options=QtGui.QFileDialog.ShowDirsOnly)
        if fileName:
            self.configZone.selectFilePath.setText(fileName)
| [
"noreply@github.com"
] | SLamasse.noreply@github.com |
62c11291ef7b294b1e1e5be3cfacc8c090e418c0 | 58efb4b7e4bed07d346f3de1b6e077311282b5a7 | /okada_kl_subfaults.py | 5610459d3dd35779336609ff624d7f1cd81d1236 | [] | no_license | samcom12/Tohoku | 964cd89a40e7c5168723cb2ab74df4287de771b0 | 95f9d3bd01bf865ec5eec7862882307025f619e3 | refs/heads/master | 2023-01-23T22:53:27.163126 | 2020-12-07T22:51:56 | 2020-12-07T22:51:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,794 | py |
import okada
import numpy as np
def deformation(x, y, xoff=0, yoff=0, E_subfault=10, N_subfault=10, iseed=1001):
    """
    Calculate sea bed deformations due to a KL defined random slip field on a
    fault satisfying fixed, Tohoku-like fault parameters.

    Parameters
    ----------
    x, y : array_like
        Observation-point coordinates (same shape; metres — assumed, TODO confirm).
    xoff, yoff : float
        Horizontal offsets passed straight through to ``okada.forward``.
    E_subfault, N_subfault : int
        Number of subfaults east-west (down-dip) and north-south (along strike).
    iseed : int
        Random seed for the Karhunen-Loeve slip field.

    Returns
    -------
    (uE_sum, uN_sum, uZ_sum, slips)
        Summed east/north/vertical displacements over all subfaults, plus the
        per-subfault slip matrix.
    """
    # Common fault parameters appropriate for the Tohoku earthquake.
    depth=32000.0
    length=300000
    width=150000
    strike=195.0
    dip=14.0
    rake=87.0
    nu=0.25
    #slip=62.0
    #
    # opening=10.0
    slip = 20.0
    opening = 0.0
    # NOTE(review): the local `nu` above is unused — params below hardcodes 0.25.
    # Subfault centre coordinates, then the random slip field on those centres.
    epicenters_E, epicenters_N, epicenters_D = subfaults(E_subfault, N_subfault, dip, strike, length, width)
    slips, _, _, _, = kl_slipfield(epicenters_E, epicenters_N, epicenters_D, length, width, slip, iseed)
    #slips = slip*np.ones_like(epicenters_E)
    openings = opening*np.ones_like(epicenters_E)
    # Accumulators for the summed displacement over all subfaults.
    uE_sum=np.zeros_like(x)
    uN_sum=np.zeros_like(x)
    uZ_sum=np.zeros_like(x)
    # The down-dip / along-strike extent of each subfault.
    length_E= width/E_subfault
    length_N= length/N_subfault
    # Superpose the Okada solution of every subfault.
    for i in range(N_subfault):
        for j in range(E_subfault):
            # Shift observation points into the subfault-centred frame.
            x_convert = x-epicenters_E[i,j]
            y_convert = y-epicenters_N[i,j]
            d_convert = depth-epicenters_D[i,j]
            #slip_local = slip*(1.0 + 0.5*np.random.normal())
            slipij = slips[i,j]
            openingij = openings[i,j]
            params =dict(x=x_convert, y=y_convert, xoff=xoff, yoff=yoff,
                         depth=d_convert, length=length_N, width=length_E,
                         slip=slipij, opening=openingij,
                         strike=strike, dip=dip, rake=rake,
                         nu=0.25)
            uE,uN,uZ = okada.forward(**params)
            uE_sum=uE_sum+uE
            uN_sum=uN_sum+uN
            uZ_sum=uZ_sum+uZ
    # Debug output: extrema of the vertical deformation field.
    print(50*'=')
    print(np.max(uZ_sum))
    print(np.min(uZ_sum))
    return uE_sum, uN_sum, uZ_sum, slips
def subfaults(E_subfault, N_subfault, dip, strike, length, width):
    """Compute centre coordinates of an E_subfault x N_subfault grid of
    sub-faults on a fault plane rotated by dip and strike.

    The whole fault is centred at the origin. Centres are first laid out on
    the unrotated plane (east-west extent ``width``, north-south extent
    ``length``), then rotated by the dip and strike angles (given in degrees).

    Returns (epicenters_E, epicenters_N, epicenters_D): east coordinate,
    north coordinate and depth offset of every sub-fault centre, each an
    array of shape (N_subfault, E_subfault).
    """
    import numpy as np
    sub_w = width / E_subfault      # east-west size of one sub-fault
    sub_l = length / N_subfault     # north-south size of one sub-fault
    # Degrees -> radians (kept as explicit arithmetic).
    dip_rad = dip / 180 * np.pi
    strike_rad = strike / 180 * np.pi
    cd, sd = np.cos(dip_rad), np.sin(dip_rad)
    cs, ss = np.cos(strike_rad), np.sin(strike_rad)
    # Unrotated centre coordinates along each axis.
    col = np.arange(E_subfault)
    row = np.arange(N_subfault)
    E_flat = -width / 2 + sub_w * (col + 1) - sub_w / 2
    N_flat = length / 2 - sub_l * (row + 1) + sub_l / 2
    # meshgrid (default 'xy') yields (N_subfault, E_subfault) arrays with E
    # varying along columns and N along rows, matching the original layout.
    E, N = np.meshgrid(E_flat, N_flat)
    epicenters_E = E * cd * cs + N * ss
    epicenters_N = N * cs - E * cd * ss
    epicenters_D = -E * sd
    return epicenters_E, epicenters_N, epicenters_D
def kl_slipfield(epicenters_E, epicenters_N, epicenters_D, length, width, slip, iseed=1001):
    """Draw a spatially correlated random slip field via a truncated
    Karhunen-Loeve expansion.

    An exponential covariance C(r) = sigma^2 * exp(-r / r0) is built from the
    pairwise 3-D distances between sub-fault centres, eigendecomposed, and a
    standard-normal draw (seeded with ``iseed``) is mapped through it around
    the mean slip.

    Returns (s, D, V, z): the slip field reshaped to the sub-fault grid, the
    sorted eigenvalue matrix, the eigenvector matrix and the normal draw.
    Note: ``length`` is accepted for interface compatibility but unused.
    """
    import math
    import numpy as np
    grid_rows, grid_cols = epicenters_E.shape
    east = epicenters_E.flatten()
    north = epicenters_N.flatten()
    depth = epicenters_D.flatten()
    n_pts = len(east)
    mu = slip                 # mean slip
    alpha = 0.75              # relative variability
    sigma = alpha * mu        # standard deviation of the field
    r0 = 0.2 * width          # correlation length
    cov = np.zeros((n_pts, n_pts), dtype=float)
    for a in range(n_pts):
        for b in range(n_pts):
            dist = math.sqrt((east[a] - east[b]) ** 2
                             + (north[a] - north[b]) ** 2
                             + (depth[a] - depth[b]) ** 2)
            cov[a, b] = sigma ** 2 * math.exp(-dist / r0)
    eigvals, eigvecs = np.linalg.eig(cov)
    # Sort modes by decreasing eigenvalue.
    order = eigvals.argsort()[::-1]
    eigvals = eigvals[order]
    eigvecs = eigvecs[:, order]
    D = np.diag(eigvals)
    sqrtD = np.sqrt(D)
    np.random.seed(iseed)
    z = np.random.normal(size=(n_pts, 1))
    s = mu + eigvecs @ (sqrtD @ z)
    s = np.reshape(s, (grid_rows, grid_cols))
    return s, D, eigvecs, z
| [
"stoiver@gmail.com"
] | stoiver@gmail.com |
71d1e1bd7fd8420c085463ad1438045a67a185a2 | c88aa1d1f85d58226015510537153daa73358dce | /13/ex3.py | 350553daddfe47db637c2269b69f59f65552c088 | [] | no_license | kmollee/2014_fall_cp | e88ca3acf347a9f49c8295690e4ef81c828cec6b | fff65200333af8534ce23da8bdb97ed904cc71dc | refs/heads/master | 2021-01-01T17:56:33.442405 | 2015-01-07T10:44:32 | 2015-01-07T10:44:32 | 24,130,641 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,123 | py | # coding: utf-8
# 上面一行宣告程式內容所採用的編碼(encoding)
# 導入 cherrypy 模組
import cherrypy
# 導入 Python 內建的 os 模組
import os
# 以下為 Guess 類別的設計內容, 其中的 object 使用, 表示 Guess 類別繼承 object 的所有特性, 包括方法與屬性設計
class Guess(object):
    """CherryPy application exposing two simple greeting endpoints."""

    # @cherrypy.expose marks the following method as directly reachable
    # through a URL.
    @cherrypy.expose
    # index() is CherryPy's default handler: it runs when the request URL
    # does not name an explicit method.
    def index(self, name="John"):
        return "hello, " + name

    @cherrypy.expose
    def saygoodbye(self, name="John"):
        # Fixed: added the missing space after the comma so the message is
        # consistent with index()'s "hello, " formatting.
        return "goodbye, " + name
# OPENSHIFT_REPO_DIR is only present when deployed on OpenShift.
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # Running on OpenShift: hand the application to the WSGI container.
    application = cherrypy.Application(Guess())
else:
    # Running locally: start CherryPy's built-in development server.
    cherrypy.quickstart(Guess())
| [
"10073105@gm.nfu.edu.tw"
] | 10073105@gm.nfu.edu.tw |
353b363378fc41668dad00e5e4eed045090cb167 | 459c193e96a5446b7c6d408e664ee5c5caf18e44 | /src/4DVar6hNMC_obs_parameter_estimation.py | 8dbf80cb5b1dcfb54d9ce5c79eba97ed872c7171 | [] | no_license | saceandro/DataAssimilation | da1bd32aa30b46aa2bacc2e3679da4a0ddcdd080 | 9ed3a708f0be442013f21cde2e14c6dd177fc789 | refs/heads/master | 2020-04-23T14:49:56.947434 | 2019-02-18T08:41:32 | 2019-02-18T08:41:32 | 171,245,480 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,488 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 14 17:01:40 2017
@author: yk
"""
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import math
count = 0
def handler(func, *args):
    """Invoke *func* with the given positional arguments and return its result."""
    result = func(*args)
    return result
#%%
class Lorenz96:
    """Lorenz-96 model with one appended parameter (the forcing F).

    Augmented state vectors have length N + 1: entries 0..N-1 are the
    dynamical variables x_i, entry N carries the forcing F so it can be
    estimated alongside the state in 4D-Var.
    """
    def __init__(self, N):
        self.N = N  # number of dynamical variables
        self.M = 1  # number of parameters (the forcing F)
    def gradient(self, x):
        """Time derivative of the augmented state.

        dx_i/dt = (x_{i+1} - x_{i-2}) * x_{i-1} - x_i + F (cyclic indices),
        and dF/dt = 0 for the appended parameter.
        """
        d = np.zeros(self.N + self.M)
        d[0] = (x[1] - x[self.N-2]) * x[self.N-1] - x[0] + x[self.N]
        d[1] = (x[2] - x[self.N-1]) * x[0] - x[1] + x[self.N]
        for i in range(2, self.N-1):
            d[i] = (x[i+1] - x[i-2]) * x[i-1] - x[i] + x[self.N]
        d[self.N-1] = (x[0] - x[self.N-3]) * x[self.N-2] - x[self.N-1] + x[self.N]
        d[self.N] = 0
        return d
    def gradient_adjoint(self, la, x):
        """Apply the transposed Jacobian of gradient() at x to costate la.

        Builds M^T entry-by-entry (cyclic coupling of the Lorenz-96
        Jacobian, plus the dF column of ones) and returns M^T @ la.
        """
        mt = np.zeros((self.N + self.M, self.N + self.M))
        for i in range(self.N):
            for j in range(self.N):
                if ((i-1) % self.N) == j:
                    mt[j][i] += x[(i+1) % self.N] - x[(i-2) % self.N]
                if ((i+1) % self.N) == j:
                    mt[j][i] += x[(i-1) % self.N]
                if ((i-2) % self.N) == j:
                    mt[j][i] -= x[(i-1) % self.N]
                if (i % self.N) == j:
                    mt[j][i] -= 1
            # BUG FIX: this previously read `mt[N][i] = 1`, silently relying
            # on a module-level global N; use the instance dimension instead.
            mt[self.N][i] = 1
        gr = mt @ la
        return gr
class RungeKutta4:
    """Classic fourth-order Runge-Kutta integrator for the augmented state.

    callback is the model tendency function; N is the number of model
    variables, dt the time step, t the current time and x the current
    augmented state (length N + 1, last entry is the model parameter).
    """
    def __init__(self, callback, N, dt, t, x):
        self.callback = callback
        self.N = N
        self.dt = dt
        self.t = t
        self.x = x
        # Number of appended parameters (the forcing), fixed to 1.
        self.M = 1
    def nextstep(self):
        """Advance (t, x) by one RK4 step in place and return the new state."""
        k1 = handler(self.callback, self.x)
        k2 = handler(self.callback, self.x + k1*self.dt/2)
        k3 = handler(self.callback, self.x + k2*self.dt/2)
        k4 = handler(self.callback, self.x + k3*self.dt)
        self.t += self.dt
        self.x += (k1 + 2*k2 + 2*k3 + k4) * self.dt/6
        return self.x
    def orbit(self,T):
        """Integrate for T time units and return the trajectory array.

        NOTE(review): o[0] is first set to the initial state but then
        overwritten by the first nextstep() result in the loop, so the
        returned orbit starts one step past the initial condition — confirm
        this is intended.
        """
        steps = int(T/self.dt) + 1
        o = np.zeros((steps,self.N + self.M))
        o[0] = self.x
        for i in range(steps):
            o[i] = self.nextstep()
        return o
    def nextstep_gradient(self):
        """Advance one step and return dt * callback(t, x).

        NOTE(review): here the callback is invoked with (t, x) while
        nextstep() calls it with (x) only — the two signatures cannot both
        match one callback. This path looks unused/stale; confirm before use.
        """
        self.nextstep()
        return self.dt * self.callback(self.t, self.x)
    def orbit_gradient(self, T):
        """Collect dt * callback(t, x) along an orbit of length T.

        NOTE(review): same two-argument callback inconsistency as
        nextstep_gradient(); gr[0] is also overwritten on the first loop
        iteration.
        """
        steps = int(T/self.dt)
        gr = np.zeros((steps, self.N + self.M))
        gr[0] = self.dt * self.callback(self.t, self.x)
        for i in range(steps):
            gr[i] = self.nextstep_gradient()
        return gr
class Adjoint:
    """Strong-constraint 4D-Var helper: RK4 forward integration plus the
    discrete adjoint sweep that yields the cost gradient with respect to the
    initial augmented state (model variables + parameter).

    Observations y are available every `it` model steps; self.x stores the
    full model trajectory at every model step, shape (minute_steps, N + M).
    """
    def __init__(self, dx, dla, N, T, dt, it, x, y):
        self.dx = dx            # forward tendency: dx(state) -> d(state)/dt
        self.dla = dla          # adjoint tendency: dla(costate, state)
        self.N = N              # number of model variables
        self.T = T              # assimilation window length
        self.dt = dt            # model time step
        self.x = x              # trajectory buffer, shape (minute_steps, N + M)
        self.y = y              # observations, shape (steps, N)
        self.it = it            # model steps between observations
        self.minute_steps = int(T/self.dt)
        self.steps = int(self.minute_steps/it)
        self.M = 1              # number of appended parameters
    def orbit(self):
        """Integrate the forward model from self.x[0] with classic RK4."""
        for i in range(self.minute_steps-1):
            k1 = self.dx(self.x[i])
            k2 = self.dx(self.x[i] + k1*self.dt/2)
            k3 = self.dx(self.x[i] + k2*self.dt/2)
            k4 = self.dx(self.x[i] + k3*self.dt)
            self.x[i+1] = self.x[i] + (k1 + 2*k2 + 2*k3 + k4) * self.dt/6
        return self.x
    def observed(self, stddev):
        """Integrate, then add N(0, stddev^2) noise to the state variables
        at the first `steps` trajectory rows (observation times)."""
        self.orbit()
        for i in range(self.steps):
            for j in range(self.N):
                self.x[i,j] += stddev * np.random.randn()
        return self.x
    def true_observed(self, stddev):
        """Integrate; return (clean trajectory copy, noisy trajectory)."""
        tob = np.copy(self.orbit())
        for i in range(self.steps):
            for j in range(self.N):
                self.x[i,j] += stddev * np.random.randn()
        return tob, self.x
    def gradient(self):
        """Backward adjoint sweep along the trajectory currently in self.x.

        Integrates the costate la backwards with RK4, injecting the
        innovation (x - y) at every observation time, and returns la at the
        initial time — the gradient of cost() w.r.t. the initial state.
        """
        la = np.zeros((self.minute_steps, self.N + self.M))
        for i in range(self.steps-1, -1, -1):
            # BUG FIX: this loop previously read the module-level global
            # `it` instead of self.it.
            for j in range(self.it-1, -1, -1):
                n = self.it*i + j
                if n < self.it*self.steps - 1:
                    # Recompute the forward RK4 increment at step n ...
                    p1 = self.dx(self.x[n])
                    p2 = self.dx(self.x[n] + p1*self.dt/2)
                    p3 = self.dx(self.x[n] + p2*self.dt/2)
                    p4 = self.dx(self.x[n] + p3*self.dt)
                    gr = (p1 + 2*p2 + 2*p3 + p4)/6
                    # ... then take one backward RK4 step of the costate.
                    k1 = self.dla(la[n+1], self.x[n+1])
                    k2 = self.dla(la[n+1] - k1*self.dt/2, self.x[n+1] - gr*self.dt/2)
                    k3 = self.dla(la[n+1] - k2*self.dt/2, self.x[n+1] - gr*self.dt/2)
                    # NOTE(review): the final stage is evaluated at self.x[n]
                    # rather than self.x[n+1] - gr*self.dt; presumably x[n]
                    # is used as that point's approximation — confirm.
                    k4 = self.dla(la[n+1] - k3*self.dt, self.x[n])
                    la[n] = la[n+1] + (k1 + 2*k2 + 2*k3 + k4) * self.dt/6
            # Innovation forcing at this observation time.
            for j in range(self.N):
                la[self.it*i][j] += self.x[self.it*i][j] - self.y[i][j]
        return la[0]
    def gradient_from_x0(self, x0):
        """Set the initial state, rerun the forward model, then sweep.

        Deduplicated: previously this method repeated gradient()'s body.
        """
        self.x[0] = x0
        self.orbit()
        return self.gradient()
    def cost(self, x0):
        """Quadratic observation misfit of the trajectory started from x0."""
        self.x[0] = x0
        self.orbit()
        cost = 0
        for i in range(self.steps):
            diff = self.x[self.it*i][0:self.N] - self.y[i]
            cost += diff @ diff
        return cost/2.0
    def true_cost(self):
        """Misfit of the trajectory already stored in self.x (no re-run)."""
        cost = 0
        for i in range(self.steps):
            diff = self.x[self.it*i][0:self.N] - self.y[i]
            cost += diff @ diff
        return cost/2.0
    def numerical_gradient_from_x0(self, x0, h):
        """Forward-difference gradient of cost(); sanity check of gradient()."""
        gr = np.zeros(self.N + self.M)
        c1 = self.cost(x0)
        for j in range(self.N + self.M):
            xx = np.copy(x0)
            xx[j] += h
            gr[j] = (self.cost(xx) - c1)/h
        return gr
    def cbf(self, x0):
        """scipy.optimize callback: scatter-plot parameter and cost per
        iteration.

        NOTE(review): relies on the module-level globals count, axL and axR
        being set up by the driver script before the optimizer runs.
        """
        global count, axL, axR
        count += 1
        axL.scatter(count, x0[self.N], c='b')
        axR.scatter(count, self.cost(x0), c='b')
#%%
def plot_orbit(dat):
    """Plot the first three state components of *dat* as a 3-D curve."""
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.plot(dat[:,0],dat[:,1],dat[:,2])
    ax.set_xlabel('$x_0$')
    ax.set_ylabel('$x_1$')
    ax.set_zlabel('$x_2$')
    plt.show()
def compare_orbit(dat1, dat2):
    """Overlay two trajectories in one 3-D plot (first three components).

    dat1 is labelled as the true orbit, dat2 as the assimilated one.
    """
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.plot(dat1[:,0],dat1[:,1],dat1[:,2],label='true orbit')
    ax.plot(dat2[:,0],dat2[:,1],dat2[:,2],label='assimilated')
    ax.set_xlabel('$x_0$')
    ax.set_ylabel('$x_1$')
    ax.set_zlabel('$x_2$')
    plt.legend()
    plt.show()
def compare_orbit3(dat1, dat2, dat3, label1, label2, label3):
    """Overlay three trajectories in one 3-D plot (first three components),
    using the caller-supplied legend labels."""
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    for dat, label in ((dat1, label1), (dat2, label2), (dat3, label3)):
        ax.plot(dat[:, 0], dat[:, 1], dat[:, 2], label=label)
    ax.set_xlabel('$x_0$')
    ax.set_ylabel('$x_1$')
    ax.set_zlabel('$x_2$')
    plt.legend()
    plt.show()
#%%
from scipy.optimize import minimize
# ---- Experiment configuration -------------------------------------------
N = 7
pref = "data/" + str(N) + "/"
M = 1
F = 8
year = 0.01
day = 365 * year
dt = 0.01
# T = day * 0.2
T = 1.
print("T", T)
print("day", T/0.2)
# One observation every `it` model steps.
it = 5
minute_steps = int(T/dt)
steps = int(minute_steps/it)
stddev = 1
lorenz = Lorenz96(N)
# Load the true trajectory and its noisy observations from disk.
tob = np.loadtxt(pref + "year.1.dat")
obs = np.loadtxt(pref + "observed." + str(it) + ".1.dat")
compare_orbit(tob[0:minute_steps], obs[0:steps])
t = np.arange(0., T, dt)
# First guess: a random state from an independent run, F guessed at 15.
x_opt = np.zeros(N + M)
x_opt[0:N] = np.loadtxt(pref + "year.2.dat")[np.random.randint(len(tob))]
x_opt[N] = 15 # initial guess for F
x = np.zeros((minute_steps, N + M))
scheme = Adjoint(lorenz.gradient, lorenz.gradient_adjoint, N, T, dt, it, x, obs)
print("Before assimilation")
print("cost", scheme.cost(x_opt))
compare_orbit3(tob[0:minute_steps], obs[0:steps], scheme.x[:,0:N], 'true_orbit', 'observed', 'initial value')
compare_orbit(tob[0:minute_steps], scheme.x[:,0:N])
# Sanity check: adjoint gradient vs forward finite differences.
print("Analytical and numerical gradient comparison")
gr_anal = scheme.gradient_from_x0(x_opt)
print ("gr_anal", gr_anal)
gr_num = scheme.numerical_gradient_from_x0(x_opt, 0.00001)
print ("gr_num", gr_num)
print ("relative error", (gr_anal - gr_num)/gr_num)
#%%
# `global` at module level is a no-op; axL/axR are read by Adjoint.cbf.
global axL
global axR
fig , (axL, axR) = plt.subplots(ncols=2, figsize=(10,4), sharex=False)
# Minimize the 4D-Var cost with L-BFGS-B, plotting progress via cbf.
res = minimize(scheme.cost, x_opt, jac=scheme.gradient_from_x0, method='L-BFGS-B', callback=scheme.cbf)
print (res)
print ("true x0", tob[0])
# Plot the first three components of the analysis against the truth.
for j in range(3):
#for j in range(N):
    fig = plt.figure()
    plt.plot(t, tob[0:minute_steps,j], label='true orbit')
    plt.plot(t, scheme.x[0:minute_steps,j], label='assimilated')
    plt.legend()
    plt.show()
compare_orbit(tob[0:minute_steps], scheme.x[:,0:N])
#%%
# RMSE of the analysed trajectory against the truth over the window.
fig = plt.figure()
plt.plot(t, [np.linalg.norm(scheme.x[i,0:N] - tob[i])/math.sqrt(N) for i in range(len(t))], label='x norm')
plt.xlabel('t')
plt.ylabel('RMSE')
plt.yscale('symlog')
plt.legend()
plt.show()
print ("RMSE: ", np.mean([np.linalg.norm(scheme.x[i,0:N] - tob[i])/math.sqrt(N) for i in range(int(len(t)*0.4),int(len(t)*0.6))]))
print('4DVar optimal cost: ', res.fun)
scheme_true = Adjoint(lorenz.gradient, lorenz.gradient_adjoint, N, T, dt, it, tob, obs)
print('true cost: ', scheme_true.true_cost())
| [
"y.tienjin.kon@gmail.com"
] | y.tienjin.kon@gmail.com |
3c5ebbfcf44c30e7cf78edd8b203c538bfc48c6f | 2e5de95feb3bdab9590d1e6a41437e6240b290e4 | /mini_spider.py | 0a070143b31ae7c3a919af71ac10b3fc4b9adedb | [] | no_license | longxiaoyun/GenericCrawl | df66ab54ef2159f442dd0e0daa199415b2acbd85 | 92c9a05df8a06b3b3cb7cad221b5db992522b17e | refs/heads/master | 2022-06-15T08:48:25.240238 | 2020-05-09T08:23:49 | 2020-05-09T08:23:49 | 262,522,708 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,230 | py | # -*-coding:utf-8 -*-
from retrying import retry
from configparser import ConfigParser
import re
import os
import time
from datetime import datetime
from collections import deque
import random
import logging
import threading
import requests
import argparse
from urllib.parse import urldefrag, urljoin, urlparse
from urllib.parse import quote
logger = logging.getLogger(__name__)
# 命令行参数
def parse_argument():
    """Parse command-line options for the spider.

    -c/--config (required) names the configuration file; -v prints the
    version; -h prints help (auto-help is disabled and re-added manually).
    """
    arg_parser = argparse.ArgumentParser("mini_spider", add_help=False)
    arg_parser.add_argument("-c", "--config", type=str, required=True, help="config file path")
    arg_parser.add_argument("-v", "--version", action="version", version='%(prog)s 1.0', help='show version message.')
    arg_parser.add_argument("-h", "--help", action="help", default=argparse.SUPPRESS, help='show help message.')
    return arg_parser.parse_args()
# 初始化配置文件
# 初始化配置文件
class InitConf(object):
    """Loads the [spider] section of the configuration file named by -c.

    NOTE(review): `options = parse_argument()` runs when the class body is
    executed, i.e. at module import time — importing this module without a
    -c argument on the command line will exit with an argparse error.
    Confirm this eager parsing is intended.
    """
    conf = ConfigParser()
    options = parse_argument()
    @classmethod
    def getter_settings(cls):
        """Read the config file and return its 'spider' section (or None
        with an error logged when no config path was supplied)."""
        config = cls.conf
        options = cls.options
        config_path = options.config
        if not config_path:
            logger.error('未指定配置文件.')
            return
        file_path = get_file_path(config_path)
        config.read(file_path, encoding='utf-8')
        return config['spider']
# 文件路径处理
def get_file_path(_path):
    """Resolve *_path* relative to the directory containing this module."""
    module_file = os.path.abspath(__file__)
    base_dir = os.path.abspath(os.path.join(os.path.dirname(module_file), "."))
    return os.path.join(base_dir, _path)
# 链接补全
def link_handler(seed_url, url):
    """Strip any #fragment from *url* and resolve it against *seed_url*,
    yielding an absolute URL."""
    defragged, _fragment = urldefrag(url)
    return urljoin(seed_url, defragged)
# 保存结果
def save_result(html, url, encode, settings=None):
    """Persist one crawled page to <module dir>/<output_directory>/<quoted URL>.

    Parameters:
        html: page text to write (skipped with an error log when empty).
        url: the page URL; percent-quoted to build a filesystem-safe name.
        encode: text encoding used when writing the file.
        settings: mapping providing 'output_directory'. Optional for
            backward compatibility; falls back to the parsed spider config.

    BUG FIX: this function used to read a module-level global `settings`
    that was never defined (main() only creates a local), so every call
    raised NameError — which `except OSError` did not catch.
    """
    if not html or not url:
        logger.error('待保存数据不能为空')
        return
    if settings is None:
        settings = InitConf.getter_settings()
    try:
        output_directory = settings.get('output_directory')
        _url = quote(url, safe='')
        current_file = os.path.abspath(__file__)
        parent_path = os.path.abspath(os.path.dirname(current_file) + os.path.sep + ".")
        file_path = os.path.join(parent_path, output_directory, _url)
        with open(file_path, 'w', encoding=encode) as f:
            f.write(html)
    except OSError as e:
        logger.error(e)
# 参数处理
def params_handler(param, default_param):
    """Return *param* stripped of surrounding whitespace, or
    *default_param* when param is empty/None."""
    return param.strip() if param else default_param
# 主要爬取类
class DefaultCrawl(threading.Thread):
    """Crawler worker thread.

    Pops URLs from a shared deque, downloads pages, extracts .htm/.html/
    .shtml links, saves pages and enqueues newly discovered links until the
    configured depth is exceeded.

    NOTE(review): `will_crawl_queue` (deque) and `visited` (dict) are shared
    by all worker threads without a lock — individual operations are
    GIL-atomic but the check-then-act sequences are not; confirm this race
    is acceptable.
    """
    def __init__(self, seed_url, settings, visited, q):
        # Invoke the parent (Thread) constructor.
        super(DefaultCrawl, self).__init__()
        self.will_crawl_queue = q
        self.visited = visited
        # Seed URLs may be a single string or a list; each starts at depth 0.
        if isinstance(seed_url, str):
            self.will_crawl_queue.appendleft(seed_url)
            self.visited[seed_url] = 0
        if isinstance(seed_url, list):
            for _s in seed_url:
                self.will_crawl_queue.appendleft(_s)
                self.visited[_s] = 0
        self.settings = settings
        self.download_delay = int(params_handler(self.settings.get('crawl_interval'), 3))
        self.timeout = int(params_handler(self.settings.get('crawl_timeout'), 10))
        self.headers = {'User-Agent': 'Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)'}
        self.max_depth = int(params_handler(self.settings.get('max_depth'), 1))
        # Regex capturing href values of anchor tags.
        self.regex_str = '<a[^>]+href=["\'](.*?)["\']'
        self.enc = 'utf-8'
        self.throttle = Throttle(self.download_delay)
    @retry(stop_max_attempt_number=3)
    def request(self, url, method, data):
        """Fetch *url*; retried up to 3 times on any failure (the assert on
        the status code also triggers a retry). Updates self.enc from the
        response encoding and returns (content bytes, final URL)."""
        if method == "POST":
            result = requests.post(url, data=data, headers=self.headers, timeout=self.timeout)
        else:
            result = requests.get(url, headers=self.headers, timeout=self.timeout)
        assert result.status_code == 200
        self.enc = result.encoding
        return result.content, result.url
    def downloader(self, url, method='GET', data=None):
        """request() wrapper that logs failures and returns (None, None)
        instead of raising."""
        try:
            result, page_url = self.request(url, method, data)
        except Exception as e:
            logger.error(e)
            result = None
            page_url = None
        return result, page_url
    def extractor_urls(self, html):
        """Return all href values found in *html* (case-insensitive match)."""
        url_regex = re.compile(self.regex_str, re.IGNORECASE)
        return url_regex.findall(html)
    def run(self):
        """Worker loop: crawl until the shared queue is drained."""
        while self.will_crawl_queue:
            url = self.will_crawl_queue.pop()
            self.throttle.wait_url(url)
            depth = self.visited[url]
            if depth <= self.max_depth:
                # Download the page.
                html,page_url = self.downloader(url)
                if html:
                    if isinstance(html, bytes):
                        html = html.decode(self.enc)
                    # Extract every link on the page.
                    url_list = self.extractor_urls(html)
                    # Keep only links to .html/.htm/.shtml pages.
                    filter_urls = [link for link in url_list if link.endswith(('.html', '.htm', '.shtml'))]
                    for url in filter_urls:
                        # Complete relative links against the page URL.
                        real_url = link_handler(page_url, url)
                        # Skip links we have already seen.
                        if real_url not in self.visited:
                            # NOTE(review): this saves the *current* page's
                            # html under the *linked* page's name — looks
                            # like a bug; confirm intended behaviour.
                            save_result(html, real_url, encode=self.enc)
                            # Children of this page live one level deeper.
                            self.visited[real_url] = depth + 1
                            # Enqueue the new link for crawling.
                            if real_url not in self.will_crawl_queue:
                                self.will_crawl_queue.appendleft(real_url)
# 下载延迟
class Throttle:
    """Per-domain politeness delay between successive requests.

    domains maps a netloc to the datetime of its most recent request.
    """
    def __init__(self, delay):
        # netloc -> datetime of the last request to that domain
        self.domains = {}
        # minimum number of seconds between two hits on the same domain
        self.delay = delay
    def wait_url(self, url_str):
        """Sleep if this URL's domain was hit less than `delay` seconds ago,
        then record the current time for the domain.

        BUG FIX: the previous version never wrote a timestamp into
        self.domains, so last_accessed was always None and the delay was
        never enforced.
        """
        # Throttle per netloc (domain) rather than per full URL.
        domain_url = urlparse(url_str).netloc
        last_accessed = self.domains.get(domain_url)
        if self.delay > 0 and last_accessed is not None:
            # Remaining time until the delay has elapsed for this domain.
            sleep_interval = self.delay - (datetime.now() - last_accessed).seconds
            if sleep_interval > 0:
                # Add a small random offset so request timing is less regular.
                time.sleep(sleep_interval + round(random.uniform(1, 3), 1))
        # Record this visit so the next hit on the same domain is throttled.
        self.domains[domain_url] = datetime.now()
# 启动方法
def main():
settings = InitConf.getter_settings()
thread_num = params_handler(settings.get('thread_count'), 8)
thread_num = int(thread_num)
url_list_file = settings.get('url_list_file')
seed_file_path = get_file_path(url_list_file)
with open(seed_file_path, 'r', encoding='utf-8') as f:
seed_list = [line.strip() for line in f]
visited = dict()
# 使用deque队列,方便做去重
q = deque()
threads = []
for i in range(thread_num):
t = DefaultCrawl(seed_list, settings, visited, q)
t.start()
threads.append(t)
# 等待所有队列完成
for _ in threads:
_.join()
# 阻塞,直到队列里的所有元素都被处理完
# q.join()
if __name__=='__main__':
main()
| [
"noreply@github.com"
] | longxiaoyun.noreply@github.com |
bb0b4d4a07df746a2803aadc965063e15ef30dea | 342f44418c13d4f746e9becd506988eaebaeb8fa | /lglass/database/__init__.py | 4bfa74b21f20a22bfb1e173c1c88bca90fe5ccc5 | [
"MIT"
] | permissive | fritz0705/lglass | 8d3d271638a0a7d60e5bd5fb00d284b53a97c9a2 | 3c8b13e090b2c7ba27789aaea2c536ed62f8f3f7 | refs/heads/master | 2021-01-17T07:36:48.388518 | 2016-06-03T16:31:03 | 2016-06-03T16:31:03 | 10,013,269 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 55 | py | from lglass.database.base import from_url, build_chain
| [
"fritz@grimpen.net"
] | fritz@grimpen.net |
7682f8e46a7452dbb09d77d81b83c9ddd544deee | a3c662a5eda4e269a8c81c99e229879b946a76f6 | /.venv/lib/python3.7/site-packages/pylint/test/input/func_bug113231.py | 6334ff9c8ff8d817b27865e568d5dba02d72af51 | [
"MIT"
] | permissive | ahmadreza-smdi/ms-shop | 0c29da82c58b243507575672bbc94fb6e8068aeb | 65ba3f3061e2ac5c63115b08dadfe7d67f645fb6 | refs/heads/master | 2023-04-27T19:51:34.858182 | 2019-11-24T20:57:59 | 2019-11-24T20:57:59 | 223,616,552 | 6 | 2 | MIT | 2023-04-21T20:51:21 | 2019-11-23T16:09:03 | Python | UTF-8 | Python | false | false | 605 | py | # pylint: disable=E1101
# pylint: disable=C0103
# pylint: disable=R0903, useless-object-inheritance, unnecessary-pass
"""test bugfix for #113231 in logging checker
"""
from __future__ import absolute_import
# Muck up the names in an effort to confuse...
import logging as renamed_logging
__revision__ = ''
class Logger(object):
    """Fake logger"""
    # Deliberately not a logging.Logger: this pylint functional-test fixture
    # checks that calls on such instances are NOT flagged by the checker.
    pass
logger = renamed_logging.getLogger(__name__)
fake_logger = Logger()
# Statements that should be flagged:
renamed_logging.warning('%s, %s' % (4, 5))
logger.warning('%s' % 5)
# Statements that should not be flagged:
fake_logger.warn('%s' % 5)
| [
"ahmadreza.smdi@gmail.com"
] | ahmadreza.smdi@gmail.com |
0df8069e1c6ec0000880b1a3554d2c8d8fbc3caf | f2a5c18419643dd41da7f2e4796836004afb788f | /zeropush/utils.py | d4fe8dc8512e4b2be411c1fdc49aa9408c24e4ad | [] | no_license | theskumar-archive/django-zeropush-reflux | e13bccc1369af8c295db5f5a019d0bb1495d8c9b | 0970b037e4a8b2af9d325381951c0b72eccf1aeb | refs/heads/master | 2021-01-21T17:50:37.839289 | 2015-05-09T15:54:27 | 2015-05-09T15:54:27 | 34,857,496 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,105 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
# Standard Library
import json
import logging
from datetime import timedelta
# Third Party Stuff
import requests
from django.conf import settings
from django.utils.encoding import force_str
from django.utils import six
logger = logging.getLogger(__name__)
ZEROPUSH_NOTIFY_URL = "https://api.zeropush.com/notify"
def notify_devices(devices, alert=None, sound=None, badge='+1', info=None, expiry=None,
                   content_available=None, category=None):
    '''
    Send a push notification to the given devices via ZeroPush's /notify API.
    https://zeropush.com/documentation/api_reference#notify

    devices: objects with a `token` attribute. Remaining parameters map to
    the ZeroPush payload fields; expiry defaults to 30 days when omitted.
    Returns True when ZeroPush accepts the request, False otherwise
    (including when `devices` is empty).

    NOTE(review): the `assert` below disappears under `python -O`, leaving
    the token unvalidated — consider raising instead. Also, a non-string
    `alert` is silently dropped from the payload (the isinstance guard).
    '''
    assert settings.ZEROPUSH_AUTH_TOKEN
    if len(devices) > 0:
        params = {
            "device_tokens": [device.token for device in devices]
        }
        # add alert payload
        if isinstance(alert, six.string_types):
            alert = force_str(alert)
            params.update({"alert": alert})
        if sound is not None:
            params.update({"sound": sound})
        if badge is not None:
            params.update({"badge": badge})
        if info is not None:
            # info is serialized to JSON before being embedded in the payload
            params.update({"info": json.dumps(info)})
        if content_available is not None:
            params.update({"content_available": bool(content_available)})
        if category is not None:
            params.update({"category": category})
        # add default expiry if not available
        expiry_time = expiry if expiry else int(timedelta(days=30).total_seconds())
        params.update({"expiry": expiry_time})
        # add authorization
        headers = {'Authorization': 'Token token="%s"' % settings.ZEROPUSH_AUTH_TOKEN,
                   'content-type': 'application/json'}
        response = requests.post(ZEROPUSH_NOTIFY_URL, json.dumps(params), headers=headers)
        if response.ok:
            logger.info("Push successfully sent to zeropush")
            return True
        else:
            msg = "Error! Push failed to be sent to zeropush! Error response: %s" % response.text
            logger.error(msg)
            return False
    return False
| [
"me+github@saurabh-kumar.com"
] | me+github@saurabh-kumar.com |
9a70ed43d1cd64c0b0ca1d2c6fd5864c04128087 | 14373275670c1f3065ce9ae195df142146e2c1a4 | /stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi | 48fc2554304ebe4cd9f9322e99473fb4876e26ed | [
"Apache-2.0",
"MIT"
] | permissive | sobolevn/typeshed | eb7af17c06a9722f23c337e6b9a4726223155d58 | d63a82640390a9c130e0fe7d409e8b0b836b7c31 | refs/heads/master | 2023-08-04T05:59:29.447015 | 2023-06-14T21:27:53 | 2023-06-14T21:27:53 | 216,265,622 | 2 | 0 | Apache-2.0 | 2022-02-08T10:40:53 | 2019-10-19T20:21:25 | Python | UTF-8 | Python | false | false | 876 | pyi | from _typeshed import Incomplete
class BucketRetentionRules:
    # Type stub for influxdb_client's BucketRetentionRules model (OpenAPI
    # generated); properties mirror the retention-rule payload fields.
    openapi_types: Incomplete
    attribute_map: Incomplete
    discriminator: Incomplete
    def __init__(
        self,
        type: str = "expire",
        every_seconds: Incomplete | None = None,
        shard_group_duration_seconds: Incomplete | None = None,
    ) -> None: ...
    @property
    def type(self): ...
    @type.setter
    def type(self, type) -> None: ...
    @property
    def every_seconds(self): ...
    @every_seconds.setter
    def every_seconds(self, every_seconds) -> None: ...
    @property
    def shard_group_duration_seconds(self): ...
    @shard_group_duration_seconds.setter
    def shard_group_duration_seconds(self, shard_group_duration_seconds) -> None: ...
    def to_dict(self): ...
    def to_str(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
| [
"noreply@github.com"
] | sobolevn.noreply@github.com |
1160913b4e15aef699a5ac91d1ceb88cdfc89fbd | a6ed990fa4326c625a2a02f0c02eedf758ad8c7b | /meraki/sdk/python/getNetworkMerakiAuthUser.py | dd485a53b639bbcae0a844de8d441be562d0bd1c | [] | no_license | StevenKitavi/Meraki-Dashboard-API-v1-Documentation | cf2352976c6b6c00c17a5f6442cedf0aeed46c22 | 5ed02a7def29a2ce455a3f2cfa185f76f44789f5 | refs/heads/main | 2023-03-02T08:49:34.846055 | 2021-02-05T10:31:25 | 2021-02-05T10:31:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | import meraki
# Defining your API key as a variable in source code is not recommended
API_KEY = '6bec40cf957de430a6f1f2baa056b99a4fac9ea0'
# Instead, use an environment variable as shown under the Usage section
# @ https://github.com/meraki/dashboard-api-python/
dashboard = meraki.DashboardAPI(API_KEY)
network_id = 'L_646829496481105433'
meraki_auth_user_id = ''
response = dashboard.networks.getNetworkMerakiAuthUser(
network_id, meraki_auth_user_id
)
print(response) | [
"shiychen@cisco.com"
] | shiychen@cisco.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.