blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
519f08022094cf0b5db7f1cd495abafe6bd81ea9 | 27b17b44c0caa4ac157899715eb1f4d0bd6e3fae | /orders/migrations/0005_auto_20200102_0741.py | 8712e7bce51bff702c82b37b395c79fa7c6f2f66 | [] | no_license | alexsonphoenix/cs50w_project3_PizzaRestaurant | 6dbb1bb8d8948eea3d5dca69a63961bed8fd39b9 | 3c3b10cb5272836b2a58a97b3b704995ed64e9a9 | refs/heads/master | 2023-05-22T16:51:29.639260 | 2020-01-03T02:11:20 | 2020-01-03T02:11:20 | 229,177,485 | 0 | 0 | null | 2021-06-10T23:20:12 | 2019-12-20T02:47:04 | HTML | UTF-8 | Python | false | false | 1,690 | py | # Generated by Django 2.0.3 on 2020-01-02 07:41
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by `makemigrations` (Django 2.0.3). Do not hand-edit the
    # operations: Django compares this file against recorded migration history.

    dependencies = [
        ('orders', '0004_auto_20191230_0234'),
    ]

    operations = [
        # Disambiguate the generic `status` field per model.
        migrations.RenameField(
            model_name='cart',
            old_name='status',
            new_name='cart_status',
        ),
        # The old single `price` becomes the large-size price on menu items...
        migrations.RenameField(
            model_name='dinner_platter',
            old_name='price',
            new_name='price_large',
        ),
        migrations.RenameField(
            model_name='order',
            old_name='status',
            new_name='order_status',
        ),
        migrations.RenameField(
            model_name='pizza',
            old_name='price',
            new_name='price_large',
        ),
        migrations.RenameField(
            model_name='sub',
            old_name='price',
            new_name='price_large',
        ),
        # ...and a small-size price is added alongside it.
        # preserve_default=False: the value supplied during makemigrations was
        # only used to backfill existing rows, not kept as a field default.
        migrations.AddField(
            model_name='dinner_platter',
            name='price_small',
            field=models.FloatField(),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='pizza',
            name='price_small',
            field=models.FloatField(),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='sub',
            name='price_small',
            field=models.FloatField(),
            preserve_default=False,
        ),
        # Toppings become optional on pizzas.
        migrations.AlterField(
            model_name='pizza',
            name='topping',
            field=models.ManyToManyField(blank=True, related_name='pizzas', to='orders.Topping'),
        ),
    ]
| [
"alexsonphoenix"
] | alexsonphoenix |
e8955562f25858d1d44d8c3e96d0052cd1c0a9a6 | 91d42246402e3bf7057df314dc7e329f0037d7a9 | /linepy/timeline.py | 4c6aef7087e01e2841b400b0d79cecaafcfa92f7 | [
"BSD-3-Clause"
] | permissive | ender996/selfbot-py | 60a5505302be3b811d4742f3273a368f6526f777 | 4d0e69dd9ee8527d07746d770eaceb222509f314 | refs/heads/master | 2023-06-02T00:52:38.789377 | 2021-06-21T02:57:08 | 2021-06-21T02:57:08 | 260,297,936 | 2 | 0 | MIT | 2020-04-30T19:16:55 | 2020-04-30T19:16:54 | null | UTF-8 | Python | false | false | 10,604 | py | # -*- coding: utf-8 -*-
from datetime import datetime
from .channel import Channel
import json, time, base64
def loggedIn(func):
    """Decorator: only run `func` when the client instance is logged in.

    `args[0]` is the bound instance (`self`). When `isLogin` is falsy the
    wrapped call is refused, the instance's default callback is notified,
    and None is returned.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped method's name/docstring for introspection
    def checkLogin(*args, **kwargs):
        if args[0].isLogin:
            return func(*args, **kwargs)
        else:
            args[0].callback.default('You want to call the function, you must login to LINE')
    return checkLogin
class Timeline(Channel):
    """LINE Timeline channel client.

    Wraps the timeline REST endpoints (feed, posts, comments, likes, group
    posts and group albums). All network traffic goes through `self.server`,
    which supplies base URLs, headers and HTTP helpers; unless noted
    otherwise each method returns the decoded JSON response body.

    NOTE(review): `self.channelId`, `self.channel`, `self.profile` and
    `self.isLogin` are expected to be provided by a cooperating mixin/base
    before __init__ runs — confirm against the client class that composes this.
    """

    def __init__(self):
        # Fall back to the well-known timeline channel id when none was set.
        if not self.channelId:
            self.channelId = self.server.CHANNEL_ID['LINE_TIMELINE']
        Channel.__init__(self, self.channel, self.channelId, False)
        self.tl = self.getChannelResult()
        self.__loginTimeline()

    def __loginTimeline(self):
        # Install the channel access token so subsequent timeline calls
        # authenticate, then prime the cached profile detail.
        self.server.setTimelineHeadersWithDict({
            'Content-Type': 'application/json',
            'User-Agent': self.server.USER_AGENT,
            'X-Line-Mid': self.profile.mid,
            'X-Line-Carrier': self.server.CARRIER,
            'X-Line-Application': self.server.APP_NAME,
            'X-Line-ChannelToken': self.tl.channelAccessToken
        })
        self.profileDetail = self.getProfileDetail()

    """Timeline"""

    @loggedIn
    def getFeed(self, postLimit=10, commentLimit=1, likeLimit=1, order='TIME'):
        """Fetch the timeline feed (ordering e.g. 'TIME')."""
        params = {'postLimit': postLimit, 'commentLimit': commentLimit, 'likeLimit': likeLimit, 'order': order}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/feed/list.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def getHomeProfile(self, mid=None, postLimit=10, commentLimit=1, likeLimit=1):
        """List posts on a user's home timeline (defaults to our own mid)."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'postLimit': postLimit, 'commentLimit': commentLimit, 'likeLimit': likeLimit, 'sourceType': 'LINE_PROFILE_COVER'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/post/list.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def getProfileDetail(self, mid=None):
        """Fetch the user-popup profile details (defaults to our own mid)."""
        if mid is None:
            mid = self.profile.mid
        params = {'userMid': mid}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v1/userpopup/getDetail.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def updateProfileCoverById(self, objId):
        """Set our profile cover to an already-uploaded object id."""
        params = {'coverImageId': objId}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/home/updateCover.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def getProfileCoverId(self, mid=None):
        """Return the object id of a user's profile cover image."""
        if mid is None:
            mid = self.profile.mid
        home = self.getProfileDetail(mid)
        return home['result']['objectId']

    @loggedIn
    def getProfileCoverURL(self, mid=None):
        """Return a download URL for a user's profile cover image."""
        if mid is None:
            mid = self.profile.mid
        home = self.getProfileDetail(mid)
        params = {'userid': mid, 'oid': home['result']['objectId']}
        return self.server.urlEncode(self.server.LINE_OBS_DOMAIN, '/myhome/c/download.nhn', params)

    """Post"""

    @loggedIn
    def createPost(self, text, holdingTime=None):
        """Create a public timeline post; `holdingTime` optionally limits its lifetime."""
        params = {'homeId': self.profile.mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/post/create.json', params)
        payload = {'postInfo': {'readPermission': {'type': 'ALL'}}, 'sourceType': 'TIMELINE', 'contents': {'text': text}}
        if holdingTime is not None:  # identity check; original compared with != None
            payload["postInfo"]["holdingTime"] = holdingTime
        data = json.dumps(payload)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def sendPostToTalk(self, mid, postId):
        """Share an existing post into a talk (chat) with `mid`."""
        if mid is None:
            mid = self.profile.mid
        params = {'receiveMid': mid, 'postId': postId}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/post/sendPostToTalk.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def createComment(self, mid, postId, text):
        """Comment on a post owned by `mid`."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/comment/create.json', params)
        data = {'commentText': text, 'activityExternalId': postId, 'actorId': mid}
        data = json.dumps(data)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def deleteComment(self, mid, postId, commentId):
        """Delete a comment from a post."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/comment/delete.json', params)
        data = {'commentId': commentId, 'activityExternalId': postId, 'actorId': mid}
        data = json.dumps(data)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def likePost(self, mid, postId, likeType=1001):
        """Like a post; `likeType` selects the reaction (1001-1006)."""
        if mid is None:
            mid = self.profile.mid
        if likeType not in [1001, 1002, 1003, 1004, 1005, 1006]:
            raise Exception('Invalid parameter likeType')
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/like/create.json', params)
        data = {'likeType': likeType, 'activityExternalId': postId, 'actorId': mid}
        data = json.dumps(data)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def unlikePost(self, mid, postId):
        """Remove our like from a post."""
        if mid is None:
            mid = self.profile.mid
        params = {'homeId': mid, 'sourceType': 'TIMELINE'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/like/cancel.json', params)
        data = {'activityExternalId': postId, 'actorId': mid}
        data = json.dumps(data)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        return r.json()

    """Group Post"""

    @loggedIn
    def createGroupPost(self, mid, text):
        """Create a post on a group's home (`mid` is the group id)."""
        payload = {'postInfo': {'readPermission': {'homeId': mid}}, 'sourceType': 'TIMELINE', 'contents': {'text': text}}
        data = json.dumps(payload)
        r = self.server.postContent(self.server.LINE_TIMELINE_API + '/v45/post/create.json', data=data, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def createGroupAlbum(self, mid, name):
        """Create a new (empty) image album in a group; returns True on success."""
        data = json.dumps({'title': name, 'type': 'image'})
        params = {'homeId': mid, 'count': '1', 'auto': '0'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/album.json', params)
        r = self.server.postContent(url, data=data, headers=self.server.timelineHeaders)
        if r.status_code != 201:
            raise Exception('Create a new album failure.')
        return True

    @loggedIn
    def deleteGroupAlbum(self, mid, albumId):
        """Delete a group album; returns True on success."""
        params = {'homeId': mid}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/album/%s' % albumId, params)
        r = self.server.deleteContent(url, headers=self.server.timelineHeaders)
        if r.status_code != 201:
            raise Exception('Delete album failure.')
        return True

    @loggedIn
    def getGroupPost(self, mid, postLimit=10, commentLimit=1, likeLimit=1):
        """List posts in a group's talkroom timeline."""
        params = {'homeId': mid, 'commentLimit': commentLimit, 'likeLimit': likeLimit, 'sourceType': 'TALKROOM'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_API, '/v45/post/list.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    """Group Album"""

    @loggedIn
    def getGroupAlbum(self, mid):
        """List a group's albums."""
        params = {'homeId': mid, 'type': 'g', 'sourceType': 'TALKROOM'}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/albums.json', params)
        r = self.server.getContent(url, headers=self.server.timelineHeaders)
        return r.json()

    @loggedIn
    def changeGroupAlbumName(self, mid, albumId, name):
        """Rename a group album; returns True on success."""
        data = json.dumps({'title': name})
        params = {'homeId': mid}
        url = self.server.urlEncode(self.server.LINE_TIMELINE_MH, '/album/v3/album/%s' % albumId, params)
        r = self.server.putContent(url, data=data, headers=self.server.timelineHeaders)
        if r.status_code != 201:
            raise Exception('Change album name failure.')
        return True

    @loggedIn
    def addImageToAlbum(self, mid, albumId, path):
        """Upload a local JPEG file into a group album."""
        # Context manager so the handle is always closed (the original leaked
        # it via open(path, 'rb').read()).
        with open(path, 'rb') as image_file:
            file = image_file.read()
        params = {
            'oid': int(time.time()),
            'quality': '90',
            'range': len(file),
            'type': 'image'
        }
        hr = self.server.additionalHeaders(self.server.timelineHeaders, {
            'Content-Type': 'image/jpeg',
            'X-Line-Mid': mid,
            'X-Line-Album': albumId,
            'x-obs-params': self.genOBSParams(params, 'b64')
        })
        # NOTE(review): the upload goes through getContent with a body payload;
        # presumably the server helper issues the appropriate verb — confirm it
        # should not be postContent.
        r = self.server.getContent(self.server.LINE_OBS_DOMAIN + '/album/a/upload.nhn', data=file, headers=hr)
        if r.status_code != 201:
            raise Exception('Add image to album failure.')
        return r.json()

    @loggedIn
    def getImageGroupAlbum(self, mid, albumId, objId, returnAs='path', saveAs=''):
        """Download an album image; returns a path, True, or raw bytes per `returnAs`."""
        if saveAs == '':
            saveAs = self.genTempFile('path')
        if returnAs not in ['path', 'bool', 'bin']:
            raise Exception('Invalid returnAs value')
        hr = self.server.additionalHeaders(self.server.timelineHeaders, {
            'Content-Type': 'image/jpeg',
            'X-Line-Mid': mid,
            'X-Line-Album': albumId
        })
        params = {'ver': '1.0', 'oid': objId}
        url = self.server.urlEncode(self.server.LINE_OBS_DOMAIN, '/album/a/download.nhn', params)
        r = self.server.getContent(url, headers=hr)
        if r.status_code == 200:
            self.saveFile(saveAs, r.raw)
            if returnAs == 'path':
                return saveAs
            elif returnAs == 'bool':
                return True
            elif returnAs == 'bin':
                return r.raw
        else:
            raise Exception('Download image album failure.')
| [
"derek.smith72@gmail.com"
] | derek.smith72@gmail.com |
f5d9e062e4f48178fdc61c9160b67bb92238dbd1 | ee732739791fa31972e61cc3eb4d51fcb18c1ec1 | /api_pratice/musics/serializers.py | 699af90e10c2cc6e91bf80efb7626bd5f49075bc | [] | no_license | daniel2012600/Django_Learning | 46dfccd5a3c90ad422297fdf7b1a5de0217d1aaa | ab25b8cd8bedbf75bbdf1b67d9f13710f9f1142d | refs/heads/main | 2023-04-07T12:36:16.293909 | 2021-04-11T04:15:05 | 2021-04-11T04:15:05 | 354,464,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | py | from rest_framework import serializers
from musics.models import Music
class MusicSerializer(serializers.ModelSerializer):
    """Serialize Music instances with an explicit, fixed field list."""

    class Meta:
        model = Music
        # Explicit fields (rather than '__all__') keep the API surface stable
        # even if the model grows new columns.
        fields = ('id', 'song', 'singer', 'last_modify_date', 'created')
"daniel2012600@gmail.com"
] | daniel2012600@gmail.com |
8ae01ed09f2baaa04103e50c931257c68330a8e0 | 9a2a707ca4bbac2ac782ca1e83a08b0cae687f6b | /LearningSpace/FirstClassFunctions.py | c2cb6be1e4935217c2a61f9ac20265d353069d2f | [] | no_license | Duathdaert/100-days-of-code | d7c62aed580a1bf1171cca8924b4ca041b77654d | 4782d9a9354645076ae5bef128b640a328918232 | refs/heads/master | 2022-09-25T22:40:50.181081 | 2020-05-27T20:48:12 | 2020-05-27T20:48:12 | 266,616,597 | 0 | 0 | null | 2020-05-24T20:09:30 | 2020-05-24T20:09:30 | null | UTF-8 | Python | false | false | 164 | py | def enclosing():
    # `x` is captured by the nested function below, forming a closure.
    x = 'closed over'
    def local_func():
        # Reads `x` from the enclosing scope at call time.
        print(x)
    # The closure is invoked immediately, so the function returns None
    # (print's return value) rather than the inner function itself.
    return local_func()
# lf = FirstClassFunctions.enclosing
# lf()
# closed over
| [
"Peter_Askey@waters.com"
] | Peter_Askey@waters.com |
0691173b6fb052d34e4dc198ab0fd6ad668c0aa0 | 5eec81340cef7293d9194249e8dda728a197cc81 | /Activity_03.py | e817d59facec10f05a7f31a5e411f06ebe36ea43 | [] | no_license | darshan033/python-36 | 80ba5827a6ee5c5f805f5d6ff4e230309d9a45cf | 143d91e5e7ae1fec3232c9ad64f3ef84188501e8 | refs/heads/main | 2023-07-26T21:41:49.837287 | 2021-08-29T15:23:54 | 2021-08-29T15:23:54 | 400,701,427 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | string1 = input("Enter the first string :" )
string2 = input("Enter the second string :\n")
concatenated_string = string1 + string2
print(concatenated_string)
print(concatenated_string*5)
| [
"noreply@github.com"
] | darshan033.noreply@github.com |
0eb6f40d29617eeb150a93be10ba041ff6dde4c8 | dcd6f10dd26dd9cc198ac3b843c44155fbd4a1ed | /demo/boilerplate/apps.py | c437f40883956270ed0e113e89a89c81b2fa0db1 | [
"MIT"
] | permissive | Papagoat/Django-Sass-Demo | 9c02084cfdd2e5379b228f4430841320ac384aff | 409ea4ad769cd2e1f25c9ba4639906409d1534a0 | refs/heads/master | 2020-04-21T18:38:25.078004 | 2019-02-10T18:39:05 | 2019-02-10T18:39:05 | 169,777,552 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from django.apps import AppConfig
class BoilerplateConfig(AppConfig):
name = 'boilerplate'
| [
"terencelucasyap@gmail.com"
] | terencelucasyap@gmail.com |
e06d4fc57982adec80adeabb651d4c09d60b4bb0 | 0c703b1009e3f3bf7845a4c49f204387bbb75808 | /gatekeeper/SpeechRecognition.py | 2fc46b5abe58c1cadea229a1135a6c271e2ddf8b | [
"MIT"
] | permissive | git-commit/iot-gatekeeper | 9412531e55704795841baa0940f7a2101d4d994e | 67d1b720c25a5c27338791ca6ffe447f405075a2 | refs/heads/develop | 2022-12-11T20:58:16.243179 | 2017-01-18T10:38:18 | 2017-01-18T10:38:18 | 73,506,088 | 0 | 1 | MIT | 2022-12-07T23:39:35 | 2016-11-11T19:38:54 | Python | UTF-8 | Python | false | false | 4,009 | py | import json, requests
import privateconfig, logging
import base64
#Note: The way to get api key:
#Free: https://www.microsoft.com/cognitive-services/en-us/subscriptions?productId=/products/Bing.Speech.Preview
#Paid: https://portal.azure.com/#create/Microsoft.CognitiveServices/apitype/Bing.Speech/pricingtier/S0
tts_url_api="https://speech.platform.bing.com"
class SpeechRecognition:
    """Thin client for the Microsoft Bing Speech API (text-to-speech and
    speech-to-text).

    Class-level state is shared by all instances:
      accesstoken -- the current bearer token for the speech endpoints.
      n           -- consecutive auth-retry counter; both transform methods
                     give up (return None) after 3 failed attempts.
    """
    accesstoken = None
    n = 0

    def __init__(self):
        self.renew_authentication()

    def renew_authentication(self):
        """Fetch a fresh access token from the Cognitive Services token endpoint."""
        params = ""
        headers = {"Ocp-Apim-Subscription-Key": privateconfig.bing_speech_token}
        accessTokenUri = "https://api.cognitive.microsoft.com/sts/v1.0/issueToken"
        logging.basicConfig(level=logging.DEBUG,
                            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        # Connect to server to get the Access Token
        logging.debug("Connect to server to get the Access Token")
        response = requests.post(accessTokenUri, params=params, headers=headers)
        logging.debug(response.text)
        data = response.text
        response.raise_for_status()
        SpeechRecognition.accesstoken = data
        logging.debug("Access Token: " + SpeechRecognition.accesstoken)

    def transformToAudio(self, text):
        """Synthesize `text` to 'temp.wav'; returns the path, or None after
        three consecutive authentication failures."""
        if SpeechRecognition.n > 2:
            return None
        body = "<speak version='1.0' xml:lang='en-us'> \
        <voice xml:lang='en-us' xml:gender='Male' name='Microsoft Server Speech Text to Speech Voice (en-US, BenjaminRUS)'>\
        %s</voice></speak>" % text
        headers = {"Content-type": "application/ssml+xml",
                   "X-Microsoft-OutputFormat": "riff-16khz-16bit-mono-pcm",
                   "Authorization": "Bearer " + SpeechRecognition.accesstoken,
                   "X-Search-AppId": "07D3234E49CE426DAA29772419F436CA",
                   "X-Search-ClientID": "1ECFAE91408841A480F00935DC390960",
                   "User-Agent": "TTSForPython"}
        # Connect to server to synthesize the wave
        logging.debug("\nConnect to server to synthesize the wave")
        response = requests.post("%s/synthesize" % tts_url_api, headers=headers, data=body)
        try:
            response.raise_for_status()
        except requests.HTTPError:  # narrowed from a bare except
            logging.debug(response.text)
            # Token may have expired: re-authenticate and retry (bounded by n).
            self.renew_authentication()
            SpeechRecognition.n = SpeechRecognition.n + 1
            return self.transformToAudio(text)
        SpeechRecognition.n = 0
        data = response.content
        logging.debug("The synthesized wave length: %d" % (len(data)))
        # Context manager guarantees the file is flushed and closed before the
        # path is handed back (the original never closed the handle).
        with open('temp.wav', 'wb') as wav_file:
            wav_file.write(data)
        return 'temp.wav'

    # Wave format audio
    def transformToText(self, audio_path):
        """Send a 16 kHz WAV file to the STT endpoint and return its 'results'
        payload; returns None after three consecutive auth failures."""
        if SpeechRecognition.n > 2:
            return None
        # NOTE(review): transformToAudio sends the raw token while this method
        # base64-encodes it; encoding via bytes keeps the original intent
        # working on Python 3 (b64encode rejects str) — confirm which form the
        # API actually expects.
        headers = {"Content-Type": "audio/wav; samplerate=16000",
                   "Authorization": "Bearer " + base64.b64encode(SpeechRecognition.accesstoken.encode()).decode(),
                   "Host": "speech.platform.bing.com"}
        params = {
            "scenarios": "smd",
            "appid": "D4D52672-91D7-4C74-8AD8-42B1D98141A5",
            "locale": "en-US",
            "device.os": "Linux",
            "version": "3.0",
            "format": "json",
            "requestid": "1d4b6030-9099-11e0-91e4-0800200c9a66&instanceid=1d4b6030-9099-11e0-91e4-0800200c9a66"
        }
        # WAV payloads must be read as bytes: the original text-mode read
        # raises UnicodeDecodeError on Python 3 and leaked the handle.
        with open(audio_path, 'rb') as wav_file:
            body = wav_file.read()
        # Connect to server to synthesize the wave
        logging.debug("\nConnect to server to get text from wave")
        response = requests.post("%s/query" % tts_url_api, headers=headers, params=params, data=body)
        try:
            response.raise_for_status()
        except requests.HTTPError:  # narrowed from a bare except
            logging.debug(response.text)
            self.renew_authentication()
            SpeechRecognition.n = SpeechRecognition.n + 1
            return self.transformToText(audio_path)
        SpeechRecognition.n = 0
        results = response.json()[0]['results']
        logging.debug(results)
        # Return the recognition results (the original only logged them).
        return results
| [
"yuriy.arabskyy@gmail.com"
] | yuriy.arabskyy@gmail.com |
924c4eaa046d0dfd6b0029ae09ef891dc4796ef5 | 12f57783d651213a66967ab1591e9065c6f6384f | /CodeFights/arcade/Intro/level3-commonCharacterCount.py | 9b46d278b85c5cb0266874ea91346698288357bb | [] | no_license | codeAligned/codingChallenges | 212a779cbd8a466189b22d2d080cd595ef2fd52e | 7166249990b71ed4f564f0252c53ba8a3872186c | refs/heads/master | 2020-05-17T11:43:03.958267 | 2018-07-11T02:48:05 | 2018-07-11T02:48:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | from collections import Counter
def commonCharacterCount(s1, s2):
    """Return the number of characters common to both strings, counted with
    multiplicity (e.g. "aabcc" vs "adcaa" share a, a, c -> 3).

    Counter intersection (&) takes the per-character minimum count, which is
    exactly what both length-ordered branches of the original computed.
    """
    return sum((Counter(s1) & Counter(s2)).values())
# TESTS
# Sample from the problem statement: "aabcc" and "adcaa" share a, a, c,
# so the expected common count is 3 (result is computed but not printed).
s1 = "aabcc"
s2 = "adcaa"
commonCharacterCount(s1, s2)
| [
"root@MBPR-jmartin-SF.home"
] | root@MBPR-jmartin-SF.home |
a1debc9689b292651733b263aa8a17b531e8e4d5 | 0cb7545be73ab31dfd828d69a6af2dc9f90066d7 | /3days/nn-mnist.py | f022b241f84633e97c18adaa25d2fd8fd50afa6c | [] | no_license | dllen/tf-intro | 171059a0a1f62de00fd891bc3ba8a17aab86fdd6 | cf2ce3f1e440bf15c1a689cccbef2dcdc0becbbd | refs/heads/master | 2021-07-16T04:03:54.001781 | 2017-10-20T11:02:06 | 2017-10-20T11:02:06 | 107,366,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,854 | py | from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
import matplotlib.pyplot as plt
import random
mnist = input_data.read_data_sets("..\MNIST_data", one_hot=True)
# input place holders
X = tf.placeholder(tf.float32, [None, 784])
Y = tf.placeholder(tf.float32, [None, 10])
# weights & bias for nn layers
W1 = tf.Variable(tf.random_normal([784, 256]))
b1 = tf.Variable(tf.random_normal([256]))
L1 = tf.nn.relu(tf.matmul(X, W1) + b1)
W2 = tf.Variable(tf.random_normal([256, 256]))
b2 = tf.Variable(tf.random_normal([256]))
L2 = tf.nn.relu(tf.matmul(L1, W2) + b2)
W3 = tf.Variable(tf.random_normal([256, 10]))
b3 = tf.Variable(tf.random_normal([10]))
hypothesis = tf.matmul(L2, W3) + b3
# define cost / loss & optimizer
# 交叉熵
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hypothesis, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.1).minimize(cost)
# loss
loss = tf.reduce_mean(cost)
# train
train = tf.train.GradientDescentOptimizer(0.01).minimize(loss)
correct_prediction = tf.equal(tf.argmax(hypothesis, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
for step in range(10001):
batch = mnist.train.next_batch(100)
sess.run(train, feed_dict={X: batch[0], Y: batch[1]})
if step % 100 == 0:
print(accuracy.eval(feed_dict={X: mnist.test.images, Y: mnist.test.labels}))
r = random.randint(0, mnist.test.num_examples - 1)
print("Label : ", sess.run(tf.argmax(mnist.test.labels[r:r + 1], 1)))
print("Prediction : ", sess.run(tf.argmax(hypothesis, 1), feed_dict={X: mnist.test.images[r:r + 1]}))
plt.imshow(mnist.test.images[r:r + 1].reshape(28, 28), cmap="Greys", interpolation="nearest")
plt.show()
| [
"shichaopeng@jd.com"
] | shichaopeng@jd.com |
52af5ecc9943cd6cc22d832ea20af76d24dca139 | 56f53edfc2a599e08da398e2e82795bb0214755a | /gencards.py | a3a60e2125e0654b14c1315b1a43cebe447a5406 | [
"MIT"
] | permissive | harlanhaskins/Luhn | f9532aabdb43e9024cd4929aeca598931daa7180 | 6fc3523d3961ca75ced1c7d5e8d718af4c9d6289 | refs/heads/master | 2021-01-17T14:46:08.478148 | 2015-10-15T21:36:59 | 2015-10-15T21:37:02 | 27,979,634 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | import random
for i in range(1000000):
print(random.randint(4000000000000000, 4999999999999999))
| [
"harlan@harlanhaskins.com"
] | harlan@harlanhaskins.com |
c4a596e688b76660a5ee1690c895b010d09b1472 | a2297c232ea2ce73e1f882930a33b49e5e491167 | /*4Sum.py | aead84b35f29c44c8ce8e4e1e8b8fcedd5ba295d | [] | no_license | JinshanJia/leetcode-python | 414f679a90a79f4665332b02c3228d08199347bb | 946ab122d658d2c1ea0097d3e122f557c622edfa | refs/heads/master | 2021-01-17T19:26:10.176287 | 2016-10-23T18:04:47 | 2016-10-23T18:04:47 | 71,719,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,961 | py | __author__ = 'Jia'
'''
Given an array S of n integers, are there elements a, b, c, and d in S such that a + b + c + d = target? Find all unique
quadruplets in the array which gives the sum of target.
Note:
Elements in a quadruplet (a,b,c,d) must be in non-descending order. (ie, a <= b <= c <= d)
The solution set must not contain duplicate quadruplets.
For example, given array S = {1 0 -1 0 -2 2}, and target = 0.
A solution set is:
(-1, 0, 0, 1)
(-2, -1, 1, 2)
(-2, 0, 0, 2)
'''
class Solution:
    # @return a list of lists of length 4, [[val1,val2,val3,val4]]
    def fourSum(self, num, target):
        """Return all unique quadruplets in `num` that sum to `target`, each
        quadruplet in non-descending order.

        Sorts `num` in place (matching the original's side effect).

        Fix: the original deduplicated the second pointer with the bound
        len(num) - 3 while its enclosing loop ran to len(num) - 2, so a
        duplicate value at the boundary produced a repeated quadruplet
        (e.g. fourSum([0, 0, 0, 0, 0], 0) returned [[0,0,0,0]] twice).
        All dedup bounds below match their loop bounds.
        """
        if num is None or len(num) < 4:
            return []
        num.sort()
        n = len(num)
        result = []
        i = 0
        while i < n - 3:
            j = i + 1
            while j < n - 2:
                lo, hi = j + 1, n - 1
                want = target - num[i] - num[j]  # remaining sum for the pair
                while lo < hi:
                    pair = num[lo] + num[hi]
                    if pair < want:
                        lo += 1
                    elif pair > want:
                        hi -= 1
                    else:
                        result.append([num[i], num[j], num[lo], num[hi]])
                        lo += 1
                        hi -= 1
                        # skip duplicate third/fourth values
                        while lo < hi and num[lo] == num[lo - 1]:
                            lo += 1
                        while lo < hi and num[hi] == num[hi + 1]:
                            hi -= 1
                j += 1
                # skip duplicate second values (bound matches the j-loop: n - 2)
                while j < n - 2 and num[j] == num[j - 1]:
                    j += 1
            i += 1
            # skip duplicate first values (bound matches the i-loop: n - 3)
            while i < n - 3 and num[i] == num[i - 1]:
                i += 1
        return result
s = Solution()
num = [91277418,66271374,38763793,4092006,11415077,60468277,1122637,72398035,-62267800,22082642,60359529,-16540633,92671879,-64462734,-55855043,-40899846,88007957,-57387813,-49552230,-96789394,18318594,-3246760,-44346548,-21370279,42493875,25185969,83216261,-70078020,-53687927,-76072023,-65863359,-61708176,-29175835,85675811,-80575807,-92211746,44755622,-23368379,23619674,-749263,-40707953,-68966953,72694581,-52328726,-78618474,40958224,-2921736,-55902268,-74278762,63342010,29076029,58781716,56045007,-67966567,-79405127,-45778231,-47167435,1586413,-58822903,-51277270,87348634,-86955956,-47418266,74884315,-36952674,-29067969,-98812826,-44893101,-22516153,-34522513,34091871,-79583480,47562301,6154068,87601405,-48859327,-2183204,17736781,31189878,-23814871,-35880166,39204002,93248899,-42067196,-49473145,-75235452,-61923200,64824322,-88505198,20903451,-80926102,56089387,-58094433,37743524,-71480010,-14975982,19473982,47085913,-90793462,-33520678,70775566,-76347995,-16091435,94700640,17183454,85735982,90399615,-86251609,-68167910,-95327478,90586275,-99524469,16999817,27815883,-88279865,53092631,75125438,44270568,-23129316,-846252,-59608044,90938699,80923976,3534451,6218186,41256179,-9165388,-11897463,92423776,-38991231,-6082654,92275443,74040861,77457712,-80549965,-42515693,69918944,-95198414,15677446,-52451179,-50111167,-23732840,39520751,-90474508,-27860023,65164540,26582346,-20183515,99018741,-2826130,-28461563,-24759460,-83828963,-1739800,71207113,26434787,52931083,-33111208,38314304,-29429107,-5567826,-5149750,9582750,85289753,75490866,-93202942,-85974081,7365682,-42953023,21825824,68329208,-87994788,3460985,18744871,-49724457,-12982362,-47800372,39958829,-95981751,-71017359,-18397211,27941418,-34699076,74174334,96928957,44328607,49293516,-39034828,5945763,-47046163,10986423,63478877,30677010,-21202664,-86235407,3164123,8956697,-9003909,-18929014,-73824245]
# num = [1, 0, -1, 0, -2, 2, 2, -2]
import datetime
t = datetime.datetime.now()
print s.fourSum(num, -236727523)
print (datetime.datetime.now() - t) | [
"jiajinshan2009@gmail.com"
] | jiajinshan2009@gmail.com |
91350932556ecaff00bee5d3d68c24b56773e58a | 2f12b8d0a6271fede39b9901866d085546533ed5 | /scrapers/queens.py | 28fa6ebaf36b338829d8043db073ae6d8d626f0b | [] | no_license | Ekimerton/classio-api | e910bef634e07761b249d5e36770e07f1b9a3918 | 2a044d7f30b743f90300c2dc1fb435b162046e82 | refs/heads/master | 2023-06-25T11:18:28.979651 | 2021-07-29T00:49:53 | 2021-07-29T00:49:53 | 372,102,250 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,908 | py | import os
from datetime import datetime
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from webdriver_manager.chrome import ChromeDriverManager
from models import Course, Timeslot, Section
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Shared WebDriver session and a 10-second explicit-wait helper used by every
# page interaction below. ChromeDriverManager().install() resolves a
# chromedriver binary path (per webdriver_manager).
driver = webdriver.Chrome(ChromeDriverManager().install())
wait = WebDriverWait(driver, 10)
# Alternates the "Applied Science" catalog-number split (< 150 vs > 150)
# between successive searches; see the HOTFIX note above get_search.
hotfix_flip = True
def login():
    """Authenticate against the Queen's SSO login page using credentials
    taken from the QUEENS_USERNAME / QUEENS_PASSWORD environment variables."""
    driver.get('https://saself.ps.queensu.ca/psc/saself/EMPLOYEE/SA/c/SA_LEARNER_SERVICES.CLASS_SEARCH.GBL?Page=SSR_CLSRCH_ENTRY&Action=U')
    # Wait for the login form to render, then fill in both fields.
    username_field = wait.until(EC.presence_of_element_located((By.ID, 'username')))
    username_field.send_keys(os.environ['QUEENS_USERNAME'])
    password_field = driver.find_element_by_id('password')
    password_field.send_keys(os.environ['QUEENS_PASSWORD'])
    # Submit the form.
    driver.find_element_by_name('_eventId_proceed').click()
def get_subjects(semester):
    """Open the class-search page for `semester` ({'year', 'term'} dict) and
    return the subject dropdown's <option> elements."""
    term_label = "{} {}".format(semester['year'], semester['term'])
    driver.get("https://saself.ps.queensu.ca/psc/saself/EMPLOYEE/SA/c/SA_LEARNER_SERVICES.CLASS_SEARCH.GBL?Page=SSR_CLSRCH_ENTRY&Action=U")

    # Pick the requested term in the semester dropdown.
    term_select = wait.until(EC.presence_of_element_located(
        (By.ID, 'CLASS_SRCH_WRK2_STRM$35$')))
    for choice in term_select.find_elements_by_tag_name('option'):
        if choice.text == term_label:
            choice.click()
            break

    # The page refreshes its form once the term is chosen; wait that out,
    # then collect every subject option.
    wait.until(EC.invisibility_of_element_located((By.ID, "WAIT_win0")))
    subject_select = driver.find_element_by_id('SSR_CLSRCH_WRK_SUBJECT_SRCH$0')
    return subject_select.find_elements_by_tag_name('option')
'''
HOTFIX: WINTER 2021 NEEDS APPLIED SCIENCE < and > 150
'''
def get_search(semester, subject_idx):
    """Run one class search and persist its courses/sections/timeslots to SQLite.

    semester    -- dict with 'year' and 'term' keys.
    subject_idx -- index into the subject <select> options on the search page.
    Returns "Success", or "Error with search" when the search yields nothing.

    HOTFIX (Winter 2021): "Applied Science" returns too many rows for one
    query, so it is searched twice, split at catalog number 150; the
    module-level `hotfix_flip` flag alternates the </> side per call.
    """
    global hotfix_flip
    semester_string = "{} {}".format(semester['year'], semester['term'])
    driver.get("https://saself.ps.queensu.ca/psc/saself/EMPLOYEE/SA/c/SA_LEARNER_SERVICES.CLASS_SEARCH.GBL?Page=SSR_CLSRCH_ENTRY&Action=U")
    # Semester Selection
    element = wait.until(EC.presence_of_element_located(
        (By.ID, 'CLASS_SRCH_WRK2_STRM$35$')))
    for option in element.find_elements_by_tag_name('option'):
        if option.text == semester_string:
            option.click()
            break
    # Wait for semester to be fetched and pick subject
    wait.until(EC.invisibility_of_element_located((By.ID, "WAIT_win0")))
    element = driver.find_element_by_id('SSR_CLSRCH_WRK_SUBJECT_SRCH$0')
    option = element.find_elements_by_tag_name('option')[subject_idx]
    option.click()
    hotfix = option.text == "Applied Science"
    if hotfix:
        # Less than OR greater than (alternates per call via hotfix_flip)
        element = driver.find_element_by_id(
            'SSR_CLSRCH_WRK_SSR_EXACT_MATCH1$1')
        element.send_keys("l" if hotfix_flip else "g")
        hotfix_flip = not hotfix_flip
        # Boundary point
        element = driver.find_element_by_id(
            'SSR_CLSRCH_WRK_CATALOG_NBR$1')
        element.send_keys("150")
    else:
        # Contains ""
        element = driver.find_element_by_id(
            'SSR_CLSRCH_WRK_SSR_EXACT_MATCH1$1')
        element.send_keys("c")
    # Undergrad only
    element = driver.find_element_by_id('SSR_CLSRCH_WRK_ACAD_CAREER$2')
    option = element.find_elements_by_tag_name('option')[0]
    option.click()
    # Main campus only
    element = driver.find_element_by_id('SSR_CLSRCH_WRK_CAMPUS$3')
    element.send_keys("m")
    # In person instruction only
    # element = driver.find_element_by_id('SSR_CLSRCH_WRK_INSTRUCTION_MODE$4')
    # element.send_keys("i")
    # Show non open classes
    element = driver.find_element_by_id('SSR_CLSRCH_WRK_SSR_OPEN_ONLY$5')
    if element.is_selected():
        element.click()
    # Click search
    element = driver.find_element_by_id('CLASS_SRCH_WRK2_SSR_PB_CLASS_SRCH')
    element.click()
    # See if search gets results (either the result page's Modify button or
    # the explicit "no results" message appears)
    try:
        wait.until(lambda driver:
                   driver.find_elements(
                       By.ID, 'CLASS_SRCH_WRK2_SSR_PB_MODIFY$5$')
                   or
                   driver.find_elements(
                       By.XPATH, "//*[contains(text(), 'The search returns no results that match the criteria specified.')]")
                   )
    except Exception:  # narrowed from a bare except (don't eat KeyboardInterrupt)
        return "Error with search"

    engine = create_engine('sqlite:///data/queens.db')
    Session = sessionmaker(bind=engine)
    session = Session()

    # Once search loads, parse html for classes and times
    courses = driver.find_elements_by_xpath(
        "//div[starts-with(@id,'win0divSSR_CLSRSLT_WRK_GROUPBOX2$')]")
    for course_div in courses:
        # Extract course info from the link title; the fixed offset (17)
        # presumably skips a constant prefix — confirm against the live page.
        course_desc = course_div.find_element_by_tag_name(
            'a').get_attribute('title')
        course_code = course_desc[17:course_desc.index(" -")].replace(" ", "")
        # Strip a trailing half-year suffix (e.g. "...A"/"...B").
        if course_code[-1] in ("A", "B"):
            course_code = course_code[:-1]
        course_name = course_desc[course_desc.index(" -") + 2:].strip()
        new_course = Course(code=course_code, name=course_name,
                            semester=semester_string)
        session.add(new_course)
        sections = course_div.find_elements_by_xpath(
            ".//tr[starts-with(@id,'trSSR_CLSRCH_MTG1$')]")
        for section_div in sections:
            # Extract section info ("code-kind" on the first line)
            section_desc = section_div.find_element_by_xpath(
                ".//a[starts-with(@id,'MTG_CLASSNAME$')]").text.splitlines()[0]
            section_code = section_desc[:section_desc.index("-")].strip()
            section_kind = section_desc[section_desc.index("-") + 1:].strip()
            new_section = Section(code=section_code,
                                  kind=section_kind)
            new_section.course = new_course
            session.add(new_section)
            timeslots = section_div.find_element_by_xpath(
                ".//span[starts-with(@id,'MTG_DAYTIME$')]").text.splitlines()
            for timeslot in timeslots:
                if timeslot == 'TBA':
                    continue
                # Extract timeslot info: "<days> <start> - <end>", where
                # <days> is a run of two-letter day codes (e.g. MoWeFr).
                timeslot_times = timeslot[timeslot.index(' ') + 1:]
                start_string, end_string = timeslot_times.split(" - ")
                start_time = datetime.strptime(start_string, '%I:%M%p').time()
                end_time = datetime.strptime(end_string, '%I:%M%p').time()
                timeslot_string = timeslot[:timeslot.index(' ')]
                timeslot_days = [timeslot_string[i:i+2]
                                 for i in range(0, len(timeslot_string), 2)]
                for timeslot_day in timeslot_days:
                    new_timeslot = Timeslot(
                        day=timeslot_day,
                        start_time=start_time,
                        end_time=end_time)
                    new_timeslot.section = new_section
                    session.add(new_timeslot)
    try:
        session.commit()
    except Exception as e:
        # Best-effort semantics preserved: log and roll back, keep scraping.
        print(e)
        session.rollback()
    finally:
        session.close()  # release the SQLite connection (the original leaked it)
    return "Success"
# Authenticate once, then sweep every subject in the chosen term.
login()
# Target academic term to scrape.
semester = {
    "year": "2022",
    "term": "Winter"
}
subjects = get_subjects(semester)
# Snapshot the option labels: the DOM elements go stale after each search.
subject_names = [subject.text for subject in subjects]
print("Found {} subjects".format(str(len(subjects))))
for idx, subject_name in enumerate(subject_names):
    # Skip the placeholder blank option.
    if subject_name == " ":
        continue
    status = get_search(semester, idx)
    print("{} - {} - {}".format(str(idx).zfill(3),
                                subject_name.ljust(30), status))
    # Applied Science needs two passes (see the HOTFIX note in get_search).
    if subject_name == "Applied Science":
        status = get_search(semester, idx)
        print("{} - {} - {}".format(str(idx).zfill(3),
                                    (subject_name + " (Batch 2)").ljust(30), status))
driver.quit()
| [
"ekim0252@gmail.com"
] | ekim0252@gmail.com |
979135269e9506b92bea3c8b12f08e76fe882377 | 2f2baa16b01ac1ad8f078e29543ae4ed8ebb2b88 | /data_type.py | dd848c2f4b9c63937ab27fadb25be6ad63ebea89 | [] | no_license | vic-ux/py_work | ab81f08f24e69b51b30f1f194b93a7e40bae6554 | e042165899e8164cb347414969916bc737a42f93 | refs/heads/master | 2023-04-07T04:23:15.095601 | 2021-04-18T16:08:47 | 2021-04-18T16:08:47 | 359,008,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | age = '23'
# Compose the greeting via %-interpolation (same text as concatenation).
message = "Happy %srd Birthday!" % age
print(message)

# Importing `this` prints the Zen of Python (PEP 20) as a side effect.
import this
| [
"vomodewu@gmail.com"
] | vomodewu@gmail.com |
a82c76f942927a67392aa0710e1f1969930ee6cf | bbf025a5f8596e5513bd723dc78aa36c46e2c51b | /dfs + tree/graph.py | 66496a7005f463b2e1716261d4179eac0bb238f2 | [] | no_license | AlanFermat/leetcode | 6209bb5cf2d1b19e3fe7b619e1230f75bb0152ab | cacba4abaca9c4bad8e8d12526336115067dc6a0 | refs/heads/master | 2021-07-11T04:00:00.594820 | 2020-06-22T21:31:02 | 2020-06-22T21:31:02 | 142,341,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 994 | py | class Graph:
def __init__(self,mapping={}):
'''
Constructs a new empty graph.
'''
self.graph = mapping
def nodes(self):
'''
Returns a list of all nodes in the graph.
'''
return self.graph.keys()
def get_neighbors(self, node):
'''
Given a particular node, returns a list of all neighbors in the graph.
'''
return self.graph[node]
def add_node(self, node):
'''
Adds the given node to the graph.
'''
self.graph[node] = set()
def add_edge(self, node1, node2):
'''
Adds an edge between the given pair of nodes, adding the nodes themselves first if they are not already in the graph.
'''
if not node1 in self.graph.keys():
self.add_node(node1)
if not node2 in self.graph.keys():
self.add_node(node2)
self.graph[node1].add(node2)
self.graph[node2].add(node1) | [
"zy19@rice.edu"
] | zy19@rice.edu |
f2bfc11338590eec04ff10e1911a56f28c3461f0 | e34cbf5fce48f661d08221c095750240dbd88caf | /python/day06/re_module.py | edd0ec1139439c775c119d49c71c7b07ae65d1f5 | [] | no_license | willianflasky/growup | 2f994b815b636e2582594375e90dbcb2aa37288e | 1db031a901e25bbe13f2d0db767cd28c76ac47f5 | refs/heads/master | 2023-01-04T13:13:14.191504 | 2020-01-12T08:11:41 | 2020-01-12T08:11:41 | 48,899,304 | 2 | 0 | null | 2022-12-26T19:46:22 | 2016-01-02T05:04:39 | C | UTF-8 | Python | false | false | 612 | py | #!/usr/bin/env python
# -*-coding:utf8-*-
# __author__ = "willian"
import re

# Demo of the core `re` functions.  All patterns are now raw strings: the
# original non-raw "\d" was an invalid escape sequence (DeprecationWarning
# since Python 3.6) even though it happened to evaluate to "\\d".
# re.match anchors at the start of the string only; rarely what you want.
re.match(r"\d+", "341221")
# re.search finds the first match anywhere in the string.
re.search(r"\d+", "341221")
# re.findall returns every non-overlapping match.
re.findall(r"\d+", "341221")
# re.split splits the string on the pattern (here: a comma).
re.split(",", "341,221")
# re.sub replaces matches; `count` caps the number of replacements.
re.sub(r"\d{4}", "1995", "1399,2017", count=1)
# Flag reference (examples kept from the original, commented out):
# re.I  ignore case
# print(re.search("[a-z]", "Alex", flags=re.I))
# re.M  multi-line mode: ^ and $ match at every line
# print(re.search("^is", "my name\nis alex", flags=re.M))
# re.S  make "." also match newlines
# print(re.search(".+", "my \nname", flags=re.S))
| [
"284607860@qq.com"
] | 284607860@qq.com |
997a28a368bfd423b188e23f7ae8ab15a4a71e8f | cb38b170cc716d812822c8fdf64da99e154e7e77 | /Python Lab/Exp 5/3.py | 13596ece44d68d3b81a7eb7ee726ca0b110a7bfe | [] | no_license | ayush-sah/Python | 9227b2819083d0c1fce4fa60a62b167c74a14172 | e17b43d2f4d53f4490630fc13a7defaafcf9ea28 | refs/heads/master | 2021-07-08T21:29:35.031374 | 2021-04-21T07:59:17 | 2021-04-21T07:59:17 | 228,924,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 434 | py | # To Implement a program with same method name and multiple arguments
class add:
def calc(num1, num2):
return num1 + num2
class concat(add):
def calc(str1, str2):
if type(str1) is int:
return add.calc(str1, str2)
else:
return str1 + str2
print("The answer for int is:", concat.calc(12, 34))
print("The answer for string is:", concat.calc("Ayush", " Sah"))
| [
"noreply@github.com"
] | ayush-sah.noreply@github.com |
276f494e824843392c3efb25c438e23b280c6dbd | 0754e2e7aa1ffb90b54d563ce5a9317e41cfebf9 | /ml/m03_xor.py | 2f5fac7cee0e1b1116a7a60ebc02f9efee5e76ae | [] | no_license | ChaeMyungSeock/Study | 62dcf4b13696b1f483c816af576ea8883c57e531 | 6f726a6ecb43387e4a3b9d068a9c491b115c74c0 | refs/heads/master | 2023-01-24T20:59:52.053394 | 2020-12-07T14:54:34 | 2020-12-07T14:54:34 | 263,255,793 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | from sklearn.svm import LinearSVC
from sklearn.metrics import accuracy_score
from sklearn import svm
# 1. 데이터
x_data = [[0, 0], [1,0], [0,1], [1,1]]
y_data = [0, 1, 1, 0]
# 2. 모델
# 모델은 한줄.. 파라미터값으로 늘어남
model = LinearSVC()
# 3. 훈련
model.fit(x_data, y_data)
# 4. 평가 예측
x_test = [[0,0], [1,0], [0,1], [1,1]]
y_predict = model.predict(x_test)
acc = accuracy_score([0,1,1,0], y_predict)
print(x_test, "의 예측 결과 : ", y_predict)
print("acc = ", acc)
#
| [
"noreply@github.com"
] | ChaeMyungSeock.noreply@github.com |
1c14ebd975783fd80e99878abf489860ea98e91d | c6c3648880485656bb7c349f330378d8fb224192 | /P023.py | 8a3cc9c50e5a843f8918ce5251331b29a1ee841b | [] | no_license | erdos2n/ProjectEuler | 9416fa6ba2f83f2275f5ebfcbd41f5d83d23b19e | a85f6a102b98bae4e227aac55f4d77f92d18a5bc | refs/heads/master | 2021-09-21T01:47:10.601050 | 2018-08-18T18:50:28 | 2018-08-18T18:50:28 | 124,658,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,969 | py | """
A perfect number is a number for which the sum of its proper divisors is exactly equal to the number.
For example, the sum of the proper divisors of 28 would be 1 + 2 + 4 + 7 + 14 = 28, which means that 28 is a perfect number.
A number n is called deficient if the sum of its proper divisors is less than n and it is called abundant if this sum exceeds n.
As 12 is the smallest abundant number, 1 + 2 + 3 + 4 + 6 = 16, the smallest number that can be written as the sum of two abundant numbers is 24.
By mathematical analysis, it can be shown that all integers greater than 28123 can be written as the sum of two abundant numbers.
However, this upper limit cannot be reduced any further by analysis even though it is known that the
greatest number that cannot be expressed as the sum of two abundant numbers is less than this limit.
Find the sum of all the positive integers which cannot be written as the sum of two abundant numbers.
"""
from time import time
from used_functions import isDeficient, isAbundant
from itertools import product, permutations, combinations_with_replacement
"""
Below you will see all of the code I used to test different methods. I left them here, because learning is fun!
"""
def get_non_abundant_list(n)->list:
    """Return every deficient ("non-abundant") number in [1, n]."""
    return [number for number in range(1, n + 1) if isDeficient(number)]
def get_abundant_list(n)->list:
    """Return every abundant number in [1, n]."""
    return [number for number in range(1, n + 1) if isAbundant(number)]
def sum_abundant_number(n):
    """Sum of every value <= n expressible as the sum of two abundant numbers.

    Side effect: prints the set of reachable sums (debug output kept from
    the original implementation).
    """
    pairs_list = get_abundant_list(n)
    check_list = {sum(p) for p in combinations_with_replacement(pairs_list, 2)
                  if sum(p) <= n}
    print(check_list)
    return sum(check_list)
def sum_non_abundant_number(n):
    """Return the sum of all deficient numbers in [1, n]."""
    return sum(number for number in range(1, n + 1) if isDeficient(number))
def sum_pairs_non_abundant(n):
    """Debug helper: print every value <= n reachable as the sum of two
    deficient numbers.  Always returns None (print is the only effect)."""
    pairs_list = get_non_abundant_list(n)
    reachable = {sum(p) for p in combinations_with_replacement(pairs_list, 2)
                 if sum(p) <= n}
    print(reachable)
    return None
def get_non_abundant_sum(n):
    """Total of 1..n minus the abundant-pair-reachable total.

    Side effect: prints both totals (kept from the original).
    """
    grand_total = sum(range(1, n + 1))
    reachable_total = sum_abundant_number(n)
    print(grand_total, reachable_total)
    return grand_total - reachable_total
def get_abundant_sum_final(n):
    """Project Euler 23: sum of positive integers that are NOT expressible
    as the sum of two abundant numbers.

    Starts from the full set and removes every reachable pair sum.
    NOTE: `range(1, n)` excludes n itself, matching the original behavior.
    """
    total_set = set(range(1, n))
    abundant_pairs = get_abundant_list(n)
    for p in combinations_with_replacement(abundant_pairs, 2):
        s = sum(p)
        if s <= n:
            # discard() is a no-op when s was already removed -- replaces
            # the original try/except KeyError around set.remove().
            total_set.discard(s)
    return sum(total_set)
if __name__ == "__main__":
    # Time the final computation for the Project Euler limit 28123.
    start = time()
    print(get_abundant_sum_final(28123))
    # Bug fix: the original printed `start - time()`, i.e. a NEGATIVE
    # elapsed time; report `time() - start` instead.
    print(time() - start)
| [
"rafacarrasco07@gmail.com"
] | rafacarrasco07@gmail.com |
353fbe7250bf1beac4646624a021763b5c94b92a | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Projects/Learn/PyCharm/Algorithmic Toolbox/Algorithmic Warm Up/Last Digit of the Sum of Fibonacci Numbers/last_digit_of_the_sum_of_fibonacci_numbers_unit_tests.py | bb3384e3158b2445f6adca669ed4c4fac09f64be | [
"LicenseRef-scancode-other-permissive"
] | permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 128 | py | version https://git-lfs.github.com/spec/v1
oid sha256:a6a99b9bfea384a8802695a7f6eafeab6ae6e1cd091ebf62c01e6e6c0ecac93e
size 662
| [
"nateweiler84@gmail.com"
] | nateweiler84@gmail.com |
64af56b098dae04854baf9a815e82476fa55b8f7 | 029af1d18d80d285866dbd07e96395377581f7ae | /ivizier/ivizier/urls.py | 88a4bda3958eba8924d80c65d4f08a3b642fa588 | [] | no_license | MihaiBuica/iVizier-Project | 83acc3d4e500beee679feea6585580917b7fcc8f | bfa171aa55593c7da29e42ffcbb3d80be9ae6116 | refs/heads/master | 2022-07-30T19:16:30.884193 | 2020-05-20T16:49:56 | 2020-05-20T16:49:56 | 254,064,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,494 | py | """ivizier URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.urls import path, include
from users import views as user_views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
    path('admin/', admin.site.urls),
    # Account management: register/profile are custom views; login/logout
    # reuse Django's auth views with app-specific templates.
    path('register/', user_views.register, name='register'),
    path('login/', auth_views.LoginView.as_view(template_name='users/login.html'), name='login'),
    path('logout/', auth_views.LogoutView.as_view(template_name='users/logout.html'), name='logout'),
    path('profile/', user_views.profile, name='profile'),
    # path('add-post/', auth_views.LoginView.as_view(template_name='avizier/add-post.html'), name='add-post'),
    # Everything else is delegated to the avizier app's URLconf.
    path('', include('avizier.urls')),
]
# Serve user-uploaded media through Django itself -- development only.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
"mihaibuica38@gmail.com"
] | mihaibuica38@gmail.com |
719e65e5b37c1e0e4ccf0d70594462115a66a15d | 72141a527aeca59b2afb68d84f027ecb475a0142 | /eksamenOving/proveEksamen.py | 159cd812aa342ae1424e20ab951273555d68c71d | [] | no_license | borgebj/IN1000-Python | 4d4a102032b6d15887d33a889dd029fea500ea1e | ffa686d11116d315dca7962e7a6c3e3767e0d720 | refs/heads/master | 2021-01-04T04:45:00.223550 | 2020-12-09T20:06:09 | 2020-12-09T20:06:09 | 240,392,581 | 0 | 0 | null | 2020-12-09T20:06:10 | 2020-02-14T00:00:45 | Python | UTF-8 | Python | false | false | 6,385 | py | print("------------------------------------------------------------------------------")
#oppg 1: (3+1)*2 = 8, then 8-5 = 3 -> prints 3
tall = (3+1) * 2
tall = tall - 5
print("oppg 1 - tall:", tall)
print("------------------------------------------------------------------------------")
# oppg 2: tall=7 is neither >10 nor <5, so the else branch runs -> "ad"
tall = 7
tekst = "a"
if tall>10:
    tekst = tekst + "b"
elif tall<5:
    tekst = tekst + "c"
else:
    tekst = tekst + "d"
print("oppg 2 - tekst:", tekst)
print("------------------------------------------------------------------------------")
#oppg 3: a evolves 0 -> 2 -> 2*2+4=8 -> 2*8+1=17 -> prints 17
a = 0
for b in [2,4,1]:
    a = 2*a + b
print("oppg 3 - for:", a)
print("------------------------------------------------------------------------------")
#oppg 4: tallene becomes [1, 2, 4, 8]; 1 + 8 = 9 is printed
tallene = [ ]
a = 0
b = 1
while a<4:
    tallene.append(b)
    b = b*2
    a = a+1
print("oppg 4 - while:", tallene[0] + tallene[3])
print("------------------------------------------------------------------------------")
#oppg 5
def kalkuler(tall):
    """Add one to *tall*, then double the result."""
    return (tall + 1) * 2
print("oppg 5 - kalkulator:", kalkuler(2) + kalkuler(4))
print("------------------------------------------------------------------------------")
#oppg 6
class Tall:
    """Wraps one number; m1/m2 mutate it, m3 reads it."""

    def __init__(self, a):
        # Internal value, only reachable through the m* methods.
        self._a = a

    def m1(self, c):
        """Increase the stored value by *c*."""
        self._a += c

    def m2(self):
        """Double the stored value."""
        self._a *= 2

    def m3(self):
        """Return the stored value plus ten (no mutation)."""
        return 10 + self._a
t1 = Tall(5)
t2 = Tall(2)
t1.m2()
t2.m1(t1.m3())
print("oppg 6 - tall:", t2.m3())
print("------------------------------------------------------------------------------")
#oppg 7
class Person:
    """A person with a name and a mutable age.

    Norwegian method names kept: bursdag = birthday, hentAlder = get age,
    settAlder = set age.
    """

    def __init__(self, navn, alder):
        self._navn = navn      # name; not read by the methods below
        self._alder = alder    # current age in years

    def bursdag(self):
        """Celebrate a birthday: age increases by one."""
        self._alder = self._alder + 1

    def hentAlder(self):
        """Return the current age."""
        return self._alder

    def settAlder(self, nyAlder):
        """Overwrite the age with *nyAlder*."""
        self._alder = nyAlder
# oppg 7: `trener` is an ALIAS for the same Person object, so the birthday
# mutates `far` too -> prints 49
far = Person("Gjert", 48)
trener = far
trener.bursdag()
print("oppg 7 - klasse:", far.hentAlder())
print("------------------------------------------------------------------------------")
#oppg 8: same aliasing -- settAlder through `trener` changes `far` -> 60
far = Person("Gjert", 48)
trener = far
trener.settAlder(60)
print("oppg 8 - alder:", far.hentAlder() )
print("------------------------------------------------------------------------------")
#oppg 9: bursdag() runs while trener still aliases far (48 -> 49); the later
# rebinding of `trener` to a new Person does NOT affect `far` -> prints 49
far = Person("Gjert", 48)
trener = far
trener.bursdag()
trener = Person("Tone", 60)
print("oppg 9 - alder:", far.hentAlder() )
print("------------------------------------------------------------------------------")
#oppgave 10
def feiring(p):
    # Mutates the Person passed in -- the caller's object ages by one year.
    p.bursdag()
far = Person("Gjert", 48)
feiring(far)
print("oppg 10 - alder:", far.hentAlder())
print("------------------------------------------------------------------------------")
#oppgave 11
def vinnerlag(hjemmelag, bortelag, hjemmemaal, bortemaal):
    """Return the winning team's name, or "uavgjort" (draw) on equal scores."""
    if hjemmemaal == bortemaal:
        return "uavgjort"
    return hjemmelag if hjemmemaal > bortemaal else bortelag
print("oppg 11 - vinnerlag:", vinnerlag("Brann", "Molde", 2, 3),"og", vinnerlag("Brann", "Molde", 2, 2))
print("------------------------------------------------------------------------------")
def forkort_lagliste(lagliste):
    """Return *lagliste* with duplicates removed.

    Uses dict.fromkeys so the result is deterministic and keeps first-seen
    order; the original `list(set(...))` returned an arbitrary,
    hash-dependent order.
    """
    return list(dict.fromkeys(lagliste))
print("oppg 12 - forkort:", forkort_lagliste(["Molde", "Sarpsborg", "Molde", "Brann"]))
print("------------------------------------------------------------------------------")
#oppgave 13
def legg_inn_null_maal(lagliste):
    """Build a scoreboard dict mapping each team in *lagliste* to 0 points."""
    return {team: 0 for team in lagliste}
print("oppg 13 - null_maal:", legg_inn_null_maal(["Brann", "Molde", "Sarpsborg", "Molde", "Brann"]))
print("------------------------------------------------------------------------------")
#oppgave 14
def ekstraher_lagliste(fn):
    """Read match-result file *fn* and return every team name in file order.

    Each line is "hometeam awayteam homegoals awaygoals"; the first two
    space-separated fields of every line are collected (duplicates kept).
    The file is opened via a with-statement so it is closed even when a
    malformed line raises.
    """
    lagnavn = []
    with open(fn) as fil:
        for line in fil:
            biter = line.split(" ")
            lagnavn.append(biter[0])
            lagnavn.append(biter[1])
    return lagnavn
print("oppg 14 - ekstraher:", ekstraher_lagliste("lagliste2.txt"))
print("------------------------------------------------------------------------------")
#oppgave 15
def regn_poengsum(fn):
    """Compute the league table for the match-result file *fn*.

    Returns a dict mapping team name -> points (3 for a win, 1 each for a
    draw, 0 for a loss).
    """
    # All team names in file order (with duplicates) ...
    liste = ekstraher_lagliste(fn)
    # ... reduced to the unique names ...
    mengde = forkort_lagliste(liste)
    # ... and turned into a scoreboard starting at 0 points per team.
    ordbok = legg_inn_null_maal(mengde)
    with open(fn) as fil:
        for x in fil:
            biter = x.split()
            hjemmelag = biter[0]
            bortelag = biter[1]
            # Bug fix: compare goals as integers.  The original passed the
            # raw strings to vinnerlag, so scores compared lexicographically
            # (e.g. "10" < "9") and two-digit scores picked the wrong winner.
            hjemmemaal = int(biter[2])
            bortemaal = int(biter[3])
            vinner = vinnerlag(hjemmelag, bortelag, hjemmemaal, bortemaal)
            if vinner == "uavgjort":
                ordbok[hjemmelag] += 1
                ordbok[bortelag] += 1
            elif vinner == hjemmelag:
                ordbok[hjemmelag] += 3
            elif vinner == bortelag:
                ordbok[bortelag] += 3
    return ordbok
print("------------------------------------------------------------------------------")
#oppgave 16
def gull(lagoversikt):
    """Return the team with the most points in scoreboard dict *lagoversikt*.

    Ties break by iteration (insertion) order -- the first team holding the
    maximum wins, exactly as the original loop did.  Bug fix: the original
    raised UnboundLocalError when no team had a strictly positive score,
    because the winner variable was only assigned for `poeng > 0`.
    """
    return max(lagoversikt, key=lagoversikt.get)
print("oppg 16 - gull:", gull({"Brann":2, "Molde":3, "Sarpsborg":1}))
print("------------------------------------------------------------------------------")
#oppgave 17
def finn_gull(fn):
    # Convenience wrapper: compute the table for file *fn* and print the
    # champion ("gull" = gold).
    print("Navn paa vinnerlag:", gull(regn_poengsum(fn)))
finn_gull("lagliste2.txt")
print("------------------------------------------------------------------------------")
#oppgave 25
def godkjenn(alder):
    """Approve iff every family contains at least one adult (age >= 18).

    *alder* is a list of families, each a list of ages.  The original only
    inspected alder[0] and alder[1]; this generalizes to any number of
    families while behaving identically for the two-family case.  An empty
    list of families is vacuously approved (the original raised IndexError).
    """
    return all(any(age >= 18 for age in family) for family in alder)
print("oppg 25 T1 - myndigperson (har begge familie myndig person?):", godkjenn([[10,2,30],[20,1]]))
print("oppg 25 T2 - myndigperson (har begge familie myndig person?):", godkjenn([[10,2,30],[10,1]]))
print("------------------------------------------------------------------------------")
| [
"57920815+borgebj@users.noreply.github.com"
] | 57920815+borgebj@users.noreply.github.com |
6c46999ddcfe4f6028d29dcdd2d2bb61c6a59501 | 650461f8804d7bd3c3f76d53a4f0b203b6f2788e | /PPool/__init__.py | 4e1a8022187ec02b92b5b21a70840fca3ef4427d | [
"Apache-2.0"
] | permissive | oeg-upm/PPool | 839953798f27249d3c9b492adc313afd9f2160c4 | 1c5557c37d86b5c22179b2204d68e7256d2a5c08 | refs/heads/master | 2020-03-25T02:56:28.305760 | 2018-09-26T07:04:28 | 2018-09-26T07:04:28 | 143,314,569 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15 | py | name = "PPool"
| [
"ahmad88me@gmail.com"
] | ahmad88me@gmail.com |
9d0fdf93f542ddba324d792686a717531390d744 | 781fe896b6b326d7b239ba4671d9fe8751b2a886 | /Project_unsorted/Project/test.py | 62a1e17113173cb967d0904d9d80d86d28c0dc35 | [] | no_license | Sra1chandra/DIP | ee123c1ac1baa3dad2f876e0ff5d369433e98d35 | 383d3477f8191ffb3d22adbf701b6ae453b92570 | refs/heads/master | 2021-08-29T01:27:47.561994 | 2017-12-13T08:51:52 | 2017-12-13T08:51:52 | 114,097,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,242 | py | import cv2
import numpy as np
import sys
from matplotlib import pyplot as plt
filename = "./samples/image1.jpg"
image = cv2.imread(filename)
image_hsv = cv2.cvtColor(image,cv2.COLOR_BGR2LUV)
cv2.imshow('img',image);
dst = image;
#decl (input,sp,sr,out,max_level)s
#sp - spatial window radius , sr = color window radius
cv2.pyrMeanShiftFiltering(image,30,20,dst,3)
cv2.imshow('img2',dst);
cv2.imwrite('im1.jpg',dst);
med = cv2.medianBlur(dst,5);
img_hsv = cv2.cvtColor(dst,cv2.COLOR_BGR2HSV)
cv2.imshow('img3',img_hsv)
cv2.imwrite('im2.jpg',img_hsv);
k = cv2.waitKey(0);
if(k==27):
cv2.destroyAllWindows()
color = ('b','g','r')
for i,col in enumerate(color):
histr = cv2.calcHist([img_hsv],[i],None,[256],[0,256])
plt.plot(histr,color = col)
plt.xlim([0,256])
plt.show()
# identify regions
# b = int(med[:][:][0]);
# g = int(med[:][:][1]);
# r = int(med[:][:][2])
#
# h = int(img_hsv[:][:][0])
# s = int(img_hsv[:][:][1])
# v = int(img_hsv[:][:][2])
height,width,channel = dst.shape;
temp = [['a' for x in range(width)] for y in range(height)]
count = 0;
for i in range(0,height):
for j in range(0,width):
# if(img_hsv[i][j][2]>150 and (abs(int(med[i][j][0])-int(med[i][j][1]))<=30) and (abs(int(med[i][j][1])-int(med[i][j][2]))<=30)):
# temp[i][j]='s';
if(img_hsv[i][j][2]>160 and (med[i][j][0]>=160 and med[i][j][0]<=255) and (med[i][j][1]>=70 and med[i][j][1]<=255) and (med[i][j][2]>=0) and(med[i][j][0]+15>=med[i][j][1] and med[i][j][0]+15>=med[i][j][2])):
temp[i][j] = 's'
elif(img_hsv[i][j][2]>110 and ( med[i][j][0]<=100) and ( med[i][j][1]<=255) and (med[i][j][2]>=100) and(med[i][j][2]>=med[i][j][0] and med[i][j][2]>=med[i][j][1])):
temp[i][j]='m'
elif(img_hsv[i][j][2]>30 and img_hsv[i][j][2]<170 and ( med[i][j][0]<=120) and ( med[i][j][1]<=120) and (med[i][j][2]<=120) and(med[i][j][1]>=med[i][j][0] and med[i][j][1]>=med[i][j][2])):
temp[i][j]='m'
elif(img_hsv[i][j][2]>100 and ( med[i][j][0]<=100) and ( med[i][j][1]<=255) and (med[i][j][2]<=200) and(med[i][j][1]>=med[i][j][0] and med[i][j][1]>=med[i][j][2])):
temp[i][j]='l'
else:
temp[i][j] = 'o'
# if((int(med[i][j][0])>int(med[i][j][2])) and (int(med[i][j][0])>int(med[i][j][1]))):
# temp[i][j]='s';
# elif(() and ()):
# temp[i][j]='m';
temp2 = med;
for i in range(0,height):
for j in range(0,width):
flag = int(med[i][j][0])>int(med[i][j][1])
#print(temp[i][j],temp2[i][j][0],temp2[i][j][1],temp2[i][j][2],flag);
if(temp[i][j]=='s' and channel ==3):
temp2[i][j][0]=0;
temp2[i][j][1]=0;
temp2[i][j][2]=0;
elif(temp[i][j]=='m' and channel == 3):
temp2[i][j][0]=80
temp2[i][j][1]=80
temp2[i][j][2]=80
elif(temp[i][j]=='l' and channel == 3):
temp2[i][j][0]=200
temp2[i][j][1]=200
temp2[i][j][2]=200
else:
temp2[i][j][0]=255
temp2[i][j][1]=255
temp2[i][j][2]=255
cv2.imshow('img4',temp2);
cv2.imwrite('im3.jpg',temp2);
k = cv2.waitKey(0);
if(k==27):
cv2.destroyAllWindows();
| [
"Sra1chandra@github.com"
] | Sra1chandra@github.com |
7c393120ee51e757a0b0c2bc246dc2a4c934dc23 | 08706df7e3712ebec7afd2d2f8f964ae9d485386 | /server/patients/migrations/0016_attribute_resource.py | 3c95316f5e9d660ee4b386204d0e49c148dcc89e | [] | no_license | nickdotreid/take-on-transplant | 9129c9ab7c1206291fc1ca616c18c44cd7519587 | bf901b987121093787383f3d3726f87dddf4d5fd | refs/heads/master | 2023-08-27T06:14:54.521168 | 2021-11-02T21:41:04 | 2021-11-02T21:41:04 | 298,403,103 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 585 | py | # Generated by Django 3.1.1 on 2020-11-24 02:34
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django.  Adds a nullable FK from patients.Attribute
    # to resources.Resource; SET_NULL on delete, and related_name='+'
    # suppresses the reverse accessor on Resource.

    dependencies = [
        ('resources', '0006_auto_20201120_1722'),
        ('patients', '0015_issue_posttransplantissue_pretransplantissue'),
    ]

    operations = [
        migrations.AddField(
            model_name='attribute',
            name='resource',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='resources.resource'),
        ),
    ]
| [
"nickreid@nickreid.com"
] | nickreid@nickreid.com |
05feea5e67d117d41e529187d1990f74c15afed1 | 14eb90d0b38772cfef35fe167efeb668515cbe9c | /Tests.py | 2061128cd1f66808046003549606d71ffe40fda9 | [] | no_license | avestuk/udacity-course | fd2f0daad4822f4d85058b73e9765526b1b684c4 | 076cc5a513507c6cbf5098e06318acb80ce1dff0 | refs/heads/master | 2021-01-20T04:42:21.430507 | 2017-05-16T19:19:10 | 2017-05-16T19:19:10 | 89,717,727 | 0 | 0 | null | 2017-05-16T19:19:11 | 2017-04-28T15:10:30 | Python | UTF-8 | Python | false | false | 1,377 | py | # A list is symmetric if the first row is the same as the first column,
# the second row is the same as the second column and so on. Write a
# procedure, symmetric, which takes a list as input, and returns the
# boolean True if the list is symmetric and False if it is not.
def symmetric(grid):
#Need to check the first row and column at the same time. Then the second row and second column
numberofRows = len(grid) #Take the length of the list aka the number of rows
numberofColumns = len(grid[0]) #Check the number of columns
if not (numberofColumns == numberofRows):
return False
i = 0
while i < numberofRows:
j = 0
while j < numberofRows:
if grid[i][j] == grid[j][i]:
j += 1
else:
return False
i += 1
return True
print(symmetric([1,2,3]))
#>>> True
#print symmetric([["cat", "dog", "fish"],
# ["dog", "dog", "fish"],
# ["fish", "fish", "cat"]])
#>>> True
print(symmetric([["cat", "dog", "fish"],
["dog", "dog", "dog"],
["fish","fish","cat"]]))
#>>> False
#print symmetric([[1, 2],
# [2, 1]])
#>>> True
#print symmetric([[1, 2, 3, 4],
# [2, 3, 4, 5],
# [3, 4, 5, 6]])
#>>> False
#print symmetric([[1,2,3],
# [2,3,1]])
#>>> False | [
"avestuk@gmail.com"
] | avestuk@gmail.com |
8925a7612fb319aa6542ba12246fe2662fc4ef14 | d22e88db71d7c41764a4573a33fe52024d98216a | /basic command files/command_options.py | 60aad582c27282efb72e6963710bdfd178db075f | [
"MIT"
] | permissive | MWeber313/python-logger-cli | 115a12ed06c094a0d5cdccb4692d1276a169aeb0 | d972dae3eeb7552be0f12164804de0595bcd4c28 | refs/heads/main | 2023-01-03T21:37:00.254620 | 2020-10-28T06:38:47 | 2020-10-28T06:38:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | # This file will be base commands for public users
print(
'''
-Write: Write a new public log
-Read: Read a public log
-Edit: Edit a public log
-Delete: Delete a public log
-Help: More information
'''
) | [
"mack.webb37@gmail.com"
] | mack.webb37@gmail.com |
ed3e5f5a1a5fb3d8133fc50f65171f11860f8d91 | 14b14a8fd77387d67b73cebe8cbefb1657e909cc | /python_scripts/tally_csv_ballots.py | a0454219bd5d3bfbf4726683fc1a3b7392d54aae | [] | no_license | pkeane/stvtools | 1cbb30b6e7823842bd90e64aed05307a2484be3a | 741adcd460dfc8909a2bced5389c963d5a45aac0 | refs/heads/master | 2021-01-25T05:15:34.936295 | 2012-04-18T02:11:32 | 2012-04-18T02:11:32 | 600,134 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,901 | py | import csv
import json
import sys
import stvtools
from operator import itemgetter, attrgetter
from random import randint
"""
converts from cvs ballots as columns to ballots as json
"""
CONFIG = {}
CONFIG['initial_ballot_value'] = 100
CONFIG['minimum_full_professors'] = 0
def do_tally(ballot_data):
    """Run one STV tally over parsed CSV rows and return the step logs.

    ballot_data rows are either config rows ('seats'/'voters'/'candidates'
    with the value in column 1) or preference rows, where column j holds
    voter j+1's choice at that ranking position.
    Mutates the module-level CONFIG with the parsed settings.
    """
    row_ballots = {}
    for place in ballot_data:
        if place[0] == 'seats':
            CONFIG['seats'] = int(place[1])
        elif place[0] == 'voters':
            CONFIG['voters'] = place[1]
        elif place[0] == 'candidates':
            CONFIG['candidates'] = place[1]
        else:
            # Transpose: column j of each preference row belongs to voter j+1.
            voter = 0
            for vote in place:
                voter += 1
                key = "v" + str(voter)
                # Bug fix: dict.has_key() was removed in Python 3; the `in`
                # operator works on both Python 2 and 3.
                if key not in row_ballots:
                    row_ballots[key] = []
                row_ballots[key].append(vote)
    ballots = []
    for i in row_ballots:
        b = {}
        b['data'] = row_ballots[i]
        # NOTE(review): 100 duplicates CONFIG['initial_ballot_value'].
        b['value'] = 100
        ballots.append(b)
    candidates = {}
    for n in range(1, int(CONFIG['candidates']) + 1):
        full = True
        eid = 'c' + str(n)
        c = stvtools.StvCandidate(eid, eid, full, [], 0)
        candidates['c' + str(n)] = c
    droop = stvtools.calculate_droop(len(ballots), CONFIG['seats'], CONFIG['initial_ballot_value'])
    logs = []
    committee = []
    (ballots, candidates, committee, logs) = stvtools.run_step(ballots, candidates, committee, CONFIG, droop, logs)
    return logs
if __name__ == "__main__":
    # Expect the ballots CSV path as the first command-line argument.
    # Bug fix: `if sys.argv[1]:` raised IndexError when no argument was
    # given; test the length instead.
    if len(sys.argv) > 1:
        filename = sys.argv[1]
        # `with` ensures the file is closed (the original leaked the handle).
        with open(filename) as csvfile:
            ballot_data = [row for row in csv.reader(csvfile)]
        was_elected = {}
        # Repeat the tally many times and count how often each candidate
        # lands in the final committee -- presumably because tie-breaking
        # involves randomness (randint is imported above); confirm.
        for i in range(500):
            result = do_tally(ballot_data)
            last = result.pop()
            for cand in last['committee']:
                if cand.eid in was_elected:
                    was_elected[cand.eid] += 1
                else:
                    was_elected[cand.eid] = 1
        sorted_elected = sorted(was_elected.items(), key=itemgetter(1), reverse=True)
        for tup in sorted_elected:
            print(tup[0] + ' (' + str(tup[1]) + ')')
| [
"pkeane@mail.utexas.edu"
] | pkeane@mail.utexas.edu |
0598cc55bb3cc9cd48235f6dee023526aede8599 | a00ed711e3e08b50ad6e91cc07a2cddc4a1de5ea | /airflow/migrations/versions/0075_2_0_0_add_description_field_to_connection.py | 4c3f5835dcbfdf9b443396cbcceb764f421fbf89 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ishiis/airflow | 4305794e36b611d01f49e3f2401be3dc49782670 | 292440d54f4db84aaf0c5a98cf5fcf34303f2fa8 | refs/heads/master | 2022-07-30T00:51:28.806940 | 2022-07-14T12:07:11 | 2022-07-14T12:07:11 | 209,801,072 | 1 | 0 | Apache-2.0 | 2019-09-20T13:47:26 | 2019-09-20T13:47:26 | null | UTF-8 | Python | false | false | 2,008 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add description field to ``connection`` table
Revision ID: 61ec73d9401f
Revises: 2c6edca13270
Create Date: 2020-09-10 14:56:30.279248
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '61ec73d9401f'
down_revision = '2c6edca13270'
branch_labels = None
depends_on = None
airflow_version = '2.0.0'
def upgrade():
    """Apply Add description field to ``connection`` table"""
    conn = op.get_bind()
    # batch_alter_table so the change also works on SQLite, which cannot
    # alter columns in place.
    with op.batch_alter_table('connection') as batch_op:
        if conn.dialect.name == "mysql":
            # Handles case where on mysql with utf8mb4 this would exceed the size of row
            # We have to set text type in this migration even if originally it was string
            # This is permanently fixed in the follow-up migration 64a7d6477aae
            batch_op.add_column(sa.Column('description', sa.Text(length=5000), nullable=True))
        else:
            batch_op.add_column(sa.Column('description', sa.String(length=5000), nullable=True))
def downgrade():
    """Unapply Add description field to ``connection`` table"""
    # Dropping the column discards any stored descriptions irreversibly.
    with op.batch_alter_table('connection', schema=None) as batch_op:
        batch_op.drop_column('description')
| [
"noreply@github.com"
] | ishiis.noreply@github.com |
f4959530fd1e9ac59ea85fce71f6bf2009276c3a | 47569e02708e101b23fdad3f066939a2bc2894ce | /videodetect.py | e35edf42e4a237524ca7724e02405a3e23d84148 | [] | no_license | jyz5257/Moving-Vehicle-Classification-HOG | e3e011eec981dace7f8706cf08838384d3a6d69a | 34833b85d0d350e4dc5b3c85eb29060be858056b | refs/heads/master | 2021-04-24T21:13:26.558661 | 2018-01-24T04:05:37 | 2018-01-24T04:05:37 | 116,738,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,394 | py | import cv2
import numpy as np
from skimage import color
from skimage.feature import hog
from sklearn.externals import joblib
import imutils
import urllib
#BG Subtraction, convert to grayscale, and threshold image
def bgsubtract(bg, car):
    # Absolute-difference background subtraction: pixels differing from the
    # static background by more than 20 gray levels become foreground (255).
    imgAbsdiff = cv2.absdiff(bg, car)
    imgGray = cv2.cvtColor(imgAbsdiff, cv2.COLOR_BGR2GRAY)
    ret1, thres = cv2.threshold(imgGray, 20, 255, cv2.THRESH_BINARY)
    return thres
# count the neighborhood foreground and background pixels
def check_pix(img, p):
    """Count foreground (255) and background (0) pixels among the eight
    neighbors of pixel p = (row, col) in binary image *img*.

    Returns (QF, QB); the center pixel itself is excluded, and pixels with
    any other value count toward neither tally.  Assumes p is an interior
    pixel so the 3x3 window is complete (callers only pass interior pixels).
    """
    win = img[p[0]-1:p[0]+2, p[1]-1:p[1]+2]
    # Vectorized replacement of the original 3x3 double loop.
    QF = int((win == 255).sum())
    QB = int((win == 0).sum())
    centre = win[1, 1]
    if centre == 255:
        QF -= 1
    if centre == 0:
        QB -= 1
    return QF, QB
# Foreground-adaptive cleanup of a binary mask.
def fgbs(img):
    # Each interior pixel is re-labelled from how many of its neighbors are
    # foreground (QF) vs background (QB): with gamma=1, v = 1.2 * e^(QF-QB),
    # so a foreground majority forces 255 and a background majority forces 0.
    # NOTE(review): img is modified IN PLACE while being scanned, so later
    # pixels see already-updated earlier neighbors -- the result depends on
    # scan order.  This makes a restructure unsafe; logic kept verbatim.
    s = img.shape
    for i in range(1,s[0]-1):
        for j in range(1,s[1]-1):
            p = (i,j)
            QF,QB = check_pix(img,p)
            gamma = 1
            theta = 1.2
            v = theta * np.exp((QF-QB)/gamma)
            if v > 1:
                img[i,j] = 255
            if v < 1:
                img[i,j] = 0
    return img
#make bounding boxes
def box(img):
    # Returns parallel lists (x, y, w, h) of bounding rectangles, one per
    # detected contour.  With zero contours all four lists are empty.
    image, cnts, _ = cv2.findContours(img,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
    # NOTE(review): cnts[0] is deliberately skipped (the loop reads
    # cnts[i+1]) -- presumably the outermost/frame contour; confirm.
    x = [0]*(len(cnts)-1)
    y = [0]*(len(cnts)-1)
    w = [0]*(len(cnts)-1)
    h = [0]*(len(cnts)-1)
    for i in range(0,(len(cnts)-1)):
        x[i],y[i],w[i],h[i] = cv2.boundingRect(cnts[i+1])
    return x,y,w,h
#import video stream
video = 'video/output2.avi'
c = cv2.VideoCapture(video)
_,f = c.read()
#HOG Parameters
orientations = 9
pixels_per_cell = [4, 4]
cells_per_block = [2, 2]
visualize = False
normalize = True
# load svm model
clf = joblib.load('svm_linearmodel.pkl')
# open a video writer using opencv
fourcc = cv2.VideoWriter_fourcc('M','J','P','G')
out = cv2.VideoWriter('output.avi', fourcc, 30.0, (320,240))
# Main loop: background-subtract each frame, classify each moving blob with
# the HOG + SVM model, and draw a colored rectangle per detection.
while True:
    _,f = c.read()
    # Crop away the camera's on-screen frame/timestamp band (rows 0..43).
    crop_f = f[44:224, 0:320]
    # read the background image and remove the camera frame
    # NOTE(review): the background is re-read from disk on EVERY frame;
    # hoisting this above the loop would avoid the repeated file I/O.
    imgbg = cv2.imread('background.png')
    crop_bg = imgbg[44:224, 0:320]
    thres = bgsubtract(crop_bg,crop_f)
    (x,y,w,h) = box(thres)
    for i in range(0,len(x)):
        # Ignore tiny blobs (noise): must be wider than 20 and taller than 15.
        if w[i]>20 and h[i]>15:
            # Re-crop from the full frame: y-coordinates are offset by the
            # 44 rows removed above.
            window = f[(y[i]+44):(y[i]+44+h[i]), x[i]:(x[i]+w[i])]
            window = color.rgb2gray(window)
            img1 = cv2.resize(window,(64,48))
            # examine the hog feature
            fd = hog(img1, orientations, pixels_per_cell, cells_per_block, visualize, normalize)
            fd = fd.reshape((1,-1))
            # predict the feature label
            pred = clf.predict(fd)
            if pred == 0:
                # NOTE(review): a blob with w[i] == 180 exactly hits neither
                # branch below, so it gets no rectangle -- likely unintended.
                if w[i]<180:
                    cv2.rectangle(f,(x[i],y[i]+44),(x[i]+w[i],y[i]+44+h[i]),(255,255,0),1)
                if w[i] >180:
                    cv2.rectangle(f,(x[i],y[i]+44),(x[i]+w[i],y[i]+44+h[i]),(0,0,255),1)
            if pred == 1:
                cv2.rectangle(f,(x[i],y[i]+44),(x[i]+w[i],y[i]+44+h[i]),(0,0,255),1)
    # write the video
    out.write(f)
    cv2.imshow('img',f)
    # Exit when the user presses ESC (key code 27).
    k = cv2.waitKey(1)
    if k == 27:
        break
cv2.destroyAllWindows()
c.release()
| [
"jyz5257@bu.edu"
] | jyz5257@bu.edu |
c8f3a1b7d233400574fead87d6325c975218ca91 | 6cde1148cc1ba6c1d704f64603a292440c73caac | /uploadfile_demo/front/forms.py | 00274f0e1b4e306ad5c543edc2885058b1b7ef00 | [] | no_license | huanshenyi/django_text | 09f33be02f0f9da33320af9b82e947d4ebed86ee | 536e2eb329faf57edfb30f2299f4ba64592e6b78 | refs/heads/master | 2020-04-24T14:44:10.707414 | 2019-03-15T06:08:57 | 2019-03-15T06:08:57 | 172,033,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | from django import forms
from .models import Article
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = "__all__"
error_messages = {
'thumbnail': {
'invalid_image': '違う'
}
} | [
"noreply@github.com"
] | huanshenyi.noreply@github.com |
45b2ac47de0843f838c204ec567060d9ff0e417f | 76fbd18e4b7001bb9d923e814d5d3d020f746e2c | /test_strategy_bubble.py | 08cf43aae520277068232febd512a6297c858bc5 | [] | no_license | nanka-tukuru/pybubbly | 1f2e3b695c19d0376123b537d2d514a3d4df2c96 | 71f46d141570e2d8cecc57eba4617022cd7aff24 | refs/heads/main | 2023-06-11T18:06:55.041665 | 2021-07-06T03:50:33 | 2021-07-06T03:50:33 | 376,425,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | import unittest
from strategy_bubble import *
class TestBubbleSort(unittest.TestCase):
    """Bubble sort test.
    """
    def test_case1(self):
        # A fully reversed list is the classic worst case for bubble sort.
        srclist = [10,9,8,7,6,5,4,3,2,1,0]
        anslist = [0,1,2,3,4,5,6,7,8,9,10]
        # SortingContext delegates to the injected strategy (Strategy
        # pattern); sort() is expected to reorder srclist in place, which
        # is what the assertion below checks.
        context = SortingContext(BubbleSort())
        context.sort(srclist)
        self.assertListEqual(srclist, anslist)
if __name__ == "__main__":
unittest.main()
| [
"nankatukuru@gmail.com"
] | nankatukuru@gmail.com |
611c1d6a034fe6d7889e0e761e2448f55d236f54 | 5c547466bd3b8cb49cdabbf35b2d1cbb6923ccbd | /scripts/actions_ec2.py | 5ca6b4d06c55aa090c4a2ea86d89f0649eb7ae7b | [
"MIT"
] | permissive | skoch0013/aws_scripts | 8a835a500d8972482d52c4f1c4fdaed0a8c932bc | bb0f3246477648fbf0c068f2100a5006203c9716 | refs/heads/master | 2021-01-20T21:20:35.415974 | 2017-11-03T15:29:23 | 2017-11-03T15:29:23 | 101,765,789 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,110 | py | import boto3
import time
from botocore.exceptions import ClientError
ec2 = boto3.resource('ec2')
def create_volume(availability_zone, size, snapshot_id, tag_name, tag_value):
    # Create an EBS volume from a snapshot in the given AZ and tag it.
    # Returns the Volume resource, or None (implicitly) when the API call
    # fails -- the ClientError is only printed, not re-raised.
    volume_tag = {"Key": tag_name, "Value": tag_value}
    try:
        ebs = ec2.create_volume(
            AvailabilityZone=availability_zone,
            Size=size,  # size in GiB (EC2 CreateVolume semantics)
            SnapshotId=snapshot_id
        )
        ebs.create_tags(Tags=[volume_tag])
        return ebs
    except ClientError as e:
        print(e)
def attach_volume(instance_id, volume_id, device):
    # Attach an existing EBS volume to an instance under the given device
    # name (e.g. '/dev/sdf').  Nothing is returned; failures are printed.
    res = ec2.Instance(instance_id)
    try:
        res.attach_volume(
            VolumeId=volume_id,
            Device=device
        )
        # NOTE: Python-2-style print statement -- this module is Python 2 code.
        print res
    except ClientError as e:
        print(e)
def create_instance(image_id, instance_type, key_name, security_groups, subnet_id, tag_name, tag_value):
    # Launch exactly one instance (MinCount == MaxCount == 1), tag it and
    # return it.  `security_groups` is a single group id despite the plural
    # name.  Returns None (implicitly) when the launch fails.
    instance_tag = {"Key": tag_name, "Value": tag_value}
    try:
        instance = ec2.create_instances(
            ImageId=image_id,
            MinCount=1,
            MaxCount=1,
            InstanceType=instance_type,
            KeyName=key_name,
            SecurityGroupIds=[security_groups],
            SubnetId=subnet_id
        )
        for i in instance:
            i.create_tags(Tags=[instance_tag])
        # `i` is the last (here: only) instance tagged in the loop above.
        return i
    except ClientError as e:
        print(e)
def list_instances():
    # Print id and state of every instance visible in the account/region.
    try:
        for instance in ec2.instances.all():
            # Python-2-style print.
            print instance.id, instance.state
    except ClientError as e:
        print(e)
def terminate_instance(instance_id):
    # Terminate a single instance by id; the filter yields a collection and
    # terminate() acts on every instance in it (here at most one).
    try:
        instance = ec2.instances.filter(
            InstanceIds=[instance_id]
        ).terminate()
        # Python-2-style print of the raw API response.
        print instance
    except ClientError as e:
        print(e)
def terminate_all_running_instances():
    # NOTE(review): despite the name, this terminates BOTH running and
    # stopped instances -- the state filter explicitly includes 'stopped'.
    try:
        instances = ec2.instances.filter(
            Filters=[{'Name': 'instance-state-name', 'Values': ['running', 'stopped']}]
        )
        for instance in instances:
            instance.terminate()
            # Under Python 2 this prints the pair as a tuple.
            print(instance.id, instance.instance_type)
    except ClientError as e:
        print(e)
def get_volume_id(instance_id):
    # Return the FIRST volume attached to the instance (the loop returns on
    # its first iteration), or None when the instance has no volumes.
    # NOTE(review): despite the name, this returns the whole Volume
    # resource, not just its id.
    inst = ec2.Instance(instance_id)
    volumes = inst.volumes.all()
    for v in volumes:
        return v
def create_snapshot(volume_id):
    # Snapshot the given volume.  Returns the Snapshot resource, or None
    # (implicitly) when the call fails.
    try:
        snapshot = ec2.create_snapshot(
            VolumeId=volume_id,
            Description="test")  # NOTE(review): hard-coded description
        return snapshot
    except ClientError as e:
        print(e)
def create_security_group(security_group_name, description, vpc_id, inbound_rules, outbound_rules, tag_name, tag_value):
    # Create a VPC security group, tag it, then install the given ingress
    # and egress rules (each rule is one IpPermissions dict).
    # NOTE(review): the create call, sleep and tagging sit OUTSIDE the
    # try-block, so a ClientError there propagates to the caller, unlike
    # the rule-installation failures below.
    sg_tag = {"Key": tag_name, "Value": tag_value}
    group = ec2.create_security_group(
        GroupName=security_group_name,
        Description=description,
        VpcId=vpc_id)
    # Fixed 10 s pause -- presumably waiting for the new group to become
    # visible before tagging it (eventual consistency); confirm.
    time.sleep(10)
    group.create_tags(Tags=[sg_tag])
    try:
        for rule in inbound_rules:
            group.authorize_ingress(
                IpPermissions=[rule]
            )
        for rule in outbound_rules:
            group.authorize_egress(
                IpPermissions=[rule]
            )
    except ClientError as e:
        print(e)
    # The group is returned even when some rules failed to install.
    return group
| [
"oksana_ivasenko@epam.com"
] | oksana_ivasenko@epam.com |
f28960625204e475aa39bacb5409466a87f416c2 | a8dbe1a94d039053a0a8288011cc354e4b554280 | /py/FallNode.py | e2d0dfb35be3c1f87c53e2d49f41d69522191f05 | [] | no_license | peymathi/csci437-fsm-game | ab44b76c59a4029c3f7cb74fe99bd69f2b7ccf3d | a20fc34978b1c157e90aed2768947dacb6030d0a | refs/heads/master | 2022-12-11T10:12:35.175746 | 2020-09-12T17:04:42 | 2020-09-12T17:04:42 | 293,584,691 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | from CommonNode import CommonNode
# Temporary node that makes the player take damage first before going on to the nextNode
class FallNode(CommonNode):
    """Transitional room: the player takes fall damage, then play
    continues at the configured next node."""

    def __init__(self, player, message, fallDamage, nextNode):
        super().__init__(player, message)
        # Store the damage amount and where to go once it is applied.
        self._fall_damage = fallDamage
        self._next_node = nextNode

    def _init_room(self):
        # Report the fall, then hand control to the follow-up node.
        print("{} taking {} damage.".format(self._message, self._fall_damage))
        return self._next_node.evaluate()
| [
"pmathis99@comcast.net"
] | pmathis99@comcast.net |
d0c84309a72d66f88cd7cb9b45631e0b3a698382 | cc76054d3ff9b87400169203195ec060ae8c50cc | /salesanalytics/apps.py | 4e277d7ab61cf8bc3e43b9c26e2c32d375c8fa38 | [] | no_license | desertcamel/yamaki | c37436ecfdf1f24082ee3147d37beed6a75401d6 | b0a0ca60ade18c147785bc8998a9fec9653251f4 | refs/heads/master | 2021-09-03T15:56:37.712267 | 2018-01-10T07:58:35 | 2018-01-10T07:58:35 | 116,133,847 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | from django.apps import AppConfig
class SalesanalyticsConfig(AppConfig):
    # Django application configuration for the "salesanalytics" app;
    # `name` is the dotted module path Django uses to locate the app.
    name = 'salesanalytics'
| [
"sandeep@biznessanalytics.com"
] | sandeep@biznessanalytics.com |
6211c7383b32ce57d7debcf4e69de5acc63075ef | 1aa2ac6f0631b6e7e123ad8797f52ab90247aac5 | /utils/reg.py | 09b051826072ab98464dd0826acd4b819ec35157 | [
"Apache-2.0"
] | permissive | ap-conv/ap-net | 4ae79477682650bcca9cabd8326562b4b3136444 | 98a84e45cd6aa90305c78ecab35bdd682f2fb05f | refs/heads/main | 2023-02-08T12:03:32.837092 | 2020-12-29T12:26:35 | 2020-12-29T12:26:35 | 311,905,333 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | import torch
def regularize(*features):
    """Pairwise feature-correlation regularizer.

    Sums ``regularize_2`` over every unordered pair of the supplied
    feature maps.  For two or three inputs this reproduces the original
    hand-written branches exactly (same pairs, same left-to-right
    summation order); it additionally generalizes to any number >= 2 of
    feature maps instead of raising for four or more.

    Raises:
        ValueError: if fewer than two feature maps are supplied.
    """
    from itertools import combinations
    if len(features) < 2:
        raise ValueError("regularize() needs at least two feature maps")
    total = None
    # combinations() yields (1,2), (1,3), (2,3), ... -- the same pair
    # order the explicit two/three-feature branches used.
    for f_a, f_b in combinations(features, 2):
        term = regularize_2(f_a, f_b)
        total = term if total is None else total + term
    return total
def regularize_2(feature_1, feature_2):
    """Mean cross-correlation between two (N, C, H, W) feature maps.

    Flattens each map's spatial dimensions, forms the batched
    channel-by-channel product, and returns the scalar mean of all its
    entries.  Both inputs must share the batch size N and the spatial
    size H * W.
    """
    batch, channels_a = feature_1.shape[0], feature_1.shape[1]
    channels_b = feature_2.shape[1]
    spatial_a = feature_1.shape[2] * feature_1.shape[3]
    spatial_b = feature_2.shape[2] * feature_2.shape[3]
    flat_a = feature_1.view(batch, channels_a, spatial_a)                   # (N, C1, S)
    flat_b = feature_2.view(batch, channels_b, spatial_b).transpose(1, 2)   # (N, S, C2)
    # (N, C1, S) @ (N, S, C2) -> (N, C1, C2); collapse to a scalar mean.
    return torch.matmul(flat_a, flat_b).mean()
| [
"ap-conv@outlook.com"
] | ap-conv@outlook.com |
4ca15c8ceac9905aa994585733bf43914d8b5cf3 | 9c397d709e80ffa8ac06163d96078915106b0a08 | /arith_arranger.py | d70d335105ae35076662d72f4e474c2732bdc11f | [] | no_license | ZichKoding/Arithmetic_Formatter | 33dd478abdc72af142a68e073456df80e3427d6a | e91bd39b444b342368b8707d8537cfcd662a479c | refs/heads/main | 2023-05-13T21:59:14.509629 | 2021-06-07T04:43:41 | 2021-06-07T04:43:41 | 334,397,109 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | import os, sys
import random
from kivy.resources import resource_add_path, resource_find
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import ObjectProperty
from kivy.lang import Builder
from kivy.core.window import Window
from kivy.core.audio import SoundLoader
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.uix.image import Image
Builder.load_file('arith_arranger.kv')
class Arithmetic(Widget):
    # Root widget of the app; its layout is declared in arith_arranger.kv
    # (loaded via Builder.load_file above), so no Python body is needed.
    pass
class ArithmeticArrangerApp(App):
    def build(self):
        # Kivy entry point: return the root widget for the app window.
        return Arithmetic()
if __name__ == '__main__':
ArithmeticArrangerApp().run() | [
"chriszichkocoding@gmail.com"
] | chriszichkocoding@gmail.com |
fa2a624f48a208efd6ce0860be4515dea3a2561b | f515fe4b039590f9ccc034acb27a09c672a1434d | /PyBank/main.py | 032f79468e4dd70b2cb486814e61ca5c8383f2f3 | [] | no_license | JAK-UCF/python-challenge | 14b4be6696bd50b79005e051863958aa16f2ed8a | 0b78d46c47ffd73a3e68422a9ff1ef2f809dd85c | refs/heads/master | 2020-06-06T10:53:49.001486 | 2019-06-22T07:38:47 | 2019-06-22T07:38:47 | 192,720,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,069 | py | # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# WHITEBOARDING FOR UNIT 3 HOMEWORK - PyBank #
# #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# import the bank data
# count the number of months in data set (this will be the number of rows, less 1 for the header)
# calculate the net total for the profit/loss of the entire period (this will be a sum of the total PnL column)
# calculate the average for PnL of the entire period (results of line 9 divided by line results of line 8)
# find the greatest increase in profits in the entire period (this will be the highest value in the set) & date it occurred
# find the greatest decrease in profits in the entire period (this will be the lowest value in the set) & date it occurred
# print results to both the terminal and to a text file
# bank_data file is
# 2 columns [Date, Profit/Losses]
# this is included in a header row
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# PyBank CODE #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# import the bank data
import os
import csv
bank_data_path = os.path.join('budget_data.csv')
with open(bank_data_path, 'r', newline='') as csvfile:
bank_data = csv.reader(csvfile, delimiter=',')
header = next(bank_data) # since skipping, all value counts below are true, no need to subtract 1 for header in math...
each_month = list(bank_data)
# get length of bank data list set to provide number of months in file
months = len(each_month)
# create independent lists for month and profit/loss
period, pnl = list(zip(*each_month))
# grab values and cast to integers
pnl_values = [int(x) for x in pnl]
# calculate net profit/loss
total_pnl = 0
for day in pnl_values:
total_pnl += day
# calculate change from month to month
change = []
i = 0
for number in range(len(pnl_values)-1):
var = pnl_values[i+1] - pnl_values[i]
i += 1
change.append(var)
# calculate sum of changes (for use in averaging)
ttl_chgs = 0
for var in change:
ttl_chgs += var
# calculate average change; divide by 1 less than number of months since there is no change value for first month
avg_chg = ttl_chgs / (months - 1)
# find index of min/max values; to match correct month in output, add 1 since no change value to match to month[0]
h = change.index(max(change))
l = change.index(min(change))
print('Financial Analysis')
print('- - - - - - - - - - - - - - - - - - - - - - - - - -')
print('Total Months: ', months)
print('Total: ', '${}'.format(int(total_pnl)))
print('Average Change: ', '${:.2f}'.format(float(avg_chg)))
print('Greatest Increase in Profits: ', period[h+1], ' ${}'.format(max(change)))
print('Greatest Decrease in Profits: ', period[l+1], ' ${}'.format(min(change)))
print('- - - - - - - - - - - - - - - - - - - - - - - - - -')
with open('FinancialAnalysis.txt', 'w') as f:
print('Financial Analysis', file=f)
print('- - - - - - - - - - - - - - - - - - - - - - - - - -', file=f)
print('Total Months: ', months, file=f)
print('Total: ', '${}'.format(int(total_pnl)), file=f)
print('Average Change: ', '${:.2f}'.format(float(avg_chg)), file=f)
print('Greatest Increase in Profits: ', period[h+1], ' ${}'.format(max(change)), file=f)
print('Greatest Decrease in Profits: ', period[l+1], ' ${}'.format(min(change)), file=f)
print('- - - - - - - - - - - - - - - - - - - - - - - - - -', file=f) | [
"jenklimek@msn.com"
] | jenklimek@msn.com |
f3ef8d9e54c5ba4847709554f1cdd71dcc8ad89a | bb2c7bfc0103b1b187a244a4becc26fe7c71e396 | /main_preprocess.py | 107ab5e940f54159021ea5b530d5847ac76789a6 | [] | no_license | kaoutarElamiry12/Seg-Net | 6cd01312754234b437f3eb7d728eb70f9fe65b35 | 5910c546c1314b8455d9507d32fc0ea30377fed5 | refs/heads/master | 2022-02-09T12:19:13.281092 | 2019-06-27T20:39:07 | 2019-06-27T20:39:07 | 192,698,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | from proc import preprocess
import tensorflow as tf
import h5py
import os
import multiprocessing as mp
preproc = {
'indir': './img',
'stride': 2,
'patch_size': 80, # should be multiple of 8
'mode': 'tif',
'shuffle': True,
'traintest_split_rate': 0.9
}
preprocess(**preproc)
| [
"noreply@github.com"
] | kaoutarElamiry12.noreply@github.com |
c997ae3f2e974662ca89bdc82bccbd2658d4404b | 73f7cc0e71bfd38d3bfe97367324f1e7a5d8b451 | /engine_code/gapi/modules/proxy/cloud/parse.py | 0e1d8a64f87ac9893d254692c67c63c5b528386c | [] | no_license | cash2one/my-test | ccc0ae860f936262a601c1b579d3c85196b562f9 | 8bd23f5963f4dc7398b7670e28768a3533bd5d14 | refs/heads/master | 2021-01-18T03:20:30.889045 | 2017-01-19T02:52:02 | 2017-01-19T02:52:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,357 | py | #!/usr/bin/python
# -*- coding=utf-8 -*-
from xml.etree.ElementTree import ElementTree,Element
def read_xml(in_path):
    '''Read and parse an XML file.
      in_path: path to the XML file
      return: ElementTree'''
    # Parse exactly once: the original parsed the file a SECOND time just to
    # print the result (a Python-2 debug leftover), doubling the file I/O and
    # making the module a syntax error under Python 3.
    tree = ElementTree()
    tree.parse(in_path)
    return tree
def write_xml(tree, out_path):
    '''Write an XML tree out to a file.
      tree: the XML tree (ElementTree)
      out_path: destination path'''
    # Dropped the Python-2-only debug print ('.....') that polluted stdout
    # and broke the module under Python 3.
    tree.write(out_path, encoding="utf-8")
def if_match(node, kv_map):
    '''Return True iff *node* carries every attribute in *kv_map*
    with exactly the expected value.
      node: the element to inspect
      kv_map: mapping of attribute name -> expected value'''
    return all(node.get(attr) == expected for attr, expected in kv_map.items())
#---------------search -----
def find_nodes(tree, path):
    '''Return every node under *tree* matching the given node path.
      tree: ElementTree (or Element) to search
      path: node path, e.g. "processers/processer"'''
    matches = tree.findall(path)
    return matches
def get_node_by_keyvalue(nodelist, kv_map):
    '''Filter *nodelist* down to the nodes whose attributes satisfy every
    key/value pair in *kv_map*.
      nodelist: candidate nodes
      kv_map: mapping of attribute name -> required value
      return: list of matching nodes'''
    return [candidate for candidate in nodelist if if_match(candidate, kv_map)]
#---------------change -----
def change_node_properties(nodelist, kv_map, is_delete=False):
    '''Set (or, with is_delete=True, remove) attributes on every node.
      nodelist: nodes to modify in place
      kv_map: attribute name -> value mapping
      is_delete: when True, delete the listed attributes instead of setting'''
    if is_delete:
        for node in nodelist:
            for key in kv_map:
                # Only drop attributes that actually exist on the node.
                node.attrib.pop(key, None)
    else:
        for node in nodelist:
            for key, value in kv_map.items():
                node.set(key, value)
def change_node_text(nodelist, text, is_add=False, is_delete=False):
    '''Replace, append to, or clear the text of every node in *nodelist*.
      nodelist: nodes to modify in place
      text: the new/appended text
      is_add: append *text* to the existing text instead of replacing it
      is_delete: clear the text (only honoured when is_add is False)'''
    for node in nodelist:
        if is_add:
            # NOTE: appending assumes node.text is already a string
            # (it is None on elements that never had text).
            node.text = node.text + text
        else:
            node.text = "" if is_delete else text
def create_node(tag, property_map, content):
    '''Build and return a brand-new element.
      tag: tag name of the element
      property_map: mapping of attribute name -> value
      content: text placed between the opening and closing tags
      return: the new Element'''
    node = Element(tag, property_map)
    node.text = content
    return node
def add_child_node(nodelist, element):
    '''Append *element* as a child of every node in *nodelist*.
    NOTE: the very same Element object is attached to each parent, so with
    multiple parents the child is shared, not copied.
      nodelist: parent nodes
      element: the child to attach'''
    for parent in nodelist:
        parent.append(element)
def del_node_by_tagkeyvalue(nodelist, tag, kv_map):
    '''Locate children by tag plus attribute values and delete them.
      nodelist: parent nodes to scan
      tag: tag name the child must have
      kv_map: attribute name -> value pairs the child must carry'''
    for parent_node in nodelist:
        # list(...) snapshots the children so removing during iteration is
        # safe.  Element.getchildren() -- used here before -- was deprecated
        # and finally removed in Python 3.9; list(element) is the documented
        # equivalent.
        children = list(parent_node)
        for child in children:
            if child.tag == tag and if_match(child, kv_map):
                parent_node.remove(child)
#if __name__ == "__main__":
#
# #1. 读取xml文件
# tree = read_xml("./test.xml")
# print 'tree',tree
#
# #2. 属性修改
# #A. 找到父节点
# nodes = find_nodes(tree, "processers/processer")
# #B. 通过属性准确定位子节点
# result_nodes = get_node_by_keyvalue(nodes, {"name":"BProcesser"})
# #C. 修改节点属性
# change_node_properties(result_nodes, {"age": "1"})
# #D. 删除节点属性
# change_node_properties(result_nodes, {"value":""}, True)
#
# #3. 节点修改
# #A.新建节点
# a = create_node("person", {"age":"15","money":"200000"}, "this is the firest content")
# #B.插入到父节点之下
# add_child_node(result_nodes, a)
#
# #4. 删除节点
# #定位父节点
# del_parent_nodes = find_nodes(tree, "processers/services/service")
# #准确定位子节点并删除之
# target_del_node = del_node_by_tagkeyvalue(del_parent_nodes, "chain", {"sequency" : "chain1"})
#
# #5. 修改节点文本
# #定位节点
# text_nodes = get_node_by_keyvalue(find_nodes(tree, "processers/services/service/chain"), {"sequency":"chain3"})
# change_node_text(text_nodes, "new text")
#
# #6. 输出到结果文件
# write_xml(tree, "./out.xml")
| [
"zhizhi1908@yeahh.net"
] | zhizhi1908@yeahh.net |
ef7aee74756905a6c511dbccd07cd08f38ddd2b3 | 8b9bc88ce6138cb2d008d9766964cadb2878b7d2 | /pl/test/transaction_test.py | cbd86ffdbfbcc9b78197abcecc22ed700b4da9cc | [] | no_license | tvaught/experimental | e499ebd6e9227c9cd4536c9f2c88b73c00e73eb0 | 7f86676ce0643375996da7a6f3bcbf8b35feb8b1 | refs/heads/master | 2021-01-13T01:36:50.431672 | 2015-07-27T20:50:48 | 2015-07-27T20:50:48 | 266,525 | 16 | 7 | null | 2013-03-19T16:30:51 | 2009-08-01T16:11:55 | Python | UTF-8 | Python | false | false | 366 | py | #!python
# Author: Travis N. Vaught
# Copyright (c)2013, Vaught Management, LLC
# License: BSD
# Major package imports
import pandas
import numpy as np
# Local library imports
import transaction
# Load from test file
f = open('transactions2012.csv', 'ra')
trns = pandas.read_csv(f)
tlist = []
for i in range(len(trns.DATE)):
t = transaction.Transaction(
| [
"travis@vaught.net"
] | travis@vaught.net |
1705c7af71891bc2ac5ec06ee1e4c4eb356f9e68 | ec26ddcda8c99e6cb416d5eeb3dc1c59759ba397 | /variables.py | 9d79285d30ababa52e8724ed0c46f1a8b977f95a | [] | no_license | AdinaFakih/python | 6706d3e2b9ad26c4ed5050678eb3df3169f3523f | 1d63e047d1b12b2fa01fd0c3f49889b680447b94 | refs/heads/master | 2020-05-02T23:58:49.198308 | 2019-03-28T23:12:50 | 2019-03-28T23:12:50 | 178,295,676 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | # a = 10
# print (a + a)
# print(type(a))
my_income = 100
tax_rate = 0.1
my_taxes = my_income * tax_rate
print(my_taxes) | [
"noreply@github.com"
] | AdinaFakih.noreply@github.com |
5e9c4cf93d6fe14c51c0dbe8d9f1021d055afeea | 340a75cda3ef70c02917ca1975305356699e0aa4 | /benchmarks/src/L1/example_task/example_task.py | ffa02657e114907cda309042b9eebe3c37749bf7 | [] | no_license | harvard-edge/TinyMLPerf | b31a55ffd0670fffdc520769975ae9cc44edb85b | 15f2e0e337ba90ce43f5dd4f2024047fe52fbe62 | refs/heads/master | 2022-02-22T14:59:24.338449 | 2019-10-31T19:19:45 | 2019-10-31T19:19:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,322 | py | import sys
import os
import re
import argparse
from task import Task
# Resolve template.c relative to this file so the task works regardless of
# the current working directory.
filepath = os.path.dirname(os.path.abspath(__file__))
template_path = filepath + "/" + "template.c"
class ExampleTask(Task):
    """Task that instantiates template.c with two integer parameters."""

    def __init__(self):
        # Both flags default to None; generate_task asserts they were
        # actually supplied before rendering.
        self.parser = argparse.ArgumentParser()
        for flag in ("--param1", "--param2"):
            self.parser.add_argument(flag, default=None, type=int)

    def replace_with_params(self, template, param1, param2):
        """Substitute the {{PARAM1}}/{{PARAM2}} placeholders in *template*."""
        substitutions = (("{{PARAM1}}", param1), ("{{PARAM2}}", param2))
        for placeholder, value in substitutions:
            assert(placeholder in template)
            template = template.replace(placeholder, str(value))
        return template

    def generate_task(self, output_path, args):
        """Render the template with args.param1/param2 into output_path/main.cpp."""
        assert(args.param1 is not None)
        assert(args.param2 is not None)
        with open(template_path, "r") as source:
            rendered = self.replace_with_params(source.read(),
                                                args.param1,
                                                args.param2)
        with open(output_path + "/main.cpp", "w") as target:
            target.write(rendered)

    def task_name(self):
        return "ExampleTask"

    def get_parser(self):
        return self.parser
| [
"max@dhcp-10-250-27-41.harvard.edu"
] | max@dhcp-10-250-27-41.harvard.edu |
9e6db984fd95838c610d8b6672ef685602129c1c | b60555fc02c06c7d15dff96083e2a89addd0fbc6 | /scripts/balance_data.py | f5e6a249497661ba1e85476e2d41d6bc74f2a26e | [
"MIT"
] | permissive | Antoine-BL/EuroTruck-ai.py | c2fae85d9d5566799c698deeca82a01e393acd71 | c68ca76063c14b1b8b91d338c8cead9f411521ca | refs/heads/master | 2023-04-09T15:42:48.390049 | 2020-01-18T20:50:59 | 2020-01-18T20:50:59 | 167,547,749 | 2 | 0 | MIT | 2023-03-24T23:38:34 | 2019-01-25T12:57:58 | Python | UTF-8 | Python | false | false | 2,592 | py | import os
from heapq import nsmallest
import random
import numpy as np
PCT_TEST = 0.2
SAMPLES_PER_FILE = 100
def main():
    # Entry point: delegates to balance_data() below.
    balance_data()
# NOTE(review): this first draft of balance_data() was left unfinished --
# the bare "dataset_size =" line is a SyntaxError that prevented the whole
# module from importing, and the definition would in any case be shadowed by
# the complete balance_data() defined immediately below.  Commented out so
# the module parses; kept for reference.
# def balance_data():
#     path = 'D:\Documents\School work\Cegep\Session 6\EuroTruck-ai.py\data-png'
#     dataset_size =
#     labels = np.load(path)
#     for i in range(0, len(labels)):
#         label = label[i]
def balance_data():
    # Orchestrates the rebalancing pipeline: count samples, histogram the
    # label distribution into bins of width 0.1, then down-sample and save.
    # NOTE(review): relies on module-level `unsorted_path` / `balanced_path`
    # which are not defined anywhere in this file -- confirm their origin.
    dataset_size = calc_nb_samples(unsorted_path)
    print('Balancing dataset of {} samples'.format(dataset_size))
    pct_per_bin = proportions_per_bin(unsorted_path, 0.1, dataset_size)
    balance_and_save(pct_per_bin, unsorted_path, dataset_size, 0.1, balanced_path)
def calc_nb_samples(path) -> int:
    """Count dataset samples by probing numbered files.

    *path* is a format string with one ``{}`` slot for a 1-based file
    index; consecutive indices are probed until a file is missing, and
    each existing file is assumed to hold ``SAMPLES_PER_FILE`` samples.
    """
    nb_files = 0
    while os.path.isfile(path.format(nb_files + 1)):
        nb_files += 1
    return nb_files * SAMPLES_PER_FILE
def proportions_per_bin(path, bin_size, total_nb_samples):
    # Histogram the label values (data_point[1][1], assumed to lie in
    # [-1, 1] -- TODO confirm) into bins of width bin_size, scanning every
    # numbered data file.  Despite the name, this returns raw COUNTS per
    # bin, not proportions.
    bins = np.zeros((round(2/bin_size), ), dtype=np.int)  # NOTE(review): np.int is removed in recent NumPy; use int
    print('finding proportions per bin')
    nb_files = round(total_nb_samples / SAMPLES_PER_FILE)
    for num_file in range(1, nb_files + 1):
        print('File {} of {} ({}%)'.format(num_file, nb_files, round(num_file / nb_files * 100, 1)))
        data_file = path.format(num_file)
        data = np.load(data_file)
        for data_point in data:
            # Map a value in [-1, 1] to a 1-based bin index.
            # NOTE(review): a value of exactly -1 yields bin_nb == 0, so
            # bins[bin_nb - 1] wraps around and increments the LAST bin.
            bin_nb = int(round((data_point[1][1] + 1) / bin_size, 0))
            bins[bin_nb - 1] += 1
    return bins
def balance_and_save(bins, path, total_nb_samples, bin_size, write_path):
    # Randomly down-sample every bin toward a common target count, then
    # write the surviving samples out in files of SAMPLES_PER_FILE each.
    write_file_num = 1
    # NOTE(review): max(nsmallest(4, bins)) is the 4th-smallest bin count,
    # not the second-smallest as the variable name suggests -- confirm intent.
    second_smallest = max(nsmallest(4, bins))
    # Keep-probability per bin: target count / actual count (may exceed 1
    # for the smallest bins, which then keep every sample).
    bin_prob = []
    for nb in bins:
        bin_prob.append(second_smallest / nb)
    bal_data = []
    print('Balancing data')
    nb_files = round(total_nb_samples / SAMPLES_PER_FILE)
    for num_file in range(1, nb_files + 1):
        print('File {} of {} ({}%)'.format(num_file, nb_files, round(num_file / nb_files * 100, 1)))
        data_file = path.format(num_file)
        data = np.load(data_file)
        for data_point in data:
            # Same 1-based binning as proportions_per_bin (here round() has
            # no explicit ndigits, which is equivalent after the int() cast).
            bin_nb = int(round((data_point[1][1] + 1) / bin_size))
            # Bernoulli trial with resolution 1/10000.
            if random.randrange(0, 10000) / 10000 < bin_prob[bin_nb - 1]:
                bal_data.append(data_point)
            if len(bal_data) == SAMPLES_PER_FILE:
                np.save(write_path.format(write_file_num), bal_data)
                print('writing balanced data to file number {}'.format(write_file_num))
                write_file_num += 1
                bal_data = []
    # NOTE(review): any trailing partial batch (< SAMPLES_PER_FILE samples)
    # is silently discarded when the loops end.
if __name__ == '__main__':
main()
| [
"antoine.brassard@gmail.com"
] | antoine.brassard@gmail.com |
bfcfe9c39e88787a47af7b24c492c7cb2ba75116 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03150/s056018673.py | ba3699fc1ecf9d7f7a828e88f30db87b5e18b4da | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | S = input()
ans = "NO"
for i in range(len(S)):
for j in range(len(S)):
if S[0:i] + S[i+j:len(S)] == "keyence":
print("YES")
exit()
print(ans) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
f4c1a5faf28472dabc9a1ec3f62b04cc617d762a | f3a341d7ee0b0e1fc05dfd3863d0b7203130517e | /FinalProject/urls.py | 15396ff3b724197ea69a30ffa39328026bc2fac9 | [] | no_license | nandaryanizar/FinalProjectNLP | 248152b01dfb0a1aa6c50e357ebbe4f8a3dbe085 | 337d5b8a9a760049fa65cc29392f07d298236adb | refs/heads/master | 2020-03-22T05:35:48.088562 | 2018-07-07T09:26:03 | 2018-07-07T09:26:03 | 139,577,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | """FinalProject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from app import views
urlpatterns = [
    path('admin/', admin.site.urls),
    # App views are class-based; .as_view() adapts each for URL routing.
    path('', views.Index.as_view(), name='home'),
    path('news/', views.News.as_view(), name='news'),
    path('truthfulness/', views.Politifact.as_view(), name='truthfulness')
]
"anandar.ryanizar@gmail.com"
] | anandar.ryanizar@gmail.com |
3e90c7f5b279e7d86b365e1a1faeb32f2420825d | 0529196c4d0f8ac25afa8d657413d4fc1e6dd241 | /runnie0427/02965/2965.py2.py | fead6e9c86c1bc1e100db0a5a2029668e08104b8 | [] | no_license | riyuna/boj | af9e1054737816ec64cbef5df4927c749808d04e | 06420dd38d4ac8e7faa9e26172b30c9a3d4e7f91 | refs/heads/master | 2023-03-17T17:47:37.198570 | 2021-03-09T06:11:41 | 2021-03-09T06:11:41 | 345,656,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,370 | py | <!DOCTYPE html>
<html lang="ko">
<head>
<title>Baekjoon Online Judge</title><meta name="viewport" content="width=device-width, initial-scale=1.0"><meta charset="utf-8"><meta name="author" content="스타트링크 (Startlink)"><meta name="keywords" content="ACM-ICPC, ICPC, 프로그래밍, 온라인 저지, 정보올림피아드, 코딩, 알고리즘, 대회, 올림피아드, 자료구조"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta property="og:type" content="website"><meta property="og:image" content="http://onlinejudgeimages.s3-ap-northeast-1.amazonaws.com/images/boj-og-1200.png"><meta property="og:site_name" content="Baekjoon Online Judge"><meta name="format-detection" content = "telephone=no"><meta name="msapplication-config" content="none"><link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png"><link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png"><link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png"><link rel="manifest" href="/site.webmanifest"><link rel="mask-icon" href="/safari-pinned-tab.svg" color="#0076c0"><meta name="msapplication-TileColor" content="#00aba9"><meta name="theme-color" content="#ffffff"><link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.2.0/css/bootstrap.min.css"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/style.css?version=20210107"><link href="https://fonts.googleapis.com/css?family=Noto+Sans+KR:400,700|Open+Sans:400,400i,700,700i|Source+Code+Pro&subset=korean" rel="stylesheet"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/css/connect.css?version=20210107"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/css/result.css?version=20210107"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/custom.css?version=20210107"><link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.6.3/css/font-awesome.css"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/theme-colors/blue.css?version=20210107"><link 
rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/css/pace.css">
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-10874097-3"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'UA-10874097-3');
</script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/noty/3.1.4/noty.min.css" /><meta name="username" content="">
<link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/pages/page_404_error.css">
</head>
<body>
<div class="wrapper">
<div class="header no-print"><div class="topbar"><div class="container"><ul class="loginbar pull-right"><li><a href = "/register">회원가입</a></li><li class="topbar-devider"></li><li><a href = "/login?next=%2Fsource%2Fdownload%2F5344904">로그인</a></li></ul></div></div><div class="navbar navbar-default mega-menu" role="navigation"><div class="container"><div class="navbar-header"><button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-responsive-collapse"><span class="sr-only">Toggle navigation</span><span class="fa fa-bars"></span></button><a class="navbar-brand" href="/"><img id="logo-header" src="https://d2gd6pc034wcta.cloudfront.net/images/logo@2x.png" alt="Logo" data-retina></a></div><div class="collapse navbar-collapse navbar-responsive-collapse"><ul class="nav navbar-nav"><li class="dropdown mega-menu-fullwidth "><a href="javascript:void(0);" class="dropdown-toggle" data-toggle="dropdown">문제</a><ul class="dropdown-menu"><li><div class="mega-menu-content"><div class="container"><div class="row equal-height"><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>문제</h3></li><li><a href = "/problemset">전체 문제</a></li><li><a href = "/category">문제 출처</a></li><li><a href = "/step">단계별로 풀어보기</a></li><li><a href = "/problem/tags">알고리즘 분류</a></li><li><a href = "/problem/added">새로 추가된 문제</a></li><li><a href = "/problem/added/1">새로 추가된 영어 문제</a></li><li><a href = "/problem/ranking">문제 순위</a></li></ul></div><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>문제</h3></li><li><a href="/problem/only">푼 사람이 한 명인 문제</a></li><li><a href="/problem/nobody">아무도 못 푼 문제</a></li><li><a href="/problem/recent/submit">최근 제출된 문제</a></li><li><a href="/problem/recent/accepted">최근 풀린 문제</a></li><li><a href="/problem/random">랜덤</a></li></ul></div><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>출처</h3></li><li><a href = "/category/1">ICPC</a></li><li><a 
href = "/category/2">Olympiad</a></li><li><a href = "/category/55">한국정보올림피아드</a></li><li><a href = "/category/57">한국정보올림피아드시․도지역본선</a></li><li><a href = "/category/318">전국 대학생 프로그래밍 대회 동아리 연합</a></li><li><a href = "/category/5">대학교 대회</a></li><li><a href = "/category/428">카카오 코드 페스티벌</a></li><li><a href = "/category/215">Coder's High</a></li></ul></div><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>ICPC</h3></li><li><a href = "/category/7">Regionals</a></li><li><a href = "/category/4">World Finals</a></li><li><a href = "/category/211">Korea Regional</a></li><li><a href = "/category/34">Africa and the Middle East Regionals</a></li><li><a href = "/category/10">Europe Regionals</a></li><li><a href = "/category/103">Latin America Regionals</a></li><li><a href = "/category/8">North America Regionals</a></li><li><a href = "/category/92">South Pacific Regionals</a></li></ul></div></div></div></div></li></ul></li><li><a href = "/workbook/top">문제집</a></li><li><a href = "/contest/official/list">대회<span class='badge badge-red rounded-2x'>2</span></a></li><li><a href = "/status">채점 현황</a></li><li><a href = "/ranklist">랭킹</a></li><li><a href = "/board/list/all">게시판</a></li><li><a href = "/group/list/all">그룹</a></li><li><a href = "/blog/list">블로그</a></li><li><a href = "/lectures">강의</a></li><li><a href = "/search"><i class="fa fa-search search-btn"></i></a></li></ul></div></div></div></div><form action="/logout" method="post" id="logout_form"><input type='hidden' value='%2Fsource%2Fdownload%2F5344904' name="next"></form>
<div class="container content">
<div class="col-md-8 col-md-offset-2">
<div class="error-v1">
<span class="error-v1-title">404</span>
<span>Not found</span>
<div class="margin-bottom-20"></div>
</div>
<div class="text-center">
<span style="font-size:18px;">강의 슬라이드의 첨부 소스 코드가 404 에러가 뜨는 경우에는 링크를 복사/붙여넣기 해주세요.</span>
</div>
<div class="margin-bottom-40"></div>
</div>
</div>
<div class="footer-v3 no-print"><div class="footer"><div class="container"><div class="row"><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>Baekjoon Online Judge</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/about">소개</a></li><li><a href="/news">뉴스</a></li><li><a href="/live">생중계</a></li><li><a href="/poll">설문조사</a></li><li><a href="/blog">블로그</a></li><li><a href="/calendar">캘린더</a></li><li><a href="/donate">기부하기</a></li><li><a href="https://github.com/Startlink/BOJ-Feature-Request">기능 추가 요청</a></li><li><a href="https://github.com/Startlink/BOJ-spj">스페셜 저지 제작</a></li><li><a href="/labs">실험실</a></li></ul><div class="thumb-headline"><h2>채점 현황</h2></div><ul class="list-unstyled simple-list"><li><a href="/status">채점 현황</a></li></ul></div><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>문제</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/problemset">문제</a></li><li><a href="/step">단계별로 풀어보기</a></li><li><a href="/problem/tags">알고리즘 분류</a></li><li><a href="/problem/added">새로 추가된 문제</a></li><li><a href="/problem/added/1">새로 추가된 영어 문제</a></li><li><a href="/problem/ranking">문제 순위</a></li><li><a href="/problem/recent/submit">최근 제출된 문제</a></li><li><a href="/problem/recent/accepted">최근 풀린 문제</a></li><li><a href="/change">재채점 및 문제 수정</a></li></ul><div class="thumb-headline"><h2>유저 대회 / 고등학교 대회</h2></div><ul class="list-inline simple-list margin-bottom"><li><a href="/category/353">FunctionCup</a></li><li><a href="/category/319">kriiicon</a></li><li><a href="/category/420">구데기컵</a></li><li><a href="/category/358">꼬마컵</a></li><li><a href="/category/421">네블컵</a></li><li><a href="/category/413">소프트콘</a></li><li><a href="/category/416">웰노운컵</a></li><li><a href="/category/detail/1743">HYEA Cup</a></li><li><a href="/category/364">경기과학고등학교</a></li><li><a href="/category/417">대구과학고등학교</a></li><li><a href="/category/429">부산일과학고</a></li><li><a 
href="/category/435">서울과학고등학교</a></li><li><a href="/category/394">선린인터넷고등학교</a></li></ul></div><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>출처</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/category/1">ICPC</a></li><li><a href="/category/211">ICPC Korea Regional</a></li><li><a href="/category/2">Olympiad</a></li><li><a href="/category/55">한국정보올림피아드</a></li><li><a href="/category/57">한국정보올림피아드시․도지역본선</a></li><li><a href="/category/318">전국 대학생 프로그래밍 대회 동아리 연합</a></li><li><a href="/category/5">대학교 대회</a></li><li><a href="/category/428">카카오 코드 페스티벌</a></li><li><a href="/category/215">Coder's High</a></li></ul><div class="thumb-headline"><h2>대학교 대회</h2></div><ul class="list-inline simple-list"><li><a href="/category/320">KAIST</a></li><li><a href="/category/426">POSTECH</a></li><li><a href="/category/341">고려대학교</a></li><li><a href="/category/434">광주과학기술원</a></li><li><a href="/category/361">국민대학교</a></li><li><a href="/category/83">서강대학교</a></li><li><a href="/category/354">서울대학교</a></li><li><a href="/category/352">숭실대학교</a></li><li><a href="/category/408">아주대학교</a></li><li><a href="/category/334">연세대학교</a></li><li><a href="/category/336">인하대학교</a></li><li><a href="/category/347">전북대학교</a></li><li><a href="/category/400">중앙대학교</a></li><li><a href="/category/402">충남대학교</a></li><li><a href="/category/418">한양대 ERICA</a></li><li><a href="/category/363">홍익대학교</a></li><li><a href="/category/409">경인지역 6개대학 연합 프로그래밍 경시대회</a></li></ul></div><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>도움말</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/help/judge">채점 도움말 및 채점 환경</a></li><li><a href="/help/rejudge">재채점 안내</a></li><li><a href="/help/rte">런타임 에러 도움말</a></li><li><a href="/help/problem">문제 스타일 안내</a></li><li><a href="/help/language">컴파일 또는 실행 옵션, 컴파일러 버전, 언어 도움말</a></li><li><a href="/help/workbook">문제집 도움말</a></li><li><a href="/help/contest">대회 개최 안내</a></li><li><a 
href="/help/problem-add">문제 출제 안내</a></li><li><a href="/help/rule">이용 규칙</a></li><li><a href="/help/stat">통계 도움말</a></li><li><a href="/help/question">질문 도움말</a></li><li><a href="/help/faq">자주묻는 질문</a></li><li><a href="/help/lecture">강의 안내</a></li><li><a href="/help/short">짧은 주소 안내</a></li><li><a href="/help/ad">광고 안내</a></li></ul></div></div></div><div class="copyright"><div class="container"><div class="row"><div class="col-md-9 col-sm-12"><p>© 2021 All Rights Reserved. <a href="https://startlink.io">주식회사 스타트링크</a> | <a href="/terms">서비스 약관</a> | <a href="/privacy">개인정보 보호</a> | <a href="/terms/payment">결제 이용 약관</a> | <a href="https://boj.startlink.help/hc/ko">도움말</a> | <a href="http://startl.ink/2pmlJaY">광고 문의</a> | <a href="https://github.com/Startlink/update-note/blob/master/boj.md">업데이트 노트</a> | <a href="https://github.com/Startlink/update-note/blob/master/boj-issues.md">이슈</a> | <a href="https://github.com/Startlink/update-note/blob/master/boj-todo.md">TODO</a></p></div><div class="col-md-3 col-sm-12"><ul class="social-icons pull-right"><li><a href="https://www.facebook.com/onlinejudge" data-original-title="Facebook" class="rounded-x social_facebook"></a></li><li><a href="https://startlink.blog" data-original-title="Wordpress" class="rounded-x social_wordpress"></a></li></ul></div></div><div class="row"><div class="col-sm-12"><a href="https://startlink.io" class="hidden-xs"><img src="https://d2gd6pc034wcta.cloudfront.net/logo/startlink-logo-white-only.png" class="pull-right startlink-logo"></a><ul class="list-unstyled simple-list"><li>사업자 등록 번호: 541-88-00682</li><li>대표자명: 최백준</li><li>주소: 서울시 서초구 서초대로74길 29 서초파라곤 412호</li><li>전화번호: 02-521-0487 (이메일로 연락 주세요)</li><li>이메일: <a href="mailto:contacts@startlink.io">contacts@startlink.io</a></li><li>통신판매신고번호: 제 2017-서울서초-2193 호</li></ul></div><div class="col-xs-9"><p id="no-acm-icpc"></p></div><div class="col-xs-3"></div></div></div></div></div>
</div>
<div id="fb-root"></div><script>
window.fbAsyncInit = function() {
FB.init({
appId : '322026491226049',
cookie : true,
xfbml : true,
version : 'v2.8'
});
};
(function(d, s, id) {
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) return;
js = d.createElement(s); js.id = id;
js.src = "//connect.facebook.net/ko_KR/sdk.js";
fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
</script>
<script>
!function(f,b,e,v,n,t,s){ if(f.fbq)return;n=f.fbq=function(){ n.callMethod?
n.callMethod.apply(n,arguments):n.queue.push(arguments) };if(!f._fbq)f._fbq=n;
n.push=n;n.loaded=!0;n.version='2.0';n.queue=[];t=b.createElement(e);t.async=!0;
t.src=v;s=b.getElementsByTagName(e)[0];s.parentNode.insertBefore(t,s) }(window,
document,'script','//connect.facebook.net/en_US/fbevents.js');
fbq('init', '1670563073163149');
fbq('track', 'PageView');
</script>
<noscript><img height="1" width="1" style="display:none" src="https://www.facebook.com/tr?id=1670563073163149&ev=PageView&noscript=1"/></noscript><script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/jquery-migrate/3.0.1/jquery-migrate.min.js"></script><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.2.0/js/bootstrap.min.js"></script><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.21.0/moment.min.js"></script><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.21.0/locale/ko.js"></script><script type="text/javascript" src="https://ddo7jzca0m2vt.cloudfront.net/unify/js/app.min.js?version=20210107"></script><script type="text/javascript">jQuery(document).ready(function() {App.init(0);});</script><!--[if lt IE 9]><script src="https://ddo7jzca0m2vt.cloudfront.net/unify/plugins/respond.js"></script><script src="https://ddo7jzca0m2vt.cloudfront.net/unify/plugins/html5shiv.js"></script><script src="https://ddo7jzca0m2vt.cloudfront.net/unify/js/plugins/placeholder-IE-fixes.js"></script><![endif]--><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/pace/1.0.2/pace.min.js"></script><script src="https://js.pusher.com/4.2/pusher.min.js"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/noty/3.1.4/noty.min.js"></script>
<script>
window.MathJax = {
tex: {
inlineMath: [ ['$', '$'], ['\\(', '\\)'] ],
displayMath: [ ['$$','$$'], ["\\[","\\]"] ],
processEscapes: true,
tags: "ams",
autoload: {
color: [],
colorv2: ['color']
},
packages: { '[+]': ['noerrors'] }
},
options: {
ignoreHtmlClass: "no-mathjax|redactor-editor",
processHtmlClass: 'mathjax',
enableMenu: false
},
chtml: {
scale: 0.9
},
loader: {
load: ['input/tex', 'output/chtml', '[tex]/noerrors'],
}
};
</script><script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script><script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
</body>
</html> | [
"riyuna0427@gmail.com"
] | riyuna0427@gmail.com |
7af3e3bd9757adddbd077ea4ae627c31b0e60f98 | 64c2a62bc54ca02b254f221366ac3c5d350f92fd | /question_information.py | 02a808c1a29d4a60053b7a0946908320fd3ee9c0 | [] | no_license | zhoulin845522/Wenjuanxing | 9a68d987ad92c3c0d51c87c163a785c008b8a329 | afa93363ac4ba526e56029486705dab7b2830302 | refs/heads/master | 2020-09-09T11:45:10.931631 | 2019-11-13T12:03:10 | 2019-11-13T12:03:10 | 221,438,527 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | question_url = 'https://www.wjx.cn/jq/48787292.aspx'
question_coockie = 'UM_distinctid=16e1aff3de1fd-0bece1616bde0c-b363e65-144000-16e1aff3de35e3; CNZZDATA4478442=cnzz_eid%3D1310099333-1572406869-https%253A%252F%252Fsp0.baidu.com%252F%26ntime%3D1573269174; .ASPXANONYMOUS=dtqxPW_F1QEkAAAAZmE5N2UwNWUtNGJlNi00OGY2LTgxNjYtY2ZkMjgwMjNlZjgx8mTsfvQkCcGBp6uvemtwoBzmi341; acw_tc=2f624a7315724109645253418e79e4c3b50feb8bfd8360cd64384a3720f9ae; jac48787292=12795638; Hm_lvt_21be24c80829bd7a683b2c536fcf520b=1572410966,1572516457,1572516568,1573184302; Hm_lpvt_21be24c80829bd7a683b2c536fcf520b=1573269428'
submit_url = 'https://www.wjx.cn/joinnew/processjq.ashx?submittype=1&curID=48787292&t=1573269466263&starttime=2019%2F11%2F9%2011%3A17%3A07&ktimes=327&rn=3752430363.12795638&hlv=1&jqnonce=aab16aac-b7f2-434f-b6da-7c51e1e6b753&jqsign=ffe61ffd*e0a5*343a*e1cf*0d26b6b1e024&jpm=13'
submit_times = 100
designated_area = ['广东', '湖南']
designated_ratio = [
    [1, 1, 2, 3, 3, 4, 4, 5],
    [1, 1, 2, 1],
    [1111, 3, 888, 5, 999, 4]
]


def createStr():
    """Assemble the 'submitdata' answer string for one questionnaire run.

    Each entry of ``designated_ratio`` packs the candidate answer values in
    its first half and the matching selection weights in its second half.
    One value per entry is drawn by weighted random choice and appended in
    the form ``}<question-number>$<value>``.
    """
    parts = ['submitdata= ']
    for idx, entry in enumerate(designated_ratio):
        half = len(entry) // 2
        values = entry[:half]
        weights = entry[half:half * 2]
        # Single randint call per entry, same as before, so the selection
        # distribution (and RNG consumption) is unchanged.
        pick = random.randint(1, sum(weights))
        cumulative = 0
        for value, weight in zip(values, weights):
            cumulative += weight
            if pick <= cumulative:
                # Question numbering starts at 2 (entry 0 -> question 2).
                parts.append('}' + str(idx + 2) + '$' + str(value))
                break
    return ''.join(parts)
"424466955@qq.com"
] | 424466955@qq.com |
500cf5bd3c035e6fe49e994d82fdb317f3ffc6de | 21df757f97a2ae789cf17434327e2dea21fe88ce | /V.0.1/tasktrader02/tasktrader/migrations/0013_auto_20171203_0305.py | 02fcc9ca40343218bf8247113e00efbe0e10b714 | [
"MIT"
] | permissive | ayser259/tasktrader | 627e2c06942931081e98408b66540b299eaebddb | 0292b3987474f8744fa12955542159789a34f9e7 | refs/heads/master | 2021-05-07T22:19:40.915809 | 2017-12-08T03:43:57 | 2017-12-08T03:43:57 | 109,198,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,562 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-03 03:05
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the core tasktrader schema.

    Adds the base tables (Account, Company, CV, Department, Employee,
    Location, Picture, Skill, Status, Task), the link tables
    (Applied/Filled/Posted/Random_Task, Employee_Skills, Task_Skills),
    and then wires up the foreign keys that reference models created
    later in the operation list via separate AddField operations.
    """
    # NOTE(review): initial=True combined with a dependency on 0012 is
    # unusual for a 13th migration — confirm this was intended.
    initial = True
    dependencies = [
        ('tasktrader', '0012_auto_20171203_0301'),
    ]
    operations = [
        # --- table creation ---
        migrations.CreateModel(
            name='Account',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('username', models.CharField(max_length=20)),
                ('password', models.CharField(max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='Applied_Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('company_name', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='CV',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('cv', models.FileField(upload_to='resumés')),
            ],
        ),
        migrations.CreateModel(
            name='Department',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('department_name', models.CharField(max_length=30)),
                ('company_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Company')),
            ],
        ),
        migrations.CreateModel(
            name='Employee',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('job_title', models.CharField(max_length=20)),
                ('first_name', models.CharField(max_length=20)),
                ('last_name', models.CharField(max_length=20)),
                ('department', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Department')),
            ],
        ),
        migrations.CreateModel(
            name='Employee_Skills',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('employee_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee')),
            ],
        ),
        migrations.CreateModel(
            name='Filled_Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('employee_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee')),
            ],
        ),
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('campus_name', models.CharField(max_length=30)),
                ('city', models.CharField(max_length=30)),
                ('country', models.CharField(max_length=30)),
                ('street_address', models.CharField(max_length=30)),
                ('postal_code', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='Picture',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('picture', models.ImageField(upload_to='Profile_Pictures')),
                ('employee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee')),
            ],
        ),
        migrations.CreateModel(
            name='Posted_Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('employee_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee')),
            ],
        ),
        migrations.CreateModel(
            name='Random_Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('employee_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee')),
            ],
        ),
        migrations.CreateModel(
            name='Skill',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('skill_name', models.CharField(max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='Status',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('status_type', models.CharField(max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('task_title', models.CharField(max_length=30)),
                ('task_description', models.CharField(max_length=50)),
                ('end_date', models.DateField(null=True)),
                ('start_date', models.DateField(null=True)),
                ('time_commitment', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('department', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Department')),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Location')),
                ('status', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Status')),
            ],
        ),
        migrations.CreateModel(
            name='Task_Skills',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('skill_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Skill')),
                ('task_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Task')),
            ],
        ),
        # --- foreign keys added after all referenced tables exist ---
        migrations.AddField(
            model_name='random_task',
            name='task_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Task'),
        ),
        migrations.AddField(
            model_name='posted_task',
            name='task_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Task'),
        ),
        migrations.AddField(
            model_name='filled_task',
            name='task_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Task'),
        ),
        migrations.AddField(
            model_name='employee_skills',
            name='skill_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Skill'),
        ),
        migrations.AddField(
            model_name='employee',
            name='location',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Location'),
        ),
        migrations.AddField(
            model_name='employee',
            name='supervisor',
            # self-referencing FK: an employee's supervisor is another employee
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee'),
        ),
        migrations.AddField(
            model_name='cv',
            name='employee',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee'),
        ),
        migrations.AddField(
            model_name='applied_task',
            name='employee_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee'),
        ),
        migrations.AddField(
            model_name='applied_task',
            name='task_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Task'),
        ),
        migrations.AddField(
            model_name='account',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasktrader.Employee'),
        ),
    ]
| [
"ayserchoudhury@gmail.com"
] | ayserchoudhury@gmail.com |
41d7cac52e8e3fa0ef0cd5e9b1399d0d00bd6d5a | 33402f7bc188cc4bf1502d3b0527b0816f606aae | /isValidParentheses.py | e46bc4baa5c6d7962954c28c1ffe3b59907cca7d | [] | no_license | Narcissus7/Lintcode | 6ecf03b49d9eb995565f6ee3f75b99a10695791b | a70985e28f8f93f4c0a6340d682e91fc6e213e53 | refs/heads/master | 2021-10-24T01:14:11.443922 | 2019-03-21T07:11:30 | 2019-03-21T07:11:30 | 116,116,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 646 | py | import time
def isValidParentheses(s):
    """Return True if every bracket in `s` is correctly matched and nested.

    Supported pairs are (), [] and {}.  Any other character, an unmatched
    closer, or a leftover opener makes the string invalid (same behaviour
    as the original implementation).

    :param s: string to check; the empty string is valid
    :return: bool
    """
    # Map each opener to its required closer.
    pairs = {'(': ')', '[': ']', '{': '}'}
    stack = []
    for ch in s:  # iterate the string directly; no list() copy needed
        if ch in pairs:
            stack.append(ch)
        elif stack and pairs[stack[-1]] == ch:
            stack.pop()
        else:
            # Closer with no matching opener, mismatched pair,
            # or a non-bracket character.
            return False
    # Valid only if every opener was closed.
    return not stack
s = "[]{[]}[]["
# time.clock() was removed in Python 3.8; time.perf_counter() is the
# recommended high-resolution timer for measuring elapsed time.
start = time.perf_counter()
a = isValidParentheses(s)
end = time.perf_counter()
print("read: %f s" % (end - start))
print(a)
"18813104077@163.com"
] | 18813104077@163.com |
a80267b8948ce8925d4f683da8b6c98a011e8e18 | aedb994a7f1d2fee9a4ac39a39b657a863e86dd5 | /exercicio_secao_07_p1.py | 1b38a6d34f3502ad796c6b1cb1dc2ece4b3f84b6 | [] | no_license | Carlos2y/exercicios_python_secao_GeekUniversity | 6a14355577f6366acd222a267dc3ceaa944f6039 | 9107ee717eb08dc00899fbe02120a9efa2c8ecbc | refs/heads/main | 2023-06-18T17:32:00.328465 | 2021-07-08T22:11:31 | 2021-07-08T22:11:31 | 315,795,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,319 | py | import random
import math
import numpy
import pandas as pd
def question(n):
    """Print a section banner for exercise number `n`, zero-padded below 10."""
    label = "0{}".format(n) if 0 < n < 10 else n
    print("\n\n", "---" * 26, "\n\n Questão {} \n\n".format(label))
'''
question(1)
v = [1,0,5,-2,-5,7]
foo = v[0] + v [1] + v[5]
print(f"Soma: {foo} \n")
v[4] = 100
print(v[4], "\n")
for i in range(len(v)):
print(f"V[{i}] = {v[i]}")
question(2)
lista = []
for i in range(6):
print(f"Informe o {i + 1}°: ", end=" ")
foo = int(input())
lista.append(foo)
print(f"Valores: {lista}")
question(3)
n = []
n2 = []
for i in range(10):
print(f"Informe o {i+1}°:", end=" ")
foo = float(input())
n.append(foo)
n2.append(foo ** 2)
print(f"\n Valores : {n} \n Valores ao quadrado: {n2} ")
question(4)
lista = []
for i in range(8):
lista.append(random.randrange(0,100))
x = random.randrange(0, 8)
y = random.randrange(0, 8)
print(f"Soma: {lista[x] + lista[y]}")
question(5)
lista = []
c = 0
for i in range(10):
lista.append(random.randrange(0, 1000))
for i in range(len(lista)):
if lista[i] % 2 == 0:
c += 1
print(f"Existem {c} valores pares")
question(6)
v = []
for i in range(10):
print(f"Informe o {i+1}° valor: ", end=" ")
foo = float(input())
v.append(foo)
print(f"\n Maior: {max(v)} \n Menor: {min(v)}")
question(7)
v = []
for i in range(10):
print(f"Informe o {i+1}° valor: ", end=" ")
foo = float(input())
v.append(foo)
print(f" Vetor: {v} \n Maior: {max(v)} \n Index: [{v.index(max(v))}]")
question(8)
v = []
for i in range(1, 7):
v.append(i)
print(v)
print(v[::-1])
question(9)
v = []
for i in range(2, 14, 2):
v.append(i)
print(v)
print(v[::-1])
question(10)
v = []
for i in range(15):
v.append(random.randrange(0,11))
print(v)
print( sum(v) / len(v) )
question(11)
v = []
c = 0
p = 0
for i in range(10):
v.append(random.randrange(-100, 100))
for i in range(10):
if v[i] <= 0:
c += 1
else:
p += v[i]
print(f" Numeros negativos: {c} \n Soma dos positivos: {p} ")
question(12)
lista = []
for i in range(1, 6):
foo = random.randrange(0,100)
print(f"{i}° valor: {foo}")
lista.append(foo)
print(f"\n Maior Valor: {max(lista)} \n Menor Valor: {min(lista)} \n " +
f"Media dos valores: {max(lista) / len(lista)}")
question(13)
lista = []
for i in range(1, 6):
foo = random.randrange(0,100)
print(f"{i}° valor: {foo}")
lista.append(foo)
print(f"\n Index do maior: {lista.index(max(lista))}" +
f"\n Index do menor: {lista.index(min(lista))}")
question(14)
v = []
c = []
for i in range(10):
v.append(random.randrange(0,100))
for i in range(10):
if v.count(v[i]) >= 2:
if v[i] not in c:
c.append(v[i])
print(c)
question(15)
v = []
for i in range(20):
v.append(random.randrange(0,100))
print("Duplicatas removidos: ",end=" ")
for i in range(len(v)):
try:
if v.count(v[i]) > 1:
print(v[i],end=" ")
v.remove(v[i])
except:
break
v.sort()
print("\n\n Lista: ",v)
question(16)
v = []
for i in range(5):
v.append(random.randrange(0,100))
while True:
print("""
1. Ordem Direta
2. Ordem Inversa
Sair.
Opção: """, end=" ")
op = input()
if op == "1":
v.sort()
print(f"Ordem direta: {v}")
elif op == "2":
print(f"Ordem Inversa: {v[::-1]}")
else:
print("Codigo Invalido")
question(17)
v = []
for i in range(10):
v.append(random.randrange(-100, 100))
print(v)
for i in range(len(v)):
if v[i] < 0:
v[i] = 0
print(v)
question(18)
v = []
for i in range(100):
v.append(random.randrange(0,100))
print("Informe um numero: ", end=" ")
num = int(input())
print(f"Multiplos de {num} no vetor:", end=" ")
for i in range(max(v)):
if i * num in v:
print(num * i, end=" ")
question(19)
v = []
for i in range(50):
foo = (i + 5 * i) % (i + 1)
v.append(foo)
print(f"Vetor: {v}")
question(20)
v = []
imp = []
for i in range(10):
v.append(random.randrange(0,50))
v.sort()
for i in range(10):
if v[i] % 2 != 0:
imp.append(v[i])
print("Vetores: \n Vetor || Impares ")
for i in range(10):
print(f" {v[i]} ||", end=" ")
try:
print(f" {imp[i]}",end=" ")
except:
print(" ", end=" ")
print("\n")
question(21)
a = []
b = []
c = []
for i in range(10):
a.append(random.randrange(0,100))
b.append(random.randrange(0,100))
for i in range(10):
c.append(a[i] - b[i])
print(f"Vetor c: {c}")
question(22)
a = []
b = []
c = []
for i in range(10):
a.append(random.randrange(0,100))
b.append(random.randrange(0,100))
for i in range(10):
c.append(a[i])
c.append(b[i])
print(f"Vetor: {c}")
question(23)
x = []
y = []
s = 0
for i in range(5):
x.append(random.randrange(0,100))
y.append(random.randrange(0,100))
print(f"Vetor 1: {x} \n Vetor 2: {y} \n")
for i in range(5):
s += x[i] * y[i]
print(f"Produto Escalar: {s}")
question(24)
x = {}
maior = 0
menor = 100
for i in range(10):
x[i] = float(str(random.randrange(1,3)) + "." + str(random.randrange(100)))
for i in range(10):
if x[i] < menor:
menor = x[i]
if x[i] > maior:
maior = x[i]
for a, b in x.items():
if menor == b:
print(f"Aluno {a} e o menor aluno com altura de {b} metros.")
if maior == b:
print(f"Aluno {a} e o maior aluno com altura de {b} metros.")
question(25)
v = [] # vetor dos naturais
m = [] # multiplos de N
c = 0 # contador
for i in range(100):
m.append(i * 7)
while len(v) < 100:
c += 1
if c not in m:
if "7" not in str(c):
v.append(c)
print(v)
question(26)
v = []
soma = 0
x = 0
y = 0
media = 0
r = 0
for i in range(10):
v.append(i)
for i in range(len(v)):
soma += v[i]
media = soma / len(v)
for i in range(len(v)):
x = v[i] - media
y += x * x
r = math.sqrt(y / len(v))
print(f"Desvio padrão: \n Vetor: {v} \n Desvio: {r:.2f}")
question(27)
v = []
t = []
foo = 0
for i in range(10):
v.append(random.randrange(3,1000))
for i in range(len(v)):
if v[i] % 2 == 0:
pass
else:
for x in range(1, i + 1):
foo = v[i] / x
if int(foo) == foo:
t.append(x)
if len(t) == 2:
print(f"Index: {i} Valor: {v[i]}")
t.clear()
else:
t.clear()
question(28)
v = []
v1 = []
v2 = []
for i in range(10):
v.append(random.randrange(3,1000))
for i in range(len(v)):
if v[i] % 2 == 0:
v1.append(v[i])
else:
v2.append(v[i])
print(v1)
print(v2)
question(29)
v = []
x = 0
for i in range(10):
v.append(random.randrange(1, 11))
print(f" Pares: ", end=" ")
for i in range(10):
if v[i] % 2 == 0:
print(v[i],end=" ")
x += v[i]
print(f"\n Soma dos pares: {x}\n ")
x = 0
print(f" Impares: ", end=" ")
for i in range(10):
if v[i] % 3 == 0:
print(v[i], end=" ")
x += v[i]
print(f"\n Soma dos impares: {x}")
question(30)
a = []
b = []
c = []
for i in range(10):
a.append(random.randrange(11))
b.append(random.randrange(11))
for i in range(10):
if a[i] in b:
if a[i] not in c:
c.append(a[i])
print(c)
question(31)
a = []
b = []
c = []
for i in range(10):
a.append(random.randrange(110))
b.append(random.randrange(110))
for i in range(10):
if a[i] not in c:
c.append(a[i])
if b[i] not in c:
c.append(b[i])
c.sort()
print(c)
print(set(a + b) == set(c))
question(32)
x = []
y = []
a = []
d = []
foo = 0
foo2 = 0
p = ["VALORES A|B", "SOMA", "PRODUTO", "DIFERENÇA", "INTERSEÇÃO"]
while True:
if len(x) == 5 and len(y) == 5: # tamanho das listar precisam ser iguais len(x) == len(y)
break
else:
if len(x) < 5:
foo = random.randrange(10)
if foo not in x:
x.append(foo)
if len(y) < 5:
foo = random.randrange(1, 11)
if foo not in y:
y.append(foo)
a = x
for i in range(len(x)):
if x[i] not in y:
foo = x[i]
else:
foo = ""
if x[i] in y:
foo2 = x[i]
else:
foo2 = ""
if y[i] not in a:
a.append(y[i])
d.append([f"{x[i]} {y[i]}", x[i] + y[i], x[i] * y[i], foo, foo2])
x = pd.DataFrame(d, columns=p)
print(x.to_string(index=False))
print(f"\n\n União: {a}")
question(33)
v = []
for i in range(15):
print("abc: ",end=" ")
foo = int(input())
v.append(foo)
if 0 in v:
for i in range(v.count(0)):
v.remove(0)
print(v)
question(34)
v = []
while len(v) < 10:
print("Informe um numero: ", end=" ")
foo = int(input())
if foo not in v:
v.append(foo)
else:
print(f"\n Digite outro numero. \n")
print(f"Vetor: {v}")
question(35)
a = str(random.randrange(10000))
b = str(random.randrange(10000))
va = []
vb = []
vc = []
x = list(a)
x.remove(min(a))
x.insert(0, min(a))
va = x
x = list(b)
x.remove(min(b))
x.insert(0, min(b))
vb = x
x = 5
for i in range(x):
if i > len(va) and i > len(vb):
vc.append( int(0) + int(0) )
elif i > len(va):
vc.append( int(0) + int(vb[i]) )
elif i < len(vb):
vc.append( int(va[i]) + int(vb[i]) )
elif i > len(va) or i > len(vb):
vc.append( int(va[i]) + int(vb[i]) )
print(f"""
Numero A: {a}
Numero B: {b}
Vetor A: {va}
Vetor B: {vb}
Vetor c: {vc}
""")
question(36)
v = []
for i in range(10):
v.append(random.randrange(100))
print(f"Vetor Desordenado: {v}")
v.sort()
print(f"Vetor Ordenado: {v}")
question(37)
v = []
for i in range(11):
v.append(random.randrange(1000))
v.sort()
print(f"Vetor Ordenado: {v}")
question(37)
v = []
for i in range(11):
v.append(random.randrange(100))
v.sort()
x = v.copy()
print(x)
question(38)
v = []
for i in range(10):
foo = random.randrange(100)
print(f"{i + 1}° Valor: {foo}")
v.append(foo)
v.sort()
print(f"\n\nVetor: {v}")
question(39)
n = 10
v = [[1], [1,1]]
for i in range(1, n):
l = [1]
for x in range(0, len(v[i])-1):
l += [ v[i][x] + v[i][x+1] ]
l += [1]
v += [l]
for i in range(len(v)):
print(v[i])
panda = pd.DataFrame(v)
print("\n\n", panda)
'''
| [
"noreply@github.com"
] | Carlos2y.noreply@github.com |
3e57b528ad994e798d54fc05ea15a8780e8e177b | a2e186009ebc821298ef769a549397f21ddd8c4f | /Content Selection/cnn.py | 2d074d14d27dd24ebc0baa57258f731d246fb422 | [] | no_license | markushoehn/Auto_TextSum | 0b06db5cc84dcbafac7beae08d00d670e20ea901 | 0ba31c37ea0db97681d1cfdf0fbdd175c9ced337 | refs/heads/master | 2020-03-15T04:49:07.300573 | 2018-08-07T20:28:32 | 2018-08-07T20:28:32 | 131,974,443 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,049 | py | from keras.models import Sequential
from keras.callbacks import ModelCheckpoint, EarlyStopping
from keras.layers import *
from keras import regularizers
from keras.layers.normalization import BatchNormalization
import numpy as np
import random
# convolutional neural network and hyper parameter optimization
# load data
# Each x_* array feeds the Embedding layer below, so rows are sequences of
# vocabulary indices padded to pad_length; y_* holds 2-class one-hot labels
# (train_model reads y_test[i][1]).
x_train, y_train = np.load('data/numpy_data/x_train_cnn300_tf_idf.npy'), np.load('data/numpy_data/y_train_cnn300_tf_idf.npy')
x_dev, y_dev = np.load('data/numpy_data/x_dev_cnn300_tf_idf.npy'), np.load('data/numpy_data/y_dev_cnn300_tf_idf.npy')
x_test, y_test = np.load('data/numpy_data/x_test_cnn300_tf_idf.npy'), np.load('data/numpy_data/y_test_cnn300_tf_idf.npy')
# number of examples in each split (first array axis)
train_size, dev_size, test_size = x_train.shape[0], x_dev.shape[0], x_test.shape[0]
# specify some parameters
embedding_dims = 300  # word-vector dimensionality (matches the "300" in the file names)
pad_length = 50  # every input sequence is expected to have this many tokens
patience = 2  # early-stopping patience: epochs without val_loss improvement
train_verbose = 1  # Keras fit() verbosity
# load embedding matrix
emb_matrix = np.load('data/numpy_data/embedding_matrix300_tf_idf.npy')
vocab_size = emb_matrix.shape[0]  # one embedding row per vocabulary entry
def train_model(batch_size, optimizer, number_conv_layers, number_filters, kernel_sizes, acts):
    """Build, train and evaluate one CNN sentence classifier.

    Architecture: frozen pre-trained embedding -> batch normalization ->
    `number_conv_layers` Conv1D/activation pairs -> global max pooling ->
    2-way softmax.  Trains with early stopping on validation loss, restores
    the best checkpoint, then scores the test split.

    :param batch_size: mini-batch size used during training
    :param optimizer: Keras optimizer name or instance (e.g. 'adam')
    :param number_conv_layers: how many Conv1D blocks to stack
    :param number_filters: per-layer filter counts (len >= number_conv_layers)
    :param kernel_sizes: per-layer kernel widths (len >= number_conv_layers)
    :param acts: per-layer activation names (len >= number_conv_layers)
    :return: (model, loss, accuracy, precision, recall, f1_score), all
             evaluation numbers measured on the test split
    """
    best_model_path_early_stopping = 'early_stopping_temp.hdf5'
    # specify model
    model = Sequential()
    model.add(Embedding(vocab_size, embedding_dims, weights=[emb_matrix], input_length=pad_length, trainable=False))
    # normalize input
    model.add(BatchNormalization())
    for i in range(number_conv_layers):
        model.add(Conv1D(filters=number_filters[i], kernel_size=kernel_sizes[i]))
        model.add(Activation(acts[i]))
    model.add(GlobalMaxPool1D())
    model.add(Dense(units=2))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    # add model checkpoint and early stopping
    callbacks = [ModelCheckpoint(filepath=best_model_path_early_stopping, monitor='val_loss', save_best_only=True),
                 EarlyStopping(monitor='val_loss', patience=patience)]
    model.fit(x_train, y_train, batch_size=batch_size, epochs=20, verbose=train_verbose,
              validation_data=(x_dev, y_dev), callbacks=callbacks)
    # load best model
    model.load_weights(filepath=best_model_path_early_stopping)
    # NOTE(review): evaluation on the *test* set uses dev_size as the batch
    # size — confirm this was intended.
    loss_and_metrics = model.evaluate(x_test, y_test, batch_size=dev_size, verbose=0)
    loss, accuracy = loss_and_metrics[0], loss_and_metrics[1]
    prediction = model.predict(x_test)
    # calculate precision, recall and f1 score
    # (class index 1 is treated as the positive class)
    tp, fp, fn = 0, 0, 0
    for i in range(test_size):
        if np.argmax(prediction[i]) == 1:
            if y_test[i][1] == 1:
                tp += 1
            else:
                fp += 1
        else:
            if y_test[i][1] == 1:
                fn += 1
    # the 10**-8 epsilon guards against division by zero
    precision, recall = tp / (tp + fp + 10 ** -8), tp / (tp + fn + 10 ** -8)
    f1_score = 2 * precision * recall / (precision + recall + 10 ** -8)
    return model, loss, accuracy, precision, recall, f1_score
def hyper_parameter_opt(number_of_settings):
    """Random-search `number_of_settings` CNN hyperparameter configurations.

    For each trial it samples batch size, optimizer, layer count, filter
    counts and kernel sizes, trains via train_model(), and saves the model
    to 'best_model_cnn.hdf5' whenever precision improves while recall stays
    above 0.05.

    :param number_of_settings: number of random configurations to try
    """
    # path for best model in hyper parameter search
    best_model_path = 'best_model_cnn.hdf5'
    # best evaluation measures
    best_loss, best_acc, best_prec, best_rec, best_f1 = np.inf, 0, 0, 0, 0
    for i in range(1, number_of_settings + 1):
        print('Setting number', i, 'of', number_of_settings, 'running...')
        # create random hyperparameters
        batch_s = random.randint(120, 180)
        opt = random.choice(['adam', 'sgd', 'adagrad'])
        number_cl = random.randint(1, 2)
        filters, kernel_s, act = [], [], []
        for _ in range(number_cl):
            filters.append(random.randint(30, 60))
            kernel_s.append(random.randint(4, 7))
            act.append('relu')
        # train model
        model, loss, acc, prec, rec, f1 = train_model(batch_s, opt, number_cl, filters, kernel_s, act)
        # update best model by the following update rule
        # (precision must improve, and recall must be non-trivial)
        if prec > best_prec and rec > 0.05:
            model.save(best_model_path)
            best_loss, best_acc, best_prec, best_rec, best_f1 = loss, acc, prec, rec, f1
            print('Updated best model', '\n', 'Loss:', loss, ', Accuracy:', acc, ', Precision:', prec,
                  ', Recall:', rec, ', F1 Score:', f1, '\n',
                  'Batch size:', batch_s, ', Optimizer;', opt, ', Number of convolutional layers:', number_cl,
                  ', Number of filters:', filters, ', Kernel sizes:', kernel_s,
                  ', Activation functions:', act)
# run some settings
# hyper_parameter_opt(20)
# Train a single model with a fixed configuration (2 conv layers with
# 47/42 filters, kernel size 6, adagrad, batch size 150), report its test
# metrics, and persist the trained weights.
model, loss, accuracy, precision, recall, f1_score = train_model(batch_size=150, optimizer='adagrad',
                                                                 number_conv_layers=2,
                                                                 number_filters=[47, 42], kernel_sizes=[6, 6],
                                                                 acts=['relu', 'relu'])
print('Loss:', loss, ', accuracy:', accuracy, ', precision:', precision, ', recall:', recall, ',f1 score:', f1_score)
model.save_weights('best_model_cnn_new2.hdf5')
| [
"basti.seipp@gmail.com"
] | basti.seipp@gmail.com |
fdc1224c6002292b05971d72b3ed2f151137982c | 5b1e2acbe3bb286ab5b9f756d695f9b9f71487ef | /app/migrations/0004_pet_last_update.py | e6e50c55d890fea1ca3da35585ef370d15f963db | [] | no_license | BashayerNouri/Pet-Shop | ad9c486e8b9d0af72496cecd17738faffb5487f3 | d43980275224d99df93fc71ec01f21e3a0462c85 | refs/heads/master | 2020-07-15T02:46:31.641785 | 2019-08-31T07:59:29 | 2019-08-31T07:59:29 | 205,461,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | # Generated by Django 2.2.4 on 2019-08-30 19:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0003_remove_pet_admin'),
]
operations = [
migrations.AddField(
model_name='pet',
name='last_update',
field=models.DateTimeField(auto_now=True),
),
]
| [
"bashayer_nouri@hotmail.com"
] | bashayer_nouri@hotmail.com |
4f0036ed2e29cb0ceabd923028303136d2aaedb6 | 306eca5585e5733fb605f5278c098274772e8b8f | /countercoup/trainer/traverser.py | 513ef3ce1c8725d9dbc895f9fed2e34d82a0ac42 | [] | no_license | tomwalden/CounterCoup | 302a56214440cf950fb6f8aeb25e9dd211dae56b | 63dda981dce0bcf56f377bf21c4353f77ebe1bc7 | refs/heads/main | 2023-07-17T00:50:49.153586 | 2021-09-03T16:00:59 | 2021-09-03T16:00:59 | 377,480,570 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,313 | py | from countercoup.trainer.trainer_stats import TrainerStats
from countercoup.model.game import Game
from countercoup.shared.network import Network
from countercoup.shared.infoset import Infoset
class Traverser:
"""Base class for traversers"""
def __init__(self, action_nets: [], block_nets: [], counteract_nets: [], lose_nets: [], iteration: int):
self.action_nets = action_nets
self.block_nets = block_nets
self.counteract_nets = counteract_nets
self.lose_nets = lose_nets
self.iteration = iteration
self.action_mem = [[] for _ in action_nets]
self.block_mem = [[] for _ in block_nets]
self.counteract_mem = [[] for _ in counteract_nets]
self.lose_mem = [[] for _ in lose_nets]
self.action_strategy_mem = []
self.block_strategy_mem = []
self.counteract_strategy_mem = []
self.lose_strategy_mem = []
self.stats = TrainerStats()
def get_regret_strategy(self, network: Network, infoset: Infoset, filt: [] = None):
"""
Get the strategy calculated from the advantage networks
:param network: the network to calculate the advantages
:param infoset: the infoset for the game state
:param filt: the outputs that we're allowed to output
:return: a dict of available actions and the strategy
"""
# If we're on the first iteration, don't bother using the NNs. Speeds up this iteration, and
# resolves issues where the networks don't zero correctly.
if self.iteration == 1:
output = {x: 0 for x in (filt if filt is not None else network.outputs)}
else:
output = network.get_output(infoset, filt)
total = sum(filter(lambda x: x > 0, output.values()))
if total == 0:
return {x: 1 / len(output) for x in output}
else:
return {x: (output[x] if output[x] > 0 else 0) / total for x in output}
def calculate_regrets(self, values: {}, strategy: {}, memory: [], infoset: Infoset, output_formatter):
"""
Calculate the regret values (and insert them into memory)
:param values: the advantage values
:param strategy: the calculated strategy
:param memory: the memory to insert the calculated regrets into
:param infoset: the infoset for the game state
:param output_formatter: a function that formats the regret data before being inserted into the memory
:return: the total instr_regret
"""
instr_regret = 0
for x in values:
instr_regret += strategy[x] * values[x]
# Calculate the scale factor - for robust sampling, it is the inverse of the fraction of actions selected
scale_factor = len(strategy) / len(values)
# Scale the instantaneous regret by the scale factor
instr_regret *= scale_factor
new_regrets = {}
for x in strategy:
if x in values:
new_regrets[x] = (values[x] * scale_factor) - instr_regret
else:
new_regrets[x] = 0 - instr_regret
memory.append(output_formatter(infoset, new_regrets, self.iteration))
return instr_regret
def traverse(self, game: Game, curr_play: int) -> int:
pass
| [
"tomwalden@gmail.com"
] | tomwalden@gmail.com |
09718e34bbf4f3e85051183fc7d4f3740d74c359 | f90078e1e8a5d2becd81f475e355806beaf7b652 | /test/stability_checker.py | 455a5582239b0b88a98332b188825f46c194270b | [] | no_license | Sussex-Invisibles/ftb_RAT_analysis | 4742534e8197cfd771126b19199d26ccf4b195c9 | 281394bfeff8954a7fd375177065c573b80f19aa | refs/heads/master | 2020-12-25T17:36:09.374616 | 2016-08-31T12:57:58 | 2016-08-31T12:57:58 | 36,074,577 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,984 | py | ##################################################
# Script to test stability functions in
# core.stability funcs.
#
# Author: Ed Leming
# Date: 25/02/15
##################################################
import rat
import ROOT
import core.stability_funcs as sf
import utils.db_access as dba
import utils.psup_map as psup_map
import time
import sys
import os
import numpy as np
def check_dir(dname):
"""Check if directory exists, create it if it doesn't
:param dname: Path to directory to be checked
:retrun dname as passed.
"""
direc = os.path.dirname(dname)
try:
os.stat(direc)
except:
os.mkdir(direc)
print "Made directory %s...." % dname
return dname
if __name__ == "__main__":
# Reset all roor stuff
ROOT.gROOT.Reset()
runDict = { 8843 : "1000Hz",
8991 : "500Hz",
9088 : "100Hz",
9091 : "10Hz",
9093 : "10Hz" }
#runDict = { 8991 : "500Hz" }
# Results and data paths
results_path = check_dir("/epp/scratch/neutrino/el230/ftbAnalysis/stability/cones/")
#results_path = "/home/el230/SNO+/ftbAnalysis/test/results/"
data_path = "/epp/scratch/neutrino/el230/rat_data_scripts/stability/"
# Make canvas and TFile
c1 = ROOT.TCanvas("c1","c1",600,400);
# Load rat defualts for fibre pos stuff
ROOT.RAT.DB.Get().LoadDefaults()
ROOT.RAT.DB.Get().Load("pmt/airfill2.ratdb")
ROOT.RAT.DB.Get().Load("geo/snoplus.geo")
ROOT.RAT.DU.Utility.Get().BeginOfRun()
fibre_pos = ROOT.RAT.DB.Get().GetLink("FIBRE", "FT035A")
fibre_pos_reflec = ROOT.RAT.DB.Get().GetLink("FIBRE", "FT003A")
count = 0
for run in runDict:
# Create or open ROOT file for results.
if runDict.values().count(runDict[run]) == 1:
r = sf.create_root_file("%s%s.root" % (results_path, runDict[run]))
elif runDict.values().count(runDict[run]) != 1 and runDict.values().index(runDict[run]) == count:
r = sf.create_root_file("%s%s.root" % (results_path, runDict[run]))
else:
r = sf.create_root_file("%s%s.root" % (results_path, runDict[run]), option = "UPDATE")
print "Updating file..."
# File stuff
data_dir = "%s%s/" % (data_path, runDict[run])
data_file = "%sR%s*.root" % (data_dir, run)
#data_file = "%sR%s_0.root" % (data_dir, run)
print data_file
time_plots_path = check_dir("%s/time_plots/" % results_path)
hits_path = check_dir("%s/cone_hits_plots/" % results_path)
# Plot hits in direct cone
pmt_hits = sf.get_PMT_hits_cone(data_file, fibre_pos, 25.)
tmp_tits = tmp_title = "NHits for Fibre FT035A direct cone - run %s" % (run)
hitHist = psup_map.proj_pmts(pmt_hits, tmp_tits)
hitHist.Draw("colz")
ROOT.gStyle.SetOptStat(0);
c1.SetLogz()
c1.Update()
c1.Print("%s%s_%s_direct.pdf" % (hits_path, run, runDict[run]))
# Stability in direct cone
mean_hit_graph, rms_graph, avg, stdev = sf.track_mean_nHits_cone(data_file, 500, fibre_pos, 25.)
mean_hit_graph.SetTitle("Cone nHit as a function of time: Freq = %s, run = %i" % (runDict[run], run))
mean_hit_graph.Draw("AP")
mean_hit_graph.Write( "nHitVsTime" )
c1.Update()
c1.Print("%sDirect_nHitVsTime_%s.pdf" % (time_plots_path, run))
rms_graph.SetTitle("Cone RMS as a function of time: Freq = %s, run = %i" % (runDict[run], run))
rms_graph.Draw("AP")
rms_graph.Write( "RMSVsTime" )
c1.Update()
c1.Print("%sDirect_RMSVsTime_%s.pdf" % (time_plots_path, run))
# Plot hits in reflected cone
pmt_hits = sf.get_PMT_hits_cone(data_file, fibre_pos_reflec, 25.)
tmp_tits = tmp_title = "NHits for Fibre FT035A reflected cone - run %s" % (run)
hitHist = psup_map.proj_pmts(pmt_hits, tmp_tits)
hitHist.Draw("colz")
ROOT.gStyle.SetOptStat(0);
c1.SetLogz()
c1.Update()
c1.Print("%s%s_%s_reflec.pdf" % (hits_path, run, runDict[run]))
# Stability in reflected cone
mean_hit_graph, rms_graph, avg, stdev = sf.track_mean_nHits_cone(data_file, 500, fibre_pos_reflec, 25.)
mean_hit_graph.SetTitle("Cone nHit as a function of time: Freq = %s, run = %i" % (runDict[run], run))
mean_hit_graph.Draw("AP")
mean_hit_graph.Write( "nHitVsTime" )
c1.Update()
c1.Print("%sReflected_nHitVsTime_%s.pdf" % (time_plots_path, run))
rms_graph.SetTitle("Cone RMS as a function of time: Freq = %s, run = %i" % (runDict[run], run))
rms_graph.Draw("AP")
rms_graph.Write( "RMSVsTime" )
c1.Update()
c1.Print("%sReflected_RMSVsTime_%s.pdf" % (time_plots_path, run))
| [
"el230@feynman.cm.cluster"
] | el230@feynman.cm.cluster |
73359898ccb822de2547f2d704554574bbd90992 | f0c72975dd8741f5118ce2092abf9f1b2cb69ede | /week4/9-1-People-at-Concert.py | 1cd2bbb79e005410b4fb5124fa0ed2a68bda41da | [] | no_license | tockata/HackBulgaria | fd8a19dbe6bde673a31f3fdb623f275c8cde5a5a | 0fe221ce006ec34010185007930054edda4dd644 | refs/heads/master | 2016-09-06T01:07:47.259854 | 2015-04-13T19:38:03 | 2015-04-13T19:38:03 | 29,928,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py | # from test_data import generate_test
def get_people_count(activity):
result_dictionary = {}
for name in activity:
result_dictionary[name] = None
return len(result_dictionary)
print(get_people_count(["Rado", "Ivo", "Maria", "Anneta", "Rado", "Rado", "Anneta", "Ivo", "Maria", "Rado"])) | [
"anatoly.angelov@gmail.com"
] | anatoly.angelov@gmail.com |
6ee6970d20919b8312b162238ef5b01936dcb84b | 4b767c8ea2e37e473647c0dc2b12d1f848260571 | /main/migrations/0026_alter_operation_history_сomponent.py | a8ce3b9dcb7603e7f9485e4337da0e2b9caa8707 | [] | no_license | Shankysik/ARMSOSIS | c52b81549c1a6a616fc4b5ee0753f3f3b3e75383 | 65052c6fe2e179634e3e119a18789c3718ce4fda | refs/heads/main | 2023-06-17T00:01:27.249401 | 2021-07-08T08:32:27 | 2021-07-08T08:32:27 | 384,047,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 477 | py | # Generated by Django 3.2.2 on 2021-06-22 18:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0025_alter_operation_history_status'),
]
operations = [
migrations.AlterField(
model_name='operation_history',
name='сomponent',
field=models.CharField(blank=True, max_length=200, null=True, verbose_name='Комплектующая'),
),
]
| [
"Shank33@mail.ru"
] | Shank33@mail.ru |
79e47df7a9fe11f27c98d9ac203f9989b92b70ec | f444de809b9733e8253fffa86f6c9450f6ac5523 | /python001.py | 2faef5f5ed3aeed6a309c9e26c52a6a56ea54565 | [] | no_license | topwhere/three | 13b37d54683c9607d14ac4c7c2b9f6129a0eb4bb | 43423dd10331658b5014088648d760dcc293aeb6 | refs/heads/master | 2020-03-28T21:06:29.770888 | 2018-09-17T13:27:21 | 2018-09-17T13:27:21 | 149,129,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | # 画笔
import turtle
t = turtle.Pen()
for x in range(360):
t.forward(x)
t.left(59)
| [
"413118324@qq.com"
] | 413118324@qq.com |
360161f68cc26664822e9a2ccfba9a23d11da692 | e7830d72c06467dffdfc7a87ab80f4206c5ce266 | /first_blog/migrations/0002_auto_20150619_1414.py | ba33f180a3059feabe43ac3db7e7c349da82d5f4 | [] | no_license | Uzzije/djangopractice | a455c27754e2122b9534961ad0eca3fa77b1907b | 1fb97d42afc33d854274e547eca23721917bb795 | refs/heads/master | 2016-09-03T07:17:35.005965 | 2015-07-09T18:52:46 | 2015-07-09T18:52:46 | 37,861,150 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('first_blog', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='author',
old_name='user_first_name',
new_name='user_name',
),
migrations.RemoveField(
model_name='author',
name='user_last_name',
),
migrations.AlterField(
model_name='author',
name='password',
field=models.CharField(max_length=200),
),
]
| [
"Uzzije2000@yahoo.co.uk"
] | Uzzije2000@yahoo.co.uk |
e17d42a2a2e20eac9b55ea3b041cba48fb9b57fe | e9de2fe68a2538bd0a8c1237363287c6b10ec9d2 | /scrapy_data/combase.py | c29af0882d9d540145f0ff580b0d5403892883d8 | [] | no_license | hechengfei/itjuzi | 9c081a2d3af7bf57be44108213468695ce34a38d | d64a11dc266b2a7071576e36cf777835c612ad32 | refs/heads/master | 2020-04-10T05:13:30.639765 | 2018-12-07T12:26:19 | 2018-12-07T12:26:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,405 | py | class ComBase():
def __init__(self,products_info,
com_quancheng,
com_faren ,
com_zhuceziben,
com_chenglishjian,
com_gongsileixing ,
com_dizhi,
com_gongsimingcheng,
com_gongsimingcheng2 ,
com_rongzilunci ,
com_zhucemingcheng,
com_guanwang ,
com_webwangzhi,
com_weixin ,
com_chengliyu,
com_state ,
com_guimo
):
self.products_info = products_info
self.com_quancheng = com_quancheng,
self.com_faren = com_faren ,
self.com_zhuceziben = com_zhuceziben,
self.com_chenglishjian = com_chenglishjian,
self.com_gongsileixing = com_gongsileixing,
self.com_dizhi = com_dizhi,
self.com_gongsimingcheng = com_gongsimingcheng,
self.com_gongsimingcheng2 = com_gongsimingcheng2,
self.com_rongzilunci = com_rongzilunci,
self.com_zhucemingcheng = com_zhucemingcheng,
self.com_guanwang = com_guanwang,
self.com_webwangzhi = com_webwangzhi,
self.com_weixin = com_weixin,
self.com_chengliyu = com_chengliyu,
self.com_state = com_state,
self.com_guimo = com_guimo
| [
"hecf@shuzilm.cn"
] | hecf@shuzilm.cn |
1edcceffcfbf8947bb55c85896d44b45eddc8739 | 673e829dda9583c8dd2ac8d958ba1dc304bffeaf | /data/multilingual/Latn.HNS/Serif_16/pdf_to_json_test_Latn.HNS_Serif_16.py | 14b2d82b21a61c2d50f3845e482493f91f58415d | [
"BSD-3-Clause"
] | permissive | antoinecarme/pdf_to_json_tests | 58bab9f6ba263531e69f793233ddc4d33b783b7e | d57a024fde862e698d916a1178f285883d7a3b2f | refs/heads/master | 2021-01-26T08:41:47.327804 | 2020-02-27T15:54:48 | 2020-02-27T15:54:48 | 243,359,934 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.HNS/Serif_16/udhr_Latn.HNS_Serif_16.pdf"
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
| [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
72d63bbb632d004dac54083326c00f067ec1f9c7 | 22e7fdcce6501ebcd7022dce2d4a8eaa1c894c4a | /ANALISIS_02_ GONZALEZ_RODRIGO.py | d7686f7b6ed5134e43ad2ecb27bbd935a08c18f5 | [] | no_license | Rgonzalez247/Curso-Profesional | e18144d020a6d8929bfb104b7404d2a8c11e9b92 | 04bdffc62fba93a20c769cf8ba79846048c8b40e | refs/heads/master | 2022-12-24T22:56:07.602073 | 2020-09-27T22:31:34 | 2020-09-27T22:31:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,550 | py | #!/usr/bin/env python
# coding: utf-8
# In[2]:
#Importar librerías
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
# In[3]:
#Importar datos
df = pd.read_csv('synergy_logistics.csv')
df
# # Opción 1
# ## Importaciones
# In[4]:
dfi= df[df.direction == "Imports"] #Filtrar Importación
# In[5]:
#Código para agrupar datos en un DataFrame el cual puede ser manipulable
rutas = dfi[['origin','destination']].value_counts() #Selecciono columnas a querer utilizar y contar las ocurrencias
rutas = pd.DataFrame(rutas) #Crear resultado anteriror en un DataFrame
rutas = rutas.reset_index() #Asignar un índice de inicio predeterminado, en lugar de utilizar una columna de datos como índice
rutas.columns = ['origin','destination','Count'] #Nombrar nuevas columnas
rutas.head(10) #Mostrar solo los primeros 10 resultados
# In[7]:
# Filtrar sobre la base de datos original tomando como base la base de datos de rutas para poder sumar el valor total de los
#productos e identificar que ruta tiene mayor valor.
r1 = dfi[(dfi.origin == 'Singapore') & (dfi.destination == 'Thailand')]
print('r1:')
print(r1.total_value.sum())
r2 = dfi[(dfi.origin == 'Germany') & (dfi.destination == 'China')]
print('r2:')
print(r2.total_value.sum())
r3 = dfi[(dfi.origin == 'China') & (dfi.destination == 'Japan')]
print('r3:')
print(r3.total_value.sum())
r4 = dfi[(dfi.origin == 'Japan') & (dfi.destination == 'Mexico')]
print('r4:')
print(r4.total_value.sum())
r5 = dfi[(dfi.origin == 'China') & (dfi.destination == 'Thailand')]
print('r5:')
print(r5.total_value.sum())
r6 = dfi[(dfi.origin == 'Malaysia') & (dfi.destination == 'Thailand')]
print('r6:')
print(r6.total_value.sum())
r7 = dfi[(dfi.origin == 'Spain') & (dfi.destination == 'Germany')]
print('r7:')
print(r7.total_value.sum())
r8 = dfi[(dfi.origin == 'Mexico') & (dfi.destination == 'USA')]
print('r8:')
print(r8.total_value.sum())
r9 = dfi[(dfi.origin == 'China') & (dfi.destination == 'United Arab Emirates')]
print('r9:')
print(r9.total_value.sum())
r10 = dfi[(dfi.origin == 'Brazil') & (dfi.destination == 'China')]
print('r10:')
print(r10.total_value.sum())
print("")
print("Valor Total:")
vr = r1.total_value.sum()+r2.total_value.sum()+r3.total_value.sum()+r4.total_value.sum()+r5.total_value.sum()+r6.total_value.sum()+r7.total_value.sum()+r8.total_value.sum()+r9.total_value.sum()+r10.total_value.sum()
print(vr) #sumatoria total para comparar con las otras opciones
# In[8]:
a = rutas.head(10)
print("Cantidad de uso de rutas del Top 10 rutas:")
print(a.Count.sum())
# ## Exportación
# In[10]:
dfe= df[df.direction == "Exports"] #Filtrar Exportación
# In[11]:
#Código para agrupar datos en un DataFrame el cual puede ser manipulable
rutas = dfe[['origin','destination']].value_counts() #Selecciono columnas a querer utilizar y contar las ocurrencias
rutas = pd.DataFrame(rutas) #Crear resultado anteriror en un DataFrame
rutas = rutas.reset_index() #Asignar un índice de inicio predeterminado, en lugar de utilizar una columna de datos como índice
rutas.columns = ['origin','destination','Count'] #Nombrar nuevas columnas
rutas.head(10) #Mostrar solo los primeros 10 resultados
# In[12]:
r1 = dfe[(dfe.origin == 'South Korea') & (dfe.destination == 'Vietnam')]
print('r1:')
print(r1.total_value.sum())
r2 = dfe[(dfe.origin == 'Netherlands') & (dfe.destination == 'Belgium')]
print('r2:')
print(r2.total_value.sum())
r3 = dfe[(dfe.origin == 'USA') & (dfe.destination == 'Netherlands')]
print('r3:')
print(r3.total_value.sum())
r4 = dfe[(dfe.origin == 'China') & (dfe.destination == 'Mexico')]
print('r4:')
print(r4.total_value.sum())
r5 = dfe[(dfe.origin == 'Japan') & (dfe.destination == 'Brazil')]
print('r5:')
print(r5.total_value.sum())
r6 = dfe[(dfe.origin == 'Germany') & (dfe.destination == 'France')]
print('r6:')
print(r6.total_value.sum())
r7 = dfe[(dfe.origin == 'South Korea') & (dfe.destination == 'Japan')]
print('r7:')
print(r7.total_value.sum())
r8 = dfe[(dfe.origin == 'Australia') & (dfe.destination == 'Singapore')]
print('r8:')
print(r8.total_value.sum())
r9 = dfe[(dfe.origin == 'Canada') & (dfe.destination == 'Mexico')]
print('r9:')
print(r9.total_value.sum())
r10 = dfe[(dfe.origin == 'China') & (dfe.destination == 'Spain')]
print('r10:')
print(r10.total_value.sum())
print("")
print("Valor Total:")
vr = r1.total_value.sum()+r2.total_value.sum()+r3.total_value.sum()+r4.total_value.sum()+r5.total_value.sum()+r6.total_value.sum()+r7.total_value.sum()+r8.total_value.sum()+r9.total_value.sum()+r10.total_value.sum()
print(vr) #sumatoria total para comparar con las otras opciones
# In[13]:
a = rutas.head(10)
print("Cantidad de uso de rutas del Top 10 rutas:")
print(a.Count.sum())
# # Opcion 2
# ## Importación
# In[19]:
dfi2= df[df.direction == "Imports"] #Filtrar Importación
# In[20]:
# Mismos códigos que los de la opción 1 pero ahora tomando en cuenta el medio de transporte nada más
transporte = dfi2['transport_mode'].value_counts()
transporte = pd.DataFrame(transporte)
transporte = transporte.reset_index()
transporte.columns = ['transport_mode','Count']
transporte.head(10)
# In[21]:
t1 = dfi2[df.transport_mode == 'Sea']
print('Sea:')
print(t1.total_value.sum())
t2 = dfi2[df.transport_mode == 'Rail']
print('Rail:')
print(t2.total_value.sum())
t4 = dfi2[df.transport_mode == 'Road']
print('Road:')
print(t4.total_value.sum())
t3 = dfi2[df.transport_mode == 'Air']
print('Air:')
print(t3.total_value.sum())
# In[23]:
print('Valor de los 3 medios de transporte más importantes:')
vt = t1.total_value.sum()+t2.total_value.sum()+t3.total_value.sum()
print(vt)
# ## Exportación
# In[15]:
dfe2= df[df.direction == "Exports"] #Filtrar Importación
# In[16]:
# Mismos códigos que los de la opción 1 pero ahora tomando en cuenta el medio de transporte nada más
transporte = dfe2['transport_mode'].value_counts()
transporte = pd.DataFrame(transporte)
transporte = transporte.reset_index()
transporte.columns = ['transport_mode','Count']
transporte.head(10)
# In[17]:
t1 = dfe2[df.transport_mode == 'Sea']
print('Sea:')
print(t1.total_value.sum())
t2 = dfe2[df.transport_mode == 'Rail']
print('Rail:')
print(t2.total_value.sum())
t4 = dfe2[df.transport_mode == 'Road']
print('Road:')
print(t4.total_value.sum())
t3 = dfe2[df.transport_mode == 'Air']
print('Air:')
print(t3.total_value.sum())
# In[18]:
print('Valor de los 3 medios de transporte más importantes:')
vt = t1.total_value.sum()+t2.total_value.sum()+t3.total_value.sum()
print(vt)
# # Opción 3
# In[3]:
# Mismos códigos que los de la opción 1 y 2 pero ahora tomando en cuenta el origen nada más
valor = df.groupby(by = ['origin']).total_value.sum()
valor = pd.DataFrame(valor)
valor = valor.reset_index()
valor.columns = ['origin','Sum']
valor.sort_values(by='Sum',ascending=False)
# In[4]:
#Sobre los resultados de la tabla anterior, dividir las cantidades de valor de productos entre el total para obtener porcentaje
#de representación
percentage = valor.Sum/valor.Sum.sum() #Obtener el porcentaje de cada fila entre el total de la suma de valores
percentage.sort_values(ascending = False)
# In[6]:
sor = percentage.sort_values(ascending = False)
sor
# In[12]:
sor2 = sor.head(9)
sum(sor2)
# In[13]:
#Ordenar tabla en modo descendiente
val = valor.sort_values(by='Sum',ascending=False)
val
# In[18]:
#Filtrar los 9 valores más altos, los cuales representan alrededor del 80% del valor total
val = val.head(9)
val
# In[20]:
val.Sum.sum()
| [
"noreply@github.com"
] | Rgonzalez247.noreply@github.com |
445e907fa27edefa23e97702e934022892753f6d | 8764e4ba185e070e758cd2e804c8c2e808c62bca | /caffe-easy/src/caffe/proto/caffe_pb2.py | cebcf250c1ea5a8e7f6c8c787f9d3e39e61babe8 | [
"BSD-2-Clause",
"LicenseRef-scancode-generic-cla"
] | permissive | Usernamezhx/CCDL | 8e1079730a4f3e42e3417e121034d8b6020817cc | 9d664045c3a336baaa4786fea5ea40ec6e9c2c4e | refs/heads/master | 2021-01-01T16:15:07.951212 | 2017-07-19T02:28:04 | 2017-07-19T02:28:04 | 97,795,985 | 1 | 0 | null | 2017-07-20T05:56:39 | 2017-07-20T05:56:38 | null | UTF-8 | Python | false | true | 396,449 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: caffe.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='caffe.proto',
package='caffe',
syntax='proto2',
serialized_pb=_b('\n\x0b\x63\x61\x66\x66\x65.proto\x12\x05\x63\x61\x66\x66\x65\"\x1c\n\tBlobShape\x12\x0f\n\x03\x64im\x18\x01 \x03(\x03\x42\x02\x10\x01\"\xcc\x01\n\tBlobProto\x12\x1f\n\x05shape\x18\x07 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x10\n\x04\x64\x61ta\x18\x05 \x03(\x02\x42\x02\x10\x01\x12\x10\n\x04\x64iff\x18\x06 \x03(\x02\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_data\x18\x08 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_diff\x18\t \x03(\x01\x42\x02\x10\x01\x12\x0e\n\x03num\x18\x01 \x01(\x05:\x01\x30\x12\x13\n\x08\x63hannels\x18\x02 \x01(\x05:\x01\x30\x12\x11\n\x06height\x18\x03 \x01(\x05:\x01\x30\x12\x10\n\x05width\x18\x04 \x01(\x05:\x01\x30\"2\n\x0f\x42lobProtoVector\x12\x1f\n\x05\x62lobs\x18\x01 \x03(\x0b\x32\x10.caffe.BlobProto\"\x91\x01\n\x05\x44\x61tum\x12\x10\n\x08\x63hannels\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\r\n\x05width\x18\x03 \x01(\x05\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12\r\n\x05label\x18\x05 \x01(\x05\x12\x12\n\nfloat_data\x18\x06 \x03(\x02\x12\x16\n\x07\x65ncoded\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x0e\n\x06labels\x18\x08 \x03(\x02\"C\n\tMTCNNBBox\x12\x0c\n\x04xmin\x18\x01 \x01(\x02\x12\x0c\n\x04ymin\x18\x02 \x01(\x02\x12\x0c\n\x04xmax\x18\x03 \x01(\x02\x12\x0c\n\x04ymax\x18\x04 \x01(\x02\"U\n\nMTCNNDatum\x12\x1b\n\x05\x64\x61tum\x18\x01 \x01(\x0b\x32\x0c.caffe.Datum\x12\x1d\n\x03roi\x18\x02 \x01(\x0b\x32\x10.caffe.MTCNNBBox\x12\x0b\n\x03pts\x18\x03 \x03(\x02\"A\n\x0cLabelMapItem\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\x05\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\"-\n\x08LabelMap\x12!\n\x04item\x18\x01 \x03(\x0b\x32\x13.caffe.LabelMapItem\"o\n\x07Sampler\x12\x14\n\tmin_scale\x18\x01 \x01(\x02:\x01\x31\x12\x14\n\tmax_scale\x18\x02 \x01(\x02:\x01\x31\x12\x1b\n\x10min_aspect_ratio\x18\x03 \x01(\x02:\x01\x31\x12\x1b\n\x10max_aspect_ratio\x18\x04 \x01(\x02:\x01\x31\"\xc0\x01\n\x10SampleConstraint\x12\x1b\n\x13min_jaccard_overlap\x18\x01 
\x01(\x02\x12\x1b\n\x13max_jaccard_overlap\x18\x02 \x01(\x02\x12\x1b\n\x13min_sample_coverage\x18\x03 \x01(\x02\x12\x1b\n\x13max_sample_coverage\x18\x04 \x01(\x02\x12\x1b\n\x13min_object_coverage\x18\x05 \x01(\x02\x12\x1b\n\x13max_object_coverage\x18\x06 \x01(\x02\"\xb2\x01\n\x0c\x42\x61tchSampler\x12 \n\x12use_original_image\x18\x01 \x01(\x08:\x04true\x12\x1f\n\x07sampler\x18\x02 \x01(\x0b\x32\x0e.caffe.Sampler\x12\x32\n\x11sample_constraint\x18\x03 \x01(\x0b\x32\x17.caffe.SampleConstraint\x12\x12\n\nmax_sample\x18\x04 \x01(\r\x12\x17\n\nmax_trials\x18\x05 \x01(\r:\x03\x31\x30\x30\"\x8a\x01\n\x0e\x45mitConstraint\x12\x39\n\temit_type\x18\x01 \x01(\x0e\x32\x1e.caffe.EmitConstraint.EmitType:\x06\x43\x45NTER\x12\x14\n\x0c\x65mit_overlap\x18\x02 \x01(\x02\"\'\n\x08\x45mitType\x12\n\n\x06\x43\x45NTER\x10\x00\x12\x0f\n\x0bMIN_OVERLAP\x10\x01\"\x87\x01\n\x0eNormalizedBBox\x12\x0c\n\x04xmin\x18\x01 \x01(\x02\x12\x0c\n\x04ymin\x18\x02 \x01(\x02\x12\x0c\n\x04xmax\x18\x03 \x01(\x02\x12\x0c\n\x04ymax\x18\x04 \x01(\x02\x12\r\n\x05label\x18\x05 \x01(\x05\x12\x11\n\tdifficult\x18\x06 \x01(\x08\x12\r\n\x05score\x18\x07 \x01(\x02\x12\x0c\n\x04size\x18\x08 \x01(\x02\"I\n\nAnnotation\x12\x16\n\x0binstance_id\x18\x01 \x01(\x05:\x01\x30\x12#\n\x04\x62\x62ox\x18\x02 \x01(\x0b\x32\x15.caffe.NormalizedBBox\"M\n\x0f\x41nnotationGroup\x12\x13\n\x0bgroup_label\x18\x01 \x01(\x05\x12%\n\nannotation\x18\x02 \x03(\x0b\x32\x11.caffe.Annotation\"\xaf\x01\n\x0e\x41nnotatedDatum\x12\x1b\n\x05\x64\x61tum\x18\x01 \x01(\x0b\x32\x0c.caffe.Datum\x12\x32\n\x04type\x18\x02 \x01(\x0e\x32$.caffe.AnnotatedDatum.AnnotationType\x12\x30\n\x10\x61nnotation_group\x18\x03 \x03(\x0b\x32\x16.caffe.AnnotationGroup\"\x1a\n\x0e\x41nnotationType\x12\x08\n\x04\x42\x42OX\x10\x00\"\x98\x02\n\x0f\x46illerParameter\x12\x16\n\x04type\x18\x01 \x01(\t:\x08\x63onstant\x12\x10\n\x05value\x18\x02 \x01(\x02:\x01\x30\x12\x0e\n\x03min\x18\x03 \x01(\x02:\x01\x30\x12\x0e\n\x03max\x18\x04 \x01(\x02:\x01\x31\x12\x0f\n\x04mean\x18\x05 
\x01(\x02:\x01\x30\x12\x0e\n\x03std\x18\x06 \x01(\x02:\x01\x31\x12\x12\n\x06sparse\x18\x07 \x01(\x05:\x02-1\x12\x42\n\rvariance_norm\x18\x08 \x01(\x0e\x32#.caffe.FillerParameter.VarianceNorm:\x06\x46\x41N_IN\x12\x0c\n\x04\x66ile\x18\t \x01(\t\"4\n\x0cVarianceNorm\x12\n\n\x06\x46\x41N_IN\x10\x00\x12\x0b\n\x07\x46\x41N_OUT\x10\x01\x12\x0b\n\x07\x41VERAGE\x10\x02\"\x8e\x02\n\x0cNetParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12%\n\x0binput_shape\x18\x08 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x11\n\tinput_dim\x18\x04 \x03(\x05\x12\x1d\n\x0e\x66orce_backward\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x05state\x18\x06 \x01(\x0b\x32\x0f.caffe.NetState\x12\x19\n\ndebug_info\x18\x07 \x01(\x08:\x05\x66\x61lse\x12$\n\x05layer\x18\x64 \x03(\x0b\x32\x15.caffe.LayerParameter\x12\'\n\x06layers\x18\x02 \x03(\x0b\x32\x17.caffe.V1LayerParameter\"\xa3\x0b\n\x0fSolverParameter\x12\x0b\n\x03net\x18\x18 \x01(\t\x12&\n\tnet_param\x18\x19 \x01(\x0b\x32\x13.caffe.NetParameter\x12\x11\n\ttrain_net\x18\x01 \x01(\t\x12\x10\n\x08test_net\x18\x02 \x03(\t\x12,\n\x0ftrain_net_param\x18\x15 \x01(\x0b\x32\x13.caffe.NetParameter\x12+\n\x0etest_net_param\x18\x16 \x03(\x0b\x32\x13.caffe.NetParameter\x12$\n\x0btrain_state\x18\x1a \x01(\x0b\x32\x0f.caffe.NetState\x12#\n\ntest_state\x18\x1b \x03(\x0b\x32\x0f.caffe.NetState\x12!\n\teval_type\x18) \x01(\t:\x0e\x63lassification\x12\x1c\n\nap_version\x18* \x01(\t:\x08Integral\x12$\n\x15show_per_class_result\x18, \x01(\x08:\x05\x66\x61lse\x12\x11\n\ttest_iter\x18\x03 \x03(\x05\x12\x18\n\rtest_interval\x18\x04 \x01(\x05:\x01\x30\x12 \n\x11test_compute_loss\x18\x13 \x01(\x08:\x05\x66\x61lse\x12!\n\x13test_initialization\x18 \x01(\x08:\x04true\x12\x0f\n\x07\x62\x61se_lr\x18\x05 \x01(\x02\x12\x0f\n\x07\x64isplay\x18\x06 \x01(\x05\x12\x17\n\x0c\x61verage_loss\x18! 
\x01(\x05:\x01\x31\x12\x10\n\x08max_iter\x18\x07 \x01(\x05\x12\x14\n\titer_size\x18$ \x01(\x05:\x01\x31\x12\x11\n\tlr_policy\x18\x08 \x01(\t\x12\r\n\x05gamma\x18\t \x01(\x02\x12\r\n\x05power\x18\n \x01(\x02\x12\x10\n\x08momentum\x18\x0b \x01(\x02\x12\x14\n\x0cweight_decay\x18\x0c \x01(\x02\x12\x1f\n\x13regularization_type\x18\x1d \x01(\t:\x02L2\x12\x10\n\x08stepsize\x18\r \x01(\x05\x12\x11\n\tstepvalue\x18\" \x03(\x05\x12\x17\n\x0fplateau_winsize\x18+ \x03(\x05\x12\x1a\n\x0e\x63lip_gradients\x18# \x01(\x02:\x02-1\x12\x13\n\x08snapshot\x18\x0e \x01(\x05:\x01\x30\x12\x17\n\x0fsnapshot_prefix\x18\x0f \x01(\t\x12\x1c\n\rsnapshot_diff\x18\x10 \x01(\x08:\x05\x66\x61lse\x12K\n\x0fsnapshot_format\x18% \x01(\x0e\x32%.caffe.SolverParameter.SnapshotFormat:\x0b\x42INARYPROTO\x12;\n\x0bsolver_mode\x18\x11 \x01(\x0e\x32!.caffe.SolverParameter.SolverMode:\x03GPU\x12\x14\n\tdevice_id\x18\x12 \x01(\x05:\x01\x30\x12\x17\n\x0brandom_seed\x18\x14 \x01(\x03:\x02-1\x12\x11\n\x04type\x18( \x01(\t:\x03SGD\x12\x15\n\x05\x64\x65lta\x18\x1f \x01(\x02:\x06\x31\x65-008\x12\x18\n\tmomentum2\x18\' \x01(\x02:\x05\x30.999\x12\x17\n\trms_decay\x18& \x01(\x02:\x04\x30.99\x12\x19\n\ndebug_info\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\"\n\x14snapshot_after_train\x18\x1c \x01(\x08:\x04true\x12;\n\x0bsolver_type\x18\x1e \x01(\x0e\x32!.caffe.SolverParameter.SolverType:\x03SGD\"+\n\x0eSnapshotFormat\x12\x08\n\x04HDF5\x10\x00\x12\x0f\n\x0b\x42INARYPROTO\x10\x01\"\x1e\n\nSolverMode\x12\x07\n\x03\x43PU\x10\x00\x12\x07\n\x03GPU\x10\x01\"U\n\nSolverType\x12\x07\n\x03SGD\x10\x00\x12\x0c\n\x08NESTEROV\x10\x01\x12\x0b\n\x07\x41\x44\x41GRAD\x10\x02\x12\x0b\n\x07RMSPROP\x10\x03\x12\x0c\n\x08\x41\x44\x41\x44\x45LTA\x10\x04\x12\x08\n\x04\x41\x44\x41M\x10\x05\"\xa6\x01\n\x0bSolverState\x12\x0c\n\x04iter\x18\x01 \x01(\x05\x12\x13\n\x0blearned_net\x18\x02 \x01(\t\x12!\n\x07history\x18\x03 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x17\n\x0c\x63urrent_step\x18\x04 \x01(\x05:\x01\x30\x12\x1c\n\x0cminimum_loss\x18\x05 
\x01(\x02:\x06\x31\x65+038\x12\x1a\n\x0fiter_last_event\x18\x06 \x01(\x05:\x01\x30\"N\n\x08NetState\x12!\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase:\x04TEST\x12\x10\n\x05level\x18\x02 \x01(\x05:\x01\x30\x12\r\n\x05stage\x18\x03 \x03(\t\"s\n\x0cNetStateRule\x12\x1b\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase\x12\x11\n\tmin_level\x18\x02 \x01(\x05\x12\x11\n\tmax_level\x18\x03 \x01(\x05\x12\r\n\x05stage\x18\x04 \x03(\t\x12\x11\n\tnot_stage\x18\x05 \x03(\t\"\xa3\x01\n\tParamSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\nshare_mode\x18\x02 \x01(\x0e\x32\x1d.caffe.ParamSpec.DimCheckMode\x12\x12\n\x07lr_mult\x18\x03 \x01(\x02:\x01\x31\x12\x15\n\ndecay_mult\x18\x04 \x01(\x02:\x01\x31\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\xac\x01\n\x13PredictBoxParameter\x12\x11\n\x06stride\x18\x01 \x01(\r:\x01\x32\x12\x1b\n\x0freceptive_field\x18\x02 \x01(\r:\x02\x31\x32\x12\x11\n\x03nms\x18\x03 \x01(\x08:\x04true\x12\x1b\n\routput_vector\x18\x04 \x01(\x08:\x04true\x12\x1c\n\x0fpositive_thresh\x18\x05 \x01(\x02:\x03\x30.5\x12\x17\n\tbbreg_exp\x18\x06 \x01(\x08:\x04true\"\xc1\x1d\n\x0eLayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0e\n\x06\x62ottom\x18\x03 \x03(\t\x12\x0b\n\x03top\x18\x04 \x03(\t\x12\x1b\n\x05phase\x18\n \x01(\x0e\x32\x0c.caffe.Phase\x12\x13\n\x0bloss_weight\x18\x05 \x03(\x02\x12\x1f\n\x05param\x18\x06 \x03(\x0b\x32\x10.caffe.ParamSpec\x12\x1f\n\x05\x62lobs\x18\x07 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x16\n\x0epropagate_down\x18\x0b \x03(\x08\x12$\n\x07include\x18\x08 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18\t \x03(\x0b\x32\x13.caffe.NetStateRule\x12\x37\n\x0ftransform_param\x18\x64 \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18\x65 \x01(\x0b\x32\x14.caffe.LossParameter\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x66 \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12<\n\x14\x61nnotated_data_param\x18\xc8\x01 
\x01(\x0b\x32\x1d.caffe.AnnotatedDataParameter\x12,\n\x0c\x61rgmax_param\x18g \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12\x34\n\x10\x62\x61tch_norm_param\x18\x8b\x01 \x01(\x0b\x32\x19.caffe.BatchNormParameter\x12)\n\nbias_param\x18\x8d\x01 \x01(\x0b\x32\x14.caffe.BiasParameter\x12\x36\n\x11\x63\x65nter_loss_param\x18\x93\x01 \x01(\x0b\x32\x1a.caffe.CenterLossParameter\x12,\n\x0c\x63oncat_param\x18h \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18i \x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18j \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12)\n\ncrop_param\x18\x90\x01 \x01(\x0b\x32\x14.caffe.CropParameter\x12(\n\ndata_param\x18k \x01(\x0b\x32\x14.caffe.DataParameter\x12\x44\n\x18\x64\x65tection_evaluate_param\x18\xcd\x01 \x01(\x0b\x32!.caffe.DetectionEvaluateParameter\x12@\n\x16\x64\x65tection_output_param\x18\xcc\x01 \x01(\x0b\x32\x1f.caffe.DetectionOutputParameter\x12.\n\rdropout_param\x18l \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18m \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18n \x01(\x0b\x32\x17.caffe.EltwiseParameter\x12\'\n\telu_param\x18\x8c\x01 \x01(\x0b\x32\x13.caffe.ELUParameter\x12+\n\x0b\x65mbed_param\x18\x89\x01 \x01(\x0b\x32\x15.caffe.EmbedParameter\x12&\n\texp_param\x18o \x01(\x0b\x32\x13.caffe.ExpParameter\x12/\n\rflatten_param\x18\x87\x01 \x01(\x0b\x32\x17.caffe.FlattenParameter\x12\x31\n\x0fhdf5_data_param\x18p \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18q \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18r \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18s \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18t \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18u \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12+\n\x0binput_param\x18\x8f\x01 
\x01(\x0b\x32\x15.caffe.InputParameter\x12\'\n\tlog_param\x18\x86\x01 \x01(\x0b\x32\x13.caffe.LogParameter\x12&\n\tlrn_param\x18v \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18w \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12:\n\x13multibox_loss_param\x18\xc9\x01 \x01(\x0b\x32\x1c.caffe.MultiBoxLossParameter\x12&\n\tmvn_param\x18x \x01(\x0b\x32\x13.caffe.MVNParameter\x12.\n\nnorm_param\x18\xce\x01 \x01(\x0b\x32\x19.caffe.NormalizeParameter\x12\x36\n\x11predict_box_param\x18\xd1\x01 \x01(\x0b\x32\x1a.caffe.PredictBoxParameter\x12\x33\n\x0fparameter_param\x18\x91\x01 \x01(\x0b\x32\x19.caffe.ParameterParameter\x12/\n\rpermute_param\x18\xca\x01 \x01(\x0b\x32\x17.caffe.PermuteParameter\x12.\n\rpooling_param\x18y \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18z \x01(\x0b\x32\x15.caffe.PowerParameter\x12+\n\x0bprelu_param\x18\x83\x01 \x01(\x0b\x32\x15.caffe.PReLUParameter\x12\x32\n\x0fprior_box_param\x18\xcb\x01 \x01(\x0b\x32\x18.caffe.PriorBoxParameter\x12-\n\x0cpython_param\x18\x82\x01 \x01(\x0b\x32\x16.caffe.PythonParameter\x12\x33\n\x0frecurrent_param\x18\x92\x01 \x01(\x0b\x32\x19.caffe.RecurrentParameter\x12\x33\n\x0freduction_param\x18\x88\x01 \x01(\x0b\x32\x19.caffe.ReductionParameter\x12(\n\nrelu_param\x18{ \x01(\x0b\x32\x14.caffe.ReLUParameter\x12/\n\rreshape_param\x18\x85\x01 \x01(\x0b\x32\x17.caffe.ReshapeParameter\x12+\n\x0bscale_param\x18\x8e\x01 \x01(\x0b\x32\x15.caffe.ScaleParameter\x12.\n\rsigmoid_param\x18| \x01(\x0b\x32\x17.caffe.SigmoidParameter\x12.\n\rsoftmax_param\x18} \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12\'\n\tspp_param\x18\x84\x01 \x01(\x0b\x32\x13.caffe.SPPParameter\x12*\n\x0bslice_param\x18~ \x01(\x0b\x32\x15.caffe.SliceParameter\x12(\n\ntanh_param\x18\x7f \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x33\n\x0fthreshold_param\x18\x80\x01 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12)\n\ntile_param\x18\x8a\x01 \x01(\x0b\x32\x14.caffe.TileParameter\x12\x34\n\x10video_data_param\x18\xcf\x01 
\x01(\x0b\x32\x19.caffe.VideoDataParameter\x12\x36\n\x11window_data_param\x18\x81\x01 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12)\n\nflip_param\x18\xd4\x01 \x01(\x0b\x32\x14.caffe.FlipParameter\x12)\n\nlstm_param\x18\x94\x01 \x01(\x0b\x32\x14.caffe.LSTMParameter\x12\'\n\tctc_param\x18\x95\x01 \x01(\x0b\x32\x13.caffe.CTCParameter\x12\x33\n\x0ftranspose_param\x18\x96\x01 \x01(\x0b\x32\x19.caffe.TransposeParameter\x12/\n\rreverse_param\x18\x97\x01 \x01(\x0b\x32\x17.caffe.ReverseParameter\x12\x30\n\x0e\x63tc_loss_param\x18\x98\x01 \x01(\x0b\x32\x17.caffe.CtcLossParameter\x12L\n\x1c\x63ontinuation_indicator_param\x18\x99\x01 \x01(\x0b\x32%.caffe.ContinuationIndicatorParameter\x12L\n\x1clabelsequence_accuracy_param\x18\x9a\x01 \x01(\x0b\x32%.caffe.LabelsequenceAccuracyParameter\x12\x35\n\x08st_param\x18\x9c\x01 \x01(\x0b\x32\".caffe.SpatialTransformerParameter\x12.\n\rst_loss_param\x18\x9d\x01 \x01(\x0b\x32\x16.caffe.STLossParameter\x12\x34\n\x10power_file_param\x18\x9e\x01 \x01(\x0b\x32\x19.caffe.PowerFileParameter\x12\x30\n\x0eloc_loss_param\x18\x9f\x01 \x01(\x0b\x32\x17.caffe.LocLossParameter\"E\n\rFlipParameter\x12\x18\n\nflip_width\x18\x01 \x01(\x08:\x04true\x12\x1a\n\x0b\x66lip_height\x18\x02 \x01(\x08:\x05\x66\x61lse\"\xca\x03\n\x17TransformationParameter\x12\x10\n\x05scale\x18\x01 \x01(\x02:\x01\x31\x12\x15\n\x06mirror\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x14\n\tcrop_size\x18\x03 \x01(\r:\x01\x30\x12\x11\n\x06\x63rop_h\x18\x0b \x01(\r:\x01\x30\x12\x11\n\x06\x63rop_w\x18\x0c \x01(\r:\x01\x30\x12\x11\n\tmean_file\x18\x04 \x01(\t\x12\x12\n\nmean_value\x18\x05 \x03(\x02\x12\x1a\n\x0b\x66orce_color\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\nforce_gray\x18\x07 \x01(\x08:\x05\x66\x61lse\x12,\n\x0cresize_param\x18\x08 \x01(\x0b\x32\x16.caffe.ResizeParameter\x12*\n\x0bnoise_param\x18\t \x01(\x0b\x32\x15.caffe.NoiseParameter\x12\x31\n\rdistort_param\x18\r \x01(\x0b\x32\x1a.caffe.DistortionParameter\x12/\n\x0c\x65xpand_param\x18\x0e 
\x01(\x0b\x32\x19.caffe.ExpansionParameter\x12.\n\x0f\x65mit_constraint\x18\n \x01(\x0b\x32\x15.caffe.EmitConstraint\"\x90\x04\n\x0fResizeParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x31\x12=\n\x0bresize_mode\x18\x02 \x01(\x0e\x32\".caffe.ResizeParameter.Resize_mode:\x04WARP\x12\x11\n\x06height\x18\x03 \x01(\r:\x01\x30\x12\x10\n\x05width\x18\x04 \x01(\r:\x01\x30\x12\x17\n\x0cheight_scale\x18\x08 \x01(\r:\x01\x30\x12\x16\n\x0bwidth_scale\x18\t \x01(\r:\x01\x30\x12;\n\x08pad_mode\x18\x05 \x01(\x0e\x32\x1f.caffe.ResizeParameter.Pad_mode:\x08\x43ONSTANT\x12\x11\n\tpad_value\x18\x06 \x03(\x02\x12\x37\n\x0binterp_mode\x18\x07 \x03(\x0e\x32\".caffe.ResizeParameter.Interp_mode\"G\n\x0bResize_mode\x12\x08\n\x04WARP\x10\x01\x12\x12\n\x0e\x46IT_SMALL_SIZE\x10\x02\x12\x1a\n\x16\x46IT_LARGE_SIZE_AND_PAD\x10\x03\":\n\x08Pad_mode\x12\x0c\n\x08\x43ONSTANT\x10\x01\x12\x0c\n\x08MIRRORED\x10\x02\x12\x12\n\x0eREPEAT_NEAREST\x10\x03\"I\n\x0bInterp_mode\x12\n\n\x06LINEAR\x10\x01\x12\x08\n\x04\x41REA\x10\x02\x12\x0b\n\x07NEAREST\x10\x03\x12\t\n\x05\x43UBIC\x10\x04\x12\x0c\n\x08LANCZOS4\x10\x05\"9\n\x13SaltPepperParameter\x12\x13\n\x08\x66raction\x18\x01 \x01(\x02:\x01\x30\x12\r\n\x05value\x18\x02 \x03(\x02\"\xee\x02\n\x0eNoiseParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x30\x12\x16\n\x07hist_eq\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x16\n\x07inverse\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndecolorize\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ngauss_blur\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x04jpeg\x18\x06 \x01(\x02:\x02-1\x12\x18\n\tposterize\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x05\x65rode\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x19\n\nsaltpepper\x18\t \x01(\x08:\x05\x66\x61lse\x12\x34\n\x10saltpepper_param\x18\n \x01(\x0b\x32\x1a.caffe.SaltPepperParameter\x12\x14\n\x05\x63lahe\x18\x0b \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63onvert_to_hsv\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63onvert_to_lab\x18\r 
\x01(\x08:\x05\x66\x61lse\"\xbd\x02\n\x13\x44istortionParameter\x12\x1a\n\x0f\x62rightness_prob\x18\x01 \x01(\x02:\x01\x30\x12\x1b\n\x10\x62rightness_delta\x18\x02 \x01(\x02:\x01\x30\x12\x18\n\rcontrast_prob\x18\x03 \x01(\x02:\x01\x30\x12\x19\n\x0e\x63ontrast_lower\x18\x04 \x01(\x02:\x01\x30\x12\x19\n\x0e\x63ontrast_upper\x18\x05 \x01(\x02:\x01\x30\x12\x13\n\x08hue_prob\x18\x06 \x01(\x02:\x01\x30\x12\x14\n\thue_delta\x18\x07 \x01(\x02:\x01\x30\x12\x1a\n\x0fsaturation_prob\x18\x08 \x01(\x02:\x01\x30\x12\x1b\n\x10saturation_lower\x18\t \x01(\x02:\x01\x30\x12\x1b\n\x10saturation_upper\x18\n \x01(\x02:\x01\x30\x12\x1c\n\x11random_order_prob\x18\x0b \x01(\x02:\x01\x30\"B\n\x12\x45xpansionParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x31\x12\x1b\n\x10max_expand_ratio\x18\x02 \x01(\x02:\x01\x31\"\xc2\x01\n\rLossParameter\x12\x14\n\x0cignore_label\x18\x01 \x01(\x05\x12\x44\n\rnormalization\x18\x03 \x01(\x0e\x32&.caffe.LossParameter.NormalizationMode:\x05VALID\x12\x11\n\tnormalize\x18\x02 \x01(\x08\"B\n\x11NormalizationMode\x12\x08\n\x04\x46ULL\x10\x00\x12\t\n\x05VALID\x10\x01\x12\x0e\n\nBATCH_SIZE\x10\x02\x12\x08\n\x04NONE\x10\x03\"L\n\x11\x41\x63\x63uracyParameter\x12\x10\n\x05top_k\x18\x01 \x01(\r:\x01\x31\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x14\n\x0cignore_label\x18\x03 \x01(\x05\"\x95\x01\n\x16\x41nnotatedDataParameter\x12*\n\rbatch_sampler\x18\x01 \x03(\x0b\x32\x13.caffe.BatchSampler\x12\x16\n\x0elabel_map_file\x18\x02 \x01(\t\x12\x37\n\tanno_type\x18\x03 \x01(\x0e\x32$.caffe.AnnotatedDatum.AnnotationType\"M\n\x0f\x41rgMaxParameter\x12\x1a\n\x0bout_max_val\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x05top_k\x18\x02 \x01(\r:\x01\x31\x12\x0c\n\x04\x61xis\x18\x03 \x01(\x05\"9\n\x0f\x43oncatParameter\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x15\n\nconcat_dim\x18\x01 \x01(\r:\x01\x31\"k\n\x12\x42\x61tchNormParameter\x12\x18\n\x10use_global_stats\x18\x01 \x01(\x08\x12&\n\x17moving_average_fraction\x18\x02 
\x01(\x02:\x05\x30.999\x12\x13\n\x03\x65ps\x18\x03 \x01(\x02:\x06\x31\x65-005\"]\n\rBiasParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\"L\n\x18\x43ontrastiveLossParameter\x12\x11\n\x06margin\x18\x01 \x01(\x02:\x01\x31\x12\x1d\n\x0elegacy_version\x18\x02 \x01(\x08:\x05\x66\x61lse\"\xfc\x03\n\x14\x43onvolutionParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12\x0b\n\x03pad\x18\x03 \x03(\r\x12\x13\n\x0bkernel_size\x18\x04 \x03(\r\x12\x0e\n\x06stride\x18\x06 \x03(\r\x12\x10\n\x08\x64ilation\x18\x12 \x03(\r\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x10\n\x08kernel_h\x18\x0b \x01(\r\x12\x10\n\x08kernel_w\x18\x0c \x01(\r\x12\x10\n\x08stride_h\x18\r \x01(\r\x12\x10\n\x08stride_w\x18\x0e \x01(\r\x12\x10\n\x05group\x18\x05 \x01(\r:\x01\x31\x12-\n\rweight_filler\x18\x07 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x08 \x01(\x0b\x32\x16.caffe.FillerParameter\x12;\n\x06\x65ngine\x18\x0f \x01(\x0e\x32\".caffe.ConvolutionParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x10 \x01(\x05:\x01\x31\x12\x1e\n\x0f\x66orce_nd_im2col\x18\x11 \x01(\x08:\x05\x66\x61lse\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"0\n\rCropParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x32\x12\x0e\n\x06offset\x18\x02 \x03(\r\"\xa4\x02\n\rDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x04 \x01(\r\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x31\n\x07\x62\x61\x63kend\x18\x08 \x01(\x0e\x32\x17.caffe.DataParameter.DB:\x07LEVELDB\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 
\x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x66orce_encoded_color\x18\t \x01(\x08:\x05\x66\x61lse\x12\x13\n\x08prefetch\x18\n \x01(\r:\x01\x34\"\x1b\n\x02\x44\x42\x12\x0b\n\x07LEVELDB\x10\x00\x12\x08\n\x04LMDB\x10\x01\"\xdc\x01\n\x1a\x44\x65tectionEvaluateParameter\x12\x13\n\x0bnum_classes\x18\x01 \x01(\r\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\x02 \x01(\r:\x01\x30\x12\x1e\n\x11overlap_threshold\x18\x03 \x01(\x02:\x03\x30.5\x12#\n\x15\x65valuate_difficult_gt\x18\x04 \x01(\x08:\x04true\x12\x16\n\x0ename_size_file\x18\x05 \x01(\t\x12,\n\x0cresize_param\x18\x06 \x01(\x0b\x32\x16.caffe.ResizeParameter\"[\n\x1eNonMaximumSuppressionParameter\x12\x1a\n\rnms_threshold\x18\x01 \x01(\x02:\x03\x30.3\x12\r\n\x05top_k\x18\x02 \x01(\x05\x12\x0e\n\x03\x65ta\x18\x03 \x01(\x02:\x01\x31\"\xd8\x01\n\x13SaveOutputParameter\x12\x18\n\x10output_directory\x18\x01 \x01(\t\x12\x1a\n\x12output_name_prefix\x18\x02 \x01(\t\x12\x15\n\routput_format\x18\x03 \x01(\t\x12\x16\n\x0elabel_map_file\x18\x04 \x01(\t\x12\x16\n\x0ename_size_file\x18\x05 \x01(\t\x12\x16\n\x0enum_test_image\x18\x06 \x01(\r\x12,\n\x0cresize_param\x18\x07 \x01(\x0b\x32\x16.caffe.ResizeParameter\"\xc7\x03\n\x18\x44\x65tectionOutputParameter\x12\x13\n\x0bnum_classes\x18\x01 \x01(\r\x12\x1c\n\x0eshare_location\x18\x02 \x01(\x08:\x04true\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\x03 \x01(\x05:\x01\x30\x12\x38\n\tnms_param\x18\x04 \x01(\x0b\x32%.caffe.NonMaximumSuppressionParameter\x12\x35\n\x11save_output_param\x18\x05 \x01(\x0b\x32\x1a.caffe.SaveOutputParameter\x12<\n\tcode_type\x18\x06 \x01(\x0e\x32!.caffe.PriorBoxParameter.CodeType:\x06\x43ORNER\x12)\n\x1avariance_encoded_in_target\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x16\n\nkeep_top_k\x18\x07 \x01(\x05:\x02-1\x12\x1c\n\x14\x63onfidence_threshold\x18\t \x01(\x02\x12\x18\n\tvisualize\x18\n \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x13visualize_threshold\x18\x0b \x01(\x02\x12\x11\n\tsave_file\x18\x0c \x01(\t\".\n\x10\x44ropoutParameter\x12\x1a\n\rdropout_ratio\x18\x01 
\x01(\x02:\x03\x30.5\"\xa0\x01\n\x12\x44ummyDataParameter\x12+\n\x0b\x64\x61ta_filler\x18\x01 \x03(\x0b\x32\x16.caffe.FillerParameter\x12\x1f\n\x05shape\x18\x06 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x0b\n\x03num\x18\x02 \x03(\r\x12\x10\n\x08\x63hannels\x18\x03 \x03(\r\x12\x0e\n\x06height\x18\x04 \x03(\r\x12\r\n\x05width\x18\x05 \x03(\r\"\xa5\x01\n\x10\x45ltwiseParameter\x12\x39\n\toperation\x18\x01 \x01(\x0e\x32!.caffe.EltwiseParameter.EltwiseOp:\x03SUM\x12\r\n\x05\x63oeff\x18\x02 \x03(\x02\x12\x1e\n\x10stable_prod_grad\x18\x03 \x01(\x08:\x04true\"\'\n\tEltwiseOp\x12\x08\n\x04PROD\x10\x00\x12\x07\n\x03SUM\x10\x01\x12\x07\n\x03MAX\x10\x02\" \n\x0c\x45LUParameter\x12\x10\n\x05\x61lpha\x18\x01 \x01(\x02:\x01\x31\"\xac\x01\n\x0e\x45mbedParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x11\n\tinput_dim\x18\x02 \x01(\r\x12\x17\n\tbias_term\x18\x03 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\"D\n\x0c\x45xpParameter\x12\x10\n\x04\x62\x61se\x18\x01 \x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"9\n\x10\x46lattenParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x14\n\x08\x65nd_axis\x18\x02 \x01(\x05:\x02-1\"O\n\x11HDF5DataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x02 \x01(\r\x12\x16\n\x07shuffle\x18\x03 \x01(\x08:\x05\x66\x61lse\"(\n\x13HDF5OutputParameter\x12\x11\n\tfile_name\x18\x01 \x01(\t\"^\n\x12HingeLossParameter\x12\x30\n\x04norm\x18\x01 \x01(\x0e\x32\x1e.caffe.HingeLossParameter.Norm:\x02L1\"\x16\n\x04Norm\x12\x06\n\x02L1\x10\x01\x12\x06\n\x02L2\x10\x02\"\x97\x02\n\x12ImageDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x15\n\nbatch_size\x18\x04 \x01(\r:\x01\x31\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x16\n\x07shuffle\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x15\n\nnew_height\x18\t \x01(\r:\x01\x30\x12\x14\n\tnew_width\x18\n 
\x01(\r:\x01\x30\x12\x16\n\x08is_color\x18\x0b \x01(\x08:\x04true\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\x0c \x01(\t:\x00\"\'\n\x15InfogainLossParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\"\xcb\x01\n\x15InnerProductParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x04\x61xis\x18\x05 \x01(\x05:\x01\x31\x12\x18\n\ttranspose\x18\x06 \x01(\x08:\x05\x66\x61lse\"1\n\x0eInputParameter\x12\x1f\n\x05shape\x18\x01 \x03(\x0b\x32\x10.caffe.BlobShape\"D\n\x0cLogParameter\x12\x10\n\x04\x62\x61se\x18\x01 \x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"\xb8\x02\n\x0cLRNParameter\x12\x15\n\nlocal_size\x18\x01 \x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x02 \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x03 \x01(\x02:\x04\x30.75\x12\x44\n\x0bnorm_region\x18\x04 \x01(\x0e\x32\x1e.caffe.LRNParameter.NormRegion:\x0f\x41\x43ROSS_CHANNELS\x12\x0c\n\x01k\x18\x05 \x01(\x02:\x01\x31\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.LRNParameter.Engine:\x07\x44\x45\x46\x41ULT\"5\n\nNormRegion\x12\x13\n\x0f\x41\x43ROSS_CHANNELS\x10\x00\x12\x12\n\x0eWITHIN_CHANNEL\x10\x01\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"t\n\x13MemoryDataParameter\x12\x12\n\nbatch_size\x18\x01 \x01(\r\x12\x10\n\x08\x63hannels\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\r\n\x05width\x18\x04 \x01(\r\x12\x18\n\ttranspose\x18\x05 \x01(\x08:\x05\x66\x61lse\"\xe8\x08\n\x15MultiBoxLossParameter\x12J\n\rloc_loss_type\x18\x01 
\x01(\x0e\x32(.caffe.MultiBoxLossParameter.LocLossType:\tSMOOTH_L1\x12J\n\x0e\x63onf_loss_type\x18\x02 \x01(\x0e\x32).caffe.MultiBoxLossParameter.ConfLossType:\x07SOFTMAX\x12\x15\n\nloc_weight\x18\x03 \x01(\x02:\x01\x31\x12\x13\n\x0bnum_classes\x18\x04 \x01(\r\x12\x1c\n\x0eshare_location\x18\x05 \x01(\x08:\x04true\x12J\n\nmatch_type\x18\x06 \x01(\x0e\x32&.caffe.MultiBoxLossParameter.MatchType:\x0ePER_PREDICTION\x12\x1e\n\x11overlap_threshold\x18\x07 \x01(\x02:\x03\x30.5\x12$\n\x16use_prior_for_matching\x18\x08 \x01(\x08:\x04true\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\t \x01(\r:\x01\x30\x12\x1e\n\x10use_difficult_gt\x18\n \x01(\x08:\x04true\x12\x15\n\rdo_neg_mining\x18\x0b \x01(\x08\x12\x18\n\rneg_pos_ratio\x18\x0c \x01(\x02:\x01\x33\x12\x18\n\x0bneg_overlap\x18\r \x01(\x02:\x03\x30.5\x12<\n\tcode_type\x18\x0e \x01(\x0e\x32!.caffe.PriorBoxParameter.CodeType:\x06\x43ORNER\x12(\n\x19\x65ncode_variance_in_target\x18\x10 \x01(\x08:\x05\x66\x61lse\x12%\n\x16map_object_to_agnostic\x18\x11 \x01(\x08:\x05\x66\x61lse\x12)\n\x1aignore_cross_boundary_bbox\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tbp_inside\x18\x13 \x01(\x08:\x05\x66\x61lse\x12J\n\x0bmining_type\x18\x14 \x01(\x0e\x32\'.caffe.MultiBoxLossParameter.MiningType:\x0cMAX_NEGATIVE\x12\x38\n\tnms_param\x18\x15 \x01(\x0b\x32%.caffe.NonMaximumSuppressionParameter\x12\x17\n\x0bsample_size\x18\x16 \x01(\x05:\x02\x36\x34\x12 \n\x11use_prior_for_nms\x18\x17 \x01(\x08:\x05\x66\x61lse\"$\n\x0bLocLossType\x12\x06\n\x02L2\x10\x00\x12\r\n\tSMOOTH_L1\x10\x01\")\n\x0c\x43onfLossType\x12\x0b\n\x07SOFTMAX\x10\x00\x12\x0c\n\x08LOGISTIC\x10\x01\".\n\tMatchType\x12\r\n\tBIPARTITE\x10\x00\x12\x12\n\x0ePER_PREDICTION\x10\x01\":\n\nMiningType\x12\x08\n\x04NONE\x10\x00\x12\x10\n\x0cMAX_NEGATIVE\x10\x01\x12\x10\n\x0cHARD_EXAMPLE\x10\x02\"e\n\x0cMVNParameter\x12 \n\x12normalize_variance\x18\x01 \x01(\x08:\x04true\x12\x1e\n\x0f\x61\x63ross_channels\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x03\x65ps\x18\x03 
\x01(\x02:\x06\x31\x65-009\"\x93\x01\n\x12NormalizeParameter\x12\x1c\n\x0e\x61\x63ross_spatial\x18\x01 \x01(\x08:\x04true\x12,\n\x0cscale_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x1c\n\x0e\x63hannel_shared\x18\x03 \x01(\x08:\x04true\x12\x13\n\x03\x65ps\x18\x04 \x01(\x02:\x06\x31\x65-010\"5\n\x12ParameterParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\"!\n\x10PermuteParameter\x12\r\n\x05order\x18\x01 \x03(\r\"\xa2\x03\n\x10PoolingParameter\x12\x35\n\x04pool\x18\x01 \x01(\x0e\x32\".caffe.PoolingParameter.PoolMethod:\x03MAX\x12\x0e\n\x03pad\x18\x04 \x01(\r:\x01\x30\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x13\n\x0bkernel_size\x18\x02 \x01(\r\x12\x10\n\x08kernel_h\x18\x05 \x01(\r\x12\x10\n\x08kernel_w\x18\x06 \x01(\r\x12\x11\n\x06stride\x18\x03 \x01(\r:\x01\x31\x12\x10\n\x08stride_h\x18\x07 \x01(\r\x12\x10\n\x08stride_w\x18\x08 \x01(\r\x12\x37\n\x06\x65ngine\x18\x0b \x01(\x0e\x32\x1e.caffe.PoolingParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x1d\n\x0eglobal_pooling\x18\x0c \x01(\x08:\x05\x66\x61lse\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"F\n\x0ePowerParameter\x12\x10\n\x05power\x18\x01 \x01(\x02:\x01\x31\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"\xb5\x02\n\x11PriorBoxParameter\x12\x10\n\x08min_size\x18\x01 \x03(\x02\x12\x10\n\x08max_size\x18\x02 \x03(\x02\x12\x14\n\x0c\x61spect_ratio\x18\x03 \x03(\x02\x12\x12\n\x04\x66lip\x18\x04 \x01(\x08:\x04true\x12\x13\n\x04\x63lip\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x08variance\x18\x06 \x03(\x02\x12\x10\n\x08img_size\x18\x07 \x01(\r\x12\r\n\x05img_h\x18\x08 \x01(\r\x12\r\n\x05img_w\x18\t \x01(\r\x12\x0c\n\x04step\x18\n \x01(\x02\x12\x0e\n\x06step_h\x18\x0b \x01(\x02\x12\x0e\n\x06step_w\x18\x0c 
\x01(\x02\x12\x13\n\x06offset\x18\r \x01(\x02:\x03\x30.5\"8\n\x08\x43odeType\x12\n\n\x06\x43ORNER\x10\x01\x12\x0f\n\x0b\x43\x45NTER_SIZE\x10\x02\x12\x0f\n\x0b\x43ORNER_SIZE\x10\x03\"g\n\x0fPythonParameter\x12\x0e\n\x06module\x18\x01 \x01(\t\x12\r\n\x05layer\x18\x02 \x01(\t\x12\x13\n\tparam_str\x18\x03 \x01(\t:\x00\x12 \n\x11share_in_parallel\x18\x04 \x01(\x08:\x05\x66\x61lse\"\xc0\x01\n\x12RecurrentParameter\x12\x15\n\nnum_output\x18\x01 \x01(\r:\x01\x30\x12-\n\rweight_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x19\n\ndebug_info\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\rexpose_hidden\x18\x05 \x01(\x08:\x05\x66\x61lse\"\xad\x01\n\x12ReductionParameter\x12=\n\toperation\x18\x01 \x01(\x0e\x32%.caffe.ReductionParameter.ReductionOp:\x03SUM\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x10\n\x05\x63oeff\x18\x03 \x01(\x02:\x01\x31\"5\n\x0bReductionOp\x12\x07\n\x03SUM\x10\x01\x12\x08\n\x04\x41SUM\x10\x02\x12\t\n\x05SUMSQ\x10\x03\x12\x08\n\x04MEAN\x10\x04\"\x8d\x01\n\rReLUParameter\x12\x19\n\x0enegative_slope\x18\x01 \x01(\x02:\x01\x30\x12\x34\n\x06\x65ngine\x18\x02 \x01(\x0e\x32\x1b.caffe.ReLUParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"Z\n\x10ReshapeParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x14\n\x08num_axes\x18\x03 \x01(\x05:\x02-1\"\xa5\x01\n\x0eScaleParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x18\n\tbias_term\x18\x04 \x01(\x08:\x05\x66\x61lse\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\"x\n\x10SigmoidParameter\x12\x37\n\x06\x65ngine\x18\x01 
\x01(\x0e\x32\x1e.caffe.SigmoidParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"L\n\x0eSliceParameter\x12\x0f\n\x04\x61xis\x18\x03 \x01(\x05:\x01\x31\x12\x13\n\x0bslice_point\x18\x02 \x03(\r\x12\x14\n\tslice_dim\x18\x01 \x01(\r:\x01\x31\"\x89\x02\n\x10SoftmaxParameter\x12\x37\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1e.caffe.SoftmaxParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x15\n\nhard_ratio\x18\x03 \x01(\x02:\x01\x31\x12\x14\n\x0c\x63lass_weight\x18\x04 \x03(\x02\x12\x19\n\x11hard_mining_label\x18\x05 \x01(\x05\x12\x15\n\rcutting_point\x18\x06 \x01(\x02\x12\x1f\n\x0enormalize_type\x18\x07 \x01(\t:\x07Softmax\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"r\n\rTanHParameter\x12\x34\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1b.caffe.TanHParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"/\n\rTileParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\r\n\x05tiles\x18\x02 \x01(\x05\"*\n\x12ThresholdParameter\x12\x14\n\tthreshold\x18\x01 \x01(\x02:\x01\x30\"\xbb\x01\n\x12VideoDataParameter\x12?\n\nvideo_type\x18\x01 \x01(\x0e\x32#.caffe.VideoDataParameter.VideoType:\x06WEBCAM\x12\x14\n\tdevice_id\x18\x02 \x01(\x05:\x01\x30\x12\x12\n\nvideo_file\x18\x03 \x01(\t\x12\x16\n\x0bskip_frames\x18\x04 \x01(\r:\x01\x30\"\"\n\tVideoType\x12\n\n\x06WEBCAM\x10\x00\x12\t\n\x05VIDEO\x10\x01\"\xc1\x02\n\x13WindowDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x12\n\nbatch_size\x18\x04 \x01(\r\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x0c\x66g_threshold\x18\x07 
\x01(\x02:\x03\x30.5\x12\x19\n\x0c\x62g_threshold\x18\x08 \x01(\x02:\x03\x30.5\x12\x19\n\x0b\x66g_fraction\x18\t \x01(\x02:\x04\x30.25\x12\x16\n\x0b\x63ontext_pad\x18\n \x01(\r:\x01\x30\x12\x17\n\tcrop_mode\x18\x0b \x01(\t:\x04warp\x12\x1b\n\x0c\x63\x61\x63he_images\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\r \x01(\t:\x00\"\xeb\x01\n\x0cSPPParameter\x12\x16\n\x0epyramid_height\x18\x01 \x01(\r\x12\x31\n\x04pool\x18\x02 \x01(\x0e\x32\x1e.caffe.SPPParameter.PoolMethod:\x03MAX\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.SPPParameter.Engine:\x07\x44\x45\x46\x41ULT\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"\xe0\x13\n\x10V1LayerParameter\x12\x0e\n\x06\x62ottom\x18\x02 \x03(\t\x12\x0b\n\x03top\x18\x03 \x03(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12$\n\x07include\x18 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18! 
\x03(\x0b\x32\x13.caffe.NetStateRule\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.caffe.V1LayerParameter.LayerType\x12\x1f\n\x05\x62lobs\x18\x06 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x0e\n\x05param\x18\xe9\x07 \x03(\t\x12>\n\x0f\x62lob_share_mode\x18\xea\x07 \x03(\x0e\x32$.caffe.V1LayerParameter.DimCheckMode\x12\x10\n\x08\x62lobs_lr\x18\x07 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x08 \x03(\x02\x12\x13\n\x0bloss_weight\x18# \x03(\x02\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x1b \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12,\n\x0c\x61rgmax_param\x18\x17 \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12,\n\x0c\x63oncat_param\x18\t \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18( \x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18\n \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12(\n\ndata_param\x18\x0b \x01(\x0b\x32\x14.caffe.DataParameter\x12.\n\rdropout_param\x18\x0c \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18\x1a \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18\x18 \x01(\x0b\x32\x17.caffe.EltwiseParameter\x12&\n\texp_param\x18) \x01(\x0b\x32\x13.caffe.ExpParameter\x12\x31\n\x0fhdf5_data_param\x18\r \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18\x0e \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18\x1d \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18\x0f \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18\x10 \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18\x11 \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12&\n\tlrn_param\x18\x12 \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18\x16 \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12&\n\tmvn_param\x18\" \x01(\x0b\x32\x13.caffe.MVNParameter\x12.\n\rpooling_param\x18\x13 \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18\x15 
\x01(\x0b\x32\x15.caffe.PowerParameter\x12(\n\nrelu_param\x18\x1e \x01(\x0b\x32\x14.caffe.ReLUParameter\x12.\n\rsigmoid_param\x18& \x01(\x0b\x32\x17.caffe.SigmoidParameter\x12.\n\rsoftmax_param\x18\' \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12*\n\x0bslice_param\x18\x1f \x01(\x0b\x32\x15.caffe.SliceParameter\x12(\n\ntanh_param\x18% \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x32\n\x0fthreshold_param\x18\x19 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12\x35\n\x11window_data_param\x18\x14 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12\x37\n\x0ftransform_param\x18$ \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18* \x01(\x0b\x32\x14.caffe.LossParameter\x12&\n\x05layer\x18\x01 \x01(\x0b\x32\x17.caffe.V0LayerParameter\"\xd8\x04\n\tLayerType\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06\x41\x42SVAL\x10#\x12\x0c\n\x08\x41\x43\x43URACY\x10\x01\x12\n\n\x06\x41RGMAX\x10\x1e\x12\x08\n\x04\x42NLL\x10\x02\x12\n\n\x06\x43ONCAT\x10\x03\x12\x14\n\x10\x43ONTRASTIVE_LOSS\x10%\x12\x0f\n\x0b\x43ONVOLUTION\x10\x04\x12\x08\n\x04\x44\x41TA\x10\x05\x12\x11\n\rDECONVOLUTION\x10\'\x12\x0b\n\x07\x44ROPOUT\x10\x06\x12\x0e\n\nDUMMY_DATA\x10 
\x12\x12\n\x0e\x45UCLIDEAN_LOSS\x10\x07\x12\x0b\n\x07\x45LTWISE\x10\x19\x12\x07\n\x03\x45XP\x10&\x12\x0b\n\x07\x46LATTEN\x10\x08\x12\r\n\tHDF5_DATA\x10\t\x12\x0f\n\x0bHDF5_OUTPUT\x10\n\x12\x0e\n\nHINGE_LOSS\x10\x1c\x12\n\n\x06IM2COL\x10\x0b\x12\x0e\n\nIMAGE_DATA\x10\x0c\x12\x11\n\rINFOGAIN_LOSS\x10\r\x12\x11\n\rINNER_PRODUCT\x10\x0e\x12\x07\n\x03LRN\x10\x0f\x12\x0f\n\x0bMEMORY_DATA\x10\x1d\x12\x1d\n\x19MULTINOMIAL_LOGISTIC_LOSS\x10\x10\x12\x07\n\x03MVN\x10\"\x12\x0b\n\x07POOLING\x10\x11\x12\t\n\x05POWER\x10\x1a\x12\x08\n\x04RELU\x10\x12\x12\x0b\n\x07SIGMOID\x10\x13\x12\x1e\n\x1aSIGMOID_CROSS_ENTROPY_LOSS\x10\x1b\x12\x0b\n\x07SILENCE\x10$\x12\x0b\n\x07SOFTMAX\x10\x14\x12\x10\n\x0cSOFTMAX_LOSS\x10\x15\x12\t\n\x05SPLIT\x10\x16\x12\t\n\x05SLICE\x10!\x12\x08\n\x04TANH\x10\x17\x12\x0f\n\x0bWINDOW_DATA\x10\x18\x12\r\n\tTHRESHOLD\x10\x1f\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\xfd\x07\n\x10V0LayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x12\n\nnum_output\x18\x03 \x01(\r\x12\x16\n\x08\x62iasterm\x18\x04 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x06 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0e\n\x03pad\x18\x07 \x01(\r:\x01\x30\x12\x12\n\nkernelsize\x18\x08 \x01(\r\x12\x10\n\x05group\x18\t \x01(\r:\x01\x31\x12\x11\n\x06stride\x18\n \x01(\r:\x01\x31\x12\x35\n\x04pool\x18\x0b \x01(\x0e\x32\".caffe.V0LayerParameter.PoolMethod:\x03MAX\x12\x1a\n\rdropout_ratio\x18\x0c \x01(\x02:\x03\x30.5\x12\x15\n\nlocal_size\x18\r \x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x0e \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x0f \x01(\x02:\x04\x30.75\x12\x0c\n\x01k\x18\x16 \x01(\x02:\x01\x31\x12\x0e\n\x06source\x18\x10 \x01(\t\x12\x10\n\x05scale\x18\x11 \x01(\x02:\x01\x31\x12\x10\n\x08meanfile\x18\x12 \x01(\t\x12\x11\n\tbatchsize\x18\x13 \x01(\r\x12\x13\n\x08\x63ropsize\x18\x14 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x15 
\x01(\x08:\x05\x66\x61lse\x12\x1f\n\x05\x62lobs\x18\x32 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x10\n\x08\x62lobs_lr\x18\x33 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x34 \x03(\x02\x12\x14\n\trand_skip\x18\x35 \x01(\r:\x01\x30\x12\x1d\n\x10\x64\x65t_fg_threshold\x18\x36 \x01(\x02:\x03\x30.5\x12\x1d\n\x10\x64\x65t_bg_threshold\x18\x37 \x01(\x02:\x03\x30.5\x12\x1d\n\x0f\x64\x65t_fg_fraction\x18\x38 \x01(\x02:\x04\x30.25\x12\x1a\n\x0f\x64\x65t_context_pad\x18: \x01(\r:\x01\x30\x12\x1b\n\rdet_crop_mode\x18; \x01(\t:\x04warp\x12\x12\n\x07new_num\x18< \x01(\x05:\x01\x30\x12\x17\n\x0cnew_channels\x18= \x01(\x05:\x01\x30\x12\x15\n\nnew_height\x18> \x01(\x05:\x01\x30\x12\x14\n\tnew_width\x18? \x01(\x05:\x01\x30\x12\x1d\n\x0eshuffle_images\x18@ \x01(\x08:\x05\x66\x61lse\x12\x15\n\nconcat_dim\x18\x41 \x01(\r:\x01\x31\x12\x36\n\x11hdf5_output_param\x18\xe9\x07 \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"W\n\x0ePReLUParameter\x12&\n\x06\x66iller\x18\x01 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x1d\n\x0e\x63hannel_shared\x18\x02 \x01(\x08:\x05\x66\x61lse\"!\n\x12TransposeParameter\x12\x0b\n\x03\x64im\x18\x01 \x03(\x05\"#\n\x10ReverseParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x30\"\xb5\x01\n\rLSTMParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x1d\n\x12\x63lipping_threshold\x18\x02 \x01(\x02:\x01\x30\x12-\n\rweight_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x15\n\nbatch_size\x18\x05 \x01(\r:\x01\x31\"\xa5\x01\n\x0c\x43TCParameter\x12\x16\n\tthreshold\x18\x01 \x01(\x02:\x03\x30.7\x12;\n\x0b\x64\x65\x63ode_type\x18\x02 \x01(\x0e\x32\x1b.caffe.CTCParameter.Decoder:\tbest_path\"@\n\x07\x44\x65\x63oder\x12\r\n\tbest_path\x10\x00\x12\x13\n\x0f\x62\x65st_path_thres\x10\x01\x12\x11\n\rprefix_search\x10\x02\"i\n\x13\x43\x65nterLossParameter\x12\x12\n\nnum_output\x18\x01 
\x01(\r\x12-\n\rcenter_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x04\x61xis\x18\x03 \x01(\x05:\x01\x31\"Z\n\x10\x43tcLossParameter\x12\x18\n\ralphabet_size\x18\x01 \x01(\r:\x01\x30\x12\x14\n\ttime_step\x18\x03 \x01(\r:\x01\x30\x12\x16\n\x0b\x62lank_label\x18\x04 \x01(\x05:\x01\x30\"M\n\x1e\x43ontinuationIndicatorParameter\x12\x14\n\ttime_step\x18\x01 \x01(\r:\x01\x30\x12\x15\n\nbatch_size\x18\x02 \x01(\r:\x01\x30\"8\n\x1eLabelsequenceAccuracyParameter\x12\x16\n\x0b\x62lank_label\x18\x01 \x01(\x05:\x01\x30\"\x90\x02\n\x1bSpatialTransformerParameter\x12\x1e\n\x0etransform_type\x18\x01 \x01(\t:\x06\x61\x66\x66ine\x12\x1e\n\x0csampler_type\x18\x02 \x01(\t:\x08\x62ilinear\x12\x10\n\x08output_H\x18\x03 \x01(\x05\x12\x10\n\x08output_W\x18\x04 \x01(\x05\x12\x1b\n\rto_compute_dU\x18\x05 \x01(\x08:\x04true\x12\x11\n\ttheta_1_1\x18\x06 \x01(\x01\x12\x11\n\ttheta_1_2\x18\x07 \x01(\x01\x12\x11\n\ttheta_1_3\x18\x08 \x01(\x01\x12\x11\n\ttheta_2_1\x18\t \x01(\x01\x12\x11\n\ttheta_2_2\x18\n \x01(\x01\x12\x11\n\ttheta_2_3\x18\x0b \x01(\x01\"(\n\x12PowerFileParameter\x12\x12\n\nshift_file\x18\x01 \x01(\t\"5\n\x0fSTLossParameter\x12\x10\n\x08output_H\x18\x01 \x02(\x05\x12\x10\n\x08output_W\x18\x02 \x02(\x05\"%\n\x10LocLossParameter\x12\x11\n\tthreshold\x18\x01 \x02(\x01*\x1c\n\x05Phase\x12\t\n\x05TRAIN\x10\x00\x12\x08\n\x04TEST\x10\x01')
)
_PHASE = _descriptor.EnumDescriptor(
name='Phase',
full_name='caffe.Phase',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='TRAIN', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TEST', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=24315,
serialized_end=24343,
)
_sym_db.RegisterEnumDescriptor(_PHASE)
Phase = enum_type_wrapper.EnumTypeWrapper(_PHASE)
TRAIN = 0
TEST = 1
_EMITCONSTRAINT_EMITTYPE = _descriptor.EnumDescriptor(
name='EmitType',
full_name='caffe.EmitConstraint.EmitType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CENTER', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MIN_OVERLAP', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1318,
serialized_end=1357,
)
_sym_db.RegisterEnumDescriptor(_EMITCONSTRAINT_EMITTYPE)
_ANNOTATEDDATUM_ANNOTATIONTYPE = _descriptor.EnumDescriptor(
name='AnnotationType',
full_name='caffe.AnnotatedDatum.AnnotationType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='BBOX', index=0, number=0,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1801,
serialized_end=1827,
)
_sym_db.RegisterEnumDescriptor(_ANNOTATEDDATUM_ANNOTATIONTYPE)
_FILLERPARAMETER_VARIANCENORM = _descriptor.EnumDescriptor(
name='VarianceNorm',
full_name='caffe.FillerParameter.VarianceNorm',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='FAN_IN', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FAN_OUT', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVERAGE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2058,
serialized_end=2110,
)
_sym_db.RegisterEnumDescriptor(_FILLERPARAMETER_VARIANCENORM)
_SOLVERPARAMETER_SNAPSHOTFORMAT = _descriptor.EnumDescriptor(
name='SnapshotFormat',
full_name='caffe.SolverParameter.SnapshotFormat',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='HDF5', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BINARYPROTO', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3667,
serialized_end=3710,
)
_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SNAPSHOTFORMAT)
_SOLVERPARAMETER_SOLVERMODE = _descriptor.EnumDescriptor(
name='SolverMode',
full_name='caffe.SolverParameter.SolverMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CPU', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GPU', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3712,
serialized_end=3742,
)
_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SOLVERMODE)
_SOLVERPARAMETER_SOLVERTYPE = _descriptor.EnumDescriptor(
name='SolverType',
full_name='caffe.SolverParameter.SolverType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SGD', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NESTEROV', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADAGRAD', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RMSPROP', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADADELTA', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADAM', index=5, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3744,
serialized_end=3829,
)
_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SOLVERTYPE)
_PARAMSPEC_DIMCHECKMODE = _descriptor.EnumDescriptor(
name='DimCheckMode',
full_name='caffe.ParamSpec.DimCheckMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STRICT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PERMISSIVE', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4319,
serialized_end=4361,
)
_sym_db.RegisterEnumDescriptor(_PARAMSPEC_DIMCHECKMODE)
_RESIZEPARAMETER_RESIZE_MODE = _descriptor.EnumDescriptor(
name='Resize_mode',
full_name='caffe.ResizeParameter.Resize_mode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='WARP', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FIT_SMALL_SIZE', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FIT_LARGE_SIZE_AND_PAD', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9173,
serialized_end=9244,
)
_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_RESIZE_MODE)
_RESIZEPARAMETER_PAD_MODE = _descriptor.EnumDescriptor(
name='Pad_mode',
full_name='caffe.ResizeParameter.Pad_mode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CONSTANT', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MIRRORED', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REPEAT_NEAREST', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9246,
serialized_end=9304,
)
_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_PAD_MODE)
_RESIZEPARAMETER_INTERP_MODE = _descriptor.EnumDescriptor(
name='Interp_mode',
full_name='caffe.ResizeParameter.Interp_mode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='LINEAR', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AREA', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NEAREST', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUBIC', index=3, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LANCZOS4', index=4, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9306,
serialized_end=9379,
)
_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_INTERP_MODE)
_LOSSPARAMETER_NORMALIZATIONMODE = _descriptor.EnumDescriptor(
name='NormalizationMode',
full_name='caffe.LossParameter.NormalizationMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='FULL', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VALID', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BATCH_SIZE', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NONE', index=3, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=10326,
serialized_end=10392,
)
_sym_db.RegisterEnumDescriptor(_LOSSPARAMETER_NORMALIZATIONMODE)
_CONVOLUTIONPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.ConvolutionParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_CONVOLUTIONPARAMETER_ENGINE)
_DATAPARAMETER_DB = _descriptor.EnumDescriptor(
name='DB',
full_name='caffe.DataParameter.DB',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='LEVELDB', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LMDB', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11871,
serialized_end=11898,
)
_sym_db.RegisterEnumDescriptor(_DATAPARAMETER_DB)
_ELTWISEPARAMETER_ELTWISEOP = _descriptor.EnumDescriptor(
name='EltwiseOp',
full_name='caffe.EltwiseParameter.EltwiseOp',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='PROD', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUM', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MAX', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=13231,
serialized_end=13270,
)
_sym_db.RegisterEnumDescriptor(_ELTWISEPARAMETER_ELTWISEOP)
_HINGELOSSPARAMETER_NORM = _descriptor.EnumDescriptor(
name='Norm',
full_name='caffe.HingeLossParameter.Norm',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='L1', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='L2', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=13805,
serialized_end=13827,
)
_sym_db.RegisterEnumDescriptor(_HINGELOSSPARAMETER_NORM)
_LRNPARAMETER_NORMREGION = _descriptor.EnumDescriptor(
name='NormRegion',
full_name='caffe.LRNParameter.NormRegion',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ACROSS_CHANNELS', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='WITHIN_CHANNEL', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=14694,
serialized_end=14747,
)
_sym_db.RegisterEnumDescriptor(_LRNPARAMETER_NORMREGION)
_LRNPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.LRNParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_LRNPARAMETER_ENGINE)
_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE = _descriptor.EnumDescriptor(
name='LocLossType',
full_name='caffe.MultiBoxLossParameter.LocLossType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='L2', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SMOOTH_L1', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=15854,
serialized_end=15890,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE)
_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE = _descriptor.EnumDescriptor(
name='ConfLossType',
full_name='caffe.MultiBoxLossParameter.ConfLossType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SOFTMAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOGISTIC', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=15892,
serialized_end=15933,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE)
_MULTIBOXLOSSPARAMETER_MATCHTYPE = _descriptor.EnumDescriptor(
name='MatchType',
full_name='caffe.MultiBoxLossParameter.MatchType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='BIPARTITE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PER_PREDICTION', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=15935,
serialized_end=15981,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_MATCHTYPE)
_MULTIBOXLOSSPARAMETER_MININGTYPE = _descriptor.EnumDescriptor(
name='MiningType',
full_name='caffe.MultiBoxLossParameter.MiningType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MAX_NEGATIVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HARD_EXAMPLE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=15983,
serialized_end=16041,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_MININGTYPE)
_POOLINGPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor(
name='PoolMethod',
full_name='caffe.PoolingParameter.PoolMethod',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STOCHASTIC', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=16714,
serialized_end=16760,
)
_sym_db.RegisterEnumDescriptor(_POOLINGPARAMETER_POOLMETHOD)
_POOLINGPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.PoolingParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_POOLINGPARAMETER_ENGINE)
_PRIORBOXPARAMETER_CODETYPE = _descriptor.EnumDescriptor(
name='CodeType',
full_name='caffe.PriorBoxParameter.CodeType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CORNER', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CENTER_SIZE', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CORNER_SIZE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=17133,
serialized_end=17189,
)
_sym_db.RegisterEnumDescriptor(_PRIORBOXPARAMETER_CODETYPE)
_REDUCTIONPARAMETER_REDUCTIONOP = _descriptor.EnumDescriptor(
name='ReductionOp',
full_name='caffe.ReductionParameter.ReductionOp',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SUM', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ASUM', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUMSQ', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MEAN', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=17612,
serialized_end=17665,
)
_sym_db.RegisterEnumDescriptor(_REDUCTIONPARAMETER_REDUCTIONOP)
_RELUPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.ReLUParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_RELUPARAMETER_ENGINE)
_SIGMOIDPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.SigmoidParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_SIGMOIDPARAMETER_ENGINE)
_SOFTMAXPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.SoftmaxParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_SOFTMAXPARAMETER_ENGINE)
_TANHPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.TanHParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_TANHPARAMETER_ENGINE)
_VIDEODATAPARAMETER_VIDEOTYPE = _descriptor.EnumDescriptor(
name='VideoType',
full_name='caffe.VideoDataParameter.VideoType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='WEBCAM', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VIDEO', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=18902,
serialized_end=18936,
)
_sym_db.RegisterEnumDescriptor(_VIDEODATAPARAMETER_VIDEOTYPE)
_SPPPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor(
name='PoolMethod',
full_name='caffe.SPPParameter.PoolMethod',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STOCHASTIC', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=16714,
serialized_end=16760,
)
_sym_db.RegisterEnumDescriptor(_SPPPARAMETER_POOLMETHOD)
_SPPPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.SPPParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11510,
serialized_end=11553,
)
_sym_db.RegisterEnumDescriptor(_SPPPARAMETER_ENGINE)
_V1LAYERPARAMETER_LAYERTYPE = _descriptor.EnumDescriptor(
name='LayerType',
full_name='caffe.V1LayerParameter.LayerType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ABSVAL', index=1, number=35,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACCURACY', index=2, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ARGMAX', index=3, number=30,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BNLL', index=4, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONCAT', index=5, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONTRASTIVE_LOSS', index=6, number=37,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONVOLUTION', index=7, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DATA', index=8, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DECONVOLUTION', index=9, number=39,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DROPOUT', index=10, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DUMMY_DATA', index=11, number=32,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EUCLIDEAN_LOSS', index=12, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ELTWISE', index=13, number=25,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EXP', index=14, number=38,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FLATTEN', index=15, number=8,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HDF5_DATA', index=16, number=9,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HDF5_OUTPUT', index=17, number=10,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HINGE_LOSS', index=18, number=28,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IM2COL', index=19, number=11,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IMAGE_DATA', index=20, number=12,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INFOGAIN_LOSS', index=21, number=13,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INNER_PRODUCT', index=22, number=14,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LRN', index=23, number=15,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MEMORY_DATA', index=24, number=29,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MULTINOMIAL_LOGISTIC_LOSS', index=25, number=16,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MVN', index=26, number=34,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POOLING', index=27, number=17,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POWER', index=28, number=26,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RELU', index=29, number=18,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIGMOID', index=30, number=19,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIGMOID_CROSS_ENTROPY_LOSS', index=31, number=27,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SILENCE', index=32, number=36,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SOFTMAX', index=33, number=20,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SOFTMAX_LOSS', index=34, number=21,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SPLIT', index=35, number=22,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SLICE', index=36, number=33,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TANH', index=37, number=23,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='WINDOW_DATA', index=38, number=24,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='THRESHOLD', index=39, number=31,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=21385,
serialized_end=21985,
)
_sym_db.RegisterEnumDescriptor(_V1LAYERPARAMETER_LAYERTYPE)
_V1LAYERPARAMETER_DIMCHECKMODE = _descriptor.EnumDescriptor(
name='DimCheckMode',
full_name='caffe.V1LayerParameter.DimCheckMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STRICT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PERMISSIVE', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4319,
serialized_end=4361,
)
_sym_db.RegisterEnumDescriptor(_V1LAYERPARAMETER_DIMCHECKMODE)
_V0LAYERPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor(
name='PoolMethod',
full_name='caffe.V0LayerParameter.PoolMethod',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STOCHASTIC', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=16714,
serialized_end=16760,
)
_sym_db.RegisterEnumDescriptor(_V0LAYERPARAMETER_POOLMETHOD)
_CTCPARAMETER_DECODER = _descriptor.EnumDescriptor(
name='Decoder',
full_name='caffe.CTCParameter.Decoder',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='best_path', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='best_path_thres', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='prefix_search', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=23502,
serialized_end=23566,
)
_sym_db.RegisterEnumDescriptor(_CTCPARAMETER_DECODER)
_BLOBSHAPE = _descriptor.Descriptor(
name='BlobShape',
full_name='caffe.BlobShape',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dim', full_name='caffe.BlobShape.dim', index=0,
number=1, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=22,
serialized_end=50,
)
_BLOBPROTO = _descriptor.Descriptor(
name='BlobProto',
full_name='caffe.BlobProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='shape', full_name='caffe.BlobProto.shape', index=0,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data', full_name='caffe.BlobProto.data', index=1,
number=5, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='diff', full_name='caffe.BlobProto.diff', index=2,
number=6, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='double_data', full_name='caffe.BlobProto.double_data', index=3,
number=8, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='double_diff', full_name='caffe.BlobProto.double_diff', index=4,
number=9, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='num', full_name='caffe.BlobProto.num', index=5,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='channels', full_name='caffe.BlobProto.channels', index=6,
number=2, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='height', full_name='caffe.BlobProto.height', index=7,
number=3, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='width', full_name='caffe.BlobProto.width', index=8,
number=4, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=53,
serialized_end=257,
)
_BLOBPROTOVECTOR = _descriptor.Descriptor(
name='BlobProtoVector',
full_name='caffe.BlobProtoVector',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.BlobProtoVector.blobs', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=259,
serialized_end=309,
)
_DATUM = _descriptor.Descriptor(
name='Datum',
full_name='caffe.Datum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channels', full_name='caffe.Datum.channels', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='height', full_name='caffe.Datum.height', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='width', full_name='caffe.Datum.width', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data', full_name='caffe.Datum.data', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label', full_name='caffe.Datum.label', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='float_data', full_name='caffe.Datum.float_data', index=5,
number=6, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='encoded', full_name='caffe.Datum.encoded', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='labels', full_name='caffe.Datum.labels', index=7,
number=8, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=312,
serialized_end=457,
)
_MTCNNBBOX = _descriptor.Descriptor(
name='MTCNNBBox',
full_name='caffe.MTCNNBBox',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='xmin', full_name='caffe.MTCNNBBox.xmin', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ymin', full_name='caffe.MTCNNBBox.ymin', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='xmax', full_name='caffe.MTCNNBBox.xmax', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ymax', full_name='caffe.MTCNNBBox.ymax', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=459,
serialized_end=526,
)
_MTCNNDATUM = _descriptor.Descriptor(
name='MTCNNDatum',
full_name='caffe.MTCNNDatum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='datum', full_name='caffe.MTCNNDatum.datum', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='roi', full_name='caffe.MTCNNDatum.roi', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pts', full_name='caffe.MTCNNDatum.pts', index=2,
number=3, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=528,
serialized_end=613,
)
_LABELMAPITEM = _descriptor.Descriptor(
name='LabelMapItem',
full_name='caffe.LabelMapItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='caffe.LabelMapItem.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label', full_name='caffe.LabelMapItem.label', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='display_name', full_name='caffe.LabelMapItem.display_name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=615,
serialized_end=680,
)
_LABELMAP = _descriptor.Descriptor(
name='LabelMap',
full_name='caffe.LabelMap',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='caffe.LabelMap.item', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=682,
serialized_end=727,
)
_SAMPLER = _descriptor.Descriptor(
name='Sampler',
full_name='caffe.Sampler',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_scale', full_name='caffe.Sampler.min_scale', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_scale', full_name='caffe.Sampler.max_scale', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_aspect_ratio', full_name='caffe.Sampler.min_aspect_ratio', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_aspect_ratio', full_name='caffe.Sampler.max_aspect_ratio', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=729,
serialized_end=840,
)
_SAMPLECONSTRAINT = _descriptor.Descriptor(
name='SampleConstraint',
full_name='caffe.SampleConstraint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_jaccard_overlap', full_name='caffe.SampleConstraint.min_jaccard_overlap', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_jaccard_overlap', full_name='caffe.SampleConstraint.max_jaccard_overlap', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_sample_coverage', full_name='caffe.SampleConstraint.min_sample_coverage', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_sample_coverage', full_name='caffe.SampleConstraint.max_sample_coverage', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_object_coverage', full_name='caffe.SampleConstraint.min_object_coverage', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_object_coverage', full_name='caffe.SampleConstraint.max_object_coverage', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=843,
serialized_end=1035,
)
_BATCHSAMPLER = _descriptor.Descriptor(
name='BatchSampler',
full_name='caffe.BatchSampler',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='use_original_image', full_name='caffe.BatchSampler.use_original_image', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sampler', full_name='caffe.BatchSampler.sampler', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sample_constraint', full_name='caffe.BatchSampler.sample_constraint', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_sample', full_name='caffe.BatchSampler.max_sample', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_trials', full_name='caffe.BatchSampler.max_trials', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=100,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1038,
serialized_end=1216,
)
_EMITCONSTRAINT = _descriptor.Descriptor(
name='EmitConstraint',
full_name='caffe.EmitConstraint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='emit_type', full_name='caffe.EmitConstraint.emit_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='emit_overlap', full_name='caffe.EmitConstraint.emit_overlap', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_EMITCONSTRAINT_EMITTYPE,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1219,
serialized_end=1357,
)
_NORMALIZEDBBOX = _descriptor.Descriptor(
name='NormalizedBBox',
full_name='caffe.NormalizedBBox',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='xmin', full_name='caffe.NormalizedBBox.xmin', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ymin', full_name='caffe.NormalizedBBox.ymin', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='xmax', full_name='caffe.NormalizedBBox.xmax', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ymax', full_name='caffe.NormalizedBBox.ymax', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label', full_name='caffe.NormalizedBBox.label', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='difficult', full_name='caffe.NormalizedBBox.difficult', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='score', full_name='caffe.NormalizedBBox.score', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='size', full_name='caffe.NormalizedBBox.size', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1360,
serialized_end=1495,
)
_ANNOTATION = _descriptor.Descriptor(
name='Annotation',
full_name='caffe.Annotation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instance_id', full_name='caffe.Annotation.instance_id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bbox', full_name='caffe.Annotation.bbox', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1497,
serialized_end=1570,
)
_ANNOTATIONGROUP = _descriptor.Descriptor(
name='AnnotationGroup',
full_name='caffe.AnnotationGroup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='group_label', full_name='caffe.AnnotationGroup.group_label', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='annotation', full_name='caffe.AnnotationGroup.annotation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1572,
serialized_end=1649,
)
_ANNOTATEDDATUM = _descriptor.Descriptor(
name='AnnotatedDatum',
full_name='caffe.AnnotatedDatum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='datum', full_name='caffe.AnnotatedDatum.datum', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.AnnotatedDatum.type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='annotation_group', full_name='caffe.AnnotatedDatum.annotation_group', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_ANNOTATEDDATUM_ANNOTATIONTYPE,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1652,
serialized_end=1827,
)
_FILLERPARAMETER = _descriptor.Descriptor(
name='FillerParameter',
full_name='caffe.FillerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='caffe.FillerParameter.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("constant").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='caffe.FillerParameter.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min', full_name='caffe.FillerParameter.min', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max', full_name='caffe.FillerParameter.max', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mean', full_name='caffe.FillerParameter.mean', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='std', full_name='caffe.FillerParameter.std', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sparse', full_name='caffe.FillerParameter.sparse', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=-1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='variance_norm', full_name='caffe.FillerParameter.variance_norm', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='file', full_name='caffe.FillerParameter.file', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_FILLERPARAMETER_VARIANCENORM,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1830,
serialized_end=2110,
)
# Descriptor for the caffe.NetParameter message.
# NOTE: machine-generated by protoc from caffe.proto -- do not hand-edit;
# regenerate instead.  `number` is the wire tag from the .proto file, `index`
# is the declaration order, and serialized_start/serialized_end are byte
# offsets into this module's serialized FileDescriptorProto, so none of these
# literals may be changed independently.
_NETPARAMETER = _descriptor.Descriptor(
  name='NetParameter',
  full_name='caffe.NetParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='caffe.NetParameter.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input', full_name='caffe.NetParameter.input', index=1,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_shape', full_name='caffe.NetParameter.input_shape', index=2,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_dim', full_name='caffe.NetParameter.input_dim', index=3,
      number=4, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_backward', full_name='caffe.NetParameter.force_backward', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='caffe.NetParameter.state', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug_info', full_name='caffe.NetParameter.debug_info', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='layer', full_name='caffe.NetParameter.layer', index=7,
      number=100, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='layers', full_name='caffe.NetParameter.layers', index=8,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2113,
  serialized_end=2383,
)
# Descriptor for the caffe.SolverParameter message (training/solver settings).
# NOTE: machine-generated by protoc from caffe.proto -- do not hand-edit;
# regenerate instead.  Field `number` values are the wire tags declared in the
# .proto (they are intentionally non-sequential: fields were added over time),
# and serialized_start/serialized_end index into this module's serialized
# FileDescriptorProto.
_SOLVERPARAMETER = _descriptor.Descriptor(
  name='SolverParameter',
  full_name='caffe.SolverParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='net', full_name='caffe.SolverParameter.net', index=0,
      number=24, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='net_param', full_name='caffe.SolverParameter.net_param', index=1,
      number=25, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='train_net', full_name='caffe.SolverParameter.train_net', index=2,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_net', full_name='caffe.SolverParameter.test_net', index=3,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='train_net_param', full_name='caffe.SolverParameter.train_net_param', index=4,
      number=21, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_net_param', full_name='caffe.SolverParameter.test_net_param', index=5,
      number=22, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='train_state', full_name='caffe.SolverParameter.train_state', index=6,
      number=26, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_state', full_name='caffe.SolverParameter.test_state', index=7,
      number=27, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eval_type', full_name='caffe.SolverParameter.eval_type', index=8,
      number=41, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("classification").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ap_version', full_name='caffe.SolverParameter.ap_version', index=9,
      number=42, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("Integral").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='show_per_class_result', full_name='caffe.SolverParameter.show_per_class_result', index=10,
      number=44, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_iter', full_name='caffe.SolverParameter.test_iter', index=11,
      number=3, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_interval', full_name='caffe.SolverParameter.test_interval', index=12,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_compute_loss', full_name='caffe.SolverParameter.test_compute_loss', index=13,
      number=19, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_initialization', full_name='caffe.SolverParameter.test_initialization', index=14,
      number=32, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='base_lr', full_name='caffe.SolverParameter.base_lr', index=15,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='display', full_name='caffe.SolverParameter.display', index=16,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='average_loss', full_name='caffe.SolverParameter.average_loss', index=17,
      number=33, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_iter', full_name='caffe.SolverParameter.max_iter', index=18,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='iter_size', full_name='caffe.SolverParameter.iter_size', index=19,
      number=36, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lr_policy', full_name='caffe.SolverParameter.lr_policy', index=20,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gamma', full_name='caffe.SolverParameter.gamma', index=21,
      number=9, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='power', full_name='caffe.SolverParameter.power', index=22,
      number=10, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='momentum', full_name='caffe.SolverParameter.momentum', index=23,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_decay', full_name='caffe.SolverParameter.weight_decay', index=24,
      number=12, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='regularization_type', full_name='caffe.SolverParameter.regularization_type', index=25,
      number=29, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("L2").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stepsize', full_name='caffe.SolverParameter.stepsize', index=26,
      number=13, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stepvalue', full_name='caffe.SolverParameter.stepvalue', index=27,
      number=34, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='plateau_winsize', full_name='caffe.SolverParameter.plateau_winsize', index=28,
      number=43, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='clip_gradients', full_name='caffe.SolverParameter.clip_gradients', index=29,
      number=35, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(-1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot', full_name='caffe.SolverParameter.snapshot', index=30,
      number=14, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_prefix', full_name='caffe.SolverParameter.snapshot_prefix', index=31,
      number=15, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_diff', full_name='caffe.SolverParameter.snapshot_diff', index=32,
      number=16, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_format', full_name='caffe.SolverParameter.snapshot_format', index=33,
      number=37, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='solver_mode', full_name='caffe.SolverParameter.solver_mode', index=34,
      number=17, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='device_id', full_name='caffe.SolverParameter.device_id', index=35,
      number=18, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='random_seed', full_name='caffe.SolverParameter.random_seed', index=36,
      number=20, type=3, cpp_type=2, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='type', full_name='caffe.SolverParameter.type', index=37,
      number=40, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("SGD").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='delta', full_name='caffe.SolverParameter.delta', index=38,
      number=31, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1e-008),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='momentum2', full_name='caffe.SolverParameter.momentum2', index=39,
      number=39, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.999),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rms_decay', full_name='caffe.SolverParameter.rms_decay', index=40,
      number=38, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.99),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug_info', full_name='caffe.SolverParameter.debug_info', index=41,
      number=23, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_after_train', full_name='caffe.SolverParameter.snapshot_after_train', index=42,
      number=28, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='solver_type', full_name='caffe.SolverParameter.solver_type', index=43,
      number=30, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SOLVERPARAMETER_SNAPSHOTFORMAT,
    _SOLVERPARAMETER_SOLVERMODE,
    _SOLVERPARAMETER_SOLVERTYPE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2386,
  serialized_end=3829,
)
# Descriptor for the caffe.SolverState message (checkpointed solver progress).
# NOTE: machine-generated by protoc from caffe.proto -- do not hand-edit;
# regenerate instead.  serialized_start/serialized_end are byte offsets into
# this module's serialized FileDescriptorProto.
_SOLVERSTATE = _descriptor.Descriptor(
  name='SolverState',
  full_name='caffe.SolverState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='iter', full_name='caffe.SolverState.iter', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='learned_net', full_name='caffe.SolverState.learned_net', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='history', full_name='caffe.SolverState.history', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='current_step', full_name='caffe.SolverState.current_step', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='minimum_loss', full_name='caffe.SolverState.minimum_loss', index=4,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1e+038),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='iter_last_event', full_name='caffe.SolverState.iter_last_event', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3832,
  serialized_end=3998,
)
# Descriptor for the caffe.NetState message.
# NOTE: machine-generated by protoc from caffe.proto -- do not hand-edit;
# regenerate instead.
_NETSTATE = _descriptor.Descriptor(
  name='NetState',
  full_name='caffe.NetState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='phase', full_name='caffe.NetState.phase', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='level', full_name='caffe.NetState.level', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stage', full_name='caffe.NetState.stage', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4000,
  serialized_end=4078,
)
# Descriptor for the caffe.NetStateRule message (layer include/exclude rules).
# NOTE: machine-generated by protoc from caffe.proto -- do not hand-edit;
# regenerate instead.
_NETSTATERULE = _descriptor.Descriptor(
  name='NetStateRule',
  full_name='caffe.NetStateRule',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='phase', full_name='caffe.NetStateRule.phase', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_level', full_name='caffe.NetStateRule.min_level', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_level', full_name='caffe.NetStateRule.max_level', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stage', full_name='caffe.NetStateRule.stage', index=3,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='not_stage', full_name='caffe.NetStateRule.not_stage', index=4,
      number=5, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4080,
  serialized_end=4195,
)
# Descriptor for the caffe.ParamSpec message (per-blob lr/decay multipliers).
# NOTE: machine-generated by protoc from caffe.proto -- do not hand-edit;
# regenerate instead.
_PARAMSPEC = _descriptor.Descriptor(
  name='ParamSpec',
  full_name='caffe.ParamSpec',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='caffe.ParamSpec.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='share_mode', full_name='caffe.ParamSpec.share_mode', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lr_mult', full_name='caffe.ParamSpec.lr_mult', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='decay_mult', full_name='caffe.ParamSpec.decay_mult', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _PARAMSPEC_DIMCHECKMODE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4198,
  serialized_end=4361,
)
# Descriptor for the caffe.PredictBoxParameter message.
# NOTE: machine-generated by protoc from caffe.proto -- do not hand-edit;
# regenerate instead.
_PREDICTBOXPARAMETER = _descriptor.Descriptor(
  name='PredictBoxParameter',
  full_name='caffe.PredictBoxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='stride', full_name='caffe.PredictBoxParameter.stride', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='receptive_field', full_name='caffe.PredictBoxParameter.receptive_field', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=12,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='nms', full_name='caffe.PredictBoxParameter.nms', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_vector', full_name='caffe.PredictBoxParameter.output_vector', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='positive_thresh', full_name='caffe.PredictBoxParameter.positive_thresh', index=4,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.5),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bbreg_exp', full_name='caffe.PredictBoxParameter.bbreg_exp', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4364,
  serialized_end=4536,
)
_LAYERPARAMETER = _descriptor.Descriptor(
name='LayerParameter',
full_name='caffe.LayerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='caffe.LayerParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.LayerParameter.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bottom', full_name='caffe.LayerParameter.bottom', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='top', full_name='caffe.LayerParameter.top', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='phase', full_name='caffe.LayerParameter.phase', index=4,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_weight', full_name='caffe.LayerParameter.loss_weight', index=5,
number=5, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param', full_name='caffe.LayerParameter.param', index=6,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.LayerParameter.blobs', index=7,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='propagate_down', full_name='caffe.LayerParameter.propagate_down', index=8,
number=11, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='include', full_name='caffe.LayerParameter.include', index=9,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exclude', full_name='caffe.LayerParameter.exclude', index=10,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transform_param', full_name='caffe.LayerParameter.transform_param', index=11,
number=100, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_param', full_name='caffe.LayerParameter.loss_param', index=12,
number=101, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='accuracy_param', full_name='caffe.LayerParameter.accuracy_param', index=13,
number=102, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='annotated_data_param', full_name='caffe.LayerParameter.annotated_data_param', index=14,
number=200, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='argmax_param', full_name='caffe.LayerParameter.argmax_param', index=15,
number=103, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='batch_norm_param', full_name='caffe.LayerParameter.batch_norm_param', index=16,
number=139, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bias_param', full_name='caffe.LayerParameter.bias_param', index=17,
number=141, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='center_loss_param', full_name='caffe.LayerParameter.center_loss_param', index=18,
number=147, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='concat_param', full_name='caffe.LayerParameter.concat_param', index=19,
number=104, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='contrastive_loss_param', full_name='caffe.LayerParameter.contrastive_loss_param', index=20,
number=105, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='convolution_param', full_name='caffe.LayerParameter.convolution_param', index=21,
number=106, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='crop_param', full_name='caffe.LayerParameter.crop_param', index=22,
number=144, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data_param', full_name='caffe.LayerParameter.data_param', index=23,
number=107, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='detection_evaluate_param', full_name='caffe.LayerParameter.detection_evaluate_param', index=24,
number=205, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='detection_output_param', full_name='caffe.LayerParameter.detection_output_param', index=25,
number=204, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dropout_param', full_name='caffe.LayerParameter.dropout_param', index=26,
number=108, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dummy_data_param', full_name='caffe.LayerParameter.dummy_data_param', index=27,
number=109, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eltwise_param', full_name='caffe.LayerParameter.eltwise_param', index=28,
number=110, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='elu_param', full_name='caffe.LayerParameter.elu_param', index=29,
number=140, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='embed_param', full_name='caffe.LayerParameter.embed_param', index=30,
number=137, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exp_param', full_name='caffe.LayerParameter.exp_param', index=31,
number=111, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='flatten_param', full_name='caffe.LayerParameter.flatten_param', index=32,
number=135, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_data_param', full_name='caffe.LayerParameter.hdf5_data_param', index=33,
number=112, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_output_param', full_name='caffe.LayerParameter.hdf5_output_param', index=34,
number=113, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hinge_loss_param', full_name='caffe.LayerParameter.hinge_loss_param', index=35,
number=114, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='image_data_param', full_name='caffe.LayerParameter.image_data_param', index=36,
number=115, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='infogain_loss_param', full_name='caffe.LayerParameter.infogain_loss_param', index=37,
number=116, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inner_product_param', full_name='caffe.LayerParameter.inner_product_param', index=38,
number=117, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='input_param', full_name='caffe.LayerParameter.input_param', index=39,
number=143, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='log_param', full_name='caffe.LayerParameter.log_param', index=40,
number=134, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lrn_param', full_name='caffe.LayerParameter.lrn_param', index=41,
number=118, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='memory_data_param', full_name='caffe.LayerParameter.memory_data_param', index=42,
number=119, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='multibox_loss_param', full_name='caffe.LayerParameter.multibox_loss_param', index=43,
number=201, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvn_param', full_name='caffe.LayerParameter.mvn_param', index=44,
number=120, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='norm_param', full_name='caffe.LayerParameter.norm_param', index=45,
number=206, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='predict_box_param', full_name='caffe.LayerParameter.predict_box_param', index=46,
number=209, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameter_param', full_name='caffe.LayerParameter.parameter_param', index=47,
number=145, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='permute_param', full_name='caffe.LayerParameter.permute_param', index=48,
number=202, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pooling_param', full_name='caffe.LayerParameter.pooling_param', index=49,
number=121, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='power_param', full_name='caffe.LayerParameter.power_param', index=50,
number=122, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='prelu_param', full_name='caffe.LayerParameter.prelu_param', index=51,
number=131, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='prior_box_param', full_name='caffe.LayerParameter.prior_box_param', index=52,
number=203, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='python_param', full_name='caffe.LayerParameter.python_param', index=53,
number=130, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='recurrent_param', full_name='caffe.LayerParameter.recurrent_param', index=54,
number=146, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reduction_param', full_name='caffe.LayerParameter.reduction_param', index=55,
number=136, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='relu_param', full_name='caffe.LayerParameter.relu_param', index=56,
number=123, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reshape_param', full_name='caffe.LayerParameter.reshape_param', index=57,
number=133, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scale_param', full_name='caffe.LayerParameter.scale_param', index=58,
number=142, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sigmoid_param', full_name='caffe.LayerParameter.sigmoid_param', index=59,
number=124, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='softmax_param', full_name='caffe.LayerParameter.softmax_param', index=60,
number=125, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spp_param', full_name='caffe.LayerParameter.spp_param', index=61,
number=132, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='slice_param', full_name='caffe.LayerParameter.slice_param', index=62,
number=126, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tanh_param', full_name='caffe.LayerParameter.tanh_param', index=63,
number=127, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='threshold_param', full_name='caffe.LayerParameter.threshold_param', index=64,
number=128, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tile_param', full_name='caffe.LayerParameter.tile_param', index=65,
number=138, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='video_data_param', full_name='caffe.LayerParameter.video_data_param', index=66,
number=207, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='window_data_param', full_name='caffe.LayerParameter.window_data_param', index=67,
number=129, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='flip_param', full_name='caffe.LayerParameter.flip_param', index=68,
number=212, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lstm_param', full_name='caffe.LayerParameter.lstm_param', index=69,
number=148, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ctc_param', full_name='caffe.LayerParameter.ctc_param', index=70,
number=149, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transpose_param', full_name='caffe.LayerParameter.transpose_param', index=71,
number=150, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reverse_param', full_name='caffe.LayerParameter.reverse_param', index=72,
number=151, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ctc_loss_param', full_name='caffe.LayerParameter.ctc_loss_param', index=73,
number=152, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='continuation_indicator_param', full_name='caffe.LayerParameter.continuation_indicator_param', index=74,
number=153, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='labelsequence_accuracy_param', full_name='caffe.LayerParameter.labelsequence_accuracy_param', index=75,
number=154, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='st_param', full_name='caffe.LayerParameter.st_param', index=76,
number=156, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='st_loss_param', full_name='caffe.LayerParameter.st_loss_param', index=77,
number=157, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='power_file_param', full_name='caffe.LayerParameter.power_file_param', index=78,
number=158, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loc_loss_param', full_name='caffe.LayerParameter.loc_loss_param', index=79,
number=159, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=4539,
serialized_end=8316,
)
# Descriptor for the `caffe.FlipParameter` message.
# NOTE(review): this module is protoc-generated output — do not edit by hand;
# regenerate from caffe.proto instead.  Both fields are optional bools
# (type=8 is TYPE_BOOL, cpp_type=7 is CPPTYPE_BOOL, label=1 is LABEL_OPTIONAL
# in the protobuf descriptor encoding): flip_width defaults to True,
# flip_height defaults to False.  serialized_start/serialized_end are byte
# offsets of this message inside the module's serialized FileDescriptorProto.
_FLIPPARAMETER = _descriptor.Descriptor(
  name='FlipParameter',
  full_name='caffe.FlipParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='flip_width', full_name='caffe.FlipParameter.flip_width', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='flip_height', full_name='caffe.FlipParameter.flip_height', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8318,
  serialized_end=8387,
)
# Descriptor for the `caffe.TransformationParameter` message (data-augmentation
# options applied when loading inputs: scaling, mirroring, cropping, mean
# subtraction, plus nested resize/noise/distort/expand sub-messages).
# NOTE(review): protoc-generated — do not edit by hand; regenerate from
# caffe.proto.  Descriptor type codes used below: type=2 float, type=8 bool,
# type=9 string, type=11 message, type=13 uint32; label=1 optional,
# label=3 repeated.
_TRANSFORMATIONPARAMETER = _descriptor.Descriptor(
  name='TransformationParameter',
  full_name='caffe.TransformationParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.TransformationParameter.scale', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mirror', full_name='caffe.TransformationParameter.mirror', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_size', full_name='caffe.TransformationParameter.crop_size', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_h', full_name='caffe.TransformationParameter.crop_h', index=3,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_w', full_name='caffe.TransformationParameter.crop_w', index=4,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_file', full_name='caffe.TransformationParameter.mean_file', index=5,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_value', full_name='caffe.TransformationParameter.mean_value', index=6,
      number=5, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_color', full_name='caffe.TransformationParameter.force_color', index=7,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_gray', full_name='caffe.TransformationParameter.force_gray', index=8,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_param', full_name='caffe.TransformationParameter.resize_param', index=9,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='noise_param', full_name='caffe.TransformationParameter.noise_param', index=10,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='distort_param', full_name='caffe.TransformationParameter.distort_param', index=11,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='expand_param', full_name='caffe.TransformationParameter.expand_param', index=12,
      number=14, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='emit_constraint', full_name='caffe.TransformationParameter.emit_constraint', index=13,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8390,
  serialized_end=8848,
)
# Descriptor for the `caffe.ResizeParameter` message (image-resize options
# used inside TransformationParameter).  NOTE(review): protoc-generated — do
# not edit by hand; regenerate from caffe.proto.  The three enum-typed fields
# (type=14 is TYPE_ENUM) reference the _RESIZEPARAMETER_* enum descriptors
# listed under enum_types, which are defined elsewhere in this module.
_RESIZEPARAMETER = _descriptor.Descriptor(
  name='ResizeParameter',
  full_name='caffe.ResizeParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='prob', full_name='caffe.ResizeParameter.prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_mode', full_name='caffe.ResizeParameter.resize_mode', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height', full_name='caffe.ResizeParameter.height', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width', full_name='caffe.ResizeParameter.width', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height_scale', full_name='caffe.ResizeParameter.height_scale', index=4,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width_scale', full_name='caffe.ResizeParameter.width_scale', index=5,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_mode', full_name='caffe.ResizeParameter.pad_mode', index=6,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_value', full_name='caffe.ResizeParameter.pad_value', index=7,
      number=6, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='interp_mode', full_name='caffe.ResizeParameter.interp_mode', index=8,
      number=7, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _RESIZEPARAMETER_RESIZE_MODE,
    _RESIZEPARAMETER_PAD_MODE,
    _RESIZEPARAMETER_INTERP_MODE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8851,
  serialized_end=9379,
)
# Descriptor for the `caffe.SaltPepperParameter` message (salt-and-pepper
# noise settings: an optional float `fraction` defaulting to 0 and a repeated
# float `value` list).  NOTE(review): protoc-generated — do not edit by hand;
# regenerate from caffe.proto.
_SALTPEPPERPARAMETER = _descriptor.Descriptor(
  name='SaltPepperParameter',
  full_name='caffe.SaltPepperParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='fraction', full_name='caffe.SaltPepperParameter.fraction', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='caffe.SaltPepperParameter.value', index=1,
      number=2, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9381,
  serialized_end=9438,
)
# Descriptor for the `caffe.NoiseParameter` message (noise-augmentation
# switches: mostly optional bools defaulting to False, a float `prob`/`jpeg`,
# and a nested `saltpepper_param` SaltPepperParameter message).
# NOTE(review): protoc-generated — do not edit by hand; regenerate from
# caffe.proto.
_NOISEPARAMETER = _descriptor.Descriptor(
  name='NoiseParameter',
  full_name='caffe.NoiseParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='prob', full_name='caffe.NoiseParameter.prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hist_eq', full_name='caffe.NoiseParameter.hist_eq', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='inverse', full_name='caffe.NoiseParameter.inverse', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='decolorize', full_name='caffe.NoiseParameter.decolorize', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gauss_blur', full_name='caffe.NoiseParameter.gauss_blur', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='jpeg', full_name='caffe.NoiseParameter.jpeg', index=5,
      number=6, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(-1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='posterize', full_name='caffe.NoiseParameter.posterize', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='erode', full_name='caffe.NoiseParameter.erode', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saltpepper', full_name='caffe.NoiseParameter.saltpepper', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saltpepper_param', full_name='caffe.NoiseParameter.saltpepper_param', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='clahe', full_name='caffe.NoiseParameter.clahe', index=10,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='convert_to_hsv', full_name='caffe.NoiseParameter.convert_to_hsv', index=11,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='convert_to_lab', full_name='caffe.NoiseParameter.convert_to_lab', index=12,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9441,
  serialized_end=9807,
)
# NOTE(review): This section is protocol-buffer compiler (protoc) output for
# caffe.proto — message Descriptor objects with exact field numbers, wire-type
# codes, and serialized byte offsets. Do NOT edit these values by hand; change
# the .proto source and regenerate instead. Comments below only label each
# generated message descriptor.

# Descriptor for caffe.DistortionParameter: eleven optional float fields
# (field numbers 1-11), each defaulting to 0, controlling photometric
# distortion probabilities/ranges (brightness, contrast, hue, saturation,
# random ordering).
_DISTORTIONPARAMETER = _descriptor.Descriptor(
  name='DistortionParameter',
  full_name='caffe.DistortionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='brightness_prob', full_name='caffe.DistortionParameter.brightness_prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='brightness_delta', full_name='caffe.DistortionParameter.brightness_delta', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='contrast_prob', full_name='caffe.DistortionParameter.contrast_prob', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='contrast_lower', full_name='caffe.DistortionParameter.contrast_lower', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='contrast_upper', full_name='caffe.DistortionParameter.contrast_upper', index=4,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hue_prob', full_name='caffe.DistortionParameter.hue_prob', index=5,
      number=6, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hue_delta', full_name='caffe.DistortionParameter.hue_delta', index=6,
      number=7, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saturation_prob', full_name='caffe.DistortionParameter.saturation_prob', index=7,
      number=8, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saturation_lower', full_name='caffe.DistortionParameter.saturation_lower', index=8,
      number=9, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saturation_upper', full_name='caffe.DistortionParameter.saturation_upper', index=9,
      number=10, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='random_order_prob', full_name='caffe.DistortionParameter.random_order_prob', index=10,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9810,
  serialized_end=10127,
)
# Descriptor for caffe.ExpansionParameter: two optional floats
# (prob, max_expand_ratio), both defaulting to 1.
_EXPANSIONPARAMETER = _descriptor.Descriptor(
  name='ExpansionParameter',
  full_name='caffe.ExpansionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='prob', full_name='caffe.ExpansionParameter.prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_expand_ratio', full_name='caffe.ExpansionParameter.max_expand_ratio', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10129,
  serialized_end=10195,
)
# Descriptor for caffe.LossParameter: ignore_label (int32),
# normalization (enum _LOSSPARAMETER_NORMALIZATIONMODE, default 1),
# and the legacy bool 'normalize' flag.
_LOSSPARAMETER = _descriptor.Descriptor(
  name='LossParameter',
  full_name='caffe.LossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='ignore_label', full_name='caffe.LossParameter.ignore_label', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='normalization', full_name='caffe.LossParameter.normalization', index=1,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='normalize', full_name='caffe.LossParameter.normalize', index=2,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _LOSSPARAMETER_NORMALIZATIONMODE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10198,
  serialized_end=10392,
)
# Descriptor for caffe.AccuracyParameter: top_k (uint32, default 1),
# axis (int32, default 1), ignore_label (int32).
_ACCURACYPARAMETER = _descriptor.Descriptor(
  name='AccuracyParameter',
  full_name='caffe.AccuracyParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='top_k', full_name='caffe.AccuracyParameter.top_k', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.AccuracyParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ignore_label', full_name='caffe.AccuracyParameter.ignore_label', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10394,
  serialized_end=10470,
)
# Descriptor for caffe.AnnotatedDataParameter: repeated batch_sampler
# messages, label_map_file (string), and the anno_type enum.
_ANNOTATEDDATAPARAMETER = _descriptor.Descriptor(
  name='AnnotatedDataParameter',
  full_name='caffe.AnnotatedDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='batch_sampler', full_name='caffe.AnnotatedDataParameter.batch_sampler', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='label_map_file', full_name='caffe.AnnotatedDataParameter.label_map_file', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='anno_type', full_name='caffe.AnnotatedDataParameter.anno_type', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10473,
  serialized_end=10622,
)
# Descriptor for caffe.ArgMaxParameter: out_max_val (bool, default False),
# top_k (uint32, default 1), axis (int32).
_ARGMAXPARAMETER = _descriptor.Descriptor(
  name='ArgMaxParameter',
  full_name='caffe.ArgMaxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='out_max_val', full_name='caffe.ArgMaxParameter.out_max_val', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='top_k', full_name='caffe.ArgMaxParameter.top_k', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ArgMaxParameter.axis', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10624,
  serialized_end=10701,
)
# Descriptor for caffe.ConcatParameter: axis (int32, default 1) and the
# deprecated concat_dim (uint32, default 1).
_CONCATPARAMETER = _descriptor.Descriptor(
  name='ConcatParameter',
  full_name='caffe.ConcatParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ConcatParameter.axis', index=0,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='concat_dim', full_name='caffe.ConcatParameter.concat_dim', index=1,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10703,
  serialized_end=10760,
)
# Descriptor for caffe.BatchNormParameter: use_global_stats (bool),
# moving_average_fraction (float, default 0.999), eps (float, default 1e-5).
_BATCHNORMPARAMETER = _descriptor.Descriptor(
  name='BatchNormParameter',
  full_name='caffe.BatchNormParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='use_global_stats', full_name='caffe.BatchNormParameter.use_global_stats', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='moving_average_fraction', full_name='caffe.BatchNormParameter.moving_average_fraction', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.999),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eps', full_name='caffe.BatchNormParameter.eps', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1e-005),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10762,
  serialized_end=10869,
)
# Descriptor for caffe.BiasParameter: axis (default 1), num_axes (default 1),
# and a filler sub-message.
_BIASPARAMETER = _descriptor.Descriptor(
  name='BiasParameter',
  full_name='caffe.BiasParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.BiasParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_axes', full_name='caffe.BiasParameter.num_axes', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='filler', full_name='caffe.BiasParameter.filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10871,
  serialized_end=10964,
)
# Descriptor for caffe.ContrastiveLossParameter: margin (float, default 1)
# and legacy_version (bool, default False).
_CONTRASTIVELOSSPARAMETER = _descriptor.Descriptor(
  name='ContrastiveLossParameter',
  full_name='caffe.ContrastiveLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='margin', full_name='caffe.ContrastiveLossParameter.margin', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='legacy_version', full_name='caffe.ContrastiveLossParameter.legacy_version', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10966,
  serialized_end=11042,
)
# Descriptor for caffe.ConvolutionParameter: 18 fields covering output count,
# bias, repeated pad/kernel_size/stride/dilation, the 2D *_h/*_w variants,
# group, fillers, the engine enum (_CONVOLUTIONPARAMETER_ENGINE), axis, and
# force_nd_im2col.
_CONVOLUTIONPARAMETER = _descriptor.Descriptor(
  name='ConvolutionParameter',
  full_name='caffe.ConvolutionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.ConvolutionParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.ConvolutionParameter.bias_term', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad', full_name='caffe.ConvolutionParameter.pad', index=2,
      number=3, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_size', full_name='caffe.ConvolutionParameter.kernel_size', index=3,
      number=4, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride', full_name='caffe.ConvolutionParameter.stride', index=4,
      number=6, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='dilation', full_name='caffe.ConvolutionParameter.dilation', index=5,
      number=18, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_h', full_name='caffe.ConvolutionParameter.pad_h', index=6,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_w', full_name='caffe.ConvolutionParameter.pad_w', index=7,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_h', full_name='caffe.ConvolutionParameter.kernel_h', index=8,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_w', full_name='caffe.ConvolutionParameter.kernel_w', index=9,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_h', full_name='caffe.ConvolutionParameter.stride_h', index=10,
      number=13, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_w', full_name='caffe.ConvolutionParameter.stride_w', index=11,
      number=14, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group', full_name='caffe.ConvolutionParameter.group', index=12,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.ConvolutionParameter.weight_filler', index=13,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.ConvolutionParameter.bias_filler', index=14,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.ConvolutionParameter.engine', index=15,
      number=15, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ConvolutionParameter.axis', index=16,
      number=16, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_nd_im2col', full_name='caffe.ConvolutionParameter.force_nd_im2col', index=17,
      number=17, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CONVOLUTIONPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11045,
  serialized_end=11553,
)
# Descriptor for caffe.CropParameter: axis (int32, default 2) and a
# repeated uint32 offset.
_CROPPARAMETER = _descriptor.Descriptor(
  name='CropParameter',
  full_name='caffe.CropParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.CropParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='offset', full_name='caffe.CropParameter.offset', index=1,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11555,
  serialized_end=11603,
)
# Descriptor for caffe.DataParameter: LMDB/LevelDB data-layer options —
# source/mean_file strings, batch_size, rand_skip, the backend enum
# (_DATAPARAMETER_DB), scale, crop_size, mirror, force_encoded_color,
# and prefetch (default 4).
_DATAPARAMETER = _descriptor.Descriptor(
  name='DataParameter',
  full_name='caffe.DataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.DataParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.DataParameter.batch_size', index=1,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rand_skip', full_name='caffe.DataParameter.rand_skip', index=2,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='backend', full_name='caffe.DataParameter.backend', index=3,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.DataParameter.scale', index=4,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_file', full_name='caffe.DataParameter.mean_file', index=5,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_size', full_name='caffe.DataParameter.crop_size', index=6,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mirror', full_name='caffe.DataParameter.mirror', index=7,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_encoded_color', full_name='caffe.DataParameter.force_encoded_color', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='prefetch', full_name='caffe.DataParameter.prefetch', index=9,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=4,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _DATAPARAMETER_DB,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11606,
  serialized_end=11898,
)
# Descriptor for caffe.DetectionEvaluateParameter: num_classes,
# background_label_id (default 0), overlap_threshold (default 0.5),
# evaluate_difficult_gt (default True), name_size_file, resize_param.
_DETECTIONEVALUATEPARAMETER = _descriptor.Descriptor(
  name='DetectionEvaluateParameter',
  full_name='caffe.DetectionEvaluateParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_classes', full_name='caffe.DetectionEvaluateParameter.num_classes', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='background_label_id', full_name='caffe.DetectionEvaluateParameter.background_label_id', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='overlap_threshold', full_name='caffe.DetectionEvaluateParameter.overlap_threshold', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.5),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='evaluate_difficult_gt', full_name='caffe.DetectionEvaluateParameter.evaluate_difficult_gt', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name_size_file', full_name='caffe.DetectionEvaluateParameter.name_size_file', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_param', full_name='caffe.DetectionEvaluateParameter.resize_param', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11901,
  serialized_end=12121,
)
# Descriptor for caffe.NonMaximumSuppressionParameter: nms_threshold
# (default 0.3), top_k, eta (default 1).
_NONMAXIMUMSUPPRESSIONPARAMETER = _descriptor.Descriptor(
  name='NonMaximumSuppressionParameter',
  full_name='caffe.NonMaximumSuppressionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='nms_threshold', full_name='caffe.NonMaximumSuppressionParameter.nms_threshold', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.3),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='top_k', full_name='caffe.NonMaximumSuppressionParameter.top_k', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eta', full_name='caffe.NonMaximumSuppressionParameter.eta', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12123,
  serialized_end=12214,
)
# Descriptor for caffe.SaveOutputParameter: five output-path/name strings,
# num_test_image (uint32), and a resize_param sub-message.
_SAVEOUTPUTPARAMETER = _descriptor.Descriptor(
  name='SaveOutputParameter',
  full_name='caffe.SaveOutputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='output_directory', full_name='caffe.SaveOutputParameter.output_directory', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_name_prefix', full_name='caffe.SaveOutputParameter.output_name_prefix', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_format', full_name='caffe.SaveOutputParameter.output_format', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='label_map_file', full_name='caffe.SaveOutputParameter.label_map_file', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name_size_file', full_name='caffe.SaveOutputParameter.name_size_file', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_test_image', full_name='caffe.SaveOutputParameter.num_test_image', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_param', full_name='caffe.SaveOutputParameter.resize_param', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12217,
  serialized_end=12433,
)
# Descriptor table for the `caffe.DetectionOutputParameter` message (SSD-style
# detection output: class count, NMS / save-output sub-messages, thresholds).
# NOTE(review): generated protobuf code — field numbers/types and the
# serialized_start/end offsets must stay in sync with the serialized file
# descriptor; regenerate from the .proto rather than hand-editing.
_DETECTIONOUTPUTPARAMETER = _descriptor.Descriptor(
  name='DetectionOutputParameter',
  full_name='caffe.DetectionOutputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_classes', full_name='caffe.DetectionOutputParameter.num_classes', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='share_location', full_name='caffe.DetectionOutputParameter.share_location', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='background_label_id', full_name='caffe.DetectionOutputParameter.background_label_id', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='nms_param', full_name='caffe.DetectionOutputParameter.nms_param', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='save_output_param', full_name='caffe.DetectionOutputParameter.save_output_param', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='code_type', full_name='caffe.DetectionOutputParameter.code_type', index=5,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='variance_encoded_in_target', full_name='caffe.DetectionOutputParameter.variance_encoded_in_target', index=6,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='keep_top_k', full_name='caffe.DetectionOutputParameter.keep_top_k', index=7,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='confidence_threshold', full_name='caffe.DetectionOutputParameter.confidence_threshold', index=8,
      number=9, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='visualize', full_name='caffe.DetectionOutputParameter.visualize', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='visualize_threshold', full_name='caffe.DetectionOutputParameter.visualize_threshold', index=10,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='save_file', full_name='caffe.DetectionOutputParameter.save_file', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12436,  # byte offsets into the serialized FileDescriptorProto
  serialized_end=12891,
)
# Descriptor table for `caffe.DropoutParameter` (single field: dropout_ratio,
# optional float defaulting to 0.5). Generated protobuf code — do not hand-edit.
_DROPOUTPARAMETER = _descriptor.Descriptor(
  name='DropoutParameter',
  full_name='caffe.DropoutParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='dropout_ratio', full_name='caffe.DropoutParameter.dropout_ratio', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.5),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12893,
  serialized_end=12939,
)
# Descriptor table for `caffe.DummyDataParameter`: repeated fillers/shapes plus
# legacy repeated num/channels/height/width dims (label=3 marks repeated
# fields). Generated protobuf code — do not hand-edit.
_DUMMYDATAPARAMETER = _descriptor.Descriptor(
  name='DummyDataParameter',
  full_name='caffe.DummyDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='data_filler', full_name='caffe.DummyDataParameter.data_filler', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.DummyDataParameter.shape', index=1,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num', full_name='caffe.DummyDataParameter.num', index=2,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channels', full_name='caffe.DummyDataParameter.channels', index=3,
      number=3, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height', full_name='caffe.DummyDataParameter.height', index=4,
      number=4, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width', full_name='caffe.DummyDataParameter.width', index=5,
      number=5, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12942,
  serialized_end=13102,
)
# Descriptor table for `caffe.EltwiseParameter` (element-wise op selector,
# per-blob coefficients, stable-product-gradient flag). References the
# EltwiseOp enum descriptor defined earlier in this generated module.
_ELTWISEPARAMETER = _descriptor.Descriptor(
  name='EltwiseParameter',
  full_name='caffe.EltwiseParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='operation', full_name='caffe.EltwiseParameter.operation', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='coeff', full_name='caffe.EltwiseParameter.coeff', index=1,
      number=2, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stable_prod_grad', full_name='caffe.EltwiseParameter.stable_prod_grad', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _ELTWISEPARAMETER_ELTWISEOP,  # nested EltwiseOp enum descriptor
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13105,
  serialized_end=13270,
)
# Descriptor table for `caffe.ELUParameter` (single field: alpha, optional
# float defaulting to 1). Generated protobuf code — do not hand-edit.
_ELUPARAMETER = _descriptor.Descriptor(
  name='ELUParameter',
  full_name='caffe.ELUParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='alpha', full_name='caffe.ELUParameter.alpha', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13272,
  serialized_end=13304,
)
# Descriptor table for `caffe.EmbedParameter` (embedding layer: output/input
# dims, bias flag, weight/bias filler sub-messages). Generated protobuf code.
_EMBEDPARAMETER = _descriptor.Descriptor(
  name='EmbedParameter',
  full_name='caffe.EmbedParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.EmbedParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_dim', full_name='caffe.EmbedParameter.input_dim', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.EmbedParameter.bias_term', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.EmbedParameter.weight_filler', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.EmbedParameter.bias_filler', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13307,
  serialized_end=13479,
)
# Descriptor table for `caffe.ExpParameter` (base/scale/shift floats; base
# defaults to -1, which in Caffe's exp layer denotes natural base e — see the
# original .proto comments; TODO confirm against caffe.proto). Generated code.
_EXPPARAMETER = _descriptor.Descriptor(
  name='ExpParameter',
  full_name='caffe.ExpParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='base', full_name='caffe.ExpParameter.base', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(-1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.ExpParameter.scale', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shift', full_name='caffe.ExpParameter.shift', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13481,
  serialized_end=13549,
)
# Descriptor table for `caffe.FlattenParameter` (axis range to flatten:
# axis defaults to 1, end_axis to -1). Generated protobuf code.
_FLATTENPARAMETER = _descriptor.Descriptor(
  name='FlattenParameter',
  full_name='caffe.FlattenParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.FlattenParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='end_axis', full_name='caffe.FlattenParameter.end_axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13551,
  serialized_end=13608,
)
# Descriptor table for `caffe.HDF5DataParameter` (source path, batch size,
# shuffle flag for the HDF5 data layer). Generated protobuf code.
_HDF5DATAPARAMETER = _descriptor.Descriptor(
  name='HDF5DataParameter',
  full_name='caffe.HDF5DataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.HDF5DataParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.HDF5DataParameter.batch_size', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shuffle', full_name='caffe.HDF5DataParameter.shuffle', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13610,
  serialized_end=13689,
)
# Descriptor table for `caffe.HDF5OutputParameter` (single field: file_name).
# Generated protobuf code — do not hand-edit.
_HDF5OUTPUTPARAMETER = _descriptor.Descriptor(
  name='HDF5OutputParameter',
  full_name='caffe.HDF5OutputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='file_name', full_name='caffe.HDF5OutputParameter.file_name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13691,
  serialized_end=13731,
)
# Descriptor table for `caffe.HingeLossParameter` (norm enum field; nested
# Norm enum descriptor defined earlier in this generated module).
_HINGELOSSPARAMETER = _descriptor.Descriptor(
  name='HingeLossParameter',
  full_name='caffe.HingeLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='norm', full_name='caffe.HingeLossParameter.norm', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _HINGELOSSPARAMETER_NORM,  # nested Norm enum descriptor
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13733,
  serialized_end=13827,
)
# Descriptor table for `caffe.ImageDataParameter` (image-list data layer:
# source/root paths, batch/skip/shuffle, resize dims, color flag, plus the
# legacy scale/mean_file/crop_size/mirror transform fields). Generated code.
_IMAGEDATAPARAMETER = _descriptor.Descriptor(
  name='ImageDataParameter',
  full_name='caffe.ImageDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.ImageDataParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.ImageDataParameter.batch_size', index=1,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rand_skip', full_name='caffe.ImageDataParameter.rand_skip', index=2,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shuffle', full_name='caffe.ImageDataParameter.shuffle', index=3,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='new_height', full_name='caffe.ImageDataParameter.new_height', index=4,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='new_width', full_name='caffe.ImageDataParameter.new_width', index=5,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_color', full_name='caffe.ImageDataParameter.is_color', index=6,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.ImageDataParameter.scale', index=7,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_file', full_name='caffe.ImageDataParameter.mean_file', index=8,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_size', full_name='caffe.ImageDataParameter.crop_size', index=9,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mirror', full_name='caffe.ImageDataParameter.mirror', index=10,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='root_folder', full_name='caffe.ImageDataParameter.root_folder', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13830,
  serialized_end=14109,
)
# Descriptor table for `caffe.InfogainLossParameter` (single field: source
# path of the infogain matrix file). Generated protobuf code.
_INFOGAINLOSSPARAMETER = _descriptor.Descriptor(
  name='InfogainLossParameter',
  full_name='caffe.InfogainLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.InfogainLossParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14111,
  serialized_end=14150,
)
# Descriptor table for `caffe.InnerProductParameter` (fully-connected layer:
# output count, bias flag, fillers, flatten axis, weight-transpose flag).
# Generated protobuf code — do not hand-edit.
_INNERPRODUCTPARAMETER = _descriptor.Descriptor(
  name='InnerProductParameter',
  full_name='caffe.InnerProductParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.InnerProductParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.InnerProductParameter.bias_term', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.InnerProductParameter.weight_filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.InnerProductParameter.bias_filler', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.InnerProductParameter.axis', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='transpose', full_name='caffe.InnerProductParameter.transpose', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14153,
  serialized_end=14356,
)
# Descriptor table for `caffe.InputParameter` (repeated blob-shape messages
# describing network input dimensions). Generated protobuf code.
_INPUTPARAMETER = _descriptor.Descriptor(
  name='InputParameter',
  full_name='caffe.InputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.InputParameter.shape', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14358,
  serialized_end=14407,
)
# Descriptor table for `caffe.LogParameter` (base/scale/shift floats; field
# layout mirrors ExpParameter). Generated protobuf code — do not hand-edit.
_LOGPARAMETER = _descriptor.Descriptor(
  name='LogParameter',
  full_name='caffe.LogParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='base', full_name='caffe.LogParameter.base', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(-1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.LogParameter.scale', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shift', full_name='caffe.LogParameter.shift', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14409,
  serialized_end=14477,
)
# Descriptor table for `caffe.LRNParameter` (local response normalization:
# window size, alpha/beta/k, norm-region and engine enums). References the
# NormRegion and Engine enum descriptors defined earlier in this module.
_LRNPARAMETER = _descriptor.Descriptor(
  name='LRNParameter',
  full_name='caffe.LRNParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='local_size', full_name='caffe.LRNParameter.local_size', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=5,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='alpha', full_name='caffe.LRNParameter.alpha', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='beta', full_name='caffe.LRNParameter.beta', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.75),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='norm_region', full_name='caffe.LRNParameter.norm_region', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='k', full_name='caffe.LRNParameter.k', index=4,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.LRNParameter.engine', index=5,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _LRNPARAMETER_NORMREGION,  # nested NormRegion enum descriptor
    _LRNPARAMETER_ENGINE,  # nested Engine enum descriptor
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14480,
  serialized_end=14792,
)
# Descriptor table for `caffe.MemoryDataParameter` (in-memory data layer:
# batch/channels/height/width dims plus a transpose flag). Generated code.
_MEMORYDATAPARAMETER = _descriptor.Descriptor(
  name='MemoryDataParameter',
  full_name='caffe.MemoryDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.MemoryDataParameter.batch_size', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channels', full_name='caffe.MemoryDataParameter.channels', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height', full_name='caffe.MemoryDataParameter.height', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width', full_name='caffe.MemoryDataParameter.width', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='transpose', full_name='caffe.MemoryDataParameter.transpose', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14794,
  serialized_end=14910,
)
_MULTIBOXLOSSPARAMETER = _descriptor.Descriptor(
name='MultiBoxLossParameter',
full_name='caffe.MultiBoxLossParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='loc_loss_type', full_name='caffe.MultiBoxLossParameter.loc_loss_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='conf_loss_type', full_name='caffe.MultiBoxLossParameter.conf_loss_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loc_weight', full_name='caffe.MultiBoxLossParameter.loc_weight', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_classes', full_name='caffe.MultiBoxLossParameter.num_classes', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='share_location', full_name='caffe.MultiBoxLossParameter.share_location', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='match_type', full_name='caffe.MultiBoxLossParameter.match_type', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='overlap_threshold', full_name='caffe.MultiBoxLossParameter.overlap_threshold', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_prior_for_matching', full_name='caffe.MultiBoxLossParameter.use_prior_for_matching', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='background_label_id', full_name='caffe.MultiBoxLossParameter.background_label_id', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_difficult_gt', full_name='caffe.MultiBoxLossParameter.use_difficult_gt', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='do_neg_mining', full_name='caffe.MultiBoxLossParameter.do_neg_mining', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neg_pos_ratio', full_name='caffe.MultiBoxLossParameter.neg_pos_ratio', index=11,
number=12, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(3),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neg_overlap', full_name='caffe.MultiBoxLossParameter.neg_overlap', index=12,
number=13, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='code_type', full_name='caffe.MultiBoxLossParameter.code_type', index=13,
number=14, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='encode_variance_in_target', full_name='caffe.MultiBoxLossParameter.encode_variance_in_target', index=14,
number=16, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_object_to_agnostic', full_name='caffe.MultiBoxLossParameter.map_object_to_agnostic', index=15,
number=17, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ignore_cross_boundary_bbox', full_name='caffe.MultiBoxLossParameter.ignore_cross_boundary_bbox', index=16,
number=18, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bp_inside', full_name='caffe.MultiBoxLossParameter.bp_inside', index=17,
number=19, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mining_type', full_name='caffe.MultiBoxLossParameter.mining_type', index=18,
number=20, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='nms_param', full_name='caffe.MultiBoxLossParameter.nms_param', index=19,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sample_size', full_name='caffe.MultiBoxLossParameter.sample_size', index=20,
number=22, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=64,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_prior_for_nms', full_name='caffe.MultiBoxLossParameter.use_prior_for_nms', index=21,
number=23, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE,
_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE,
_MULTIBOXLOSSPARAMETER_MATCHTYPE,
_MULTIBOXLOSSPARAMETER_MININGTYPE,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=14913,
serialized_end=16041,
)
# NOTE(review): this file is auto-generated protobuf descriptor code (protoc
# output for caffe.proto). Do not edit it by hand -- the serialized_start/
# serialized_end offsets index into the file's serialized descriptor blob, so
# any manual change must instead be made in the .proto and regenerated.

# Descriptor for caffe.MVNParameter: fields normalize_variance (bool,
# default True), across_channels (bool, default False), eps (float, 1e-9).
_MVNPARAMETER = _descriptor.Descriptor(
  name='MVNParameter',
  full_name='caffe.MVNParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='normalize_variance', full_name='caffe.MVNParameter.normalize_variance', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='across_channels', full_name='caffe.MVNParameter.across_channels', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eps', full_name='caffe.MVNParameter.eps', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1e-009),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16043,
  serialized_end=16144,
)
# Descriptor for caffe.NormalizeParameter: across_spatial (bool, default True),
# scale_filler (message), channel_shared (bool, default True), eps (float, 1e-10).
_NORMALIZEPARAMETER = _descriptor.Descriptor(
  name='NormalizeParameter',
  full_name='caffe.NormalizeParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='across_spatial', full_name='caffe.NormalizeParameter.across_spatial', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale_filler', full_name='caffe.NormalizeParameter.scale_filler', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channel_shared', full_name='caffe.NormalizeParameter.channel_shared', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eps', full_name='caffe.NormalizeParameter.eps', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1e-010),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16147,
  serialized_end=16294,
)
# Descriptor for caffe.ParameterParameter: single message field 'shape'.
_PARAMETERPARAMETER = _descriptor.Descriptor(
  name='ParameterParameter',
  full_name='caffe.ParameterParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.ParameterParameter.shape', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16296,
  serialized_end=16349,
)
# Descriptor for caffe.PermuteParameter: single repeated uint32 field 'order'.
_PERMUTEPARAMETER = _descriptor.Descriptor(
  name='PermuteParameter',
  full_name='caffe.PermuteParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='order', full_name='caffe.PermuteParameter.order', index=0,
      number=1, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16351,
  serialized_end=16384,
)
# Descriptor for caffe.PoolingParameter: pool method, pad/kernel/stride
# (plus their _h/_w variants), engine, and global_pooling; references the
# PoolMethod and Engine nested enums.
_POOLINGPARAMETER = _descriptor.Descriptor(
  name='PoolingParameter',
  full_name='caffe.PoolingParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pool', full_name='caffe.PoolingParameter.pool', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad', full_name='caffe.PoolingParameter.pad', index=1,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_h', full_name='caffe.PoolingParameter.pad_h', index=2,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_w', full_name='caffe.PoolingParameter.pad_w', index=3,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_size', full_name='caffe.PoolingParameter.kernel_size', index=4,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_h', full_name='caffe.PoolingParameter.kernel_h', index=5,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_w', full_name='caffe.PoolingParameter.kernel_w', index=6,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride', full_name='caffe.PoolingParameter.stride', index=7,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_h', full_name='caffe.PoolingParameter.stride_h', index=8,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_w', full_name='caffe.PoolingParameter.stride_w', index=9,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.PoolingParameter.engine', index=10,
      number=11, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='global_pooling', full_name='caffe.PoolingParameter.global_pooling', index=11,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _POOLINGPARAMETER_POOLMETHOD,
    _POOLINGPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16387,
  serialized_end=16805,
)
# Descriptor for caffe.PowerParameter: power (default 1), scale (default 1),
# shift (default 0) -- all floats.
_POWERPARAMETER = _descriptor.Descriptor(
  name='PowerParameter',
  full_name='caffe.PowerParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='power', full_name='caffe.PowerParameter.power', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.PowerParameter.scale', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shift', full_name='caffe.PowerParameter.shift', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16807,
  serialized_end=16877,
)
# Generated protobuf descriptor definitions (protoc output) -- do not edit by hand.

# Descriptor for caffe.PriorBoxParameter: repeated min_size/max_size/
# aspect_ratio/variance, flip/clip flags, img/step geometry fields, and
# offset (default 0.5); references the CodeType nested enum.
_PRIORBOXPARAMETER = _descriptor.Descriptor(
  name='PriorBoxParameter',
  full_name='caffe.PriorBoxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='min_size', full_name='caffe.PriorBoxParameter.min_size', index=0,
      number=1, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_size', full_name='caffe.PriorBoxParameter.max_size', index=1,
      number=2, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='aspect_ratio', full_name='caffe.PriorBoxParameter.aspect_ratio', index=2,
      number=3, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='flip', full_name='caffe.PriorBoxParameter.flip', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='clip', full_name='caffe.PriorBoxParameter.clip', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='variance', full_name='caffe.PriorBoxParameter.variance', index=5,
      number=6, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='img_size', full_name='caffe.PriorBoxParameter.img_size', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='img_h', full_name='caffe.PriorBoxParameter.img_h', index=7,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='img_w', full_name='caffe.PriorBoxParameter.img_w', index=8,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='step', full_name='caffe.PriorBoxParameter.step', index=9,
      number=10, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='step_h', full_name='caffe.PriorBoxParameter.step_h', index=10,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='step_w', full_name='caffe.PriorBoxParameter.step_w', index=11,
      number=12, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='offset', full_name='caffe.PriorBoxParameter.offset', index=12,
      number=13, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.5),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _PRIORBOXPARAMETER_CODETYPE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16880,
  serialized_end=17189,
)
# Descriptor for caffe.PythonParameter: module/layer/param_str strings and
# share_in_parallel flag.
_PYTHONPARAMETER = _descriptor.Descriptor(
  name='PythonParameter',
  full_name='caffe.PythonParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='module', full_name='caffe.PythonParameter.module', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='layer', full_name='caffe.PythonParameter.layer', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='param_str', full_name='caffe.PythonParameter.param_str', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='share_in_parallel', full_name='caffe.PythonParameter.share_in_parallel', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17191,
  serialized_end=17294,
)
# Descriptor for caffe.RecurrentParameter: num_output, weight/bias fillers,
# debug_info and expose_hidden flags.
_RECURRENTPARAMETER = _descriptor.Descriptor(
  name='RecurrentParameter',
  full_name='caffe.RecurrentParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.RecurrentParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.RecurrentParameter.weight_filler', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.RecurrentParameter.bias_filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug_info', full_name='caffe.RecurrentParameter.debug_info', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='expose_hidden', full_name='caffe.RecurrentParameter.expose_hidden', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17297,
  serialized_end=17489,
)
# Descriptor for caffe.ReductionParameter: operation (enum, default 1), axis
# (int32, default 0), coeff (float, default 1); references the ReductionOp enum.
_REDUCTIONPARAMETER = _descriptor.Descriptor(
  name='ReductionParameter',
  full_name='caffe.ReductionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='operation', full_name='caffe.ReductionParameter.operation', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ReductionParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='coeff', full_name='caffe.ReductionParameter.coeff', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _REDUCTIONPARAMETER_REDUCTIONOP,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17492,
  serialized_end=17665,
)
# Generated protobuf descriptor definitions (protoc output) -- do not edit by hand.

# Descriptor for caffe.ReLUParameter: negative_slope (float, default 0) and
# engine (enum); references the Engine nested enum.
_RELUPARAMETER = _descriptor.Descriptor(
  name='ReLUParameter',
  full_name='caffe.ReLUParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='negative_slope', full_name='caffe.ReLUParameter.negative_slope', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.ReLUParameter.engine', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _RELUPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17668,
  serialized_end=17809,
)
# Descriptor for caffe.ReshapeParameter: shape (message), axis (default 0),
# num_axes (default -1).
_RESHAPEPARAMETER = _descriptor.Descriptor(
  name='ReshapeParameter',
  full_name='caffe.ReshapeParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.ReshapeParameter.shape', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ReshapeParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_axes', full_name='caffe.ReshapeParameter.num_axes', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17811,
  serialized_end=17901,
)
# Descriptor for caffe.ScaleParameter: axis/num_axes (both default 1), filler,
# bias_term flag, bias_filler.
_SCALEPARAMETER = _descriptor.Descriptor(
  name='ScaleParameter',
  full_name='caffe.ScaleParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ScaleParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_axes', full_name='caffe.ScaleParameter.num_axes', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='filler', full_name='caffe.ScaleParameter.filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.ScaleParameter.bias_term', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.ScaleParameter.bias_filler', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17904,
  serialized_end=18069,
)
# Descriptor for caffe.SigmoidParameter: single 'engine' enum field;
# references the Engine nested enum.
_SIGMOIDPARAMETER = _descriptor.Descriptor(
  name='SigmoidParameter',
  full_name='caffe.SigmoidParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.SigmoidParameter.engine', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SIGMOIDPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18071,
  serialized_end=18191,
)
# Descriptor for caffe.SliceParameter: axis (field number 3, default 1),
# repeated slice_point, and legacy slice_dim (field number 1, default 1).
_SLICEPARAMETER = _descriptor.Descriptor(
  name='SliceParameter',
  full_name='caffe.SliceParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.SliceParameter.axis', index=0,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='slice_point', full_name='caffe.SliceParameter.slice_point', index=1,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='slice_dim', full_name='caffe.SliceParameter.slice_dim', index=2,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18193,
  serialized_end=18269,
)
# Descriptor for caffe.SoftmaxParameter: engine, axis, hard_ratio, repeated
# class_weight, hard_mining_label, cutting_point, and normalize_type string
# (default "Softmax"); references the Engine nested enum.
_SOFTMAXPARAMETER = _descriptor.Descriptor(
  name='SoftmaxParameter',
  full_name='caffe.SoftmaxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.SoftmaxParameter.engine', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.SoftmaxParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hard_ratio', full_name='caffe.SoftmaxParameter.hard_ratio', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(1),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='class_weight', full_name='caffe.SoftmaxParameter.class_weight', index=3,
      number=4, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hard_mining_label', full_name='caffe.SoftmaxParameter.hard_mining_label', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cutting_point', full_name='caffe.SoftmaxParameter.cutting_point', index=5,
      number=6, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='normalize_type', full_name='caffe.SoftmaxParameter.normalize_type', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("Softmax").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SOFTMAXPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18272,
  serialized_end=18537,
)
# Generated protobuf descriptor definitions (protoc output) -- do not edit by hand.

# Descriptor for caffe.TanHParameter: single 'engine' enum field; references
# the Engine nested enum.
_TANHPARAMETER = _descriptor.Descriptor(
  name='TanHParameter',
  full_name='caffe.TanHParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.TanHParameter.engine', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _TANHPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18539,
  serialized_end=18653,
)
# Descriptor for caffe.TileParameter: axis (default 1) and tiles (int32).
_TILEPARAMETER = _descriptor.Descriptor(
  name='TileParameter',
  full_name='caffe.TileParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.TileParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tiles', full_name='caffe.TileParameter.tiles', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18655,
  serialized_end=18702,
)
# Descriptor for caffe.ThresholdParameter: single 'threshold' float field
# (default 0).
_THRESHOLDPARAMETER = _descriptor.Descriptor(
  name='ThresholdParameter',
  full_name='caffe.ThresholdParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='threshold', full_name='caffe.ThresholdParameter.threshold', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18704,
  serialized_end=18746,
)
# Descriptor for caffe.VideoDataParameter: video_type enum, device_id,
# video_file string, skip_frames; references the VideoType nested enum.
_VIDEODATAPARAMETER = _descriptor.Descriptor(
  name='VideoDataParameter',
  full_name='caffe.VideoDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='video_type', full_name='caffe.VideoDataParameter.video_type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='device_id', full_name='caffe.VideoDataParameter.device_id', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='video_file', full_name='caffe.VideoDataParameter.video_file', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='skip_frames', full_name='caffe.VideoDataParameter.skip_frames', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _VIDEODATAPARAMETER_VIDEOTYPE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18749,
  serialized_end=18936,
)
_WINDOWDATAPARAMETER = _descriptor.Descriptor(
name='WindowDataParameter',
full_name='caffe.WindowDataParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='source', full_name='caffe.WindowDataParameter.source', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scale', full_name='caffe.WindowDataParameter.scale', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mean_file', full_name='caffe.WindowDataParameter.mean_file', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='batch_size', full_name='caffe.WindowDataParameter.batch_size', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='crop_size', full_name='caffe.WindowDataParameter.crop_size', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mirror', full_name='caffe.WindowDataParameter.mirror', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fg_threshold', full_name='caffe.WindowDataParameter.fg_threshold', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bg_threshold', full_name='caffe.WindowDataParameter.bg_threshold', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fg_fraction', full_name='caffe.WindowDataParameter.fg_fraction', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.25),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='context_pad', full_name='caffe.WindowDataParameter.context_pad', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='crop_mode', full_name='caffe.WindowDataParameter.crop_mode', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("warp").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='cache_images', full_name='caffe.WindowDataParameter.cache_images', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='root_folder', full_name='caffe.WindowDataParameter.root_folder', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=18939,
serialized_end=19260,
)
_SPPPARAMETER = _descriptor.Descriptor(
name='SPPParameter',
full_name='caffe.SPPParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pyramid_height', full_name='caffe.SPPParameter.pyramid_height', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pool', full_name='caffe.SPPParameter.pool', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='engine', full_name='caffe.SPPParameter.engine', index=2,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_SPPPARAMETER_POOLMETHOD,
_SPPPARAMETER_ENGINE,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=19263,
serialized_end=19498,
)
_V1LAYERPARAMETER = _descriptor.Descriptor(
name='V1LayerParameter',
full_name='caffe.V1LayerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bottom', full_name='caffe.V1LayerParameter.bottom', index=0,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='top', full_name='caffe.V1LayerParameter.top', index=1,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='name', full_name='caffe.V1LayerParameter.name', index=2,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='include', full_name='caffe.V1LayerParameter.include', index=3,
number=32, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exclude', full_name='caffe.V1LayerParameter.exclude', index=4,
number=33, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.V1LayerParameter.type', index=5,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.V1LayerParameter.blobs', index=6,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param', full_name='caffe.V1LayerParameter.param', index=7,
number=1001, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blob_share_mode', full_name='caffe.V1LayerParameter.blob_share_mode', index=8,
number=1002, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs_lr', full_name='caffe.V1LayerParameter.blobs_lr', index=9,
number=7, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weight_decay', full_name='caffe.V1LayerParameter.weight_decay', index=10,
number=8, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_weight', full_name='caffe.V1LayerParameter.loss_weight', index=11,
number=35, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='accuracy_param', full_name='caffe.V1LayerParameter.accuracy_param', index=12,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='argmax_param', full_name='caffe.V1LayerParameter.argmax_param', index=13,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='concat_param', full_name='caffe.V1LayerParameter.concat_param', index=14,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='contrastive_loss_param', full_name='caffe.V1LayerParameter.contrastive_loss_param', index=15,
number=40, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='convolution_param', full_name='caffe.V1LayerParameter.convolution_param', index=16,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data_param', full_name='caffe.V1LayerParameter.data_param', index=17,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dropout_param', full_name='caffe.V1LayerParameter.dropout_param', index=18,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dummy_data_param', full_name='caffe.V1LayerParameter.dummy_data_param', index=19,
number=26, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eltwise_param', full_name='caffe.V1LayerParameter.eltwise_param', index=20,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exp_param', full_name='caffe.V1LayerParameter.exp_param', index=21,
number=41, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_data_param', full_name='caffe.V1LayerParameter.hdf5_data_param', index=22,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_output_param', full_name='caffe.V1LayerParameter.hdf5_output_param', index=23,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hinge_loss_param', full_name='caffe.V1LayerParameter.hinge_loss_param', index=24,
number=29, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='image_data_param', full_name='caffe.V1LayerParameter.image_data_param', index=25,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='infogain_loss_param', full_name='caffe.V1LayerParameter.infogain_loss_param', index=26,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inner_product_param', full_name='caffe.V1LayerParameter.inner_product_param', index=27,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lrn_param', full_name='caffe.V1LayerParameter.lrn_param', index=28,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='memory_data_param', full_name='caffe.V1LayerParameter.memory_data_param', index=29,
number=22, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvn_param', full_name='caffe.V1LayerParameter.mvn_param', index=30,
number=34, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pooling_param', full_name='caffe.V1LayerParameter.pooling_param', index=31,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='power_param', full_name='caffe.V1LayerParameter.power_param', index=32,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='relu_param', full_name='caffe.V1LayerParameter.relu_param', index=33,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sigmoid_param', full_name='caffe.V1LayerParameter.sigmoid_param', index=34,
number=38, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='softmax_param', full_name='caffe.V1LayerParameter.softmax_param', index=35,
number=39, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='slice_param', full_name='caffe.V1LayerParameter.slice_param', index=36,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tanh_param', full_name='caffe.V1LayerParameter.tanh_param', index=37,
number=37, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='threshold_param', full_name='caffe.V1LayerParameter.threshold_param', index=38,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='window_data_param', full_name='caffe.V1LayerParameter.window_data_param', index=39,
number=20, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transform_param', full_name='caffe.V1LayerParameter.transform_param', index=40,
number=36, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_param', full_name='caffe.V1LayerParameter.loss_param', index=41,
number=42, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='layer', full_name='caffe.V1LayerParameter.layer', index=42,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_V1LAYERPARAMETER_LAYERTYPE,
_V1LAYERPARAMETER_DIMCHECKMODE,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=19501,
serialized_end=22029,
)
_V0LAYERPARAMETER = _descriptor.Descriptor(
name='V0LayerParameter',
full_name='caffe.V0LayerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='caffe.V0LayerParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.V0LayerParameter.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_output', full_name='caffe.V0LayerParameter.num_output', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='biasterm', full_name='caffe.V0LayerParameter.biasterm', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weight_filler', full_name='caffe.V0LayerParameter.weight_filler', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bias_filler', full_name='caffe.V0LayerParameter.bias_filler', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pad', full_name='caffe.V0LayerParameter.pad', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='kernelsize', full_name='caffe.V0LayerParameter.kernelsize', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='group', full_name='caffe.V0LayerParameter.group', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='stride', full_name='caffe.V0LayerParameter.stride', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pool', full_name='caffe.V0LayerParameter.pool', index=10,
number=11, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dropout_ratio', full_name='caffe.V0LayerParameter.dropout_ratio', index=11,
number=12, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='local_size', full_name='caffe.V0LayerParameter.local_size', index=12,
number=13, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='alpha', full_name='caffe.V0LayerParameter.alpha', index=13,
number=14, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='beta', full_name='caffe.V0LayerParameter.beta', index=14,
number=15, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.75),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='k', full_name='caffe.V0LayerParameter.k', index=15,
number=22, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='source', full_name='caffe.V0LayerParameter.source', index=16,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scale', full_name='caffe.V0LayerParameter.scale', index=17,
number=17, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(1),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='meanfile', full_name='caffe.V0LayerParameter.meanfile', index=18,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='batchsize', full_name='caffe.V0LayerParameter.batchsize', index=19,
number=19, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='cropsize', full_name='caffe.V0LayerParameter.cropsize', index=20,
number=20, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mirror', full_name='caffe.V0LayerParameter.mirror', index=21,
number=21, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.V0LayerParameter.blobs', index=22,
number=50, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs_lr', full_name='caffe.V0LayerParameter.blobs_lr', index=23,
number=51, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weight_decay', full_name='caffe.V0LayerParameter.weight_decay', index=24,
number=52, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rand_skip', full_name='caffe.V0LayerParameter.rand_skip', index=25,
number=53, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_fg_threshold', full_name='caffe.V0LayerParameter.det_fg_threshold', index=26,
number=54, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_bg_threshold', full_name='caffe.V0LayerParameter.det_bg_threshold', index=27,
number=55, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_fg_fraction', full_name='caffe.V0LayerParameter.det_fg_fraction', index=28,
number=56, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.25),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_context_pad', full_name='caffe.V0LayerParameter.det_context_pad', index=29,
number=58, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_crop_mode', full_name='caffe.V0LayerParameter.det_crop_mode', index=30,
number=59, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("warp").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_num', full_name='caffe.V0LayerParameter.new_num', index=31,
number=60, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_channels', full_name='caffe.V0LayerParameter.new_channels', index=32,
number=61, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_height', full_name='caffe.V0LayerParameter.new_height', index=33,
number=62, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_width', full_name='caffe.V0LayerParameter.new_width', index=34,
number=63, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='shuffle_images', full_name='caffe.V0LayerParameter.shuffle_images', index=35,
number=64, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='concat_dim', full_name='caffe.V0LayerParameter.concat_dim', index=36,
number=65, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_output_param', full_name='caffe.V0LayerParameter.hdf5_output_param', index=37,
number=1001, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_V0LAYERPARAMETER_POOLMETHOD,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=22032,
serialized_end=23053,
)
# Generated descriptor for message `caffe.PReLUParameter` (proto2). Field data
# reconstructs to:
#   optional FillerParameter filler = 1;
#   optional bool channel_shared = 2 [default = false];
# NOTE(review): machine-generated by protoc -- regenerate from caffe.proto
# rather than editing by hand; serialized_start/serialized_end are byte offsets
# into DESCRIPTOR.serialized_pb and must stay in sync with it.
_PRELUPARAMETER = _descriptor.Descriptor(
  name='PReLUParameter',
  full_name='caffe.PReLUParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='filler', full_name='caffe.PReLUParameter.filler', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channel_shared', full_name='caffe.PReLUParameter.channel_shared', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23055,
  serialized_end=23142,
)
# Generated descriptor for message `caffe.TransposeParameter` (proto2):
#   repeated int32 dim = 1;
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_TRANSPOSEPARAMETER = _descriptor.Descriptor(
  name='TransposeParameter',
  full_name='caffe.TransposeParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='dim', full_name='caffe.TransposeParameter.dim', index=0,
      number=1, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23144,
  serialized_end=23177,
)
# Generated descriptor for message `caffe.ReverseParameter` (proto2):
#   optional int32 axis = 1 [default = 0];
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_REVERSEPARAMETER = _descriptor.Descriptor(
  name='ReverseParameter',
  full_name='caffe.ReverseParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ReverseParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23179,
  serialized_end=23214,
)
# Generated descriptor for message `caffe.LSTMParameter` (proto2):
#   optional uint32 num_output = 1;
#   optional float clipping_threshold = 2 [default = 0];
#   optional FillerParameter weight_filler = 3;
#   optional FillerParameter bias_filler = 4;
#   optional uint32 batch_size = 5 [default = 1];
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_LSTMPARAMETER = _descriptor.Descriptor(
  name='LSTMParameter',
  full_name='caffe.LSTMParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.LSTMParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='clipping_threshold', full_name='caffe.LSTMParameter.clipping_threshold', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.LSTMParameter.weight_filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.LSTMParameter.bias_filler', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.LSTMParameter.batch_size', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23217,
  serialized_end=23398,
)
# Generated descriptor for message `caffe.CTCParameter` (proto2):
#   optional float threshold = 1 [default = 0.7];
#   optional Decoder decode_type = 2;   // enum nested in this message; default
#                                       // is the enum's first value (index 0)
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_CTCPARAMETER = _descriptor.Descriptor(
  name='CTCParameter',
  full_name='caffe.CTCParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='threshold', full_name='caffe.CTCParameter.threshold', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=float(0.7),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='decode_type', full_name='caffe.CTCParameter.decode_type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CTCPARAMETER_DECODER,
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23401,
  serialized_end=23566,
)
# Generated descriptor for message `caffe.CenterLossParameter` (proto2):
#   optional uint32 num_output = 1;
#   optional FillerParameter center_filler = 2;
#   optional int32 axis = 3 [default = 1];
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_CENTERLOSSPARAMETER = _descriptor.Descriptor(
  name='CenterLossParameter',
  full_name='caffe.CenterLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.CenterLossParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='center_filler', full_name='caffe.CenterLossParameter.center_filler', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.CenterLossParameter.axis', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23568,
  serialized_end=23673,
)
# Generated descriptor for message `caffe.CtcLossParameter` (proto2):
#   optional uint32 alphabet_size = 1 [default = 0];
#   optional uint32 time_step = 3 [default = 0];   // field number 2 is unused
#   optional int32 blank_label = 4 [default = 0];
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_CTCLOSSPARAMETER = _descriptor.Descriptor(
  name='CtcLossParameter',
  full_name='caffe.CtcLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='alphabet_size', full_name='caffe.CtcLossParameter.alphabet_size', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='time_step', full_name='caffe.CtcLossParameter.time_step', index=1,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='blank_label', full_name='caffe.CtcLossParameter.blank_label', index=2,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23675,
  serialized_end=23765,
)
# Generated descriptor for message `caffe.ContinuationIndicatorParameter`
# (proto2):
#   optional uint32 time_step = 1 [default = 0];
#   optional uint32 batch_size = 2 [default = 0];
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_CONTINUATIONINDICATORPARAMETER = _descriptor.Descriptor(
  name='ContinuationIndicatorParameter',
  full_name='caffe.ContinuationIndicatorParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='time_step', full_name='caffe.ContinuationIndicatorParameter.time_step', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.ContinuationIndicatorParameter.batch_size', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23767,
  serialized_end=23844,
)
# Generated descriptor for message `caffe.LabelsequenceAccuracyParameter`
# (proto2):
#   optional int32 blank_label = 1 [default = 0];
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_LABELSEQUENCEACCURACYPARAMETER = _descriptor.Descriptor(
  name='LabelsequenceAccuracyParameter',
  full_name='caffe.LabelsequenceAccuracyParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='blank_label', full_name='caffe.LabelsequenceAccuracyParameter.blank_label', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23846,
  serialized_end=23902,
)
# Generated descriptor for message `caffe.SpatialTransformerParameter`
# (proto2):
#   optional string transform_type = 1 [default = "affine"];
#   optional string sampler_type = 2 [default = "bilinear"];
#   optional int32 output_H = 3;
#   optional int32 output_W = 4;
#   optional bool to_compute_dU = 5 [default = true];
#   optional double theta_1_1 = 6;  ... optional double theta_2_3 = 11;
#     // theta_i_j presumably pre-fix entries of the 2x3 affine transform
#     // matrix -- confirm against caffe.proto
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_SPATIALTRANSFORMERPARAMETER = _descriptor.Descriptor(
  name='SpatialTransformerParameter',
  full_name='caffe.SpatialTransformerParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='transform_type', full_name='caffe.SpatialTransformerParameter.transform_type', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("affine").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sampler_type', full_name='caffe.SpatialTransformerParameter.sampler_type', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("bilinear").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_H', full_name='caffe.SpatialTransformerParameter.output_H', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_W', full_name='caffe.SpatialTransformerParameter.output_W', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='to_compute_dU', full_name='caffe.SpatialTransformerParameter.to_compute_dU', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='theta_1_1', full_name='caffe.SpatialTransformerParameter.theta_1_1', index=5,
      number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='theta_1_2', full_name='caffe.SpatialTransformerParameter.theta_1_2', index=6,
      number=7, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='theta_1_3', full_name='caffe.SpatialTransformerParameter.theta_1_3', index=7,
      number=8, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='theta_2_1', full_name='caffe.SpatialTransformerParameter.theta_2_1', index=8,
      number=9, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='theta_2_2', full_name='caffe.SpatialTransformerParameter.theta_2_2', index=9,
      number=10, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='theta_2_3', full_name='caffe.SpatialTransformerParameter.theta_2_3', index=10,
      number=11, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23905,
  serialized_end=24177,
)
# Generated descriptor for message `caffe.PowerFileParameter` (proto2):
#   optional string shift_file = 1;
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_POWERFILEPARAMETER = _descriptor.Descriptor(
  name='PowerFileParameter',
  full_name='caffe.PowerFileParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='shift_file', full_name='caffe.PowerFileParameter.shift_file', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=24179,
  serialized_end=24219,
)
# Generated descriptor for message `caffe.STLossParameter` (proto2). Both
# fields are required (label=2):
#   required int32 output_H = 1;
#   required int32 output_W = 2;
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_STLOSSPARAMETER = _descriptor.Descriptor(
  name='STLossParameter',
  full_name='caffe.STLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='output_H', full_name='caffe.STLossParameter.output_H', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_W', full_name='caffe.STLossParameter.output_W', index=1,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=24221,
  serialized_end=24274,
)
# Generated descriptor for message `caffe.LocLossParameter` (proto2):
#   required double threshold = 1;
# NOTE(review): machine-generated by protoc -- do not edit the numeric data.
_LOCLOSSPARAMETER = _descriptor.Descriptor(
  name='LocLossParameter',
  full_name='caffe.LocLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='threshold', full_name='caffe.LocLossParameter.threshold', index=0,
      number=1, type=1, cpp_type=5, label=2,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=24276,
  serialized_end=24313,
)
# ---------------------------------------------------------------------------
# Generated cross-linking section. After all Descriptor objects are defined,
# protoc emits these assignments to resolve each message-typed field to its
# Descriptor, each enum-typed field to its EnumDescriptor, and to set every
# nested enum's containing type. Machine-generated -- do not edit by hand.
# ---------------------------------------------------------------------------
# Blob / datum / annotation containers.
_BLOBPROTO.fields_by_name['shape'].message_type = _BLOBSHAPE
_BLOBPROTOVECTOR.fields_by_name['blobs'].message_type = _BLOBPROTO
_MTCNNDATUM.fields_by_name['datum'].message_type = _DATUM
_MTCNNDATUM.fields_by_name['roi'].message_type = _MTCNNBBOX
_LABELMAP.fields_by_name['item'].message_type = _LABELMAPITEM
_BATCHSAMPLER.fields_by_name['sampler'].message_type = _SAMPLER
_BATCHSAMPLER.fields_by_name['sample_constraint'].message_type = _SAMPLECONSTRAINT
_EMITCONSTRAINT.fields_by_name['emit_type'].enum_type = _EMITCONSTRAINT_EMITTYPE
_EMITCONSTRAINT_EMITTYPE.containing_type = _EMITCONSTRAINT
_ANNOTATION.fields_by_name['bbox'].message_type = _NORMALIZEDBBOX
_ANNOTATIONGROUP.fields_by_name['annotation'].message_type = _ANNOTATION
_ANNOTATEDDATUM.fields_by_name['datum'].message_type = _DATUM
_ANNOTATEDDATUM.fields_by_name['type'].enum_type = _ANNOTATEDDATUM_ANNOTATIONTYPE
_ANNOTATEDDATUM.fields_by_name['annotation_group'].message_type = _ANNOTATIONGROUP
_ANNOTATEDDATUM_ANNOTATIONTYPE.containing_type = _ANNOTATEDDATUM
_FILLERPARAMETER.fields_by_name['variance_norm'].enum_type = _FILLERPARAMETER_VARIANCENORM
_FILLERPARAMETER_VARIANCENORM.containing_type = _FILLERPARAMETER
# Net / solver level messages.
_NETPARAMETER.fields_by_name['input_shape'].message_type = _BLOBSHAPE
_NETPARAMETER.fields_by_name['state'].message_type = _NETSTATE
_NETPARAMETER.fields_by_name['layer'].message_type = _LAYERPARAMETER
_NETPARAMETER.fields_by_name['layers'].message_type = _V1LAYERPARAMETER
_SOLVERPARAMETER.fields_by_name['net_param'].message_type = _NETPARAMETER
_SOLVERPARAMETER.fields_by_name['train_net_param'].message_type = _NETPARAMETER
_SOLVERPARAMETER.fields_by_name['test_net_param'].message_type = _NETPARAMETER
_SOLVERPARAMETER.fields_by_name['train_state'].message_type = _NETSTATE
_SOLVERPARAMETER.fields_by_name['test_state'].message_type = _NETSTATE
_SOLVERPARAMETER.fields_by_name['snapshot_format'].enum_type = _SOLVERPARAMETER_SNAPSHOTFORMAT
_SOLVERPARAMETER.fields_by_name['solver_mode'].enum_type = _SOLVERPARAMETER_SOLVERMODE
_SOLVERPARAMETER.fields_by_name['solver_type'].enum_type = _SOLVERPARAMETER_SOLVERTYPE
_SOLVERPARAMETER_SNAPSHOTFORMAT.containing_type = _SOLVERPARAMETER
_SOLVERPARAMETER_SOLVERMODE.containing_type = _SOLVERPARAMETER
_SOLVERPARAMETER_SOLVERTYPE.containing_type = _SOLVERPARAMETER
_SOLVERSTATE.fields_by_name['history'].message_type = _BLOBPROTO
_NETSTATE.fields_by_name['phase'].enum_type = _PHASE
_NETSTATERULE.fields_by_name['phase'].enum_type = _PHASE
_PARAMSPEC.fields_by_name['share_mode'].enum_type = _PARAMSPEC_DIMCHECKMODE
_PARAMSPEC_DIMCHECKMODE.containing_type = _PARAMSPEC
# Generated linkage for caffe.LayerParameter: one sub-message field per
# layer-specific parameter type (machine-generated -- do not edit by hand).
_LAYERPARAMETER.fields_by_name['phase'].enum_type = _PHASE
_LAYERPARAMETER.fields_by_name['param'].message_type = _PARAMSPEC
_LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO
_LAYERPARAMETER.fields_by_name['include'].message_type = _NETSTATERULE
_LAYERPARAMETER.fields_by_name['exclude'].message_type = _NETSTATERULE
_LAYERPARAMETER.fields_by_name['transform_param'].message_type = _TRANSFORMATIONPARAMETER
_LAYERPARAMETER.fields_by_name['loss_param'].message_type = _LOSSPARAMETER
_LAYERPARAMETER.fields_by_name['accuracy_param'].message_type = _ACCURACYPARAMETER
_LAYERPARAMETER.fields_by_name['annotated_data_param'].message_type = _ANNOTATEDDATAPARAMETER
_LAYERPARAMETER.fields_by_name['argmax_param'].message_type = _ARGMAXPARAMETER
_LAYERPARAMETER.fields_by_name['batch_norm_param'].message_type = _BATCHNORMPARAMETER
_LAYERPARAMETER.fields_by_name['bias_param'].message_type = _BIASPARAMETER
_LAYERPARAMETER.fields_by_name['center_loss_param'].message_type = _CENTERLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['concat_param'].message_type = _CONCATPARAMETER
_LAYERPARAMETER.fields_by_name['contrastive_loss_param'].message_type = _CONTRASTIVELOSSPARAMETER
_LAYERPARAMETER.fields_by_name['convolution_param'].message_type = _CONVOLUTIONPARAMETER
_LAYERPARAMETER.fields_by_name['crop_param'].message_type = _CROPPARAMETER
_LAYERPARAMETER.fields_by_name['data_param'].message_type = _DATAPARAMETER
_LAYERPARAMETER.fields_by_name['detection_evaluate_param'].message_type = _DETECTIONEVALUATEPARAMETER
_LAYERPARAMETER.fields_by_name['detection_output_param'].message_type = _DETECTIONOUTPUTPARAMETER
_LAYERPARAMETER.fields_by_name['dropout_param'].message_type = _DROPOUTPARAMETER
_LAYERPARAMETER.fields_by_name['dummy_data_param'].message_type = _DUMMYDATAPARAMETER
_LAYERPARAMETER.fields_by_name['eltwise_param'].message_type = _ELTWISEPARAMETER
_LAYERPARAMETER.fields_by_name['elu_param'].message_type = _ELUPARAMETER
_LAYERPARAMETER.fields_by_name['embed_param'].message_type = _EMBEDPARAMETER
_LAYERPARAMETER.fields_by_name['exp_param'].message_type = _EXPPARAMETER
_LAYERPARAMETER.fields_by_name['flatten_param'].message_type = _FLATTENPARAMETER
_LAYERPARAMETER.fields_by_name['hdf5_data_param'].message_type = _HDF5DATAPARAMETER
_LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER
_LAYERPARAMETER.fields_by_name['hinge_loss_param'].message_type = _HINGELOSSPARAMETER
_LAYERPARAMETER.fields_by_name['image_data_param'].message_type = _IMAGEDATAPARAMETER
_LAYERPARAMETER.fields_by_name['infogain_loss_param'].message_type = _INFOGAINLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['inner_product_param'].message_type = _INNERPRODUCTPARAMETER
_LAYERPARAMETER.fields_by_name['input_param'].message_type = _INPUTPARAMETER
_LAYERPARAMETER.fields_by_name['log_param'].message_type = _LOGPARAMETER
_LAYERPARAMETER.fields_by_name['lrn_param'].message_type = _LRNPARAMETER
_LAYERPARAMETER.fields_by_name['memory_data_param'].message_type = _MEMORYDATAPARAMETER
_LAYERPARAMETER.fields_by_name['multibox_loss_param'].message_type = _MULTIBOXLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['mvn_param'].message_type = _MVNPARAMETER
_LAYERPARAMETER.fields_by_name['norm_param'].message_type = _NORMALIZEPARAMETER
_LAYERPARAMETER.fields_by_name['predict_box_param'].message_type = _PREDICTBOXPARAMETER
_LAYERPARAMETER.fields_by_name['parameter_param'].message_type = _PARAMETERPARAMETER
_LAYERPARAMETER.fields_by_name['permute_param'].message_type = _PERMUTEPARAMETER
_LAYERPARAMETER.fields_by_name['pooling_param'].message_type = _POOLINGPARAMETER
_LAYERPARAMETER.fields_by_name['power_param'].message_type = _POWERPARAMETER
_LAYERPARAMETER.fields_by_name['prelu_param'].message_type = _PRELUPARAMETER
_LAYERPARAMETER.fields_by_name['prior_box_param'].message_type = _PRIORBOXPARAMETER
_LAYERPARAMETER.fields_by_name['python_param'].message_type = _PYTHONPARAMETER
_LAYERPARAMETER.fields_by_name['recurrent_param'].message_type = _RECURRENTPARAMETER
_LAYERPARAMETER.fields_by_name['reduction_param'].message_type = _REDUCTIONPARAMETER
_LAYERPARAMETER.fields_by_name['relu_param'].message_type = _RELUPARAMETER
_LAYERPARAMETER.fields_by_name['reshape_param'].message_type = _RESHAPEPARAMETER
_LAYERPARAMETER.fields_by_name['scale_param'].message_type = _SCALEPARAMETER
_LAYERPARAMETER.fields_by_name['sigmoid_param'].message_type = _SIGMOIDPARAMETER
_LAYERPARAMETER.fields_by_name['softmax_param'].message_type = _SOFTMAXPARAMETER
_LAYERPARAMETER.fields_by_name['spp_param'].message_type = _SPPPARAMETER
_LAYERPARAMETER.fields_by_name['slice_param'].message_type = _SLICEPARAMETER
_LAYERPARAMETER.fields_by_name['tanh_param'].message_type = _TANHPARAMETER
_LAYERPARAMETER.fields_by_name['threshold_param'].message_type = _THRESHOLDPARAMETER
_LAYERPARAMETER.fields_by_name['tile_param'].message_type = _TILEPARAMETER
_LAYERPARAMETER.fields_by_name['video_data_param'].message_type = _VIDEODATAPARAMETER
_LAYERPARAMETER.fields_by_name['window_data_param'].message_type = _WINDOWDATAPARAMETER
_LAYERPARAMETER.fields_by_name['flip_param'].message_type = _FLIPPARAMETER
_LAYERPARAMETER.fields_by_name['lstm_param'].message_type = _LSTMPARAMETER
_LAYERPARAMETER.fields_by_name['ctc_param'].message_type = _CTCPARAMETER
_LAYERPARAMETER.fields_by_name['transpose_param'].message_type = _TRANSPOSEPARAMETER
_LAYERPARAMETER.fields_by_name['reverse_param'].message_type = _REVERSEPARAMETER
_LAYERPARAMETER.fields_by_name['ctc_loss_param'].message_type = _CTCLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['continuation_indicator_param'].message_type = _CONTINUATIONINDICATORPARAMETER
_LAYERPARAMETER.fields_by_name['labelsequence_accuracy_param'].message_type = _LABELSEQUENCEACCURACYPARAMETER
_LAYERPARAMETER.fields_by_name['st_param'].message_type = _SPATIALTRANSFORMERPARAMETER
_LAYERPARAMETER.fields_by_name['st_loss_param'].message_type = _STLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['power_file_param'].message_type = _POWERFILEPARAMETER
_LAYERPARAMETER.fields_by_name['loc_loss_param'].message_type = _LOCLOSSPARAMETER
# Generated linkage for the per-layer parameter messages themselves:
# transformation/resize/loss sub-messages and each message's nested enums
# (machine-generated -- do not edit by hand).
_TRANSFORMATIONPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['noise_param'].message_type = _NOISEPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['distort_param'].message_type = _DISTORTIONPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['expand_param'].message_type = _EXPANSIONPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['emit_constraint'].message_type = _EMITCONSTRAINT
_RESIZEPARAMETER.fields_by_name['resize_mode'].enum_type = _RESIZEPARAMETER_RESIZE_MODE
_RESIZEPARAMETER.fields_by_name['pad_mode'].enum_type = _RESIZEPARAMETER_PAD_MODE
_RESIZEPARAMETER.fields_by_name['interp_mode'].enum_type = _RESIZEPARAMETER_INTERP_MODE
_RESIZEPARAMETER_RESIZE_MODE.containing_type = _RESIZEPARAMETER
_RESIZEPARAMETER_PAD_MODE.containing_type = _RESIZEPARAMETER
_RESIZEPARAMETER_INTERP_MODE.containing_type = _RESIZEPARAMETER
_NOISEPARAMETER.fields_by_name['saltpepper_param'].message_type = _SALTPEPPERPARAMETER
_LOSSPARAMETER.fields_by_name['normalization'].enum_type = _LOSSPARAMETER_NORMALIZATIONMODE
_LOSSPARAMETER_NORMALIZATIONMODE.containing_type = _LOSSPARAMETER
_ANNOTATEDDATAPARAMETER.fields_by_name['batch_sampler'].message_type = _BATCHSAMPLER
_ANNOTATEDDATAPARAMETER.fields_by_name['anno_type'].enum_type = _ANNOTATEDDATUM_ANNOTATIONTYPE
_BIASPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER
_CONVOLUTIONPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_CONVOLUTIONPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_CONVOLUTIONPARAMETER.fields_by_name['engine'].enum_type = _CONVOLUTIONPARAMETER_ENGINE
_CONVOLUTIONPARAMETER_ENGINE.containing_type = _CONVOLUTIONPARAMETER
_DATAPARAMETER.fields_by_name['backend'].enum_type = _DATAPARAMETER_DB
_DATAPARAMETER_DB.containing_type = _DATAPARAMETER
_DETECTIONEVALUATEPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER
_SAVEOUTPUTPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER
_DETECTIONOUTPUTPARAMETER.fields_by_name['nms_param'].message_type = _NONMAXIMUMSUPPRESSIONPARAMETER
_DETECTIONOUTPUTPARAMETER.fields_by_name['save_output_param'].message_type = _SAVEOUTPUTPARAMETER
_DETECTIONOUTPUTPARAMETER.fields_by_name['code_type'].enum_type = _PRIORBOXPARAMETER_CODETYPE
_DUMMYDATAPARAMETER.fields_by_name['data_filler'].message_type = _FILLERPARAMETER
_DUMMYDATAPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_ELTWISEPARAMETER.fields_by_name['operation'].enum_type = _ELTWISEPARAMETER_ELTWISEOP
_ELTWISEPARAMETER_ELTWISEOP.containing_type = _ELTWISEPARAMETER
_EMBEDPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_EMBEDPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_HINGELOSSPARAMETER.fields_by_name['norm'].enum_type = _HINGELOSSPARAMETER_NORM
_HINGELOSSPARAMETER_NORM.containing_type = _HINGELOSSPARAMETER
_INNERPRODUCTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_INNERPRODUCTPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_INPUTPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_LRNPARAMETER.fields_by_name['norm_region'].enum_type = _LRNPARAMETER_NORMREGION
_LRNPARAMETER.fields_by_name['engine'].enum_type = _LRNPARAMETER_ENGINE
_LRNPARAMETER_NORMREGION.containing_type = _LRNPARAMETER
_LRNPARAMETER_ENGINE.containing_type = _LRNPARAMETER
_MULTIBOXLOSSPARAMETER.fields_by_name['loc_loss_type'].enum_type = _MULTIBOXLOSSPARAMETER_LOCLOSSTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['conf_loss_type'].enum_type = _MULTIBOXLOSSPARAMETER_CONFLOSSTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['match_type'].enum_type = _MULTIBOXLOSSPARAMETER_MATCHTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['code_type'].enum_type = _PRIORBOXPARAMETER_CODETYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['mining_type'].enum_type = _MULTIBOXLOSSPARAMETER_MININGTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['nms_param'].message_type = _NONMAXIMUMSUPPRESSIONPARAMETER
_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_MULTIBOXLOSSPARAMETER_MATCHTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_MULTIBOXLOSSPARAMETER_MININGTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_NORMALIZEPARAMETER.fields_by_name['scale_filler'].message_type = _FILLERPARAMETER
_PARAMETERPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_POOLINGPARAMETER.fields_by_name['pool'].enum_type = _POOLINGPARAMETER_POOLMETHOD
_POOLINGPARAMETER.fields_by_name['engine'].enum_type = _POOLINGPARAMETER_ENGINE
_POOLINGPARAMETER_POOLMETHOD.containing_type = _POOLINGPARAMETER
_POOLINGPARAMETER_ENGINE.containing_type = _POOLINGPARAMETER
_PRIORBOXPARAMETER_CODETYPE.containing_type = _PRIORBOXPARAMETER
_RECURRENTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_RECURRENTPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_REDUCTIONPARAMETER.fields_by_name['operation'].enum_type = _REDUCTIONPARAMETER_REDUCTIONOP
_REDUCTIONPARAMETER_REDUCTIONOP.containing_type = _REDUCTIONPARAMETER
_RELUPARAMETER.fields_by_name['engine'].enum_type = _RELUPARAMETER_ENGINE
_RELUPARAMETER_ENGINE.containing_type = _RELUPARAMETER
_RESHAPEPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_SCALEPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER
_SCALEPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_SIGMOIDPARAMETER.fields_by_name['engine'].enum_type = _SIGMOIDPARAMETER_ENGINE
_SIGMOIDPARAMETER_ENGINE.containing_type = _SIGMOIDPARAMETER
_SOFTMAXPARAMETER.fields_by_name['engine'].enum_type = _SOFTMAXPARAMETER_ENGINE
_SOFTMAXPARAMETER_ENGINE.containing_type = _SOFTMAXPARAMETER
_TANHPARAMETER.fields_by_name['engine'].enum_type = _TANHPARAMETER_ENGINE
_TANHPARAMETER_ENGINE.containing_type = _TANHPARAMETER
_VIDEODATAPARAMETER.fields_by_name['video_type'].enum_type = _VIDEODATAPARAMETER_VIDEOTYPE
_VIDEODATAPARAMETER_VIDEOTYPE.containing_type = _VIDEODATAPARAMETER
_SPPPARAMETER.fields_by_name['pool'].enum_type = _SPPPARAMETER_POOLMETHOD
_SPPPARAMETER.fields_by_name['engine'].enum_type = _SPPPARAMETER_ENGINE
_SPPPARAMETER_POOLMETHOD.containing_type = _SPPPARAMETER
_SPPPARAMETER_ENGINE.containing_type = _SPPPARAMETER
# Generated linkage for caffe.V1LayerParameter / caffe.V0LayerParameter --
# presumably the legacy layer-definition formats retained so old serialized
# nets can still be parsed (NOTE(review): confirm against caffe.proto).
# Machine-generated -- do not edit by hand.
_V1LAYERPARAMETER.fields_by_name['include'].message_type = _NETSTATERULE
_V1LAYERPARAMETER.fields_by_name['exclude'].message_type = _NETSTATERULE
_V1LAYERPARAMETER.fields_by_name['type'].enum_type = _V1LAYERPARAMETER_LAYERTYPE
_V1LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO
_V1LAYERPARAMETER.fields_by_name['blob_share_mode'].enum_type = _V1LAYERPARAMETER_DIMCHECKMODE
_V1LAYERPARAMETER.fields_by_name['accuracy_param'].message_type = _ACCURACYPARAMETER
_V1LAYERPARAMETER.fields_by_name['argmax_param'].message_type = _ARGMAXPARAMETER
_V1LAYERPARAMETER.fields_by_name['concat_param'].message_type = _CONCATPARAMETER
_V1LAYERPARAMETER.fields_by_name['contrastive_loss_param'].message_type = _CONTRASTIVELOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['convolution_param'].message_type = _CONVOLUTIONPARAMETER
_V1LAYERPARAMETER.fields_by_name['data_param'].message_type = _DATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['dropout_param'].message_type = _DROPOUTPARAMETER
_V1LAYERPARAMETER.fields_by_name['dummy_data_param'].message_type = _DUMMYDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['eltwise_param'].message_type = _ELTWISEPARAMETER
_V1LAYERPARAMETER.fields_by_name['exp_param'].message_type = _EXPPARAMETER
_V1LAYERPARAMETER.fields_by_name['hdf5_data_param'].message_type = _HDF5DATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER
_V1LAYERPARAMETER.fields_by_name['hinge_loss_param'].message_type = _HINGELOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['image_data_param'].message_type = _IMAGEDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['infogain_loss_param'].message_type = _INFOGAINLOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['inner_product_param'].message_type = _INNERPRODUCTPARAMETER
_V1LAYERPARAMETER.fields_by_name['lrn_param'].message_type = _LRNPARAMETER
_V1LAYERPARAMETER.fields_by_name['memory_data_param'].message_type = _MEMORYDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['mvn_param'].message_type = _MVNPARAMETER
_V1LAYERPARAMETER.fields_by_name['pooling_param'].message_type = _POOLINGPARAMETER
_V1LAYERPARAMETER.fields_by_name['power_param'].message_type = _POWERPARAMETER
_V1LAYERPARAMETER.fields_by_name['relu_param'].message_type = _RELUPARAMETER
_V1LAYERPARAMETER.fields_by_name['sigmoid_param'].message_type = _SIGMOIDPARAMETER
_V1LAYERPARAMETER.fields_by_name['softmax_param'].message_type = _SOFTMAXPARAMETER
_V1LAYERPARAMETER.fields_by_name['slice_param'].message_type = _SLICEPARAMETER
_V1LAYERPARAMETER.fields_by_name['tanh_param'].message_type = _TANHPARAMETER
_V1LAYERPARAMETER.fields_by_name['threshold_param'].message_type = _THRESHOLDPARAMETER
_V1LAYERPARAMETER.fields_by_name['window_data_param'].message_type = _WINDOWDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['transform_param'].message_type = _TRANSFORMATIONPARAMETER
_V1LAYERPARAMETER.fields_by_name['loss_param'].message_type = _LOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['layer'].message_type = _V0LAYERPARAMETER
_V1LAYERPARAMETER_LAYERTYPE.containing_type = _V1LAYERPARAMETER
_V1LAYERPARAMETER_DIMCHECKMODE.containing_type = _V1LAYERPARAMETER
_V0LAYERPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_V0LAYERPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_V0LAYERPARAMETER.fields_by_name['pool'].enum_type = _V0LAYERPARAMETER_POOLMETHOD
_V0LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO
_V0LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER
_V0LAYERPARAMETER_POOLMETHOD.containing_type = _V0LAYERPARAMETER
_PRELUPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER
_LSTMPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_LSTMPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_CTCPARAMETER.fields_by_name['decode_type'].enum_type = _CTCPARAMETER_DECODER
_CTCPARAMETER_DECODER.containing_type = _CTCPARAMETER
_CENTERLOSSPARAMETER.fields_by_name['center_filler'].message_type = _FILLERPARAMETER
DESCRIPTOR.message_types_by_name['BlobShape'] = _BLOBSHAPE
DESCRIPTOR.message_types_by_name['BlobProto'] = _BLOBPROTO
DESCRIPTOR.message_types_by_name['BlobProtoVector'] = _BLOBPROTOVECTOR
DESCRIPTOR.message_types_by_name['Datum'] = _DATUM
DESCRIPTOR.message_types_by_name['MTCNNBBox'] = _MTCNNBBOX
DESCRIPTOR.message_types_by_name['MTCNNDatum'] = _MTCNNDATUM
DESCRIPTOR.message_types_by_name['LabelMapItem'] = _LABELMAPITEM
DESCRIPTOR.message_types_by_name['LabelMap'] = _LABELMAP
DESCRIPTOR.message_types_by_name['Sampler'] = _SAMPLER
DESCRIPTOR.message_types_by_name['SampleConstraint'] = _SAMPLECONSTRAINT
DESCRIPTOR.message_types_by_name['BatchSampler'] = _BATCHSAMPLER
DESCRIPTOR.message_types_by_name['EmitConstraint'] = _EMITCONSTRAINT
DESCRIPTOR.message_types_by_name['NormalizedBBox'] = _NORMALIZEDBBOX
DESCRIPTOR.message_types_by_name['Annotation'] = _ANNOTATION
DESCRIPTOR.message_types_by_name['AnnotationGroup'] = _ANNOTATIONGROUP
DESCRIPTOR.message_types_by_name['AnnotatedDatum'] = _ANNOTATEDDATUM
DESCRIPTOR.message_types_by_name['FillerParameter'] = _FILLERPARAMETER
DESCRIPTOR.message_types_by_name['NetParameter'] = _NETPARAMETER
DESCRIPTOR.message_types_by_name['SolverParameter'] = _SOLVERPARAMETER
DESCRIPTOR.message_types_by_name['SolverState'] = _SOLVERSTATE
DESCRIPTOR.message_types_by_name['NetState'] = _NETSTATE
DESCRIPTOR.message_types_by_name['NetStateRule'] = _NETSTATERULE
DESCRIPTOR.message_types_by_name['ParamSpec'] = _PARAMSPEC
DESCRIPTOR.message_types_by_name['PredictBoxParameter'] = _PREDICTBOXPARAMETER
DESCRIPTOR.message_types_by_name['LayerParameter'] = _LAYERPARAMETER
DESCRIPTOR.message_types_by_name['FlipParameter'] = _FLIPPARAMETER
DESCRIPTOR.message_types_by_name['TransformationParameter'] = _TRANSFORMATIONPARAMETER
DESCRIPTOR.message_types_by_name['ResizeParameter'] = _RESIZEPARAMETER
DESCRIPTOR.message_types_by_name['SaltPepperParameter'] = _SALTPEPPERPARAMETER
DESCRIPTOR.message_types_by_name['NoiseParameter'] = _NOISEPARAMETER
DESCRIPTOR.message_types_by_name['DistortionParameter'] = _DISTORTIONPARAMETER
DESCRIPTOR.message_types_by_name['ExpansionParameter'] = _EXPANSIONPARAMETER
DESCRIPTOR.message_types_by_name['LossParameter'] = _LOSSPARAMETER
DESCRIPTOR.message_types_by_name['AccuracyParameter'] = _ACCURACYPARAMETER
DESCRIPTOR.message_types_by_name['AnnotatedDataParameter'] = _ANNOTATEDDATAPARAMETER
DESCRIPTOR.message_types_by_name['ArgMaxParameter'] = _ARGMAXPARAMETER
DESCRIPTOR.message_types_by_name['ConcatParameter'] = _CONCATPARAMETER
DESCRIPTOR.message_types_by_name['BatchNormParameter'] = _BATCHNORMPARAMETER
DESCRIPTOR.message_types_by_name['BiasParameter'] = _BIASPARAMETER
DESCRIPTOR.message_types_by_name['ContrastiveLossParameter'] = _CONTRASTIVELOSSPARAMETER
DESCRIPTOR.message_types_by_name['ConvolutionParameter'] = _CONVOLUTIONPARAMETER
DESCRIPTOR.message_types_by_name['CropParameter'] = _CROPPARAMETER
DESCRIPTOR.message_types_by_name['DataParameter'] = _DATAPARAMETER
DESCRIPTOR.message_types_by_name['DetectionEvaluateParameter'] = _DETECTIONEVALUATEPARAMETER
DESCRIPTOR.message_types_by_name['NonMaximumSuppressionParameter'] = _NONMAXIMUMSUPPRESSIONPARAMETER
DESCRIPTOR.message_types_by_name['SaveOutputParameter'] = _SAVEOUTPUTPARAMETER
DESCRIPTOR.message_types_by_name['DetectionOutputParameter'] = _DETECTIONOUTPUTPARAMETER
DESCRIPTOR.message_types_by_name['DropoutParameter'] = _DROPOUTPARAMETER
DESCRIPTOR.message_types_by_name['DummyDataParameter'] = _DUMMYDATAPARAMETER
DESCRIPTOR.message_types_by_name['EltwiseParameter'] = _ELTWISEPARAMETER
DESCRIPTOR.message_types_by_name['ELUParameter'] = _ELUPARAMETER
DESCRIPTOR.message_types_by_name['EmbedParameter'] = _EMBEDPARAMETER
DESCRIPTOR.message_types_by_name['ExpParameter'] = _EXPPARAMETER
DESCRIPTOR.message_types_by_name['FlattenParameter'] = _FLATTENPARAMETER
DESCRIPTOR.message_types_by_name['HDF5DataParameter'] = _HDF5DATAPARAMETER
DESCRIPTOR.message_types_by_name['HDF5OutputParameter'] = _HDF5OUTPUTPARAMETER
DESCRIPTOR.message_types_by_name['HingeLossParameter'] = _HINGELOSSPARAMETER
DESCRIPTOR.message_types_by_name['ImageDataParameter'] = _IMAGEDATAPARAMETER
DESCRIPTOR.message_types_by_name['InfogainLossParameter'] = _INFOGAINLOSSPARAMETER
DESCRIPTOR.message_types_by_name['InnerProductParameter'] = _INNERPRODUCTPARAMETER
DESCRIPTOR.message_types_by_name['InputParameter'] = _INPUTPARAMETER
DESCRIPTOR.message_types_by_name['LogParameter'] = _LOGPARAMETER
DESCRIPTOR.message_types_by_name['LRNParameter'] = _LRNPARAMETER
DESCRIPTOR.message_types_by_name['MemoryDataParameter'] = _MEMORYDATAPARAMETER
DESCRIPTOR.message_types_by_name['MultiBoxLossParameter'] = _MULTIBOXLOSSPARAMETER
DESCRIPTOR.message_types_by_name['MVNParameter'] = _MVNPARAMETER
DESCRIPTOR.message_types_by_name['NormalizeParameter'] = _NORMALIZEPARAMETER
DESCRIPTOR.message_types_by_name['ParameterParameter'] = _PARAMETERPARAMETER
DESCRIPTOR.message_types_by_name['PermuteParameter'] = _PERMUTEPARAMETER
DESCRIPTOR.message_types_by_name['PoolingParameter'] = _POOLINGPARAMETER
DESCRIPTOR.message_types_by_name['PowerParameter'] = _POWERPARAMETER
DESCRIPTOR.message_types_by_name['PriorBoxParameter'] = _PRIORBOXPARAMETER
DESCRIPTOR.message_types_by_name['PythonParameter'] = _PYTHONPARAMETER
DESCRIPTOR.message_types_by_name['RecurrentParameter'] = _RECURRENTPARAMETER
DESCRIPTOR.message_types_by_name['ReductionParameter'] = _REDUCTIONPARAMETER
DESCRIPTOR.message_types_by_name['ReLUParameter'] = _RELUPARAMETER
DESCRIPTOR.message_types_by_name['ReshapeParameter'] = _RESHAPEPARAMETER
DESCRIPTOR.message_types_by_name['ScaleParameter'] = _SCALEPARAMETER
DESCRIPTOR.message_types_by_name['SigmoidParameter'] = _SIGMOIDPARAMETER
DESCRIPTOR.message_types_by_name['SliceParameter'] = _SLICEPARAMETER
DESCRIPTOR.message_types_by_name['SoftmaxParameter'] = _SOFTMAXPARAMETER
DESCRIPTOR.message_types_by_name['TanHParameter'] = _TANHPARAMETER
DESCRIPTOR.message_types_by_name['TileParameter'] = _TILEPARAMETER
DESCRIPTOR.message_types_by_name['ThresholdParameter'] = _THRESHOLDPARAMETER
DESCRIPTOR.message_types_by_name['VideoDataParameter'] = _VIDEODATAPARAMETER
DESCRIPTOR.message_types_by_name['WindowDataParameter'] = _WINDOWDATAPARAMETER
DESCRIPTOR.message_types_by_name['SPPParameter'] = _SPPPARAMETER
DESCRIPTOR.message_types_by_name['V1LayerParameter'] = _V1LAYERPARAMETER
DESCRIPTOR.message_types_by_name['V0LayerParameter'] = _V0LAYERPARAMETER
DESCRIPTOR.message_types_by_name['PReLUParameter'] = _PRELUPARAMETER
DESCRIPTOR.message_types_by_name['TransposeParameter'] = _TRANSPOSEPARAMETER
DESCRIPTOR.message_types_by_name['ReverseParameter'] = _REVERSEPARAMETER
DESCRIPTOR.message_types_by_name['LSTMParameter'] = _LSTMPARAMETER
DESCRIPTOR.message_types_by_name['CTCParameter'] = _CTCPARAMETER
DESCRIPTOR.message_types_by_name['CenterLossParameter'] = _CENTERLOSSPARAMETER
DESCRIPTOR.message_types_by_name['CtcLossParameter'] = _CTCLOSSPARAMETER
DESCRIPTOR.message_types_by_name['ContinuationIndicatorParameter'] = _CONTINUATIONINDICATORPARAMETER
DESCRIPTOR.message_types_by_name['LabelsequenceAccuracyParameter'] = _LABELSEQUENCEACCURACYPARAMETER
DESCRIPTOR.message_types_by_name['SpatialTransformerParameter'] = _SPATIALTRANSFORMERPARAMETER
DESCRIPTOR.message_types_by_name['PowerFileParameter'] = _POWERFILEPARAMETER
DESCRIPTOR.message_types_by_name['STLossParameter'] = _STLOSSPARAMETER
DESCRIPTOR.message_types_by_name['LocLossParameter'] = _LOCLOSSPARAMETER
DESCRIPTOR.enum_types_by_name['Phase'] = _PHASE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
BlobShape = _reflection.GeneratedProtocolMessageType('BlobShape', (_message.Message,), dict(
DESCRIPTOR = _BLOBSHAPE,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BlobShape)
))
_sym_db.RegisterMessage(BlobShape)
BlobProto = _reflection.GeneratedProtocolMessageType('BlobProto', (_message.Message,), dict(
DESCRIPTOR = _BLOBPROTO,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BlobProto)
))
_sym_db.RegisterMessage(BlobProto)
BlobProtoVector = _reflection.GeneratedProtocolMessageType('BlobProtoVector', (_message.Message,), dict(
DESCRIPTOR = _BLOBPROTOVECTOR,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BlobProtoVector)
))
_sym_db.RegisterMessage(BlobProtoVector)
Datum = _reflection.GeneratedProtocolMessageType('Datum', (_message.Message,), dict(
DESCRIPTOR = _DATUM,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.Datum)
))
_sym_db.RegisterMessage(Datum)
MTCNNBBox = _reflection.GeneratedProtocolMessageType('MTCNNBBox', (_message.Message,), dict(
DESCRIPTOR = _MTCNNBBOX,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MTCNNBBox)
))
_sym_db.RegisterMessage(MTCNNBBox)
MTCNNDatum = _reflection.GeneratedProtocolMessageType('MTCNNDatum', (_message.Message,), dict(
DESCRIPTOR = _MTCNNDATUM,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MTCNNDatum)
))
_sym_db.RegisterMessage(MTCNNDatum)
LabelMapItem = _reflection.GeneratedProtocolMessageType('LabelMapItem', (_message.Message,), dict(
DESCRIPTOR = _LABELMAPITEM,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LabelMapItem)
))
_sym_db.RegisterMessage(LabelMapItem)
LabelMap = _reflection.GeneratedProtocolMessageType('LabelMap', (_message.Message,), dict(
DESCRIPTOR = _LABELMAP,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LabelMap)
))
_sym_db.RegisterMessage(LabelMap)
Sampler = _reflection.GeneratedProtocolMessageType('Sampler', (_message.Message,), dict(
DESCRIPTOR = _SAMPLER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.Sampler)
))
_sym_db.RegisterMessage(Sampler)
SampleConstraint = _reflection.GeneratedProtocolMessageType('SampleConstraint', (_message.Message,), dict(
DESCRIPTOR = _SAMPLECONSTRAINT,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SampleConstraint)
))
_sym_db.RegisterMessage(SampleConstraint)
BatchSampler = _reflection.GeneratedProtocolMessageType('BatchSampler', (_message.Message,), dict(
DESCRIPTOR = _BATCHSAMPLER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BatchSampler)
))
_sym_db.RegisterMessage(BatchSampler)
EmitConstraint = _reflection.GeneratedProtocolMessageType('EmitConstraint', (_message.Message,), dict(
DESCRIPTOR = _EMITCONSTRAINT,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.EmitConstraint)
))
_sym_db.RegisterMessage(EmitConstraint)
NormalizedBBox = _reflection.GeneratedProtocolMessageType('NormalizedBBox', (_message.Message,), dict(
DESCRIPTOR = _NORMALIZEDBBOX,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NormalizedBBox)
))
_sym_db.RegisterMessage(NormalizedBBox)
Annotation = _reflection.GeneratedProtocolMessageType('Annotation', (_message.Message,), dict(
DESCRIPTOR = _ANNOTATION,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.Annotation)
))
_sym_db.RegisterMessage(Annotation)
AnnotationGroup = _reflection.GeneratedProtocolMessageType('AnnotationGroup', (_message.Message,), dict(
DESCRIPTOR = _ANNOTATIONGROUP,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.AnnotationGroup)
))
_sym_db.RegisterMessage(AnnotationGroup)
AnnotatedDatum = _reflection.GeneratedProtocolMessageType('AnnotatedDatum', (_message.Message,), dict(
DESCRIPTOR = _ANNOTATEDDATUM,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.AnnotatedDatum)
))
_sym_db.RegisterMessage(AnnotatedDatum)
FillerParameter = _reflection.GeneratedProtocolMessageType('FillerParameter', (_message.Message,), dict(
DESCRIPTOR = _FILLERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.FillerParameter)
))
_sym_db.RegisterMessage(FillerParameter)
NetParameter = _reflection.GeneratedProtocolMessageType('NetParameter', (_message.Message,), dict(
DESCRIPTOR = _NETPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NetParameter)
))
_sym_db.RegisterMessage(NetParameter)
SolverParameter = _reflection.GeneratedProtocolMessageType('SolverParameter', (_message.Message,), dict(
DESCRIPTOR = _SOLVERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SolverParameter)
))
_sym_db.RegisterMessage(SolverParameter)
SolverState = _reflection.GeneratedProtocolMessageType('SolverState', (_message.Message,), dict(
DESCRIPTOR = _SOLVERSTATE,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SolverState)
))
_sym_db.RegisterMessage(SolverState)
NetState = _reflection.GeneratedProtocolMessageType('NetState', (_message.Message,), dict(
DESCRIPTOR = _NETSTATE,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NetState)
))
_sym_db.RegisterMessage(NetState)
NetStateRule = _reflection.GeneratedProtocolMessageType('NetStateRule', (_message.Message,), dict(
DESCRIPTOR = _NETSTATERULE,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NetStateRule)
))
_sym_db.RegisterMessage(NetStateRule)
ParamSpec = _reflection.GeneratedProtocolMessageType('ParamSpec', (_message.Message,), dict(
DESCRIPTOR = _PARAMSPEC,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ParamSpec)
))
_sym_db.RegisterMessage(ParamSpec)
PredictBoxParameter = _reflection.GeneratedProtocolMessageType('PredictBoxParameter', (_message.Message,), dict(
DESCRIPTOR = _PREDICTBOXPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PredictBoxParameter)
))
_sym_db.RegisterMessage(PredictBoxParameter)
LayerParameter = _reflection.GeneratedProtocolMessageType('LayerParameter', (_message.Message,), dict(
DESCRIPTOR = _LAYERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LayerParameter)
))
_sym_db.RegisterMessage(LayerParameter)
FlipParameter = _reflection.GeneratedProtocolMessageType('FlipParameter', (_message.Message,), dict(
DESCRIPTOR = _FLIPPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.FlipParameter)
))
_sym_db.RegisterMessage(FlipParameter)
TransformationParameter = _reflection.GeneratedProtocolMessageType('TransformationParameter', (_message.Message,), dict(
DESCRIPTOR = _TRANSFORMATIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.TransformationParameter)
))
_sym_db.RegisterMessage(TransformationParameter)
ResizeParameter = _reflection.GeneratedProtocolMessageType('ResizeParameter', (_message.Message,), dict(
DESCRIPTOR = _RESIZEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ResizeParameter)
))
_sym_db.RegisterMessage(ResizeParameter)
SaltPepperParameter = _reflection.GeneratedProtocolMessageType('SaltPepperParameter', (_message.Message,), dict(
DESCRIPTOR = _SALTPEPPERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SaltPepperParameter)
))
_sym_db.RegisterMessage(SaltPepperParameter)
NoiseParameter = _reflection.GeneratedProtocolMessageType('NoiseParameter', (_message.Message,), dict(
DESCRIPTOR = _NOISEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NoiseParameter)
))
_sym_db.RegisterMessage(NoiseParameter)
DistortionParameter = _reflection.GeneratedProtocolMessageType('DistortionParameter', (_message.Message,), dict(
DESCRIPTOR = _DISTORTIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DistortionParameter)
))
_sym_db.RegisterMessage(DistortionParameter)
ExpansionParameter = _reflection.GeneratedProtocolMessageType('ExpansionParameter', (_message.Message,), dict(
DESCRIPTOR = _EXPANSIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ExpansionParameter)
))
_sym_db.RegisterMessage(ExpansionParameter)
LossParameter = _reflection.GeneratedProtocolMessageType('LossParameter', (_message.Message,), dict(
DESCRIPTOR = _LOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LossParameter)
))
_sym_db.RegisterMessage(LossParameter)
AccuracyParameter = _reflection.GeneratedProtocolMessageType('AccuracyParameter', (_message.Message,), dict(
DESCRIPTOR = _ACCURACYPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.AccuracyParameter)
))
_sym_db.RegisterMessage(AccuracyParameter)
AnnotatedDataParameter = _reflection.GeneratedProtocolMessageType('AnnotatedDataParameter', (_message.Message,), dict(
DESCRIPTOR = _ANNOTATEDDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.AnnotatedDataParameter)
))
_sym_db.RegisterMessage(AnnotatedDataParameter)
ArgMaxParameter = _reflection.GeneratedProtocolMessageType('ArgMaxParameter', (_message.Message,), dict(
DESCRIPTOR = _ARGMAXPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ArgMaxParameter)
))
_sym_db.RegisterMessage(ArgMaxParameter)
ConcatParameter = _reflection.GeneratedProtocolMessageType('ConcatParameter', (_message.Message,), dict(
DESCRIPTOR = _CONCATPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ConcatParameter)
))
_sym_db.RegisterMessage(ConcatParameter)
BatchNormParameter = _reflection.GeneratedProtocolMessageType('BatchNormParameter', (_message.Message,), dict(
DESCRIPTOR = _BATCHNORMPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BatchNormParameter)
))
_sym_db.RegisterMessage(BatchNormParameter)
BiasParameter = _reflection.GeneratedProtocolMessageType('BiasParameter', (_message.Message,), dict(
DESCRIPTOR = _BIASPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BiasParameter)
))
_sym_db.RegisterMessage(BiasParameter)
ContrastiveLossParameter = _reflection.GeneratedProtocolMessageType('ContrastiveLossParameter', (_message.Message,), dict(
DESCRIPTOR = _CONTRASTIVELOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ContrastiveLossParameter)
))
_sym_db.RegisterMessage(ContrastiveLossParameter)
ConvolutionParameter = _reflection.GeneratedProtocolMessageType('ConvolutionParameter', (_message.Message,), dict(
DESCRIPTOR = _CONVOLUTIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ConvolutionParameter)
))
_sym_db.RegisterMessage(ConvolutionParameter)
CropParameter = _reflection.GeneratedProtocolMessageType('CropParameter', (_message.Message,), dict(
DESCRIPTOR = _CROPPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.CropParameter)
))
_sym_db.RegisterMessage(CropParameter)
DataParameter = _reflection.GeneratedProtocolMessageType('DataParameter', (_message.Message,), dict(
DESCRIPTOR = _DATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DataParameter)
))
_sym_db.RegisterMessage(DataParameter)
DetectionEvaluateParameter = _reflection.GeneratedProtocolMessageType('DetectionEvaluateParameter', (_message.Message,), dict(
DESCRIPTOR = _DETECTIONEVALUATEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DetectionEvaluateParameter)
))
_sym_db.RegisterMessage(DetectionEvaluateParameter)
NonMaximumSuppressionParameter = _reflection.GeneratedProtocolMessageType('NonMaximumSuppressionParameter', (_message.Message,), dict(
DESCRIPTOR = _NONMAXIMUMSUPPRESSIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NonMaximumSuppressionParameter)
))
_sym_db.RegisterMessage(NonMaximumSuppressionParameter)
SaveOutputParameter = _reflection.GeneratedProtocolMessageType('SaveOutputParameter', (_message.Message,), dict(
DESCRIPTOR = _SAVEOUTPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SaveOutputParameter)
))
_sym_db.RegisterMessage(SaveOutputParameter)
DetectionOutputParameter = _reflection.GeneratedProtocolMessageType('DetectionOutputParameter', (_message.Message,), dict(
DESCRIPTOR = _DETECTIONOUTPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DetectionOutputParameter)
))
_sym_db.RegisterMessage(DetectionOutputParameter)
DropoutParameter = _reflection.GeneratedProtocolMessageType('DropoutParameter', (_message.Message,), dict(
DESCRIPTOR = _DROPOUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DropoutParameter)
))
_sym_db.RegisterMessage(DropoutParameter)
DummyDataParameter = _reflection.GeneratedProtocolMessageType('DummyDataParameter', (_message.Message,), dict(
DESCRIPTOR = _DUMMYDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DummyDataParameter)
))
_sym_db.RegisterMessage(DummyDataParameter)
EltwiseParameter = _reflection.GeneratedProtocolMessageType('EltwiseParameter', (_message.Message,), dict(
DESCRIPTOR = _ELTWISEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.EltwiseParameter)
))
_sym_db.RegisterMessage(EltwiseParameter)
ELUParameter = _reflection.GeneratedProtocolMessageType('ELUParameter', (_message.Message,), dict(
DESCRIPTOR = _ELUPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ELUParameter)
))
_sym_db.RegisterMessage(ELUParameter)
EmbedParameter = _reflection.GeneratedProtocolMessageType('EmbedParameter', (_message.Message,), dict(
DESCRIPTOR = _EMBEDPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.EmbedParameter)
))
_sym_db.RegisterMessage(EmbedParameter)
ExpParameter = _reflection.GeneratedProtocolMessageType('ExpParameter', (_message.Message,), dict(
DESCRIPTOR = _EXPPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ExpParameter)
))
_sym_db.RegisterMessage(ExpParameter)
FlattenParameter = _reflection.GeneratedProtocolMessageType('FlattenParameter', (_message.Message,), dict(
DESCRIPTOR = _FLATTENPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.FlattenParameter)
))
_sym_db.RegisterMessage(FlattenParameter)
HDF5DataParameter = _reflection.GeneratedProtocolMessageType('HDF5DataParameter', (_message.Message,), dict(
DESCRIPTOR = _HDF5DATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.HDF5DataParameter)
))
_sym_db.RegisterMessage(HDF5DataParameter)
HDF5OutputParameter = _reflection.GeneratedProtocolMessageType('HDF5OutputParameter', (_message.Message,), dict(
DESCRIPTOR = _HDF5OUTPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.HDF5OutputParameter)
))
_sym_db.RegisterMessage(HDF5OutputParameter)
HingeLossParameter = _reflection.GeneratedProtocolMessageType('HingeLossParameter', (_message.Message,), dict(
DESCRIPTOR = _HINGELOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.HingeLossParameter)
))
_sym_db.RegisterMessage(HingeLossParameter)
ImageDataParameter = _reflection.GeneratedProtocolMessageType('ImageDataParameter', (_message.Message,), dict(
DESCRIPTOR = _IMAGEDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ImageDataParameter)
))
_sym_db.RegisterMessage(ImageDataParameter)
InfogainLossParameter = _reflection.GeneratedProtocolMessageType('InfogainLossParameter', (_message.Message,), dict(
DESCRIPTOR = _INFOGAINLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.InfogainLossParameter)
))
_sym_db.RegisterMessage(InfogainLossParameter)
InnerProductParameter = _reflection.GeneratedProtocolMessageType('InnerProductParameter', (_message.Message,), dict(
DESCRIPTOR = _INNERPRODUCTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.InnerProductParameter)
))
_sym_db.RegisterMessage(InnerProductParameter)
InputParameter = _reflection.GeneratedProtocolMessageType('InputParameter', (_message.Message,), dict(
DESCRIPTOR = _INPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.InputParameter)
))
_sym_db.RegisterMessage(InputParameter)
LogParameter = _reflection.GeneratedProtocolMessageType('LogParameter', (_message.Message,), dict(
DESCRIPTOR = _LOGPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LogParameter)
))
_sym_db.RegisterMessage(LogParameter)
LRNParameter = _reflection.GeneratedProtocolMessageType('LRNParameter', (_message.Message,), dict(
DESCRIPTOR = _LRNPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LRNParameter)
))
_sym_db.RegisterMessage(LRNParameter)
MemoryDataParameter = _reflection.GeneratedProtocolMessageType('MemoryDataParameter', (_message.Message,), dict(
DESCRIPTOR = _MEMORYDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MemoryDataParameter)
))
_sym_db.RegisterMessage(MemoryDataParameter)
MultiBoxLossParameter = _reflection.GeneratedProtocolMessageType('MultiBoxLossParameter', (_message.Message,), dict(
DESCRIPTOR = _MULTIBOXLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MultiBoxLossParameter)
))
_sym_db.RegisterMessage(MultiBoxLossParameter)
MVNParameter = _reflection.GeneratedProtocolMessageType('MVNParameter', (_message.Message,), dict(
DESCRIPTOR = _MVNPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MVNParameter)
))
_sym_db.RegisterMessage(MVNParameter)
NormalizeParameter = _reflection.GeneratedProtocolMessageType('NormalizeParameter', (_message.Message,), dict(
DESCRIPTOR = _NORMALIZEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NormalizeParameter)
))
_sym_db.RegisterMessage(NormalizeParameter)
ParameterParameter = _reflection.GeneratedProtocolMessageType('ParameterParameter', (_message.Message,), dict(
DESCRIPTOR = _PARAMETERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ParameterParameter)
))
_sym_db.RegisterMessage(ParameterParameter)
PermuteParameter = _reflection.GeneratedProtocolMessageType('PermuteParameter', (_message.Message,), dict(
DESCRIPTOR = _PERMUTEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PermuteParameter)
))
_sym_db.RegisterMessage(PermuteParameter)
PoolingParameter = _reflection.GeneratedProtocolMessageType('PoolingParameter', (_message.Message,), dict(
DESCRIPTOR = _POOLINGPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PoolingParameter)
))
_sym_db.RegisterMessage(PoolingParameter)
PowerParameter = _reflection.GeneratedProtocolMessageType('PowerParameter', (_message.Message,), dict(
DESCRIPTOR = _POWERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PowerParameter)
))
_sym_db.RegisterMessage(PowerParameter)
PriorBoxParameter = _reflection.GeneratedProtocolMessageType('PriorBoxParameter', (_message.Message,), dict(
DESCRIPTOR = _PRIORBOXPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PriorBoxParameter)
))
_sym_db.RegisterMessage(PriorBoxParameter)
PythonParameter = _reflection.GeneratedProtocolMessageType('PythonParameter', (_message.Message,), dict(
DESCRIPTOR = _PYTHONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PythonParameter)
))
_sym_db.RegisterMessage(PythonParameter)
RecurrentParameter = _reflection.GeneratedProtocolMessageType('RecurrentParameter', (_message.Message,), dict(
DESCRIPTOR = _RECURRENTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.RecurrentParameter)
))
_sym_db.RegisterMessage(RecurrentParameter)
ReductionParameter = _reflection.GeneratedProtocolMessageType('ReductionParameter', (_message.Message,), dict(
DESCRIPTOR = _REDUCTIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReductionParameter)
))
_sym_db.RegisterMessage(ReductionParameter)
ReLUParameter = _reflection.GeneratedProtocolMessageType('ReLUParameter', (_message.Message,), dict(
DESCRIPTOR = _RELUPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReLUParameter)
))
_sym_db.RegisterMessage(ReLUParameter)
ReshapeParameter = _reflection.GeneratedProtocolMessageType('ReshapeParameter', (_message.Message,), dict(
DESCRIPTOR = _RESHAPEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReshapeParameter)
))
_sym_db.RegisterMessage(ReshapeParameter)
ScaleParameter = _reflection.GeneratedProtocolMessageType('ScaleParameter', (_message.Message,), dict(
DESCRIPTOR = _SCALEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ScaleParameter)
))
_sym_db.RegisterMessage(ScaleParameter)
SigmoidParameter = _reflection.GeneratedProtocolMessageType('SigmoidParameter', (_message.Message,), dict(
DESCRIPTOR = _SIGMOIDPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SigmoidParameter)
))
_sym_db.RegisterMessage(SigmoidParameter)
SliceParameter = _reflection.GeneratedProtocolMessageType('SliceParameter', (_message.Message,), dict(
DESCRIPTOR = _SLICEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SliceParameter)
))
_sym_db.RegisterMessage(SliceParameter)
SoftmaxParameter = _reflection.GeneratedProtocolMessageType('SoftmaxParameter', (_message.Message,), dict(
DESCRIPTOR = _SOFTMAXPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SoftmaxParameter)
))
_sym_db.RegisterMessage(SoftmaxParameter)
TanHParameter = _reflection.GeneratedProtocolMessageType('TanHParameter', (_message.Message,), dict(
DESCRIPTOR = _TANHPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.TanHParameter)
))
_sym_db.RegisterMessage(TanHParameter)
TileParameter = _reflection.GeneratedProtocolMessageType('TileParameter', (_message.Message,), dict(
DESCRIPTOR = _TILEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.TileParameter)
))
_sym_db.RegisterMessage(TileParameter)
ThresholdParameter = _reflection.GeneratedProtocolMessageType('ThresholdParameter', (_message.Message,), dict(
DESCRIPTOR = _THRESHOLDPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ThresholdParameter)
))
_sym_db.RegisterMessage(ThresholdParameter)
VideoDataParameter = _reflection.GeneratedProtocolMessageType('VideoDataParameter', (_message.Message,), dict(
DESCRIPTOR = _VIDEODATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.VideoDataParameter)
))
_sym_db.RegisterMessage(VideoDataParameter)
WindowDataParameter = _reflection.GeneratedProtocolMessageType('WindowDataParameter', (_message.Message,), dict(
DESCRIPTOR = _WINDOWDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.WindowDataParameter)
))
_sym_db.RegisterMessage(WindowDataParameter)
SPPParameter = _reflection.GeneratedProtocolMessageType('SPPParameter', (_message.Message,), dict(
DESCRIPTOR = _SPPPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SPPParameter)
))
_sym_db.RegisterMessage(SPPParameter)
V1LayerParameter = _reflection.GeneratedProtocolMessageType('V1LayerParameter', (_message.Message,), dict(
DESCRIPTOR = _V1LAYERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.V1LayerParameter)
))
_sym_db.RegisterMessage(V1LayerParameter)
V0LayerParameter = _reflection.GeneratedProtocolMessageType('V0LayerParameter', (_message.Message,), dict(
DESCRIPTOR = _V0LAYERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.V0LayerParameter)
))
_sym_db.RegisterMessage(V0LayerParameter)
PReLUParameter = _reflection.GeneratedProtocolMessageType('PReLUParameter', (_message.Message,), dict(
DESCRIPTOR = _PRELUPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PReLUParameter)
))
_sym_db.RegisterMessage(PReLUParameter)
TransposeParameter = _reflection.GeneratedProtocolMessageType('TransposeParameter', (_message.Message,), dict(
DESCRIPTOR = _TRANSPOSEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.TransposeParameter)
))
_sym_db.RegisterMessage(TransposeParameter)
ReverseParameter = _reflection.GeneratedProtocolMessageType('ReverseParameter', (_message.Message,), dict(
DESCRIPTOR = _REVERSEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReverseParameter)
))
_sym_db.RegisterMessage(ReverseParameter)
LSTMParameter = _reflection.GeneratedProtocolMessageType('LSTMParameter', (_message.Message,), dict(
DESCRIPTOR = _LSTMPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LSTMParameter)
))
_sym_db.RegisterMessage(LSTMParameter)
CTCParameter = _reflection.GeneratedProtocolMessageType('CTCParameter', (_message.Message,), dict(
DESCRIPTOR = _CTCPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.CTCParameter)
))
_sym_db.RegisterMessage(CTCParameter)
CenterLossParameter = _reflection.GeneratedProtocolMessageType('CenterLossParameter', (_message.Message,), dict(
DESCRIPTOR = _CENTERLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.CenterLossParameter)
))
_sym_db.RegisterMessage(CenterLossParameter)
CtcLossParameter = _reflection.GeneratedProtocolMessageType('CtcLossParameter', (_message.Message,), dict(
DESCRIPTOR = _CTCLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.CtcLossParameter)
))
_sym_db.RegisterMessage(CtcLossParameter)
ContinuationIndicatorParameter = _reflection.GeneratedProtocolMessageType('ContinuationIndicatorParameter', (_message.Message,), dict(
DESCRIPTOR = _CONTINUATIONINDICATORPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ContinuationIndicatorParameter)
))
_sym_db.RegisterMessage(ContinuationIndicatorParameter)
LabelsequenceAccuracyParameter = _reflection.GeneratedProtocolMessageType('LabelsequenceAccuracyParameter', (_message.Message,), dict(
DESCRIPTOR = _LABELSEQUENCEACCURACYPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LabelsequenceAccuracyParameter)
))
_sym_db.RegisterMessage(LabelsequenceAccuracyParameter)
SpatialTransformerParameter = _reflection.GeneratedProtocolMessageType('SpatialTransformerParameter', (_message.Message,), dict(
DESCRIPTOR = _SPATIALTRANSFORMERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SpatialTransformerParameter)
))
_sym_db.RegisterMessage(SpatialTransformerParameter)
PowerFileParameter = _reflection.GeneratedProtocolMessageType('PowerFileParameter', (_message.Message,), dict(
DESCRIPTOR = _POWERFILEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PowerFileParameter)
))
_sym_db.RegisterMessage(PowerFileParameter)
STLossParameter = _reflection.GeneratedProtocolMessageType('STLossParameter', (_message.Message,), dict(
DESCRIPTOR = _STLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.STLossParameter)
))
_sym_db.RegisterMessage(STLossParameter)
LocLossParameter = _reflection.GeneratedProtocolMessageType('LocLossParameter', (_message.Message,), dict(
DESCRIPTOR = _LOCLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LocLossParameter)
))
_sym_db.RegisterMessage(LocLossParameter)
_BLOBSHAPE.fields_by_name['dim'].has_options = True
_BLOBSHAPE.fields_by_name['dim']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['data'].has_options = True
_BLOBPROTO.fields_by_name['data']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['diff'].has_options = True
_BLOBPROTO.fields_by_name['diff']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['double_data'].has_options = True
_BLOBPROTO.fields_by_name['double_data']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['double_diff'].has_options = True
_BLOBPROTO.fields_by_name['double_diff']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
# @@protoc_insertion_point(module_scope)
| [
"512690069@qq.com"
] | 512690069@qq.com |
9fb875dbf52353b21994402b4fa2b771522fe822 | 90529c73e0c8b12cb9ca71d926fae1d8f701ed0a | /finalproject/settings.py | e534702509f96717e87b644a7dcdc458e30a91b5 | [] | no_license | bavneetsingh16/Intelligent-Hiring-System | d7981479ad3b1c47117748326090c1715f8b6498 | b96a95da77b0f5484d21b36970c05a68a9998aad | refs/heads/master | 2022-12-10T21:08:31.018535 | 2018-01-14T04:43:18 | 2018-01-14T04:43:18 | 117,400,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,222 | py | """
Django settings for finalproject project.
Generated by 'django-admin startproject' using Django 1.11.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ''
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['resumeranking-env.us-east-1.elasticbeanstalk.com','127.0.0.1','127.0.0.1:8000/resumeranking/']
# Application definition
INSTALLED_APPS = [
'resumeranking',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'finalproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'finalproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT=os.path.join(BASE_DIR,'static/')
| [
"noreply@github.com"
] | bavneetsingh16.noreply@github.com |
842e9f0bec1e4d80b543836e3a70b73e39df57ea | b08dea9994b80c4935663f1fc63a1994eb3ca68f | /sample/work/apps.py | 1300d5690abe8cdba226514c2d6a088153023570 | [] | no_license | mukul96/Quiz | f16a5ae51add933bc5220e12b78730f51a1bbbdb | d1ecf5a02103275d08b1b34748db0b13b244b59d | refs/heads/master | 2021-01-01T18:53:04.685426 | 2019-05-05T18:04:39 | 2019-05-05T18:04:39 | 98,459,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | from __future__ import unicode_literals
from django.apps import AppConfig
class WorkConfig(AppConfig):
name = 'work'
| [
"mukul96"
] | mukul96 |
12dee9eb8074c8d801d2fb535f800831db8e4cbb | 7138995b7197fd76fdfb4596abfd996d0c8e039e | /icon converting/androidify_icons.py | 8ee066038adf43ab650c6365fb09edf6bcaca00d | [
"MIT"
] | permissive | krissrex/Icon-Tools | d7b18906684611d0c4d77a8b8b349051275f48c9 | ca5412760c587c008de5d597fe296854c1856280 | refs/heads/master | 2020-05-23T08:11:55.374659 | 2016-10-07T20:25:45 | 2016-10-07T20:25:45 | 70,279,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,038 | py | import os, shutil
drawable_folder = 'drawable'
def list_files():
files = [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.png')]
print(files)
return files
def find_out():
out_folder = 'out'
counter = 0;
while os.path.exists(out_folder + (str(counter) if counter != 0 else '')):
counter += 1
out_folder += (str(counter) if counter != 0 else '')
print('Out folder:', out_folder)
return out_folder
def make_folders(out_folder):
os.mkdir(out_folder)
os.chdir(out_folder)
densities = {'ldpi':0, 'mdpi':1, 'hdpi':1, 'xhdpi':1, 'xxhdpi':1, 'xxxhdpi':1} # 0.75, 1, 1.5, 2, 3, 4. Turn on/off with 1 and 0.
enabled = {key: densities[key] for key in densities if densities[key] == 1}
for key in enabled:
os.mkdir(drawable_folder+'-'+key)
os.chdir('..')
return enabled
def correct_name(name):
densities = {0.75:'ldpi', 1:'mdpi', 1.5:'hdpi', 2:'xhdpi', 3:'xxhdpi', 4:'xxxhdpi'}
scale = 1
if '@' in name:
position = name.find('@')
scale = name[position + 1 : -5]
scale = scale.replace(',','.')
scale = float(scale)
name = name[:position] + name[-4:]
name = name.replace('-', '_')
name = name.lower()
if scale in densities:
density = densities[scale]
else:
print('Invalid scale found:',name,density)
raise ValueError('Invalid scale '+str(density)+'.')
return (name, density)
def copy_files(files, destination):
for f in files:
new_name = files[f][0]
density = files[f][1]
folder = drawable_folder + '-' + density
path = os.path.join(destination, folder, new_name)
print('Moving',f,'to destination',path)
shutil.copy(f, path)
if __name__ == '__main__':
files = list_files()
path = find_out()
make_folders(path)
new_files = {f:correct_name(f) for f in files} # {"Cross.png":(cross.png, mdpi)}
print('File mapping:\n',new_files)
copy_files(new_files, path)
print('Done.')
| [
"krirek@msn.com"
] | krirek@msn.com |
eab4af00c1cf02878ef3ffc85ea4f270a3a49e25 | 418603934625c30aec5c9549e25567912434bf93 | /src/core/lightning/pytorch_lightning/core/lightning.py | 3eeb23cf862054f5707f2799b303183c57f446db | [] | no_license | antoalli/shape_completion | 9559859f6cb94d8224c1d1fe3b61b6c24801dbf5 | 310b2b08479514bfea3fe5cbe4d61c407cd59f02 | refs/heads/master | 2022-12-07T13:28:14.065782 | 2020-08-17T06:09:43 | 2020-08-17T06:09:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45,546 | py | import collections
import logging as log
import csv
import os
import warnings
from abc import ABC, abstractmethod
from argparse import Namespace
import torch
import torch.distributed as dist
from lightning.pytorch_lightning.core.decorators import data_loader
from lightning.pytorch_lightning.core.grads import GradInformation
from lightning.pytorch_lightning.core.hooks import ModelHooks
from lightning.pytorch_lightning.core.saving import ModelIO
from lightning.pytorch_lightning.core.memory import ModelSummary
from lightning.pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
def __init__(self, *args, **kwargs):
super(LightningModule, self).__init__(*args, **kwargs)
#: Current dtype
self.dtype = torch.FloatTensor
self.exp_save_path = None
#: The current epoch
self.current_epoch = 0
#: Total training batches seen across all epochs
self.global_step = 0
self.loaded_optimizer_states_dict = {}
#: Pointer to the trainer object
self.trainer = None
#: Pointer to the logger object
self.logger = None
self.example_input_array = None
#: True if your model is currently running on GPUs.
#: Useful to set flags around the LightningModule for different CPU vs GPU behavior.
self.on_gpu = False
#: True if using dp
self.use_dp = False
#: True if using ddp
self.use_ddp = False
#: True if using ddp2
self.use_ddp2 = False
#: True if using amp
self.use_amp = False
@abstractmethod
def forward(self, *args, **kwargs):
r"""
Same as torch.nn.Module.forward(), however in Lightning you want this to define
the operations you want to use for prediction (ie: on a server or as a feature extractor).
Normally you'd call self.forward() from your training_step() method. This makes it easy to write a complex
system for training with the outputs you'd want in a prediction setting.
Args:
x (tensor): Whatever you decide to define in the forward method
Return:
Predicted output
Example
-------
.. code-block:: python
# example if we were using this model as a feature extractor
def forward(self, x):
feature_maps = self.convnet(x)
return feature_maps
def training_step(self, batch, batch_idx):
x, y = batch
feature_maps = self.forward(x)
logits = self.classifier(feature_maps)
# ...
return loss
# splitting it this way allows model to be used a feature extractor
model = MyModelAbove()
inputs = server.get_request()
results = model(inputs)
server.write_results(results)
# -------------
# This is in stark contrast to torch.nn.Module where normally you would have this:
def forward(self, batch):
x, y = batch
feature_maps = self.convnet(x)
logits = self.classifier(feature_maps)
return logits
"""
@abstractmethod
def training_step(self, *args, **kwargs):
r"""return loss, dict with metrics for tqdm
Args:
batch (torch.nn.Tensor | (Tensor, Tensor) | [Tensor, Tensor]): The output of your dataloader.
A tensor, tuple or list
batch_idx (int): Integer displaying index of this batch
optimizer_idx (int): If using multiple optimizers, this argument will also be present.
hiddens(:`Tensor <https://pytorch.org/docs/stable/tensors.html>`_): Passed in if truncated_bptt_steps > 0.
:param
:return: dict with loss key and optional log, progress keys
if implementing training_step, return whatever you need in that step:
- loss -> tensor scalar [REQUIRED]
- progress_bar -> Dict for progress bar display. Must have only tensors
- log -> Dict of metrics to add to logger. Must have only tensors (no images, etc)
In this step you'd normally do the forward pass and calculate the loss for a batch.
You can also do fancier things like multiple forward passes or something specific to your model.
Example
-------
.. code-block:: python
def training_step(self, batch, batch_idx):
x, y, z = batch
# implement your own
out = self.forward(x)
loss = self.loss(out, x)
logger_logs = {'training_loss': loss} # optional (MUST ALL BE TENSORS)
# if using TestTubeLogger or TensorBoardLogger you can nest scalars
logger_logs = {'losses': logger_logs} # optional (MUST ALL BE TENSORS)
output = {
'loss': loss, # required
'progress_bar': {'training_loss': loss}, # optional (MUST ALL BE TENSORS)
'log': logger_logs
}
# return a dict
return output
If you define multiple optimizers, this step will also be called with an additional `optimizer_idx` param.
.. code-block:: python
# Multiple optimizers (ie: GANs)
def training_step(self, batch, batch_idx, optimizer_idx):
if optimizer_idx == 0:
# do training_step with encoder
if optimizer_idx == 1:
# do training_step with decoder
If you add truncated back propagation through time you will also get an additional
argument with the hidden states of the previous step.
.. code-block:: python
# Truncated back-propagation through time
def training_step(self, batch, batch_idx, hiddens):
# hiddens are the hiddens from the previous truncated backprop step
...
out, hiddens = self.lstm(data, hiddens)
...
return {
"loss": ...,
"hiddens": hiddens # remember to detach() this
}
You can also return a -1 instead of a dict to stop the current loop. This is useful
if you want to break out of the current training epoch early.
"""
def training_end(self, *args, **kwargs):
"""return loss, dict with metrics for tqdm
:param outputs: What you return in `training_step`.
:return dict: dictionary with loss key and optional log, progress keys:
- loss -> tensor scalar [REQUIRED]
- progress_bar -> Dict for progress bar display. Must have only tensors
- log -> Dict of metrics to add to logger. Must have only tensors (no images, etc)
In certain cases (dp, ddp2), you might want to use all outputs of every process to do something.
For instance, if using negative samples, you could run a batch via dp and use ALL the outputs
for a single softmax across the full batch (ie: the denominator would use the full batch).
In this case you should define training_end to perform those calculations.
Example
-------
.. code-block:: python
# WITHOUT training_end
# if used in DP or DDP2, this batch is 1/num_gpus large
def training_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self.forward(x)
loss = self.softmax(out)
loss = nce_loss(loss)
return {'loss': loss}
# --------------
# with training_end to do softmax over the full batch
def training_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self.forward(x)
return {'out': out}
def training_end(self, outputs):
# this out is now the full size of the batch
out = outputs['out']
# this softmax now uses the full batch size
loss = self.softmax(out)
loss = nce_loss(loss)
return {'loss': loss}
If you define multiple optimizers, this step will also be called with an additional `optimizer_idx` param.
.. code-block:: python
# Multiple optimizers (ie: GANs)
def training_step(self, batch, batch_idx, optimizer_idx):
if optimizer_idx == 0:
# do training_step with encoder
if optimizer_idx == 1:
# do training_step with decoder
If you add truncated back propagation through time you will also get an additional argument
with the hidden states of the previous step.
.. code-block:: python
# Truncated back-propagation through time
def training_step(self, batch, batch_idx, hiddens):
# hiddens are the hiddens from the previous truncated backprop step
You can also return a -1 instead of a dict to stop the current loop. This is useful if you want to
break out of the current training epoch early.
"""
def validation_step(self, *args, **kwargs):
r"""
This is the validation loop. It is called for each batch of the validation set.
Whatever is returned from here will be passed in as a list on validation_end.
In this step you'd normally generate examples or calculate anything of interest such as accuracy.
Args:
batch (torch.nn.Tensor | (Tensor, Tensor) | [Tensor, Tensor]): The output of your dataloader.
A tensor, tuple or list
batch_idx (int): The index of this batch
dataloader_idx (int): The index of the dataloader that produced this batch (only if multiple
val datasets used)
Return:
Dict or OrderedDict - passed to the validation_end step
.. code-block:: python
# if you have one val dataloader:
def validation_step(self, batch, batch_idx)
# if you have multiple val dataloaders:
def validation_step(self, batch, batch_idx, dataloader_idxdx)
Example
-------
.. code-block:: python
# CASE 1: A single validation dataset
def validation_step(self, batch, batch_idx):
x, y = batch
# implement your own
out = self.forward(x)
loss = self.loss(out, y)
# log 6 example images
# or generated text... or whatever
sample_imgs = x[:6]
grid = torchvision.utils.make_grid(sample_imgs)
self.logger.experiment.add_image('example_images', grid, 0)
# calculate acc
labels_hat = torch.argmax(out, dim=1)
val_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
# all optional...
# return whatever you need for the collation function validation_end
output = OrderedDict({
'val_loss': loss_val,
'val_acc': torch.tensor(val_acc), # everything must be a tensor
})
# return an optional dict
return output
If you pass in multiple validation datasets, validation_step will have an additional argument.
.. code-block:: python
# CASE 2: multiple validation datasets
def validation_step(self, batch, batch_idx, dataset_idx):
# dataset_idx tells you which dataset this is.
.. note:: If you don't need to validate you don't need to implement this method.
.. note:: When the validation_step is called, the model has been put in eval mode and PyTorch gradients
have been disabled. At the end of validation, model goes back to training mode and gradients are enabled.
"""
def test_step(self, *args, **kwargs):
"""return whatever outputs will need to be aggregated in test_end
:param batch: The output of your dataloader. A tensor, tuple or list
:param int batch_idx: Integer displaying which batch this is
:param int dataloader_idx: Integer displaying which dataloader this is (only if multiple test datasets used)
:return dict: Dict or OrderedDict with metrics to display in progress bar. All keys must be tensors.
.. code-block:: python
# if you have one test dataloader:
def test_step(self, batch, batch_idx)
# if you have multiple test dataloaders:
def test_step(self, batch, batch_idx, dataloader_idxdx)
**OPTIONAL**
If you don't need to test you don't need to implement this method. In this step you'd normally
generate examples or calculate anything of interest such as accuracy.
When the validation_step is called, the model has been put in eval mode and PyTorch gradients
have been disabled. At the end of validation, model goes back to training mode and gradients are enabled.
The dict you return here will be available in the `test_end` method.
This function is used when you execute `trainer.test()`.
Example
-------
.. code-block:: python
# CASE 1: A single test dataset
def test_step(self, batch, batch_idx):
x, y = batch
# implement your own
out = self.forward(x)
loss = self.loss(out, y)
# calculate acc
labels_hat = torch.argmax(out, dim=1)
test_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
# all optional...
# return whatever you need for the collation function test_end
output = OrderedDict({
'test_loss': loss_test,
'test_acc': torch.tensor(test_acc), # everything must be a tensor
})
# return an optional dict
return output
If you pass in multiple test datasets, `test_step` will have an additional argument.
.. code-block:: python
# CASE 2: multiple test datasets
def test_step(self, batch, batch_idx, dataset_idx):
# dataset_idx tells you which dataset this is.
The `dataset_idx` corresponds to the order of datasets returned in `test_dataloader`.
"""
def validation_end(self, outputs):
"""Outputs has the appended output after each validation step.
:param outputs: List of outputs you defined in validation_step, or if there are multiple dataloaders,
a list containing a list of outputs for each dataloader
:return dict: Dictionary or OrderedDict with optional:
progress_bar -> Dict for progress bar display. Must have only tensors
log -> Dict of metrics to add to logger. Must have only tensors (no images, etc)
If you didn't define a validation_step, this won't be called.
Called at the end of the validation loop with the outputs of validation_step.
The outputs here are strictly for the progress bar.
If you don't need to display anything, don't return anything.
Any keys present in 'log', 'progress_bar' or the rest of the dictionary
are available for callbacks to access. If you want to manually set current step, you can specify it with
'step' key in the 'log' Dict.
Example
-------
With a single dataloader
.. code-block:: python
def validation_end(self, outputs):
val_loss_mean = 0
val_acc_mean = 0
for output in outputs:
val_loss_mean += output['val_loss']
val_acc_mean += output['val_acc']
val_loss_mean /= len(outputs)
val_acc_mean /= len(outputs)
tqdm_dict = {'val_loss': val_loss_mean.item(), 'val_acc': val_acc_mean.item()}
# show val_loss and val_acc in progress bar but only log val_loss
results = {
'progress_bar': tqdm_dict,
'log': {'val_loss': val_loss_mean.item()}
}
return results
With multiple dataloaders, `outputs` will be a list of lists. The outer list contains
one entry per dataloader, while the inner list contains the individual outputs of
each validation step for that dataloader.
.. code-block:: python
def validation_end(self, outputs):
val_loss_mean = 0
val_acc_mean = 0
i = 0
for dataloader_outputs in outputs:
for output in dataloader_outputs:
val_loss_mean += output['val_loss']
val_acc_mean += output['val_acc']
i += 1
val_loss_mean /= i
val_acc_mean /= i
tqdm_dict = {'val_loss': val_loss_mean.item(), 'val_acc': val_acc_mean.item()}
# show val_loss and val_acc in progress bar but only log val_loss
results = {
'progress_bar': tqdm_dict,
'log': {'val_loss': val_loss_mean.item(), 'step': self.current_epoch}
}
return results
"""
def test_end(self, outputs):
"""Outputs has the appended output after each test step.
:param outputs: List of outputs you defined in test_step, or if there are multiple dataloaders,
a list containing a list of outputs for each dataloader
:return dict: Dict of OrderedDict with metrics to display in progress bar
If you didn't define a test_step, this won't be called.
Called at the end of the test step with the output of each test_step.
The outputs here are strictly for the progress bar.
If you don't need to display anything, don't return anything.
Example
-------
.. code-block:: python
def test_end(self, outputs):
test_loss_mean = 0
test_acc_mean = 0
for output in outputs:
test_loss_mean += output['test_loss']
test_acc_mean += output['test_acc']
test_loss_mean /= len(outputs)
test_acc_mean /= len(outputs)
tqdm_dict = {'test_loss': test_loss_mean.item(), 'test_acc': test_acc_mean.item()}
# show test_loss and test_acc in progress bar but only log test_loss
results = {
'progress_bar': tqdm_dict,
'log': {'test_loss': val_loss_mean.item()}
}
return results
With multiple dataloaders, `outputs` will be a list of lists. The outer list contains
one entry per dataloader, while the inner list contains the individual outputs of
each validation step for that dataloader.
.. code-block:: python
def test_end(self, outputs):
test_loss_mean = 0
test_acc_mean = 0
i = 0
for dataloader_outputs in outputs:
for output in dataloader_outputs:
test_loss_mean += output['test_loss']
test_acc_mean += output['test_acc']
i += 1
test_loss_mean /= i
test_acc_mean /= i
tqdm_dict = {'test_loss': test_loss_mean.item(), 'test_acc': test_acc_mean.item()}
# show test_loss and test_acc in progress bar but only log test_loss
results = {
'progress_bar': tqdm_dict,
'log': {'test_loss': val_loss_mean.item()}
}
return results
"""
def configure_ddp(self, model, device_ids):
r"""
Override to init DDP in your own way or with your own wrapper.
The only requirements are that:
1. On a validation batch the call goes to model.validation_step.
2. On a training batch the call goes to model.training_step.
3. On a testing batch, the call goes to model.test_step
Args:
model (LightningModule): the LightningModule currently being optimized
device_ids (list): the list of GPU ids
Return:
DDP wrapped model
Example
-------
.. code-block:: python
# default implementation used in Trainer
def configure_ddp(self, model, device_ids):
# Lightning DDP simply routes to test_step, val_step, etc...
model = LightningDistributedDataParallel(
model,
device_ids=device_ids,
find_unused_parameters=True
)
return model
"""
model = LightningDistributedDataParallel(
model,
device_ids=device_ids,
find_unused_parameters=True
)
return model
def init_ddp_connection(self, proc_rank, world_size):
r"""
Override to define your custom way of setting up a distributed environment.
Lightning's implementation uses env:// init by default and sets the first node as root.
Args:
proc_rank (int): The current process rank within the node.
world_size (int): Number of GPUs being use across all nodes. (num_nodes*nb_gpu_nodes).
Example
-------
.. code-block:: python
def init_ddp_connection(self):
# use slurm job id for the port number
# guarantees unique ports across jobs from same grid search
try:
# use the last 4 numbers in the job id as the id
default_port = os.environ['SLURM_JOB_ID']
default_port = default_port[-4:]
# all ports should be in the 10k+ range
default_port = int(default_port) + 15000
except Exception as e:
default_port = 12910
# if user gave a port number, use that one instead
try:
default_port = os.environ['MASTER_PORT']
except Exception:
os.environ['MASTER_PORT'] = str(default_port)
# figure out the root node addr
try:
root_node = os.environ['SLURM_NODELIST'].split(' ')[0]
except Exception:
root_node = '127.0.0.2'
root_node = self.trainer.resolve_root_node_address(root_node)
os.environ['MASTER_ADDR'] = root_node
dist.init_process_group(
'nccl',
rank=self.proc_rank,
world_size=self.world_size
)
"""
# use slurm job id for the port number
# guarantees unique ports across jobs from same grid search
try:
# use the last 4 numbers in the job id as the id
default_port = os.environ['SLURM_JOB_ID']
default_port = default_port[-4:]
# all ports should be in the 10k+ range
default_port = int(default_port) + 15000
except Exception:
default_port = 12910
# if user gave a port number, use that one instead
try:
default_port = os.environ['MASTER_PORT']
except Exception:
os.environ['MASTER_PORT'] = str(default_port)
# figure out the root node addr
try:
root_node = os.environ['SLURM_NODELIST'].split(' ')[0]
except Exception:
root_node = '127.0.0.2'
root_node = self.trainer.resolve_root_node_address(root_node)
os.environ['MASTER_ADDR'] = root_node
dist.init_process_group('nccl', rank=proc_rank, world_size=world_size)
def configure_apex(self, amp, model, optimizers, amp_level):
    r"""Initialize NVIDIA apex AMP (mixed precision) for this module.

    Override to customize how apex wraps the model. Must return the wrapped
    model and the list of wrapped optimizers.

    Args:
        amp (object): the apex ``amp`` library object
        model (LightningModule): the module being trained
        optimizers (list): optimizers returned by ``configure_optimizers()``
        amp_level (str): AMP optimization level ('O1', 'O2', ...)

    Return:
        Apex-wrapped model and optimizers.

    Example
    -------
    .. code-block:: python

        # Default implementation used by Trainer.
        def configure_apex(self, amp, model, optimizers, amp_level):
            model, optimizers = amp.initialize(
                model, optimizers, opt_level=amp_level,
            )
            return model, optimizers
    """
    # amp.initialize already returns the (model, optimizers) pair, so the
    # default implementation simply delegates and forwards its result.
    return amp.initialize(model, optimizers, opt_level=amp_level)
@abstractmethod
def configure_optimizers(self):
    r"""Choose the optimizers and learning-rate schedulers used for training.

    Normally a single optimizer suffices, but GANs and similar setups may
    return several.

    Return: any of these 3 options:
        - Single optimizer
        - List or Tuple - List of optimizers
        - Two lists - The first list has multiple optimizers, the second a list of learning-rate schedulers

    Example
    -------
    .. code-block:: python

        # most cases
        def configure_optimizers(self):
            opt = Adam(self.parameters(), lr=0.01)
            return opt

        # multiple optimizer case (eg: GAN)
        def configure_optimizers(self):
            generator_opt = Adam(self.model_gen.parameters(), lr=0.01)
            disriminator_opt = Adam(self.model_disc.parameters(), lr=0.02)
            return generator_opt, disriminator_opt

        # example with learning_rate schedulers
        def configure_optimizers(self):
            generator_opt = Adam(self.model_gen.parameters(), lr=0.01)
            disriminator_opt = Adam(self.model_disc.parameters(), lr=0.02)
            discriminator_sched = CosineAnnealing(discriminator_opt, T_max=10)
            return [generator_opt, disriminator_opt], [discriminator_sched]

    .. note:: Lightning calls .backward() and .step() on each optimizer and learning rate scheduler as needed.
    .. note:: If you use 16-bit precision (use_amp=True), Lightning will automatically
        handle the optimizers for you.
    .. note:: If you use multiple optimizers, training_step will have an additional `optimizer_idx` parameter,
        and gradients will be calculated only for the parameters of the current optimizer at each training step.
    .. note:: If you use LBFGS lightning handles the closure function automatically for you.
    .. note:: If you need to control how often those optimizers step or override the default .step() schedule,
        override the `optimizer_step` hook.
    """
def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx, second_order_closure=None):
    r"""Perform one optimizer update; override to change the step schedule.

    By default Lightning calls ``optimizer.step()`` followed by
    ``optimizer.zero_grad()`` once per optimizer. LBFGS is special-cased:
    it requires the closure to be passed into ``step()``.

    Args:
        epoch (int): current epoch
        batch_idx (int): index of the current batch
        optimizer (torch.nn.Optimizer): a PyTorch optimizer
        optimizer_idx (int): index into the optimizer list when several are used
        second_order_closure (callable): closure required by second-order methods

    Example
    -------
    .. code-block:: python

        # Alternating schedule for optimizer steps (ie: GANs)
        def optimizer_step(self, current_epoch, batch_idx, optimizer, optimizer_idx, second_order_closure=None):
            # update generator opt every 2 steps
            if optimizer_idx == 0 and batch_idx % 2 == 0:
                optimizer.step()
                optimizer.zero_grad()
            # update discriminator opt every 4 steps
            if optimizer_idx == 1 and batch_idx % 4 == 0:
                optimizer.step()
                optimizer.zero_grad()

        # learning rate warm-up
        def optimizer_step(self, current_epoch, batch_idx, optimizer, optimizer_idx, second_order_closure=None):
            if self.trainer.global_step < 500:
                lr_scale = min(1., float(self.trainer.global_step + 1) / 500.)
                for pg in optimizer.param_groups:
                    pg['lr'] = lr_scale * self.hparams.learning_rate
            optimizer.step()
            optimizer.zero_grad()
    """
    # LBFGS re-evaluates the loss internally, so it must receive the closure.
    step_args = (second_order_closure,) if isinstance(optimizer, torch.optim.LBFGS) else ()
    optimizer.step(*step_args)

    # Reset gradients so the next accumulation starts from zero.
    optimizer.zero_grad()
def tbptt_split_batch(self, batch, split_size):
    r"""Split ``batch`` along the time dimension for truncated BPTT.

    Lightning handles this by default; override for custom behavior. The
    default splits root-level Tensors and Sequences at dim=1 (the time
    dim) and assumes every time dim has the same length.

    Args:
        batch (list/tuple): the current batch; tensor elements are sliced as
            ``x[:, t:t + split_size]``, sequence elements per-sample.
        split_size (int): length of each time slice.

    Return:
        list of batch splits; each split is passed separately to
        ``training_step(...)``.

    .. note:: Called in the training loop after ``on_batch_start`` when
        ``truncated_bptt_steps > 0``.
    """
    # Fix: ``collections.Sequence`` was removed in Python 3.10 — use the
    # ``collections.abc`` module instead (local import keeps the submodule
    # loaded regardless of how the file-level ``collections`` was imported).
    import collections.abc
    splittable = (torch.Tensor, collections.abc.Sequence)

    time_dims = [len(x[0]) for x in batch if isinstance(x, splittable)]
    assert len(time_dims) >= 1, "Unable to determine batch time dimension"
    assert all(x == time_dims[0] for x in time_dims), "Batch time dimension length is ambiguous"

    splits = []
    for t in range(0, time_dims[0], split_size):
        batch_split = []
        for i, x in enumerate(batch):
            if isinstance(x, torch.Tensor):
                split_x = x[:, t:t + split_size]
            elif isinstance(x, collections.abc.Sequence):
                split_x = [None] * len(x)
                for batch_idx in range(len(x)):
                    split_x[batch_idx] = x[batch_idx][t:t + split_size]
            else:
                # Fix: previously an element that was neither a Tensor nor a
                # Sequence re-used the previous iteration's split (or raised
                # UnboundLocalError on the first element); pass it through
                # unchanged instead.
                split_x = x
            batch_split.append(split_x)
        splits.append(batch_split)
    return splits
@data_loader
def train_dataloader(self):
    """Return the PyTorch DataLoader used by the training loop.

    :return: PyTorch DataLoader

    Decorate overrides with ``@pl.data_loader`` so the loader is built lazily
    (only once the data are actually needed) and cached. If the data must
    change every epoch, DON'T use the decorator.

    Example
    -------
    .. code-block:: python

        @pl.data_loader
        def train_dataloader(self):
            transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (1.0,))])
            dataset = MNIST(root='/path/to/mnist/', train=True, transform=transform, download=True)
            loader = torch.utils.data.DataLoader(
                dataset=dataset,
                batch_size=self.hparams.batch_size,
                shuffle=True
            )
            return loader
    """
    # Base class provides no data; concrete modules must override.
    return None
@data_loader
def test_dataloader(self):
    r"""Return the PyTorch DataLoader used by the test loop.

    Decorate overrides with ``@pl.data_loader`` so the loader is built lazily
    (only once the data are actually needed) and cached.

    Return:
        PyTorch DataLoader

    Example
    -------
    .. code-block:: python

        @pl.data_loader
        def test_dataloader(self):
            transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (1.0,))])
            dataset = MNIST(root='/path/to/mnist/', train=False, transform=transform, download=True)
            loader = torch.utils.data.DataLoader(
                dataset=dataset,
                batch_size=self.hparams.batch_size,
                shuffle=True
            )
            return loader

    .. note:: If you don't need a test dataset and a test_step, you don't need to implement this method.
    .. note:: If you want to change the data during every epoch DON'T use the data_loader decorator.
    """
    # Base class provides no data; concrete modules must override.
    return None
@data_loader
def val_dataloader(self):
    r"""Return the PyTorch DataLoader(s) used by the validation loop.

    Decorate overrides with ``@pl.data_loader`` so the loader is built lazily
    (only once the data are actually needed) and cached.

    Return:
        PyTorch DataLoader (or a list of DataLoaders)

    Example
    -------
    .. code-block:: python

        @pl.data_loader
        def val_dataloader(self):
            transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (1.0,))])
            dataset = MNIST(root='/path/to/mnist/', train=False, transform=transform, download=True)
            loader = torch.utils.data.DataLoader(
                dataset=dataset,
                batch_size=self.hparams.batch_size,
                shuffle=True
            )
            return loader

        # can also return multiple dataloaders
        @pl.data_loader
        def val_dataloader(self):
            return [loader_a, loader_b, ..., loader_n]

    .. note:: If you don't need a validation dataset and a validation_step, you don't need to implement this method.
    .. note:: If you want to change the data during every epoch DON'T use the data_loader decorator.
    .. note:: In the case where you return multiple `val_dataloaders`, the `validation_step`
        will have an argument `dataset_idx` which matches the order here.
    """
    # Base class provides no data; concrete modules must override.
    return None
@classmethod
def load_from_metrics(cls, weights_path, tags_csv, map_location=None):
    r"""Build a model from a checkpoint plus a .csv file of hyperparameters.

    You should use ``load_from_checkpoint`` instead! Use this only when the
    .ckpt weights do not have the hyperparameters saved: the csv (two
    columns: key, value) is converted into an ``argparse.Namespace`` and
    passed into your LightningModule's constructor.

    Args:
        weights_path (str): Path to a PyTorch checkpoint
        tags_csv (str): Path to a .csv with two columns (key, value), e.g.::

            key,value
            drop_prob,0.2
            batch_size,32

        map_location (dict): Maps saved weight GPU devices to new
            devices (example: {'cuda:1': 'cuda:0'})

    Return:
        LightningModule with loaded weights

    Example
    -------
    .. code-block:: python

        pretrained_model = MyLightningModule.load_from_metrics(
            weights_path='/path/to/pytorch_checkpoint.ckpt',
            tags_csv='/path/to/hparams_file.csv',
            map_location=None
        )
        pretrained_model.eval()
        pretrained_model.freeze()
        y_hat = pretrained_model(x)
    """
    hparams = load_hparams_from_tags_csv(tags_csv)
    hparams.on_gpu = False

    # Default mapping keeps tensors on CPU regardless of where they were saved.
    if map_location is None:
        map_location = lambda storage, loc: storage
    checkpoint = torch.load(weights_path, map_location=map_location)

    # Instantiate and restore the weights, then let the model hook run.
    model = cls(hparams)
    model.load_state_dict(checkpoint['state_dict'])
    model.on_load_checkpoint(checkpoint)
    return model
@classmethod
def load_from_checkpoint(cls, checkpoint_path, map_location=None):
    r"""Primary way of loading a model from a checkpoint.

    When Lightning saves a checkpoint it stores the hyperparameters in it,
    provided your LightningModule was initialized with an argument called
    ``hparams`` (a Namespace or a dictionary of hyperparameters).

    Example
    -------
    .. code-block:: python

        # Case 1: Namespace (output of Argparse)
        from argparse import Namespace
        hparams = Namespace(**{'learning_rate': 0.1})
        model = MyModel(hparams)

        class MyModel(pl.LightningModule):
            def __init__(self, hparams):
                self.learning_rate = hparams.learning_rate

        # Case 2: dict
        model = MyModel({'learning_rate': 0.1})

        class MyModel(pl.LightningModule):
            def __init__(self, hparams):
                self.learning_rate = hparams['learning_rate']

    Args:
        checkpoint_path (str): Path to checkpoint.
        map_location (dict): If your checkpoint was saved from a GPU model and
            you now load on CPU or a different number of GPUs, use this to map
            to the new setup, e.g. ``{'cuda:1': 'cuda:0'}``.

    Return:
        LightningModule with loaded weights.

    Raises:
        IOError: when the checkpoint does not contain hyperparameters.
    """
    if map_location is not None:
        checkpoint = torch.load(checkpoint_path, map_location=map_location)
    else:
        # Keep tensors on CPU regardless of where they were saved.
        checkpoint = torch.load(checkpoint_path, map_location=lambda storage, loc: storage)

    try:
        ckpt_hparams = checkpoint['hparams']
    except KeyError:
        # Fix: the two adjacent string literals previously concatenated
        # without a space, producing "...storedin self.hparams?".
        raise IOError(
            "Checkpoint does not contain hyperparameters. Are your model hyperparameters stored "
            "in self.hparams?"
        )
    hparams = Namespace(**ckpt_hparams)

    # Load the state_dict on the model automatically, then give the model
    # a chance to restore anything it stored via on_save_checkpoint.
    model = cls(hparams)
    model.load_state_dict(checkpoint['state_dict'])
    model.on_load_checkpoint(checkpoint)
    return model
def summarize(self, mode):
    """Log a human-readable summary of the model's layers and parameters."""
    summary = ModelSummary(self, mode=mode)
    log.info('\n' + str(summary))
def freeze(self):
    r"""Freeze all parameters for inference.

    Disables gradient computation for every parameter and switches the
    module to eval mode.

    Example
    -------
    .. code-block:: python

        model = MyLightningModule(...)
        model.freeze()
    """
    for parameter in self.parameters():
        parameter.requires_grad = False
    self.eval()
def unfreeze(self):
    """Unfreeze all parameters for training.

    Re-enables gradient computation for every parameter and switches the
    module back to train mode.

    .. code-block:: python

        model = MyLightningModule(...)
        model.unfreeze()
    """
    for parameter in self.parameters():
        parameter.requires_grad = True
    self.train()
def on_load_checkpoint(self, checkpoint):
    r"""Hook called by Lightning when restoring your model from a checkpoint.

    If you saved something with **on_save_checkpoint** this is your chance
    to restore it.

    Args:
        checkpoint (dict): Loaded checkpoint

    Example
    -------
    .. code-block:: python

        def on_load_checkpoint(self, checkpoint):
            # 99% of the time you don't need to implement this method
            self.something_cool_i_want_to_save = checkpoint['something_cool_i_want_to_save']

    .. note:: Lightning auto-restores global step, epoch, and all training state
        including amp scaling. No need for you to restore anything regarding training.
    """
def on_save_checkpoint(self, checkpoint):
    r"""Hook called by Lightning when saving a checkpoint.

    Gives you a chance to store anything else you might want to save.

    Args:
        checkpoint (dict): Checkpoint to be saved

    Example
    -------
    .. code-block:: python

        def on_save_checkpoint(self, checkpoint):
            # 99% of use cases you don't need to implement this method
            checkpoint['something_cool_i_want_to_save'] = my_cool_pickable_object

    .. note:: Lightning saves all aspects of training (epoch, global step, etc...)
        including amp scaling. No need for you to store anything about training.
    """
def get_tqdm_dict(self):
    r"""Return additional items to display in the progress bar.

    Return:
        dict of label -> value pairs shown in the tqdm progress bar.
    """
    items = {'loss': '{:.3f}'.format(self.trainer.avg_loss)}

    # Show the current truncated-BPTT split when TBPTT is active.
    if self.trainer.truncated_bptt_steps is not None:
        items['split_idx'] = self.trainer.split_idx

    # Show the logger's experiment version when one is attached.
    logger = self.trainer.logger
    if logger is not None and logger.version is not None:
        items['v_num'] = logger.version

    return items
def load_hparams_from_tags_csv(tags_csv):
    """Parse a two-column (key, value) csv into an ``argparse.Namespace``.

    The first row is assumed to be the header and is skipped; values are
    coerced with ``convert`` (bool/int/float, falling back to str). When the
    file does not exist, a warning is logged and an empty Namespace returned.
    """
    if not os.path.isfile(tags_csv):
        log.warning(f'Missing Tags: {tags_csv}.')
        return Namespace()

    with open(tags_csv) as f:
        rows = list(csv.reader(f, delimiter=','))
    # Skip the header row; each remaining row is one hyperparameter.
    tags = {row[0]: convert(row[1]) for row in rows[1:]}
    return Namespace(**tags)
def convert(val):
    """Coerce a csv string to bool/int/float, falling back to the raw value.

    'true'/'false' (any case) become booleans; otherwise the first of
    int, float, str that accepts the value wins.
    """
    if isinstance(val, str):
        lowered = val.lower()
        if lowered == 'true':
            return True
        if lowered == 'false':
            return False
    for cast in (int, float, str):
        try:
            return cast(val)
        except ValueError:
            pass
    return val
| [
"ido.imanuel@gmail.com"
] | ido.imanuel@gmail.com |
beda3861457b5c0ec5adcf3e59533198388c1a78 | 44a5a7742ed6c888e24fcbb382d50f747fc171ba | /motiondetection.py | 26ef6ede95f5d68218eb7d737fbf4f76656acae0 | [] | no_license | seil-cse-iitb/classroom-yolo | 02375c0f76654faf556b740621792c625828096b | 339bc3ed8ddfc8fa34a4549cc5b13378b097662d | refs/heads/main | 2023-06-12T01:52:28.637228 | 2017-06-21T15:40:04 | 2017-06-21T15:40:04 | 383,571,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,242 | py | import time
import imutils
import numpy as np
import cv2
import json
import threading
import logging
from hd_variables import variables_hd
from datetime import datetime
from networkLayer import send_HD
class motiondetection(threading.Thread):
    """Thread that watches one camera stream and reports Human Detection (HD)
    for its zone when sustained frame-to-frame motion is observed.

    NOTE(review): this module is Python 2 (print statements). Indentation below
    is reconstructed to standard 4-space style.
    """

    def __init__(self, cycle, data_id, cam_url, threadid):
        # cycle: detection cycle identifier forwarded to send_HD
        # data_id: key into config_zones.json selecting the zone/camera map
        # cam_url: OpenCV-compatible stream URL for this camera
        # threadid: zero-based camera index; also used for log messages
        threading.Thread.__init__(self)
        self.threadid = threadid
        self.cam_url = cam_url
        self.data_id = data_id
        self.cycle = cycle

    def run(self):
        # print datetime.now().strftime('[%d-%b-%y %H:%M:%S]')+" Starting Motion Detection for Camera " + str(self.threadid+1)
        conf = json.load(open('config_imageprocessing.json'))
        conf_zones = json.load(open('config_zones.json'))
        zone_info = conf_zones[self.data_id]["cameras"]

        # Find the zone whose camera list mentions this thread's camera id.
        # NOTE(review): substring match on the key — camera "1" also matches
        # "10"; confirm key format against config_zones.json.
        for keys in zone_info.keys():
            if str(self.threadid) in keys :
                zone_no = 0
                zone_no=keys
        zone_no = int(zone_no)

        HD_Timer =0  # consecutive-ish count of frames with significant motion
        ThresholdArea = conf["Threshold Area"]  # minimum contour area to count as motion
        if self.threadid == 2:
            # camera 3 counts every contour regardless of size
            ThresholdArea = 0

        connection = False
        camera = cv2.VideoCapture(self.cam_url)
        start = time.time()
        firstFrame = None          # previous grayscale frame for differencing
        max_contour_area = 0       # largest motion contour seen (for logging)
        no_of_contours = 0         # total contours above threshold (for logging)

        # Keep retrying until a frame can be read from the stream.
        while(not connection):
            try:
                _, frame = camera.read()
                connection = True
            except:
                camera = cv2.VideoCapture(self.cam_url)
                print camera

        # loop over the frames of the video
        while True:
            # Another camera/algorithm already detected HD for this zone — stop.
            if variables_hd.decision[zone_no] == True:
                print datetime.now().strftime('[%d-%b-%y %H:%M:%S]')+ "QUITTING MOTION" + "(Camera " + str(self.threadid) + ")"
                # NOTE(review): self.zone_no is never assigned (zone_no is a
                # local variable) — this line would raise AttributeError if
                # reached; confirm and change to zone_no.
                logging.info('Quitting Motion Detection. Another Camera and/or Algorithm gives HD for Zone ' + str(self.zone_no))
                return

            (grabbed, frame) = camera.read()
            if grabbed is None:
                print datetime.now().strftime('[%d-%b-%y %H:%M:%S]')+" Waiting"
                continue
            #original_feed = frame

            # Downscale and blur to suppress sensor noise before differencing.
            try:
                frame = imutils.resize(frame, conf["frame width"])
                gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
                gray = cv2.GaussianBlur(gray, (21, 21), 0)
            except:
                print "opencv error: frame not found. Please check camera " + str(self.cam_url)
                return

            if firstFrame is None:
                firstFrame = gray
                continue

            # Frame differencing against the immediately preceding frame.
            frameDelta = cv2.absdiff(firstFrame, gray)
            firstFrame = gray
            thresh = cv2.adaptiveThreshold(frameDelta,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY_INV,11,2)
            thresh = cv2.dilate(thresh, None, iterations=3)
            cnts, _ = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)[-2:]

            contourcheck = len(cnts)
            no_of_contours += len(cnts)
            for i in range(len(cnts)):
                # Discard contours smaller than the configured motion threshold.
                if(cv2.contourArea(cnts[i]) < ThresholdArea):
                    contourcheck = contourcheck - 1
                    no_of_contours -= 1
                    continue
                #print cv2.contourArea(cnts[i])
                if cv2.contourArea(cnts[i])>max_contour_area:
                    max_contour_area = cv2.contourArea(cnts[i])
                # Annotate the frame for the optional debug display.
                (x, y, w, h) = cv2.boundingRect(cnts[i])
                cv2.drawContours(frame, cnts, i,(244,233,0))
                cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)

            # Any surviving contour counts this frame as "motion".
            if (contourcheck>0):
                HD_Timer += 1

            # More than 25 motion frames => report human detection for the zone.
            if HD_Timer > 25:
                #print HD_Timer
                camera.release()
                #cv2.destroyAllWindows()
                # Serialize the zone-level decision across camera threads.
                variables_hd.mutex.acquire()
                logging.info('Mutex Acquired, Camera:' + str(self.threadid +1))
                logging.info('Motion HD, Camera:' + str(self.threadid +1))
                variables_hd.decision[zone_no] = True;
                send_HD(self.data_id,self.cycle,zone_no)
                logging.info('Motion HD, Camera:' + str(self.threadid +1))
                print datetime.now().strftime('[%d-%b-%y %H:%M:%S]')+ "Camera " + str(self.threadid) + ":Motion HD"
                variables_hd.mutex.release()
                logging.info('Mutex Released, Camera:' + str(self.threadid +1))
                variables_hd.hd_zone[zone_no] = True
                # NOTE(review): the summary below indexes hd_zone by
                # self.threadid while the flag above was set at zone_no —
                # confirm which index is intended.
                print datetime.now().strftime('[%d-%b-%y %H:%M:%S]')+" Camera "+ str(self.threadid+1) + ": max_contour_size:" + str(max_contour_area) + ", no_of_contours:" + \
                    str(no_of_contours) + ", HD Timer:" + str(HD_Timer) + ", MOTION:" + str(variables_hd.hd_zone[self.threadid])
                return
            #filename = "/home/stark/BA/Malvika/Presence/" + str(time.strftime("%H %M %S")) + "_" + str(int(max_contour_area)) + ".jpg"
            #print filename
            #cv2.imwrite(filename,frame)

            if conf["show_video"]:
                cv2.imshow("Motion Detection", frame)
            #key = cv2.waitKey(1) & 0xFF

            # Give up after the configured observation window with no HD.
            if ((time.time() - start) > conf["T_Check"]):
                break

        camera.release()
        #cv2.destroyAllWindows()
        variables_hd.hd_zone[self.threadid] = False
        print datetime.now().strftime('[%d-%b-%y %H:%M:%S]')+" Camera "+ str(self.threadid+1) + ": max_contour_size:" + str(max_contour_area) + ", no_of_contours:" + str(no_of_contours) + \
            ", HD Timer:" + str(HD_Timer) + ", MOTION:" + str(variables_hd.hd_zone[self.threadid])
        return
def skin_detection(image, x, y, w, h):
    """Return True when the (x, y, w, h) region of `image` contains any
    skin-toned pixel, judged by a fixed YCrCb color range.

    image: BGR image (as produced by cv2); x, y, w, h: region of interest.
    """
    # Constants for finding range of skin color in YCrCb
    min_YCrCb = np.array([0, 133, 77], np.uint8)
    max_YCrCb = np.array([255, 173, 127], np.uint8)

    # Convert image to YCrCb and mark in-range (skin) pixels with 255.
    imageYCrCb = cv2.cvtColor(image, cv2.COLOR_BGR2YCR_CB)
    skinRegion = cv2.inRange(imageYCrCb, min_YCrCb, max_YCrCb)

    # Vectorized replacement for the original O(w*h) Python pixel loop:
    # the result is unchanged (True iff at least one skin pixel in the ROI).
    region = skinRegion[y:y + h, x:x + w]
    return bool(np.any(region == 255))
| [
"malvika0311@gmail.com"
] | malvika0311@gmail.com |
98649096ac72586c4cc39e7e4b5b32871381a937 | ffbad21b9e8a92f9669ebaa1e0542d9bf114f414 | /akshare/bank/bank_banker.py | e14042b63153052d133088af489debe4c662e335 | [
"MIT"
] | permissive | cqzhao/akshare | 6bccdb4eceae633609bb6a797760a5d05ed0165a | cd740d050015edd26590cc0f3d493d7dc57ea79b | refs/heads/master | 2023-03-02T01:24:19.238315 | 2021-01-25T04:06:27 | 2021-01-25T04:06:27 | 311,243,910 | 0 | 0 | MIT | 2021-01-25T04:06:28 | 2020-11-09T06:26:44 | null | UTF-8 | Python | false | false | 9,970 | py | # -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Date: 2021/1/14 15:56
Desc: thebankerdatabase
https://www.thebankerdatabase.com/index.cfm/search/ranking
"""
import pandas as pd
import requests
from bs4 import BeautifulSoup
from tqdm import tqdm
def bank_rank_banker() -> pd.DataFrame:
    """
    全球银行排名前 25 家
    https://www.thebankerdatabase.com/index.cfm/search/ranking
    :return: 全球银行排名前 25 家
    :rtype: pandas.DataFrame
    """
    url = "https://www.thebankerdatabase.com/index.cfm/search/index.cfm"
    # NOTE(review): the cookie below embeds a recorded session (CFID/CFTOKEN);
    # it may expire — confirm whether the endpoint accepts requests without it.
    headers = {
        "accept": "application/json, text/javascript, */*; q=0.01",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "cache-control": "no-cache",
        "content-length": "5906",
        "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
        "cookie": "CFID=4066679; CFTOKEN=757b91f9e32ccf96-DABAED1E-5056-81CB-AC16B7759B219C5F; __utmz=11608689.1610550237.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); __utmv=11608689.|1=User%20Type=Anonymous=1; X-Mapping-mcmjnkih=105487F00B86D7352E95B0FD5E7117FE; JSESSIONID=AAFB1EFAC538A6591033D322503118E6.cfusion; LIVEPAGEHEIGHT=600; LIVEPAGEWIDTH=800; __utma=11608689.1485486898.1610550237.1610550237.1610609939.2; __utmc=11608689; __utmt=1; __utmb=11608689.1.10.1610609939; CFGLOBALS=urltoken%3DCFID%23%3D4066679%26CFTOKEN%23%3D757b91f9e32ccf96%2DDABAED1E%2D5056%2D81CB%2DAC16B7759B219C5F%26jsessionid%23%3DAAFB1EFAC538A6591033D322503118E6%2Ecfusion%23lastvisit%3D%7Bts%20%272021%2D01%2D14%2007%3A39%3A01%27%7D%23hitcount%3D44%23timecreated%3D%7Bts%20%272021%2D01%2D13%2015%3A03%3A42%27%7D%23cftoken%3D757b91f9e32ccf96%2DDABAED1E%2D5056%2D81CB%2DAC16B7759B219C5F%23cfid%3D4066679%23",
        "origin": "https://www.thebankerdatabase.com",
        "pragma": "no-cache",
        "referer": "https://www.thebankerdatabase.com/index.cfm/search/ranking",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36",
        "x-requested-with": "XMLHttpRequest",
    }
    params = {
        "fuseaction": "search.search_results_json",
        "ajax": "1",
        "ranking": "1",
    }
    # DataTables server-side protocol: six "columns[i][...]" keys per column.
    # Built programmatically (in the same key order the site sends) instead of
    # ~160 hand-written literal entries.
    column_names = [
        "bank_id", "primary_ranking", "previous_ranking", "current_name",
        "country_name", "yearend_datetime", "DP2", "DP2_change", "DP2_rank",
        "DP6", "DP6_change", "DP6_rank", "DP1", "DP1_change", "DP12",
        "DP48", "DP48_rank", "DP130", "DP130_rank", "DP13", "DP13_rank",
        "DP8", "DP49", "DP49_rank", "DP131", "DP132",
    ]
    payload = {"draw": "4"}
    for index, name in enumerate(column_names):
        payload["columns[{}][data]".format(index)] = name
        payload["columns[{}][name]".format(index)] = name
        payload["columns[{}][searchable]".format(index)] = "true"
        # only the leading bank_id column is non-orderable
        payload["columns[{}][orderable]".format(index)] = "false" if index == 0 else "1"
        payload["columns[{}][search][value]".format(index)] = ""
        payload["columns[{}][search][regex]".format(index)] = "false"
    payload.update({
        "order[0][column]": "0",
        "order[0][dir]": "asc",
        "start": "0",
        "length": "100",
        "search[value]": "",
        "search[regex]": "false",
    })

    r = requests.post(url, params=params, data=payload, headers=headers)
    data_json = r.json()
    temp_df = pd.DataFrame(data_json["data"])
    # Drop internal columns that are not part of the published ranking.
    del temp_df["columnlist"]
    del temp_df["bank_id"]

    # The "current_name" cells are HTML anchors; follow each bank's detail
    # page to read its display name (one extra request per bank).
    bank_url_list = [
        "https://www.thebankerdatabase.com/"
        + BeautifulSoup(item, "lxml").find("a")["href"]
        for item in temp_df["current_name"]
    ]
    bank_name_list = []
    for item in tqdm(bank_url_list):
        r = requests.get(item)
        soup = BeautifulSoup(r.text, "lxml")
        bank_name = soup.find("h1", attrs={"class": "bank"}).find("span").text
        bank_name_list.append(bank_name)
    temp_df["current_name"] = bank_name_list
    temp_df["yearend_datetime"] = pd.to_datetime(temp_df["yearend_datetime"])
    return temp_df
if __name__ == "__main__":
    # Manual smoke test: fetch the global bank ranking and print it.
    bank_rank_banker_df = bank_rank_banker()
    print(bank_rank_banker_df)
| [
"jindaxiang@163.com"
] | jindaxiang@163.com |
ba3cf1e5261362abcb2fbb2ef294e56868785b5d | d4738162bf2558abb01e7e67191dc63ca2bc39f2 | /software/5/noisy-temperature.py | 277f1c9fbe1461a81262f4cc8a5d2d22b516f802 | [] | no_license | ahgalila/masters-thesis | 63b93fa157bb430883c96d8bc7fddfb79f55bc83 | 05362b0627e117deb97ed6e1c06487b64883a3c9 | refs/heads/master | 2022-01-17T07:40:24.579903 | 2019-08-09T05:06:08 | 2019-08-09T05:06:08 | 198,347,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,400 | py | from __future__ import division, print_function, absolute_import
from lstm_model import LSTMModel
from tensorflow.examples.tutorials.mnist import input_data
from utils import argmax, one_hot
from random import randint
import numpy as np
def temperature(value):
    """Thermometer (unary) encoding of a digit over 9 slots.

    The last `value` entries are 1, the rest 0; e.g. temperature(3) ->
    [0, 0, 0, 0, 0, 0, 1, 1, 1]. Valid for 0 <= value <= 9.
    """
    return [0] * (9 - value) + [1] * value
def is_temperature(value, target):
    """Check whether the soft outputs ``value`` decode to the thermometer
    code ``target``.

    A target 1 requires an activation of at least 0.5, a target 0 requires
    at most 0.5. Fix: the original compared the hard-coded index 1
    (``target[1]``/``value[1]``) instead of the loop index ``i`` for the
    "hot" check, so wrong activations at hot positions other than 1 were
    never detected.
    """
    for i in range(len(value)):
        if (target[i] == 1 and value[i] < 0.5) or (target[i] == 0 and value[i] > 0.5):
            return False
    return True
# Load MNIST and bucket the training images by digit label.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
trainSamples = [[], [], [], [], [], [], [], [], [], []]
# Pre-rendered images of the operator symbols and the '=' sign.
operators = {
    "+": np.load("MNIST_data/plus.npy"),
    "x": np.load("MNIST_data/times.npy"),
    "-": np.load("MNIST_data/minus.npy"),
    "/": np.load("MNIST_data/divide.npy")
}
plus = np.load("MNIST_data/plus.npy")
equals = np.load("MNIST_data/equals.npy")
for i in range(len(mnist.train.images)):
    trainSamples[argmax(mnist.train.labels[i])].append(mnist.train.images[i])
train_x = []
train_y = []
val_x = []
val_y = []
test_x = []
test_y = []
# For each digit, pick one random partner digit; those (a, b) pairs are
# held out of training and only appear in the test set below.
testWatchListA = {}
testWatchListB = {}
for a in range(10):
    testWatchListA[str(a)] = randint(0, 9)
for b in range(10):
    testWatchListB[str(b)] = randint(0, 9)
# Build (a op b =) image sequences with thermometer-coded result targets:
# tens digit on the left, units on the right (for '/': remainder left,
# quotient right).
for a in range(0, 10):
    for b in range(0, 10):
        for key in operators:
            if key == "+":
                leftTarget = temperature((a + b) // 10)
                rightTarget = temperature((a + b) % 10)
            if key == "x":
                leftTarget = temperature((a * b) // 10)
                rightTarget = temperature((a * b) % 10)
            if key == "-" and a >= b:
                leftTarget = temperature((a - b) // 10)
                rightTarget = temperature((a - b) % 10)
            if key == "/" and b > 0:
                leftTarget = temperature(a % b)
                rightTarget = temperature(a // b)
            # NOTE(review): when the '-' or '/' guard fails (a < b, or b == 0)
            # leftTarget/rightTarget keep the values from the previous operator,
            # so those samples carry stale labels -- confirm this is intended.
            if b == testWatchListA[str(a)] or a == testWatchListB[str(b)]:
                # Held-out digit pair: 2 test samples per operator.
                for i in range(2):
                    left = trainSamples[a][randint(0, len(trainSamples[a]) - 1)]
                    right = trainSamples[b][randint(0, len(trainSamples[b]) - 1)]
                    test_x.append([right, left, operators[key], equals])
                    test_y.append([temperature(b), temperature(a), rightTarget, leftTarget])
            else:
                # 25 training and 2 validation samples per (a, b, operator).
                for i in range(25):
                    left = trainSamples[a][randint(0, len(trainSamples[a]) - 1)]
                    right = trainSamples[b][randint(0, len(trainSamples[b]) - 1)]
                    train_x.append([right, left, operators[key], equals])
                    train_y.append([temperature(b), temperature(a), rightTarget, leftTarget])
                for i in range(2):
                    left = trainSamples[a][randint(0, len(trainSamples[a]) - 1)]
                    right = trainSamples[b][randint(0, len(trainSamples[b]) - 1)]
                    val_x.append([right, left, operators[key], equals])
                    val_y.append([temperature(b), temperature(a), rightTarget, leftTarget])
# Pack the sample lists into arrays for the model.
train_x = np.array(train_x)
train_y = np.array(train_y)
val_x = np.array(val_x)
val_y = np.array(val_y)
test_x = np.array(test_x)
test_y = np.array(test_y)
# NOTE(review): presumably (timesteps, layer sizes, input dim, output dim)
# -- confirm against lstm_model.py.
model = LSTMModel(4, [10, 100], 784, 9)
model.train(train_x, train_y, val_x, val_y, 100, 5000)
results = model.predict(val_x)
count = 0
# A prediction counts as correct only if both output digits (sequence
# positions 2 and 3) decode to the right thermometer codes.
for index, result in enumerate(results):
    left = result[2]
    right = result[3]
    leftTarget = val_y[index][2]
    rightTarget = val_y[index][3]
    if is_temperature(left, leftTarget) and is_temperature(right, rightTarget):
        count += 1
test_count = 0
test_results = model.predict(test_x)
for index, result in enumerate(test_results):
    left = result[2]
    right = result[3]
    leftTarget = test_y[index][2]
    rightTarget = test_y[index][3]
    if is_temperature(left, leftTarget) and is_temperature(right, rightTarget):
        test_count += 1
print("SCORE: " + str(float(count) / len(results)))
print("TEST SCORE: " + str(float(test_count) / len(test_results)))
#2 Layers, 512 units each
#SCORE: 0.716049382716
#TEST SCORE: 0.335526315789
#2 Layers, 20 units each
#SCORE: 0.54475308642
#TEST SCORE: 0.447368421053
#2 Layers, 100 units, 10 units
#SCORE: 0.7125
#TEST SCORE: 0.5375 | [
"mail@ahmedabada.com"
] | mail@ahmedabada.com |
ba0f688f9cb711a5b1bad58d76ac28cf73dc5873 | b3724aeb2b8fc43c1ddc5c2481d329e952a5fd1f | /device34470A.py | 7a5dc93419cdf6b5ae6374da7c0868da4d9ef40c | [] | no_license | SHU023/test1 | 4cc0ebaa2ab5af438ffcabb8e57f407014186f77 | d4b5623bc06d5148f5fb2c11bc35884be9366af4 | refs/heads/master | 2020-07-23T05:03:59.265486 | 2020-01-07T04:15:38 | 2020-01-07T04:15:38 | 207,453,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py |
#created on 2019/9/17 @auther SHU
# python visa usage example
import time
import visa
# Open the USB-TMC instrument (a 34470A DMM, per the file name).
rm =visa.ResourceManager()
rm.list_resources()
instr=rm.open_resource('USB0::0x2A8D::0x0201::MY57700883::0::INSTR')
# NOTE(review): these SCPI measurement queries are sent with write() and
# their replies are never read back; use query() if the readings are wanted.
instr.write("MEAS:RES?")
instr.write("MEAS:CURR:DC?")
instr.write("MEAS:VOLT:DC?")
print(instr.query("*IDN?"))
'''
instr.write("CONF:CURR:DC 100 mA")
instr.write("CONF:VOLT:DC 100 mV")
instr.write("MEAS:VOLT:DC 100 mV")
instr.write("MEAS:CURR:DC 100 mA")
instr.write("TRIG:COUN 10")
instr.write("TRIG:SOUR EXT;SLOP POS")
instr.write("READ?")
'''
| [
"shu.rarara21@gmail.com"
] | shu.rarara21@gmail.com |
ae5f27b58b42509c2fb6f82e2e426f521420b5dd | d87f6d9e769709def3efcf30230cd8bf6ac2cef7 | /WWTest/autotest/config/xkz/youyanyace/globalconfig/globalConfig.py | e6cc20a18999b112dc5f12dade19633d8c3165fc | [] | no_license | wawj901124/centos8xitong | 876dcc45b895871119404ad1899ca59ab5dd90b6 | 81fc0d1151e3172ceec2093b035d2cd921e1a433 | refs/heads/master | 2023-02-23T22:33:22.314433 | 2021-01-31T01:54:35 | 2021-01-31T01:54:35 | 290,476,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | class GlobalConfig(object):
ISONLINE = False
ONLINE_WEB_YUMING= ""
ONLINE_LOGIN_ACCOUNT = ""
ONLINE_LOGIN_PASSWORD = ""
TEST_WEB_YUMING = "http://111.207.18.22:22044/"
TEST_LOGIN_ACCOUNT = "admin"
TEST_LOGIN_PASSWORD = "admin123A"
COOKIE_FILE_NAME = "youyanyacelogincookie.json"
gc = GlobalConfig()
| [
"wawj900805"
] | wawj900805 |
66d0aae4bb8e2821ef1d5d3484b395f66319485a | 6af43406a942279558232a4f9649ac645f373779 | /notebooks/myutil_regr.py | acd22bfb7d80c9423dd34e7e1163ae7d50a5b506 | [] | no_license | VajiraPrabuddhaka/dengAI | 284c59729ab05b141d82310f932e36f593e153fd | f5db3f2d7e0c05e39dd5b1e829150898510ab46c | refs/heads/master | 2020-03-20T06:29:34.544906 | 2018-07-27T18:18:28 | 2018-07-27T18:18:28 | 137,250,543 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,289 | py | import scipy as sp
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import statsmodels as stats
from sklearn.preprocessing import MinMaxScaler, OneHotEncoder
from sklearn.model_selection import train_test_split
import theano
import tensorflow as tf
import keras
def plot_cols(arr):
    """Plot every column of the 2-D array ``arr`` as its own stacked subplot."""
    plt.figure(figsize=(15,40))
    for i in np.arange(0, arr.shape[1]):
        plt.subplot(arr.shape[1], 1, i+1)
        plt.plot(arr[:, i])
def plot_cols2(df):
    """Plot each remaining column of ``df`` (blue) with its trailing
    12-sample rolling mean overlaid (red), one stacked subplot per column."""
    # won't plot certain columns, string, constant
    plotcolumns = set(df.columns) - {'city', 'year', 'weekofyear', 'week_start_date'}
    i = 1
    plt.figure(figsize=(25,60))
    for column in plotcolumns:
        s = df[column].apply(pd.to_numeric)
        plt.subplot(len(df.columns), 1, i)
        s.plot(kind='line', color='blue', lw=2)
        s.rolling(window=12, center=False).mean().plot(kind='line', color='red', lw=0.8)
        plt.title(column, y=0.8, loc='right', fontsize=18)
        #s.rolling(window=12, center=False).std().plot(kind='line', color='black', lw=0.8)
        i += 1
def set_index(df):
    """Index ``df`` in place by a 'YYYYWW' key built from year and weekofyear.

    Fix: the original used positional indexing (``x[0]``/``x[1]``) on each row
    Series inside ``apply``, which is deprecated and removed in pandas 3.0.
    The key is now built with vectorized, label-based string operations, which
    is also much faster than a row-wise ``apply``.

    Returns the same (mutated) DataFrame for chaining.
    """
    df['yearweekofyear'] = (
        df['year'].astype(str) + df['weekofyear'].astype(str).str.zfill(2)
    )
    df.set_index('yearweekofyear', inplace=True)
    return df
def get_indexed_dataset(path):
    """Read the CSV at ``path`` and return it indexed by the year/week key."""
    return set_index(pd.read_csv(path))
def split_dataset_by_city(df):
    """Split ``df`` into its Iquitos ('iq') and San Juan ('sj') rows."""
    city_column = df['city']
    iquitos = df[city_column == 'iq']
    san_juan = df[city_column == 'sj']
    return iquitos, san_juan
# will set nan to avarage value for the column for the given weekofyear
def set_nan_to_week_mean(df_with_nans):
    """Return a copy of ``df_with_nans`` where every NaN is replaced by the
    mean of its column over the same (city, weekofyear) group.

    Identifier/label columns listed in ``skip_columns`` are never filled.
    Fix: the original duplicated the entire fill loop verbatim for 'iq'
    and 'sj'; the two copies are collapsed into one loop over the city codes.
    """
    # e.g. cityweek_mean['ndvi_ne'][('iq', 1)] is the mean ndvi_ne for
    # Iquitos in week 1.
    cityweek_mean = df_with_nans.groupby(['city', 'weekofyear']).mean().to_dict()
    df_clean = df_with_nans.copy()
    # Columns that must never be overwritten by a group mean.
    skip_columns = {'city', 'weekofyear', 'week_start_date', 'total_cases'}
    for city in ('iq', 'sj'):
        where = df_with_nans['city'] == city
        # row is the index label; cols is the row as a Series
        # (column name -> value).
        for row, cols in df_with_nans[where].iterrows():
            for idx in cols.index:
                if pd.isnull(cols[idx]) and idx not in skip_columns:
                    # there are no group means for weekofyear = 53
                    week_of_year = min(52, cols['weekofyear'])
                    df_clean.loc[row, idx] = cityweek_mean[idx][(city, week_of_year)]
    return df_clean
# not used
def prep_interpolate(df):
    """Linearly interpolate every column that is missing values.

    A column is interpolated when it holds fewer non-NaN entries than the
    fullest column of ``df``. Fix: the original called
    ``df[col].interpolate(inplace=True)`` on the column selection, a chained
    assignment that is deprecated and a silent no-op under pandas
    copy-on-write; the interpolated result is now assigned back explicitly.

    Returns the same (mutated) DataFrame for chaining.
    """
    counts = df.count()
    fullest = counts.max()
    for col, non_nan in counts.items():
        if non_nan < fullest:
            df[col] = df[col].interpolate(method='linear', axis=0)
    return df
# preprocess train data
def preprocess(df, timesteps=1):
    """Turn a feature/label frame into a supervised-learning array.

    All feature columns (everything but the last) are scaled to [0, 1],
    then each row is left-padded with the scaled features of the previous
    ``timesteps - 1`` rows, so every sample carries a short history.
    The last column of ``df`` is taken as the label and re-attached
    unscaled. Result: float array of shape
    (rows - timesteps, features * timesteps + 1).
    """
    # step 4: split array into features (starting at col 5) and labels
    X = df.values[:,:-1].astype('float32')
    y = df.values[:,-1].reshape(X.shape[0],1)
    # step 5: normalize all features
    scaler = MinMaxScaler(feature_range=(0,1))
    X_scaled = scaler.fit_transform(X)
    # shifts features one row at a time and pads them to the left of existing feature set
    feature_count = X.shape[1]
    X_scaled= X_scaled[:-1,:feature_count]
    for i in range(1, timesteps):
        leftadd = X_scaled[:-1,:feature_count]
        X_scaled = np.concatenate((leftadd, X_scaled[1:,:]), axis=1)
    return np.concatenate((X_scaled, y[timesteps:]), axis=1)
# preprocess test data
def preprocess_test(df_train, df_test, timesteps=1):
    """Build the test feature matrix with the same windowing as ``preprocess``.

    The training rows (which precede the test rows in time) are prepended
    before scaling, so even the first test samples get real history; only
    the trailing ``df_test`` rows are returned. ``df_test`` is assumed to
    have no label column, so only features come back.
    """
    df_test_rowcount = df_test.shape[0]
    # will append training data, which preceeds test data in time
    # so we can create sequences using previous periods data for our predictions
    # just like we did during training
    Xtrain = df_train.values[:,:-1].astype('float32')
    X = np.concatenate((Xtrain, df_test.values.astype('float32')), axis=0)
    scaler = MinMaxScaler(feature_range=(0,1))
    X_scaled = scaler.fit_transform(X)
    # shifts features one row at a time and pads them to the left of existing feature set
    feature_count = X.shape[1]
    X_scaled= X_scaled[:-1,:feature_count]
    for i in range(1, timesteps):
        leftadd = X_scaled[:-1,:feature_count]
        X_scaled = np.concatenate((leftadd, X_scaled[1:,:]), axis=1)
    return X_scaled[-df_test_rowcount:,:]
| [
"vajiraprabuddhaka@gmail.com"
] | vajiraprabuddhaka@gmail.com |
be8e295b654f5d94d37b3a8adae135d6fb9cd366 | 735dfed95f1440d6d846f30881e37ad1b6e0ea78 | /JeongHwi/Level_2/순위 검색/순위 검색.py | d74f987c37d4e89acb21656475a2c1cf6f057632 | [] | no_license | SunivAlgo/Algorithm | bb5814bf19aa4059a5b7e506c992b41bc62bd2ec | 71d2e568153fbfd7cb16085366fac3927e1e2c54 | refs/heads/main | 2023-04-13T10:47:31.463661 | 2021-04-23T05:07:20 | 2021-04-23T05:07:20 | 332,732,134 | 1 | 0 | null | 2021-01-25T16:30:40 | 2021-01-25T11:58:09 | Python | UTF-8 | Python | false | false | 2,631 | py | from itertools import combinations
from bisect import bisect_left
infos = {}


def getinfos(score, info_):
    """Register ``score`` under all 16 wildcard variants of the 4-field info.

    Each of the four fields is either kept verbatim or replaced by '-',
    and the (possibly masked) fields are concatenated into one key of the
    module-level ``infos`` index.
    """
    global infos
    numeric_score = int(score)
    for mask in range(16):
        key = "".join(
            "-" if mask & (1 << position) else info_[position]
            for position in range(4)
        )
        infos.setdefault(key, []).append(numeric_score)
def solution(info,query):
    """'Ranking search': for each query, count the applicants whose four
    fields match (with '-' wildcards) and whose score is >= the threshold.
    """
    # Index every applicant under all 16 wildcard keys of its four fields.
    for i in info:
        info_split = i.split()
        score = info_split[-1]
        info_ = info_split[:-1]
        getinfos(score,info_)
    # Sorted score lists enable a binary search per query.
    for x in infos.keys():
        infos[x].sort()
    ans = []
    for q in query:
        q = q.replace("and","")
        q_split = q.split()
        condition = "".join(q_split[:4])
        score = int(q_split[4])
        if condition in infos:
            # Scores >= threshold sit to the right of bisect_left's insertion point.
            ans.append(len(infos[condition])-bisect_left(infos[condition],score,lo=0,hi=len(infos[condition])))
        else:
            ans.append(0)
    return ans
# 시간초과 코드
"""
from collections import Counter
def condition_Check(conditions,applicants):
sub_ans = []
notCondition = 0
for i,cond in enumerate(["language","job","career","soulFood"]):
if conditions[i] == "-":
for x in applicants[cond]:
sub_ans += applicants[cond][x]
continue
sub_ans += applicants[cond][conditions[i]]
counter = [x for x,c in Counter(sub_ans).items() if c == 4]
# print(counter)
count = 0
for i in counter:
if int(applicants["score"][i]) >= int(conditions[4]):
count+=1
return count
def solution(info,query):
# init
applicants = {"language":{"java":[],"cpp":[],"python":[]},
"job":{"backend":[],"frontend":[]},
"career":{"junior":[],"senior":[]},
"soulFood":{"chicken":[],"pizza":[]},
"score":{}}
for number,info_ in enumerate(info):
infos = info_.split()
applicants["language"][infos[0]].append(number)
applicants["job"][infos[1]].append(number)
applicants["career"][infos[2]].append(number)
applicants["soulFood"][infos[3]].append(number)
applicants["score"][number] = infos[4]
# pprint.pprint(applicants)
ans = []
# query
for q in query:
query_Split = q.split()
conditions = [query_Split[0],query_Split[2],query_Split[4],query_Split[6],query_Split[7]]
ans.append(condition_Check(conditions,applicants))
return ans
""" | [
"wjdgnl97@gmail.com"
] | wjdgnl97@gmail.com |
8fad67f8ce8ce001bfb436e710258ff19d7ff81a | 6849f09504c1b9e7e6b4bdc2a924f84ec98ec432 | /webapp/manage.py | 62c14e20c068799663d30d3c0e974d9a606680f0 | [
"Apache-2.0"
] | permissive | likit/lab-instrument-booking-app | a1c9d16635b8cff3511901d5510560349e8e5911 | c21b42342376dc54fdd11a7f87bc7609e6204020 | refs/heads/master | 2021-01-02T09:14:33.291562 | 2015-06-28T14:57:39 | 2015-06-28T14:57:39 | 37,254,301 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,377 | py | #!/usr/bin/env python
import os
from app import create_app, mongo
from flask.ext.script import Manager, Shell
# from flask.ext.migrate import Migrate, MigrateCommand
from werkzeug.security import generate_password_hash
# Build the Flask app from the FLASK_CONFIG env var (falls back to 'default')
# and wrap it in a Flask-Script manager for the CLI commands below.
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
# migrate = Migrate(app, db)
def make_shell_context():
    """Expose the Flask app and the Mongo database to `manage.py shell`."""
    context = {'app': app, 'db': mongo.db}
    return context
@manager.command
def test():
    """Run the unit tests"""
    # Discover test modules under the current directory and run them verbosely.
    import unittest
    tests = unittest.TestLoader().discover('.')
    unittest.TextTestRunner(verbosity=2).run(tests)
@manager.command
def initdb():
    """Init the database: drop the users collection and insert one seed user."""
    mongo.db.drop_collection('users')
    # NOTE(review): hard-coded test credentials -- acceptable for a dev seed only.
    password = generate_password_hash('testpass')
    user = {
        'name': 'Foo',
        'lastname': 'Jiang',
        'email': 'foo@example.com',
        'password': password,
        'pi_email': 'gao@example.com',
        'status': 'undergrad',
    }
    # password = generate_password_hash('testpass')
    # admin = {
    #     'email': 'admin@example.com',
    #     'password': password,
    # }
    # mongo.db.admins.insert(admin, safe=True)
    # NOTE(review): `safe=True` is a legacy pymongo write flag (removed in
    # pymongo 3) -- confirm the pinned pymongo version before upgrading.
    mongo.db.users.insert(user, safe=True)
# Register the interactive shell with the preloaded app/db context.
manager.add_command('shell', Shell(make_context=make_shell_context))
# manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
    manager.run()
| [
"preeyano@msu.edu"
] | preeyano@msu.edu |
57f06221f658938344d6d977bd9f135397f84e82 | ea9c1bcced1c31bcd4649115d9e27fb8cc28b360 | /functions.py | 07a3bec86be515a4f526aa97c4a6ba2d280b48c9 | [] | no_license | lweedage/hyperloglog-conductance | 7282c1425a89957f81464164ea92f2c6b1d603f9 | e53186801c25c063559cbbb5631d44bd9dcf570d | refs/heads/main | 2023-08-22T11:13:52.899784 | 2021-10-19T14:50:58 | 2021-10-19T14:50:58 | 418,906,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,763 | py | import math
import matplotlib
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import unidip.dip as dip
from cycler import cycler
# Global matplotlib configuration: PDF backend, LaTeX text rendering,
# 4:3 figures and a fixed colour cycle shared by every plot below.
fig_width = 2.809 * 2
fig_height = fig_width/4*3
matplotlib.use('PDF')
matplotlib.rcParams['axes.prop_cycle'] = cycler('color', ['DeepSkyBlue', 'DarkMagenta', 'LightPink', 'Orange', 'LimeGreen', 'OrangeRed'])
matplotlib.rcParams['font.size'] = 14
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['savefig.format'] = 'pdf'
matplotlib.rcParams['figure.figsize'] = fig_width, fig_height
matplotlib.rcParams['axes.grid'] = True
matplotlib.rcParams['lines.markersize'] = 3
matplotlib.rcParams['figure.autolayout'] = True
def make_graph(number_of_nodes, mu, average_degree, minimum_community, max_community, maximum_degree, seed):
    """Build an LFR benchmark graph without self-loops.

    Uses degree exponent 3 and community-size exponent 2; each node's set of
    communities is flattened into a comma-separated string stored under the
    'community' node attribute.
    """
    degree_exponent = 3
    community_size_exponent = 2
    graph = nx.LFR_benchmark_graph(
        number_of_nodes, degree_exponent, community_size_exponent, mu,
        average_degree=average_degree, max_degree=maximum_degree,
        min_community=minimum_community, max_community=max_community,
        seed=seed)
    # Self-loops would distort degree/edge counts downstream.
    graph.remove_edges_from(nx.selfloop_edges(graph))
    labels = {n: ','.join(map(str, graph.nodes[n]['community'])) for n in graph.nodes()}
    nx.set_node_attributes(graph, labels, 'community')
    return graph
def find_beta(b):
    """Return the HyperLogLog bias-correction constant for m = 2**b registers."""
    m = 2 ** b
    # Thresholds on the register count, smallest first.
    for limit, beta in ((16, 1.106), (32, 1.070), (64, 1.054), (128, 1.046)):
        if m <= limit:
            return beta
    return 1.03896
def find_conductance(edgeball_around_node, directed_edgeball_around_node, number_of_nodes):
    """Estimate the conductance of S_1(v), S_2(v), S_3(v) for every node.

    phi = 2 * (edges inside the ball) / (directed edge count) - 1; a node
    whose estimated edge ball is empty gets conductance 1.
    """
    conductance = [[] for _ in range(number_of_nodes)]
    for radius in (1, 2, 3):
        for node in range(number_of_nodes):
            inside = edgeball_around_node[node][radius]
            if inside == 0:
                conductance[node].append(1)
            else:
                total = directed_edgeball_around_node[node][radius]
                conductance[node].append(2 * inside / total - 1)
    return conductance
def find_real_conductance_edgebase(real_edges, real_directed_edges, number_of_nodes, t):
    """Exact conductance of S_1(v), S_2(v), S_3(v) from the realised edge sets.

    Parameters
    ----------
    real_edges, real_directed_edges : per-node lists of edge collections,
        indexed by radius (radius r is slot r).
    number_of_nodes : how many nodes to process.
    t : unused; kept only for backward compatibility with existing callers.
        Fix: the original shadowed this parameter with its radius loop
        variable, so it never had any effect -- the loop variable is renamed
        to make that explicit.
    """
    conductance = [[] for _ in range(number_of_nodes)]
    for radius_index in [0, 1, 2]:
        for node in range(number_of_nodes):
            inside = len(real_edges[node][radius_index + 1])
            if inside == 0:
                conductance[node].append(1)
            else:
                conductance[node].append(
                    2 * inside / len(real_directed_edges[node][radius_index + 1]) - 1)
    return conductance
def triangles(G):
    """Collect, for every node v, the triangles known within radius 0..3.

    Returns a list indexed by node; entry v holds four sets, where set r
    contains canonical node triples of the triangles seen within distance r
    of v. Radius 0 holds the triangles incident to v itself; every further
    radius copies the previous set and unions in the neighbours' sets.
    NOTE(review): assumes nodes are labelled 0 .. n-1 (as produced by
    make_graph) -- confirm before using on arbitrary graphs.
    """
    number_of_nodes = G.number_of_nodes()
    triangles_around_node = [[set(), set(), set(), set()] for i in range(number_of_nodes)]
    for node in range(number_of_nodes):
        for neighbor1 in nx.neighbors(G, node):
            for neighbor2 in nx.neighbors(G, node):
                if neighbor1 in set(nx.neighbors(G, neighbor2)):
                    # Canonicalise as (smallest, middle, largest) so the same
                    # triangle found from different corners dedupes in the set;
                    # c recovers the middle label via the sum.
                    a = min(node, neighbor1, neighbor2)
                    b = max(node, neighbor1, neighbor2)
                    c = node + neighbor1 + neighbor2 - a - b
                    triangles_around_node[node][0].add((a, c, b))
    for iteration in [1, 2, 3]:
        for node in range(number_of_nodes):
            triangles_around_node[node][iteration] = triangles_around_node[node][iteration - 1].copy()
            for neighbor in nx.neighbors(G, node):
                triangles_around_node[node][iteration] |= triangles_around_node[neighbor][iteration - 1]
    return triangles_around_node
def plot_triangles_distance_n(number_of_nodes, triangles_around_node, real_triangles_around_node, realisation_name,
                              b, Realisations=True):
    """Plot estimated triangle counts per node for radii 1-3 on a log scale.

    When ``Realisations`` is true, nodes are ordered by their realised count
    and Chebyshev / 'VP' concentration bounds are drawn around the
    realisation. Saves to
    Pictures/<realisation_name>_triangles_distance_1_and_2_and_3.pdf.
    ``b`` is the HyperLogLog register exponent used for the error term eta.
    """
    fig, ax = plt.subplots()
    lijst2 = [[] for i in range(3)]
    lowerbound = [[] for i in range(3)]
    upperbound = [[] for i in range(3)]
    lowerboundvp = [[] for i in range(3)]
    upperboundvp = [[] for i in range(3)]
    for n in [0, 1, 2]:
        triangles_around_node_of_distance_n = []
        real_triangles_around_node_of_distance_n = []
        for i in range(number_of_nodes):
            triangles_around_node_of_distance_n.append(triangles_around_node[i][n])
            if Realisations:
                real_triangles_around_node_of_distance_n.append(len(real_triangles_around_node[i][n]))
        if Realisations:
            sortedindex_real_triangles_n = sorted(range(number_of_nodes),
                                                  key=lambda index: real_triangles_around_node_of_distance_n[index])
            # Plotting: relative HLL error eta = beta/sqrt(2^b) plus slack.
            lijst1 = []
            delta2 = 0.0005
            etam = find_beta(b) / math.sqrt(2 ** b)
            eta = etam + delta2
            for j in sortedindex_real_triangles_n:
                lijst1.append(triangles_around_node_of_distance_n[j])
                # p4: Chebyshev half-width at confidence 0.95; p5: 'VP' (2/3 factor).
                p4 = real_triangles_around_node_of_distance_n[j] * eta / math.sqrt(0.05)
                p5 = 2 / 3 * real_triangles_around_node_of_distance_n[j] * eta / math.sqrt(0.05)
                lowerbound[n].append(real_triangles_around_node_of_distance_n[j] - p4)
                upperbound[n].append(real_triangles_around_node_of_distance_n[j] + p4)
                lowerboundvp[n].append(real_triangles_around_node_of_distance_n[j] - p5)
                upperboundvp[n].append(real_triangles_around_node_of_distance_n[j] + p5)
            lijst2[n] = sorted(real_triangles_around_node_of_distance_n)
        else:
            lijst1 = sorted(triangles_around_node_of_distance_n)
        # ------------------------------------ plot triangles --------------------------------------
        ax.plot(range(number_of_nodes), lijst1, 'o', label=f'$S_{n + 1}(v)$')
    First = True
    if Realisations:
        # Only the first radius contributes legend entries for the bound lines.
        for i in range(3):
            if First:
                ax.plot(range(number_of_nodes), lowerbound[i], '-', color = 'LimeGreen',
                        label=str("Chebyshev"))
                ax.plot(range(number_of_nodes), upperbound[i], '-', color = 'LimeGreen')
                ax.plot(range(number_of_nodes), lowerboundvp[i], '-', color = 'Orange',
                        label=str("VP"))
                ax.plot(range(number_of_nodes), upperboundvp[i], '-', color = 'Orange')
                ax.plot(range(number_of_nodes), lijst2[i], '-', color = 'OrangeRed', label='Realisation')
                First = False
            else:
                ax.plot(range(number_of_nodes), lowerbound[i], '-', color = 'LimeGreen')
                ax.plot(range(number_of_nodes), upperbound[i], '-', color = 'LimeGreen')
                ax.plot(range(number_of_nodes), lowerboundvp[i], '-', color = 'Orange')
                ax.plot(range(number_of_nodes), upperboundvp[i], '-', color = 'Orange')
                ax.plot(range(number_of_nodes), lijst2[i], color = 'OrangeRed')
    ax.set(xlabel='Node $v$', ylabel='$|\Delta_r(v)|$')
    #plt.title('Number of triangles in $S_1(v)$, $S_2(v)$ and $S_3(v)$')#, fontsize = 12)
    name = str('Pictures/' + realisation_name + '_triangles_distance_1_and_2_and_3.pdf')
    ax.set_yscale('log')
    ax.legend()
    plt.savefig(name)
    plt.show()
def plot_triangles_of_multiple_distances(number_of_nodes, triangles_around_node, realisation_name, degreelist):
    """Plot the triangle count per radius 0..3 for the 10 lowest-degree and
    10 highest-degree nodes; saves to
    Pictures/<realisation_name>_triangles_of_all_distances.pdf."""
    sortedindex = sorted(range(number_of_nodes),
                         key=lambda index: degreelist[index])
    fig, ax = plt.subplots()
    First = True
    for node in sortedindex[:10]:
        lijst = []
        for distance in range(4):
            lijst.append(triangles_around_node[node][distance])
        # Dead debug branch (3 == 2 is never true); kept for reference.
        if First and 3 == 2:
            data = np.msort([triangles_around_node[node][distance] for node in range(number_of_nodes)])
            print('DIP for triangles in $S_', distance, '(v)$ gives us', dip.diptst(data))
        if First:
            ax.plot([i for i in range(4)], lijst, ':', color='lightblue')
            ax.plot([i for i in range(4)], lijst, 'o',markersize = 6, color = 'DeepSkyBlue', label=str('Low degree nodes'))
            First = False
        else:
            ax.plot([i for i in range(4)], lijst, ':', color='lightblue')
            ax.plot([i for i in range(4)], lijst, 'o',markersize = 6, color = 'DeepSkyBlue')
    First = True
    for node in sortedindex[-10:]:
        lijst = []
        for distance in range(4):
            lijst.append(triangles_around_node[node][distance])
        if First:
            ax.plot([i for i in range(4)], lijst, ':', color='pink')
            ax.plot([i for i in range(4)], lijst, 'o', markersize = 6, color = 'OrangeRed', label=str('High degree nodes'))
            First = False
        else:
            ax.plot([i for i in range(4)], lijst, ':', color='pink')
            ax.plot([i for i in range(4)], lijst, 'o', markersize = 6, color = 'OrangeRed')
    ax.set(xlabel='Radius $r$', ylabel='$|\hat{\Delta}_r(v)|$')#, title=str('com-Amazon: Triangles in different radii $r$'))
    name = str('Pictures/' + realisation_name + '_triangles_of_all_distances.pdf')
    major_ticks = np.arange(0, 4, 1)
    ax.set_xticks(major_ticks)
    ax.legend()
    plt.savefig(name)
    plt.show()
def plot_conductance_real(number_of_nodes, real_conductance, conductance, real_edges, real_directed_edges,
                          realisation_name,
                          b, distance, Realisations=True):
    """Plot estimated vs realised conductance in S_<distance+1>(v) with
    Chebyshev and 'VP' bounds, print the mean relative bound widths, and
    (when ``Realisations``) also plot the error histogram. Saves both
    figures under Pictures/.
    """
    fig, ax = plt.subplots()
    # eta3: relative HLL error beta/sqrt(2^b) plus slack terms.
    delta1 = 0.00005
    delta2 = 0.0005
    etam = find_beta(b) / math.sqrt(2 ** b)
    eta3 = etam + delta2
    sortedindex_real_conductance = sorted(range(number_of_nodes), key=lambda index: real_conductance[index][distance])
    # NOTE(review): sorted_conductance / sorted_real_conductance are built from
    # indices [1:10] but never used afterwards -- dead code?
    sorted_conductance = []
    sorted_real_conductance = []
    for i in sortedindex_real_conductance[1:10]:
        sorted_real_conductance.append(real_conductance[i][distance])
        sorted_conductance.append(conductance[i][distance])
    # Plotting
    lijst1 = []
    lijst2 = []
    chebyshevlowervariance = []
    chebyshevuppervariance = []
    vplowervariance = []
    vpuppervariance = []
    vpbounds = 0
    chebbounds = 0
    for j in sortedindex_real_conductance:
        lijst1.append(conductance[j][distance])
        lijst2.append(real_conductance[j][distance])
        # Chebyshev: relative errors epsilon (edges) and delta (directed edges).
        p = eta3 / math.sqrt(0.05) * math.sqrt(
            len(real_edges[j][distance]) ** 2 + len(real_directed_edges[j][distance]) ** 2)
        epsilon = p / len(real_edges[j][distance]) - delta1
        delta = p / len(real_directed_edges[j][distance]) - delta1
        chebyshevlowervariance.append(real_conductance[j][distance] * (1 - epsilon) / (1 + delta))
        chebyshevuppervariance.append(real_conductance[j][distance] * (1 + epsilon) / (1 - delta))
        # 'VP' bound: same shape with the sqrt(4/9) factor.
        vp = math.sqrt(4 / 9) * eta3 / math.sqrt(0.05) * math.sqrt(
            len(real_edges[j][distance]) ** 2 + len(real_directed_edges[j][distance]) ** 2)
        epsilonvp = vp / len(real_edges[j][distance]) - delta1
        deltavp = vp / len(real_directed_edges[j][distance]) - delta1
        vplowervariance.append(real_conductance[j][distance] * (1 - epsilonvp) / (1 + deltavp))
        vpuppervariance.append(real_conductance[j][distance] * (1 + epsilonvp) / (1 - deltavp))
        vpbounds += 1 - (1 + epsilonvp) / (1 - deltavp)
        chebbounds += 1 - (1 + epsilon) / (1 - delta)
    # Average relative width of each bound over all nodes.
    print('VP:', vpbounds / number_of_nodes)
    print('Cheb:', chebbounds / number_of_nodes)
    # ------------------------------------ plot conductance --------------------------------------
    #plt.title('Conductance')
    ax.plot(range(number_of_nodes), lijst1, 'o', label='Estimate')
    ax.plot(range(number_of_nodes), lijst2, color = 'OrangeRed', label='Realisation')
    ax.plot(range(number_of_nodes), chebyshevlowervariance, '-', color = 'LimeGreen',
            label=str('Chebyshev'))
    ax.plot(range(number_of_nodes), chebyshevuppervariance, '-', color =
            'LimeGreen')
    ax.plot(range(number_of_nodes), vplowervariance, '-', color = 'Orange',
            label=str('VP'))
    ax.plot(range(number_of_nodes), vpuppervariance, '-', color = 'Orange')
    #plt.title('Conductance in $S_' + str(distance + 1) + '(v)$')
    plt.ylim(0.45, 1.05)
    name = str(
        "Pictures/" + realisation_name + '_conductance_with_Chebyshev_and_VP_distance_' + str(distance + 1) + '.pdf')
    plt.xlabel('Node $v$')
    plt.ylabel('Conductance $\phi(S_' + str(distance + 1) +
               '(v))$')
    ax.legend()
    plt.savefig(name)
    plt.show()
    if Realisations:
        fig, ax = plt.subplots()
        differences = [lijst2[i] - lijst1[i] for i in range(number_of_nodes)]
        # NOTE(review): average_difference is computed but never used.
        average_difference = sum(differences) / number_of_nodes
        kwargs = dict(alpha=0.5, bins=50, density=False, stacked=True)
        # Plot
        plt.hist(differences, **kwargs)
        #plt.gca().set(title='Error in estimated conductance in $S_' + str(distance + 1) + '(v)$')
        plt.xlabel('Realisation - Estimate')
        ax.grid()
        name = str(
            "Pictures/" + realisation_name + '_conductance_difference_histogram_distance_' + str(distance + 1) + '.pdf')
        plt.savefig(name)
        plt.show()
def plot_conductance(number_of_nodes, conductance, realisation_name):
    """Plot the sorted estimated conductances per node and save to
    Pictures/<realisation_name>_conductance.pdf.

    NOTE(review): indices [1, 2] are labelled S_1(v)/S_2(v), while other
    functions here treat index r as radius r+1 -- confirm the intended radii.
    """
    fig, ax = plt.subplots()
    # ------------------------------------ plot conductance --------------------------------------
    for distance in [1, 2]:
        ax.plot(range(number_of_nodes), sorted([conductance[i][distance] for i in range(number_of_nodes)]), 'o',
                label=str('$S_' + str(distance) + '(v)$'))
    #plt.title('Estimated conductance $\hat{\phi}(S_r(v))$')
    name = str("Pictures/" + realisation_name + '_conductance.pdf')
    plt.xlabel('Node $v$')
    plt.ylabel('Conductance $\hat{\phi}(S_r(v))$')
    ax.legend()
    plt.savefig(name)
    plt.show()
def plot_transitivity_Estimate(number_of_nodes, transitivity, realisation_name):
    """Plot the sorted estimated transitivities for radii 1-3, print a DIP
    (unimodality) test per radius, and save to
    Pictures/<realisation_name>_transitivity.pdf."""
    fig, ax = plt.subplots()
    #plt.title('Estimated transitivity $\hat{t}(S_r(v))$')
    for distance in [0, 1, 2]:
        ax.plot(range(number_of_nodes), sorted([transitivity[i][distance] for i in range(number_of_nodes)]), 'o',
                label=str('$S_' + str(distance + 1) + '(v)$'))
        data = np.msort([transitivity[node][distance] for node in range(number_of_nodes)])
        print('DIP for transitivity in S_', distance + 1 , '(v) gives us', dip.diptst(data))
    name = str("Pictures/" + realisation_name + '_transitivity.pdf')
    plt.xlabel('Node $v$')
    plt.ylabel(str('$\hat{t}(S_r(v))$'))
    ax.legend()
    plt.savefig(name)
    plt.show()
def find_real_cycles(number_of_nodes, real_edges_around_node, real_directed_edges, real_ball_around_node, r):
    """Exact independent-cycle count of each ball S_r(v):
    |directed edges| - |edges| - |nodes in ball| + 1."""
    return [
        len(real_directed_edges[node][r])
        - len(real_edges_around_node[node][r])
        - len(real_ball_around_node[node][r])
        + 1
        for node in range(number_of_nodes)
    ]
def find_cycles(number_of_nodes, edges_around_node, directed_edges, ball_around_node, r):
    """Estimated independent-cycle count of each ball S_r(v) from the
    (HLL-estimated) counts: directed - edges - ball + 1."""
    return [
        directed_edges[node][r] - edges_around_node[node][r] - ball_around_node[node][r] + 1
        for node in range(number_of_nodes)
    ]
def plot_cycles(number_of_nodes, cycles, real_cycles, real_edges, real_directed_edges, real_ball, realisation_name, b,
                Realisations):
    """Plot estimated cycle counts (with Chebyshev/'VP' bounds and the
    realisation when ``Realisations``), print the mean relative bound widths
    and a DIP test, and save the figure(s) under Pictures/."""
    fig, ax = plt.subplots()
    if Realisations:
        sortedindex_real_cycles = sorted(range(number_of_nodes), key=lambda index: real_cycles[index])
        vpbounds = 0
        chebbounds = 0
        # eta3: relative HLL error beta/sqrt(2^b) plus slack terms.
        delta1 = 0.00005
        delta2 = 0.0005
        etam = find_beta(b) / math.sqrt(2 ** b)
        eta3 = etam + delta2
        chebyshevlowervariance = []
        chebyshevuppervariance = []
        vplowervariance = []
        vpuppervariance = []
        for j in sortedindex_real_cycles:
            # 'VP' half-width K from the edge, ball and directed-edge errors.
            p = math.sqrt(4 / 9) * 1 / math.sqrt(0.05) * eta3 * math.sqrt(
                len(real_edges[j][1]) ** 2 + len(real_ball[j][1]) ** 2 + len(real_directed_edges[j][1]) ** 2)
            epsilon = p / len(real_edges[j][1]) + delta1
            xi = p / len(real_ball[j][1]) + delta1
            gamma = p / len(real_directed_edges[j][1]) + delta1
            K = gamma * len(real_directed_edges[j][1]) + epsilon * len(real_edges[j][1]) + xi * len(real_ball[j][1])
            vpbounds += K/max(1, real_cycles[j])
            vplowervariance.append(real_cycles[j] - K)
            vpuppervariance.append(real_cycles[j] + K)
            # Chebyshev half-width (same shape, without the sqrt(4/9) factor).
            p = 1 / math.sqrt(0.05) * eta3 * math.sqrt(
                len(real_edges[j][1]) ** 2 + len(real_ball[j][1]) ** 2 + len(real_directed_edges[j][1]) ** 2)
            epsilon = p / len(real_edges[j][1]) + delta1
            xi = p / len(real_ball[j][1]) + delta1
            gamma = p / len(real_directed_edges[j][1]) + delta1
            K = gamma * len(real_directed_edges[j][1]) + epsilon * len(real_edges[j][1]) + xi * len(real_ball[j][1])
            # NOTE(review): upper gets -K and lower gets +K here -- the two
            # Chebyshev lists look swapped; confirm against the VP lists above.
            chebyshevuppervariance.append(real_cycles[j] - K)
            chebyshevlowervariance.append(real_cycles[j] + K)
            chebbounds += K/max(1, real_cycles[j])
        print('VP:', vpbounds / number_of_nodes)
        print('Cheb:', chebbounds / number_of_nodes)
        # ------------------------------------ plot conductance --------------------------------------
        sorted_cycles = []
        for i in sortedindex_real_cycles:
            sorted_cycles.append(cycles[i])
        ax.plot(range(number_of_nodes), chebyshevlowervariance, '-', color = 'LimeGreen',
                label=str('Chebyshev'))
        ax.plot(range(number_of_nodes), chebyshevuppervariance, '-', color = 'LimeGreen')
        ax.plot(range(number_of_nodes), vplowervariance, '-', color = 'Orange',
                label=str('VP'))
        ax.plot(range(number_of_nodes), vpuppervariance, '-', color = 'Orange')
        data = np.msort(cycles)
        print('DIP for cycles gives us', dip.diptst(data))
    else:
        sorted_cycles = sorted(cycles)
    ax.plot(range(number_of_nodes), sorted_cycles, 'o', label="Estimate")
    if Realisations:
        ax.plot(range(number_of_nodes), sorted(real_cycles), '-', color = 'OrangeRed', label="Realisation")
    name = str("Pictures/" + realisation_name + '_cycles_vp_inequality_and_chebyshev.pdf')
    plt.xlabel('Node $v$')
    plt.ylabel("$C(S_2(v))$")
    #plt.title(str('Number of cycles of length 3 or 4'))
    ax.legend()
    plt.savefig(name)
    plt.show()
    if Realisations:
        fig, ax = plt.subplots()
        differences = [real_cycles[i] - cycles[i] for i in range(number_of_nodes)]
        kwargs = dict(alpha=0.5, bins=50, density=False, stacked=True)
        # Plot
        plt.hist(differences, **kwargs)
        #plt.gca().set(title='Error in estimated number of cycles in $S_1(v)$')
        plt.xlabel('Realisation - Estimate')
        ax.grid()
        plt.xlim(-5, 5)
        name = str(
            "Pictures/" + realisation_name + '_cycles_difference_histogram_distance_1.pdf')
        plt.savefig(name)
        plt.show()
def find_transitivity(triangles, wedges, number_of_nodes):
    """Estimated transitivity 3 * triangles / wedges per node, for radii 1-3
    (slots 0-2 of the per-node lists)."""
    result = [[] for _ in range(number_of_nodes)]
    for radius_index in (0, 1, 2):
        for node in range(number_of_nodes):
            result[node].append(3 * triangles[node][radius_index] / wedges[node][radius_index])
    return result
def find_real_transitivity(real_wedges, real_triangles, number_of_nodes):
    """Exact transitivity 3 * |triangle set| / wedge count per node, for
    radii 1-3 (slots 0-2 of the per-node lists)."""
    result = [[] for _ in range(number_of_nodes)]
    for radius_index in (0, 1, 2):
        for node in range(number_of_nodes):
            triangle_count = len(real_triangles[node][radius_index])
            result[node].append(3 * triangle_count / real_wedges[node][radius_index])
    return result
def plot_transitivity(number_of_nodes, transitivity, real_transitivity, real_wedges, real_triangles, distance,
                      realisation_name, Realisations, b):
    """Plot estimated vs. realised per-node transitivity at one radius.

    Nodes are ordered by their realised transitivity; when ``Realisations``
    is true, Chebyshev and Vysochanskij-Petunin (VP) error bands are drawn
    around the realisation.  ``transitivity``/``real_transitivity`` are
    indexed [node][radius]; ``b`` is the sketch register exponent (2**b
    registers) used to size the bands.  Saves the figure to
    Pictures/<realisation_name>_transitivity_distance_<r+1>.pdf and shows it.
    """
    fig, ax = plt.subplots()
    delta1 = 0.00005
    delta2 = 0.0005
    # etam: relative standard error of a sketch with 2**b registers
    etam = find_beta(b) / math.sqrt(2 ** b)
    eta3 = etam + delta2
    print(eta3)
    First = True
    blub = True
    if Realisations:
        for distance in [distance]:
            chebyshevlowervariance = []
            chebyshevuppervariance = []
            vplowervariance = []
            vpuppervariance = []
            # order nodes by realised transitivity so the curves are monotone
            sortedindex_real_transitivity = sorted(range(number_of_nodes),
                                                   key=lambda index: real_transitivity[index][distance])
            sorted_transitivity = []
            sortedreal_transitivity = []
            for i in sortedindex_real_transitivity:
                sorted_transitivity.append(transitivity[i][distance])
                sortedreal_transitivity.append(real_transitivity[i][distance])
                if transitivity[i][distance] == 0:
                    # no estimate: collapse all bounds onto the realised value
                    chebyshevlowervariance.append(real_transitivity[i][distance])
                    chebyshevuppervariance.append(real_transitivity[i][distance])
                    vplowervariance.append(real_transitivity[i][distance])
                    vpuppervariance.append(real_transitivity[i][distance])
                else:
                    # Chebyshev deviation at confidence level 1 - 0.05
                    p = math.sqrt(eta3**2 *
                                  (real_wedges[i][distance] ** 2 + len(real_triangles[i][distance]) ** 2) / 0.05)
                    epsilon = p / real_wedges[i][distance] + delta1
                    delta = p / len(real_triangles[i][distance]) + delta1
                    # VP deviation: factor 2/3 tighter than Chebyshev
                    p1 = 2 / 3 * math.sqrt(eta3**2 * (real_wedges[i][distance] ** 2 + len(
                        real_triangles[i][distance]) ** 2) / 0.05)
                    epsilon1 = p1 / real_wedges[i][distance] + delta1
                    # NOTE(review): this rebinds the constant delta2 defined at the
                    # top of the function; eta3 was computed before the loop so it
                    # is unaffected, but the shadowing is fragile -- confirm intent.
                    delta2 = p1 / len(real_triangles[i][distance]) + delta1
                    chebyshevlowervariance.append(real_transitivity[i][distance] * (1 - delta) / (1 + epsilon))
                    chebyshevuppervariance.append(real_transitivity[i][distance] * (1 + delta) / (1 - epsilon))
                    vplowervariance.append(real_transitivity[i][distance] * (1 - delta2) / (1 + epsilon1))
                    vpuppervariance.append(real_transitivity[i][distance] * (1 + delta2) / (1 - epsilon1))
            if blub:
                # label the band curves only once so the legend has single entries
                ax.plot(range(number_of_nodes), chebyshevlowervariance, '-', color = 'LimeGreen',
                        label=str('Chebyshev'))
                ax.plot(range(number_of_nodes), vplowervariance, '-', color = 'Orange',
                        label=str('VP'))
                blub = False
            else:
                ax.plot(range(number_of_nodes), chebyshevlowervariance, '-', color = 'LimeGreen',
                        )
                ax.plot(range(number_of_nodes), vplowervariance, '-', color = 'Orange'
                        )
            ax.plot(range(number_of_nodes), chebyshevuppervariance, '-', color = 'LimeGreen')
            ax.plot(range(number_of_nodes), vpuppervariance, '-', color = 'Orange')
            ax.plot(range(number_of_nodes), sorted_transitivity, 'o',
                    label=str("Estimate"))
            if Realisations:
                if First:
                    ax.plot(range(number_of_nodes), sortedreal_transitivity, '-', color = 'OrangeRed', label="Realisation")
                    First = False
                else:
                    ax.plot(range(number_of_nodes), sortedreal_transitivity, '-', color = 'OrangeRed')
    name = str("Pictures/" + realisation_name + '_transitivity_distance_' + str(distance + 1) + '.pdf')
    plt.ylabel(str("Transitivity $t(S_" + str(distance +1) + "(v)$"))
    plt.xlabel('Node $v$')
    #plt.title(str('Transitivity in $S_' + str(distance + 1) + '(v)$'))
    ax.legend()
    plt.savefig(name)
    plt.show()
def plot_transitivity_vs_conductance(real_conductance, real_transitivity, realisation_name):
    """Scatter-plot realised transitivity against realised conductance and
    save the figure to Pictures/<realisation_name>_transitivity_vs_conductance.pdf.
    Both inputs are parallel per-node sequences.
    """
    fig, ax = plt.subplots()
    ax.plot(real_conductance, real_transitivity, 'o')
    name = str("Pictures/" + realisation_name + '_transitivity_vs_conductance.pdf')
    #plt.title(str('Transitivity vs conductance'))
    plt.ylabel('Transitivity')
    plt.xlabel('Conductance')
    plt.savefig(name)
    plt.show()
def plot_wedges(number_of_nodes, realisation_name, wedges_around_node, real_wedges, Realisations, b, distance=1):
    """Plot estimated vs. realised wedge counts per node at ``distance``.

    When ``Realisations`` is true the nodes are ordered by realised wedge
    count and a Chebyshev band is drawn around the realisation; otherwise
    the estimates themselves are sorted.  ``b`` is the sketch register
    exponent (2**b registers) used to size the band.  Saves the figure to
    Pictures/<realisation_name>_wedges_distance_<distance>.pdf and shows it.
    """
    lowerbound = []
    upperbound = []
    delta2 = 0.0005
    # eta: relative standard error of a sketch with 2**b registers
    etam = find_beta(b) / math.sqrt(2 ** b)
    eta = etam + delta2
    fig, ax = plt.subplots()
    if Realisations:
        sortedindex_real_wedges = sorted(range(number_of_nodes), key=lambda index: real_wedges[index][distance])
        sorted_wedges = []
        sorted_real_wedges = []
        for i in sortedindex_real_wedges:
            sorted_wedges.append(wedges_around_node[i][distance])
            sorted_real_wedges.append(real_wedges[i][distance])
            # Chebyshev deviation at confidence level 1 - 0.05
            p4 = real_wedges[i][distance] * eta / math.sqrt(0.05)
            lowerbound.append(real_wedges[i][distance] - p4)
            upperbound.append(real_wedges[i][distance] + p4)
        ax.plot(range(number_of_nodes), lowerbound, 'g-',
                label=str("Chebyshev"))
        ax.plot(range(number_of_nodes), upperbound, 'g-')
    else:
        sorted_wedges = []
        for i in range(number_of_nodes):
            sorted_wedges.append(wedges_around_node[i][distance])
        sorted_wedges = sorted(sorted_wedges)
    ax.plot(range(number_of_nodes), sorted_wedges, 'o', label="Estimate")
    if Realisations:
        ax.plot(range(number_of_nodes), sorted_real_wedges, '-', color = 'OrangeRed', label="Realisation")
    name = str("Pictures/" + realisation_name + '_wedges_distance_' + str(distance) + '.pdf')
    #plt.title(str('Wedges in $S_' + str(distance) + '(v)$'))
    plt.ylabel('#wedges')
    plt.xlabel('Node')
    ax.legend()
    plt.savefig(name)
    plt.show()
def plot_wedges_triangles(wedges_around_node, triangles_around_node, distance, number_of_nodes, realisation_name):
    """Scatter-plot estimated triangle counts against estimated wedge counts
    for radii 1..3 (one series per radius) and save the figure as PDF.

    NOTE(review): the ``distance`` parameter is unused -- the loop below
    immediately rebinds it; confirm whether the argument can be dropped.
    """
    fig, ax = plt.subplots()
    for distance in [2, 1, 0]:
        wedges = []
        triangles = []
        for i in range(number_of_nodes):
            wedges.append(wedges_around_node[i][distance])
            triangles.append(triangles_around_node[i][distance])
        ax.plot(wedges, triangles, 'o', label = str('$S_' + str(distance + 1) + '(v)$'))
    name = str("Pictures/" + realisation_name + '_wedges_vs_triangles_distance.pdf')
    #plt.title(str('Triangles versus wedges over different radii $r$'))
    plt.ylabel('$|\hat{\Delta}_r(v)|$')
    plt.xlabel('$|\hat{w}(S_r(v))|$')
    plt.legend()
    plt.savefig(name)
    plt.show()
def generic_plot(x, y, realisation_name, specific_name, xlabel, ylabel):
    """Scatter-plot ``y`` against ``x`` on the unit square together with a
    reference line, and save to Pictures/<realisation_name><specific_name>.pdf.

    Intended for estimate-vs-realisation comparisons of quantities in [0, 1]
    (both axes are clamped to that range).
    """
    fig, ax = plt.subplots()
    # reference line: a perfect estimator lies on the diagonal
    ax.plot(range(len(x)), range(len(x)), '-')
    ax.plot(x, y, 'o')
    name = str(
        "Pictures/" + realisation_name + specific_name + '.pdf')
    plt.xlabel(str(xlabel))
    plt.ylabel(str(ylabel))
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    #plt.title(specific_name)
    plt.savefig(name)
    plt.show()
def plot_conductance_of_multiple_distances(number_of_nodes, conductance, realisation_name, degreelist):
    """Plot conductance over radii r = 0..3 for the five lowest- and five
    highest-degree nodes, print a DIP multimodality test per radius, and
    save the figure to Pictures/<realisation_name>_conductance_of_all_distances.pdf.

    ``conductance`` is indexed [node][radius]; ``degreelist`` gives the
    degree used to pick the extreme nodes.
    """
    # node indices ordered by degree: first five = low, last five = high degree
    sortedindex = sorted(range(number_of_nodes),
                         key=lambda index: degreelist[index])
    fig, ax = plt.subplots()
    First = True
    for node in sortedindex[:5]:
        # the r = 0 data point is fixed at 1
        lijst = [1]
        for distance in range(3):
            lijst.append(conductance[node][distance])
            # NOTE(review): this DIP test is recomputed for every one of the
            # five low-degree nodes although it only depends on ``distance``.
            data = np.msort([conductance[node][distance] for node in range(number_of_nodes)])
            print('DIP for conductance in S_', distance, '(v) gives us', dip.diptst(data))
        if First:
            # label only the first series so the legend has a single entry
            ax.plot([i for i in range(4)], lijst, 'o', color = 'DeepSkyblue' ,markersize = 6, label=str('Low degree nodes'))
            ax.plot([i for i in range(4)], lijst, ':', color='lightblue')
            First = False
        else:
            ax.plot([i for i in range(4)], lijst, 'o', color = 'DeepSkyblue', markersize = 6,)
            ax.plot([i for i in range(4)], lijst, ':', color='lightblue')
    First = True
    for node in sortedindex[-5:]:
        lijst = [1]
        for distance in range(3):
            lijst.append(conductance[node][distance])
        if First:
            ax.plot([i for i in range(4)], lijst, 'o', markersize = 6,color = 'OrangeRed', label=str('High degree nodes'))
            ax.plot([i for i in range(4)], lijst, ':', color='pink')
            First = False
        else:
            ax.plot([i for i in range(4)], lijst, 'o', markersize = 6,color = 'OrangeRed')
            ax.plot([i for i in range(4)], lijst, ':', color='pink')
    ax.set(xlabel='Radius $r$', ylabel='Conductance $\hat{\phi}(S_r(v))$')#, title=str('com-Amazon: Conductance in different radii $r$'))
    name = str('Pictures/' + realisation_name + '_conductance_of_all_distances.pdf')
    major_ticks = np.arange(0, 4, 1)
    ax.set_xticks(major_ticks)
    ax.legend()
    plt.savefig(name)
    plt.show()
def estimate_time(b, edges, nodes):
    """Print a rough runtime estimate for the sketch computation.

    ``b`` is the register exponent (only 10, 12, 14 and 16 are supported;
    any other value returns silently), ``edges``/``nodes`` the graph size.
    The printed total assumes one initialization plus three iterations.
    Always returns None.
    """
    # b -> (init/edge, init/node, iter/edge, iter/node) cost coefficients
    timing_table = {
        10: (0.0005, 0.0026, 0.0001, 0.00006),
        12: (0.0006, 0.0029, 0.0002, 0.0001),
        14: (0.0007, 0.0036, 0.0004, 0.0022),
        # NOTE(review): 0.00013 breaks the pattern of the other init/edge
        # coefficients (0.0005..0.0007) -- possible typo for 0.0013; the
        # value is kept exactly as in the original.
        16: (0.00013, 0.0067, 0.00013, 0.0066),
    }
    if b not in timing_table:
        return
    init_per_edge, init_per_node, iter_per_edge, iter_per_node = timing_table[b]
    init_costs = (init_per_edge * edges, init_per_node * nodes)
    iter_costs = (iter_per_edge * edges, iter_per_node * nodes)
    min_initialization, max_initialization = min(init_costs), max(init_costs)
    min_iteration_time, max_iteration_time = min(iter_costs), max(iter_costs)
    print(f"The initialization is going to take between {min_initialization:.1f} and {max_initialization:.1f} seconds")
    print(f"One iteration is going to take between {min_iteration_time:.1f} and {max_iteration_time:.1f} seconds")
    print(f"So this program will be finished in {max_initialization + 3 * max_iteration_time:.1f} seconds, which is "
          f"{(max_initialization + 3 * max_iteration_time) / 60:.1f} minutes")
def plot_vp_chebyshev_triangles(b, realisation_name):
    """Plot the relative width of the Chebyshev and VP triangle bounds (as a
    percentage of the triangle count) for register exponents 8..23 and save
    the figure as PDF.

    NOTE(review): the ``b`` argument only feeds the eta computed before the
    loop, which is immediately overwritten inside the loop -- confirm the
    parameter is still needed.
    """
    cheb = []
    vp = []
    delta2 = 0.0005
    etam = find_beta(b) / math.sqrt(2 ** b)
    eta = etam + delta2
    for i in range(8, 24):
        # relative standard error of a sketch with 2**i registers
        etam = find_beta(i) / math.sqrt(2 ** i)
        eta = etam + delta2
        cheb.append(eta / math.sqrt(0.05) * 100)
        # the VP inequality tightens the Chebyshev bound by a factor 2/3
        vp.append(2 / 3 * eta / math.sqrt(0.05) * 100)
    fig, ax = plt.subplots()
    ax.plot(range(8, 24), cheb, label='Chebyshev')
    ax.plot(range(8, 24), vp, label='VP')
    name = str("Pictures/" + realisation_name + '_chebyshev_and_VP_bounds_in_triangles.pdf')
    #plt.title(str('Size of Chebyshev and VP bounds in triangles'))
    plt.ylabel('Percentage of number of triangles')
    plt.xlabel('$b$ ($p = 2^b$ registers)')
    major_ticks = np.arange(8, 24, 2)
    ax.set_xticks(major_ticks)
    plt.legend()
    plt.savefig(name)
    plt.show()
def difference_edges_directed_edges(edgeball_around_node, real_edges_around_node, directed_edgeball_around_node, real_directed_edges, number_of_nodes, realisation_name):
    """Scatter-plot the estimation error of edge counts against the error of
    directed-edge counts in S_1(v) and S_2(v); each radius is plotted twice
    (full range and zoomed to [-1, 1]) and saved as a PDF.

    Inputs: estimated counts ``edgeball_around_node`` /
    ``directed_edgeball_around_node`` (indexed [node][radius], numeric) and
    realised sets ``real_edges_around_node`` / ``real_directed_edges``
    (indexed [node][radius], collections whose length is the true count).

    Bug fix: ``plt.savefig`` is now called *before* ``plt.show``.  On
    non-interactive backends show() consumes the current figure, so the
    original call order wrote empty PDF files.
    """
    def _error_scatter(x_errors, y_errors, pdf_name, zoom):
        # One error-vs-error scatter; clamps both axes to [-1, 1] when zoomed.
        fig, ax = plt.subplots()
        ax.plot(x_errors, y_errors, '+')
        plt.xlabel('Error in number of directed edges')
        plt.ylabel('Error in number of edges')
        if zoom:
            plt.xlim(-1, 1)
            plt.ylim(-1, 1)
        ax.grid()
        plt.savefig(pdf_name)  # save first: show() may clear the figure
        plt.show()

    for radius, suffix in ((1, 'S1'), (2, 'S2')):
        difference_edges = [edgeball_around_node[node][radius] - len(real_edges_around_node[node][radius])
                            for node in range(number_of_nodes)]
        difference_directed_edges = [directed_edgeball_around_node[node][radius] - len(real_directed_edges[node][radius])
                                     for node in range(number_of_nodes)]
        if radius == 2:
            # kept from the original: debug dump of the radius-2 error lists
            print(difference_edges)
            print(difference_directed_edges)
        base = str('Pictures/' + realisation_name + '_difference_edges_' + suffix + '(v)')
        _error_scatter(difference_directed_edges, difference_edges, base + '.pdf', zoom=False)
        _error_scatter(difference_directed_edges, difference_edges, base + '_zoomed_in.pdf', zoom=True)
| [
"l.weedage@utwente.nl"
] | l.weedage@utwente.nl |
06dab2f9670cd83f665f86cf716d9db211053760 | ad07d6ab992e3cc55288c46b692c7d826f028638 | /NoWire_v1/server/monitor.py | a7a55f31945b696d1c75e5fe2124444fd57c04c6 | [] | no_license | Ribster/NoWire | 21d26d0cca39a72f2132f34f1874279c1ca06aab | 16b2cd3cdb2017ed3a3b112324d7cbaa809e5988 | refs/heads/master | 2021-01-10T16:13:33.676210 | 2016-09-04T15:00:43 | 2016-09-04T15:00:43 | 46,177,242 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,165 | py | #!/usr/bin/python
import paho.mqtt.client as mqtt
import MySQLdb
import time
from time import sleep
from array import *
# Timestamp of the last online-state sweep; the main loop compares it to
# time.time() to run check_onlinestates() roughly every 90 seconds.
ts = time.time()
# NOTE(review): database credentials are hard-coded here; move them to a
# config file or environment variables before deploying.
db = MySQLdb.connect(host="localhost", # your host, usually localhost
                     user="nowire", # your username
                     passwd="secret", # your password
                     db="NoWire") # name of the data base
# autocommit so every execute() is persisted without explicit commit calls
db.autocommit(True)
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
    """MQTT on-connect callback: log the result code and (re)subscribe to the
    heartbeat topic "sensors".  Per-sensor topics are added later, when a
    module's first heartbeat arrives (see subscribeTopicsSensors)."""
    print("Connected with result code "+str(rc))
    # Subscribing in on_connect() means that if we lose the connection and
    # reconnect then subscriptions will be renewed.
    client.subscribe("sensors")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
    """MQTT on-message callback with two paths:

    1. topic == "sensors": a module heartbeat ("<?> <moduleId> ip=<addr>").
       Marks the module online, refreshes/creates its wifimodule_online row,
       subscribes to its sensor topics and republishes its output states.
    2. any other topic: a sensor reading "<n>-<value>" on
       sensors/<moduleId>/<sensorTopic>.  Updates the n-th matching sensor's
       current value and maintains interval rows in sensor_data.

    NOTE(review): topic/payload fragments are concatenated straight into SQL
    strings throughout -- an MQTT client on the broker can inject SQL.
    Parameterized queries (cur.execute(sql, params)) should be used instead.
    """
    #print("the client is: " + str(client))
    #print("the userdata is: " + str(userdata))
    if(msg.topic == "sensors"):
        # split the payload in their seperate parts
        words = str(msg.payload).split();
        print("got heartbeat from: " + words[1])
        # set the module online
        setOnline(words[1], db)
        # look up the module in the online list
        test = 0
        cur = db.cursor()
        cur.execute("SELECT moduleIdentifier FROM `NoWire`.`wifimodule_online`")
        for row in cur.fetchall():
            if(row[0] == words[1]):
                test = 1
        if(test == 1):
            # already in list, update timestamp online list
            cur = db.cursor()
            cur.execute("UPDATE `NoWire`.`wifimodule_online` SET `timestamp` =" + str(time.time()) + " WHERE moduleIdentifier='" + words[1] + "'")
        else:
            # not yet in the list, add module to the list
            # words[2] looks like "ip=<addr>"; the first three characters are stripped
            temp = words[2]
            cur = db.cursor()
            cur.execute("INSERT INTO `NoWire`.`wifimodule_online` (`moduleIdentifier`, `ipv4`, `timestamp`) VALUES ('" + words[1] + "', " + str(temp[3:len(temp)]) + ", " + str(time.time()) + ")")
        # subscribe to all topics
        subscribeTopicsSensors(words[1], db)
        # get all the outputs of this module, send state current database state to the module
        publishOutputStates(words[1], db)
    else:
        # get every esp topic online in query
        # loop over each and upon match relay them to the database
        cur = db.cursor()
        cur.execute("SELECT concat_ws('/','sensors',`wifimodule`.`moduleIdentifier`,`sensortype`.`topic`) FROM `NoWire`.`sensor`" +
                    "LEFT JOIN `NoWire`.`sensortype` ON `sensor`.`IDtype` = `sensortype`.`ID`" +
                    "LEFT JOIN `NoWire`.`wifimodule` ON `sensor`.`IDwifimodule` = `wifimodule`.`ID`")
        for row in cur.fetchall():
            # got the right message
            if(msg.topic == row[0]):
                # monitor the actions (may republish to coupled output sensors)
                monitorActions(db, msg.topic, msg.payload)
                #print(str(msg.topic) + ": " + str(msg.payload))
                words = str(msg.topic).split("/");
                # 0 is sensors
                # 1 is ESPxxxxx
                # 2 is sensor topic
                # get a list for sensors matching the ESP and sensor topic
                querySQL = ("SELECT `sensor`.`ID`"
                            "FROM NoWire.sensor "
                            "LEFT JOIN `NoWire`.`wifimodule` ON `sensor`.`IDwifimodule` = `wifimodule`.`ID`"
                            "LEFT JOIN `NoWire`.`sensortype` ON `sensor`.`IDtype` = `sensortype`.`ID`"
                            "WHERE `wifimodule`.`moduleIdentifier` = '"
                            + words[1] +
                            "' AND"
                            "`sensortype`.`topic` = '"
                            + words[2] +
                            "'ORDER BY `sensor`.`ID`")
                cur2 = db.cursor()
                cur2.execute(querySQL)
                # split the payload on "-"
                payloadSplit = str(msg.payload).split("-")
                # 0 is iteration of sensor
                # 1 is the value
                # get the iteration
                iterMatch = int(payloadSplit[0])
                iterCounter = 0
                for row2 in cur2.fetchall():
                    iterCounter = iterCounter + 1
                    if(iterCounter == iterMatch):
                        # we have a match on the ID of sensor
                        idSensor = row2[0]
                        # now we can update the value of this sensor
                        querySQLUpdate = ("UPDATE `NoWire`.`sensor`"
                                          "SET"
                                          "`value` = '"
                                          + payloadSplit[1] +
                                          "'"
                                          "WHERE `sensor`.`ID`='"
                                          + str(idSensor) +
                                          "';")
                        cur3 = db.cursor()
                        cur3.execute(querySQLUpdate)
                        # get last entry of sensor in sensor_data
                        querySQLSecond = ("SELECT `sensor_data`.`ID`, `sensor_data`.`IDsensor`, `sensor_data`.`value`, `sensor_data`.`from`, `sensor_data`.`to` "
                                          "FROM NoWire.sensor_data "
                                          "WHERE `sensor_data`.`IDsensor` = '"
                                          + str(idSensor) +
                                          "' AND `sensor_data`.`to` IS NULL ORDER BY `sensor_data`.`from` DESC "
                                          "LIMIT 1")
                        cur4 = db.cursor()
                        cur4.execute(querySQLSecond)
                        if(cur4.rowcount == 1):
                            for row4 in cur4.fetchall():
                                #print row4[4]
                                if(row4[4]):
                                    # if the to time is filled in, insert new
                                    querySQLFourth = ("INSERT INTO `NoWire`.`sensor_data`"
                                                      "(`IDsensor`, `value`, `from`) VALUES ('"
                                                      + str(idSensor) +
                                                      "', '"
                                                      + payloadSplit[1] +
                                                      "', NOW());")
                                    cur6 = db.cursor()
                                    cur6.execute(querySQLFourth)
                                else:
                                    # if the to time is null
                                    # if the value is the same, do nothing
                                    if(float(row4[2]) != float(payloadSplit[1])):
                                        # if the value is different, get the ID of the sensor_data and close the entry
                                        querySQLThird = ("UPDATE `NoWire`.`sensor_data`"
                                                         "SET `to`=NOW(), `from`=`from` WHERE `ID`='"
                                                         + str(row4[0]) +
                                                         "';")
                                        cur5 = db.cursor()
                                        cur5.execute(querySQLThird)
                                        # make new entry with value, id en from
                                        querySQLFourth = ("INSERT INTO `NoWire`.`sensor_data`"
                                                          "(`IDsensor`, `value`, `from`) VALUES ('"
                                                          + str(idSensor) +
                                                          "', '"
                                                          + payloadSplit[1] +
                                                          "', NOW()+0.1);")
                                        cur6 = db.cursor()
                                        cur6.execute(querySQLFourth)
                        else:
                            # no open interval yet for this sensor: start one
                            querySQLFourth = ("INSERT INTO `NoWire`.`sensor_data`"
                                              "(`IDsensor`, `value`, `from`) VALUES ('"
                                              + str(idSensor) +
                                              "', '"
                                              + payloadSplit[1] +
                                              "', NOW());")
                            cur6 = db.cursor()
                            cur6.execute(querySQLFourth)
# Wire up the MQTT client callbacks and connect to the local broker
# (non-standard port 7777, 60 s keepalive).  The network loop is driven
# by the while-loop at the bottom of this file.
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("127.0.0.1", 7777, 60)
def check_onlinestates():
    """Sweep wifimodule_online (via the module-level ``db``) and mark every
    module whose last heartbeat is older than 90 seconds as offline: its
    online row is deleted, the wifimodule flag cleared, and the open
    uptime/sensor intervals closed."""
    print("checking online states")
    #check every module in online database list and compare the timestamp
    cur = db.cursor()
    cur.execute("SELECT ID, moduleIdentifier, ipv4, timestamp FROM NoWire.wifimodule_online")
    for row in cur.fetchall():
        # if 1:30 has passed
        if(row[3] < (time.time() - 90)):
            # delete online entry
            cur2 = db.cursor()
            cur2.execute("DELETE FROM wifimodule_online WHERE ID=" + str(row[0]))
            # update wifi module online status
            cur2 = db.cursor()
            cur2.execute("UPDATE `NoWire`.`wifimodule` SET `online` = 0 WHERE moduleIdentifier='" + row[1] + "'")
            # update open entry wifi online status
            setOffline(row[1], db)
            # update open entry sensor values
            setOfflineSensors(row[1], db)
            # send message
            print "The module "+row[1]+" is set to the offline state"
def getModuleID(moduleIdentifier, database):
    """Return the numeric wifimodule.ID for ``moduleIdentifier``.

    NOTE(review): implicitly returns None when no row matches, while
    callers only guard with ``!= 0`` -- confirm identifiers are always
    known before this is called."""
    cur = database.cursor()
    cur.execute("SELECT `wifimodule`.`ID` FROM NoWire.wifimodule WHERE `wifimodule`.`moduleIdentifier` = '" + str(moduleIdentifier) +"'")
    for row in cur.fetchall():
        return int(row[0])
def setOnline(moduleIdentifier, database):
    """Flag the module as online and, when its latest wifimodule_data row is
    absent or already closed (toOnline filled in), open a new uptime
    interval with fromOnline = NOW()."""
    cur = database.cursor()
    cur.execute("UPDATE `NoWire`.`wifimodule` SET `online` = 1 WHERE moduleIdentifier=\'" + moduleIdentifier + "'")
    modID = getModuleID(moduleIdentifier, database)
    if(modID != 0):
        # got an existing module
        cur = database.cursor()
        cur.execute("SELECT `wifimodule_data`.`ID`, `wifimodule_data`.`fromOnline`, `wifimodule_data`.`toOnline` "
                    "FROM NoWire.wifimodule_data "
                    "WHERE `wifimodule_data`.`IDwifimodule` = '" + str(modID) + "'"
                    " ORDER BY `wifimodule_data`.`fromOnline` DESC "
                    "LIMIT 1")
        if(cur.rowcount == 0):
            # first time we see this module: open its first uptime interval
            cur2 = database.cursor()
            cur2.execute("INSERT INTO `NoWire`.`wifimodule_data` "
                         "(`IDwifimodule`,`fromOnline`) VALUES ('" + str(modID) + "', NOW());")
        else:
            for row in cur.fetchall():
                if row[2] != None:
                    # previous interval was closed: start a new one
                    # should insert a new value
                    cur2 = database.cursor()
                    cur2.execute("INSERT INTO `NoWire`.`wifimodule_data` "
                                 "(`IDwifimodule`,`fromOnline`) VALUES ('" + str(modID) + "', NOW());")
def subscribeTopicsSensors(moduleIdentifier, database):
    """Subscribe the MQTT client to sensors/<moduleIdentifier>/<topic> for
    every distinct sensor topic registered for that module.

    NOTE(review): uses the module-level ``db`` instead of the ``database``
    argument -- confirm whether the parameter should be used."""
    cur = db.cursor()
    cur.execute("SELECT distinct(`sensortype`.`topic`) FROM `NoWire`.`sensor`" + \
                " LEFT JOIN `NoWire`.`sensortype` ON `sensor`.`IDtype` = `sensortype`.`ID`" + \
                " LEFT JOIN `NoWire`.`wifimodule` ON `sensor`.`IDwifimodule` = `wifimodule`.`ID` WHERE `wifimodule`.`moduleIdentifier` = '" + moduleIdentifier + "'")
    for row in cur.fetchall():
        #subscribe to all the topics
        client.subscribe("sensors/" + moduleIdentifier + "/" + str(row[0]))
def setOffline(moduleIdentifier, database):
    """Close the module's currently open uptime interval: if the latest
    wifimodule_data row has no toOnline yet, set it to NOW()."""
    modID = getModuleID(moduleIdentifier, database)
    if(modID != 0):
        # got an existing module
        cur = database.cursor()
        cur.execute("SELECT `wifimodule_data`.`ID`, `wifimodule_data`.`fromOnline`, `wifimodule_data`.`toOnline` "
                    "FROM NoWire.wifimodule_data "
                    "WHERE `wifimodule_data`.`IDwifimodule` = '" + str(modID) + "'"
                    " ORDER BY `wifimodule_data`.`fromOnline` DESC "
                    "LIMIT 1")
        for row in cur.fetchall():
            if row[2] is None:
                print ("closing module " + moduleIdentifier)
                # close the open interval
                # should insert a new value
                cur2 = database.cursor()
                cur2.execute("UPDATE `NoWire`.`wifimodule_data` SET `toOnline` = NOW() WHERE `wifimodule_data`.`ID` = '" + str(row[0]) + "';")
def publishOutputStates(moduleIdentifier, database):
    """Republish the current database state of every output ("licht") sensor
    of the module, so a freshly (re)connected module restores its outputs."""
    moduleID = getModuleID(moduleIdentifier, database)
    # get a list of all output sensors
    cur = database.cursor()
    cur.execute("SELECT `ID` FROM `NoWire`.`sensor` WHERE `sensor`.`IDwifimodule` = " + str(moduleID))
    for row in cur.fetchall():
        # every sensor ID from the module
        sensID = int(row[0])
        # get the sensor type
        sensSoort = getSensorSoort(sensID, database)
        # only "licht" (light) sensors are outputs that need to be restored
        if (sensSoort == "licht"):
            # get the topic
            sensTopic = getSensorTopic(sensID, database)
            # get the payload
            sensPayload = getSensorPayload(sensID, database)
            # publish
            client.publish(sensTopic, sensPayload)
def getSensorPayload(sensorID, database):
    """Build the MQTT payload "<n>-<value>" for a sensor, where ``n`` is the
    sensor's 1-based position among same-type sensors on the same module
    (ordered by ID) and ``value`` is its current value truncated to int."""
    # get the sensor nth occurence
    nthOccurence = 0
    nIterator = 1
    sensTypeID = getSensorTypeID(sensorID, database)
    sensWifiID = getSensorWifiModuleID(sensorID, database)
    # get the iterator
    cur = database.cursor()
    cur.execute("SELECT `sensor`.`ID` FROM `NoWire`.`sensor` WHERE `sensor`.`IDwifimodule` = " + str(sensWifiID) + " AND `sensor`.`IDtype` = " + str(sensTypeID) + " ORDER BY `sensor`.`ID`;")
    for row in cur.fetchall():
        #count
        # the counter only advances for non-matching rows, so at the match
        # nthOccurence equals (number of preceding rows) + 1
        if(int(row[0]) == int(sensorID)):
            nthOccurence = nIterator
        else:
            nIterator = nIterator + 1
    # get the sensor current value (int)
    curValue = int(getSensorValue(int(sensorID), database))
    # return composite
    return str(nthOccurence) + "-" + str(curValue)
def getSensorTypeID(sensorID, database):
    """Return the sensor's IDtype as a string, or None if the sensor
    does not exist."""
    cur = database.cursor()
    cur.execute("SELECT `IDtype` FROM `NoWire`.`sensor` WHERE ID=" + str(sensorID) + " LIMIT 1")
    for row in cur.fetchall():
        return str(row[0])
def getSensorValue(sensorID, database):
    """Return the sensor's current value as a float, or None if the sensor
    does not exist."""
    cur = database.cursor()
    cur.execute("SELECT `value` FROM `NoWire`.`sensor` WHERE `sensor`.`ID` = " + str(sensorID))
    for row in cur.fetchall():
        return float(row[0])
def getSensorSoort(sensorID, database):
    """Return the sensor's kind ("soort", e.g. "licht") by joining sensor ->
    sensortype -> sensorsoort; None if the sensor does not exist."""
    cur = database.cursor()
    cur.execute("SELECT `sensorsoort`.`soort` "
                "FROM `NoWire`.`sensor` "
                "LEFT JOIN `NoWire`.`sensortype` ON `sensor`.`IDtype` = `sensortype`.`ID` "
                "LEFT JOIN `NoWire`.`sensorsoort` ON `sensortype`.`soort` = `sensorsoort`.`ID` "
                "WHERE `sensor`.`ID` = " + str(sensorID))
    for row in cur.fetchall():
        return row[0]
def getSensorTopic(sensorID, database):
    """Return the full MQTT topic "sensors/<moduleIdentifier>/<sensorTopic>"
    for the given sensor, built from two queries (module prefix + topic)."""
    wifiID = getSensorWifiModuleID(sensorID, database)
    cur = database.cursor()
    cur.execute("SELECT concat_ws('/', 'sensors', `wifimodule`.`moduleIdentifier`, '') as topic FROM NoWire.wifimodule WHERE `wifimodule`.`ID`=" + str(wifiID))
    topicPrefix = ""
    for row in cur.fetchall():
        topicPrefix = row[0]
    # NOTE(review): this cursor uses the module-level ``db`` instead of the
    # ``database`` argument -- confirm whether that is intentional.
    cur = db.cursor()
    cur.execute("SELECT `sensortype`.`topic` FROM NoWire.sensor LEFT JOIN `NoWire`.`sensortype` ON `sensor`.`IDtype` = `sensortype`.`ID` WHERE `sensor`.`ID` = " + str(sensorID))
    for row in cur.fetchall():
        topicPrefix = topicPrefix + row[0]
    return topicPrefix
def getSensorWifiModuleID(sensorID, database):
    """Return the IDwifimodule owning the sensor, or 0 when the sensor is
    unknown (the last matching row wins if there are several)."""
    cur = database.cursor()
    cur.execute("SELECT `IDwifimodule` FROM `NoWire`.`sensor` WHERE `sensor`.`ID` =" + str(sensorID))
    wifiID = 0
    for row in cur.fetchall():
        wifiID = int(row[0])
    return wifiID
def setOfflineSensors(moduleIdentifier, database):
    """Close every open sensor_data interval (``to`` IS NULL) belonging to
    the given module by setting ``to`` = NOW()."""
    modID = getModuleID(moduleIdentifier, database)
    if(modID != 0):
        # got an existing module
        cur = database.cursor()
        cur.execute("UPDATE `NoWire`.`sensor_data` LEFT JOIN `NoWire`.`sensor` ON `sensor_data`.`IDsensor` = `sensor`.`ID` SET `to` = NOW() "
                    "WHERE `sensor_data`.`to` IS NULL AND `sensor`.`IDwifimodule` = '" + str(modID) + "'")
def monitorActions(database, topic, payload):
    """Evaluate sensor couplings for an incoming reading.

    Builds two lookup arrays (sensor IDs and their "<n>-" payload prefixes,
    numbered per module and sensor type with a MySQL @rank variable), then
    walks all rows of sensor_koppeling; when the source sensor's topic
    matches ``topic`` and the coupling's trigger payload equals ``payload``,
    the target sensor's assign value is published on the target topic via
    the module-level MQTT ``client``.

    NOTE(review): the per-row SELECT over sensor_koppeling inside the
    xrange loop is identical on every iteration -- it could be hoisted out
    of the loop for a large speedup.
    """
    # get all the topics and payloads
    cur = database.cursor()
    cur.execute("SELECT `ID` FROM `NoWire`.`wifimodule`;");
    # make variables
    wifiModules = array('i', [])
    arr_sensorID = []
    arr_payload_prefix = []
    # get the wifi module ids
    for row in cur.fetchall():
        wifiModules.append(int(row[0]))
    # get the sens id, payload and payload prefixes
    for i in wifiModules:
        cur2 = database.cursor()
        cur2.execute("SELECT `sensor`.`IDtype` "
                     " FROM `NoWire`.`sensor` "
                     " WHERE IDwifimodule=" + str(i) +
                     " GROUP BY `sensor`.`IDtype`")
        for row in cur2.fetchall():
            # @rank restarts at 0 per (module, type): sensors are numbered 1..k
            cur = database.cursor()
            cur.execute("SET @rank=0;")
            cur = database.cursor()
            cur.execute("SELECT `sensor`.`ID`,"
                        "concat_ws('-', @rank:=@rank+1, `sensor`.`value`) as payload, "
                        "concat_ws('-', @rank, '') as payloadPrefix "
                        "FROM `NoWire`.`sensor` "
                        "LEFT JOIN `NoWire`.`wifimodule` ON `sensor`.`IDwifimodule` = `wifimodule`.`ID` "
                        "LEFT JOIN `NoWire`.`sensortype` ON `sensor`.`IDtype` = `sensortype`.`ID` "
                        "WHERE `wifimodule`.`ID`=" + str(i) + " AND `sensortype`.`ID`= " + str(row[0]))
            for roww in cur.fetchall():
                # add all this stuff in the arrays
                arr_sensorID += [int(roww[0])]
                arr_payload_prefix += [str(roww[2])]
    arrLength = len(arr_sensorID)
    for i in xrange(0, arrLength):
        #print "sensor id: " + str(arr_sensorID[i]) + ", sensor payload: " + str(arr_payload[i]) + ", payload prefix: " + str(arr_payload_prefix[i])
        cur = database.cursor()
        cur.execute("SELECT "
                    "sSource.ID, "
                    "`sensor_koppeling`.`source_trigger_value` as sVal, "
                    "concat_ws('/', 'sensors', sMod.moduleIdentifier, tSource.`topic`) as sTopic, "
                    "`sensor_koppeling`.`target_assign_value` as tVal, "
                    "sTarget.ID, "
                    "concat_ws('/', 'sensors', tMod.moduleIdentifier, tTarget.`topic`) as tTopic, "
                    "`koppelingstype`.`ID` as couplingID "
                    "FROM NoWire.sensor_koppeling "
                    "LEFT JOIN `NoWire`.`sensor` sSource ON sSource.ID = `sensor_koppeling`.`IDsensorBron` "
                    "LEFT JOIN `NoWire`.`sensor` sTarget ON sTarget.ID = `sensor_koppeling`.`IDsensorDoel` "
                    "LEFT JOIN `NoWire`.`sensortype` tSource ON tSource.ID = sSource.IDtype "
                    "LEFT JOIN `NoWire`.`sensortype` tTarget ON tTarget.ID = sTarget.IDtype "
                    "LEFT JOIN `NoWire`.`koppelingstype` ON `koppelingstype`.`ID` = `sensor_koppeling`.`IDkoppelingstype` "
                    "LEFT JOIN `NoWire`.`wifimodule_gebruikers` sUser ON sSource.IDwifimodule = sUser.`IDwifimodule` "
                    "LEFT JOIN `NoWire`.`wifimodule_gebruikers` tUser ON sTarget.IDwifimodule = tUser.`IDwifimodule` "
                    "LEFT JOIN `NoWire`.`wifimodule` sMod ON sUser.IDwifimodule = sMod.`ID` "
                    "LEFT JOIN `NoWire`.`wifimodule` tMod ON tUser.IDwifimodule = tMod.`ID`")
        for row in cur.fetchall():
            # get topic from query
            source_topic = str(row[2])
            # if topic match
            if (source_topic == topic):
                #print "topic match: " + str(source_topic)
                # get source sensor ID from query
                source_sensID = int(row[0])
                # get sensor ID from arr_sensorID
                for j in xrange(0, arrLength):
                    if(int(arr_sensorID[j]) == int(source_sensID)):
                        #print "sensor " + str(arr_sensorID[j]) + " matching " + str(source_sensID)
                        arrIndex = j
                        # temp variable
                        targetID = 0
                        # get target sensor ID
                        target_sensID = int(row[4])
                        for k in xrange(0, arrLength):
                            if(arr_sensorID[k] == target_sensID):
                                targetID = k
                                # end for loop
                                # NOTE(review): rebinding k does not stop a
                                # Python for-loop; the scan simply continues.
                                k = arrLength
                        if targetID != 0:
                            # get target value
                            target_sensVal = row[3]
                            # get the target trigger
                            source_payloadTrigger = str(arr_payload_prefix[targetID])+str(int(target_sensVal))
                            # if payload match
                            #print "does " + str(source_topic) + "_" + str(source_payloadTrigger) + " match " + str(topic) + "_" + str(payload)
                            if(source_payloadTrigger == payload):
                                #print "yes"
                                # get payload prefix
                                #print "publishing: " + row[5] + ", " + str(arr_payload_prefix[targetID]) + str(int(target_sensVal))
                                client.publish(str(row[5]), str(arr_payload_prefix[targetID]) + str(int(target_sensVal)))
                        # end for loop
                        j = arrLength
# Startup cleanup: close all uptime intervals left open by a previous run,
# empty the online list and clear every module's online flag, so state is
# rebuilt purely from fresh heartbeats.
#all the modules that are online, clear them
cur = db.cursor()
cur.execute("SELECT ID, moduleIdentifier, ipv4, timestamp FROM NoWire.wifimodule_online")
for row in cur.fetchall():
    setOffline(row[1], db)
cur = db.cursor()
cur.execute("TRUNCATE `NoWire`.`wifimodule_online`")
#set all current modules to offline
cur = db.cursor()
cur.execute("UPDATE `NoWire`.`wifimodule` SET `online` = 0 WHERE 1=1")
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
while client.loop(0, 0) == 0:
    # every ~90 s, sweep the online table for modules that stopped beating
    if (ts < (time.time() - 90) ):
        #time is outdated, call online check states
        check_onlinestates()
        ts = time.time()
    #sleep(0.5)
    pass
| [
"robbevanassche@gmail.com"
] | robbevanassche@gmail.com |
3f3eebddf1980d557d39e2eef82f0f178cb64734 | 2990b0841b63f300a722107933c01c7237a7976b | /all_xuef/code/sicp_code_python/2.2/exer2_36.py | 45876f0bdd6ff5e412e92460f44c40c00c5394aa | [] | no_license | xuefengCrown/Files_01_xuef | 8ede04751689e0495e3691fc5d8682da4d382b4d | 677329b0189149cb07e7ba934612ad2b3e38ae35 | refs/heads/master | 2021-05-15T04:34:49.936001 | 2019-01-23T11:50:54 | 2019-01-23T11:50:54 | 118,802,861 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | """
exer2.36 accumulate_n
((1 2 3) (4 5 6) (7 8 9) (10 11 12))-->(22 26 30)
"""
import exer2_33 as funcs
import accumulate as accu
import operator as oper
def accumulate_n(op, init, seqs):
    """Fold ``op`` column-wise over equal-length sequences (SICP ex. 2.36).

    Example: accumulate_n(add, 0, [[1,2,3],[4,5,6],[7,8,9],[10,11,12]])
    returns [22, 26, 30].  Each column is folded right-to-left, i.e.
    op(x1, op(x2, ... op(xn, init))), matching Scheme's ``accumulate``
    (fold-right) semantics, so non-commutative ``op`` behaves identically
    to the original recursive version.

    Improvements: iterative (no recursion-depth limit on long rows),
    linear instead of quadratic (no repeated list concatenation), and no
    dependence on the helper modules.
    """
    def fold_right(column):
        # right fold: start from init and consume the column back to front
        acc = init
        for x in reversed(column):
            acc = op(x, acc)
        return acc
    # zip(*seqs) yields the columns; empty rows yield no columns -> []
    return [fold_right(column) for column in zip(*seqs)]
def test():
    """Demo: print the column-wise sums of a 4x3 matrix ([22, 26, 30])."""
    matrix = [
        [1, 2, 3],
        [4, 5, 6],
        [7, 8, 9],
        [10, 11, 12],
    ]
    print(accumulate_n(oper.add, 0, matrix))
# Run the demo only when executed directly, not when imported as a module.
if __name__ == '__main__':
    test()
| [
"643472092@qq.com"
] | 643472092@qq.com |
d6e2afb642502fd7f4276c1fe7b1f001940c9f59 | 3138a4f627f3eae63a43fc85d84ce1f4d4c9bbff | /seleniumappre.py | 065d776be6e9c3752a0056f6d61b65545a5c8f68 | [] | no_license | ivansgithub/tkinter_and_selenium | cc75ee3f30a2d7bcd96467f6add63f2bb9e06488 | 17d36eab530739f18e5b8b4f90f269b2a4b6ba93 | refs/heads/master | 2023-06-22T11:04:32.421290 | 2021-07-29T18:52:53 | 2021-07-29T18:52:53 | 390,821,367 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,296 | py | from selenium import webdriver
import time
import pandas as pd
from selenium.webdriver.support.ui import Select
def entrando(username_info,password_info):
    """Log in to the stevegriggsdesign admin portal with Selenium.

    Launches Chrome (driver binary expected at ./chromedriver), opens the
    admin leads page, fills in the email/password fields and submits the
    login form.  The WebDriver instance is published through the
    module-level global ``driver`` so later functions can reuse the session.

    NOTE(review): find_element_by_xpath is removed in Selenium 4; migrate to
    driver.find_element(By.XPATH, ...) when upgrading.
    """
    global driver
    driver = webdriver.Chrome(r"./chromedriver")
    url = 'https://stevegriggsdesign.com/portal/admin/leads'
    driver.get(url)
    driver.maximize_window()
    # locate the credential inputs by their HTML input types
    correo=driver.find_element_by_xpath('//input[@type="email"]')
    contrasena=driver.find_element_by_xpath('//input[@type="password"]')
    correo.send_keys(username_info)
    contrasena.send_keys(password_info)
    driver.find_element_by_xpath('//button[@type="submit"]').click()
def _select_all_rows(select_name):
    """Switch a DataTables page-length <select> to 'All' so every row renders."""
    picker = Select(driver.find_element_by_xpath('//select[@name="%s"]' % select_name))
    picker.select_by_visible_text('All')
    time.sleep(3)


def _collect_hrefs(xpath):
    """Return the href attribute of every anchor matching *xpath*."""
    return [anchor.get_attribute('href') for anchor in driver.find_elements_by_xpath(xpath)]


def _repeated_links(urls):
    """URLs occurring more than once in *urls*.

    Mirrors the original de-duplication: presumably each table row contains the
    same detail link at least twice, so "appears twice" == "is a real row".
    TODO(review): confirm that assumption against the live table markup.
    """
    seen = set()
    return set(url for url in urls if url in seen or seen.add(url))


def _status_flag(xpath):
    """Return 'complete' when an element matching *xpath* exists, '' otherwise."""
    try:
        if driver.find_element_by_xpath(xpath):
            return 'complete'
    except Exception:
        pass
    return ''


def _click_optional(xpath, before=0, after=3):
    """Best-effort click that swallows 'element not present' and paces with sleeps."""
    try:
        if before:
            time.sleep(before)
        driver.find_element_by_xpath(xpath).click()
        time.sleep(after)
    except Exception:
        pass


def _close_task_modal():
    """Close the task-detail modal (6th 'close' button on the page), if present."""
    try:
        driver.find_elements_by_xpath('//button[@class="close"]')[5].click()
    except Exception:
        pass


def _task_notes(link_text):
    """Open the task modal whose link text contains *link_text*; return its notes.

    Returns '' when the task link is absent and 'Na' when the modal opened but
    the note blocks could not be read.  Fixes the original copy-paste bug that
    joined an unrelated list (``send_desing_note3``) and appended WebElements
    instead of their ``.text``.
    """
    try:
        time.sleep(2)
        driver.find_element_by_xpath('//a[contains(text(), "%s")]' % link_text).click()
        time.sleep(3)
    except Exception:
        return ''
    try:
        blocks = driver.find_elements_by_xpath('//div[@data-task-attachment-id="0"]')
        notes = "\n".join(block.text for block in blocks)
    except Exception:
        notes = 'Na'
    _close_task_modal()
    return notes


def _format_sheet(writer, sheet_name, frame):
    """Apply the bold pink header row and auto-sized columns to one worksheet."""
    header_format = writer.book.add_format({
        'bold': True,
        'fg_color': '#ffcccc',
        'border': 1})
    worksheet = writer.sheets[sheet_name]
    for col_num, column in enumerate(frame.columns.values):
        worksheet.write(0, col_num, column, header_format)
        # widen to the longest cell (or the header itself) plus some padding
        column_len = max(frame[column].astype(str).str.len().max(), len(column)) + 3
        print(column_len)
        worksheet.set_column(col_num, col_num, column_len)


def reporte():
    """Scrape leads, projects and invoices; write reportprojects-<timestamp>.xlsx.

    Produces three sheets:
      * ``leads``     - leads whose proposal has not been sent yet
      * ``onbording`` - projects still missing an onboarding checklist item
                        (plus leads with a sent proposal, promoted as new projects)
      * ``desing``    - projects still missing a design checklist item
    Relies on the module-level ``driver`` already being logged in.
    """
    import datetime

    # ---------------- leads table ----------------
    time.sleep(5)
    _select_all_rows('table-leads_length')
    lead_frames = []
    for url in _repeated_links(_collect_hrefs('//a[contains(@href, "leads")]')):
        driver.get(url)
        time.sleep(4)
        try:
            overview = driver.find_element_by_xpath('//div[@class="lead-view"]').text
            # keep only the starred summary lines of the lead overview
            description = [line for line in overview.split('\n') if '*' in line]
        except Exception:
            # fixed: the original joined the string 'na' char-by-char ("n\na")
            description = ['na']
        try:
            name = driver.find_element_by_xpath(
                '//p[@class="bold font-medium-xs lead-name"]').text
        except Exception:
            name = 'no name'
        _click_optional('//a[@aria-controls="lead_notes"]')
        email_flag = _status_flag('//div[contains(text(), "Sent email")]')
        call_flag = _status_flag('//div[contains(text(), "initial call")]')
        visit_flag = _status_flag('//div[contains(text(), "Site visit")]')
        notes = ''
        try:
            # the last note on the page wins, as in the original report
            for block in driver.find_elements_by_xpath('//div[@data-note-description]'):
                notes = block.text
        except Exception:
            notes = 'Na'
        _click_optional('//a[@aria-controls="tab_proposals_leads"]', after=4)
        try:
            sent = driver.find_element_by_xpath('//span[contains(text(), "Sent")]')
            proposal = 'sent' if sent else 'no sent'
        except Exception:
            proposal = 'no sent'
        lead_frames.append(pd.DataFrame({
            'leads name': [name],
            'Description': ["\n".join(description)],
            'email': [email_flag],
            'call': [call_flag],
            'visit': [visit_flag],
            'proposal': [proposal],
            'notes': [notes]}))

    # ---------------- projects: onboarding + design ----------------
    driver.get('https://stevegriggsdesign.com/portal/admin/projects')
    time.sleep(2)
    _select_all_rows('DataTables_Table_0_length')
    project_links = _repeated_links(_collect_hrefs('//a[contains(@href, "view")]'))
    time.sleep(3)
    onboarding_frames = []
    design_frames = []
    for url in project_links:
        driver.get(url)
        time.sleep(3)
        try:
            description = driver.find_element_by_xpath(
                '//div[@class="tc-content project-overview-description"]'
            ).text.replace('DESCRIPTION', '')
        except Exception:
            description = 'Na'
        project_name = ''  # fixed: was a NameError when the milestones tab failed
        try:
            driver.find_element_by_xpath('//li[@class="project_tab_project_milestones"]').click()
            project_name = driver.title
            time.sleep(3)
        except Exception:
            pass
        # toggle 'show completed tasks' on, read the struck-through entries ...
        _click_optional('//input[@type="checkbox"]', after=3)
        try:
            completed = [task.text for task in driver.find_elements_by_xpath(
                '//div/ul/li/div/div/div[2]/a[@class="task_milestone pull-left mbot5 '
                'mtop5 text-muted line-throught"]')]
        except Exception:
            completed = []
        # ... and toggle it back off before opening the task modals
        _click_optional('//input[@type="checkbox"]', after=3)

        proposal_done = 'complete' if '1. Send Design Proposal' in completed else ''
        email_done = 'complete' if (
            '2. Send Thank you/Onboarding email to client' in completed
            or '3. Get Information from Client' in completed
            or '2. Send Onboarding email to client' in completed) else ''
        invoice_done = 'complete' if (
            '3. Send Invoice' in completed or '2. Send Invoice' in completed) else ''

        proposal_notes = _task_notes('1. Send Design Proposal')
        # the onboarding e-mail task appears under several historical labels;
        # try each, then read whichever modal is open
        for label in ('2. Send Thank you/Onboarding email to client',
                      '3. Get Information from Client',
                      '2. Send Onboarding email to client'):
            _click_optional('//a[contains(text(), "%s")]' % label, before=2)
        try:
            email_blocks = driver.find_elements_by_xpath('//div[@data-task-attachment-id="0"]')
            email_notes = "\n".join(block.text for block in email_blocks)
            _close_task_modal()
        except Exception:
            email_notes = ''
        invoice_notes = _task_notes('3. Send Invoice')

        onboarding_frames.append(pd.DataFrame({
            'project name': [project_name],
            'Description': [description],
            'send proposal': [proposal_done],
            'send email': [email_done],
            'inovice': [invoice_done],
            'proposal notes': [proposal_notes],
            'email notes': [email_notes],
            'inovice notes': [invoice_notes]}))

        design_frames.append(pd.DataFrame({
            'project name': [project_name],
            'Description': [description],
            'site visit': ['complete' if '1. Site Visit' in completed else ''],
            'survey': ['complete' if '2. Get plot plan or survey' in completed else ''],
            'desing': ['complete' if '3. Design' in completed else ''],
            'Client Approval': ['complete' if "4. Client's Approval" in completed else ''],
            'permits': ['complete' if '5. Request Permits' in completed else ''],
            # one column per note: the original overwrote dfd6 three times,
            # silently dropping the visit and survey notes from the report
            'note visit': [_task_notes('1. Site Visit')],
            'Note survey': [_task_notes('2. Get plot plan or survey')],
            'Note Desing': [_task_notes('3. Design')],
            'Note Approval': [_task_notes('Approval')],
            'Note Permits': [_task_notes('5. Request Permits')]}))

    # ---------------- invoices ----------------
    driver.get('https://stevegriggsdesign.com/portal/admin/invoices')
    time.sleep(3)
    names = [cell.text for cell in driver.find_elements_by_xpath('//tr/td[6]')]
    payments = [cell.text for cell in driver.find_elements_by_xpath('//tr/td[9]')]
    dates = [cell.text for cell in driver.find_elements_by_xpath('//tr/td[8]')]
    rows = list(zip(names, payments, dates))
    print(rows)
    invoices = pd.DataFrame(rows, columns=['leads name', 'pagos', 'fechas'])

    # ---------------- assemble the three report tables ----------------
    leads_data = pd.concat(lead_frames)
    onboarding_data = pd.concat(onboarding_frames)
    design_data = pd.concat(design_frames)

    # sheet 1: leads whose proposal was never sent
    solo_leads = leads_data.loc[leads_data.proposal == 'no sent']

    # sheet 2: leads with a sent proposal are promoted to new projects;
    # projects with all three onboarding checks AND a paid invoice move on
    promoted = pd.DataFrame({
        'project name': leads_data.loc[leads_data.proposal == 'sent']['leads name'],
        'Description': 'new project',
        'send proposal': '', 'send email': '', 'inovice': '',
        'proposal notes': '', 'email notes': '', 'inovice notes': ''})
    check_cols = onboarding_data.columns[
        onboarding_data.columns.isin(['send email', 'send proposal', 'inovice'])]
    fully_checked = onboarding_data[(onboarding_data[check_cols] == 'complete').all(1)]
    paid_names = list(invoices.loc[invoices.pagos == 'PAID']['leads name'].values)
    moved_to_design = fully_checked.loc[fully_checked['project name'].isin(paid_names)]
    still_onboarding = onboarding_data[
        ~onboarding_data.apply(tuple, 1).isin(moved_to_design.apply(tuple, 1))]
    onboarding_table = pd.concat([still_onboarding, promoted])

    # sheet 3: projects with at least one design check still open
    design_cols = design_data.columns[design_data.columns.isin(
        ['site visit', 'survey', 'desing', 'Client Approval', 'permits'])]
    design_done = design_data[(design_data[design_cols] == 'complete').all(1)]
    design_pending = design_data[
        ~design_data.apply(tuple, 1).isin(design_done.apply(tuple, 1))]

    # ---------------- write the spreadsheet ----------------
    stamp = datetime.datetime.now().strftime('%Y-%m-%d-%H%M')
    writer = pd.ExcelWriter('reportprojects-{}.xlsx'.format(stamp), engine='xlsxwriter')
    solo_leads.to_excel(writer, sheet_name='leads', index=False)
    onboarding_table.to_excel(writer, sheet_name='onbording', index=False)
    design_pending.to_excel(writer, sheet_name='desing', index=False)
    for sheet_name, frame in (('leads', solo_leads),
                              ('onbording', onboarding_table),
                              ('desing', design_pending)):
        _format_sheet(writer, sheet_name, frame)
    writer.save()
    driver.close()
"ivan.sal.be@gmail.com"
] | ivan.sal.be@gmail.com |
4e0d1fc89d542e61c7b2ec65b8c9602d2f5dcca5 | 6bb649848012fb5fceabe6403efebc457483323e | /clase1/ej1.py | 1b53e0b2b2600759e35db01039c4d71145325e51 | [] | no_license | ndf14685/raspberry | bc23df6980fa499df86cdc8ece93d12383be4f35 | 8fb47de0f24a6abad6f4e6fb4893f309d016aa24 | refs/heads/master | 2020-09-11T20:56:49.345222 | 2019-12-01T04:19:09 | 2019-12-01T04:19:09 | 222,188,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,783 | py |
#include <wiringPi.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#define MAXTIMINGS 85
#define DHTPIN 7
int dht11_dat[5] = { 0, 0, 0, 0, 0 };
/*
 * Read one 40-bit frame (humidity int/dec, temperature int/dec, checksum)
 * from the DHT11 on DHTPIN and print it, or "Data not good, skip" when the
 * checksum fails or the read timed out.
 *
 * Fix: dht11_dat is a global accumulator that the original never cleared,
 * so bits left over from the previous call corrupted every read after the
 * first.  It is now zeroed at the start of each call.
 */
void read_dht11_dat()
{
	uint8_t laststate = HIGH;
	uint8_t counter = 0;
	uint8_t j = 0, i;
	float f; /* fahrenheit */
	/* reset the receive buffer so stale bits cannot leak between calls */
	dht11_dat[0] = dht11_dat[1] = dht11_dat[2] = dht11_dat[3] = dht11_dat[4] = 0;
	/* pull the data line LOW (>= 18 ms) to ask the sensor for a reading */
	pinMode( DHTPIN, OUTPUT );
	digitalWrite( DHTPIN, LOW );
	delay( 18 );
	/* release the line HIGH for ~40 us before listening for the response */
	digitalWrite( DHTPIN, HIGH );
	delayMicroseconds( 40 );
	/* switch the pin to INPUT so the sensor can drive the line with its reply */
	pinMode( DHTPIN, INPUT );
	/* time each level transition: the HIGH-pulse length encodes each bit */
	for ( i = 0; i < MAXTIMINGS; i++ )
	{
		counter = 0;
		while ( digitalRead( DHTPIN ) == laststate )
		{
			counter++;
			delayMicroseconds( 1 );
			if ( counter == 255 )
			{
				break;
			}
		}
		laststate = digitalRead( DHTPIN );
		if ( counter == 255 )
			break;
		/* skip the handshake edges; every second edge after that is a data bit */
		if ( (i >= 4) && (i % 2 == 0) )
		{
			dht11_dat[j / 8] <<= 1;
			/* a long HIGH pulse (counter > 16 ticks) means the bit is 1 */
			if ( counter > 16 )
				dht11_dat[j / 8] |= 1;
			j++;
		}
	}
	/*
	 * check we read 40 bits (8bit x 5 ) + verify checksum in the last byte
	 * print it out if data is good
	 */
	if ( (j >= 40) &&
	     (dht11_dat[4] == ( (dht11_dat[0] + dht11_dat[1] + dht11_dat[2] + dht11_dat[3]) & 0xFF) ) )
	{
		f = dht11_dat[2] * 9. / 5. + 32;
		printf( "Humidity = %d.%d %% Temperature = %d.%d *C (%.1f *F)\n",
			dht11_dat[0], dht11_dat[1], dht11_dat[2], dht11_dat[3], f );
	}else {
		printf( "Data not good, skip\n" );
	}
}
/* Entry point: initialise wiringPi, then poll the DHT11 once a second forever. */
int main( void )
{
	printf( "Raspberry Pi wiringPi DHT11 Temperature test program\n" );
	if ( wiringPiSetup() == -1 )
		exit( 1 );
	for ( ; ; )
	{
		read_dht11_dat();
		delay( 1000 ); /* wait 1sec to refresh */
	}
	return(0);
}
/* dht11.c - DHT11 temperature/humidity example for Raspberry Pi (wiringPi). */
"nfleitas@fusap.com.ar"
] | nfleitas@fusap.com.ar |
2f2897da3ab199c97a2904a7bc4488f42042c775 | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/client/bwobsolete_helpers/PyGUI/FocusManager.py | d63a96b2d06f8769fe8fa7654f13ed3704920427 | [] | no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,004 | py | # 2017.05.04 15:20:45 Střední Evropa (letní čas)
# Embedded file name: scripts/client/bwobsolete_helpers/PyGUI/FocusManager.py
_focusedComponent = None
def getFocusedComponent():
    """Return the component that currently owns focus, or None."""
    # reading a module global needs no `global` declaration
    return _focusedComponent
def setFocusedComponent(newFocus):
    """Move focus to *newFocus*, clearing the flag on the previous owner.

    No-op when *newFocus* already has focus.  Either side may be None.
    """
    global _focusedComponent
    if newFocus != _focusedComponent:
        previous = _focusedComponent
        # clear the old owner's flag before handing focus over
        if previous is not None:
            previous.focus = False
        _focusedComponent = newFocus
        if newFocus is not None:
            newFocus.focus = True
def isFocusedComponent(component):
    """True when *component* is the current focus owner.

    When either side is None the check is pure identity; otherwise the two
    components are compared by their string representations.
    """
    current = _focusedComponent
    if current is None or component is None:
        return current is component
    return current.__str__() == component.__str__()
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\bwobsolete_helpers\PyGUI\FocusManager.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:20:45 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
def bubbles(N):
    """Bubble-sort N in place, print it space-separated, return the swap count."""
    swaps = 0
    size = len(N)
    for i in range(size):
        # bubble the smallest remaining element down to position i
        for j in range(size - 1, i, -1):
            if N[j] < N[j - 1]:
                N[j], N[j - 1] = N[j - 1], N[j]
                swaps += 1
    print(' '.join(str(value) for value in N))
    return swaps
# Read the element count (kept for the input protocol, otherwise unused),
# then the values themselves; sort them and report the number of swaps.
n = int(input())
numbers = list(map(int, input().split()))
print(bubbles(numbers))
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
def verifica_preco(x, y, z):
    """Return the price of product *x*: look up its colour in *y* (product ->
    colour), then that colour's price in *z* (colour -> price).

    Returns None when the product has no colour or the colour has no price.

    Fixes the original bug where the loop variable shadowed the parameter
    ``x``, so the function ignored its first argument and always returned the
    price of whatever colour happened to be last in ``y``.
    """
    cor = y.get(x)
    if cor is None:
        return None
    return z.get(cor)
| [
"you@example.com"
] | you@example.com |
"""
Django settings for carzone project.
Generated by 'django-admin startproject' using Django 3.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control - rotate it and load
# it from an environment variable before any production deployment.
SECRET_KEY = 'jy+=*tv)29-vv3oo!g1(vd83@711y5eb2=r75dq)u!u6@j&)yx'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list: only localhost-style hosts are accepted while DEBUG is True.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # project app providing the public pages
    'pages.apps.PagesConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'carzone.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # project-level template directory, in addition to per-app templates
        'DIRS': ['templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'carzone.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# NOTE(review): the database password is hard-coded here - move the
# credentials into environment variables / a secrets store.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'carzone_db',
        'USER' : 'postgres',
        'PASSWORD' :'sad60',
        'HOST': 'localhost',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# collectstatic target directory
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'carzone/static'),
]
# Media
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
| [
"sadki9@gmail.com"
] | sadki9@gmail.com |
def max2(x):
    """Return the two largest values of *x* as (largest, runner-up).

    Duplicates count separately, so max2([7, 7, 1]) == (7, 7).
    Requires len(x) >= 2 (raises IndexError otherwise, as before).
    """
    # seed the pair from the first two elements
    if x[0] > x[1]:
        largest, runner_up = x[0], x[1]
    else:
        largest, runner_up = x[1], x[0]
    for value in x[2:]:
        if value > largest:
            largest, runner_up = value, largest
        elif value > runner_up:
            runner_up = value
    return largest, runner_up
# Quick demonstration: prints the two largest values of the sample list.
a = [1, 2, 3, 4]
print(max2(a))
"chenyilin@ChenYis-iMac.local"
] | chenyilin@ChenYis-iMac.local |
e1269bcb25875f70fc64b5c77aa877e42b600bf0 | 96104bf1a79257399cf5781d3b29d145fdef5073 | /molbert/tests/test_utils.py | b97a900ad68e982ed8d4307a16e794f0e1bd9e37 | [
"MIT"
] | permissive | panpiort8/MolBERT | 704c8ee00379f07f65d72695701a2e00f3c812ce | b312700733d6f14ba5bf90347adb59b0d99be65d | refs/heads/main | 2023-03-09T17:23:05.789108 | 2021-03-01T15:27:54 | 2021-03-01T15:29:09 | 343,453,109 | 1 | 0 | MIT | 2021-03-01T14:53:30 | 2021-03-01T14:53:29 | null | UTF-8 | Python | false | false | 3,317 | py | import random
import numpy as np
from molbert.utils.lm_utils import (
InputExample,
_truncate_seq_pair,
convert_example_to_features,
get_seq_lengths,
random_word,
unmask_lm_labels,
)
from molbert.utils.featurizer.molfeaturizer import SmilesIndexFeaturizer
TOKENIZER = SmilesIndexFeaturizer.bert_smiles_index_featurizer(10)
def test_get_seq_lenghts_with_issame():
    """With is_same=True the total length must span two sequences."""
    max_len = 10
    single, total = get_seq_lengths(max_len, is_same=True)
    # two special tokens are reserved out of the single-sequence budget
    assert single == max_len - 2
    assert total == 2 * max_len
def test_get_seq_lenghts_without_issame():
    """Without is_same the total length equals the single-sequence budget."""
    max_len = 10
    single, total = get_seq_lengths(max_len, is_same=False)
    # two special tokens are reserved out of the single-sequence budget
    assert single == max_len - 2
    assert total == max_len
def test_get_unmasked_labels():
    """unmask_lm_labels must restore input ids at positions not selected for LM.

    Seeded so that the random masking in convert_example_to_features is
    deterministic.
    """
    random.seed(1)
    tokens_a = list('C1CCCCC1')
    tokens_b = None
    example = InputExample(guid=1, tokens_a=tokens_a, tokens_b=tokens_b, is_next=False)
    # transform sample to original_features
    features = convert_example_to_features(example, 10, TOKENIZER)
    # get the unmasked label id's - useful for calculating accuracy
    unmasked_lm_label_ids = unmask_lm_labels(features.input_ids, features.lm_label_ids)
    # for all input tokens
    for i in range(len(features.input_ids)):
        # -1 marks a position that was NOT selected for LM prediction:
        if features.lm_label_ids[i] == -1:
            # then the unmasked token is equal to the input token
            assert unmasked_lm_label_ids[i] == features.input_ids[i]
        else:
            # else the unmasked label is equal to the lm_label_id
            assert unmasked_lm_label_ids[i] == features.lm_label_ids[i]
def test_random_word():
    """random_word must mask/replace the seed-determined positions and label them.

    With seed 1 on 'C1CCCCC1', position 0 is replaced by a random token ('F')
    and positions 6-7 become [MASK]; all other tokens are left untouched.
    """
    smiles = list('C1CCCCC1')
    random.seed(1)
    expected_output_labels = np.array([TOKENIZER.token_to_idx[t] for t in smiles])
    masked_tokens, output_labels = random_word(smiles, TOKENIZER)
    assert np.array_equal(masked_tokens, np.array(['F', '1', 'C', 'C', 'C', 'C', '[MASK]', '[MASK]']))
    # mask marks the positions that were changed; unchanged positions get label -1
    mask = np.array([True, False, False, False, False, False, True, True])
    expected_output_labels[~mask] = -1
    assert np.array_equal(output_labels, expected_output_labels)
def test_convert_example_to_features():
    """Smoke test: feature conversion completes without raising."""
    sample = InputExample(guid=1, tokens_a=list('C1CCCCC1'), tokens_b=None)
    convert_example_to_features(sample, TOKENIZER.max_length, TOKENIZER)
def test_truncate_seq_pair_concatenation_is_shorter_than_max_length():
    """Truncation is a no-op when both sequences already fit within max_length."""
    left = list(range(10))
    right = list(range(5))
    _truncate_seq_pair(left, right, 20)
    # neither sequence was modified
    assert left == list(range(10))
    assert right == list(range(5))
def test_truncate_seq_pair_concatenation_is_longer_than_max_length():
    """Only the longer sequence is trimmed until the pair fits max_length."""
    left = list(range(10))
    right = list(range(5))
    _truncate_seq_pair(left, right, 10)
    # the longer sequence shrank to 5 elements; the shorter one is untouched
    assert left == list(range(5))
    assert right == list(range(5))
| [
"noreply@github.com"
] | panpiort8.noreply@github.com |
5b685114688e55d4d6c801aa3798773078ed3a46 | 77758fe7790c5dea7615ccc66ae7214230eb07d5 | /drf_braces/tests/test_mixins.py | 5745cf26162b74138a9ca8e0e3b99645a42928a0 | [
"MIT"
] | permissive | NoraGithub/django-rest-framework-braces | 355be302c7a434f925edd0af06162bd632e730ba | ad98c6abef2045b1cae65db63793e810d989ee72 | refs/heads/master | 2021-01-01T18:43:32.481857 | 2017-05-09T14:52:58 | 2017-05-09T14:52:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,074 | py | from __future__ import absolute_import, print_function, unicode_literals
import unittest
import mock
from rest_framework.generics import GenericAPIView
from drf_braces.mixins import (
MapDataViewMixin,
MultipleSerializersViewMixin,
StrippingJSONViewMixin,
)
class TestMultipleSerializersViewMixin(unittest.TestCase):
    """Tests for MultipleSerializersViewMixin.get_serializer()."""
    def setUp(self):
        super(TestMultipleSerializersViewMixin, self).setUp()
        class View(MultipleSerializersViewMixin, GenericAPIView):
            pass
        self.view = View()
    # patch decorators are applied bottom-up, so the innermost patch
    # (get_serializer_class) is the first injected argument
    @mock.patch.object(GenericAPIView, 'get_serializer_context')
    @mock.patch.object(GenericAPIView, 'get_serializer_class')
    def test_get_serializer(self,
                            mock_get_serializer_class,
                            mock_get_serializer_context):
        """Without an explicit class, the view's serializer class is used."""
        context = {'context': 'here'}
        mock_get_serializer_context.return_value = context
        serializer = self.view.get_serializer(hello='world')
        # the serializer is instantiated from the class the view resolves
        self.assertEqual(serializer, mock_get_serializer_class.return_value.return_value)
        mock_get_serializer_class.assert_called_once_with()
        # kwargs are forwarded and the view context is injected
        mock_get_serializer_class.return_value.assert_called_once_with(
            hello='world', context=context
        )
        mock_get_serializer_context.assert_called_once_with()
    @mock.patch.object(GenericAPIView, 'get_serializer_context')
    @mock.patch.object(GenericAPIView, 'get_serializer_class')
    def test_get_serializer_with_class(self,
                                       mock_get_serializer_class,
                                       mock_get_serializer_context):
        """An explicitly supplied serializer_class bypasses the view's own."""
        context = {'context': 'here'}
        mock_get_serializer_context.return_value = context
        serializer_class = mock.MagicMock()
        serializer = self.view.get_serializer(hello='world', serializer_class=serializer_class)
        self.assertEqual(serializer, serializer_class.return_value)
        # the view's default class resolution must not run at all
        self.assertFalse(mock_get_serializer_class.called)
        serializer_class.assert_called_once_with(hello='world', context=context)
        mock_get_serializer_context.assert_called_once_with()
class TestMapDataViewMixin(unittest.TestCase):
    """Tests for MapDataViewMixin.get_data() mapper resolution."""
    def setUp(self):
        super(TestMapDataViewMixin, self).setUp()
        class View(MapDataViewMixin, GenericAPIView):
            pass
        self.view = View()
        # sentinel stands in for whatever payload the request carries
        self.view.request = mock.MagicMock(data=mock.sentinel.data)
    def test_get_data_no_mapper(self):
        """Without any mapper the raw request data is returned unchanged."""
        actual = self.view.get_data()
        self.assertEqual(actual, mock.sentinel.data)
    @mock.patch.object(GenericAPIView, 'get_serializer_context')
    def test_get_data_attribute_mapper(self, mock_get_serializer_context):
        """A data_mapper_class attribute is instantiated with the view context
        and applied to the request data."""
        mapper = self.view.data_mapper_class = mock.MagicMock()
        actual = self.view.get_data()
        self.assertEqual(actual, mapper.return_value.return_value)
        mapper.assert_called_once_with(
            context=mock_get_serializer_context.return_value
        )
        mapper.return_value.assert_called_once_with(mock.sentinel.data)
    @mock.patch.object(GenericAPIView, 'get_serializer_context')
    def test_get_data_provided(self, mock_get_serializer_context):
        """A mapper_class passed to get_data() is used the same way."""
        mapper = mock.MagicMock()
        actual = self.view.get_data(mapper_class=mapper)
        self.assertEqual(actual, mapper.return_value.return_value)
        mapper.assert_called_once_with(
            context=mock_get_serializer_context.return_value
        )
        mapper.return_value.assert_called_once_with(mock.sentinel.data)
class TestStrippingJSONViewMixin(unittest.TestCase):
    """Tests for StrippingJSONViewMixin.get_parser_context()."""
    def setUp(self):
        super(TestStrippingJSONViewMixin, self).setUp()
        class View(StrippingJSONViewMixin, GenericAPIView):
            pass
        self.view = View()
        self.view.request = mock.MagicMock()
    def test_get_parser_context(self):
        """The view's parser_root must be exposed to parsers as 'parse_root'."""
        self.view.parser_root = mock.sentinel.parser_root
        actual = self.view.get_parser_context(self.view.request)
        self.assertIn('parse_root', actual)
        self.assertEqual(actual['parse_root'], mock.sentinel.parser_root)
| [
"miroslav.shubernetskiy@dealertrack.com"
] | miroslav.shubernetskiy@dealertrack.com |
b9169e937fabc228e29384360ef65944f5973688 | 1d87b6e7cd7879fefeaa8f475045de1cc1bc2bf5 | /podder_task_foundation/logging/log_setting.py | db6d2ddd99e112b75acd67189097e92a65cda131 | [] | no_license | nagisa-sakamoto/podder-task-foundation | 2ecb24e07bbfcc1121661fb7d9e7005faf9093e0 | 8de453bf8f89d5ddcb8e82d394f73f3a8f715329 | refs/heads/main | 2022-12-30T01:10:23.051183 | 2020-10-20T08:13:55 | 2020-10-20T08:13:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,068 | py | import logging
import os
from typing import Any, Dict
from podder_task_foundation.config import Config
class LogSetting:
    """Builds the logging configuration dictionary for tasks and the server.

    The computed settings are cached on the class, so the first load() wins
    for the lifetime of the process regardless of later instances.
    """
    # file that, when present, overrides the configured task name
    TASK_NAME_PATH = 'task_name.ini'
    DEFAULT_FORMAT = '[%(asctime)s.%(msecs)03d] %(levelname)s - %(message)s'
    DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
    # class-level cache shared by every instance (None until first load)
    _log_setting = None
    def __init__(self, mode: str, config: Config):
        # NOTE(review): `mode` is stored but never read in this class - confirm
        # whether it is still needed.
        self._mode = mode
        self._config = config
    def load(self) -> Dict:
        """Return the (process-wide cached) log settings dictionary."""
        if LogSetting._log_setting is None:
            LogSetting._log_setting = self._load_log_yml()
        return LogSetting._log_setting
    def _get_config(self, key: str, default: Any) -> Any:
        """Look up `key` under the 'log.' namespace first, then 'pipeline.',
        falling back to `default` when neither is set."""
        value = self._config.get("log." + key)
        if value is not None:
            return value
        value = self._config.get("pipeline." + key)
        if value is not None:
            return value
        return default
    def _load_log_yml(self) -> Dict:
        """Assemble the settings dict (despite the name, no YAML is parsed)."""
        # the task name file, when present, takes precedence over app.name
        if os.path.exists(self.TASK_NAME_PATH):
            with open(self.TASK_NAME_PATH, 'r') as stream:
                task_name = stream.read()
        else:
            task_name = self._get_config('app.name', '')
        settings = {
            'task_name': task_name,
            'default_log_format': self.DEFAULT_FORMAT,
            'date_format': self.DATE_FORMAT,
            'task_log_format': self._get_config('task_log_format', self.DEFAULT_FORMAT),
            'server_log_format': self._get_config('server_log_format', self.DEFAULT_FORMAT),
            'color_task_log_format': self._get_config('color_task_log_format', self.DEFAULT_FORMAT),
            'color_server_log_format': self._get_config('color_server_log_format',
                                                        self.DEFAULT_FORMAT),
            'task_log_level': self._get_config('task_log_level', logging.DEBUG),
            'server_log_level': self._get_config('server_log_level', logging.DEBUG),
            'log_colors': self._get_config('log_colors', {}),
            'secondary_log_colors': self._get_config('secondary_log_colors', {}),
        }
        return settings
| [
"takaaki.mizuno@gmail.com"
] | takaaki.mizuno@gmail.com |
d4bbd03fe42ba9327eb0c52142ba5c84766cd36c | ec153cf6c65b02d8d714e042bbdcf476001c6332 | /keystone/common/fernet_utils.py | cb7a69863bc71fd65b38a3e1a3ff689a24470723 | [] | no_license | bopopescu/dashboard | c4322f7602a9ba589400212aaef865ed4ffa8bdb | a74b4a549cd7d516dd9a0f5f2e17d06679c13bf6 | refs/heads/master | 2022-11-21T15:56:42.755310 | 2017-07-05T12:04:14 | 2017-07-05T12:04:17 | 281,596,428 | 0 | 0 | null | 2020-07-22T06:38:37 | 2020-07-22T06:38:36 | null | UTF-8 | Python | false | false | 11,370 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import os
import stat
from cryptography import fernet
from oslo_log import log
import keystone.conf
from keystone.i18n import _LE, _LW, _LI
LOG = log.getLogger(__name__)
CONF = keystone.conf.CONF
# NOTE(lbragstad): In the event there are no encryption keys on disk, let's use
# a default one until a proper key repository is set up. This allows operators
# to gracefully upgrade from Mitaka to Newton without a key repository,
# especially in multi-node deployments. The NULL_KEY is specific to credential
# encryption only and has absolutely no beneficial purpose outside of easing
# upgrades.
NULL_KEY = base64.urlsafe_b64encode(b'\x00' * 32)
class FernetUtils(object):
def __init__(self, key_repository=None, max_active_keys=None):
self.key_repository = key_repository
self.max_active_keys = max_active_keys
def validate_key_repository(self, requires_write=False):
"""Validate permissions on the key repository directory."""
# NOTE(lbragstad): We shouldn't need to check if the directory was
# passed in as None because we don't set allow_no_values to True.
# ensure current user has sufficient access to the key repository
is_valid = (os.access(self.key_repository, os.R_OK) and
os.access(self.key_repository, os.X_OK))
if requires_write:
is_valid = (is_valid and
os.access(self.key_repository, os.W_OK))
if not is_valid:
LOG.error(
_LE('Either [fernet_tokens] key_repository does not exist or '
'Keystone does not have sufficient permission to access '
'it: %s'), self.key_repository)
else:
# ensure the key repository isn't world-readable
stat_info = os.stat(self.key_repository)
if(stat_info.st_mode & stat.S_IROTH or
stat_info.st_mode & stat.S_IXOTH):
LOG.warning(_LW(
'key_repository is world readable: %s'),
self.key_repository)
return is_valid
def create_key_directory(self, keystone_user_id=None,
keystone_group_id=None):
"""Attempt to create the key directory if it doesn't exist."""
if not os.access(self.key_repository, os.F_OK):
LOG.info(_LI(
'key_repository does not appear to exist; attempting to '
'create it'))
try:
os.makedirs(self.key_repository, 0o700)
except OSError:
LOG.error(_LE(
'Failed to create key_repository: either it already '
'exists or you don\'t have sufficient permissions to '
'create it'))
if keystone_user_id and keystone_group_id:
os.chown(
self.key_repository,
keystone_user_id,
keystone_group_id)
elif keystone_user_id or keystone_group_id:
LOG.warning(_LW(
'Unable to change the ownership of key_repository without '
'a keystone user ID and keystone group ID both being '
'provided: %s') % self.key_repository)
def _create_new_key(self, keystone_user_id, keystone_group_id):
"""Securely create a new encryption key.
Create a new key that is readable by the Keystone group and Keystone
user.
"""
key = fernet.Fernet.generate_key() # key is bytes
# This ensures the key created is not world-readable
old_umask = os.umask(0o177)
if keystone_user_id and keystone_group_id:
old_egid = os.getegid()
old_euid = os.geteuid()
os.setegid(keystone_group_id)
os.seteuid(keystone_user_id)
elif keystone_user_id or keystone_group_id:
LOG.warning(_LW(
'Unable to change the ownership of the new key without a '
'keystone user ID and keystone group ID both being provided: '
'%s') %
self.key_repository)
# Determine the file name of the new key
key_file = os.path.join(self.key_repository, '0')
try:
with open(key_file, 'w') as f:
# convert key to str for the file.
f.write(key.decode('utf-8'))
finally:
# After writing the key, set the umask back to it's original value.
# Do the same with group and user identifiers if a Keystone group
# or user was supplied.
os.umask(old_umask)
if keystone_user_id and keystone_group_id:
os.seteuid(old_euid)
os.setegid(old_egid)
LOG.info(_LI('Created a new key: %s'), key_file)
def initialize_key_repository(self, keystone_user_id=None,
keystone_group_id=None):
"""Create a key repository and bootstrap it with a key.
:param keystone_user_id: User ID of the Keystone user.
:param keystone_group_id: Group ID of the Keystone user.
"""
# make sure we have work to do before proceeding
if os.access(os.path.join(self.key_repository, '0'),
os.F_OK):
LOG.info(_LI('Key repository is already initialized; aborting.'))
return
# bootstrap an existing key
self._create_new_key(keystone_user_id, keystone_group_id)
# ensure that we end up with a primary and secondary key
self.rotate_keys(keystone_user_id, keystone_group_id)
def rotate_keys(self, keystone_user_id=None, keystone_group_id=None):
"""Create a new primary key and revoke excess active keys.
:param keystone_user_id: User ID of the Keystone user.
:param keystone_group_id: Group ID of the Keystone user.
Key rotation utilizes the following behaviors:
- The highest key number is used as the primary key (used for
encryption).
- All keys can be used for decryption.
- New keys are always created as key "0," which serves as a placeholder
before promoting it to be the primary key.
This strategy allows you to safely perform rotation on one node in a
cluster, before syncing the results of the rotation to all other nodes
(during both key rotation and synchronization, all nodes must recognize
all primary keys).
"""
# read the list of key files
key_files = dict()
for filename in os.listdir(self.key_repository):
path = os.path.join(self.key_repository, str(filename))
if os.path.isfile(path):
try:
key_id = int(filename)
except ValueError: # nosec : name isn't a number
pass
else:
key_files[key_id] = path
LOG.info(_LI('Starting key rotation with %(count)s key files: '
'%(list)s'), {
'count': len(key_files),
'list': list(key_files.values())})
# determine the number of the new primary key
current_primary_key = max(key_files.keys())
LOG.info(_LI('Current primary key is: %s'), current_primary_key)
new_primary_key = current_primary_key + 1
LOG.info(_LI('Next primary key will be: %s'), new_primary_key)
# promote the next primary key to be the primary
os.rename(
os.path.join(self.key_repository, '0'),
os.path.join(self.key_repository, str(new_primary_key))
)
key_files.pop(0)
key_files[new_primary_key] = os.path.join(
self.key_repository,
str(new_primary_key))
LOG.info(_LI('Promoted key 0 to be the primary: %s'), new_primary_key)
# add a new key to the rotation, which will be the *next* primary
self._create_new_key(keystone_user_id, keystone_group_id)
max_active_keys = self.max_active_keys
# purge excess keys
# Note that key_files doesn't contain the new active key that was
# created, only the old active keys.
keys = sorted(key_files.keys(), reverse=True)
while len(keys) > (max_active_keys - 1):
index_to_purge = keys.pop()
key_to_purge = key_files[index_to_purge]
LOG.info(_LI('Excess key to purge: %s'), key_to_purge)
os.remove(key_to_purge)
def load_keys(self, use_null_key=False):
"""Load keys from disk into a list.
The first key in the list is the primary key used for encryption. All
other keys are active secondary keys that can be used for decrypting
tokens.
:param use_null_key: If true, a known key containing null bytes will be
appended to the list of returned keys.
"""
if not self.validate_key_repository():
if use_null_key:
return [NULL_KEY]
return []
# build a dictionary of key_number:encryption_key pairs
keys = dict()
for filename in os.listdir(self.key_repository):
path = os.path.join(self.key_repository, str(filename))
if os.path.isfile(path):
with open(path, 'r') as key_file:
try:
key_id = int(filename)
except ValueError: # nosec : filename isn't a number,
# ignore this file since it's not a key.
pass
else:
keys[key_id] = key_file.read()
if len(keys) != self.max_active_keys:
# Once the number of keys matches max_active_keys, this log entry
# is too repetitive to be useful. Also note that it only makes
# sense to log this message for tokens since credentials doesn't
# have a `max_active_key` configuration option.
if self.key_repository == CONF.fernet_tokens.key_repository:
LOG.debug(
'Loaded %(count)d Fernet keys from %(dir)s, but '
'`[fernet_tokens] max_active_keys = %(max)d`; perhaps '
'there have not been enough key rotations to reach '
'`max_active_keys` yet?', {
'count': len(keys),
'max': self.max_active_keys,
'dir': self.key_repository})
# return the encryption_keys, sorted by key number, descending
key_list = [keys[x] for x in sorted(keys.keys(), reverse=True)]
if use_null_key:
key_list.append(NULL_KEY)
return key_list
| [
"laurencechan@qq.com"
] | laurencechan@qq.com |
72dcc5026c9b6f3fe26dbfe3f630eb55f9c2859e | a9e64d7a3b0db7a032f059862b012d173c2420e5 | /Models/TTWeek15_2DBinary_ad.py | ed05dff55fb2fd68835a06a7ca89c1435115d422 | [] | no_license | WilliamPJSmith/Support | 7aa4975ca621ef74e9b6b191e9378d444127178e | 6f6199dd8df41ae4e7dc268b4931ca26b7b891b9 | refs/heads/master | 2021-01-21T10:13:25.638760 | 2015-09-01T18:49:46 | 2015-09-01T18:49:46 | 41,625,153 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,904 | py | import random
from CellModeller.Regulation.ModuleRegulator import ModuleRegulator
from CellModeller.Biophysics.BacterialModels.CLBacterium import CLBacterium
from CellModeller.GUI import Renderers
import numpy
import math
# calculate radii
radiusA = 0.76
radiusB = 0.63
radiusC = 0.56
radiusD = 0.48
# specify target volume (same for each type, limited by smallest volume allowing spheres to have L>0)
initialVol = 1.85
targetVol = 2*initialVol
# specify initial lengths
lengthA = initialVol/(math.pi*radiusA**2) - 4*radiusA/3.0
lengthB = initialVol/(math.pi*radiusB**2) - 4*radiusB/3.0
lengthC = initialVol/(math.pi*radiusC**2) - 4*radiusC/3.0
lengthD = initialVol/(math.pi*radiusD**2) - 4*radiusD/3.0
# initial separation between cells
init_sep = 5.0
# other details
max_cells = 6000 # assuming we're going for 5000
saveEvery = 10
def setup(sim):
# Set biophysics, signalling, and regulation models. Add dolfin solver if used.
biophys = CLBacterium(sim,
max_substeps=8,
max_cells=max_cells,
max_contacts=32,
max_sqs=50**2,
jitter_z=False,
reg_param=0.04,
gamma=500,
periodic=False,
grid_spacing=10.0)
# add mechanical planes
planeWeight = 1.0
biophys.addPlane((0,0,0), (0,1,0), planeWeight) # base of box
#biophys.addPlane((0,0,-radiusD/2.0), (0,0,+1), planeWeight) # front of box
#biophys.addPlane((0,0,+radiusD/2.0), (0,0,-1), planeWeight) # back of box
regul = ModuleRegulator(sim, __file__) # use this file for reg too
# compile a list of solver parameters (using scaled values)
solverParams = None
# add biophysics, regulation, [solver], objects to simulator
sim.init(biophys, regul, None, None, solverParams)
# initialise 2 cells with different lengths, radii
sim.addCell(cellType=0, len=lengthA, rad=radiusA, pos=(+init_sep/2.0,radiusA,0), dir=(1,0,0))
sim.addCell(cellType=1, len=lengthD, rad=radiusD, pos=(-init_sep/2.0,radiusD,0), dir=(1,0,0))
# Add some objects to draw the models
mainRenderer = Renderers.GLBacteriumRenderer(sim)
sim.addRenderer(mainRenderer)
# How often should we output data?
sim.renderEveryNSteps = 1
sim.savePickle = True
sim.pickleSteps = saveEvery
print "Ready."
def init(cell):
cell.targetVol = targetVol + random.uniform(0.0,0.09*targetVol)
cell.growthRate = 1
def numSignals():
return 0
def numSpecies():
return 0
def update(cells):
for (id, cell) in cells.iteritems():
# division checks
if cell.volume > cell.targetVol:
cell.asymm = [1,1]
cell.divideFlag = True
def divide(parent, d1, d2):
d1.targetVol = targetVol + random.uniform(0.0,0.09*targetVol)
d2.targetVol = targetVol + random.uniform(0.0,0.09*targetVol)
def kill(cell):
cell.growthRate = 0.0 # dead cells can't grow any more
cell.divideFlag = False # dead cells can't divide
| [
"william.smith@cs.ox.ac.uk"
] | william.smith@cs.ox.ac.uk |
c2021b9e0344a89457e29d2a982dbf6b15c69282 | 9c347f3f021a0c7fb424c04ea6c6e1efbc7eff20 | /rpa_basic/1_excel/3_cell.py | 239ab96bfcf02efc6504f10425b35d5ab60198b5 | [] | no_license | cyanluna-git/NadoCoding | 6904471f274b8e9d0f0739a0d5d74fa515a92134 | 1dfbcb031e467aaa60e946af1efcce675fdfe47e | refs/heads/master | 2023-04-12T11:47:33.624368 | 2021-05-16T00:39:11 | 2021-05-16T00:39:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | from openpyxl import Workbook
wb = Workbook()
ws = wb.active
ws.title = "NadoSheet"
ws["A1"] = 1
ws["A2"] = 2
ws["A3"] = 3
ws["B1"] = 'Apple'
ws["B2"] = 'Beta'
ws["B3"] = 'C'
print(ws["A1"])
print(ws["A1"].value)
print(ws["A10"].value)
print(ws.cell(row=1, column=1).value)
print(ws.cell(row=1, column=2).value)
c = ws.cell(column=3, row=1, value='Meta')
print(c.value)
from random import *
index = 1
for x in range(1, 11):
for y in range(1,11):
# ws.cell(row=x, column=y, value= randint(0, 100))
ws.cell(row=x, column=y, value=index)
index += 1
wb.save("sample.xlsx")
wb.close() | [
"51350627+cyanluna-git@users.noreply.github.com"
] | 51350627+cyanluna-git@users.noreply.github.com |
26cd2b2ab735f881fe7976799cae416f7f22a77f | 37a1668eb7f05e72c7ee2c5c75b412cf85968f66 | /mtsmorf/move_exp/experiments.py | 7b60cadb50186fefb9ac934b8ac039ff6934df79 | [] | no_license | adam2392/motor-decoding | 7643f6849c83170c373599229d8a275db179a34a | 901a2c69429c82e7dbc00cd1db88d21a304a1fc1 | refs/heads/master | 2023-05-30T11:33:59.693081 | 2021-06-14T21:54:41 | 2021-06-14T21:54:41 | 317,599,056 | 0 | 0 | null | 2021-03-24T16:34:23 | 2020-12-01T16:19:29 | Jupyter Notebook | UTF-8 | Python | false | false | 14,931 | py | import argparse
import os
import sys
import traceback
from pathlib import Path
import dabest
import numpy as np
import matplotlib.pyplot as plt
import mne
import pandas as pd
from mne_bids.path import BIDSPath
from mne_bids.tsv_handler import _from_tsv
from mne.time_frequency.tfr import tfr_morlet
from rerf.rerfClassifier import rerfClassifier
from sklearn.dummy import DummyClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import cohen_kappa_score, confusion_matrix, make_scorer, roc_curve
from sklearn.model_selection import cross_validate, StratifiedKFold
from sklearn.utils import check_random_state
# Hack-y way to import from files in sibling "io" directory
if str(Path(__file__).parents[2]) in sys.path:
sys.path.append(str(Path(__file__).parents[2]))
from mtsmorf.move_exp.cv import cv_roc, cv_fit
from mtsmorf.move_exp.functions.move_experiment_functions import get_event_data
from mtsmorf.move_exp.functions.time_window_selection_functions import (
fit_classifiers_cv,
get_event_durations,
plot_event_durations,
plot_event_onsets,
)
from mtsmorf.move_exp.plotting import (
plot_roc_multiclass_cv,
plot_accuracies,
plot_roc_aucs,
plot_classifier_performance,
)
from mtsmorf.io.read import read_dataset, read_label, read_trial, get_trial_info, _get_anatomical_bad_chs
from mtsmorf.io.utils import NumpyEncoder
import json
from sklearn.inspection import permutation_importance
import yaml
def frequency_band_comparison(
epochs, destination_path, cv, metrics, nfreqs=10, random_state=None
):
"""
docstring
"""
destination = Path(destination_path)
rng = check_random_state(random_state)
seed = rng.randint(sys.maxint)
if not os.path.exists(destination):
os.makedirs(destination)
frequency_bands = dict(
delta=(0.5, 4),
theta=(4, 8),
alpha=(8, 13),
beta=(13, 30),
gamma=(30, 70),
hi_gamma=(70, 200),
)
scores = dict()
for name, (lfreq, hfreq) in frequency_bands.items():
freqs = np.logspace(*np.log10([lfreq, hfreq]), num=nfreqs)
n_cycles = freqs / 2.0 # different number of cycle per frequency
power = tfr_morlet(
epochs,
freqs=freqs,
n_cycles=n_cycles,
average=False,
return_itc=False,
decim=3,
n_jobs=1,
)
# Extract data and crop
inds = np.where((power.times >= -0.3) & (power.times <= 0.3))[0]
power_data = power.data[:, :, :, inds]
ntrials, nchs, nfreqs, nsteps = power_data.shape
included_trials = np.isin(labels, [0, 1, 2, 3])
# Create X, y data
X = power_data[included_trials].reshape(np.sum(included_trials), -1)
y = labels[included_trials]
mtsmorf = rerfClassifier(
projection_matrix="MT-MORF",
max_features="auto",
n_jobs=-1,
random_state=random_state,
image_height=nchs * nfreqs,
image_width=nsteps,
)
scores[name] = cv_fit(
mtsmorf,
X,
y,
metrics=metrics,
cv=cv,
n_jobs=None,
return_train_score=True,
return_estimator=True,
)
fig, axs = plt.subplots(ncols=2, figsize=(22, 6), dpi=100)
axs = axs.flatten()
## Accuracy comparison
id_col = pd.Series(range(1, n_splits + 1))
accuracies = {name: score["test_accuracy"] for name, score in scores.items()}
accuracies["ID"] = id_col
df = pd.DataFrame(accuracies)
# Re-order so that control is hi-gamma band
idx = [list(scores.keys())[-1]] + list(scores.keys())[:-1]
my_data = dabest.load(df, idx=idx, resamples=100, random_seed=seed)
my_data.mean_diff.plot(ax=axs[0])
axs[0].set(title=f"{subject.upper()} Accuracy Comparison between Frequency Bands")
## ROC AUC comparison
roc_auc_ovrs = {name: score["test_roc_auc_ovr"] for name, score in scores.items()}
roc_auc_ovrs["ID"] = id_col
df = pd.DataFrame(roc_auc_ovrs)
my_data = dabest.load(df, idx=idx, resamples=100, random_seed=seed)
my_data.mean_diff.plot(ax=axs[1])
axs[1].set(title=f"{subject.upper()} ROC AUC Comparison between Frequency Bands")
fig.tight_layout()
plt.savefig(
destination / f"{subject}_frequency_band_comparison_tmin=-0.5_tmax=1.0.png"
)
plt.close(fig)
def time_window_experiment(
bids_path,
destination_path,
domain,
cv,
metrics,
freqs=None,
n_cycles=None,
random_state=None,
):
if domain.lower() in ["frequency", "freq"] and (freqs is None or n_cycles is None):
raise TypeError("freqs and n_cycles must not be None to run frequency domain")
subject = bids_path.subject
destination = Path(destination_path) / f"trial_specific_window/{domain}_domain/"
if not os.path.exists(destination):
os.makedirs(destination)
go_cue_durations = get_event_durations(
bids_path, event_key="Left Target", periods=-1
)
left_target_durations = get_event_durations(
bids_path, event_key="Left Target", periods=1
)
tmin = -max(go_cue_durations)
tmax = max(left_target_durations)
epochs, labels = get_event_data(bids_path, tmin=tmin - 0.2, tmax=tmax + 0.2)
if domain.lower() in ["frequency", "freq"]:
power = tfr_morlet(
epochs,
freqs=freqs,
n_cycles=n_cycles,
average=False,
return_itc=False,
decim=3,
n_jobs=-1,
)
power.crop(tmin=tmin, tmax=tmax)
data = power.data
ntrials, nchs, nfreqs, nsteps = data.shape
print(f"{subject.upper()}: data.shape = ({data.shape})")
t = power.times
mask = (t >= -np.asarray(go_cue_durations)[:, None, None, None]) & (
t <= np.asarray(left_target_durations)[:, None, None, None]
)
masked_data = data * mask
image_height = nchs * nfreqs
image_width = nsteps
elif domain.lower() == "time":
epochs.crop(tmin=tmin, tmax=tmax)
data = epochs.get_data()
ntrials, nchs, nsteps = data.shape
print(f"{subject.upper()}: data.shape = ({data.shape})")
t = epochs.times
mask = (t >= -np.asarray(go_cue_durations)[:, None, None]) & (
t <= np.asarray(left_target_durations)[:, None, None]
)
masked_data = data * mask
image_height = nchs
image_width = nsteps
else:
raise ValueError('domain must be one of "time", "freq", or "frequency".')
X = masked_data.reshape(ntrials, -1)
y = labels
cv_scores = fit_classifiers_cv(
X,
y,
image_height,
image_width,
cv,
metrics,
n_jobs=-1,
random_state=random_state,
)
n_repeats = 5 # number of repeats for permutation importance
clf_name = "MT-MORF"
scores = cv_scores[clf_name]
best_ind = np.argmax(scores["test_roc_auc_ovr"])
best_estimator = scores["estimator"][best_ind]
best_train_inds = scores["train_inds"][best_ind]
best_test_inds = scores["test_inds"][best_ind]
X_train = X[best_train_inds]
y_train = y[best_train_inds]
X_test = X[best_test_inds]
y_test = y[best_test_inds]
# Run feat importance for roc_auc_ovr
try:
scoring_methods = [
"roc_auc_ovr",
]
for scoring_method in scoring_methods:
key_mean = f"validate_{scoring_method}_imp_mean"
if key_mean not in scores:
scores[key_mean] = []
key_std = f"validate_{scoring_method}_imp_std"
if key_std not in scores:
scores[key_std] = []
mtsmorf = rerfClassifier(
projection_matrix="MT-MORF",
max_features="auto",
n_jobs=-1,
random_state=random_state,
image_height=image_height,
image_width=image_width,
)
mtsmorf.fit(X_test, y_test) # For some reason need to call this?
print(f"{subject.upper()}: Running feature importances...")
result = permutation_importance(
best_estimator,
X_test,
y_test,
scoring=scoring_method,
n_repeats=n_repeats,
n_jobs=1,
random_state=random_state,
)
imp_std = result.importances_std
imp_vals = result.importances_mean
scores[key_mean].append(list(imp_vals))
scores[key_std].append(list(imp_std))
cv_scores[clf_name] = scores
except:
print("feat importances failed...")
traceback.print_exc()
for clf_name, clf_scores in cv_scores.items():
estimator = clf_scores["estimator"]
if estimator is not None:
del clf_scores["estimator"]
with open(destination / f"{subject}_{clf_name}_results.json", "w") as fout:
json.dump(clf_scores, fout, cls=NumpyEncoder)
print(f"{subject.upper()} CV results for {clf_name} saved as json.")
clf_scores["estimator"] = estimator
fig, axs = plt.subplots(nrows=2, ncols=3, dpi=100, figsize=(24, 12))
axs = axs.flatten()
for i, (clf_name, scores) in enumerate(cv_scores.items()):
ax = axs[i]
plot_roc_multiclass_cv(
scores["test_predict_proba"],
X,
y,
scores["test_inds"],
ax=ax,
)
ax.set(
xlabel="False Positive Rate",
ylabel="True Positive Rate",
xlim=[-0.05, 1.05],
ylim=[-0.05, 1.05],
title=f"{subject.upper()} {clf_name} One vs. Rest ROC Curves",
)
ax.legend(loc="lower right")
plot_roc_aucs(cv_scores, ax=axs[-1])
axs[-1].set(
ylabel="ROC AUC",
title=f"{subject.upper()}: ROC AUCs for Trial-Specific Time Window",
)
fig.tight_layout()
plt.savefig(destination / f"{subject}_trial_specific_time_window_rocs.png")
plt.close(fig)
print(
f"Figure saved at {destination}/{subject}_trial_specific_time_window_rocs.png"
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("subject", type=str, help="subject ID (e.g. efri02)")
parser.add_argument(
"-experiment",
type=str,
choices=[
"shuffle",
"baseline",
"frequency_bands",
"trial_specific_time_window_time",
"trial_specific_time_window_freq",
"plot_event_durations",
"plot_event_onsets",
],
help="which experiment to run",
)
args = parser.parse_args()
subject = args.subject
experiment = args.experiment
with open(Path(os.path.abspath(__file__)).parent / "config.yml") as f:
config = yaml.load(f, Loader=yaml.FullLoader)
bids_root = Path(config["bids_root"])
results_path = Path(config["results_path"])
# path identifiers
path_identifiers = dict(
subject=subject,
session="efri",
task="move",
acquisition="seeg",
run="01",
suffix="ieeg",
extension=".vhdr",
root=bids_root,
)
bids_path = BIDSPath(**path_identifiers)
# Prep data for model fitting
if not os.path.exists(results_path / subject):
try:
os.makedirs(results_path / subject)
except FileExistsError as e:
print(
f"Tried making results directory for {subject}, but file already exists."
)
except Exception as e:
print(
f"Tried making results directory for {subject}, but an error occurred:"
)
traceback.print_exc()
seed = 1
n_splits = 5
tmin, tmax = (-0.75, 1.25)
cv = StratifiedKFold(n_splits)
metrics = dict(
accuracy="accuracy",
cohen_kappa_score=make_scorer(cohen_kappa_score),
roc_auc_ovr="roc_auc_ovr",
)
if experiment == "shuffle":
epochs, labels = get_event_data(bids_path, tmin=tmin, tmax=tmax)
shuffle_channels_experiment(
epochs,
labels,
cv,
metrics,
results_path / subject,
tmin=tmin,
tmax=tmax,
nfreqs=10,
lfreq=70,
hfreq=200,
random_state=seed,
)
elif experiment == "baseline":
baseline_experiment(
bids_path,
results_path / subject,
cv,
metrics,
random_state=seed,
)
elif experiment == "frequency_bands":
epochs, labels = get_event_data(bids_path, tmin=tmin, tmax=tmax)
epochs.crop(tmin=-0.5, tmax=1.0)
frequency_band_comparison(
epochs, results_path / subject, cv, metrics, random_state=seed
)
elif experiment == "trial_specific_time_window_time":
nfreqs = 10
lfreq, hfreq = (70, 200)
freqs = np.logspace(*np.log10([lfreq, hfreq]), num=nfreqs)
n_cycles = freqs / 3.0
time_window_experiment(
bids_path,
results_path / subject,
"time",
cv,
metrics,
random_state=seed,
)
elif experiment == "trial_specific_time_window_freq":
nfreqs = 10
lfreq, hfreq = (70, 200)
freqs = np.logspace(*np.log10([lfreq, hfreq]), num=nfreqs)
n_cycles = freqs / 3.0
time_window_experiment(
bids_path,
results_path / subject,
"freq",
cv,
metrics,
freqs=freqs,
n_cycles=n_cycles,
random_state=seed,
)
elif experiment == "plot_event_durations":
fig, ax = plt.subplots(dpi=150, figsize=(8, 6))
behav, events = map(pd.DataFrame, get_trial_info(bids_path))
plot_event_durations(behav, events, ax=ax, random_state=seed)
ax.set(ylabel="duration (s)", title=f"{subject.upper()}: Duration of Events")
fig.tight_layout()
plt.savefig(results_path / subject / f"{subject}_event_durations.png")
elif experiment == "plot_event_onsets":
fig, ax = plt.subplots(dpi=150, figsize=(8, 6))
behav, events = map(pd.DataFrame, get_trial_info(bids_path))
plot_event_onsets(behav, events, ax=ax, random_state=seed)
ax.set(
ylabel='Onset Relative to "Go Cue" (s)',
title=f"{subject.upper()}: Onset of Events",
)
fig.tight_layout()
plt.savefig(results_path / subject / f"{subject}_event_onsets.png")
| [
"chester.huynh924@gmail.com"
] | chester.huynh924@gmail.com |
06903707b804c4c3de31c5af875b5b8ede7a761a | 3cbee2296fd6b54f80587eead83813d4c878e06a | /vpr2swcs/genu.py | 0617a3692038e528684849099de7f48133b0c0d9 | [] | no_license | nikhil-soraba/rasp30 | 872afa4ad0820b8ca3ea4f232c4168193acbd854 | 936c6438de595f9ac30d5619a887419c5bae2b0f | refs/heads/master | 2021-01-12T15:19:09.899590 | 2016-10-31T03:23:48 | 2016-10-31T03:23:48 | 71,756,442 | 0 | 0 | null | 2016-10-24T05:58:57 | 2016-10-24T05:58:56 | null | UTF-8 | Python | false | false | 17,725 | py | import pdb, copy
def recStrExpand0(x):
    """Expand the first '[lo:hi]' range found in *x* into concrete names.

    Example: 'x[0:1].y[0:1]' ->
        ['x[0].y[0]', 'x[0].y[1]', 'x[1].y[0]', 'x[1].y[1]']
    Recurses (via recStrExpand) so every range in the string is expanded.
    """
    colon = x.find(':')
    if colon < 0:
        # No range left: the name is already concrete.
        return [x]
    head, tail = x[:colon], x[colon + 1:]
    lo = int(head.split('[')[-1])           # digits after the last '[' before ':'
    hi = int(tail.split(']')[0])            # digits before the first ']' after ':'
    prefix = '['.join(head.split('[')[:-1])
    suffix = ']'.join(tail.split(']')[1:])
    expanded = []
    for idx in range(lo, hi + 1):
        expanded.extend(recStrExpand(prefix + '[%g]' % idx + suffix))
    return expanded

def recStrExpand(*var):
    """Expand a range string, or a list of them, into a flat name list.

    Optional second argument 'remBrak' strips the square brackets from
    every expanded name (e.g. 'a[0]' -> 'a0').
    """
    src = var[0]
    names = [src] if isinstance(src, str) else list(src)
    out = []
    for name in names:
        out.extend(recStrExpand0(name))
    if len(var) > 1 and var[1] == 'remBrak':
        out = [s.replace('[', '').replace(']', '') for s in out]
    return out
def smDictFromList(*var):
    """Build a switch-matrix address double-look-up-table from a flat spec.

    The spec is an alternating list of [name-pattern, partial-address] pairs;
    each name pattern is range-expanded and each expanded name maps to one
    partial SM address. Adding the partial addresses of two nets (which must
    be bipartite in the SM connection graph) yields the FG address that
    connects them.

    Example input:
        x = ['nfet[0:1].out[0]', [0, range(23, 25)],
             'ota[0:3].in[0:1]', [range(0, 8), 0]]
    yields:
        {'nfet[0].out[0]': [0, 23], 'nfet[1].out[0]': [0, 24],
         'ota[0].in[0]': [0, 0], 'ota[0].in[1]': [1, 0], ...}

    Optional second argument 'remBrak' strips brackets from the key names.
    """
    spec = var[0]
    rem_brak = len(var) > 1 and var[1] == 'remBrak'
    smdict = dict()
    for i in range(0, len(spec), 2):
        if rem_brak:
            names = recStrExpand(spec[i], 'remBrak')
        else:
            names = recStrExpand(spec[i])
        sma = spec[i + 1]
        if len(names) == 1:
            # Single name: the address is taken verbatim.
            addrs = [sma]
        elif isinstance(sma[0], int):
            # Fixed first coordinate, iterable second coordinate.
            addrs = [[sma[0], j] for j in sma[1]]
        else:
            # Iterable first coordinate, fixed second coordinate.
            addrs = [[j, sma[1]] for j in sma[0]]
        smdict.update(zip(names, addrs))
    return smdict
def lutExpand(ki, p0i, p1, lut_size=4):
    """Reorder a LUT cover to a new pin order and expand all don't-cares.

    ki  -- list of cover rows, e.g. ['10--', '11-1', '0000']; rows shorter
           than lut_size are padded on the left with '-'.
    p0i -- original pin order, e.g. ['i0', 'i1', 'i2', 'i3']; if shorter
           than lut_size, it is padded on the left with 'open' pins.
    p1  -- desired pin order (length lut_size), e.g. ['i2', 'i3', 'i1', 'i0'].
    lut_size -- number of LUT inputs (default 4, the historical hard-coded
           value, so existing callers are unaffected).

    Returns the reordered rows with every '-' expanded into both 0 and 1,
    i.e. 2**N concrete rows per row with N don't-cares.
    """
    # Pad rows and the original pin list up to the LUT width.
    k = [row.rjust(lut_size, '-') for row in ki]
    p0 = ['open'] * (lut_size - len(p0i)) + p0i
    # Column permutation: position j of the output row comes from p0 column
    # order[j]. (Duplicate 'open' entries all resolve to the same '-' pad.)
    order = [p0.index(pin) for pin in p1]
    cc = []
    for a0 in k:
        a = [a0[col] for col in order]
        n_dc = a.count('-')                       # number of don't-cares
        covers = [list(a) for _ in range(2 ** n_dc)]
        if n_dc:
            # Substitute every binary combination into the '-' positions.
            # (Guarded: bin(0)[2:] is '0', not '', so n_dc == 0 must skip.)
            for i, row in enumerate(covers):
                bits = bin(i)[2:].zfill(n_dc)
                for b in bits:
                    row[row.index('-')] = b
        cc.extend(''.join(row) for row in covers)
    return cc
class pbarray(object):
    """A 2-D grid of tiles, indexed array[x][y]."""
    name = []
    type = []
    array = []  # array[x][y] indexed

    def __init__(self, xsize, ysize):
        # Fill the whole grid with placeholder tiles named '--'.
        self.array = [[tile('--', []) for _y in range(ysize)]
                      for _x in range(xsize)]

    def getSub(self, *var):
        """Look up a tile three ways:

        getSub('o1')   -- by tile name (columns scanned in x order, then y);
        getSub([x, y]) -- by a coordinate pair;
        getSub(x, y)   -- by separate coordinates.
        Returns None when a name lookup finds no match.
        """
        first = var[0]
        if isinstance(first, str):
            for column in self.array:
                for cell in column:
                    if cell.name == first:
                        return cell
        elif len(var) == 1:
            return self.array[first[0]][first[1]]
        else:
            return self.array[first][var[1]]

    def addSub(self, new_sub, grid_loc):
        """Place new_sub at grid_loc, recording the location on the tile."""
        new_sub.grid_loc = grid_loc
        self.array[grid_loc[0]][grid_loc[1]] = new_sub

    def __repr__(self):
        # Print the grid with y increasing upward (top row = highest y),
        # each cell left-justified to 8 characters.
        rows = []
        for y in reversed(range(len(self.array[0]))):
            cells = [' %s' % self.array[x][y].name.ljust(8)
                     for x in range(len(self.array))]
            rows.append(''.join(cells) + '\n')
        return ''.join(rows)
class stats(object):
    """Attribute bag with case-tolerant lookup."""

    def get(self, var):
        """Return attribute *var*, trying *var* as given, then uppercase,
        then lowercase.

        Raises AttributeError (a subclass of Exception, so existing
        `except Exception` callers still work) when no casing matches.
        """
        # Try exact casing first, then the two case-folded variants,
        # matching the original lookup order.
        for candidate in (var, var.upper(), var.lower()):
            if hasattr(self, candidate):
                return getattr(self, candidate)
        # Fix: the original dropped into pdb.set_trace() here (a debugging
        # leftover that hangs non-interactive runs) before raising.
        raise AttributeError(
            "stats object has no attribute %r (also tried %r and %r)"
            % (var, var.upper(), var.lower()))
class tile:
    """One grid position: its name/type plus routing and block structures."""
    name = []
    type = []
    chanx = []    # horizontal routing channel
    chany = []    # vertical routing channel
    sblock = []   # switch block
    cb = []       # complex block for this tile

    def __init__(self, name, type):
        self.name = name
        self.type = type
        # complexBlock is defined elsewhere in this module.
        self.cb = complexBlock(name, type)

    def __repr__(self):
        return '{0} {1} \n'.format(self.name, self.type)
class pblock:
    # A placement block: either a container (e.g. a CLB/CAB tile) holding
    # child pblocks in `subblocks`, or a leaf device (ota, nfet, ...).
    # NOTE(review): these are class-level shared defaults; __init__ rebinds
    # name/type/number/subblocks/inputs/outputs per instance, but
    # portorder/pin_num/ex_fgs appear to be assigned externally — confirm.
    name = []
    type = []
    number = []
    inputs = []
    outputs = []
    portorder = [] # portorder[:] <-> inputs[:] + outputs[:]
    subblocks = []
    pin_num= [] #pin locations
    ex_fgs = []
def __init__(self, name, type):
self.name = name
self.type = type
self.subblocks = []
self.inputs = []
self.outputs = 'open'
self.number = 0
def addSub(self, *var):
newsub = copy.deepcopy(var[0])
#pdb.set_trace()
if len(var) == 1:
newsub.number = len(self.subblocks)
self.subblocks.append(newsub)
else:
if isinstance(var[1], str):
for i in range(len(self.subblocks)):
if self.subblocks[i].name == var[1]:
subind = i
else:
subind = var[1]
try:
del self.subblocks[subind]
except:
pdb.set_trace()
self.subblocks.insert(subind, newsub)
self.subblocks[subind].number = subind
def addSubs(self, dev_types, dev_pins):
dev_name = 'temp[0]'
dev_num = 0
for i in range(len(dev_types)):
if dev_types[i] != dev_name.split('[')[0]:
dev_num = 0
dev_type = dev_types[i]
dev_name = '%s[%g]'%(dev_type,dev_num) #ota[0]e
#pdb.set_trace()
nsb = pblock(dev_name, dev_type) #ota[0], ota
if self.type in ['CLB']:## if you change this change i/p type in genli() for clb in rasp30.py
nsb.inputs = ['open']*(dev_pins[dev_type]-1)
nsb.outputs = 'open'
else: ## CAB2 variation
nsb.inputs = ['open']*(dev_pins[dev_type+'_in'])
nsb.outputs =['open']*(dev_pins[dev_type+'_out'])
self.addSub(nsb)
dev_num = dev_num+1
#print self
#pdb.set_trace()
#print nsb.outputs
def getSub(self, x):
if self.subblocks:
for i in range(len(self.subblocks)):
if self.subblocks[i].number == x or\
self.subblocks[i].name == x or\
self.subblocks[i].outputs == x:
return self.subblocks[i]
def getPort(self, x):
if isinstance(x, str):
if x in self.inputs:
return self.portorder[self.inputs.index(x)]
else:
return self.portorder[self.outputs.index(x)+len(self.inputs)]
else:
ind = self.portorder.index(x)
if ind >= len(self.inputs):
return self.outputs[ind-len(self.inputs)]
else:
return self.inputs[ind]
def setPort(self, x, val):
ind = self.portorder.index(x)
if ind >= len(self.inputs):
self.outputs[ind-len(self.inputs)] = val
else:
self.inputs[ind] = val
def movePort(self, val, x):
#remove pin from old port location if it existed
if val in self.inputs: self.inputs[self.inputs.index(val)] = 'open'
if val in self.outputs: self.outputs[self.outputs.index(val)] = 'open'
#add pin new port location
self.setPort(x, val)
#add pin new port location
def printSubs(self, *var):
if var:
printall = 1
else:
printall = 1
if self.subblocks:
for i in range(len(self.subblocks)):
cur_sub = self.getSub(i)
if cur_sub.outputs != 'open' or printall:
print '%g %s %s | '%(i, cur_sub.name, cur_sub.type),
for j in range(len(cur_sub.inputs)):
print '%s '%(cur_sub.inputs[j]),
print '-> %s'%(cur_sub.outputs)
def printAllSubs(self):
self.printSubs('printall')
def __repr__(self):
return 'class: %s - name: %s - type: %s - num: %s '%(self.__class__.__name__, self.name, self.type, str(self.number))
class complexBlock(pblock):
    """
    after each block deals w/ making its own custom local interconnect matrix
    we look up the switch address for each on switch
    """
    def swcsFromLi(self):
        """Translate the local-interconnect matrix self.li (1 == switch on)
        into absolute switch coordinates appended to self.swcs.
        Row/column names come from self.stats.li1 / self.stats.li0; many
        device-specific names are special-cased or skipped below.
        NOTE: Python 2 (print statements); failures drop into pdb."""
        verbose = 1
        print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"
        #pdb.set_trace()
        for x in range(len(self.li)):
            for y in range(len(self.li[0])):
                #pdb.set_trace()
                if self.li[x][y] == 1:
                    try:
                        #pdb.set_trace()
                        swc_name0 = self.stats.li0[y]
                        swc_name1 = self.stats.li1[x]
                        # Destinations that need no local-interconnect switch.
                        if swc_name1 in ['cab_vmm.O[5]','cab_vmm.O[6]','cab_vmm.O[7]','vmm12x1[0].in[0]','vmm12x1_wowta[0].in[0]','vmm12x1_wowta[0].in[1]','vmm12x1_wowta[0].in[2]','vmm12x1_wowta[0].in[3]','vmm12x1_wowta[0].in[4]','vmm12x1_wowta[0].in[5]','vmm12x1_wowta[0].in[6]','vmm12x1_wowta[0].in[7]','vmm12x1_wowta[0].in[8]','vmm12x1_wowta[0].in[9]','vmm12x1_wowta[0].in[10]','vmm12x1_wowta[0].in[11]','vmm12x1[0].in[1]','vmm12x1[0].in[2]','vmm12x1[0].in[3]','vmm12x1[0].in[4]','vmm12x1[0].in[5]','vmm12x1[0].in[6]','vmm12x1[0].in[7]','vmm12x1[0].in[8]','vmm12x1[0].in[9]','vmm12x1[0].in[10]','vmm12x1[0].in[11]','vmm8x4_in[0].in[0]','vmm8x4_in[0].in[1]','vmm8x4_in[0].in[2]','vmm8x4_in[0].in[3]','vmm8x4_in[0].in[4]','vmm8x4_in[0].in[5]','vmm8x4_in[0].in[6]','vmm8x4_in[0].in[7]','DAC_sftreg[0].in[0]','DAC_sftreg[0].in[1]','DAC_sftreg[0].in[2]','nmirror[0].in[0]','nmirror_vmm[0].in[0]','th_logic[0].in[0]','th_logic[0].in[1]','th_logic[0].in[2]','th_logic[0].in[3]','th_logic[0].in[4]','th_logic[0].in[5]','th_logic[0].in[6]','th_logic[0].in[7]','dendrite_4x4[0].in[0]','dendrite_4x4[0].in[1]','dendrite_4x4[0].in[2]','dendrite_4x4[0].in[3]','vmm8inx8in[0].in[0]','vmm8inx8in[0].in[1]','vmm8inx8in[0].in[2]','vmm8inx8in[0].in[3]','vmm8inx8in[0].in[4]','vmm8inx8in[0].in[5]','vmm8inx8in[0].in[6]','vmm8inx8in[0].in[7]','vmm8inx8in[0].in[8]','vmm8inx8in[0].in[9]','vmm8inx8in[0].in[10]','vmm8inx8in[0].in[11]','vmm8inx8in[0].in[12]','vmm8inx8in[0].in[13]','vmm8inx8in[0].in[14]','vmm8inx8in[0].in[15]','vmm8inx8in[0].in[16]','sftreg3[0].in[0]','sftreg3[0].in[1]','sftreg3[0].in[2]','sftreg4[0].in[0]','sftreg4[0].in[1]','sftreg4[0].in[2]']:
                            #pdb.set_trace()
                            print "no LI needed dont worry!"
                            continue
                        # NOTE(review): the 'print "NO LI needed"' statements
                        # below are unreachable -- they follow 'continue'.
                        if swc_name1 in ['cab_vmm.O[0]','cab_vmm.O[1]','cab_vmm.O[2]', 'cab_vmm.O[3]' ] and swc_name0 =='sftreg2[0].out[0]':
                            continue
                            print "NO LI needed"
                        if swc_name1 in ['cab_vmm.O[0]','cab_vmm.O[1]','cab_vmm.O[2]', 'cab_vmm.O[3]' ] and swc_name0 =='sftreg3[0].out[0]':
                            continue
                            print "NO LI needed"
                        if swc_name1 in ['cab_vmm.O[0]','cab_vmm.O[1]','cab_vmm.O[2]', 'cab_vmm.O[3]' ] and swc_name0 =='sftreg4[0].out[0]':
                            continue
                            print "NO LI needed"
                        if swc_name1 in ['cab_vmm.O[0]','cab_vmm.O[1]','cab_vmm.O[2]', 'cab_vmm.O[3]' ] and swc_name0 =='mmap_local_swc[0].out[0]':
                            continue
                            print "NO LI needed"
                        if swc_name1 in ['cab.O[0]','cab.O[1]','cab.O[2]', 'cab.O[3]' ] and swc_name0 =='mmap_local_swc[0].out[0]':
                            continue
                            print "NO LI needed"
                        # Device-specific remapping of the source name.
                        if swc_name1 in ['vmm4x4_SR[0].in[0]','vmm8x4_SR[0].in[0]']:
                            swc_name0='cab_vmm.I[6]'
                        elif swc_name1 in ['vmm4x4_SR[0].in[1]','vmm8x4_SR[0].in[1]']:
                            swc_name0='cab_vmm.I[10]'
                        elif swc_name1 in ['vmm4x4_SR[0].in[2]','vmm8x4_SR[0].in[2]']:
                            swc_name0='cab_vmm.I[0]'
                        elif swc_name1 in ['vmm4x4_SR[0].in[3]','vmm8x4_SR[0].in[3]']:
                            swc_name0='cab_vmm.I[4]'
                        elif swc_name1.split("4x4[")[0] in ['vmm']:
                            swc_name0='vmm4x4_dummy['+swc_name1[13]+']'
                            #pdb.set_trace()
                        #elif swc_name0 in ['cab_vmm.I[13]','cab_vmm.I[14]','cab_vmm.I[15]','cab2.I[13]','cab2.I[14]','cab2.I[15]']:
                        elif swc_name0 in ['cab_vmm.I[13]','cab_vmm.I[14]','cab_vmm.I[15]','cab.I[13]','cab.I[14]','cab.I[15]','cab2.I[13]','cab2.I[14]','cab2.I[15]']:
                            print "no LI need for I[13:15] so dont worry!"
                            continue
                        elif swc_name1 in ['in2in_x1[0].out[0]','in2in_x1[0].in[0]','vmm8x4_in[0].in[12]']:
                            print "no LI Needed"
                            continue
                        elif swc_name1[:12] in ['sftreg[0].in']:
                            continue
                        elif swc_name1 == 'dendiff[0].in[0]':
                            #pdb.set_trace()
                            continue
                        print swc_name0
                        print swc_name1
                        #pdb.set_trace()
                        # Base coordinates for source and destination switches.
                        swc0 = self.stats.li[swc_name0]
                        swc1 = self.stats.li[swc_name1]
                        if swc_name0== 'meas_volt_mite[0].out':
                            swc1=[11,0]
                        elif swc_name0== 'meas_volt_mite[1].out':
                            swc1=[15,0]
                        #pdb.set_trace()
                        # A coordinate entry may be a list (fan-out of several
                        # physical switches); expand each combination.
                        if all(isinstance(x,int) for x in swc0)==False:
                            for i in range(len(swc0[1])):
                                swc = [swc0[0]+swc1[0], swc0[1][i]+swc1[1]]
                                swcx = self.array_stats.getTileOffset(swc, self.grid_loc)
                                self.swcs.append(swcx)
                                if verbose :
                                    print 'local interconnect %g %s -> %g %s (%g %g) -> (%g %g)'%(y, swc_name0, x, swc_name1, swc[0], swc[1], swcx[0], swcx[1])
                        elif all(isinstance(x,int) for x in swc1)==False :
                            for i in range(len(swc1[0])):
                                swc = [swc0[0]+swc1[0][i], swc0[1]+swc1[1]]
                                swcx = self.array_stats.getTileOffset(swc, self.grid_loc)
                                self.swcs.append(swcx)
                                if verbose :
                                    print 'local interconnect %g %s -> %g %s (%g %g) -> (%g %g)'%(y, swc_name0, x, swc_name1, swc[0], swc[1], swcx[0], swcx[1])
                        else:
                            swc = [swc0[0]+swc1[0], swc0[1]+swc1[1]]
                            swcx = self.array_stats.getTileOffset(swc, self.grid_loc)
                            self.swcs.append(swcx)
                            if verbose :
                                print 'local interconnect %g %s -> %g %s (%g %g) -> (%g %g)'%(y, swc_name0, x, swc_name1, swc[0], swc[1], swcx[0], swcx[1])
                        #print self.array_stats.getTileOffset(swc, self.grid_loc)
                        #print self.grid_loc
                    except:
                        # NOTE(review): bare except hides the real error;
                        # intended here as a debugging hook via pdb.
                        print 'failed in swcsFromLI()'
                        pdb.set_trace()
#class ioblock(complexBlock):
# def __init__(self, name):
# self.name = name
# self.type = 'ioblock'
## self.inputs = ['open','open','open','open','open','open']
## self.outputs = ['open','open','open','open','open','open']
## self.portorder = [0,3,6,9,12,15,1,4,7,10,13,16]
# self.inputs = ['open']*6
# self.outputs= ['open']*12
# self.portorder = [0,3,6,9,12,15,1,2,4,5,7,8,10,11,13,14,16,17]
#
# self.subblocks = []
# for i in range(6):
# self.addSub(pblock('empty', 'ioslice'))
#
#
# def genLI(self):
# for i in range(len(self.portorder)):
# print self.getPort(i),
# print
# for i in range(len(self.portorder)):
# print iosdStats().pinorder[i]
def mainTest():
    """Manual smoke test for the module helpers: exercises recStrExpand,
    smDictFromList and lutExpand with sample inputs and prints the results
    for visual inspection (Python 2 print statements)."""
    print 'genu mainTest()'
    # recStrExpand: expand bracketed range notation into individual names.
    a = 'x[0:1].y[0:2].z[0]'
    ax = recStrExpand(a)
    print a
    for i in ax: print i
    print
    a = ['x[0:1].y[0:2].z[0]', 'z[0:3]']
    ax = recStrExpand(a)
    print a
    for i in ax: print i
    print
    ax = recStrExpand(a, 'bacon')
    print a
    for i in ax: print i
    print
    ax = recStrExpand(a, 'remBrak')
    print a
    for i in ax: print i
    print
    # smDictFromList: build a dict from alternating name/value pairs.
    fg = ['volswc[0:1].out[0]' ,[0, range(33, 35)],
          'ota[0:3].in[0:1]'   ,[range( 0, 8), 0]]
    fgd = smDictFromList(fg)
    #fgd = smDictFromList(fg, 'remBrak')
    for i in fgd.keys():
        print '%s (%s %s)'%(i, fgd[i][0], fgd[i][1])
    #print recStrExpand.__doc__
    #print smDictFromList.__doc__
    # lutExpand: remap a LUT cover between pin orderings.
    k = ['10--', '11-1', '0000'] #cover
    p0 = ['i0','i1','i2','i3'] #original pin order
    p1 = ['i2','i3','i1','i0'] #new pin order
    kk = lutExpand(k, p0, p1)
    # for i in kk: print i
    k = ['01', '10']
    p0 = ['n1', 'i2']
    p1 = ['i2', 'open', 'open', 'n1']
    kk = lutExpand(k, p0, p1)
    for i in kk: print i
    #pdb.set_trace()
# Run the self-test when this module is executed as a script.
if __name__ == "__main__":
    mainTest()
| [
"ms.m.d.collins@gmail.com"
] | ms.m.d.collins@gmail.com |
4dabeab8706df514f84e25c7faab9eca0de1afdc | 4ab71fd5344328392f4954c9e65288d162bfa97b | /profiles/migrations/0023_auto__add_field_awardtype_plus.py | 5e38994faaaee84f87cd993150003424b4a3b750 | [] | no_license | stripesolutions/xvs | 5dff2a6fae3461d78d2a7271646804619831e925 | 0beb916b95dab96645bb279cfc0539d891d93cb7 | refs/heads/master | 2021-01-18T23:06:59.105293 | 2016-10-24T20:55:48 | 2016-10-24T20:55:48 | 40,054,100 | 0 | 3 | null | 2016-02-22T14:59:13 | 2015-08-01T17:14:02 | Python | UTF-8 | Python | false | false | 12,566 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration 0023: add the boolean ``plus`` flag to
    ``profiles.AwardType``. Auto-generated by South; the ``models`` dict
    below is a frozen snapshot of ORM state, not the live model classes."""
    def forwards(self, orm):
        """Apply: create the ``profiles_awardtype.plus`` column (default False)."""
        # Adding field 'AwardType.plus'
        db.add_column('profiles_awardtype', 'plus',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)
    def backwards(self, orm):
        """Revert: drop the column added by forwards()."""
        # Deleting field 'AwardType.plus'
        db.delete_column('profiles_awardtype', 'plus')
    # Frozen ORM state that South uses while executing this migration.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'positions.organisation': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Organisation'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['positions.OrganisationCategory']", 'null': 'True', 'blank': 'True'}),
            'charity_number': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
            'department': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Department']", 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'directions': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '127'}),
            'phone_number': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '31', 'blank': 'True'}),
            'primary_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'purpose': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'volunteer_policy': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
        },
        'positions.organisationcategory': {
            'Meta': {'ordering': "('name',)", 'object_name': 'OrganisationCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        },
        'positions.positioncategory': {
            'Meta': {'ordering': "('name',)", 'object_name': 'PositionCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        },
        'profiles.award': {
            'Meta': {'object_name': 'Award'},
            'award': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.AwardType']"}),
            'date_awarded': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'profiles.awardtype': {
            'Meta': {'object_name': 'AwardType'},
            'hours_required': ('django.db.models.fields.IntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'plus': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'profiles.baseprofile': {
            'Meta': {'object_name': 'BaseProfile'},
            'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'communication': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'department': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Department']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_representative': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_volunteer': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'slas': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.ServiceLevelAgreement']", 'symmetrical': 'False', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'profiles.course': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Course'},
            'faculty': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Faculty']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        'profiles.department': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Department'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        'profiles.faculty': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Faculty'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'profiles.howdidyouhear': {
            'Meta': {'object_name': 'HowDidYouHear'},
            'how': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'profiles.representativeprofile': {
            'Meta': {'object_name': 'RepresentativeProfile'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'job_title': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'organisation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['positions.Organisation']"}),
            'profile': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['profiles.BaseProfile']", 'unique': 'True'})
        },
        'profiles.servicelevelagreement': {
            'Meta': {'ordering': "('order',)", 'object_name': 'ServiceLevelAgreement'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'preferred_answer': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'statement': ('django.db.models.fields.TextField', [], {})
        },
        'profiles.volunteerprofile': {
            'Meta': {'object_name': 'VolunteerProfile'},
            'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'bio': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['positions.PositionCategory']", 'null': 'True', 'blank': 'True'}),
            'contact_email': ('django.db.models.fields.EmailField', [], {'max_length': '127', 'null': 'True', 'blank': 'True'}),
            'course': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'cv': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'hours': ('weekgrid.WeekgridField', [], {'max_length': '127'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'international': ('django.db.models.fields.CharField', [], {'default': "'H'", 'max_length': '1'}),
            'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '31'}),
            'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'postgrad': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
            'profile': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['profiles.BaseProfile']", 'unique': 'True'}),
            'referencefile': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'referrer': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'school': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '127', 'blank': 'True'}),
            'student_id': ('django.db.models.fields.CharField', [], {'max_length': '31', 'null': 'True', 'blank': 'True'}),
            'year': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['profiles']
"alex@x13n.com"
] | alex@x13n.com |
aada75f992b5765b99c8b557488861d8f58312bf | 123235e95aff61c58b2cfbf0558b1d2eb211c096 | /pydb_utill.py | 679588aeae80baf089930574db257ba563ded233 | [] | no_license | beatbox4108/pyDB | 72e0582f2b10f8b224613abb69a1471908349344 | 5551328ff51a0a6b60b0577bc17e3b8f24c742d1 | refs/heads/master | 2023-06-21T09:39:50.751269 | 2021-07-22T06:11:11 | 2021-07-22T06:11:11 | 374,066,707 | 0 | 0 | null | 2021-06-05T09:23:08 | 2021-06-05T08:56:56 | null | UTF-8 | Python | false | false | 701 | py | import json
import pprint
from tabulate import tabulate
import datetime
def load_db_config():
    """Read database settings from ``config.json`` in the current directory.

    Returns:
        tuple: ``(db_type, db_url)`` where ``db_type`` is the value of the
        ``DB_CONNECTION`` key ("postgres" or "sqlite") and ``db_url`` is a
        connection string for that backend (empty string for any other type).

    Raises:
        FileNotFoundError: if ``config.json`` does not exist.
        KeyError: if a required key is missing from the config file.
    """
    # Use a context manager so the handle is always closed; the original
    # opened the file and never closed it.
    with open('config.json', 'r') as json_file:
        json_obj = json.load(json_file)
    username = json_obj["DB_USERNAME"]
    password = json_obj["DB_PASSWORD"]
    hostname = json_obj["DB_HOST"]
    port = json_obj["DB_PORT"]
    dbname = json_obj["DB_DATABASE"]
    db_type = json_obj["DB_CONNECTION"]
    db_url = ""
    if db_type == "postgres":
        db_url = f'postgres://{username}:{password}@{hostname}:{port}/{dbname}'
    elif db_type == "sqlite":
        # SQLite only needs the database file path.
        db_url = f'{dbname}'
    return db_type, db_url
# Manual smoke test: print the configured backend type and connection URL.
if __name__ == '__main__':
    db_type, url = load_db_config()
    print(db_type, url)
| [
"nakano16180@gmail.com"
] | nakano16180@gmail.com |
d29f0d2f11801343f11a47d05288fa24f931602c | 2b968068343edf3cee4280cd7b58f27abf8d4f15 | /html/SGP/web/py/chat_client.py | 1fc36be64b8371dc40e2f9681fc97e52dc49f766 | [] | no_license | Juaca5/hydroid | 39d44dcb916b41c51f5a833e2102d267acd39ff4 | ab52e9e3671f35092261c7dc5550cc866a203184 | refs/heads/master | 2020-03-19T18:40:04.929100 | 2018-06-15T13:48:10 | 2018-06-15T13:48:10 | 136,819,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,532 | py | import sys, socket, select
def chat_client():
    """Simple terminal chat client (Python 2): connects to host:port given on
    the command line, then multiplexes stdin and the server socket with
    select(), echoing server data and forwarding typed lines.
    NOTE(review): the argv check allows len(sys.argv) == 3, but sys.argv[3]
    is then read below and raises IndexError -- the check likely should be
    '< 4'. Also 'namefile' is read but never used -- confirm intent."""
    if(len(sys.argv) < 3) :
        print 'Usage : python chat_client.py hostname port namefile'
        sys.exit()
    host = sys.argv[1]
    port = int(sys.argv[2])
    namefile = sys.argv[3]
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # 2-second timeout applies to the connect attempt below.
    s.settimeout(2)
    # connect to remote host
    try :
        s.connect((host, port))
    except :
        print 'Unable to connect'
        sys.exit()
    print 'Connected to remote host. You can start sending messages'
    sys.stdout.write('[Me] '); sys.stdout.flush()
    while 1:
        # Watch both the keyboard and the server socket.
        socket_list = [sys.stdin, s]
        # Get the list sockets which are readable
        read_sockets, write_sockets, error_sockets = select.select(socket_list , [], [])
        for sock in read_sockets:
            if sock == s:
                # incoming message from remote server, s
                data = sock.recv(4096)
                if not data :
                    # Empty read means the server closed the connection.
                    print '\nDisconnected from chat server'
                    sys.exit()
                else :
                    #print data
                    sys.stdout.write(data)
                    sys.stdout.write('[Me] '); sys.stdout.flush()
            else :
                # user entered a message
                msg = sys.stdin.readline()
                s.send(msg)
                sys.stdout.write('[Me] '); sys.stdout.flush()
# Run the client and propagate its exit status to the shell.
if __name__ == "__main__":
    sys.exit(chat_client())
| [
"juan.tapia@alumnos.uv.cl"
] | juan.tapia@alumnos.uv.cl |
4d80d77bec5c5634b2020e2cdfe0b7a30e03cb60 | 2657b25d290884e23d507a518f0f721bcf36ccf6 | /proj/settings.py | d7657415d7420712982111ad3a192cedd94a57fb | [] | no_license | wkrueger/exemplo-django-graphql | 731a547c10c5742604315c3c887b199665eb6408 | a7840eb07751c81f48cce8c46b97ec0f4a15e93e | refs/heads/main | 2023-08-26T03:15:37.862009 | 2021-10-25T14:38:25 | 2021-10-25T14:38:25 | 394,453,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,367 | py | """
Django settings for proj project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import environ
# django-environ reader: DEBUG is cast to bool and defaults to False.
# Required environment variables: SECRET_KEY and DATABASE_URL (read by
# env.db() below); DEBUG is optional.
env = environ.Env(DEBUG=(bool, False))
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env("DEBUG")
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "graphene_django",
    "app",
]
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "proj.urls"
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]
WSGI_APPLICATION = "proj.wsgi.application"
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# env.db() parses the DATABASE_URL environment variable.
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = "pt"
TIME_ZONE = "America/Sao_Paulo"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = "/static/"
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
# graphene-django: dotted path to the root GraphQL schema object.
GRAPHENE = {"SCHEMA": "app.schema.schema"}
"wkrueger128@gmail.com"
] | wkrueger128@gmail.com |
dd29ba0161560b2e89b22a3616b0cd936035b9cb | 2589e080a2cc76bae58963576ebd76fc024bb64e | /Snakefile | 39024ec9c31e8fa55fdce4689f6a3f81b6f6f5fc | [
"Apache-2.0"
] | permissive | inambioinfo/2020plus | eb0d8932d3d0748d9676430c9d22af5c50727b60 | 5c1bda3cfe59719509408f96c473d6d9d582442f | refs/heads/master | 2020-03-28T05:18:48.417528 | 2018-08-02T15:04:32 | 2018-08-02T15:04:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,862 | from os.path import join
# configuration file
# Fall back to config.yaml unless a complete config was passed via --config.
if 'config' not in vars() or not config or 'ntrees' not in config:
    configfile: "config.yaml"
# output directory
output_dir=config["output_dir"]
# MAF file containing mutations
mutations=config["mutations"]
# pre-trained classifier
trained_classifier=config["trained_classifier"]
# flag for CV
cv="--cv"
# number of trees in RF
ntrees=config['ntrees']
ntrees2=5*ntrees
# params for simulations
num_iter=10
# string iteration IDs "1".."10" used as the {iter} wildcard values
ids=list(map(str, range(1, num_iter+1)))
# minimum recurrent missense
min_recur=3
###################################
# Top-level rules
###################################
rule all:
input: join(output_dir, "output/results/r_random_forest_prediction.txt")
# same rule is "all", but semantically more meaningful
rule predict:
"""
Predict on a pan-cancer set of somatic mutations from multiple cancer types.
This command will simultaneous train 20/20+ and make predictions using
gene hold-out cross-validation. The predict command uses the following parameters:
Input
-----
mutations : MAF file
MAF file containing mutations. Please see http://probabilistic2020.readthedocs.io/en/latest/tutorial.html#mutations for details on file format.
Output
------
output_dir : directory
Path of directory to save output. The results are save in the
"output/results/r_random_forest_prediction.txt" file.
"""
input: join(output_dir, "output/results/r_random_forest_prediction.txt")
# top-level rule to only train the 20/20+ random forest
rule train:
"""
Train a 20/20+ model to predict cancer driver genes. The trained model can
be used for subsequent prediction. The train command uses the following parameters:
Input
-----
mutations : MAF file
MAF file containing mutations. Please see http://probabilistic2020.readthedocs.io/en/latest/tutorial.html#mutations for details on file format.
Output
------
output_dir : directory
Path to file directory to save output. The saved model file from
20/20+ will be named 2020plus.Rdata by default.
"""
input: join(output_dir, "2020plus.Rdata")
# use an already trained 20/20+ random forest to predict new data
rule pretrained_predict:
"""
Predict cancer driver genes using a pre-trained 20/20+ model from the "train" command. The pretrained_predict command uses the following parameters:
Input
-----
mutations : MAF file
MAF file containing mutations. Please see http://probabilistic2020.readthedocs.io/en/latest/tutorial.html#mutations for details on file format.
trained_classifier : .Rdata file
File path of saved R workspace containing the trained 20/20+ model.
Output
------
output_dir : directory
File path of directory to save output. The results are save in the
"pretrained_output/results/r_random_forest_prediction.txt" file.
"""
input: join(output_dir, "pretrained_output/results/r_random_forest_prediction.txt")
rule help:
"""
Print list of all targets with help.
"""
run:
print('Input and output parameters are specified via the command line or in the config.yaml file. If done via the command line, e.g., the "trained_classifier" option would be specified by the following argument:\n\n--config trained_classifier="data/2020plus_100k.Rdata"\n\nMultiple options can follow after the --config flag.\n')
myhelp = ['predict', 'train', 'pretrained_predict', 'help']
for myrule in workflow.rules:
if myrule.name in myhelp:
print('='*len(myrule.name))
print(myrule.name)
print('='*len(myrule.name))
print(myrule.docstring)
print('See "snakemake --help" for additional snakemake command line help documentation.\n')
###################################
# Code for calculating empirical null
# distribution based on simulations
###################################
# Simulate MAF files for subsequent running by oncogene/tsg test
rule simMaf:
input:
MUTATIONS=mutations
params:
min_recur=min_recur,
data_dir=config["data_dir"]
output:
join(output_dir, "simulated_summary/chasm_sim_maf{iter,[0-9]+}.txt")
shell:
"mut_annotate --log-level=INFO "
" -b {params.data_dir}/snvboxGenes.bed -i {params.data_dir}/snvboxGenes.fa -c 1.5 "
" -m {input.MUTATIONS} -p 0 -n 1 --maf --seed=$(({wildcards.iter}*42)) "
" -r {params.min_recur} --unique -o {output}"
# calculate summarized features for the simulated mutations
# Compute the summarized feature table (--summary) for one simulated
# replicate.  Uses the same iter*42 seed scheme as simMaf so the simulated
# MAF and its summary stay in sync for a given replicate number.
rule simSummary:
    input:
        MUTATIONS=mutations
    params:
        min_recur=min_recur,
        data_dir=config["data_dir"]
    output:
        join(output_dir, "simulated_summary/chasm_sim_summary{iter}.txt")
    shell:
        "mut_annotate --log-level=INFO "
        " -b {params.data_dir}/snvboxGenes.bed -i {params.data_dir}/snvboxGenes.fa "
        " -c 1.5 -m {input.MUTATIONS} -p 0 -n 1 --summary --seed=$(({wildcards.iter}*42)) "
        " --score-dir={params.data_dir}/scores "
        " --unique -r {params.min_recur} -o {output}"
# run probabilistic2020 tsg statistical test on simulated MAF
# Run the probabilistic2020 tumor-suppressor-gene (tsg) statistical test on
# one simulated MAF replicate, using NUMSIMULATIONS internal permutations.
rule simTsg:
    input:
        join(output_dir, "simulated_summary/chasm_sim_maf{iter}.txt")
    params:
        num_sim=config["NUMSIMULATIONS"],
        data_dir=config["data_dir"]
    threads: 10
    output:
        join(output_dir, "simulated_summary/tsg_sim{iter}.txt")
    shell:
        "probabilistic2020 --log-level=INFO tsg "
        " -c 1.5 -n {params.num_sim} -b {params.data_dir}/snvboxGenes.bed "
        " -m {input} -i {params.data_dir}/snvboxGenes.fa -p {threads} -d 1 "
        " -o {output} "
# run probabilistic2020 oncogene statistical test on simulated MAF
# Run the probabilistic2020 oncogene statistical test on one simulated MAF
# replicate.  NOTE(review): unlike the observed-data "og" rule below, this
# call does not pass --unique -- confirm whether that asymmetry is intended.
rule simOg:
    input:
        mutations=join(output_dir, "simulated_summary/chasm_sim_maf{iter}.txt")
    params:
        min_recur=min_recur,
        num_sim=config["NUMSIMULATIONS"],
        data_dir=config["data_dir"]
    threads: 10
    output:
        join(output_dir, "simulated_summary/oncogene_sim{iter}.txt")
    shell:
        "probabilistic2020 --log-level=INFO oncogene "
        " -c 1.5 -n {params.num_sim} -b {params.data_dir}/snvboxGenes.bed "
        " -m {input.mutations} -i {params.data_dir}/snvboxGenes.fa -p {threads} "
        " --score-dir={params.data_dir}/scores -r {params.min_recur} "
        " -o {output}"
# Combine the results from simOg, simTsg, and simSummary
# Merge one replicate's summary, oncogene-test and tsg-test outputs into a
# single feature table for the classifier.
# NOTE(review): "-og-test" uses a single dash while "--tsg-test" uses two;
# confirm against the 2020plus `features` CLI whether it should be --og-test.
rule simFeatures:
    input:
        summary=join(output_dir, "simulated_summary/chasm_sim_summary{iter}.txt"),
        og=join(output_dir, "simulated_summary/oncogene_sim{iter}.txt"),
        tsg=join(output_dir, "simulated_summary/tsg_sim{iter}.txt")
    params:
        data_dir=config["data_dir"]
    output:
        join(output_dir, "simulated_summary/simulated_features{iter}.txt")
    shell:
        "python `which 2020plus.py` features "
        " -s {input.summary} --tsg-test {input.tsg} -og-test {input.og} "
        " -o {output}"
# final processing of the simulation results
# Concatenate every replicate's feature file into one table.  The first awk
# keeps a single header row and drops the repeated headers (lines starting
# with "gene") from subsequent files; the second awk replaces the gene-name
# column of each data row with a running row number (NR) so every simulated
# row gets a unique identifier.
rule finishSim:
    input:
        expand(join(output_dir, "simulated_summary/simulated_features{iter}.txt"), iter=ids)
    output:
        join(output_dir, "simulated_summary/simulated_features.txt")
    shell:
        'cat {input} | awk -F"\t" \'{{OFS="\t"}} NR == 1 || !/^gene/\' - > ' + output_dir + '/simulated_summary/tmp_simulated_features.txt ; '
        'cat '+output_dir+'/simulated_summary/tmp_simulated_features.txt | awk -F"\t" \'{{OFS="\t"}}{{if(NR != 1) printf (NR"\t"); if(NR!=1) for(i=2; i<NF; i++) printf ($i"\t"); if(NR != 1) print $i; if(NR==1) print $0}}\' - > {output}'
###################################
# Code for calculating results on
# actually observed mutations
###################################
# calculate summarized features for the observed mutations
# Compute the summarized feature table for the *observed* mutations.
# Unlike the simulation rules this uses -n 0 (no simulated draws) and no
# --seed, since it summarizes the real data as-is.
rule summary:
    input:
        mutations=mutations
    params:
        min_recur=min_recur,
        data_dir=config["data_dir"]
    output:
        join(output_dir, "summary.txt")
    shell:
        "mut_annotate --log-level=INFO "
        " -b {params.data_dir}/snvboxGenes.bed -i {params.data_dir}/snvboxGenes.fa "
        " -c 1.5 -m {input.mutations} -p 0 -n 0 --summary "
        " --score-dir={params.data_dir}/scores "
        " --unique -r {params.min_recur} -o {output}"
# run probabilistic2020 tsg statistical test on MAF
# Run the probabilistic2020 tsg statistical test on the observed MAF
# (verbose variant of simTsg, applied to the real data).
rule tsg:
    input:
        mutations
    params:
        num_sim=config["NUMSIMULATIONS"],
        data_dir=config["data_dir"]
    threads: 10
    output:
        join(output_dir, "tsg.txt")
    shell:
        "probabilistic2020 -v --log-level=INFO tsg "
        " -c 1.5 -n {params.num_sim} -b {params.data_dir}/snvboxGenes.bed "
        " -m {input} -i {params.data_dir}/snvboxGenes.fa -p {threads} -d 1 "
        " -o {output} "
# run probabilistic2020 oncogene statistical test on MAF
# Run the probabilistic2020 oncogene statistical test on the observed MAF.
# Passes --unique (the simulated counterpart simOg does not).
rule og:
    input:
        mutations=mutations
    params:
        min_recur=min_recur,
        num_sim=config["NUMSIMULATIONS"],
        data_dir=config["data_dir"]
    threads: 10
    output:
        join(output_dir, "oncogene.txt")
    shell:
        "probabilistic2020 -v --log-level=INFO oncogene "
        " -c 1.5 -n {params.num_sim} -b {params.data_dir}/snvboxGenes.bed "
        " -m {input.mutations} -i {params.data_dir}/snvboxGenes.fa -p {threads} "
        " --unique --score-dir={params.data_dir}/scores -r {params.min_recur} "
        " -o {output}"
# Combine the results from og, tsg, and summary
# Merge the observed-data summary, oncogene-test and tsg-test outputs into
# the feature table used for training/classification.
# NOTE(review): "-og-test" uses a single dash while "--tsg-test" uses two;
# confirm against the 2020plus `features` CLI whether it should be --og-test.
rule features:
    input:
        summary=join(output_dir, "summary.txt"),
        og=join(output_dir, "oncogene.txt"),
        tsg=join(output_dir, "tsg.txt")
    params:
        data_dir=config["data_dir"]
    output:
        join(output_dir, "features.txt")
    shell:
        "python `which 2020plus.py` features "
        " -s {input.summary} --tsg-test {input.tsg} -og-test {input.og} "
        " -o {output}"
# perform prediction by random forest
# in this case the data is pan-cancer
# and so a cross-validation loop is performed
# Pan-cancer prediction with a cross-validation loop:
#   1) train a random forest on the observed features and save it,
#   2) score the simulated features against it to build the empirical null
#      distribution (simulated_null_dist.txt),
#   3) classify the observed features against that null.
# The shell block is a str.format template: {outdir} is substituted when the
# Snakefile is parsed, while snakemake's runtime placeholders are escaped
# with double braces ({{params...}}, {{input...}}).
rule cv_predict:
    input:
        features=join(output_dir, "features.txt"),
        sim_features=join(output_dir, "simulated_summary/simulated_features.txt"),
    params:
        ntrees=ntrees,
        ntrees2=ntrees2,
        data_dir=config["data_dir"],
        output_dir=config["output_dir"]
    output:
        join(output_dir, "output/results/r_random_forest_prediction.txt"),
        join(output_dir, "trained.Rdata")
    shell:
        """
        python `which 2020plus.py` --log-level=INFO train -d .7 -o 1.0 -n {{params.ntrees2}} -r {outdir}/trained.Rdata --features={{input.features}} --random-seed 71
        python `which 2020plus.py` --log-level=INFO classify --trained-classifier {outdir}/trained.Rdata --null-distribution {outdir}/simulated_null_dist.txt --features {{input.sim_features}} --simulated
        python `which 2020plus.py` --out-dir {outdir}/output --log-level=INFO classify -n {{params.ntrees}} -d .7 -o 1.0 --features {{input.features}} --null-distribution {outdir}/simulated_null_dist.txt --random-seed 71
        """.format(outdir=output_dir)
#############################
# Rules for just training on
# pan-cancer data
#############################
# Train a 20/20+ random forest on pan-cancer features and save the model as
# an R workspace.  Same brace-escaping scheme as cv_predict: {outdir} and
# {cv} are filled in at parse time, {{...}} at rule-execution time.
rule train_pancan:
    input:
        features=join(output_dir, "features.txt")
    params:
        ntrees=ntrees,
        data_dir=config["data_dir"],
        output_dir=config["output_dir"]
    output:
        join(output_dir, "2020plus.Rdata")
    shell:
        """
        python `which 2020plus.py` --log-level=INFO train -d .7 -o 1.0 -n {{params.ntrees}} --features={{input.features}} {cv} --random-seed 71 -r {outdir}/2020plus.Rdata
        """.format(outdir=output_dir, cv=cv)
#############################
# Rules for predicting using
# a trained classifier on a separate
# mutation data set
#############################
# Predict driver genes on a new mutation data set using an already-trained
# classifier: first build the empirical null from the simulated features,
# then classify the observed features against it.  {outdir}/{cv} are filled
# in at Snakefile parse time; {{...}} placeholders at rule execution.
rule predict_test:
    input:
        trained_classifier=trained_classifier,
        features=join(output_dir, "features.txt"),
        sim_features=join(output_dir, "simulated_summary/simulated_features.txt"),
    params:
        ntrees=ntrees,
    output:
        join(output_dir, "pretrained_output/results/r_random_forest_prediction.txt")
    shell:
        """
        python `which 2020plus.py` --log-level=INFO classify --trained-classifier {{input.trained_classifier}} --null-distribution {outdir}/simulated_null_dist.txt --features {{input.sim_features}} --simulated {cv}
        python `which 2020plus.py` --out-dir {outdir}/pretrained_output --log-level=INFO classify -n {{params.ntrees}} --trained-classifier {{input.trained_classifier}} -d .7 -o 1.0 --features {{input.features}} --null-distribution {outdir}/simulated_null_dist.txt --random-seed 71 {cv}
        """.format(outdir=output_dir, cv=cv)
| [
"collintokheim@gmail.com"
] | collintokheim@gmail.com | |
2721ad3609c87f4d0dcad289c0ed5d8d1ad446fc | 6af50159e5b4af2ee41abf3ad7a4082f522662db | /bv_cirq.py | 298d8d3403842a5651b7f282f0a1db66b01e0cc5 | [] | no_license | jikaufman/CS239-Final-Project | 425b1c8a2ea75ee6d37210e0ed66c610d333c8bb | 8765859fc63b32648ab9d934f09c4ddf2ceca752 | refs/heads/master | 2023-03-22T01:26:11.283205 | 2021-03-21T23:49:39 | 2021-03-21T23:49:39 | 349,953,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,203 | py | # Jacob Kaufman 204 929 264
# Nikhil Arora 204 965 841
# Bernstein-Vazirani Problem:
import cirq
import requests
'''
Input: a function f: {0,1}^n → {0,1}.
Assumption: f(x) = a*x+b
Output: a,b
Notation: {0,1}^n is the set of bit strings of length n, a is an unknown bit string
of length n, * is inner product mod 2, + is addition mod 2, and b is an unknown single bit
'''
#########################################################################################
# Main Code #
#########################################################################################
# constructs a bernstein-vazirani circuit
# example circuit: a = 0101101, b = 0
def bernstein(error_correct: bool = False):
    """Build a Bernstein-Vazirani circuit and submit it to the grading service.

    The oracle encodes f(x) = a*x + b with a = 0101101 and b = 0: the four
    CNOTs below target the ancilla from the qubits corresponding to the
    1-bits of a.

    Args:
        error_correct: when True, additionally protects qubits[2] with a
            bit-flip repetition code built on four extra grid qubits.

    Returns:
        requests.Response: the HTTP reply from the job-submission service.
    """
    # initialize qubits with the Sycamore grid layout in mind;
    # qubits[7] (GridQubit(2, 4)) is the ancilla used for phase kickback
    qubits = [cirq.GridQubit(0, 5), cirq.GridQubit(1, 4),\
              cirq.GridQubit(0, 6), cirq.GridQubit(2, 5),\
              cirq.GridQubit(2, 3), cirq.GridQubit(1, 5),\
              cirq.GridQubit(3, 4), cirq.GridQubit(2, 4)]
    if error_correct:
        # ancillas for the repetition code protecting qubits[2]
        # NOTE(review): error_qubits[0] is GridQubit(3, 4), which is the same
        # physical qubit as qubits[6] -- verify this overlap is intentional.
        error_qubits = [cirq.GridQubit(3, 4), cirq.GridQubit(3, 3),\
                        cirq.GridQubit(3, 2), cirq.GridQubit(4, 3)]
    # construct circuit
    circuit = cirq.Circuit()
    # error-correction setup: copy qubits[2] into the repetition-code
    # ancillas; the SWAPs presumably shuttle the target next to qubits[2]
    # for grid adjacency -- TODO confirm against the device topology
    if error_correct:
        circuit.append([cirq.CNOT(qubits[2], error_qubits[1])])
        circuit.append([cirq.SWAP(error_qubits[0], error_qubits[1])])
        circuit.append([cirq.CNOT(qubits[2], error_qubits[1])])
        circuit.append([cirq.SWAP(error_qubits[0], error_qubits[1])])
    # Hadamard every qubit (including the ancilla)
    circuit.append([cirq.H(q) for q in qubits])
    # H|0> followed by Z leaves the ancilla in |->, the state required for
    # phase kickback (equivalent to X then H)
    circuit.append([cirq.Z(qubits[7])])
    # oracle: CNOT into the ancilla from each qubit where a has a 1-bit
    circuit.append([cirq.CNOT(qubits[1], qubits[7])])
    circuit.append([cirq.CNOT(qubits[3], qubits[7])])
    circuit.append([cirq.CNOT(qubits[4], qubits[7])])
    circuit.append([cirq.CNOT(qubits[6], qubits[7])])
    # final Hadamards on the data qubits reveal a in the computational basis
    circuit.append([cirq.H(q) for q in qubits[:-1]])
    # error detection and correction: extract the two syndrome bits, measure
    # them, then apply a Toffoli -- presumably a majority-vote correction of
    # qubits[2]; TODO confirm the intended code against the SWAP routing
    if error_correct:
        circuit.append([cirq.SWAP(error_qubits[2], error_qubits[1])])
        circuit.append([cirq.CNOT(qubits[2], error_qubits[1])])
        circuit.append([cirq.SWAP(error_qubits[2], error_qubits[1])])
        circuit.append([cirq.CNOT(error_qubits[1], error_qubits[2])])
        circuit.append([cirq.SWAP(error_qubits[3], error_qubits[1])])
        circuit.append([cirq.CNOT(qubits[2], error_qubits[1])])
        circuit.append([cirq.CNOT(error_qubits[0], error_qubits[1])])
        circuit.append([cirq.SWAP(error_qubits[1], error_qubits[3])])
        circuit.append([cirq.measure(error_qubits[2]), cirq.measure(error_qubits[3])])
        circuit.append([cirq.CCNOT(qubits[2], error_qubits[1], error_qubits[0])])
    # measure all data qubits (the ancilla is left unmeasured)
    circuit.append([cirq.measure(q) for q in qubits[:-1]])
    # check for sycamore compatibility
    # NOTE(review): the optimized circuit returned by this call is discarded;
    # as written it only validates that the circuit can be compiled for
    # Sycamore -- the unoptimized circuit is what gets submitted below.
    cirq.google.optimized_for_sycamore(circuit=circuit, new_device=cirq.google.Sycamore, optimizer_type='sycamore')
    url = 'http://quant-edu-scalability-tools.wl.r.appspot.com/send'
    # job payload: serialized circuit plus identity fields the service expects
    job_payload = {"circuit":cirq.to_json(circuit),\
                   "email":"jacobkaufman4@gmail.com",\
                   "repetitions":1000,\
                   "student_id":204929264}
    return requests.post(url, json=job_payload)
if __name__ == '__main__':
    # Submit the Bernstein-Vazirani job and echo the service's reply body.
    print(bernstein().text)
| [
"jacob@Jacobs-MacBook-Pro-2.local"
] | jacob@Jacobs-MacBook-Pro-2.local |
a500d0d54970ec25831ee58b453f03daf5f02059 | 306baa2ad596e3962e427d587e7b0d4175a1e48e | /configs/ttfnetv3/ttfv3net_r34_0114_3l_128_48_s16twice_basicup_aug_10x.py | 847551575ebcdb8c878b17ac7b992f8214941afd | [
"Apache-2.0"
] | permissive | mrsempress/mmdetection | 9c7ed7ed0c9f1d6200f79a2ab14fc0c8fe32c18a | cb650560c97a2fe56a9b369a1abc8ec17e06583a | refs/heads/master | 2022-04-24T04:34:30.959082 | 2020-04-26T07:52:23 | 2020-04-26T07:52:23 | 258,957,856 | 0 | 0 | Apache-2.0 | 2020-04-26T06:33:32 | 2020-04-26T06:33:32 | null | UTF-8 | Python | false | false | 4,154 | py | # model settings
model = dict(
type='TTFNet',
pretrained='modelzoo://resnet34',
backbone=dict(
type='ResNet',
depth=34,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_eval=False,
style='pytorch'),
neck=None,
bbox_head=dict(
type='TTFv3Head',
inplanes=(64, 128, 256, 512),
planes=(256, 128, 64),
down_ratio=(16, 8, 4),
hm_head_channels=((128, 128), (128, 128), (64, 64)),
wh_head_channels=((32, 32), (32, 32), (32, 32)),
num_classes=81,
shortcut_cfg=(1, 2, 3),
s16_shortcut_twice=True,
wh_scale_factor=(8., 8., 8.),
alpha=0.6,
beta=0.6,
hm_weight=(1.4, 1.4, 1.),
wh_weight=(7., 7., 5.),
length_range=((128, 512), (48, 128), (1, 48)),
train_branch=(True, True, True),
inf_branch=(True, True, True),
use_simple_nms=True,
fast_nms=False,
up_conv_cfg=dict(type='BasicBlock'),
max_objs=128,
conv_cfg=None,
norm_cfg=dict(type='BN')))
cudnn_benchmark = True
# training and testing settings
train_cfg = dict(debug=False)
test_cfg = dict(score_thr=0.01, max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
# ImageNet mean/std (RGB order)
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile', to_float32=True),
    dict(type='LoadAnnotations', with_bbox=True),
    # SSD-style photometric + geometric augmentation
    dict(
        type='PhotoMetricDistortion',
        brightness_delta=32,
        contrast_range=(0.5, 1.5),
        saturation_range=(0.5, 1.5),
        hue_delta=18),
    dict(
        type='Expand',
        mean=img_norm_cfg['mean'],
        to_rgb=img_norm_cfg['to_rgb'],
        ratio_range=(1, 4)),
    dict(
        type='MinIoURandomCrop',
        min_ious=(0.1, 0.3, 0.5, 0.7, 0.9),
        min_crop_size=0.3),
    # fixed square input; aspect ratio is intentionally not preserved
    dict(type='Resize', img_scale=(512, 512), keep_ratio=False),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    # single-scale test, no flip augmentation
    dict(
        type='MultiScaleFlipAug',
        img_scale=(512, 512),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=False),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    imgs_per_gpu=16,
    workers_per_gpu=2,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_train2017.json',
        img_prefix=data_root + 'train2017/',
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline))
# optimizer
optimizer = dict(
    type='SGD',
    lr=0.002,
    momentum=0.9,
    weight_decay=0.0004,
    # doubled learning rate and no weight decay on bias parameters
    paramwise_options=dict(bias_lr_mult=2., bias_decay_mult=0.))
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 5,
    step=[90, 110])  # lr is stepped down at epochs 90 and 110
checkpoint_config = dict(save_every_n_steps=200, max_to_keep=1, keep_in_n_epoch=[63, 90])
# yapf:disable
log_config = dict(interval=20)
# yapf:enable
# runtime settings
total_epochs = 120  # "10x" schedule (12-epoch base x 10)
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/ttfv3net_r34_10x'
# NOTE(review): resumes weights from a previous finished 10x run -- confirm
# this checkpoint path is still valid before launching training.
load_from = 'work_dirs/2001/0215_ttfv334_0114_3l_128_48_s16twice_basicup2_aug_10x/work_dirs/ttfv3net_r34_10x_0217_1444/epoch_120_iter_127630.pth'
resume_from = None
workflow = [('train', 1)]
| [
"mrsempress98@gmail.com"
] | mrsempress98@gmail.com |
424604fc081fe31949ac6a2ea5e3618af401701a | b6e34dec0831f43d442e89c64f521f77bb2438b2 | /fbta/fbta_sequence.py | e2b01b7dfc4e19723ab1165e36d73b524e70cf67 | [] | no_license | kandation/FBTAFast | a1a38e09d5964915d46492f84f8fa0fead43185c | 505a2f232ef7ef9b6fc153357fb4eec5480cd92a | refs/heads/master | 2022-12-31T02:43:09.339384 | 2020-04-22T19:13:54 | 2020-04-22T19:13:54 | 212,693,322 | 0 | 0 | null | 2020-04-22T19:13:56 | 2019-10-03T22:34:15 | HTML | UTF-8 | Python | false | false | 4,209 | py | import time
from pprint import pprint
from fbta_04_activity_to_card import FBTAActivityToCardsNew
from fbta_05_cards_download_manager import FBTACardsDownloadManager
from fbta_02_clusters import FBTAClusterInfo
from fbta_06_photos_download_manager import FBTAPhotosDownloadManager
from fbta_07_dataft import FBTADataft
from fbta_120_album_count_manager import FBTAAlbumCountManager
from fbta_configs import FBTAConfigs
from fbta_03_history_download_manager import FBTAHistoryDownloadManager
from fbta_mkdir import FBTAMkdir
from fbta_node_master import FBTANodeMaster
from fbta_sequence_func import FBTASequenceFunction
from fbta_settings import FBTASettings
from fbta_01_yearbox import FBTAYearBox
class FBTASequence(FBTASequenceFunction):
    """Drives the full FBTA scrape pipeline as an ordered series of steps.

    Every numbered step is gated by the inherited ``_isInTestStep`` check so
    a run can be limited to a subset of the pipeline, and each completed
    step is reported through ``_showFinishedProcessEndNotify``.
    """

    def __init__(self, setting: FBTASettings, configs: FBTAConfigs):
        FBTASequenceFunction.__init__(self, setting, configs)
        # Shared pipeline state, populated by the early steps.
        self.__node_master: FBTANodeMaster = FBTANodeMaster.NONE
        self.__node_yearbox = None
        self.__node_cluster_info: FBTAClusterInfo = None

    def start(self):
        """Run every pipeline step in order, then stop the process."""
        self._warnningTimeOptimize()
        self.__px0_initDirectory()
        pipeline = (
            self.__p00_generateMasterNode,
            self.__p01_processYearBox,
            self.__p02_processsClustersInfo,
            self.__p03_processDownloader,
            self.__p04_processDatabaseAsCard,
            self.__p05_processCardAsPost,
            self.__processDonloadPhotos,
            self.__processDataft,
            self.__p08_processAlbumCount,
        )
        for step_number, run_step in enumerate(pipeline):
            run_step(step_number)
            self._showFinishedProcessEndNotify(step_number)
        # NOTE(review): debug-style exit left in -- confirm it is intended.
        print('ENDT$EST')
        exit()

    def __px0_initDirectory(self):
        """Create the project output directories before any step runs."""
        self.__mkdirClass = FBTAMkdir(self._settings, self._configs)
        self.__mkdirClass.startProjectDir()

    def __p00_generateMasterNode(self, step):
        """Step 0: build and start the master session node."""
        if not self._isInTestStep(step):
            return
        self.__node_master = FBTANodeMaster(self._settings, self._configs)
        self.__node_master.start()

    def __p01_processYearBox(self, step):
        """Step 1: build the year index, or reload a previously saved one."""
        if not self._isInTestStep(step):
            return
        yearbox = FBTAYearBox(self.__node_master)
        self.__node_yearbox = yearbox
        data_path = self._settings.dir_data_path
        if self._settings.renew_index or not yearbox.hasYearboxFile(data_path):
            yearbox.run()
            yearbox.save(data_path)
        else:
            yearbox.load(data_path)

    def __p02_processsClustersInfo(self, step):
        """Step 2: split the year index into download clusters."""
        if not self._isInTestStep(step):
            return
        self.__node_cluster_info = FBTAClusterInfo(self._settings, self._configs, self.__node_yearbox)
        self.__node_cluster_info.run()

    def __p03_processDownloader(self, step):
        """Step 3: download the activity history for every cluster."""
        if not self._isInTestStep(step):
            return
        downloader = FBTAHistoryDownloadManager(self.__node_master,
                                                self.__node_cluster_info.clusters)
        downloader.main()

    def __p04_processDatabaseAsCard(self, step):
        """Step 4: convert the downloaded activity database into cards."""
        if not self._isInTestStep(step):
            return
        FBTAActivityToCardsNew(self._settings, self._configs).main()

    def __p05_processCardAsPost(self, step):
        """Step 5: download the full post behind each card."""
        if not self._isInTestStep(step):
            return
        FBTACardsDownloadManager(self.__node_master).main()

    def __processDonloadPhotos(self, step):
        """Step 6: download the photos referenced by the posts."""
        if not self._isInTestStep(step):
            return
        FBTAPhotosDownloadManager(self.__node_master).main()

    def __p08_processAlbumCount(self, step):
        """Step 8: tally album statistics."""
        if not self._isInTestStep(step):
            return
        FBTAAlbumCountManager(self.__node_master).main()

    def __processDataft(self, step):
        """Step 7: run the dataft post-processing stage."""
        if not self._isInTestStep(step):
            return
        FBTADataft(self.__node_master).main()
"akkhaporn@gmail.com"
] | akkhaporn@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.