hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
454862ed0335535e0622f4cd4d4b3040600274f8
254
py
Python
setup.py
piotrmaslanka/python-cassandra-jaeger
961a33955e54a84348d97a5e0b0985a3850ee1ad
[ "MIT" ]
null
null
null
setup.py
piotrmaslanka/python-cassandra-jaeger
961a33955e54a84348d97a5e0b0985a3850ee1ad
[ "MIT" ]
null
null
null
setup.py
piotrmaslanka/python-cassandra-jaeger
961a33955e54a84348d97a5e0b0985a3850ee1ad
[ "MIT" ]
null
null
null
#!/usr/bin/env python from distutils.core import setup from setuptools import find_packages import python_cassandra_jaeger setup(version=python_cassandra_jaeger.__version__, packages=find_packages(include=['python_cassandra_jaeger']), )
19.538462
66
0.795276
31
254
6.129032
0.516129
0.236842
0.331579
0
0
0
0
0
0
0
0
0
0.125984
254
12
67
21.166667
0.855856
0.07874
0
0
0
0
0.098712
0.098712
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
45712dc508f41385cf0cbd9c2e96f1445b5789c9
163
py
Python
utils/decorators/__init__.py
jemand2001/python-utils
652d3998cb272530e42100ae844178ad7a092c8c
[ "MIT" ]
null
null
null
utils/decorators/__init__.py
jemand2001/python-utils
652d3998cb272530e42100ae844178ad7a092c8c
[ "MIT" ]
null
null
null
utils/decorators/__init__.py
jemand2001/python-utils
652d3998cb272530e42100ae844178ad7a092c8c
[ "MIT" ]
null
null
null
from .strict import (strict) from .overload import (overload) from .template import (template) from .convert import (convert) from .auto_slots import (auto_slots)
27.166667
36
0.785276
22
163
5.727273
0.363636
0.142857
0
0
0
0
0
0
0
0
0
0
0.122699
163
5
37
32.6
0.881119
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
458860860a8c1e1c28fb5b0848e12367ffab7ad3
370
py
Python
serial_scripts/system_test/flow_tests/ReleaseToFlowSetupRateMapping.py
atsgen/tf-test
2748fcd81491450c75dadc71849d2a1c11061029
[ "Apache-2.0" ]
5
2020-09-29T00:36:57.000Z
2022-02-16T06:51:32.000Z
serial_scripts/system_test/flow_tests/ReleaseToFlowSetupRateMapping.py
vkolli/contrail-test-perf
db04b8924a2c330baabe3059788b149d957a7d67
[ "Apache-2.0" ]
27
2019-11-02T02:18:34.000Z
2022-02-24T18:49:08.000Z
serial_scripts/system_test/flow_tests/ReleaseToFlowSetupRateMapping.py
vkolli/contrail-test-perf
db04b8924a2c330baabe3059788b149d957a7d67
[ "Apache-2.0" ]
20
2019-11-28T16:02:25.000Z
2022-01-06T05:56:58.000Z
# Here the rate is set for Policy flows, local to a compute, which is # lesser than policy flows across computes expected_flow_setup_rate = {} expected_flow_setup_rate['policy'] = { '1.04': 6000, '1.05': 9000, '1.06': 10000, '1.10': 10000, '2.10': 13000} expected_flow_setup_rate['nat'] = { '1.04': 4200, '1.05': 6300, '1.06': 7500, '1.10': 7500, '2.10': 10000}
46.25
76
0.654054
64
370
3.640625
0.53125
0.154506
0.218884
0.270386
0
0
0
0
0
0
0
0.237942
0.159459
370
7
77
52.857143
0.511254
0.291892
0
0
0
0
0.189189
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
45b52df1d91555aa0d10d06f736b608de959c083
60
py
Python
tests/__init__.py
ExterraGroup/pyrsi
7999e58dc9260ec61dd7efe33c32dbcf02fdfa86
[ "MIT" ]
8
2018-12-10T17:07:35.000Z
2020-12-25T01:25:15.000Z
tests/__init__.py
ExterraGroup/pyrsi
7999e58dc9260ec61dd7efe33c32dbcf02fdfa86
[ "MIT" ]
1
2020-01-15T03:52:47.000Z
2020-01-15T03:52:47.000Z
tests/__init__.py
ExterraGroup/pyrsi
7999e58dc9260ec61dd7efe33c32dbcf02fdfa86
[ "MIT" ]
2
2019-05-16T16:43:30.000Z
2020-06-05T11:24:07.000Z
# -*- coding: utf-8 -*- """Unit test package for pyrsi."""
15
34
0.55
8
60
4.125
1
0
0
0
0
0
0
0
0
0
0
0.020408
0.183333
60
3
35
20
0.653061
0.85
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
45bc6a97de7432ce7d1214a830d477fd1ce1003b
222
py
Python
app/test/test_DQI.py
qianjing2020/lambda_lab
53a29796e42fe1d2c4e5785eaa65938cb64af67c
[ "MIT" ]
null
null
null
app/test/test_DQI.py
qianjing2020/lambda_lab
53a29796e42fe1d2c4e5785eaa65938cb64af67c
[ "MIT" ]
6
2021-04-30T21:10:55.000Z
2022-03-12T00:32:07.000Z
app/test/test_DQI.py
qianjing2020/lambda_lab
53a29796e42fe1d2c4e5785eaa65938cb64af67c
[ "MIT" ]
null
null
null
import context from modules.data_preprocess import DataCleaning, DataQualityCheck from modules.db_connect import dbConnect from test_sequence import sale qc = DataQualityCheck() result = qc.generate_QC(sale) print(result)
27.75
66
0.846847
29
222
6.344828
0.62069
0.119565
0
0
0
0
0
0
0
0
0
0
0.099099
222
8
67
27.75
0.92
0
0
0
1
0
0
0
0
0
0
0
0
1
0
false
0
0.571429
0
0.571429
0.142857
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
45d23d997247a4451b26d7281ff5d784047731ab
1,066
py
Python
django_blog/blog/models.py
lidysun/test1
94db6637be6c0008f2454a0192121b1f1286c437
[ "MIT" ]
null
null
null
django_blog/blog/models.py
lidysun/test1
94db6637be6c0008f2454a0192121b1f1286c437
[ "MIT" ]
null
null
null
django_blog/blog/models.py
lidysun/test1
94db6637be6c0008f2454a0192121b1f1286c437
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals from time import timezone from django.db import models # Create your models here. class UserInfo(models.Model): user= models.CharField(max_length = 30) pwd = models.CharField(max_length = 30) # class Publisher(models.Model): # name = models.CharField(max_length=30) # address = models.CharField(max_length=50) # website = models.URLField() # class Author(models.Model): # email = models.EmailField() # first_name = models.CharField(max_length = 30) # last_name = models.CharField(max_length = 30) # class Book(models.Model): # title = models.CharField(max_length = 150) # authors = models.ManyToManyField(Author) # publisher = models.ForeignKey(Publisher) class DoubanMovie(models.Model): name = models.CharField(max_length = 200) info = models.CharField(max_length = 10000) rating = models.CharField(max_length = 20) num = models.CharField(max_length = 50) quote = models.CharField(max_length = 150) img_url = models.CharField(max_length = 300)
31.352941
50
0.721388
136
1,066
5.5
0.389706
0.260695
0.312834
0.417112
0.411765
0.235294
0.104278
0
0
0
0
0.038031
0.161351
1,066
33
51
32.30303
0.798658
0.464353
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.230769
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
45d84ac7252e18717ee747f54457d11d39050e86
162
py
Python
src/controllers/main_ctrl.py
donglinwu6066/2022-NYCU-EVA-lab-project-demo-app
5de1021173240b2f9b325510e2c75f59cf3b14e1
[ "MIT" ]
null
null
null
src/controllers/main_ctrl.py
donglinwu6066/2022-NYCU-EVA-lab-project-demo-app
5de1021173240b2f9b325510e2c75f59cf3b14e1
[ "MIT" ]
null
null
null
src/controllers/main_ctrl.py
donglinwu6066/2022-NYCU-EVA-lab-project-demo-app
5de1021173240b2f9b325510e2c75f59cf3b14e1
[ "MIT" ]
1
2022-03-25T10:08:41.000Z
2022-03-25T10:08:41.000Z
from PyQt5.QtCore import QObject, pyqtSlot class MainController(QObject): def __init__(self, model): super().__init__() self._model = model
20.25
42
0.679012
18
162
5.611111
0.722222
0.158416
0.257426
0
0
0
0
0
0
0
0
0.007937
0.222222
162
7
43
23.142857
0.793651
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.6
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
aff28c5107917776458b32939693487f544f849c
1,198
py
Python
tests/no_train_or_test/model.py
NehzUx/autodl
c80fdc4b297ed1ec2b9e6911d313f1fe31d83cb9
[ "Apache-2.0" ]
25
2018-09-26T14:07:11.000Z
2021-12-02T15:19:08.000Z
tests/no_train_or_test/model.py
NehzUx/autodl
c80fdc4b297ed1ec2b9e6911d313f1fe31d83cb9
[ "Apache-2.0" ]
8
2018-11-23T15:35:28.000Z
2020-02-27T14:55:11.000Z
tests/no_train_or_test/model.py
NehzUx/autodl
c80fdc4b297ed1ec2b9e6911d313f1fe31d83cb9
[ "Apache-2.0" ]
5
2019-03-05T11:05:59.000Z
2020-01-08T13:05:35.000Z
# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Modified by: Zhengying Liu, Isabelle Guyon """An example of code submission for the AutoDL challenge. It implements 3 compulsory methods: __init__, train, and test. model.py follows the template of the abstract class algorithm.py found in folder AutoDL_ingestion_program/. To create a valid submission, zip model.py together with an empty file called metadata (this just indicates your submission is a code submission and has nothing to do with the dataset metadata. """ class Model(object): """Fully connected neural network with no hidden layer.""" def __init__(self, metadata): pass
37.4375
78
0.769616
183
1,198
4.983607
0.655738
0.065789
0.028509
0.035088
0
0
0
0
0
0
0
0.009018
0.166945
1,198
31
79
38.645161
0.90481
0.90985
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0.333333
0
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
affe1f7c5acaa073ae333db92fc0f2ba9b660efd
28
py
Python
tests/math/__init__.py
Ejjaffe/dit
c9d206f03d1de5a0a298b1d0ea9d79ea5e789ee1
[ "BSD-3-Clause" ]
1
2021-03-15T08:51:42.000Z
2021-03-15T08:51:42.000Z
tests/math/__init__.py
Ejjaffe/dit
c9d206f03d1de5a0a298b1d0ea9d79ea5e789ee1
[ "BSD-3-Clause" ]
null
null
null
tests/math/__init__.py
Ejjaffe/dit
c9d206f03d1de5a0a298b1d0ea9d79ea5e789ee1
[ "BSD-3-Clause" ]
null
null
null
""" Tests for dit.math. """
7
19
0.535714
4
28
3.75
1
0
0
0
0
0
0
0
0
0
0
0
0.178571
28
3
20
9.333333
0.652174
0.678571
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
b3002f19f23fd7a651f2fd21a8598fc87385b360
306
py
Python
FATS/featureFunction.py
serdarozsoy/FATS
e2a1bf4f142c20eada5d0d63435599e9139d4a9d
[ "MIT" ]
null
null
null
FATS/featureFunction.py
serdarozsoy/FATS
e2a1bf4f142c20eada5d0d63435599e9139d4a9d
[ "MIT" ]
null
null
null
FATS/featureFunction.py
serdarozsoy/FATS
e2a1bf4f142c20eada5d0d63435599e9139d4a9d
[ "MIT" ]
null
null
null
<<<<<<< HEAD import os,sys,time import numpy as np import pandas as pd import matplotlib.pyplot as plt import Base ======= import os,sys,time import numpy as np import pandas as pd import matplotlib.pyplot as plt import Base >>>>>>> e5e6c78995f79de751f6aa5e3ad47cb15bd3fffc from FeatureFunctionLib import *
21.857143
48
0.777778
43
306
5.534884
0.395349
0.067227
0.092437
0.12605
0.697479
0.697479
0.697479
0.697479
0.697479
0.697479
0
0.075188
0.130719
306
14
49
21.857143
0.819549
0
0
0.714286
0
0
0
0
0
0
0
0
0
0
null
null
0
0.785714
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
b302348ee4bab662ae5fc00d100adcd638224ab8
56
py
Python
result_helpers/__init__.py
CFM-MSG/CMAN_pytorch
e176debef6888ae96781a6cfafabcbd438fbfcb0
[ "MIT" ]
null
null
null
result_helpers/__init__.py
CFM-MSG/CMAN_pytorch
e176debef6888ae96781a6cfafabcbd438fbfcb0
[ "MIT" ]
null
null
null
result_helpers/__init__.py
CFM-MSG/CMAN_pytorch
e176debef6888ae96781a6cfafabcbd438fbfcb0
[ "MIT" ]
null
null
null
from result_helpers.mem_one_class import MEMResultHelper
56
56
0.928571
8
56
6.125
1
0
0
0
0
0
0
0
0
0
0
0
0.053571
56
1
56
56
0.924528
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
b3231dd9d1c15c0eaaa1376b68c6482d2fb00f29
104
py
Python
constants.py
pvantonov/kodi-amvnews
504eeb59dc0b2b9fe60a0aa7debbe35140dc4156
[ "MIT" ]
3
2019-05-14T21:41:18.000Z
2020-08-06T13:25:45.000Z
constants.py
pvantonov/kodi-amvnews
504eeb59dc0b2b9fe60a0aa7debbe35140dc4156
[ "MIT" ]
null
null
null
constants.py
pvantonov/kodi-amvnews
504eeb59dc0b2b9fe60a0aa7debbe35140dc4156
[ "MIT" ]
null
null
null
# coding=utf-8 """ Definition of constants. """ from xbmcswift2.plugin import Plugin PLUGIN = Plugin()
13
36
0.721154
13
104
5.769231
0.769231
0.32
0
0
0
0
0
0
0
0
0
0.022472
0.144231
104
7
37
14.857143
0.820225
0.365385
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
b32a64ff545e87a33e77785c0b313ddafd790edf
38
py
Python
python/testData/quickFixes/PyRemoveUnusedLocalQuickFixTest/removeChainedAssignmentStatementFirstTarget_after.py
06needhamt/intellij-community
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/quickFixes/PyRemoveUnusedLocalQuickFixTest/removeChainedAssignmentStatementFirstTarget_after.py
06needhamt/intellij-community
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
[ "Apache-2.0" ]
null
null
null
python/testData/quickFixes/PyRemoveUnusedLocalQuickFixTest/removeChainedAssignmentStatementFirstTarget_after.py
06needhamt/intellij-community
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
[ "Apache-2.0" ]
null
null
null
def f(): <caret>b = 0 return b
12.666667
16
0.473684
7
38
2.571429
0.857143
0
0
0
0
0
0
0
0
0
0
0.041667
0.368421
38
3
17
12.666667
0.708333
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
b347c2e19c64fdbc38703b93493ffd39b10d1790
104
py
Python
gpt/config.py
jimth001/formality_emnlp19
fa2f48f2ac6efd98c0cf986681747ea41adbac48
[ "MIT" ]
22
2019-08-28T16:36:51.000Z
2022-01-13T07:30:36.000Z
gpt/config.py
jimth001/formality_emnlp19
fa2f48f2ac6efd98c0cf986681747ea41adbac48
[ "MIT" ]
11
2020-01-28T22:16:38.000Z
2022-02-09T23:31:41.000Z
gpt/config.py
jimth001/formality_emnlp19
fa2f48f2ac6efd98c0cf986681747ea41adbac48
[ "MIT" ]
5
2019-11-12T13:28:36.000Z
2022-01-13T07:30:39.000Z
from gpt.src import encoder text_enc = encoder.get_encoder('./models/117M') config_path='./models/117M'
26
47
0.769231
16
104
4.8125
0.75
0.25974
0
0
0
0
0
0
0
0
0
0.0625
0.076923
104
3
48
34.666667
0.739583
0
0
0
0
0
0.25
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
2fb9ac82ed503a345c0e22abf61b359a4bddd5eb
10,806
py
Python
userbot/modules/warn.py
akborana/Devil
30ef9c5ac910d6344e206921e343a0932ffd6460
[ "MIT" ]
1
2021-05-06T18:30:50.000Z
2021-05-06T18:30:50.000Z
userbot/modules/warn.py
hellboi-atul/javes-3.0
8777d482bd1ee877a96332a2cd84d880c151fa43
[ "MIT" ]
null
null
null
userbot/modules/warn.py
hellboi-atul/javes-3.0
8777d482bd1ee877a96332a2cd84d880c151fa43
[ "MIT" ]
null
null
null
import re import hashlib import asyncio import datetime import logging from userbot import CMD_HELP import os import math import html import os.path import sys import time from typing import Tuple, Union from userbot import bot from telethon import errors from telethon import events from telethon.tl import types from telethon.tl.functions.channels import (EditAdminRequest, EditBannedRequest, EditPhotoRequest) from telethon.utils import get_display_name from telethon.tl.functions.messages import GetPeerDialogsRequest from telethon.tl.functions.channels import GetParticipantRequest from telethon.tl.types import ChannelParticipantAdmin, ChannelParticipantCreator, ChatBannedRights import userbot.modules.sql_helper.warns_sql as sql from userbot.events import javes05 async def is_admin(chat_id, user_id): req_jo = await bot(GetParticipantRequest(channel=chat_id,user_id=user_id)) chat_participant = req_jo.participant if isinstance(chat_participant, ChannelParticipantCreator) or isinstance(chat_participant, ChannelParticipantAdmin): return True return False MUTE_RIGHTS = ChatBannedRights(until_date=None, send_messages=True) javes = bot from userbot.events import rekcah05 @javes05(outgoing=True, pattern="^!warn(?: |$)(.*)") async def _(event): if event.fwd_from: return try: chat = await event.get_chat() admin = chat.admin_rights creator = chat.creator warn_reason = event.pattern_match.group(1) reply_message = await event.get_reply_message() except: return await event.edit("`Sorry canot warn users here`") if not admin and not creator: return await event.edit("`I have to be admin to warn people.`") if await is_admin(event.chat_id, reply_message.sender.id): return await event.edit("`I'm not going to warn an admin!`") limit, soft_warn = sql.get_warn_setting(event.chat_id) num_warns, reasons = sql.warn_user(reply_message.sender.id, event.chat_id, warn_reason) if num_warns >= limit: if soft_warn: reply = "{} warnings, <u><a href='tg://user?id={}'>user</a></u> has been 
muted!".format(limit, reply_message.sender.id) await event.client.edit_permissions(chat, reply_message.sender.id, until_date=None, send_messages=False) else: await event.client.edit_permissions(chat, reply_message.sender.id, until_date=None, view_messages=False) reply = "{} warnings, <u><a href='tg://user?id={}'>user</a></u> has been banned!".format(limit, reply_message.sender.id) else: reply = "<u><a href='tg://user?id={}'>user</a></u> has {}/{} warnings... watch out!".format(reply_message.sender.id, num_warns, limit) if warn_reason: reply += "\nReason for last warn:\n{}".format(html.escape(warn_reason)) # await event.edit(reply, parse_mode="html") @javes.on(rekcah05(pattern=f"warn(?: |$)(.*)", allow_sudo=True)) async def _(event): if event.fwd_from: return try: chat = await event.get_chat() admin = chat.admin_rights creator = chat.creator warn_reason = event.pattern_match.group(1) reply_message = await event.get_reply_message() except: return await event.reply("`Sorry canot warn users here`") if not admin and not creator: return await event.reply("`I have to be admin to warn people.`") if await is_admin(event.chat_id, reply_message.sender.id): return await event.reply("`I'm not going to warn an admin!`") limit, soft_warn = sql.get_warn_setting(event.chat_id) num_warns, reasons = sql.warn_user(reply_message.sender.id, event.chat_id, warn_reason) if num_warns >= limit: if soft_warn: reply = "{} warnings, <u><a href='tg://user?id={}'>user</a></u> has been muted!".format(limit, reply_message.sender.id) await event.client.edit_permissions(chat, reply_message.sender.id, until_date=None, send_messages=False) else: await event.client.edit_permissions(chat, reply_message.sender.id, until_date=None, view_messages=False) reply = "{} warnings, <u><a href='tg://user?id={}'>user</a></u> has been banned!".format(limit, reply_message.sender.id) else: reply = "<u><a href='tg://user?id={}'>user</a></u> has {}/{} warnings... 
watch out!".format(reply_message.sender.id, num_warns, limit) if warn_reason: reply += "\nReason for last warn:\n{}".format(html.escape(warn_reason)) # await event.reply(reply, parse_mode="html") @javes05(outgoing=True, pattern="^!warns(?: |$)(.*)") async def _(event): if event.fwd_from: return reply_message = await event.get_reply_message() result = sql.get_warns(reply_message.sender.id, event.chat_id) if result and result[0] != 0: num_warns, reasons = result limit, soft_warn = sql.get_warn_setting(event.chat_id) if reasons: text = "This user has {}/{} warnings, for the following reasons:".format(num_warns, limit) text += "\r\n" text += reasons await event.edit(text) else: await event.edit("This user has {} / {} warning, but no reasons for any of them.".format(num_warns, limit)) else: await event.edit("This user hasn't got any warnings!") @javes.on(rekcah05(pattern=f"warns(?: |$)(.*)", allow_sudo=True)) async def _(event): if event.fwd_from: return reply_message = await event.get_reply_message() result = sql.get_warns(reply_message.sender.id, event.chat_id) if result and result[0] != 0: num_warns, reasons = result limit, soft_warn = sql.get_warn_setting(event.chat_id) if reasons: text = "This user has {}/{} warnings, for the following reasons:".format(num_warns, limit) text += "\r\n" text += reasons await event.reply(text) else: await event.reply("This user has {} / {} warning, but no reasons for any of them.".format(num_warns, limit)) else: await event.reply("This user hasn't got any warnings!") @javes05(outgoing=True, pattern="^!setwarnmode(?: |$)(.*)") async def set_warn_strength(event): try: chat = await event.get_chat() admin = chat.admin_rights creator = chat.creator args = event.pattern_match.group(1) except: return await event.edit("`Error`") if args: if args in ("ban"): sql.set_warn_strength(event.chat_id, False) await event.edit("Warn mode Set To Ban User.") return elif args in ("mute"): sql.set_warn_strength(event.chat_id, True) await event.edit("Warn 
mode Set To Kick User.") return else: await event.edit("`Error usage !setwarnmode kick or mute`") else: limit, soft_warn = sql.get_warn_setting(event.chat_id) if soft_warn: await event.edit("I Am **muting** User's For Now.") else: await event.edit("I Am **Baning** User's For Now.") return "" @javes.on(rekcah05(pattern=f"setwarnmode(?: |$)(.*)", allow_sudo=True)) async def set_warn_strength(event): try: chat = await event.get_chat() admin = chat.admin_rights creator = chat.creator args = event.pattern_match.group(1) except: return await event.reply("`Error`") if args: if args in ("ban"): sql.set_warn_strength(event.chat_id, False) await event.reply("Warn mode Set To Ban User.") return elif args in ("mute"): sql.set_warn_strength(event.chat_id, True) await event.reply("warn mode Set To Kick User.") return else: await event.reply("`Error usage !setwarnmode kick or mute`") else: limit, soft_warn = sql.get_warn_setting(event.chat_id) if soft_warn: await event.reply("I Am **muting** User's For Now.") else: await event.reply("I Am **Baning** User's For Now.") return "" @javes05(outgoing=True, pattern="^!setwarnlimit(?: |$)(.*)") async def set_warn_limit(event): try: chat = await event.get_chat() admin = chat.admin_rights creator = chat.creator input_str = event.pattern_match.group(1) except: return await event.edit("`Error`") if input_str: if int(input_str) < 3: await event.edit("`The minimum warn limit is 3!`") else: sql.set_warn_limit(event.chat_id, int(input_str)) await event.edit("`Updated the warn limit to` {}".format(input_str)) return else: limit, soft_warn = sql.get_warn_setting(event.chat_id) await event.edit("`The current warn limit is {}`".format(limit)) return "" @javes.on(rekcah05(pattern=f"setwarnlimit(?: |$)(.*)", allow_sudo=True)) async def set_warn_limit(event): try: chat = await event.get_chat() admin = chat.admin_rights creator = chat.creator input_str = event.pattern_match.group(1) except: return await event.reply("`Error`") if input_str: if 
int(input_str) < 3: await event.reply("`The minimum warn limit is 3!`") else: sql.set_warn_limit(event.chat_id, int(input_str)) await event.reply("`Updated the warn limit to` {}".format(input_str)) return else: limit, soft_warn = sql.get_warn_setting(event.chat_id) await event.reply("`The current warn limit is {}`".format(limit)) return "" @javes05(outgoing=True, pattern="^!resetwarns(?: |$)(.*)") async def _(event): if event.fwd_from: return reply_message = await event.get_reply_message() sql.reset_warns(reply_message.sender.id, event.chat_id) await event.edit("Warnings have been reset!") @javes.on(rekcah05(pattern=f"resetwarns(?: |$)(.*)", allow_sudo=True)) async def _(event): if event.fwd_from: return reply_message = await event.get_reply_message() sql.reset_warns(reply_message.sender.id, event.chat_id) await event.reply("Warnings have been reset!") CMD_HELP.update({ "warn": "!warn\ \nUsage: Warn a user.\ \n\n!warns \ \nUsage: See a user's warnings.\ \n\n!setwarnmode <ban/mute>\ \nUsage: Set the chat's warn mode. \ \n\n!setwarnlimit <number>\ \nUsage: Set the number of warnings before users are punished. \ \n\n!resetwarns \ \nUsage: Reset all of a user's warnings to 0. \ \n\nAll commands support Sudo ( type !help sudo for more info)\ " })
35.781457
142
0.638627
1,463
10,806
4.571429
0.129187
0.077751
0.036184
0.053828
0.7811
0.752542
0.72503
0.710377
0.691238
0.691238
0
0.004731
0.237091
10,806
301
143
35.900332
0.806526
0
0
0.637795
0
0.023622
0.165556
0.019444
0
0
0
0
0
1
0
false
0
0.098425
0
0.208661
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
2fc7dff3416e231ccfed10d4c3e8b24fde9aebea
200
py
Python
opmd_viewer/__init__.py
soerenjalas/openPMD-viewer
1754ade96040920016a79ecc5b7b40597a5a6bf5
[ "BSD-3-Clause-LBNL" ]
null
null
null
opmd_viewer/__init__.py
soerenjalas/openPMD-viewer
1754ade96040920016a79ecc5b7b40597a5a6bf5
[ "BSD-3-Clause-LBNL" ]
1
2016-01-04T18:09:31.000Z
2016-01-04T18:09:31.000Z
opmd_viewer/__init__.py
soerenjalas/openPMD-viewer
1754ade96040920016a79ecc5b7b40597a5a6bf5
[ "BSD-3-Clause-LBNL" ]
null
null
null
# Make the OpenPMDTimeSeries object accessible from outside the package from .openpmd_timeseries import OpenPMDTimeSeries, FieldMetaInformation __all__ = ['OpenPMDTimeSeries', 'FieldMetaInformation']
50
71
0.85
18
200
9.166667
0.722222
0.448485
0
0
0
0
0
0
0
0
0
0
0.095
200
3
72
66.666667
0.911602
0.345
0
0
0
0
0.286822
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
2fd2bfab59ea98625232ec8c5527f3a9ee521861
28
py
Python
python/testData/editing/enterAfterColonOfCaseClauseWithoutBody.py
06needhamt/intellij-community
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
[ "Apache-2.0" ]
null
null
null
python/testData/editing/enterAfterColonOfCaseClauseWithoutBody.py
06needhamt/intellij-community
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
[ "Apache-2.0" ]
null
null
null
python/testData/editing/enterAfterColonOfCaseClauseWithoutBody.py
06needhamt/intellij-community
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
[ "Apache-2.0" ]
null
null
null
match x: case 42:<caret>
14
19
0.607143
5
28
3.4
1
0
0
0
0
0
0
0
0
0
0
0.095238
0.25
28
2
19
14
0.714286
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
6441c2d5103ee3c27b662443bb780f5d7c5a8064
132
py
Python
ppgr/__init__.py
PolarPayne/ppgr
a3ce482999bbd0c71042f18880553ebba60074c5
[ "MIT" ]
null
null
null
ppgr/__init__.py
PolarPayne/ppgr
a3ce482999bbd0c71042f18880553ebba60074c5
[ "MIT" ]
null
null
null
ppgr/__init__.py
PolarPayne/ppgr
a3ce482999bbd0c71042f18880553ebba60074c5
[ "MIT" ]
null
null
null
from .terminal import write, no_cursor from .screen import Screen __all__ = ["write", "no_cursor", "Screen"] __version__ = "0.5.0"
22
42
0.719697
19
132
4.473684
0.578947
0.164706
0.305882
0
0
0
0
0
0
0
0
0.026316
0.136364
132
5
43
26.4
0.719298
0
0
0
0
0
0.189394
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
ff2cc3e4fa0010425f995d6994abc80dcad5bb40
92
py
Python
codes_auto/1203.print-in-order.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/1203.print-in-order.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/1203.print-in-order.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
# # @lc app=leetcode.cn id=1203 lang=python3 # # [1203] print-in-order # None # @lc code=end
13.142857
42
0.663043
16
92
3.8125
0.875
0
0
0
0
0
0
0
0
0
0
0.115385
0.152174
92
7
43
13.142857
0.666667
0.815217
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
ff59e3ae0336b603d2a05d8f243ce3a51a93e303
802
py
Python
brdashsite/batchrecords/migrations/0003_auto_20190226_2017.py
JNDib/brdashproject
f9a7cb543024bfb6bb3556b9affdcddb46add2e1
[ "MIT" ]
null
null
null
brdashsite/batchrecords/migrations/0003_auto_20190226_2017.py
JNDib/brdashproject
f9a7cb543024bfb6bb3556b9affdcddb46add2e1
[ "MIT" ]
null
null
null
brdashsite/batchrecords/migrations/0003_auto_20190226_2017.py
JNDib/brdashproject
f9a7cb543024bfb6bb3556b9affdcddb46add2e1
[ "MIT" ]
null
null
null
# Generated by Django 2.1.5 on 2019-02-27 02:17 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('batchrecords', '0002_auto_20190226_1939'), ] operations = [ migrations.RemoveField( model_name='historicalbatchrecord', name='created_by', ), migrations.RemoveField( model_name='historicalbatchrecord', name='history_user', ), migrations.RemoveField( model_name='historicalbatchrecord', name='product', ), migrations.RemoveField( model_name='historicalbatchrecord', name='updated_by', ), migrations.DeleteModel( name='HistoricalBatchRecord', ), ]
24.30303
52
0.574813
63
802
7.15873
0.539683
0.277162
0.230599
0.266075
0.487805
0.487805
0
0
0
0
0
0.057196
0.32419
802
32
53
25.0625
0.774908
0.05611
0
0.5
1
0
0.237086
0.169536
0
0
0
0
0
1
0
false
0
0.038462
0
0.153846
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
ff759b9f072d1f9296066ec627c9c855987dabdc
29
py
Python
security/open_alchemy/__init__.py
open-alchemy/OpenAlchemyPackage
8bf0ed62ed7f6c5015f1bf1c4658dc353395fe9b
[ "Apache-2.0" ]
null
null
null
security/open_alchemy/__init__.py
open-alchemy/OpenAlchemyPackage
8bf0ed62ed7f6c5015f1bf1c4658dc353395fe9b
[ "Apache-2.0" ]
79
2020-11-28T04:02:25.000Z
2021-01-06T08:52:30.000Z
security/open_alchemy/__init__.py
open-alchemy/Package
8bf0ed62ed7f6c5015f1bf1c4658dc353395fe9b
[ "Apache-2.0" ]
null
null
null
"""Namespace placeholder."""
14.5
28
0.689655
2
29
10
1
0
0
0
0
0
0
0
0
0
0
0
0.068966
29
1
29
29
0.740741
0.758621
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
ff8936a87c1d6aa51ae7c9ad6e4ebecebdf9df3e
317
py
Python
DPGAnalysis/SiStripTools/python/poolSource_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
852
2015-01-11T21:03:51.000Z
2022-03-25T21:14:00.000Z
DPGAnalysis/SiStripTools/python/poolSource_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
30,371
2015-01-02T00:14:40.000Z
2022-03-31T23:26:05.000Z
DPGAnalysis/SiStripTools/python/poolSource_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
3,240
2015-01-02T05:53:18.000Z
2022-03-31T17:24:21.000Z
import FWCore.ParameterSet.Config as cms source = cms.Source("PoolSource", fileNames = cms.untracked.vstring(), # skipBadFiles = cms.untracked.bool(True), inputCommands = cms.untracked.vstring("keep *", "drop *_MEtoEDMConverter_*_*") )
35.222222
98
0.564669
26
317
6.769231
0.692308
0.204545
0.215909
0
0
0
0
0
0
0
0
0
0.321767
317
8
99
39.625
0.818605
0.126183
0
0
0
0
0.168627
0.086275
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
ffa0bb48d7404e573b6c9e5fa52446d5003efd93
99
py
Python
cdp_scrapers/instances/__init__.py
dhanya-shraddha/cdp-scrapers
7e0d841a2a64963405a075cd91985d24e3dedfa6
[ "MIT" ]
null
null
null
cdp_scrapers/instances/__init__.py
dhanya-shraddha/cdp-scrapers
7e0d841a2a64963405a075cd91985d24e3dedfa6
[ "MIT" ]
1
2021-10-01T05:27:21.000Z
2021-10-01T05:27:21.000Z
cdp_scrapers/instances/__init__.py
dhanya-shraddha/cdp-scrapers
7e0d841a2a64963405a075cd91985d24e3dedfa6
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Individual scratchpad and maybe up-to-date CDP instance scrapers. """
16.5
65
0.656566
13
99
5
1
0
0
0
0
0
0
0
0
0
0
0.012048
0.161616
99
5
66
19.8
0.771084
0.888889
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
ffa5465bfb3e2c7f5391b04142db505e1022b582
424
py
Python
tests/dictionary1.py
Oshanath/lpython
582e1718c04fcccd2b6e444d85dda3aae3e3292e
[ "BSD-3-Clause" ]
null
null
null
tests/dictionary1.py
Oshanath/lpython
582e1718c04fcccd2b6e444d85dda3aae3e3292e
[ "BSD-3-Clause" ]
null
null
null
tests/dictionary1.py
Oshanath/lpython
582e1718c04fcccd2b6e444d85dda3aae3e3292e
[ "BSD-3-Clause" ]
null
null
null
def test_Dict(): x: dict[i32, i32] x = {1: 2, 3: 4} # x = {1: "2", "3": 4} -> sematic error y: dict[str, i32] y = {"a": -1, "b": -2} z: i32 z = y["a"] z = y["b"] z = x[1] def test_dict_insert(): y: dict[str, i32] y = {"a": -1, "b": -2} y["c"] = -3 def test_dict_get(): y: dict[str, i32] y = {"a": -1, "b": -2} x: i32 x = y.get("a") x = y.get("a", 0)
15.703704
43
0.379717
79
424
1.974684
0.253165
0.051282
0.211538
0.211538
0.371795
0.307692
0.307692
0.307692
0.307692
0
0
0.112727
0.351415
424
26
44
16.307692
0.454545
0.087264
0
0.315789
0
0
0.028571
0
0
0
0
0
0
1
0.157895
false
0
0
0
0.157895
0
0
0
1
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4400856d181d08220fa50547c19d7bab6d8e6fbd
2,393
py
Python
marlgrid/pz_envs/contentFB.py
aivaslab/marlgrid
10b53d27ce224fadeeb5830d6034350a69feb4b4
[ "Apache-2.0" ]
null
null
null
marlgrid/pz_envs/contentFB.py
aivaslab/marlgrid
10b53d27ce224fadeeb5830d6034350a69feb4b4
[ "Apache-2.0" ]
null
null
null
marlgrid/pz_envs/contentFB.py
aivaslab/marlgrid
10b53d27ce224fadeeb5830d6034350a69feb4b4
[ "Apache-2.0" ]
null
null
null
from ..base_AEC import * from ..objects import * from random import randrange import random import math class ContentFBEnv(para_MultiGridEnv): """ Environment with a door and key, sparse reward. Similar to DoorKeyEnv in https://github.com/maximecb/gym-minigrid/blob/master/gym_minigrid/envs/doorkey.py """ mission = "use the key to open the door and then get to the goal" metadata = {} def init_agents(self, arg, agent_kwargs): if arg == 0: self.apos = [(6,11,3)] for agent in self.apos: self.add_agent(GridAgentInterface(**agent_kwargs)) def _gen_grid(self, width, height): # Create an empty grid self.grid = MultiGrid((width, height)) colors = random.sample(['green','purple','orange','yellow','blue','pink','red'], 4) # Generate the surrounding walls self.grid.wall_rect(0, 0, width-2, height) for k, x in enumerate(range(0,width-4,4)): self.grid.wall_rect(x, 0, 5, 5) self.put_obj(Goal(color=colors[k], reward=1), x+2, 2) self.put_obj(Door(color=colors[k]), x+2, 4) #self.put_obj(Key(color=colors[k],), x+2, 4) self.agent_spawn_kwargs = {'top':(1,1)} self.place_agents(**self.agent_spawn_kwargs) class ContentFBEnv2(para_MultiGridEnv): """ Environment with a door and key, sparse reward. Similar to DoorKeyEnv in https://github.com/maximecb/gym-minigrid/blob/master/gym_minigrid/envs/doorkey.py """ mission = "use the key to open the door and then get to the goal" metadata = {} def init_agents(self, arg, agent_kwargs): if arg == 0: self.apos = [(6,11,3)] for agent in self.apos: self.add_agent(GridAgentInterface(**agent_kwargs)) def _gen_grid(self, width, height): # Create an empty grid self.grid = MultiGrid((width, height)) colors = ['green','purple','orange'] # Generate the surrounding walls self.grid.wall_rect(0, 0, width-2, height) for k, x in enumerate(range(0,width-4,4)): self.grid.wall_rect(x, 0, 5, 5) self.put_obj(Ball(color=colors[k],), x+2, 2) self.put_obj(Wall(color=colors[(k+1) % 3],), x+2, 4) self.agent_spawn_kwargs = {} self.place_agents(**self.agent_spawn_kwargs)
32.337838
91
0.610113
341
2,393
4.175953
0.284457
0.033708
0.035112
0.044944
0.829354
0.820927
0.802669
0.699438
0.699438
0.699438
0
0.025267
0.255746
2,393
73
92
32.780822
0.774284
0.195153
0
0.634146
0
0
0.085379
0
0
0
0
0
0
1
0.097561
false
0
0.121951
0
0.365854
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
442b82682f3eee1189a613f243e844bcbfce1414
62
py
Python
tests/__init__.py
NickolasHKraus/statikos
7d098760caa2ae8969801249c8c4761f02fd2372
[ "MIT" ]
null
null
null
tests/__init__.py
NickolasHKraus/statikos
7d098760caa2ae8969801249c8c4761f02fd2372
[ "MIT" ]
null
null
null
tests/__init__.py
NickolasHKraus/statikos
7d098760caa2ae8969801249c8c4761f02fd2372
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Unit test package for statikos."""
20.666667
37
0.580645
8
62
4.5
1
0
0
0
0
0
0
0
0
0
0
0.019231
0.16129
62
2
38
31
0.673077
0.870968
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
443e880e410b850c486af0532ecd8869505aeb29
60
py
Python
investigateModules/compareModulePatches.py
kromerh/solarAnalytics
8d450746dcf45b5ae3933b5e2d8838a23c8b32ea
[ "MIT" ]
null
null
null
investigateModules/compareModulePatches.py
kromerh/solarAnalytics
8d450746dcf45b5ae3933b5e2d8838a23c8b32ea
[ "MIT" ]
null
null
null
investigateModules/compareModulePatches.py
kromerh/solarAnalytics
8d450746dcf45b5ae3933b5e2d8838a23c8b32ea
[ "MIT" ]
null
null
null
import pandas as pd import numpy as np import os import re
10
19
0.783333
12
60
3.916667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.216667
60
5
20
12
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
44762b0d5bcbe4b280011acbe66456167cfb82a3
7,580
py
Python
arabert_summarize_example.py
abdullah-abunada/bert-extractive-summarizer
45e69deed3dcc00c4073cb07507889ff44f1ca72
[ "MIT" ]
2
2021-10-02T08:12:35.000Z
2022-01-09T20:41:14.000Z
arabert_summarize_example.py
abdullah-abunada/bert-extractive-summarizer
45e69deed3dcc00c4073cb07507889ff44f1ca72
[ "MIT" ]
null
null
null
arabert_summarize_example.py
abdullah-abunada/bert-extractive-summarizer
45e69deed3dcc00c4073cb07507889ff44f1ca72
[ "MIT" ]
1
2021-12-29T21:53:31.000Z
2021-12-29T21:53:31.000Z
from summarizer import Summarizer from transformers import AutoTokenizer, AutoModel from rouge import Rouge body=''' أعلن اليوم الخميس في ماليزيا عن دخول ملك البلاد في حجر صحي بعد إصابة 7 عاملين في القصر بفيروس كورونا، ليكون بذلك أحدث زعماء العالم التحاقا بقائمة القادة الذين تحوم حولهم شبهة الإصابة بهذا الفيروس. وقال مشرف القصر الوطني في ماليزيا أحمد فاضل شمس الدين اليوم إن الملك السلطان عبد الله رعاية الدين المصطفى بالله شاه والملكة الحاجة عزيزة أمينة ميمونة الإسكندرية قد خضعا لفحص طبي واختبار تشخيصي للفيروس، حيث جاءت نتائج تحاليلهما سلبية. وقال إن الملك والملكة يخضعان حاليا للحجر الصحي في القصر، ولن يقبلا أي زيارة أو مقابلة رسمية إلى أن تنتهي فترة الحجر الصحي التي بدأت أمس ومن المقرر أن تستمر لمدة 14 يوما. ويوم أمس الأربعاء، أعلن مقر إقامة ولي العهد البريطاني الأمير تشارلز إصابة الأمير بفيروس كورونا. وقال متحدث باسم مقر إقامة الأمير تشارلز ثبتت إصابة الأمير تشارلز بفيروس كورونا، لقد ظهرت عليه أعراض طفيفة لكن صحته جيدة، وكان يعمل من البيت طوال الأيام الماضية كالمعتاد. ومساء الأحد الماضي، أعلن في ألمانيا عن الاشتباه في إصابة المستشارة الألمانية أنجيلا ميركل بفيروس، وخضوعها لحجر منزلي رغم أن نتائج الفحوص التي أجرتها كانت سلبية، ومع ذلك قررت الخضوع للحجر الصحي والبقاء في المنزل. في دوائر السلطة وقصور الحكم وخلال الأسابيع الماضية ومع اتساع دائرة العدوى وعدد الإصابات بفيروس كورونا في عدة دول ومناطق عبر العالم، بدأ الفيروس تدريجيا يقترب من مراكز اتخاذ القرار، وربما يعرض حياة قادة ومسؤولين كبار للخطر. وخضع الرئيس الأميركي دونالد ترامب لفحص تأكد بعده أنه غير مصاب، وذلك بعد لقائه بوفد برازيلي أحد أفراده مصاب بالفيروس. ولاحقا، أعلنت السلطات البرازيلية أن الوزير الذي التقى ترامب مصاب بالفيروس، كما وضع الرئيس جايير بولسونارو تحت المراقبة الصحية للتأكد من عدم إصابته. وذكرت وسائل إعلام برازيلية اليوم أن سكرتير الرئيس أيضا مصاب بكورونا ووضع في الحجر الصحي. كما خضع مايك بنس نائب الرئيس الأميركي وزوجته للفحص بعد إصابة موظف في مكتبه بالفيروس. وخلال الأسابيع الماضية أعلنت السلطات النرويجية أن الملك هارالد الخامس والملكة وكامل أعضاء الحكومة وضعوا في الحجر الصحي بسبب الفيروس. 
وفي كندا، قرر رئيس الوزراء جاستن ترودو العمل من منزله بعدما تبين أن زوجته صوفي غريغوار مصابة بالوباء. وفي فرنسا، أصيب وزير الثقافة فرانك ريستر بفيروس كورونا، كما أصيب أيضا 10 نواب على الأقل، في حين تقرر نقل اجتماعات مجلس الوزراء من القاعة التي تعقد فيها تقليديا إلى أخرى أكبر مساحة لضمان ترتيب المقاعد على مسافة متر على الأقل. كما أعلنت وزارة الدفاع الأميركية في وقت سابق أن الوزير مارك إسبر ونائبه وضعا في الحجر الصحي بعد تأكيد إصابة عشرات من طاقم الوزارة بفيروس كورونا. وقبل ذلك، أعلن الجيش الأميركي أن قائده في أوروبا إلى جانب عدد من الموظفين ربما تعرضوا لفيروس كورونا خلال مؤتمر عقد في الآونة الأخيرة. كما أعلن قبل أيام مكتب السيناتور الأميركي الجمهوري راند بول أنه أصيب بكورونا المستجد، ليصبح بذلك أول عضو في مجلس الشيوخ تتأكد إصابته بالفيروس. وسبق لعضو مجلس الشيوخ الأميركي السيناتور تيد كروز أن أعلن عزل نفسه بعدما التقى شخصا ثبتت إصابته بالفيروس. الاصابات الأكبر في إيران، أصاب الفيروس عددا من المسؤولين، وأدى في بداية الشهر الجاري إلى وفاة عضو مجلس تشخيص مصلحة النظام محمد مير محمدي. كما أصيب بالوباء علي أكبر ولايتي مستشار المرشد الإيراني، ومعصومة ابتكار نائبة الرئيس الإيراني لشؤون المرأة، وإيرج حريرجي نائب وزير الصحة الإيراني الذي ظهر أمس الجمعة في مؤتمر صحفي عقب تعافيه من المرض. وفي أستراليا، نقل وزير الداخلية بيتر دوتون إلى المستشفى بعدما ثبتت إصابته بفيروس كورونا، بينما أعلن في إندونيسيا أن وزير النقل بودي كاريا سومادي نقل إلى المستشفى عقب إصابته بالفيروس. وزراء بوركينا فاسو ولئن كان الفيروس استهدف قادة عدد من الدول ومسؤوليها بشكل فردي غالبا، فقد اختلف الحال في دولة بوركينا فاسو، إذ أصاب عددا من وزرائها بشكل متزامن، ويخشى أن يشل حكومتها إذا تكشفت إصابات جديدة. وقالت صحيفة لوموند الفرنسية إن بوركينا فاسو هي البلد الأكثر تضررا بوباء كورونا (كوفيد-19) حتى الآن في غربي أفريقيا، حيث توفيت النائبة الثانية لرئيس البرلمان وأصيب خمسة وزراء، إلى جانب الحديث عن إصابة كل من السفير الإيطالي والأميركي، مما أثار غضبا على شبكات التواصل الاجتماعي بسبب ما اعتبر "تراخي" الحكومة في إدارة الوباء. 
وقالت الصحيفة في مقال بقلم صوفي دوس مراسلتها في العاصمة واغادوغو إن الأمر اعتبر في البداية أنباء كاذبة، قبل أن يؤكده أصحاب الشأن أنفسهم، فقد أعلن وزير التعليم ستانيسلاس أوارو على فيسبوك إصابته يوم 19 مارس الجاري، قائلا عقب التشخيص ظهر أنني إيجابي بالنسبة لفيروس كورونا، وفي اليوم التالي أعلن نظيراه في الإدارة الإقليمية والمعادن إصابتهما بكورونا. ولم يتأخر وزير الشؤون الخارجية ألفا باري كثيرا بعدهم، حيث قال بعد يومين فقط من نفي الإشاعة رسميا إنه مصاب بالفيروس لقد تحققت الشائعات، تلقيت للتو اختبارا إيجابيا لكوفيد-19. وفي 23 مارس الحالي جاء دور وزير التجارة هارونا كابوري ليعلن إصابته هو الآخر، ليكون خامس الوزراء الذين تأكدت إصابتهم من أصل 29 عضوا في حكومة بوركينا فاسو، كما تقول المراسلة، مشيرة كذلك إلى إصابة كل من السفير الإيطالي والأميركي في هذا البلد بكورونا المستجد. ''' summary_evaluation = ''' أعلن اليوم الخميس في ماليزيا عن دخول ملك البلاد في حجر صحي بعد إصابة 7 عاملين في القصر بفيروس كورونا، ليكون بذلك أحدث زعماء العالم التحاقا بقائمة القادة الذين تحوم حولهم شبهة الإصابة بهذا الفيروس. ويوم أمس الأربعاء، أعلن مقر إقامة ولي العهد البريطاني الأمير تشارلز إصابة الأمير بفيروس كورونا. ومساء الأحد الماضي، أعلن في ألمانيا عن الاشتباه في إصابة المستشارة الألمانية أنجيلا ميركل بفيروس، وخضوعها لحجر منزلي رغم أن نتائج الفحوص التي أجرتها كانت سلبية، ومع ذلك قررت الخضوع للحجر الصحي والبقاء في المنزل. في دوائر السلطة وقصور الحكم وخلال الأسابيع الماضية ومع اتساع دائرة العدوى وعدد الإصابات بفيروس كورونا في عدة دول ومناطق عبر العالم، بدأ الفيروس تدريجيا يقترب من مراكز اتخاذ القرار، وربما يعرض حياة قادة ومسؤولين كبار للخطر. ولاحقا، أعلنت السلطات البرازيلية أن الوزير الذي التقى ترامب مصاب بالفيروس، كما وضع الرئيس جايير بولسونارو تحت المراقبة الصحية للتأكد من عدم إصابته. وخضع الرئيس الأميركي دونالد ترامب لفحص تأكد بعده أنه غير مصاب، وذلك بعد لقائه بوفد برازيلي أحد أفراده مصاب بالفيروس. وخلال الأسابيع الماضية أعلنت السلطات النرويجية أن الملك هارالد الخامس والملكة وكامل أعضاء الحكومة وضعوا في الحجر الصحي بسبب الفيروس. 
وفي كندا، قرر رئيس الوزراء جاستن ترودو العمل من منزله بعدما تبين أن زوجته صوفي غريغوار مصابة بالوباء. وفي فرنسا، أصيب وزير الثقافة فرانك ريستر بفيروس كورونا، كما أصيب أيضا 10 نواب على الأقل، في حين تقرر نقل اجتماعات مجلس الوزراء من القاعة التي تعقد فيها تقليديا إلى أخرى أكبر مساحة لضمان ترتيب المقاعد على مسافة متر على الأقل. كما أعلنت وزارة الدفاع الأميركية في وقت سابق أن الوزير مارك إسبر ونائبه وضعا في الحجر الصحي بعد تأكيد إصابة عشرات من طاقم الوزارة بفيروس كورونا. الاصابات الأكبر في إيران، أصاب الفيروس عددا من المسؤولين، وأدى في بداية الشهر الجاري إلى وفاة عضو مجلس تشخيص مصلحة النظام محمد مير محمدي. وزراء بوركينا فاسو ولئن كان الفيروس استهدف قادة عدد من الدول ومسؤوليها بشكل فردي غالبا، فقد اختلف الحال في دولة بوركينا فاسو، إذ أصاب عددا من وزرائها بشكل متزامن، ويخشى أن يشل حكومتها إذا تكشفت إصابات جديدة. وقالت صحيفة لوموند الفرنسية إن بوركينا فاسو هي البلد الأكثر تضررا بوباء كورونا (كوفيد-19) حتى الآن في غربي أفريقيا، حيث توفيت النائبة الثانية لرئيس البرلمان وأصيب خمسة وزراء، إلى جانب الحديث عن إصابة كل من السفير الإيطالي والأميركي، مما أثار غضبا على شبكات التواصل الاجتماعي بسبب ما اعتبر "تراخي" الحكومة في إدارة الوباء. ولم يتأخر وزير الشؤون الخارجية ألفا باري كثيرا بعدهم، حيث قال بعد يومين فقط من نفي الإشاعة رسميا إنه مصاب بالفيروس لقد تحققت الشائعات، تلقيت للتو اختبارا إيجابيا لكوفيد-19. ''' albert_model = AutoModel.from_pretrained('asafaya/bert-base-arabic') albert_tokenizer = AutoTokenizer.from_pretrained('asafaya/bert-base-arabic') modelSummarizer = Summarizer(custom_model=albert_model, custom_tokenizer=albert_tokenizer) result = modelSummarizer(body) generated_summary = ''.join(result) print(generated_summary) rouge = Rouge() scores = rouge.get_scores(generated_summary, summary_evaluation) print(scores)
118.4375
346
0.812797
1,323
7,580
4.695389
0.402116
0.023181
0.012556
0.007244
0.663554
0.663554
0.647135
0.647135
0.647135
0.647135
0
0.003471
0.163852
7,580
63
347
120.31746
0.966393
0
0
0.545455
0
0.581818
0.929938
0.006333
0
0
0
0
0
1
0
false
0
0.054545
0
0.054545
0.036364
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
9242274087f97ac266387b636296af92b2d7d9f8
422
py
Python
global_covid_tracker/plotting/__init__.py
kvanderveen/global_covid_tracker
ad4466b099aae30cbc73b62cce440c62a4fc87f9
[ "MIT" ]
null
null
null
global_covid_tracker/plotting/__init__.py
kvanderveen/global_covid_tracker
ad4466b099aae30cbc73b62cce440c62a4fc87f9
[ "MIT" ]
3
2021-08-23T20:45:46.000Z
2022-03-12T00:33:28.000Z
global_covid_tracker/plotting/__init__.py
kvanderveen/global_covid_tracker
ad4466b099aae30cbc73b62cce440c62a4fc87f9
[ "MIT" ]
null
null
null
from .plot_positive_test_rates import plot_positive_test_rates from .plot_total_cases import plot_total_cases from .plot_total_deaths import plot_total_deaths from .plot_deaths_by_country import plot_deaths_by_country from .plot_cases_by_country import plot_cases_by_country from .plot_total_tests import plot_total_tests from .plot_cases_growth import plot_cases_growth from .plot_deaths_growth import plot_deaths_growth
46.888889
62
0.905213
70
422
4.914286
0.185714
0.186047
0.113372
0.122093
0
0
0
0
0
0
0
0
0.075829
422
8
63
52.75
0.882051
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
92460118208d7c2379de4fa4f0cbfe2ad3d68845
223
py
Python
Cogs/Nullify.py
TheMasterGhost/CorpBot
3133d5b7fdfef09ac4b75fb42d91628b86d94ac0
[ "MIT" ]
null
null
null
Cogs/Nullify.py
TheMasterGhost/CorpBot
3133d5b7fdfef09ac4b75fb42d91628b86d94ac0
[ "MIT" ]
null
null
null
Cogs/Nullify.py
TheMasterGhost/CorpBot
3133d5b7fdfef09ac4b75fb42d91628b86d94ac0
[ "MIT" ]
null
null
null
def clean(string): # A helper script to strip out @here and @everyone mentions zerospace = "​" return string.replace("@everyone", "@{}everyone".format(zerospace)).replace("@here", "@{}here".format(zerospace))
55.75
117
0.668161
26
223
5.730769
0.653846
0.201342
0
0
0
0
0
0
0
0
0
0
0.152466
223
4
117
55.75
0.78836
0.255605
0
0
0
0
0.203704
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
92a2ade8e25bb89801f783432bcd0131ea069e28
68
py
Python
atcoder/abc149/b.py
sugitanishi/competitive-programming
51af65fdce514ece12f8afbf142b809d63eefb5d
[ "MIT" ]
null
null
null
atcoder/abc149/b.py
sugitanishi/competitive-programming
51af65fdce514ece12f8afbf142b809d63eefb5d
[ "MIT" ]
null
null
null
atcoder/abc149/b.py
sugitanishi/competitive-programming
51af65fdce514ece12f8afbf142b809d63eefb5d
[ "MIT" ]
null
null
null
a,b,k=map(int,input().split()) print(max(a-k,0),max(b-max(k-a,0),0))
34
37
0.602941
19
68
2.157895
0.526316
0
0
0
0
0
0
0
0
0
0
0.044776
0.014706
68
2
37
34
0.567164
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
92a704fccf87ceb7853e4a72b95d94d37432b6bb
18
py
Python
anchore_manager/version.py
Nordix/anchore-engine
f25baa5cbf9aa34d56e56b341a90f577d85e6146
[ "Apache-2.0" ]
110
2017-09-14T02:15:15.000Z
2022-03-30T20:14:21.000Z
anchore_manager/version.py
Nordix/anchore-engine
f25baa5cbf9aa34d56e56b341a90f577d85e6146
[ "Apache-2.0" ]
115
2017-09-22T12:15:30.000Z
2022-01-17T12:31:21.000Z
anchore_manager/version.py
Nordix/anchore-engine
f25baa5cbf9aa34d56e56b341a90f577d85e6146
[ "Apache-2.0" ]
56
2017-09-22T11:26:25.000Z
2022-03-03T14:14:58.000Z
version = "0.9.4"
9
17
0.555556
4
18
2.5
1
0
0
0
0
0
0
0
0
0
0
0.2
0.166667
18
1
18
18
0.466667
0
0
0
0
0
0.277778
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
92abf94301c78871fdaeb06a8f1ddaf5bff99c2c
268
py
Python
floodsystem/analysis.py
AndrewKeYanzhe/part-ia-flood-warning-system
eb3a32d3722be7b45acf3924fd4d2431652d3053
[ "MIT" ]
null
null
null
floodsystem/analysis.py
AndrewKeYanzhe/part-ia-flood-warning-system
eb3a32d3722be7b45acf3924fd4d2431652d3053
[ "MIT" ]
null
null
null
floodsystem/analysis.py
AndrewKeYanzhe/part-ia-flood-warning-system
eb3a32d3722be7b45acf3924fd4d2431652d3053
[ "MIT" ]
null
null
null
import matplotlib import numpy as np def polyfit (dates, levels, p): # dates = matplotlib.dates.date2num(dates) p_coeff = np.polyfit(dates,levels,p) # p_coeff = np.polyfit(dates-dates[0],levels,p) poly = np.poly1d(p_coeff) return poly, dates[0]
22.333333
51
0.682836
41
268
4.390244
0.390244
0.2
0.2
0.211111
0.222222
0
0
0
0
0
0
0.018433
0.190299
268
12
52
22.333333
0.81106
0.320896
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
2b94150c3b79e3074d21274247ea053c2cb884e0
178
py
Python
scripts/core/pass_types.py
evolving-dev/holo
d8ef8dd58302d9f2589e5d2b3011015ff145528d
[ "MIT" ]
null
null
null
scripts/core/pass_types.py
evolving-dev/holo
d8ef8dd58302d9f2589e5d2b3011015ff145528d
[ "MIT" ]
null
null
null
scripts/core/pass_types.py
evolving-dev/holo
d8ef8dd58302d9f2589e5d2b3011015ff145528d
[ "MIT" ]
null
null
null
class HoloResponse: def __init__(self, success, response=None): self.success = success if response != None: self.response = response
17.8
48
0.578652
17
178
5.823529
0.529412
0.222222
0.323232
0
0
0
0
0
0
0
0
0
0.342697
178
9
49
19.777778
0.846154
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.4
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
2bbd518b542fcf51bf60ace183944d104900efbf
55
py
Python
luigi/contrib/__init__.py
Mappy/luigi
539cd2cf69902bb6cef688afdf55e991cae4b537
[ "Apache-2.0" ]
2
2017-05-03T12:15:20.000Z
2018-09-14T02:28:54.000Z
luigi/contrib/__init__.py
Mappy/luigi
539cd2cf69902bb6cef688afdf55e991cae4b537
[ "Apache-2.0" ]
null
null
null
luigi/contrib/__init__.py
Mappy/luigi
539cd2cf69902bb6cef688afdf55e991cae4b537
[ "Apache-2.0" ]
null
null
null
"""Package containing optional and-on functionality."""
55
55
0.781818
6
55
7.166667
1
0
0
0
0
0
0
0
0
0
0
0
0.072727
55
1
55
55
0.843137
0.890909
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
2be799f355f713970380efb0ff0e7ecb819eb1f7
77
py
Python
terrascript/consul/__init__.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
null
null
null
terrascript/consul/__init__.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
null
null
null
terrascript/consul/__init__.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
1
2018-11-15T16:23:05.000Z
2018-11-15T16:23:05.000Z
# Consul provider is not created through makecode.py # because of issues 24.
25.666667
52
0.779221
12
77
5
1
0
0
0
0
0
0
0
0
0
0
0.03125
0.168831
77
2
53
38.5
0.90625
0.935065
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
2bec0c607c3e98c35152779520d4098296b2008b
226
py
Python
key_server/key_management_system/apps.py
TV-Encryption/key_server
db243f3941cef82a9eb8f01bbb3fa72d7b32d77e
[ "MIT" ]
null
null
null
key_server/key_management_system/apps.py
TV-Encryption/key_server
db243f3941cef82a9eb8f01bbb3fa72d7b32d77e
[ "MIT" ]
null
null
null
key_server/key_management_system/apps.py
TV-Encryption/key_server
db243f3941cef82a9eb8f01bbb3fa72d7b32d77e
[ "MIT" ]
null
null
null
from django.apps import AppConfig class KeyManagementSystemConfig(AppConfig): default_auto_field = "django.db.models.BigAutoField" name = "key_server.key_management_system" verbose_name = "Key Management System"
28.25
56
0.787611
26
226
6.615385
0.730769
0.081395
0.22093
0
0
0
0
0
0
0
0
0
0.137168
226
7
57
32.285714
0.882051
0
0
0
0
0
0.362832
0.269912
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
920aca29cb5a19cdc1c9d17f62a1e4f246af7860
112
py
Python
datrie/run_test.py
nikicc/anaconda-recipes
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
[ "BSD-3-Clause" ]
130
2015-07-28T03:41:21.000Z
2022-03-16T03:07:41.000Z
datrie/run_test.py
nikicc/anaconda-recipes
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
[ "BSD-3-Clause" ]
119
2015-08-01T00:54:06.000Z
2021-01-05T13:00:46.000Z
datrie/run_test.py
nikicc/anaconda-recipes
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
[ "BSD-3-Clause" ]
72
2015-07-29T02:35:56.000Z
2022-02-26T14:31:15.000Z
import string import datrie trie = datrie.Trie(string.ascii_lowercase) trie[u'foo'] = 5 assert u'foo' in trie
14
42
0.75
19
112
4.368421
0.578947
0.240964
0
0
0
0
0
0
0
0
0
0.010417
0.142857
112
7
43
16
0.854167
0
0
0
0
0
0.053571
0
0
0
0
0
0.2
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
920ae2c4b1ffab09874d3603b23d3df7ab55beeb
962
py
Python
projects/models.py
plaf2000/webspec
487ccccff088ddbda0e5e475aaad167a01f4aab2
[ "MIT" ]
null
null
null
projects/models.py
plaf2000/webspec
487ccccff088ddbda0e5e475aaad167a01f4aab2
[ "MIT" ]
null
null
null
projects/models.py
plaf2000/webspec
487ccccff088ddbda0e5e475aaad167a01f4aab2
[ "MIT" ]
null
null
null
from django.db import models from django.contrib.auth.models import User class Project(models.Model): hf = models.PositiveIntegerField(default=18000) lf = models.PositiveIntegerField(default=0) nfft_view = models.PositiveIntegerField(default=2048) nfft_project = models.PositiveIntegerField(default=2048) fft_window_view = models.PositiveIntegerField(default=2048) fft_window_project = models.PositiveIntegerField(default=2048) title = models.CharField(max_length=50) description = models.TextField(blank=True) created = models.DateTimeField(auto_now_add=True) created_user = models.ForeignKey(User,on_delete=models.PROTECT,related_name='created_user') last_edit = models.DateTimeField(auto_now=True) last_edit_user = models.ForeignKey(User,on_delete=models.PROTECT,related_name='last_edit_user')
56.588235
104
0.697505
105
962
6.190476
0.419048
0.24
0.304615
0.227692
0.461538
0.313846
0.172308
0.172308
0.172308
0.172308
0
0.031957
0.219335
962
17
105
56.588235
0.833555
0
0
0
0
0
0.026999
0
0
0
0
0
0
1
0
false
0
0.133333
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
92169128033b3d7a3bf8aa8e0095c438a88138a8
297
py
Python
salty/exceptions.py
Markcial/salty
c9e5ad3e71af94cb5260f175e4d3734885efda30
[ "MIT" ]
null
null
null
salty/exceptions.py
Markcial/salty
c9e5ad3e71af94cb5260f175e4d3734885efda30
[ "MIT" ]
null
null
null
salty/exceptions.py
Markcial/salty
c9e5ad3e71af94cb5260f175e4d3734885efda30
[ "MIT" ]
null
null
null
__all__ = ['EncryptException', 'DecryptException', 'DefaultKeyNotSet', 'NoValidKeyFound'] class EncryptException(BaseException): pass class DecryptException(BaseException): pass class DefaultKeyNotSet(EncryptException): pass class NoValidKeyFound(DecryptException): pass
14.85
89
0.76431
21
297
10.619048
0.380952
0.121076
0.197309
0
0
0
0
0
0
0
0
0
0.148148
297
19
90
15.631579
0.881423
0
0
0.444444
0
0
0.213559
0
0
0
0
0
0
1
0
false
0.444444
0
0
0.444444
0
1
0
1
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
a61004468602027708717a182ff1b7708af746b8
1,907
py
Python
mongox/fields.py
Collector0/mongox
40fc6b7076c959e7bf8c001527584891fca8631c
[ "MIT" ]
1
2021-11-13T15:03:04.000Z
2021-11-13T15:03:04.000Z
mongox/fields.py
ischaojie/mongox
29e1dc3cb5f34190f6678c727132776068a09655
[ "MIT" ]
null
null
null
mongox/fields.py
ischaojie/mongox
29e1dc3cb5f34190f6678c727132776068a09655
[ "MIT" ]
null
null
null
import typing

import bson
from pydantic import Field
from pydantic.fields import ModelField as PydanticModelField

__all__ = ["Field", "ObjectId"]


class ObjectId(bson.ObjectId):
    """bson ObjectId subclass usable directly as a pydantic field type."""

    @classmethod
    def __get_validators__(cls) -> typing.Generator[bson.ObjectId, None, None]:
        # pydantic consumes this generator to collect validators.
        yield cls.validate

    @classmethod
    def validate(cls, value: typing.Any) -> bson.ObjectId:
        """Coerce *value* to a bson.ObjectId, raising ValueError if invalid."""
        if bson.ObjectId.is_valid(value):
            return bson.ObjectId(value)
        raise ValueError("Invalid ObjectId")

    @classmethod
    def __modify_schema__(cls, field_schema: dict) -> None:
        # Present the field as a plain string in the generated JSON schema.
        field_schema.update(type="string")


class ModelField(PydanticModelField):
    """Pydantic ModelField extended with MongoDB query-building operators."""

    __slots__: typing.Tuple[str, ...] = tuple()

    def _query(
        self, mongo_op: str, operand: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        # Single {field: {"$op": operand}} comparison clause.
        return {self.name: {mongo_op: operand}}

    def __lt__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._query("$lt", other)

    def __le__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._query("$lte", other)

    def __eq__(  # type: ignore[override]
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        # "$eq" (instead of a bare value) keeps regex operands working.
        return self._query("$eq", other)

    def __ne__(  # type: ignore[override]
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._query("$ne", other)

    def __gt__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._query("$gt", other)

    def __ge__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._query("$gte", other)

    def __hash__(self) -> int:
        # __eq__ is overridden, so restore hashing from the parent class.
        return super().__hash__()
27.242857
79
0.617724
229
1,907
4.912664
0.30131
0.104
0.138667
0.202667
0.376889
0.376889
0.376889
0.376889
0.376889
0.376889
0
0
0.240692
1,907
69
80
27.637681
0.776934
0.092816
0
0.333333
0
0
0.03241
0
0
0
0
0
0
1
0.222222
false
0
0.088889
0.155556
0.555556
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
a618f828871a4f9f42a3e61da6a4defd594afbdb
176
py
Python
d2/detr/__init__.py
reubenwenisch/detr_custom
ae03ed599336f184e471eaf0048614dd788ffaf9
[ "Apache-2.0" ]
8,849
2020-05-27T00:52:55.000Z
2022-03-31T14:21:30.000Z
d2/detr/__init__.py
reubenwenisch/detr_custom
ae03ed599336f184e471eaf0048614dd788ffaf9
[ "Apache-2.0" ]
453
2020-05-27T04:01:32.000Z
2022-03-30T03:48:26.000Z
d2/detr/__init__.py
reubenwenisch/detr_custom
ae03ed599336f184e471eaf0048614dd788ffaf9
[ "Apache-2.0" ]
1,691
2020-05-27T02:16:40.000Z
2022-03-31T05:44:39.000Z
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved from .config import add_detr_config from .detr import Detr from .dataset_mapper import DetrDatasetMapper
35.2
70
0.818182
25
176
5.64
0.72
0
0
0
0
0
0
0
0
0
0
0
0.130682
176
4
71
44
0.921569
0.386364
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
a641d2aa2cfe97b524c9e4fc4f5456576d4c7c5c
663
py
Python
provarme_dashboard/migrations/0006_auto_20190623_1914.py
arferreira/dropazul_app
f341da5f2bcccd2c1f40fad00c6e5d77bba4c6f3
[ "MIT" ]
null
null
null
provarme_dashboard/migrations/0006_auto_20190623_1914.py
arferreira/dropazul_app
f341da5f2bcccd2c1f40fad00c6e5d77bba4c6f3
[ "MIT" ]
9
2020-06-05T23:49:20.000Z
2022-01-13T01:43:03.000Z
provarme_dashboard/migrations/0006_auto_20190623_1914.py
arferreira/dropazul_app
f341da5f2bcccd2c1f40fad00c6e5d77bba4c6f3
[ "MIT" ]
null
null
null
# Generated by Django 2.0.5 on 2019-06-23 19:14 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('provarme_dashboard', '0005_devolution_traffic'), ] operations = [ migrations.RemoveField( model_name='devolution', name='address', ), migrations.RemoveField( model_name='devolution', name='city', ), migrations.RemoveField( model_name='devolution', name='state', ), migrations.RemoveField( model_name='devolution', name='zipcode', ), ]
22.1
58
0.544495
56
663
6.321429
0.571429
0.237288
0.293785
0.338983
0.497175
0.497175
0
0
0
0
0
0.043779
0.3454
663
29
59
22.862069
0.771889
0.067873
0
0.521739
1
0
0.168831
0.037338
0
0
0
0
0
1
0
false
0
0.043478
0
0.173913
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
a65a378d0980f12b970ed55c7de79ac039724e55
48
py
Python
python/testData/keywordCompletion/finallyInExcept.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2018-12-29T09:53:39.000Z
2018-12-29T09:53:42.000Z
python/testData/keywordCompletion/finallyInExcept.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/keywordCompletion/finallyInExcept.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
try: a = 1 except: a = 2 fina<caret>
9.6
15
0.458333
8
48
2.75
0.875
0
0
0
0
0
0
0
0
0
0
0.071429
0.416667
48
5
15
9.6
0.714286
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
a66ef729e29a7b137d77c4428bb5f454b35d79aa
330
py
Python
backend/src/controllers/util/time_util.py
tmdt-buw/gideon-ts
b839672fcc19f13562f6da23e6407fff0b18d3ec
[ "MIT" ]
null
null
null
backend/src/controllers/util/time_util.py
tmdt-buw/gideon-ts
b839672fcc19f13562f6da23e6407fff0b18d3ec
[ "MIT" ]
null
null
null
backend/src/controllers/util/time_util.py
tmdt-buw/gideon-ts
b839672fcc19f13562f6da23e6407fff0b18d3ec
[ "MIT" ]
null
null
null
import datetime as dt


def current_time() -> str:
    """Return the current local wall-clock time formatted as ``HH:MM:SS``."""
    return dt.datetime.now().strftime("%H:%M:%S")


def time_string_to_js_timestamp(time: dt.datetime) -> int:
    """Convert a naive UTC datetime to a JavaScript timestamp.

    JavaScript timestamps are milliseconds since the Unix epoch while
    Python's are seconds — hence the ``* 1000``.

    Fixes over the original:
    - annotation was ``time: datetime`` (the *module*, not the class);
    - duplicate ``import datetime`` removed;
    - pytz dropped: for plain UTC (no DST rules),
      ``pytz.timezone("UTC").localize(t)`` is equivalent to
      ``t.replace(tzinfo=datetime.timezone.utc)``.
    """
    return round(time.replace(tzinfo=dt.timezone.utc).timestamp() * 1000)
22
60
0.715152
46
330
5.021739
0.630435
0.121212
0
0
0
0
0
0
0
0
0
0.028986
0.163636
330
14
61
23.571429
0.807971
0.136364
0
0
0
0
0.039007
0
0
0
0
0
0
1
0.25
false
0
0.375
0.125
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
a68cf49a12bc62a074851902de8ab970dee1d0f4
376
py
Python
tests/protocol/primary/a.py
gufolabs/gufo_loader
ffb4e17b2e8f36d938a145d50b7bd27d976f9fce
[ "BSD-3-Clause" ]
4
2022-03-04T07:49:18.000Z
2022-03-08T07:57:05.000Z
tests/protocol/primary/a.py
gufolabs/gufo_loader
ffb4e17b2e8f36d938a145d50b7bd27d976f9fce
[ "BSD-3-Clause" ]
null
null
null
tests/protocol/primary/a.py
gufolabs/gufo_loader
ffb4e17b2e8f36d938a145d50b7bd27d976f9fce
[ "BSD-3-Clause" ]
1
2022-03-08T07:57:07.000Z
2022-03-08T07:57:07.000Z
# ---------------------------------------------------------------------
# Gufo Labs Loader:
# a plugin
# ---------------------------------------------------------------------
# Copyright (C) 2022, Gufo Labs
# ---------------------------------------------------------------------


class APlugin(object):
    """Test plugin that exposes its name through ``get_name``."""

    name = "a"

    def get_name(self) -> str:
        """Return this plugin's configured name."""
        plugin_name = self.name
        return plugin_name
26.857143
71
0.260638
23
376
4.217391
0.73913
0.164948
0
0
0
0
0
0
0
0
0
0.012232
0.130319
376
13
72
28.923077
0.284404
0.707447
0
0
0
0
0.009709
0
0
0
0
0
0
1
0.25
false
0
0
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
a6987b7f5d13e03773ebbfeb594e4576981e0f1d
72
py
Python
Python/Matts_Lessons/comments.py
Josh-Luedke/Vision-Notes
544e9ef53dbf34e19af5144012b90bfa19012c16
[ "MIT" ]
null
null
null
Python/Matts_Lessons/comments.py
Josh-Luedke/Vision-Notes
544e9ef53dbf34e19af5144012b90bfa19012c16
[ "MIT" ]
null
null
null
Python/Matts_Lessons/comments.py
Josh-Luedke/Vision-Notes
544e9ef53dbf34e19af5144012b90bfa19012c16
[ "MIT" ]
null
null
null
# Here is something that I am typing # print("I am printing something")
36
37
0.736111
12
72
4.416667
0.75
0.113208
0
0
0
0
0
0
0
0
0
0
0.180556
72
2
38
36
0.898305
0.944444
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
a6f626e57a1d82eddc8ff7025f8974f6515c91cf
113
py
Python
docs/constants.py
djangothon/django-mptt-docs
802aebdbd3181ec006f4711b0a03d3d2a00d6af9
[ "BSD-3-Clause" ]
1
2017-09-02T20:06:41.000Z
2017-09-02T20:06:41.000Z
docs/constants.py
djangothon/django-mptt-docs
802aebdbd3181ec006f4711b0a03d3d2a00d6af9
[ "BSD-3-Clause" ]
null
null
null
docs/constants.py
djangothon/django-mptt-docs
802aebdbd3181ec006f4711b0a03d3d2a00d6af9
[ "BSD-3-Clause" ]
null
null
null
"""HackerEarth docs constants""" DOC_URL_DICT = { # 'doc_name': 'doc_url1', # 'doc_name': 'doc_url2', }
16.142857
32
0.60177
14
113
4.428571
0.642857
0.225806
0.322581
0
0
0
0
0
0
0
0
0.021978
0.19469
113
6
33
18.833333
0.659341
0.663717
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
47179f5bf12e9b03efc4a0874bcc2fffe620b647
347
py
Python
context_cache/context_cache.py
tervay/the-blue-alliance
e14c15cb04b455f90a2fcfdf4c1cdbf8454e17f8
[ "MIT" ]
266
2015-01-04T00:10:48.000Z
2022-03-28T18:42:05.000Z
context_cache/context_cache.py
gregmarra/the-blue-alliance
5bedaf5c80b4623984760d3da3289640639112f9
[ "MIT" ]
2,673
2015-01-01T20:14:33.000Z
2022-03-31T18:17:16.000Z
context_cache/context_cache.py
gregmarra/the-blue-alliance
5bedaf5c80b4623984760d3da3289640639112f9
[ "MIT" ]
230
2015-01-04T00:10:48.000Z
2022-03-26T18:12:04.000Z
from google.appengine.ext import ndb

# Module-level cache; entries are scoped per ndb context (see below).
CACHE_DATA = {}


def _full_cache_key(cache_key):
    """Build the context-scoped key for *cache_key*.

    Extracted to remove the duplicated expression in get()/set(). The ndb
    context hash is appended so values from one context never collide with
    another's.
    """
    return '{}:{}'.format(cache_key, ndb.get_context().__hash__())


def get(cache_key):
    """Return the cached value for *cache_key* in the current ndb context,
    or None if absent."""
    return CACHE_DATA.get(_full_cache_key(cache_key), None)


def set(cache_key, value):
    """Store *value* under *cache_key* for the current ndb context.

    NOTE: intentionally shadows the ``set`` builtin at module level, as in
    the original API — callers use it as ``context_cache.set(...)``.
    """
    CACHE_DATA[_full_cache_key(cache_key)] = value
23.133333
76
0.70317
51
347
4.294118
0.372549
0.292237
0.219178
0.164384
0.392694
0.392694
0.392694
0.392694
0.392694
0.392694
0
0
0.138329
347
14
77
24.785714
0.732441
0
0
0.25
0
0
0.028818
0
0
0
0
0
0
1
0.25
false
0
0.125
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
5b375610f2df94425cc6ff0fc566d0f540dd1a07
93
py
Python
app/crawlzero/apps.py
rputh055/crawlzerotest
6dfd919b2d59b5d3eec7ea9b2f5ff149428ae83f
[ "MIT" ]
null
null
null
app/crawlzero/apps.py
rputh055/crawlzerotest
6dfd919b2d59b5d3eec7ea9b2f5ff149428ae83f
[ "MIT" ]
null
null
null
app/crawlzero/apps.py
rputh055/crawlzerotest
6dfd919b2d59b5d3eec7ea9b2f5ff149428ae83f
[ "MIT" ]
null
null
null
from django.apps import AppConfig


# NOTE(review): class name is lowercase; conventional Django style would be
# CrawlzeroConfig, but renaming would break any dotted references to it
# (e.g. in INSTALLED_APPS) — confirm before changing.
class crawlzeroConfig(AppConfig):
    """Django application configuration for the ``crawlzero`` app."""

    # Dotted module path Django uses to identify this application.
    name = 'crawlzero'
15.5
33
0.763441
10
93
7.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
5
34
18.6
0.910256
0
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
5b41dca0dc7f5a816592454fec9d09eb00293d8b
199
py
Python
users/urls.py
a-vek/news-aggregator
d1aafbf7f2eed365ba734912e494af5c92728379
[ "MIT" ]
null
null
null
users/urls.py
a-vek/news-aggregator
d1aafbf7f2eed365ba734912e494af5c92728379
[ "MIT" ]
null
null
null
users/urls.py
a-vek/news-aggregator
d1aafbf7f2eed365ba734912e494af5c92728379
[ "MIT" ]
null
null
null
from django.urls import path
from news.views import scrape, news_list
from . import views

# URL routes for the users app. All routes are currently disabled
# (commented out); the imports above are kept for when they are re-enabled.
urlpatterns = [
    # path('', views.index, name="home"),
    # path('newslist', news_list, name="home"),
]
22.111111
47
0.678392
27
199
4.925926
0.518519
0.120301
0
0
0
0
0
0
0
0
0
0
0.170854
199
8
48
24.875
0.806061
0.386935
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.6
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
5bb15a5ffb620445f0397de1c06daebdacb8541a
908
py
Python
main/api/fields.py
lipis/gae-init-magic
6b1e0b50f8e5200cb2dacebca9ac65e796b241a9
[ "MIT" ]
465
2015-01-01T17:49:09.000Z
2021-12-06T15:00:40.000Z
main/api/fields.py
lipis/gae-init-magic
6b1e0b50f8e5200cb2dacebca9ac65e796b241a9
[ "MIT" ]
652
2018-10-26T12:28:08.000Z
2021-08-02T09:13:48.000Z
main/api/fields.py
lipis/gae-init-magic
6b1e0b50f8e5200cb2dacebca9ac65e796b241a9
[ "MIT" ]
171
2015-01-01T16:48:09.000Z
2022-03-15T21:48:52.000Z
# coding: utf-8 import urllib from flask_restful import fields from flask_restful.fields import * class BlobKey(fields.Raw): def format(self, value): return urllib.quote(str(value)) class Blob(fields.Raw): def format(self, value): return repr(value) class DateTime(fields.DateTime): def format(self, value): return value.isoformat() class GeoPt(fields.Raw): def format(self, value): return '%s,%s' % (value.lat, value.lon) class Id(fields.Raw): def output(self, key, obj): try: value = getattr(obj, 'key', None).id() return super(Id, self).output(key, {'id': value}) except AttributeError: return None class Integer(fields.Integer): def format(self, value): if value > 9007199254740992 or value < -9007199254740992: return str(value) return value class Key(fields.Raw): def format(self, value): return value.urlsafe()
18.916667
61
0.67511
123
908
4.96748
0.341463
0.08838
0.12766
0.176759
0.271686
0.271686
0.216039
0
0
0
0
0.045267
0.197137
908
47
62
19.319149
0.792867
0.014317
0
0.2
0
0
0.011198
0
0
0
0
0
0
1
0.233333
false
0
0.1
0.166667
0.866667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
5bc10292408a422083d964a15112bb688f34a1cb
3,592
py
Python
scripts/molecules.py
abelcarreras/PyQchem
2edf984ba17373ad3fd450b18592c8b7827b72e5
[ "MIT" ]
16
2020-03-06T00:15:16.000Z
2022-02-21T12:54:46.000Z
scripts/molecules.py
abelcarreras/qchem_scripts
992fc3d650f3b7651c63aba5759ba0f986eccffe
[ "MIT" ]
3
2020-12-24T12:44:14.000Z
2021-03-30T03:11:16.000Z
scripts/molecules.py
abelcarreras/qchem_scripts
992fc3d650f3b7651c63aba5759ba0f986eccffe
[ "MIT" ]
3
2020-06-05T20:55:41.000Z
2021-03-23T18:17:15.000Z
from pyqchem.structure import Structure
import numpy as np

# Geometry builders for ethene/tetrafluoroethene dimers. Coordinates are
# presumably in Angstroms (standard for Q-Chem inputs) — TODO confirm.
# Each function returns (Structure, options-dict) where the dict carries
# 'state_threshold' and 'n_mon' (atoms in the first monomer).


# Ethene parallel position
def dimer_ethene(distance, slide_y, slide_z):
    """Two parallel ethene molecules separated by `distance` along x,
    with the second monomer shifted by slide_y / slide_z in y / z."""
    coordinates = [[0.0000000, 0.0000000, 0.6660120],
                   [0.0000000, 0.0000000, -0.6660120],
                   [0.0000000, 0.9228100, 1.2279200],
                   [0.0000000, -0.9228100, 1.2279200],
                   [0.0000000, -0.9228100, -1.2279200],
                   [0.0000000, 0.9228100, -1.2279200],
                   [distance, 0.0000000, 0.6660120],
                   [distance, 0.0000000, -0.6660120],
                   [distance, 0.9228100, 1.2279200],
                   [distance, -0.9228100, 1.2279200],
                   [distance, -0.9228100, -1.2279200],
                   [distance, 0.9228100, -1.2279200]]

    coordinates = np.array(coordinates)
    # Rows 6+ are the second monomer; apply the slide offsets to it only.
    coordinates[6:, 1] = coordinates[6:, 1] + slide_y
    coordinates[6:, 2] = coordinates[6:, 2] + slide_z

    symbols = ['C', 'C', 'H', 'H', 'H', 'H', 'C', 'C', 'H', 'H', 'H', 'H']

    molecule = Structure(coordinates=coordinates, symbols=symbols, charge=0)

    return molecule, {'state_threshold': 0.2, 'n_mon': 6}


# Tetracloroethene
def dimer_tetrafluoroethene(distance, slide_y, slide_z):
    """Tetrafluoroethene dimer built by duplicating one monomer.

    NOTE(review): here `distance` shifts along z (column 2) and `slide_z`
    along x (column 0) — the opposite axis convention of dimer_ethene.
    Confirm this is intended.
    """
    monomer = [[ 0.6624670117, 0.0000000000, 0.0000000000],
               [-0.6624670117, 0.0000000000, 0.0000000000],
               [ 1.3834661472, 1.0993897934, 0.0000000000],
               [ 1.3834661472, -1.0993897934, 0.0000000000],
               [-1.3834661472, -1.0993897934, 0.0000000000],
               [-1.3834661472, 1.0993897934, 0.0000000000]]

    symbols = ['C', 'C', 'F', 'F', 'F', 'F']

    monomer2 = np.array(monomer)
    #monomer2 = np.dot(monomer, rotation_matrix([0, 1, 0], np.pi / 2))

    monomer2[:, 2] = monomer2[:, 2] + distance
    monomer2[:, 1] = monomer2[:, 1] + slide_y
    monomer2[:, 0] = monomer2[:, 0] + slide_z

    coordinates = np.vstack([monomer, monomer2])

    molecule = Structure(coordinates=coordinates, symbols=symbols * 2, charge=0)

    return molecule, {'state_threshold': 0.2, 'n_mon': len(monomer)}


# Tetracloroethene
def dimer_mix(distance, slide_y, slide_z):
    """Mixed dimer: one ethene monomer plus one tetrafluoroethene monomer.

    NOTE(review): same axis convention as dimer_tetrafluoroethene —
    `distance` along z, `slide_z` along x. Confirm intended.
    """
    monomer1 = [[ 0.6660120, 0.0000000, 0.0000000,],
                [-0.6660120, 0.0000000, 0.0000000,],
                [ 1.2279200, 0.9228100, 0.0000000,],
                [ 1.2279200, -0.9228100, 0.0000000,],
                [-1.2279200, -0.9228100, 0.0000000,],
                [-1.2279200, 0.9228100, 0.0000000,]]

    symbols1 = ['C', 'C', 'H', 'H', 'H', 'H']

    monomer2 = [[ 0.6624670117, 0.0000000000, 0.0000000000],
                [-0.6624670117, 0.0000000000, 0.0000000000],
                [ 1.3834661472, 1.0993897934, 0.0000000000],
                [ 1.3834661472, -1.0993897934, 0.0000000000],
                [-1.3834661472, -1.0993897934, 0.0000000000],
                [-1.3834661472, 1.0993897934, 0.0000000000]]

    symbols2 = ['C', 'C', 'F', 'F', 'F', 'F']

    monomer2 = np.array(monomer2)

    monomer2[:, 2] = monomer2[:, 2] + distance
    monomer2[:, 1] = monomer2[:, 1] + slide_y
    monomer2[:, 0] = monomer2[:, 0] + slide_z

    coordinates = np.vstack([monomer1, monomer2])
    symbols = symbols1 + symbols2

    molecule = Structure(coordinates=coordinates, symbols=symbols, charge=0)

    return molecule, {'state_threshold': 0.4, 'n_mon': len(monomer1)}
36.653061
74
0.52422
391
3,592
4.759591
0.143223
0.077378
0.062869
0.06878
0.758195
0.725954
0.687802
0.66201
0.66201
0.639441
0
0.342148
0.315702
3,592
98
75
36.653061
0.414972
0.034243
0
0.382353
0
0
0.025974
0
0
0
0
0
0
1
0.044118
false
0
0.029412
0
0.117647
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5bcb99c6eaf056e3bdbaec31592016bcd7721ad5
118
py
Python
python/rna-transcription/rna_transcription.py
guillaume-martin/exercism
7008c31668bb8cc1d386421c665341568339c380
[ "MIT" ]
null
null
null
python/rna-transcription/rna_transcription.py
guillaume-martin/exercism
7008c31668bb8cc1d386421c665341568339c380
[ "MIT" ]
null
null
null
python/rna-transcription/rna_transcription.py
guillaume-martin/exercism
7008c31668bb8cc1d386421c665341568339c380
[ "MIT" ]
null
null
null
def to_rna(dna_strand):
    """Transcribe a DNA strand into its RNA complement.

    G->C, C->G, T->A, A->U. Raises KeyError on any other character,
    matching the original lookup behavior.
    """
    complement = {'G': 'C', 'C': 'G', 'T': 'A', 'A': 'U'}
    transcribed = [complement[nucleotide] for nucleotide in dna_strand]
    return ''.join(transcribed)
39.333333
48
0.542373
23
118
2.652174
0.695652
0.295082
0
0
0
0
0
0
0
0
0
0
0.152542
118
3
48
39.333333
0.61
0
0
0
0
0
0.067227
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
5bd366b30abb8fedacf7c21ecc291b16250a6276
9,194
py
Python
shc/utils.py
TransparentHealth/healthcards_python_sample_scripts
85ffa973c21912399a6abc8b14faf99b5ee86f4d
[ "Apache-2.0" ]
1
2022-03-08T22:29:23.000Z
2022-03-08T22:29:23.000Z
shc/utils.py
TransparentHealth/healthcards_python_sample_scripts
85ffa973c21912399a6abc8b14faf99b5ee86f4d
[ "Apache-2.0" ]
null
null
null
shc/utils.py
TransparentHealth/healthcards_python_sample_scripts
85ffa973c21912399a6abc8b14faf99b5ee86f4d
[ "Apache-2.0" ]
1
2021-10-20T13:28:08.000Z
2021-10-20T13:28:08.000Z
#!/usr/bin/env python
"""Utilities for encoding/decoding SMART Health Cards (SHC) as signed,
DEFLATE-compressed JWS payloads, plus numeric/QR encoding helpers."""
import zlib
import requests
from jose import jwk as jose_jwk, jws
import json
import qrcode

# Example JWS-encoded health card payload (header.payload.signature).
sample_payload = "eyJ6aXAiOiJERUYiLCJhbGciOiJFUzI1NiIsImtpZCI6IjNLZmRnLVh3UC03Z1h5eXd0VWZVQUR3QnVtRE9QS01ReC1pRUxMMTFXOXMifQ.3VRNT9swGP4rkbmmTZxQSnIaZZPWCRBSy3aYOLiOSbw5duSPlq7qf99rp4FOg952WW62Xz_v8-E3O8SNQSVqrO1MmSQUF2NKdGUS2HdMoxhxYlGJJwVO82lxnsZoTVG5Qx-okpY9w9n3l9ubzWa8ycdK10mW4suEalYxaTkRJllj9Bgju-2Yv_GVaf7EyUqw65ca6DUAmZZo2zAibNPTOaNqzStcnKzpFyO_OFnH29ZJ_otYrqQn9Upz4VY_GLVe31PDNbA0vqZE5-N0jAHU786crATzNZoZ5TRly6AKHQ4GlYgqIQDNI8QIGugtSAdkJ8SDFlAw3C9TKBgWbwDfA1W471GeLZM9J0ByAWUQWnPbuNWYqjZZaiJNRzRc-hyUJ5dpOrrIR_nIamcsWPLz7B7w1kQ4dq2q0AWjPbghSct6nqTlAiijKwk0tAkyar5mvjn6ohpJbEP85ozXtWDGKhnN53P06GFqJit4PyVqSfBkxSGFj8T6TriY4FGKR5nXTaoK5JrQslPGEnHgg7P8fAIFVLneO_SwAIb7ffymh_i0h_Pj0GMEfawzIaS2E8yyKrhBKZcHP3ZwVHFZB2JmayxrD2aD142Yhmfu30NieJXQ9XOgGphn6dQTjVF3CC7QeWKQh-d2nDsUKUqdDkfeniVve4gsWJR6WZ3mkNh2cdAGEbIYCUV7Me-B5x5cKHvn2lVI4kbZ6CwNH0T9jo3Zf2ljVvwTG6fv2piftvFm6DrMG7qaXUf3DdEtodvjqdghAV76icsKfJlFwN420S3hMlrY6O6T_4H9MTaTfJpdeBO59TNzSypuXsPyFd_mf03V3s_Vfv8b.6RJ6ZFwPRqsVdDXsEUaDhkRo0u3nKC1cSCgN7YyPM1tteqPziRNbEkMdvURrkZ3baECxqmDybQvpGKVmEorTNw"
# Same example after numeric ("shc:/") encoding: each char becomes a
# two-digit offset from SMALLEST_B64_CHAR_CODE.
sample_numeric_encoded_payload = "5676290952432060346029243740446031222959532654603460292540772804336028702864716745222809286133314564376531415906402203064504590856435503414245413640370636654171372412363803043756220467374075323239254334433260573601064137333912707426350769625364643945753638652852454535422237213876065244343160343853740855723835636810685712126967072534581208054242090543775073110024063411383336384168693404326438364025053330552640380527553266755975446326292974413411056375657045663941260836750066626744127412650663127253774252117654553004036442077072245263447552396237296363611221586172000544735026415257102476406874090854520923402064454566057720605333353934523368773871546530776725763450342565270512452950667144696836651240677707414450263141560604352333532003736845240800330361202740101109546920397733456645083937637609203027360726634458682836233328113628267258713820556229113823256320740622123930215842537423572004420710042656314532122903217620424036426535537233424468614545526029333777375400597640290855673469692837506528526454704235317710211074046236075568056803204261355358593854710965683963206060613074620371206276526908647361650966596729532435110866774371422326305965330806350309262568296071073576416838572162753826256111390939696044072526303708654339526630082969367063352624652758581035115720282541316556345038742028531057577664595060035950356103263224575274772852380524117676306959213045542735064574412725452105296452767569230552230407054459645772060333605629433612433458266759650955363961412506222210365642303659005505652370403040685523625756656041735607587042293709424506646233590554552026245328442240411032560021087508543027736505634128076253450922743327033912616606455705636141737439260957730567605771732564092369322610685243405706235524716655736168555859204110096054703057296325743307050338065932636944093409395007271232395239572967555621340812393377387667724370357625555064570306410670504157731153010937290945257435376870415523437024405223596237660372066530220454382258331044763532047171566835776037335324623255734037696245065352242275686423765336736726304164246669393374"

# '-' is the smallest character code used in base64url; numeric encoding
# offsets every character from it.
SMALLEST_B64_CHAR_CODE = ord('-')
SMART_HEALTH_CARD_PREFIX = 'shc:/'


# https://stackoverflow.com/a/1089787
def deflate(data, compresslevel=9):
    """Raw-DEFLATE *data* (bytes) with no zlib/gzip header, per SHC spec."""
    compress = zlib.compressobj(
        compresslevel,       # level: 0-9
        zlib.DEFLATED,       # method: must be DEFLATED
        -zlib.MAX_WBITS,     # window size in bits:
                             # -15..-8: negate, suppress header
                             # 8..15: normal
                             # 16..30: subtract 16, gzip header
        zlib.DEF_MEM_LEVEL,  # mem level: 1..8/9
        0                    # strategy:
                             # 0 = Z_DEFAULT_STRATEGY
                             # 1 = Z_FILTERED
                             # 2 = Z_HUFFMAN_ONLY
                             # 3 = Z_RLE
                             # 4 = Z_FIXED
    )
    deflated = compress.compress(data)
    deflated += compress.flush()
    return deflated


def inflate(data):
    """Inverse of deflate(): decompress raw (headerless) DEFLATE bytes."""
    # needed to add `-zlib.MAX_WBITS` here due to
    # zlib.error: Error -3 while decompressing data: incorrect header check
    decompress = zlib.decompressobj(
        -zlib.MAX_WBITS  # see above
    )
    inflated = decompress.decompress(data)
    inflated += decompress.flush()
    return inflated


def resolve_key_from_issuer():
    """Return a resolver that fetches the issuer's JWKS over HTTP and
    picks the key matching *kid*."""
    def resolve(iss, kid, algorithm):
        r = requests.get(f'{iss}/.well-known/jwks.json')
        r.raise_for_status()
        jwks = r.json()
        for key in jwks['keys']:
            if kid == key.get('kid'):
                return key
                # TODO - the following line causes an exception to occur during verficiation
                # There's a fix on master for this, but for now, it does not work
                #
                # File "/usr/local/lib/python3.7/site-packages/jose/jws.py", line 233, in _get_keys
                # if 'keys' in key:
                # TypeError: argument of type 'CryptographyECKey' is not iterable
                # return jwk.construct(key, algorithm)
        raise Exception(f'Key with kid = {kid} not found')
    return resolve


def resolve_key_from_file(jwks_filename):
    """Return a resolver that reads a local JWKS file and picks the key
    matching *kid* (same contract as resolve_key_from_issuer)."""
    def resolve(iss, kid, algorithm):
        with open(jwks_filename, 'r', newline='') as jwks_file:
            jwks = json.load(jwks_file)
            for key in jwks['keys']:
                if kid == key.get('kid'):
                    return key
                    # TODO - the following line causes an exception to occur during verficiation
                    # There's a fix on master for this, but for now, it does not work
                    #
                    # File "/usr/local/lib/python3.7/site-packages/jose/jws.py", line 233, in _get_keys
                    # if 'keys' in key:
                    # TypeError: argument of type 'CryptographyECKey' is not iterable
                    # return jwk.construct(key, algorithm)
        raise Exception(f'Key with kid = {kid} not found')
    return resolve


def load_private_key_from_file(jwks_filename, use, algorithm):
    """Return (kid, key-dict) for the first key in the JWKS file matching
    *use* and *algorithm*; raise if none matches."""
    with open(jwks_filename, 'r', newline='') as jwks_file:
        jwks = json.load(jwks_file)
        for key in jwks['keys']:
            if algorithm == key.get('alg') and use == key.get('use'):
                return (key.get('kid'), key)
                # TODO - the following line causes an exception to occur during verficiation
                # There's a fix on master for this, but for now, it does not work
                #
                # File "/usr/local/lib/python3.7/site-packages/jose/jws.py", line 233, in _get_keys
                # if 'keys' in key:
                # TypeError: argument of type 'CryptographyECKey' is not iterable
                # return jwk.construct(key, algorithm)
                # return (key.get('kid'), jose_jwk.construct(key, algorithm))
    raise Exception(f'Key with use = {use} algorithm = {algorithm} not found')


def _decode_vc(jws_raw, key_resolver):
    """Verify a signed health-card JWS and return its decompressed payload."""
    # before we can verify the vc, we first need to resolve the key
    # the key ID is stored in the header
    # Per the health cards IG,
    ## "Issuers SHALL publish keys as JSON Web Key Sets (see RFC7517), available at <<iss value from Signed JWT>> + .well-known/jwks.json"
    # therefore, we need decode the claims to get the iss value in order to resolve the key
    # The claims are compressed via Deflate, so decompress the data
    # then, extract the iss claim to get access to the base URL, use that to resolve key with id = kid
    # then, verify the jws
    unverified_headers = jws.get_unverified_headers(jws_raw)
    # we expect data to be zipped, so deflate the data
    if unverified_headers.get('zip') == 'DEF':
        unverfied_claims_zip = jws.get_unverified_claims(jws_raw)
        raw_data = inflate(unverfied_claims_zip)
        data = json.loads(raw_data)
    else:
        raise Exception('Expecting payload to be compressed')
    iss = data['iss']
    kid = unverified_headers['kid']
    key = key_resolver(iss, kid, 'ES256')
    verified_jws = jws.verify(jws_raw, key, algorithms='ES256')
    payload = json.loads(inflate(verified_jws))
    return payload


def decode_vc(jws_raw):
    """Decode a card, resolving the signing key from the issuer's URL."""
    resolver = resolve_key_from_issuer()
    return _decode_vc(jws_raw, resolver)


def decode_vc_from_local_issuer(jws_raw, jwks_file):
    """Decode a card, resolving the signing key from a local JWKS file."""
    resolver = resolve_key_from_file(jwks_file)
    return _decode_vc(jws_raw, resolver)


def encode_vc(payload, private_signing_key, kid):
    """Compress *payload* (JSON-serializable) and sign it as an ES256 JWS."""
    # Compact separators keep the payload minimal before compression.
    payload_bytes = json.dumps(payload, separators=(',', ':')).encode('utf-8')
    compressed_payload = deflate(payload_bytes)
    headers = {"kid": kid, 'zip': 'DEF'}
    return jws.sign(compressed_payload, private_signing_key, headers=headers, algorithm='ES256')


def encode_char_to_numeric(ch):
    """Encode one base64url character as its two-digit numeric-mode value."""
    numeric_value = ord(ch) - SMALLEST_B64_CHAR_CODE
    return '%02d' % (numeric_value)


def encode_to_numeric(payload):
    """Numeric-encode an entire JWS string for QR numeric mode."""
    return ''.join([encode_char_to_numeric(ch) for ch in payload])


def create_qr_code(numeric_encoded_payload):
    """Build a QR image: 'shc:/' prefix segment + numeric payload segment."""
    qr = qrcode.QRCode()
    qr.add_data(SMART_HEALTH_CARD_PREFIX)
    qr.add_data(numeric_encoded_payload)
    return qr.make_image(fill_color="black", back_color="white")
52.83908
2,103
0.758647
851
9,194
8.029377
0.297297
0.007025
0.008196
0.014049
0.215132
0.180594
0.180594
0.171521
0.171521
0.164789
0
0.302727
0.174353
9,194
173
2,104
53.144509
0.597418
0.229824
0
0.215909
0
0
0.479164
0.445029
0
1
0
0.00578
0
1
0.159091
false
0
0.056818
0.011364
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
4
5be7e3e5fb635d52ffc510b5892ea90ed38f68fa
151
py
Python
configs/example/spectre_benchmark.py
Yujie-Cui/cleanupspec
5bdfc551e27e673b170c15b3328559736f3d7848
[ "BSD-3-Clause" ]
9
2019-12-23T06:06:22.000Z
2021-05-03T21:33:43.000Z
configs/example/spectre_benchmark.py
limengming/cleanupspec
5426a0e2e3ce5c00edcb9734617dacf26ab18ef5
[ "BSD-3-Clause" ]
1
2020-11-12T06:23:36.000Z
2020-11-12T06:23:36.000Z
configs/example/spectre_benchmark.py
limengming/cleanupspec
5426a0e2e3ce5c00edcb9734617dacf26ab18ef5
[ "BSD-3-Clause" ]
9
2020-08-20T10:08:13.000Z
2022-03-20T13:00:38.000Z
import m5
from m5.objects import *

#Spectre
# Configure a gem5 Process that runs the 'spectre' binary.
spectre = Process() # Added by Gururaj
spectre.executable = 'spectre'
# Command line: just the executable itself, no arguments.
spectre.cmd = [spectre.executable]
16.777778
38
0.748344
19
151
5.947368
0.578947
0.247788
0
0
0
0
0
0
0
0
0
0.015504
0.145695
151
8
39
18.875
0.860465
0.15894
0
0
0
0
0.056
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
f33d3bd029f3e85a817ccb3908d36449089ade03
691
py
Python
pyning/tail_recursion/fibonacci_test.py
rkoyanagui/pyning
3e8905e240d2554f217f168c48c9edeba8658dec
[ "Apache-2.0" ]
null
null
null
pyning/tail_recursion/fibonacci_test.py
rkoyanagui/pyning
3e8905e240d2554f217f168c48c9edeba8658dec
[ "Apache-2.0" ]
null
null
null
pyning/tail_recursion/fibonacci_test.py
rkoyanagui/pyning
3e8905e240d2554f217f168c48c9edeba8658dec
[ "Apache-2.0" ]
null
null
null
import unittest

from pyning.tail_recursion.fibonacci import naive_fib
from pyning.utils.testutils import BaseTest


class FibonacciTailRecursionTest(BaseTest):
    """Checks naive_fib against the first few Fibonacci numbers.

    Each test delegates to BaseTest.check with the function under test
    (f), the expected result (xr) and the input (n).
    """

    def test_zero(self):
        self.check(f=naive_fib, xr=0, n=0)

    def test_one(self):
        self.check(f=naive_fib, xr=1, n=1)

    def test_two(self):
        self.check(f=naive_fib, xr=1, n=2)

    def test_three(self):
        self.check(f=naive_fib, xr=2, n=3)

    def test_four(self):
        self.check(f=naive_fib, xr=3, n=4)

    def test_five(self):
        self.check(f=naive_fib, xr=5, n=5)

    def test_six(self):
        self.check(f=naive_fib, xr=8, n=6)


if __name__ == '__main__':
    unittest.main()
20.939394
53
0.646889
115
691
3.678261
0.33913
0.1513
0.21513
0.231678
0.406619
0.406619
0.406619
0.122931
0.122931
0
0
0.025974
0.219971
691
32
54
21.59375
0.758813
0
0
0
0
0
0.011577
0
0
0
0
0
0
1
0.35
false
0
0.15
0
0.55
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
f3588062eb6aef89f6985ff4837093034c99ff99
156
py
Python
GwasJP/utils/__init__.py
2waybene/GwasJP
ddd54b276655baa79556b5f10d7959099a2e3a0b
[ "BSD-3-Clause" ]
null
null
null
GwasJP/utils/__init__.py
2waybene/GwasJP
ddd54b276655baa79556b5f10d7959099a2e3a0b
[ "BSD-3-Clause" ]
null
null
null
GwasJP/utils/__init__.py
2waybene/GwasJP
ddd54b276655baa79556b5f10d7959099a2e3a0b
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """This is utility folder that contains useful functions""" from . import statFittings # from .model_eval_cv_genotyped import *
17.333333
59
0.711538
20
156
5.4
0.9
0
0
0
0
0
0
0
0
0
0
0.007692
0.166667
156
8
60
19.5
0.823077
0.737179
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
f35db14649e34fcd913939f8f437aec72367b212
344
py
Python
Python/kata/bankocr.py
caichinger/BankOCR-Outside-in-Kata
0296cc64d2559464300d2eb996bae41a5e13e26b
[ "BSD-3-Clause" ]
2
2021-04-26T19:21:48.000Z
2021-06-05T15:48:08.000Z
Python/kata/bankocr.py
caichinger/BankOCR-Outside-in-Kata
0296cc64d2559464300d2eb996bae41a5e13e26b
[ "BSD-3-Clause" ]
1
2021-01-21T19:50:21.000Z
2021-01-21T21:01:07.000Z
Python/kata/bankocr.py
caichinger/BankOCR-Outside-in-Kata
0296cc64d2559464300d2eb996bae41a5e13e26b
[ "BSD-3-Clause" ]
3
2020-09-19T07:42:26.000Z
2021-01-20T18:08:28.000Z
# coding=utf-8 from kata.accountnumber import AccountNumber class BankOcr(object): """Example for the outside interface of the API we need to create.""" def __init__(self): pass def parse(self, raw_lines): # TODO return an array of AccountNumber raise NotImplementedError("not implemented")
24.571429
74
0.665698
42
344
5.333333
0.857143
0
0
0
0
0
0
0
0
0
0
0.003922
0.258721
344
13
75
26.461538
0.87451
0.334302
0
0
0
0
0.07177
0
0
0
0
0.076923
0
1
0.333333
false
0.166667
0.166667
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
0
1
0
0
4
f3653c62ef4fa176cfb63a279e20fab0dda730c3
188
py
Python
tests/spec/cms/blogs/test_blogs.py
fakepop/hubspot-api-python
f04103a09f93f5c26c99991b25fa76801074f3d3
[ "Apache-2.0" ]
117
2020-04-06T08:22:53.000Z
2022-03-18T03:41:29.000Z
tests/spec/cms/blogs/test_blogs.py
fakepop/hubspot-api-python
f04103a09f93f5c26c99991b25fa76801074f3d3
[ "Apache-2.0" ]
62
2020-04-06T16:21:06.000Z
2022-03-17T16:50:44.000Z
tests/spec/cms/blogs/test_blogs.py
fakepop/hubspot-api-python
f04103a09f93f5c26c99991b25fa76801074f3d3
[ "Apache-2.0" ]
45
2020-04-06T16:13:52.000Z
2022-03-30T21:33:17.000Z
from hubspot import HubSpot from hubspot.discovery.cms.blogs.discovery import Discovery def test_is_discoverable(): apis = HubSpot().cms assert isinstance(apis.blogs, Discovery)
23.5
59
0.781915
24
188
6.041667
0.541667
0.151724
0
0
0
0
0
0
0
0
0
0
0.138298
188
7
60
26.857143
0.895062
0
0
0
0
0
0
0
0
0
0
0
0.2
1
0.2
false
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f377fe060e7eef928a73ab32445d3e0a5afd5056
4,096
py
Python
testproject/testapp/tests/test_password_reset.py
d1opensource/djoser
ebdd2f25f84df22891372afb53cc6b956917d1ba
[ "MIT" ]
null
null
null
testproject/testapp/tests/test_password_reset.py
d1opensource/djoser
ebdd2f25f84df22891372afb53cc6b956917d1ba
[ "MIT" ]
null
null
null
testproject/testapp/tests/test_password_reset.py
d1opensource/djoser
ebdd2f25f84df22891372afb53cc6b956917d1ba
[ "MIT" ]
null
null
null
from django.conf import settings from django.contrib.sites.shortcuts import get_current_site from django.core import mail from django.test.utils import override_settings from djet import assertions, restframework from rest_framework import status import djoser.views from djoser.compat import get_user_email from djoser.conf import settings as default_settings from .common import create_user, mock from testapp.models import CustomUser class PasswordResetViewTest( restframework.APIViewTestCase, assertions.StatusCodeAssertionsMixin, assertions.EmailAssertionsMixin, ): view_class = djoser.views.PasswordResetView def test_post_should_send_email_to_user_with_password_reset_link(self): user = create_user() data = {"email": user.email} request = self.factory.post(data=data) response = self.view(request) self.assert_status_equal(response, status.HTTP_204_NO_CONTENT) self.assert_emails_in_mailbox(1) self.assert_email_exists(to=[user.email]) site = get_current_site(request) self.assertIn(site.domain, mail.outbox[0].body) self.assertIn(site.name, mail.outbox[0].body) def test_post_send_email_to_user_with_request_domain_and_site_name(self): user = create_user() data = {"email": user.email} request = self.factory.post(data=data) self.view(request) self.assertIn(request.get_host(), mail.outbox[0].body) def test_post_should_not_send_email_to_user_if_user_does_not_exist(self): data = {"email": "john@beatles.com"} request = self.factory.post(data=data) response = self.view(request) self.assert_status_equal(response, status.HTTP_204_NO_CONTENT) self.assert_emails_in_mailbox(0) def test_post_should_return_no_content_if_user_does_not_exist(self): data = {"email": "john@beatles.com"} request = self.factory.post(data=data) response = self.view(request) self.assert_status_equal(response, status.HTTP_204_NO_CONTENT) @override_settings( DJOSER=dict(settings.DJOSER, **{"PASSWORD_RESET_SHOW_EMAIL_NOT_FOUND": True}) ) def test_post_should_return_bad_request_if_user_does_not_exist(self): data = 
{"email": "john@beatles.com"} request = self.factory.post(data=data) response = self.view(request) self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.data["email"][0], default_settings.CONSTANTS.messages.EMAIL_NOT_FOUND, ) @mock.patch("djoser.serializers.User", CustomUser) @mock.patch("djoser.views.User", CustomUser) @override_settings(AUTH_USER_MODEL="testapp.CustomUser") def test_post_should_send_email_to_custom_user_with_password_reset_link(self): user = create_user(use_custom_data=True) data = {"custom_email": get_user_email(user)} request = self.factory.post(data=data) response = self.view(request) self.assert_status_equal(response, status.HTTP_204_NO_CONTENT) self.assert_emails_in_mailbox(1) self.assert_email_exists(to=[get_user_email(user)]) site = get_current_site(request) self.assertIn(site.domain, mail.outbox[0].body) self.assertIn(site.name, mail.outbox[0].body) @mock.patch("djoser.serializers.User", CustomUser) @mock.patch("djoser.views.User", CustomUser) @override_settings( AUTH_USER_MODEL="testapp.CustomUser", DJOSER=dict(settings.DJOSER, **{"PASSWORD_RESET_SHOW_EMAIL_NOT_FOUND": True}), ) def test_post_should_return_bad_request_with_custom_email_field_if_user_does_not_exist( self ): data = {"custom_email": "john@beatles.com"} request = self.factory.post(data=data) response = self.view(request) self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.data["custom_email"][0], default_settings.CONSTANTS.messages.EMAIL_NOT_FOUND, )
36.247788
91
0.7146
529
4,096
5.21172
0.187146
0.071817
0.027929
0.055858
0.727602
0.712369
0.712369
0.669206
0.669206
0.607544
0
0.008406
0.186768
4,096
112
92
36.571429
0.819273
0
0
0.522727
0
0
0.077148
0.02832
0
0
0
0
0.238636
1
0.079545
false
0.068182
0.125
0
0.227273
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
f38cd9bf72cf20ce46ec9a0d171610abb4fc724e
544
py
Python
main.py
traduttore/traduttore-model
c7c2f3eba11226a32bad547c4f89186afab676da
[ "MIT" ]
null
null
null
main.py
traduttore/traduttore-model
c7c2f3eba11226a32bad547c4f89186afab676da
[ "MIT" ]
null
null
null
main.py
traduttore/traduttore-model
c7c2f3eba11226a32bad547c4f89186afab676da
[ "MIT" ]
null
null
null
from run_translation.TestModelComputer import asl_translation from run_translation.TextToSpeech import tts from run_translation.RunPiModelStream import rasp_translation # from run_translation.RunPiModelTesting import rasp_translation from run_translation.TestModelComputerLetters import asl_translation_letters # from run_translation.SpeechToText import stt if __name__ == "__main__": # sentence = rasp_translation() sentence = asl_translation(CAM_ID=1) # rasp_sentence = rasp_translation([]) # tts(sentence) # print(stt())
41.846154
76
0.816176
61
544
6.885246
0.377049
0.1
0.257143
0.207143
0.185714
0.185714
0
0
0
0
0
0.002088
0.119485
544
13
77
41.846154
0.874739
0.369485
0
0
0
0
0.023739
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f38fbb026f37e5802966766250eb1e180269abf5
446
py
Python
terrascript/data/logicmonitor.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
507
2017-07-26T02:58:38.000Z
2022-01-21T12:35:13.000Z
terrascript/data/logicmonitor.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
135
2017-07-20T12:01:59.000Z
2021-10-04T22:25:40.000Z
terrascript/data/logicmonitor.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
81
2018-02-20T17:55:28.000Z
2022-01-31T07:08:40.000Z
# terrascript/data/logicmonitor.py import terrascript class logicmonitor_collectors(terrascript.Data): pass class logicmonitor_dashboard(terrascript.Data): pass class logicmonitor_dashboard_group(terrascript.Data): pass class logicmonitor_device_group(terrascript.Data): pass __all__ = [ "logicmonitor_collectors", "logicmonitor_dashboard", "logicmonitor_dashboard_group", "logicmonitor_device_group", ]
16.518519
53
0.773543
43
446
7.651163
0.27907
0.227964
0.231003
0.218845
0.382979
0.273556
0
0
0
0
0
0
0.147982
446
26
54
17.153846
0.865789
0.071749
0
0.266667
0
0
0.237864
0.237864
0
0
0
0
0
1
0
false
0.266667
0.066667
0
0.333333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
f3bfdc81eb1e2339210b6f5811b1e4a0893f03f6
218
py
Python
flask_resultful_plugin/error.py
PushyZqin/flask-restful-plugin
7a142de96500910f5f7648d5edf8986afaa72b70
[ "MIT" ]
2
2018-11-28T13:49:18.000Z
2018-11-29T11:13:40.000Z
flask_resultful_plugin/error.py
PushyZqin/flask-restful-plugin
7a142de96500910f5f7648d5edf8986afaa72b70
[ "MIT" ]
null
null
null
flask_resultful_plugin/error.py
PushyZqin/flask-restful-plugin
7a142de96500910f5f7648d5edf8986afaa72b70
[ "MIT" ]
null
null
null
# encoding:utf-8 # 401 错误 class UnauthorizedError(Exception): pass # 400 错误 class BadRequestError(Exception): pass class MediaTypeError(Exception): pass # 父异常 class RestfulException(Exception): pass
13.625
35
0.729358
24
218
6.625
0.583333
0.327044
0
0
0
0
0
0
0
0
0
0.039548
0.188073
218
16
36
13.625
0.858757
0.146789
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
45ff2ecb9b4c51dd290f8df481731ee3dacbf736
67
py
Python
localized_fields/__init__.py
GabLeRoux/django-localized-fields
f0ac0f7f2503317fde5d75ba8481e34db83512bd
[ "MIT" ]
null
null
null
localized_fields/__init__.py
GabLeRoux/django-localized-fields
f0ac0f7f2503317fde5d75ba8481e34db83512bd
[ "MIT" ]
null
null
null
localized_fields/__init__.py
GabLeRoux/django-localized-fields
f0ac0f7f2503317fde5d75ba8481e34db83512bd
[ "MIT" ]
null
null
null
default_app_config = 'localized_fields.apps.LocalizedFieldsConfig'
33.5
66
0.880597
7
67
8
1
0
0
0
0
0
0
0
0
0
0
0
0.044776
67
1
67
67
0.875
0
0
0
0
0
0.641791
0.641791
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
3401e7d0ecf41d08ab5511f500baa59628ea1b4d
336
py
Python
2409.py
ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python
9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da
[ "MIT" ]
1
2022-01-14T08:45:32.000Z
2022-01-14T08:45:32.000Z
2409.py
ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python
9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da
[ "MIT" ]
null
null
null
2409.py
ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python
9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da
[ "MIT" ]
null
null
null
a, b, c = map(int, input().split()) h, l = map(int, input().split()) if a <= h and b <= l: print("S") elif a <= h and c <= l: print("S") elif b <= h and a <= l: print("S") elif b <= h and c <= l: print("S") elif c <= h and a <= l: print("S") elif c <= h and b <= l: print("S") else: print("N")
21
36
0.4375
64
336
2.296875
0.25
0.163265
0.285714
0.37415
0.62585
0.62585
0.489796
0
0
0
0
0
0.324405
336
16
37
21
0.647577
0
0
0.375
0
0
0.021739
0
0
0
0
0
0
1
0
true
0
0
0
0
0.4375
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
3405aae6a2b0713fec880df0831625e4b08bb01c
8,578
py
Python
naoqi-sdk-2.5.5.5-linux64/lib/python2.7/site-packages/ialbehavior.py
applejenny66/docker_pepper
2469cc4db6585161a31ac44c8fcf2605d71318b1
[ "MIT" ]
null
null
null
naoqi-sdk-2.5.5.5-linux64/lib/python2.7/site-packages/ialbehavior.py
applejenny66/docker_pepper
2469cc4db6585161a31ac44c8fcf2605d71318b1
[ "MIT" ]
null
null
null
naoqi-sdk-2.5.5.5-linux64/lib/python2.7/site-packages/ialbehavior.py
applejenny66/docker_pepper
2469cc4db6585161a31ac44c8fcf2605d71318b1
[ "MIT" ]
null
null
null
# This file was automatically generated by SWIG (http://www.swig.org). # Version 2.0.11 # # Do not make changes to this file unless you know what you are doing--modify # the SWIG interface file instead. from sys import version_info if version_info >= (2,6,0): def swig_import_helper(): from os.path import dirname import imp fp = None try: fp, pathname, description = imp.find_module('_ialbehavior', [dirname(__file__)]) except ImportError: import _ialbehavior return _ialbehavior if fp is not None: try: _mod = imp.load_module('_ialbehavior', fp, pathname, description) finally: fp.close() return _mod _ialbehavior = swig_import_helper() del swig_import_helper else: import _ialbehavior del version_info try: _swig_property = property except NameError: pass # Python < 2.2 doesn't have 'property'. def _swig_setattr_nondynamic(self,class_type,name,value,static=1): if (name == "thisown"): return self.this.own(value) if (name == "this"): if type(value).__name__ == 'SwigPyObject': self.__dict__[name] = value return method = class_type.__swig_setmethods__.get(name,None) if method: return method(self,value) if (not static): self.__dict__[name] = value else: raise AttributeError("You cannot add attributes to %s" % self) def _swig_setattr(self,class_type,name,value): return _swig_setattr_nondynamic(self,class_type,name,value,0) def _swig_getattr(self,class_type,name): if (name == "thisown"): return self.this.own() method = class_type.__swig_getmethods__.get(name,None) if method: return method(self) raise AttributeError(name) def _swig_repr(self): try: strthis = "proxy of " + self.this.__repr__() except: strthis = "" return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) try: _object = object _newclass = 1 except AttributeError: class _object : pass _newclass = 0 class SwigPyIterator(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value) __swig_getmethods__ = {} __getattr__ = lambda 
self, name: _swig_getattr(self, SwigPyIterator, name) def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr __swig_destroy__ = _ialbehavior.delete_SwigPyIterator __del__ = lambda self : None; def value(self): return _ialbehavior.SwigPyIterator_value(self) def incr(self, n=1): return _ialbehavior.SwigPyIterator_incr(self, n) def decr(self, n=1): return _ialbehavior.SwigPyIterator_decr(self, n) def distance(self, *args): return _ialbehavior.SwigPyIterator_distance(self, *args) def equal(self, *args): return _ialbehavior.SwigPyIterator_equal(self, *args) def copy(self): return _ialbehavior.SwigPyIterator_copy(self) def next(self): return _ialbehavior.SwigPyIterator_next(self) def __next__(self): return _ialbehavior.SwigPyIterator___next__(self) def previous(self): return _ialbehavior.SwigPyIterator_previous(self) def advance(self, *args): return _ialbehavior.SwigPyIterator_advance(self, *args) def __eq__(self, *args): return _ialbehavior.SwigPyIterator___eq__(self, *args) def __ne__(self, *args): return _ialbehavior.SwigPyIterator___ne__(self, *args) def __iadd__(self, *args): return _ialbehavior.SwigPyIterator___iadd__(self, *args) def __isub__(self, *args): return _ialbehavior.SwigPyIterator___isub__(self, *args) def __add__(self, *args): return _ialbehavior.SwigPyIterator___add__(self, *args) def __sub__(self, *args): return _ialbehavior.SwigPyIterator___sub__(self, *args) def __iter__(self): return self SwigPyIterator_swigregister = _ialbehavior.SwigPyIterator_swigregister SwigPyIterator_swigregister(SwigPyIterator) import inaoqi class behavior(inaoqi.baseModule): __swig_setmethods__ = {} for _s in [inaoqi.baseModule]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{})) __setattr__ = lambda self, name, value: _swig_setattr(self, behavior, name, value) __swig_getmethods__ = {} for _s in [inaoqi.baseModule]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{})) 
__getattr__ = lambda self, name: _swig_getattr(self, behavior, name) __repr__ = _swig_repr def __init__(self, *args): this = _ialbehavior.new_behavior(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _ialbehavior.delete_behavior __del__ = lambda self : None; def autoBind(self, *args): return _ialbehavior.behavior_autoBind(self, *args) def setEnabled(self, *args): return _ialbehavior.behavior_setEnabled(self, *args) def isEnabled(self): return _ialbehavior.behavior_isEnabled(self) def setResources(self, *args): return _ialbehavior.behavior_setResources(self, *args) def waitFor(self, *args): return _ialbehavior.behavior_waitFor(self, *args) def session(self): return _ialbehavior.behavior_session(self) def acquireResources(self): return _ialbehavior.behavior_acquireResources(self) def waitResourcesCallback(self, *args): return _ialbehavior.behavior_waitResourcesCallback(self, *args) def isResourceFree(self, *args): return _ialbehavior.behavior_isResourceFree(self, *args) def waitResourceFree(self): return _ialbehavior.behavior_waitResourceFree(self) def waitResources(self): return _ialbehavior.behavior_waitResources(self) def releaseResource(self): return _ialbehavior.behavior_releaseResource(self) def addInput(self, *args): return _ialbehavior.behavior_addInput(self, *args) def addOutput(self, *args): return _ialbehavior.behavior_addOutput(self, *args) def addParameter(self, *args): return _ialbehavior.behavior_addParameter(self, *args) def getParameter(self, *args): return _ialbehavior.behavior_getParameter(self, *args) def getParametersList(self): return _ialbehavior.behavior_getParametersList(self) def setParameter(self, *args): return _ialbehavior.behavior_setParameter(self, *args) def setParentFromName(self, *args): return _ialbehavior.behavior_setParentFromName(self, *args) def stimulateIO(self, *args): return _ialbehavior.behavior_stimulateIO(self, *args) def exit(self): return _ialbehavior.behavior_exit(self) def 
getBrokerName(self): return _ialbehavior.behavior_getBrokerName(self) def version(self): return _ialbehavior.behavior_version(self) def hasTimeline(self): return _ialbehavior.behavior_hasTimeline(self) def getTimeline(self): return _ialbehavior.behavior_getTimeline(self) def hasParentTimeline(self): return _ialbehavior.behavior_hasParentTimeline(self) def getParentTimeline(self): return _ialbehavior.behavior_getParentTimeline(self) def connectInput(self, *args): return _ialbehavior.behavior_connectInput(self, *args) def connectParameter(self, *args): return _ialbehavior.behavior_connectParameter(self, *args) def connectOutput(self, *args): return _ialbehavior.behavior_connectOutput(self, *args) def _reportError(self, *args): return _ialbehavior.behavior__reportError(self, *args) behavior_swigregister = _ialbehavior.behavior_swigregister behavior_swigregister(behavior) class timeline(_object): __swig_setmethods__ = {} __setattr__ = lambda self, name, value: _swig_setattr(self, timeline, name, value) __swig_getmethods__ = {} __getattr__ = lambda self, name: _swig_getattr(self, timeline, name) __repr__ = _swig_repr def __init__(self, *args): this = _ialbehavior.new_timeline(*args) try: self.this.append(this) except: self.this = this __swig_destroy__ = _ialbehavior.delete_timeline __del__ = lambda self : None; def play(self): return _ialbehavior.timeline_play(self) def pause(self): return _ialbehavior.timeline_pause(self) def stop(self): return _ialbehavior.timeline_stop(self) def goTo(self, *args): return _ialbehavior.timeline_goTo(self, *args) def getSize(self): return _ialbehavior.timeline_getSize(self) def getFPS(self): return _ialbehavior.timeline_getFPS(self) def setFPS(self, *args): return _ialbehavior.timeline_setFPS(self, *args) timeline_swigregister = _ialbehavior.timeline_swigregister timeline_swigregister(timeline) # This file is compatible with both classic and new-style classes.
49.017143
107
0.742597
996
8,578
5.963855
0.176707
0.079461
0.130471
0.117845
0.352525
0.174074
0.152862
0.142761
0.116498
0.108081
0
0.002078
0.158429
8,578
174
108
49.298851
0.820751
0.03439
0
0.184211
1
0
0.022483
0
0
0
0
0
0
1
0.414474
false
0.013158
0.065789
0.368421
0.677632
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
34210500bd30a4f38c9990538692f030d9f6352e
57
py
Python
card_detection_module/nanodet/__init__.py
nhatnxn/ID_Passport-OCR
78322ec2b9648d0b027326dced7c4aec967bcab3
[ "MIT" ]
1
2021-06-30T11:01:25.000Z
2021-06-30T11:01:25.000Z
card_detection_module/nanodet/__init__.py
nhatnxn/ID_Passport-OCR
78322ec2b9648d0b027326dced7c4aec967bcab3
[ "MIT" ]
null
null
null
card_detection_module/nanodet/__init__.py
nhatnxn/ID_Passport-OCR
78322ec2b9648d0b027326dced7c4aec967bcab3
[ "MIT" ]
null
null
null
from .dectect import detect_card __all__ = [detect_card]
19
32
0.807018
8
57
5
0.75
0.5
0
0
0
0
0
0
0
0
0
0
0.122807
57
3
33
19
0.8
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
3444a8319c7228a4e4f05ad69676ae8d14fa03cb
271
py
Python
galleries/admin.py
Ingabineza12/gallery-app
5c28558203d68dd582b77df408cf6f21ccc01100
[ "Unlicense" ]
1
2021-08-02T01:29:38.000Z
2021-08-02T01:29:38.000Z
galleries/admin.py
Ingabineza12/gallery-app
5c28558203d68dd582b77df408cf6f21ccc01100
[ "Unlicense" ]
null
null
null
galleries/admin.py
Ingabineza12/gallery-app
5c28558203d68dd582b77df408cf6f21ccc01100
[ "Unlicense" ]
null
null
null
from django.contrib import admin # Register your models here. from .models import Photographer,Location,Image,Category # Register your models here. admin.site.register(Photographer) admin.site.register(Location) admin.site.register(Image) admin.site.register(Category)
24.636364
56
0.819188
36
271
6.166667
0.388889
0.162162
0.306306
0.198198
0
0
0
0
0
0
0
0
0.088561
271
10
57
27.1
0.898785
0.195572
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
344fe3a1bf94dc0bbf294fb4e913b4735993b476
1,318
py
Python
autorest/multiapi/models/config.py
qwordy/autorest.python
6b12df51c2a39a1285546b5a771b69f5896e794f
[ "MIT" ]
null
null
null
autorest/multiapi/models/config.py
qwordy/autorest.python
6b12df51c2a39a1285546b5a771b69f5896e794f
[ "MIT" ]
null
null
null
autorest/multiapi/models/config.py
qwordy/autorest.python
6b12df51c2a39a1285546b5a771b69f5896e794f
[ "MIT" ]
null
null
null
# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- import json from typing import Any, Dict from .imports import FileImport class Config: def __init__(self, default_version_metadata: Dict[str, Any]): self.credential = default_version_metadata["config"]["credential"] self.credential_scopes = default_version_metadata["config"]["credential_scopes"] self.credential_default_policy_type = default_version_metadata["config"]["credential_default_policy_type"] self.credential_default_policy_type_has_async_version = ( default_version_metadata["config"]["credential_default_policy_type_has_async_version"] ) self.credential_key_header_name = default_version_metadata["config"]["credential_key_header_name"] self.default_version_metadata = default_version_metadata def imports(self, async_mode: bool) -> FileImport: imports_to_load = "async_imports" if async_mode else "sync_imports" return FileImport(json.loads(self.default_version_metadata['config'][imports_to_load]))
54.916667
114
0.679059
142
1,318
5.908451
0.380282
0.150179
0.235995
0.200238
0.376639
0.199046
0.199046
0.131108
0
0
0
0
0.132777
1,318
23
115
57.304348
0.734033
0.226859
0
0
0
0
0.189536
0.102665
0
0
0
0
0
1
0.125
false
0
0.375
0
0.625
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
346219eff16b54d9f3ede1460d4a16bec732ce9e
170
py
Python
TestMain/cool.py
ppcrong/TestMain
38fa0d64439a7d02d2806be3b09043a4294912de
[ "Apache-2.0" ]
null
null
null
TestMain/cool.py
ppcrong/TestMain
38fa0d64439a7d02d2806be3b09043a4294912de
[ "Apache-2.0" ]
null
null
null
TestMain/cool.py
ppcrong/TestMain
38fa0d64439a7d02d2806be3b09043a4294912de
[ "Apache-2.0" ]
null
null
null
# cool.py def cool_func(): print('cool_func(): Super Cool!') print('__name__:', __name__) if __name__ == '__main__': print('Call it locally') cool_func()
14.166667
37
0.629412
22
170
4
0.545455
0.272727
0
0
0
0
0
0
0
0
0
0
0.194118
170
11
38
15.454545
0.642336
0.041176
0
0
0
0
0.347826
0
0
0
0
0
0
1
0.166667
true
0
0
0
0.166667
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
3463bda8bf81306d7b5d5fb016561e09a511a66d
95
py
Python
superlists/apps.py
cidyoon/django-blog
9bbe90de935e694a5aafb19df1f4c0c87584418c
[ "MIT" ]
null
null
null
superlists/apps.py
cidyoon/django-blog
9bbe90de935e694a5aafb19df1f4c0c87584418c
[ "MIT" ]
null
null
null
superlists/apps.py
cidyoon/django-blog
9bbe90de935e694a5aafb19df1f4c0c87584418c
[ "MIT" ]
null
null
null
from django.apps import AppConfig class SuperlistsConfig(AppConfig): name = 'superlists'
15.833333
34
0.768421
10
95
7.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.157895
95
5
35
19
0.9125
0
0
0
0
0
0.105263
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
346d8c6aecb6d5c799c07698354cb687ab52ff19
216
py
Python
function/python/brightics/function/statistics/__init__.py
janrenz/studio
a0714ed8dcd9dcd8d024162104d3b4de89ac2b49
[ "Apache-2.0" ]
null
null
null
function/python/brightics/function/statistics/__init__.py
janrenz/studio
a0714ed8dcd9dcd8d024162104d3b4de89ac2b49
[ "Apache-2.0" ]
null
null
null
function/python/brightics/function/statistics/__init__.py
janrenz/studio
a0714ed8dcd9dcd8d024162104d3b4de89ac2b49
[ "Apache-2.0" ]
null
null
null
from .profile_table import profile_table from .correlation import correlation from .pairplot import pairplot from .anova import bartletts_test from .anova import oneway_anova from .anova import tukeys_range_test
36
41
0.842593
30
216
5.866667
0.4
0.153409
0.255682
0
0
0
0
0
0
0
0
0
0.12963
216
6
42
36
0.93617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
caa3dd046c62cf7f7f653e5201f7bf879ec9032d
164
py
Python
model_wrappers/errors.py
SelfHacked/django-model-wrappers
2aa0bb773d61c6b5c794126b1dc4f06d622ff079
[ "MIT" ]
null
null
null
model_wrappers/errors.py
SelfHacked/django-model-wrappers
2aa0bb773d61c6b5c794126b1dc4f06d622ff079
[ "MIT" ]
null
null
null
model_wrappers/errors.py
SelfHacked/django-model-wrappers
2aa0bb773d61c6b5c794126b1dc4f06d622ff079
[ "MIT" ]
1
2021-06-03T12:04:22.000Z
2021-06-03T12:04:22.000Z
class FieldDoesNotExist(Exception): def __init__(self, **kwargs): super().__init__(f"{self.__class__.__name__}: {kwargs}") self.kwargs = kwargs
32.8
64
0.664634
17
164
5.470588
0.588235
0.215054
0
0
0
0
0
0
0
0
0
0
0.182927
164
4
65
41
0.69403
0
0
0
0
0
0.213415
0.158537
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
cabbf72f74049f95a62204023ec60f67c663a913
1,130
py
Python
exercises/exercise7.py
AsBeeb/DistributedExercisesAAU
b84343d5d5b86ccb4750d47a8594a428ecbd83ce
[ "MIT" ]
4
2021-09-16T12:52:04.000Z
2022-01-09T15:44:49.000Z
exercises/exercise7.py
AsBeeb/DistributedExercisesAAU
b84343d5d5b86ccb4750d47a8594a428ecbd83ce
[ "MIT" ]
null
null
null
exercises/exercise7.py
AsBeeb/DistributedExercisesAAU
b84343d5d5b86ccb4750d47a8594a428ecbd83ce
[ "MIT" ]
21
2021-09-06T09:39:18.000Z
2022-03-08T12:18:23.000Z
import math import random import threading import time from emulators.Medium import Medium from emulators.Device import Device from emulators.MessageStub import MessageStub class Vote(MessageStub): def __init__(self, sender: int, destination: int, vote: int, decided: bool): super().__init__(sender, destination) self._vote = vote self._decided = decided def vote(self): return self._vote def decided(self): return self._decided def __str__(self): return f'Vote: {self.source} -> {self.destination}, voted for {self._vote}, decided? {self._decided}' class Bully(Device): def __init__(self, index: int, number_of_devices: int, medium: Medium): super().__init__(index, number_of_devices, medium) self._leader = None self._shut_up = False self._election = False def largest(self): return self.index() == max(self.medium().ids()) def run(self): """TODO""" def start_election(self): """TODO""" def print_result(self): print(f'Leader seen from {self._id} is {self._leader}')
24.565217
109
0.652212
140
1,130
4.992857
0.35
0.057225
0.060086
0
0
0
0
0
0
0
0
0
0.232743
1,130
46
110
24.565217
0.806228
0.007965
0
0
0
0.033333
0.122412
0
0
0
0
0.021739
0
1
0.3
false
0
0.233333
0.133333
0.733333
0.066667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
1
1
0
0
4
cac1340a7afa2d15888e2ebc2d6dd2802d2bd8de
85
py
Python
basics_data_structure/queue.py
corenel/algorithm-exercises
f3f31f709e289e590c98247c019d36fc9cc44faf
[ "MIT" ]
null
null
null
basics_data_structure/queue.py
corenel/algorithm-exercises
f3f31f709e289e590c98247c019d36fc9cc44faf
[ "MIT" ]
null
null
null
basics_data_structure/queue.py
corenel/algorithm-exercises
f3f31f709e289e590c98247c019d36fc9cc44faf
[ "MIT" ]
null
null
null
""" Queue https://algorithm.yuanbin.me/zh-hans/basics_data_structure/queue.html """
14.166667
69
0.752941
12
85
5.166667
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.058824
85
5
70
17
0.775
0.894118
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
cae8b2ce05a3582cc0f1a01c67f2c8e7637a486b
99
py
Python
staff/__init__.py
alvienzo720/Dep_Nadine
b23688aa87ba3cfe138f9b243eed3f50a74e1486
[ "Apache-2.0" ]
1
2019-08-15T00:10:38.000Z
2019-08-15T00:10:38.000Z
staff/__init__.py
alvienzo720/Dep_Nadine
b23688aa87ba3cfe138f9b243eed3f50a74e1486
[ "Apache-2.0" ]
4
2021-03-19T16:10:13.000Z
2022-03-12T00:55:50.000Z
staff/__init__.py
alvienzo720/Dep_Nadine
b23688aa87ba3cfe138f9b243eed3f50a74e1486
[ "Apache-2.0" ]
1
2020-02-24T08:23:45.000Z
2020-02-24T08:23:45.000Z
"""The Django app which provides member tracking and billing calculation for a coworking space."""
49.5
98
0.787879
14
99
5.571429
1
0
0
0
0
0
0
0
0
0
0
0
0.141414
99
1
99
99
0.917647
0.929293
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
caf860b3185ef94fc3db805ba75414ec7fad05a1
174
py
Python
example/app/views.py
aolkin/django-bootstrap-form
5fff56f715bd9f2f29793f6a5a87baa1be25e409
[ "BSD-3-Clause" ]
324
2015-01-12T19:30:51.000Z
2022-02-11T07:13:19.000Z
example/app/views.py
caumons/django-bootstrap-form
62af3e076612a5d3b67ebc020c05a8db20e3fe62
[ "BSD-3-Clause" ]
37
2015-03-04T23:43:45.000Z
2021-10-18T16:08:52.000Z
example/app/views.py
caumons/django-bootstrap-form
62af3e076612a5d3b67ebc020c05a8db20e3fe62
[ "BSD-3-Clause" ]
132
2015-01-01T18:13:06.000Z
2022-01-10T07:06:19.000Z
from django.shortcuts import render from app.forms import ExampleForm def index(request): form = ExampleForm() return render(request, 'index.html', {'form': form})
21.75
56
0.724138
22
174
5.727273
0.636364
0
0
0
0
0
0
0
0
0
0
0
0.16092
174
7
57
24.857143
0.863014
0
0
0
0
0
0.08046
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
1b316ea404a1e338f174eb77075582820fee88dd
21
py
Python
vnpy/trader/dbHistory/historyDataFile/__init__.py
chenzj810/vnpy-stock
ca30eb309e38f9f916e9877538b98096303e0b60
[ "MIT" ]
2
2021-01-03T05:28:14.000Z
2021-01-03T05:28:19.000Z
vnpy/trader/dbHistory/historyDataFile/__init__.py
chenzj810/vnpy
ca30eb309e38f9f916e9877538b98096303e0b60
[ "MIT" ]
null
null
null
vnpy/trader/dbHistory/historyDataFile/__init__.py
chenzj810/vnpy
ca30eb309e38f9f916e9877538b98096303e0b60
[ "MIT" ]
1
2021-04-26T14:08:23.000Z
2021-04-26T14:08:23.000Z
# encoding: UTF-8
7
18
0.571429
3
21
4
1
0
0
0
0
0
0
0
0
0
0
0.066667
0.285714
21
2
19
10.5
0.733333
0.714286
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
1b3eb815d7dc1334894f133246fe3bca46ed81e2
216,073
py
Python
fixtures/webui_test.py
rombie/contrail-test
a68c71d6f282142501a7e2e889bbb232fdd82dc3
[ "Apache-2.0" ]
null
null
null
fixtures/webui_test.py
rombie/contrail-test
a68c71d6f282142501a7e2e889bbb232fdd82dc3
[ "Apache-2.0" ]
null
null
null
fixtures/webui_test.py
rombie/contrail-test
a68c71d6f282142501a7e2e889bbb232fdd82dc3
[ "Apache-2.0" ]
null
null
null
from netaddr import IPNetwork from selenium import webdriver from pyvirtualdisplay import Display from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.common.exceptions import WebDriverException import time import random import fixtures from ipam_test import * from project_test import * from util import * from vnc_api.vnc_api import * from netaddr import * from time import sleep from contrail_fixtures import * from pyvirtualdisplay import Display import inspect import policy_test_utils import threading import sys from webui_common import * class WebuiTest: def __init__(self, connections, inputs): self.proj_check_flag = 0 self.inputs = inputs self.connections = connections self.logger = self.inputs.logger self.browser = self.connections.browser self.browser_openstack = self.connections.browser_openstack self.delay = 10 self.frequency = 1 self.logger = inputs.logger self.webui_common = WebuiCommon(self) self.dash = "-" * 60 self.vnc_lib = connections.vnc_lib_fixture def _click_if_element_found(self, element_name, elements_list): for element in elements_list: if element.text == element_name: element.click() # end _click_if_element_found def create_vn_in_webui(self, fixture): result = True try: fixture.obj = fixture.quantum_fixture.get_vn_obj_if_present( fixture.vn_name, fixture.project_id) if not fixture.obj: self.logger.info("Creating VN %s using webui..." 
% (fixture.vn_name)) if not self.webui_common.click_configure_networks(): result = result and False self.webui_common.select_project(fixture.project_name) self.browser.get_screenshot_as_file( 'createVN' + self.webui_common.date_time_string() + '.png') self.webui_common.click_element( self.browser, 'btnCreateVN', 'id') self.webui_common.wait_till_ajax_done(self.browser) txtVNName = self.webui_common.find_element( self.browser, 'txtVNName', 'id') txtVNName.send_keys(fixture.vn_name) if type(fixture.vn_subnets) is list: for subnet in fixture.vn_subnets: self.webui_common.click_element( self.browser, 'btnCommonAddIpam', 'id') self.webui_common.wait_till_ajax_done(self.browser) self.webui_common.click_element( self.browser, ['ipamTuples', 'select2-choice'], ['id', 'class']) ipam_list = self.webui_common.find_element( self.browser, ['select2-drop', 'li'], ['id', 'tag'], [1]) self.webui_common.wait_till_ajax_done(self.browser) for ipam in ipam_list: ipam_text = ipam.find_element_by_tag_name( 'div').text time.sleep(2) if ipam_text.find(fixture.ipam_fq_name[2]) != -1: ipam.click() break self.browser.find_element_by_xpath( "//input[@placeholder = 'IP Block'] ").send_keys(subnet) else: self.browser.find_element_by_id('btnCommonAddIpam').click() self.browser.find_element_by_id( "select2-drop-mask").click() ipam_list = self.browser.find_element_by_id( "select2-drop").find_element_by_tag_name('ul').find_elements_by_tag_name('li') for ipam in ipam_list: ipam_text = ipam.get_attribute("innerHTML") if ipam_text == self.ipam_fq_name: ipam.click() break self.browser.find_element_by_xpath( "//input[@placeholder = 'IP Block'] ").send_keys(fixture.vn_subnets) self.browser.find_element_by_id('btnCreateVNOK').click() time.sleep(3) if not self.webui_common.check_error_msg("create VN"): raise Exception("vn creation failed") else: fixture.already_present = True self.logger.info('VN %s already exists, skipping creation ' % (fixture.vn_name)) self.logger.debug('VN %s exists, already there' % 
(fixture.vn_name)) fixture.obj = fixture.quantum_fixture.get_vn_obj_if_present( fixture.vn_name, fixture.project_id) fixture.vn_id = fixture.obj['network']['id'] fixture.vn_fq_name = ':'.join(self.vnc_lib.id_to_fq_name( fixture.obj['network']['id'])) except Exception as e: with fixture.lock: self.logger.exception( "Got exception as %s while creating %s" % (e, fixture.vn_name)) sys.exit(-1) # end create_vn_in_webui def create_dns_server_in_webui(self): ass_ipam_list = ['ipam1', 'ipam_1'] if not self.webui_common.click_configure_dns_server(): result = result and False WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('btnCreateDNSServer')).click() WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('txtDNSServerName')).send_keys('server1') WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('txtDomainName')).send_keys('domain1') self.browser.find_elements_by_class_name( 'control-group')[2].find_element_by_tag_name('i').click() options = self.browser.find_element_by_class_name( 'ui-autocomplete').find_elements_by_tag_name('li') for i in range(len(options)): if (options[i].find_element_by_tag_name('a').text == 'default-domain:dnss'): options[i].click() time.sleep(2) self.browser.find_element_by_id( 's2id_ddLoadBal').find_element_by_tag_name('a').click() rro_list = self.browser.find_element_by_id( 'select2-drop').find_elements_by_tag_name('li') rro_opt_list = [element.find_element_by_tag_name('div') for element in rro_list] for rros in rro_opt_list: rros_text = rros.text if rros_text == 'Round-Robin': rros.click() break WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('txtTimeLive')).send_keys('300') for ipam in range(len(ass_ipam_list)): self.browser.find_element_by_id( 's2id_msIPams').find_element_by_tag_name('input').click() ipam_list = self.browser.find_element_by_id( 
'select2-drop').find_element_by_class_name('select2-results').find_elements_by_tag_name('li') ipam_opt_list = [element.find_element_by_tag_name('div') for element in ipam_list] for ipams in ipam_opt_list: ipams_text = ipams.text if ipams_text == 'admin:' + ass_ipam_list[ipam]: ipams.click() break WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('btnCreateDNSServerOK')).click() if not self.webui_common.check_error_msg("create DNS"): raise Exception("DNS creation failed") # end create_dns_server_in_webui def create_dns_record_in_webui(self): if not self.webui_common.click_configure_dns_record(): result = result and False WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('btnCreateDNSRecord')).click() self.browser.find_element_by_id( 's2id_cmbRecordType').find_element_by_tag_name('a').click() type_list = self.browser.find_element_by_id( 'select2-drop').find_elements_by_tag_name('li') type_opt_list = [element.find_element_by_tag_name('div') for element in type_list] for types in type_opt_list: types_text = types.text if types_text == 'NS (Delegation Record)': types.click() if types_text == 'CNAME (Alias Record)': self.browser.find_element_by_id( 'txtRecordName').send_keys('abc') self.browser.find_element_by_id( 'txtRecordData').send_keys('bcd') if types_text == 'A (IP Address Record)': self.browser.find_element_by_id( 'txtRecordName').send_keys('abc') self.browser.find_element_by_id( 'txtRecordData').send_keys('189.32.3.2/21') if types_text == 'PTR (Reverse DNS Record)': self.browser.find_element_by_id( 'txtRecordName').send_keys('187.23.2.1/27') self.browser.find_element_by_id( 'txtRecordData').send_keys('bcd') if types_text == 'NS (Delegation Record)': self.browser.find_element_by_id( 'txtRecordName').send_keys('abc') self.browser.find_elements_by_class_name( 'control-group')[2].find_element_by_tag_name('i').click() dns_servers = self.browser.find_element_by_class_name( 
'ui-autocomplete').find_elements_by_tag_name('li') for servers in range(len(dns_servers)): if dns_servers[servers].find_element_by_tag_name('a').text == 'default-domain:' + 'dns2': dns_servers[servers].find_element_by_tag_name( 'a').click() break break self.browser.find_element_by_id( 's2id_cmbRecordClass').find_element_by_tag_name('a').click() class_list = self.browser.find_element_by_id( 'select2-drop').find_elements_by_tag_name('li') class_opt_list = [element.find_element_by_tag_name('div') for element in class_list] for classes in class_opt_list: classes_text = classes.text if classes_text == 'IN (Internet)': classes.click() break self.browser.find_element_by_id('txtRecordTTL').send_keys('300') self.browser.find_element_by_id('btnAddDNSRecordOk').click() if not self.webui_common.check_error_msg("create DNS Record"): raise Exception("DNS Record creation failed") # end create_dns_record_in_webui def create_svc_template_in_webui(self, fixture): result = True if not self.webui_common.click_configure_service_template(): result = result and False self.logger.info("Creating svc template %s using webui" % (fixture.st_name)) self.webui_common.click_element( self.browser, 'btnCreatesvcTemplate', 'id') self.webui_common.wait_till_ajax_done(self.browser) txt_temp_name = WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('txtTempName')) txt_temp_name.send_keys(fixture.st_name) self.browser.find_element_by_id( 's2id_ddserMode').find_element_by_class_name('select2-choice').click() service_mode_list = self.browser.find_element_by_id( "select2-drop").find_elements_by_tag_name('li') for service_mode in service_mode_list: service_mode_text = service_mode.text if service_mode_text.lower() == fixture.svc_mode: service_mode.click() break self.browser.find_element_by_id( 's2id_ddserType').find_element_by_class_name('select2-choice').click() service_type_list = self.browser.find_element_by_id( "select2-drop").find_elements_by_tag_name('li') for service_type 
in service_type_list: service_type_text = service_type.text if service_type_text.lower() == fixture.svc_type: service_type.click() break self.browser.find_element_by_id( 's2id_ddImageName').find_element_by_class_name('select2-choice').click() image_name_list = self.browser.find_element_by_id( "select2-drop").find_elements_by_tag_name('li') for image_name in image_name_list: image_name_text = image_name.text if image_name_text.lower() == fixture.image_name: image_name.click() break static_route = self.browser.find_element_by_id( 'widgetStaticRoutes').find_element_by_tag_name('i').click() for index, intf_element in enumerate(fixture.if_list): intf_text = intf_element[0] shared_ip = intf_element[1] static_routes = intf_element[2] self.browser.find_element_by_id('btnCommonAddInterface').click() self.browser.find_element_by_id( 'allInterface').find_elements_by_tag_name('i')[index * 3].click() if shared_ip: self.browser.find_element_by_id('allInterface').find_elements_by_tag_name( 'input')[index * 3 + 1].click() if static_routes: self.browser.find_element_by_id( 'allInterface').find_elements_by_tag_name('i')[index * 3 + 2].click() intf_types = self.browser.find_elements_by_class_name( 'ui-autocomplete')[index].find_elements_by_class_name('ui-menu-item') intf_dropdown = [element.find_element_by_tag_name('a') for element in intf_types] for intf in intf_dropdown: if intf.text.lower() == intf_text: intf.click() break self.browser.find_element_by_id( 's2id_ddFlavors').find_element_by_class_name('select2-choice').click() flavors_list = self.browser.find_elements_by_xpath( "//span[@class = 'select2-match']/..") for flavor in flavors_list: flavor_text = flavor.text if flavor_text.find(fixture.flavor) != -1: flavor.click() break if fixture.svc_scaling: self.browser.find_element_by_id('chkServiceEnabeling').click() self.browser.find_element_by_id('btnCreateSTempOK').click() time.sleep(3) if not self.webui_common.check_error_msg("create service template"): raise Exception("service 
template creation failed") # end create_svc_template_in_webui def create_svc_instance_in_webui(self, fixture): result = True if not self.webui_common.click_configure_service_instance(): result = result and False self.webui_common.select_project(fixture.project_name) self.logger.info("Creating svc instance %s using webui" % (fixture.si_name)) self.webui_common.click_element( self.browser, 'btnCreatesvcInstances', 'id') self.webui_common.wait_till_ajax_done(self.browser) txt_instance_name = WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('txtsvcInstanceName')) txt_instance_name.send_keys(fixture.si_name) self.browser.find_element_by_id( 's2id_ddsvcTemplate').find_element_by_class_name('select2-choice').click() service_template_list = self.browser.find_element_by_id( 'select2-drop').find_elements_by_tag_name('li') service_temp_list = [ element.find_element_by_tag_name('div') for element in service_template_list] for service_temp in service_temp_list: service_temp_text = service_temp.text if service_temp_text.find(fixture.st_name) != -1: service_temp.click() break intfs = self.browser.find_element_by_id( 'instanceDiv').find_elements_by_tag_name('a') self.browser.find_element_by_id('btnCreatesvcInstencesOK').click() time.sleep(3) if not self.webui_common.check_error_msg("create service instance"): raise Exception("service instance creation failed") time.sleep(30) # end create_svc_instance_in_webui def create_ipam_in_webui(self, fixture): result = True ip_blocks = False if not self.webui_common.click_configure_ipam(): result = result and False self.webui_common.select_project(fixture.project_name) self.logger.info("Creating ipam %s using webui" % (fixture.name)) WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('btnCreateEditipam')).click() self.webui_common.wait_till_ajax_done(self.browser) WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('txtIPAMName')).send_keys(fixture.name) 
self.webui_common.wait_till_ajax_done(self.browser) ''' self.browser.find_element_by_id('s2id_ddDNS').find_element_by_class_name('select2-choice').click() dns_method_list = self.browser.find_element_by_id('select2-drop').find_elements_by_tag_name('li') dns_list = [ element.find_element_by_tag_name('div') for element in dns_method_list] for dns in dns_list : dns_text = dns.text if dns_text.find('Tenant') != -1 : dns.click() if dns_text == 'Tenant': self.browser.find_element_by_id('txtdnsTenant').send_keys('189.23.2.3/21') self.browser.find_element_by_id("txtNTPServer").send_keys('32.24.53.45/28') self.browser.find_element_by_id("txtDomainName").send_keys('domain_1') elif dns_text == 'Default' or dns.text == 'None': self.browser.find_element_by_id("txtNTPServer").send_keys('32.24.53.45/28') self.browser.find_element_by_id("txtDomainName").send_keys('domain_1') elif dns_text == 'Virtual DNS': self.browser.find_element_by_id('dnsvirtualBlock').find_element_by_tag_name('a').click() self.webui_common.wait_till_ajax_done(self.browser) virtual_dns_list = self.browser.find_element_by_id('select2-drop').find_elements_by_tag_name('li') vdns_list = [ element.find_element_by_tag_name('div') for element in virtual_dns_list] for vdns in vdns_list : vdns_text = vdns.text if vdns_text == 'default-domain:'+'dns': vdns.click() break break for net in range(len(net_list)): self.browser.find_element_by_id("btnCommonAddVN").click() self.browser.find_element_by_id('vnTuples').find_element_by_tag_name('a').click() self.webui_common.wait_till_ajax_done(self.browser) vn_list = self.browser.find_element_by_id('select2-drop').find_elements_by_tag_name('li') virtual_net_list = [ element.find_element_by_tag_name('div') for element in vn_list] for vns in virtual_net_list : vn_text = vns.text if vn_text == net_list[net] : vns.click() break self.browser.find_element_by_xpath("//*[contains(@placeholder, 'IP Block')]").send_keys('187.23.2.'+str(net+1)+'/21') ''' 
self.browser.find_element_by_id("btnCreateEditipamOK").click() if not self.webui_common.check_error_msg("Create ipam"): raise Exception("ipam creation failed") # end create_ipam_in_webui def create_policy_in_webui(self, fixture): result = True line = 0 try: fixture.policy_obj = fixture.quantum_fixture.get_policy_if_present( fixture.project_name, fixture.policy_name) if not fixture.policy_obj: self.logger.info("Creating policy %s using webui" % (fixture.policy_name)) if not self.webui_common.click_configure_policies(): result = result and False self.webui_common.select_project(fixture.project_name) WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('btnCreatePolicy')).click() time.sleep(2) # self.webui_common.wait_till_ajax_done(self.browser) WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('txtPolicyName')).send_keys(fixture.policy_name) time.sleep(2) # self.webui_common.wait_till_ajax_done(self.browser) lists = 0 for rule in fixture.rules_list: action = rule['simple_action'] protocol = rule['protocol'] source_net = rule['source_network'] direction = rule['direction'] dest_net = rule['dest_network'] if rule['src_ports']: if type(rule['src_ports']) is list: src_port = ','.join(str(num) for num in rule['src_ports']) else: src_port = str(rule['src_ports']) if rule['dst_ports']: if type(rule['dst_ports']) is list: dst_port = ','.join(str(num) for num in rule['dst_ports']) else: dst_port = str(rule['dst_ports']) self.browser.find_element_by_id('btnCommonAddRule').click() self.webui_common.wait_till_ajax_done(self.browser) controls = WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_class_name('controls')) rules = self.webui_common.find_element( controls, ['ruleTuples', 'rule-item'], ['id', 'class'], [1])[line] rules = rules.find_elements_by_css_selector( "div[class$='pull-left']") li = self.browser.find_elements_by_css_selector( "ul[class^='ui-autocomplete']") for rule in range(len(rules)): 
if rule == 3: rules[rule].find_element_by_class_name( 'select2-container').find_element_by_tag_name('a').click() direction_list = self.browser.find_element_by_id( 'select2-drop').find_elements_by_tag_name('li') dir_list = [element.find_element_by_tag_name('div') for element in direction_list] for directions in dir_list: direction_text = directions.text if direction_text == direction: directions.click() break continue rules[rule].find_element_by_class_name( 'add-on').find_element_by_class_name('icon-caret-down').click() time.sleep(2) # self.webui_common.wait_till_ajax_done(self.browser) opt = li[lists].find_elements_by_tag_name('li') if rule == 0: self.sel(opt, action.upper()) elif rule == 1: self.sel(opt, protocol.upper()) elif rule == 2: self.sel(opt, source_net) rule_items = self.webui_common.find_element( controls, ['ruleTuples', 'rule-item'], ['id', 'class'], [1])[line] rule_items.find_elements_by_class_name( 'span1')[2].find_element_by_tag_name('input').send_keys(src_port) # controls.find_element_by_id('ruleTuples').find_elements_by_class_name('rule-item')[line].find_elements_by_class_name('span1')[2].find_element_by_tag_name('input').send_keys(src_port) else: self.sel(opt, dest_net) controls.find_element_by_id('ruleTuples').find_elements_by_class_name( 'rule-item')[line].find_elements_by_class_name('span1')[4].find_element_by_tag_name('input').send_keys(dst_port) break lists = lists + 1 lists = lists + 1 self.browser.find_element_by_id('btnCreatePolicyOK').click() self.webui_common.wait_till_ajax_done(self.browser) if not self.webui_common.check_error_msg("Create Policy"): raise Exception("Policy creation failed") fixture.policy_obj = fixture.quantum_fixture.get_policy_if_present( fixture.project_name, fixture.policy_name) else: fixture.already_present = True self.logger.info( 'Policy %s already exists, skipping creation ' % (fixture.policy_name)) self.logger.debug('Policy %s exists, already there' % (fixture.policy_name)) except Exception as e: 
self.logger.exception("Got exception as %s while creating %s" % (e, fixture.policy_name)) sys.exit(-1) def sel(self, opt, choice): for i in range(len(opt)): option = opt[i].find_element_by_class_name( 'ui-corner-all').get_attribute("innerHTML") if option == choice: btn = opt[i].find_element_by_class_name('ui-corner-all') time.sleep(1) btn.click() time.sleep(1) return continue def policy_delete_in_webui(self, fixture): if not self.webui_common.click_configure_policies(): result = result and False rows = self.webui_common.get_rows() for pol in range(len(rows)): tdArry = rows[pol].find_elements_by_class_name('slick-cell') if(len(tdArry) > 2): if (tdArry[2].text == fixture.policy_name): tdArry[0].find_element_by_tag_name('i').click() self.webui_common.wait_till_ajax_done(self.browser) rows = self.webui_common.get_rows() ass_net = rows[ pol + 1].find_elements_by_class_name('row-fluid')[1].find_element_by_xpath("//div[@class='span11']").text.split() if(ass_net[0] != '-'): for net in range(len(ass_net)): network.append(ass_net[net]) else: print("No networks associated") tdArry[5].find_element_by_tag_name('i').click() self.browser.find_element_by_id( 'gridPolicy-action-menu-' + str(i)).find_elements_by_tag_name('li')[1].find_element_by_tag_name('a').click() self.browser.find_element_by_id("btnRemovePopupOK").click() self.webui_common.wait_till_ajax_done(self.browser) if not self.webui_common.check_error_msg("Delete policy"): raise Exception("Policy deletion failed") self.logger.info("%s is deleted successfully using webui" % (fixture.policy_name)) break # end policy_delete_in_webui def verify_analytics_nodes_ops_basic_data(self): self.logger.info("Verifying analytics_node basic ops-data in Webui...") self.logger.debug(self.dash) if not self.webui_common.click_monitor_analytics_nodes(): result = result and False rows = self.webui_common.get_rows() analytics_nodes_list_ops = self.webui_common.get_collectors_list_ops() result = True for n in 
range(len(analytics_nodes_list_ops)): ops_analytics_node_name = analytics_nodes_list_ops[n]['name'] self.logger.info("Vn host name %s exists in op server..checking if exists in webui as well" % ( ops_analytics_node_name)) if not self.webui_common.click_monitor_analytics_nodes(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_analytics_node_name: self.logger.info("Analytics_node name %s found in webui..going to match basic details.." % ( ops_analytics_node_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 break if not match_flag: self.logger.error("Analytics_node name %s did not match in webui...not found in webui" % ( ops_analytics_node_name)) self.logger.debug(self.dash) else: self.logger.info("Click and retrieve analytics_node basic view details in webui for \ analytics_node-name %s " % (ops_analytics_node_name)) self.webui_common.click_monitor_analytics_nodes_basic( match_index) dom_basic_view = self.webui_common.get_basic_view_infra() # special handling for overall node status value node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name( 'p').get_attribute('innerHTML').replace('\n', '').strip() for i, item in enumerate(dom_basic_view): if item.get('key') == 'Overall Node Status': dom_basic_view[i]['value'] = node_status # filter analytics_node basic view details from opserver data analytics_nodes_ops_data = self.webui_common.get_details( analytics_nodes_list_ops[n]['href']) ops_basic_data = [] host_name = analytics_nodes_list_ops[n]['name'] ip_address = analytics_nodes_ops_data.get( 'CollectorState').get('self_ip_list') ip_address = ', '.join(ip_address) generators_count = str( len(analytics_nodes_ops_data.get('CollectorState').get('generator_infos'))) version = json.loads(analytics_nodes_ops_data.get('CollectorState').get('build_info')).get( 'build-info')[0].get('build-id') version = 
self.webui_common.get_version_string(version) module_cpu_info_len = len( analytics_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info')) for i in range(module_cpu_info_len): if analytics_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info')[i][ 'module_id'] == 'Collector': cpu_mem_info_dict = analytics_nodes_ops_data.get( 'ModuleCpuState').get('module_cpu_info')[i] break cpu = self.webui_common.get_cpu_string(cpu_mem_info_dict) memory = self.webui_common.get_memory_string(cpu_mem_info_dict) modified_ops_data = [] process_state_list = analytics_nodes_ops_data.get( 'ModuleCpuState').get('process_state_list') process_down_stop_time_dict = {} process_up_start_time_dict = {} exclude_process_list = [ 'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr', 'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema'] for i, item in enumerate(process_state_list): if item['process_name'] == 'redis-query': redis_query_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-qe': contrail_qe_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-analytics-nodemgr': contrail_analytics_nodemgr_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'redis-uve': redis_uve_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-opserver': contrail_opserver_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-collector': contrail_collector_string = 
self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) for k, v in process_down_stop_time_dict.items(): if k not in exclude_process_list: reduced_process_keys_dict[k]=v if not reduced_process_keys_dict: for process in exclude_process_list: process_up_start_time_dict.pop(process, None) recent_time = min(process_up_start_time_dict.values()) overall_node_status_time = self.webui_common.get_node_status_string( str(recent_time)) overall_node_status_string = [ 'Up since ' + status for status in overall_node_status_time] else: overall_node_status_down_time = self.webui_common.get_node_status_string( str(max(reduced_process_keys_dict.values()))) process_down_count = len(reduced_process_keys_dict) overall_node_status_string = str( process_down_count) + ' Process down' modified_ops_data.extend( [{'key': 'Hostname', 'value': host_name}, {'key': 'Generators', 'value': generators_count}, {'key': 'IP Address', 'value': ip_address}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version}, {'key': 'Collector', 'value': contrail_collector_string}, {'key': 'Query Engine', 'value': contrail_qe_string}, {'key': 'OpServer', 'value': contrail_opserver_string}, {'key': 'Redis Query', 'value': redis_query_string}, {'key': 'Redis UVE', 'value': redis_uve_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}]) if self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view): self.logger.info( "Ops %s uves analytics_nodes basic view details data matched in webui" % (ops_analytics_node_name)) else: self.logger.error( "Ops %s uves analytics_nodes basic view details data match failed in webui" % (ops_analytics_node_name)) result = result and False return result # end verify_analytics_nodes_ops_basic_data_in_webui def verify_config_nodes_ops_basic_data(self): self.logger.info( "Verifying config_node basic ops-data in Webui monitor->infra->Config 
Nodes->details(basic view)...") self.logger.debug(self.dash) if not self.webui_common.click_monitor_config_nodes(): result = result and False rows = self.webui_common.get_rows() config_nodes_list_ops = self.webui_common.get_config_nodes_list_ops() result = True for n in range(len(config_nodes_list_ops)): ops_config_node_name = config_nodes_list_ops[n]['name'] self.logger.info("Vn host name %s exists in op server..checking if exists in webui as well" % ( ops_config_node_name)) if not self.webui_common.click_monitor_config_nodes(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_config_node_name: self.logger.info("Config_node name %s found in webui..going to match basic details..." % ( ops_config_node_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 break if not match_flag: self.logger.error("Config_node name %s did not match in webui...not found in webui" % ( ops_config_node_name)) self.logger.debug(self.dash) else: self.logger.info("Click and retrieve config_node basic view details in webui for \ config_node-name %s " % (ops_config_node_name)) # filter config_node basic view details from opserver data config_nodes_ops_data = self.webui_common.get_details( config_nodes_list_ops[n]['href']) self.webui_common.click_monitor_config_nodes_basic(match_index) dom_basic_view = self.webui_common.get_basic_view_infra() ops_basic_data = [] host_name = config_nodes_list_ops[n]['name'] ip_address = config_nodes_ops_data.get( 'ModuleCpuState').get('config_node_ip') if not ip_address: ip_address = '--' else: ip_address = ', '.join(ip_address) process_state_list = config_nodes_ops_data.get( 'ModuleCpuState').get('process_state_list') process_down_stop_time_dict = {} process_up_start_time_dict = {} exclude_process_list = [ 'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr', 
'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema'] for i, item in enumerate(process_state_list): if item['process_name'] == 'contrail-api:0': api_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'ifmap': ifmap_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-discovery:0': discovery_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-schema': schema_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-svc-monitor': monitor_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) for k, v in process_down_stop_time_dict.items(): if k not in exclude_process_list: reduced_process_keys_dict[k]=v if not reduced_process_keys_dict: for process in exclude_process_list: process_up_start_time_dict.pop(process, None) recent_time = max(process_up_start_time_dict.values()) overall_node_status_time = self.webui_common.get_node_status_string( str(recent_time)) overall_node_status_string = [ 'Up since ' + status for status in overall_node_status_time] else: overall_node_status_down_time = self.webui_common.get_node_status_string( str(max(reduced_process_keys_dict.values()))) process_down_count = len(reduced_process_keys_dict) overall_node_status_string = str( process_down_count) + ' Process down' # special handling for overall node status value node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name( 'p').get_attribute('innerHTML').replace('\n', '').strip() for i, item in enumerate(dom_basic_view): if item.get('key') == 'Overall Node 
Status': dom_basic_view[i]['value'] = node_status version = config_nodes_ops_data.get( 'ModuleCpuState').get('build_info') if not version: version = '--' else: version = json.loads(config_nodes_ops_data.get('ModuleCpuState').get('build_info')).get( 'build-info')[0].get('build-id') version = self.webui_common.get_version_string(version) module_cpu_info_len = len( config_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info')) cpu_mem_info_dict = {} for i in range(module_cpu_info_len): if config_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info')[i][ 'module_id'] == 'ApiServer': cpu_mem_info_dict = config_nodes_ops_data.get( 'ModuleCpuState').get('module_cpu_info')[i] break if not cpu_mem_info_dict: cpu = '--' memory = '--' else: cpu = self.webui_common.get_cpu_string(cpu_mem_info_dict) memory = self.webui_common.get_memory_string( cpu_mem_info_dict) modified_ops_data = [] generator_list = self.webui_common.get_generators_list_ops() for element in generator_list: if element['name'] == ops_config_node_name + ':Config:Contrail-Config-Nodemgr:0': analytics_data = element['href'] generators_vrouters_data = self.webui_common.get_details( element['href']) analytics_data = generators_vrouters_data.get( 'ModuleClientState').get('client_info') if analytics_data['status'] == 'Established': analytics_primary_ip = analytics_data[ 'primary'].split(':')[0] + ' (Up)' modified_ops_data.extend( [{'key': 'Hostname', 'value': host_name}, {'key': 'IP Address', 'value': ip_address}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version}, {'key': 'API Server', 'value': api_string}, {'key': 'Discovery', 'value': discovery_string}, {'key': 'Service Monitor', 'value': monitor_string}, {'key': 'Ifmap', 'value': ifmap_string}, {'key': 'Schema Transformer', 'value': schema_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}]) self.webui_common.match_ops_with_webui( modified_ops_data, dom_basic_view) if 
self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view): self.logger.info( "Ops %s uves config_nodes basic view details data matched in webui" % (ops_config_node_name)) else: self.logger.error( "Ops %s uves config_nodes basic view details data match failed in webui" % (ops_config_node_name)) result = result and False return result # end verify_config_nodes_ops_basic_data_in_webui def verify_vrouter_ops_basic_data(self): result = True self.logger.info( "Verifying vrouter basic ops-data in Webui monitor->infra->Virtual routers->details(basic view)...") self.logger.debug(self.dash) if not self.webui_common.click_monitor_vrouters(): result = result and False rows = self.webui_common.get_rows() vrouters_list_ops = self.webui_common.get_vrouters_list_ops() for n in range(len(vrouters_list_ops)): ops_vrouter_name = vrouters_list_ops[n]['name'] self.logger.info( "Vn host name %s exists in op server..checking if exists in webui as well" % (ops_vrouter_name)) if not self.webui_common.click_monitor_vrouters(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_vrouter_name: self.logger.info( "Vrouter name %s found in webui..going to match basic details..." 
% (ops_vrouter_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 break if not match_flag: self.logger.error( "Vrouter name %s did not match in webui...not found in webui" % (ops_vrouter_name)) self.logger.debug(self.dash) else: self.logger.info( "Click and retrieve vrouter basic view details in webui for vrouter-name %s " % (ops_vrouter_name)) self.webui_common.click_monitor_vrouters_basic(match_index) dom_basic_view = self.webui_common.get_basic_view_infra() # special handling for overall node status value node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name( 'p').get_attribute('innerHTML').replace('\n', '').strip() for i, item in enumerate(dom_basic_view): if item.get('key') == 'Overall Node Status': dom_basic_view[i]['value'] = node_status # special handling for control nodes control_nodes = self.browser.find_element_by_class_name( 'table-cell').text for i, item in enumerate(dom_basic_view): if item.get('key') == 'Control Nodes': dom_basic_view[i]['value'] = control_nodes # filter vrouter basic view details from opserver data vrouters_ops_data = self.webui_common.get_details( vrouters_list_ops[n]['href']) ops_basic_data = [] host_name = vrouters_list_ops[n]['name'] ip_address = vrouters_ops_data.get( 'VrouterAgent').get('self_ip_list')[0] version = json.loads(vrouters_ops_data.get('VrouterAgent').get('build_info')).get( 'build-info')[0].get('build-id') version = version.split('-') version = version[0] + ' (Build ' + version[1] + ')' xmpp_messages = vrouters_ops_data.get( 'VrouterStatsAgent').get('xmpp_stats_list') for i, item in enumerate(xmpp_messages): if item['ip'] == ip_address: xmpp_in_msgs = item['in_msgs'] xmpp_out_msgs = item['out_msgs'] xmpp_msgs_string = str(xmpp_in_msgs) + \ ' In ' + \ str(xmpp_out_msgs) + ' Out' break total_flows = vrouters_ops_data.get( 'VrouterStatsAgent').get('total_flows') active_flows = vrouters_ops_data.get( 'VrouterStatsAgent').get('active_flows') flow_count_string = 
str(active_flows) + \ ' Active, ' + \ str(total_flows) + ' Total' if vrouters_ops_data.get('VrouterAgent').get('connected_networks'): networks = str( len(vrouters_ops_data.get('VrouterAgent').get('connected_networks'))) else: networks = '--' interfaces = str(vrouters_ops_data.get('VrouterAgent') .get('total_interface_count')) if vrouters_ops_data.get('VrouterAgent').get('virtual_machine_list'): instances = str( len(vrouters_ops_data.get('VrouterAgent').get('virtual_machine_list'))) else: instances = '--' cpu = vrouters_ops_data.get('VrouterStatsAgent').get( 'cpu_info').get('cpu_share') cpu = str(round(cpu, 2)) + ' %' memory = vrouters_ops_data.get('VrouterStatsAgent').get( 'cpu_info').get('meminfo').get('virt') memory = memory / 1024.0 if memory < 1024: memory = str(round(memory, 2)) + ' MB' else: memory = str(round(memory / 1024), 2) + ' GB' last_log = vrouters_ops_data.get( 'VrouterAgent').get('total_interface_count') modified_ops_data = [] process_state_list = vrouters_ops_data.get( 'VrouterStatsAgent').get('process_state_list') process_down_stop_time_dict = {} process_up_start_time_dict = {} exclude_process_list = [ 'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr', 'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema'] for i, item in enumerate(process_state_list): if item['process_name'] == 'contrail-vrouter': contrail_vrouter_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-vrouter-nodemgr': contrail_vrouter_nodemgr_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'openstack-nova-compute': openstack_nova_compute_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) for k, v in 
process_down_stop_time_dict.items(): if k not in exclude_process_list: reduced_process_keys_dict[k] = v ''' if not reduced_process_keys_dict : recent_time = max(process_up_start_time_dict.values()) overall_node_status_time = self.webui_common.get_node_status_string(str(recent_time)) overall_node_status_string = ['Up since ' + status for status in overall_node_status_time] else: overall_node_status_down_time = self.webui_common.get_node_status_string(str(max(reduced_process_keys_dict.values()))) overall_node_status_string = ['Down since ' + status for status in overall_node_status_down_time] ''' if not reduced_process_keys_dict: for process in exclude_process_list: process_up_start_time_dict.pop(process, None) recent_time = max(process_up_start_time_dict.values()) overall_node_status_time = self.webui_common.get_node_status_string( str(recent_time)) overall_node_status_string = [ 'Up since ' + status for status in overall_node_status_time] else: overall_node_status_down_time = self.webui_common.get_node_status_string( str(max(reduced_process_keys_dict.values()))) process_down_count = len(reduced_process_keys_dict) process_down_list = reduced_process_keys_dict.keys() overall_node_status_string = str( process_down_count) + ' Process down' generator_list = self.webui_common.get_generators_list_ops() for element in generator_list: if element['name'] == ops_vrouter_name + ':Compute:VRouterAgent:0': analytics_data = element['href'] break generators_vrouters_data = self.webui_common.get_details( element['href']) analytics_data = generators_vrouters_data.get( 'ModuleClientState').get('client_info') if analytics_data['status'] == 'Established': analytics_primary_ip = analytics_data[ 'primary'].split(':')[0] + ' (Up)' tx_socket_bytes = analytics_data.get( 'tx_socket_stats').get('bytes') tx_socket_size = self.webui_common.get_memory_string( int(tx_socket_bytes)) analytics_msg_count = generators_vrouters_data.get( 'ModuleClientState').get('session_stats').get('num_send_msg') 
offset = 5 analytics_msg_count_list = range( int(analytics_msg_count) - offset, int(analytics_msg_count) + offset) analytics_messages_string = [ str(count) + ' [' + str(size) + ']' for count in analytics_msg_count_list for size in tx_socket_size] control_nodes_list = vrouters_ops_data.get( 'VrouterAgent').get('xmpp_peer_list') control_nodes_string = '' for node in control_nodes_list: if node['status'] == True and node['primary'] == True: control_ip = node['ip'] control_nodes_string = control_ip + '* (Up)' index = control_nodes_list.index(node) del control_nodes_list[index] for node in control_nodes_list: node_ip = node['ip'] if node['status'] == True: control_nodes_string = control_nodes_string + \ ', ' + node_ip + ' (Up)' else: control_nodes_string = control_nodes_string + \ ', ' + node_ip + ' (Down)' modified_ops_data.extend( [{'key': 'Flow Count', 'value': flow_count_string}, {'key': 'Hostname', 'value': host_name}, {'key': 'IP Address', 'value': ip_address}, {'key': 'Networks', 'value': networks}, {'key': 'Instances', 'value': instances}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version}, {'key': 'vRouter Agent', 'value': contrail_vrouter_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}, {'key': 'Analytics Node', 'value': analytics_primary_ip}, {'key': 'Analytics Messages', 'value': analytics_messages_string}, {'key': 'Control Nodes', 'value': control_nodes_string}]) self.webui_common.match_ops_with_webui( modified_ops_data, dom_basic_view) if self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view): self.logger.info( "Ops %s uves vrouters basic view details data matched in webui" % (ops_vrouter_name)) else: self.logger.error( "Ops %s uves vrouters basic view details data match failed in webui" % (ops_vrouter_name)) result = result and False return result # end verify_vrouter_ops_basic_data_in_webui def verify_vrouter_ops_advance_data(self): self.logger.info( 
"Verifying vrouter Ops-data in Webui monitor->infra->Virtual Routers->details(advance view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_vrouters(): result = result and False rows = self.webui_common.get_rows() vrouters_list_ops = self.webui_common.get_vrouters_list_ops() result = True for n in range(len(vrouters_list_ops)): ops_vrouter_name = vrouters_list_ops[n]['name'] self.logger.info( "Vn host name %s exists in op server..checking if exists in webui as well" % (ops_vrouter_name)) if not self.webui_common.click_monitor_vrouters(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_vrouter_name: self.logger.info( "Vrouter name %s found in webui..going to match advance details..." % (ops_vrouter_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 break if not match_flag: self.logger.error( "Vrouter name %s did not match in webui...not found in webui" % (ops_vrouter_name)) self.logger.debug(self.dash) else: self.logger.info( "Click and retrieve vrouter advance details in webui for vrouter-name %s " % (ops_vrouter_name)) self.webui_common.click_monitor_vrouters_advance(match_index) vrouters_ops_data = self.webui_common.get_details( vrouters_list_ops[n]['href']) dom_arry = self.webui_common.parse_advanced_view() dom_arry_str = self.webui_common.get_advanced_view_str() dom_arry_num = self.webui_common.get_advanced_view_num() dom_arry_num_new = [] for item in dom_arry_num: dom_arry_num_new.append( {'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']}) dom_arry_num = dom_arry_num_new merged_arry = dom_arry + dom_arry_str + dom_arry_num if vrouters_ops_data.has_key('VrouterStatsAgent'): ops_data = vrouters_ops_data['VrouterStatsAgent'] history_del_list = [ 'total_in_bandwidth_utilization', 'cpu_share', 'used_sys_mem', 'one_min_avg_cpuload', 'virt_mem', 
'total_out_bandwidth_utilization'] for item in history_del_list: if ops_data.get(item): for element in ops_data.get(item): if element.get('history-10'): del element['history-10'] if element.get('s-3600-topvals'): del element['s-3600-topvals'] modified_ops_data = [] self.webui_common.extract_keyvalue( ops_data, modified_ops_data) if vrouters_ops_data.has_key('VrouterAgent'): ops_data_agent = vrouters_ops_data['VrouterAgent'] modified_ops_data_agent = [] self.webui_common.extract_keyvalue( ops_data_agent, modified_ops_data_agent) complete_ops_data = modified_ops_data + \ modified_ops_data_agent for k in range(len(complete_ops_data)): if type(complete_ops_data[k]['value']) is list: for m in range(len(complete_ops_data[k]['value'])): complete_ops_data[k]['value'][m] = str( complete_ops_data[k]['value'][m]) elif type(complete_ops_data[k]['value']) is unicode: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) else: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry): self.logger.info( "Ops %s uves virual networks advance view data matched in webui" % (ops_vrouter_name)) else: self.logger.error( "Ops %s uves virual networks advance data match failed in webui" % (ops_vrouter_name)) result = result and False return result # end verify_vrouter_ops_advance_data_in_webui def verify_bgp_routers_ops_basic_data(self): self.logger.info( "Verifying Control Nodes basic ops-data in Webui monitor->infra->Control Nodes->details(basic view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_control_nodes(): result = result and False rows = self.webui_common.get_rows() bgp_routers_list_ops = self.webui_common.get_bgp_routers_list_ops() result = True for n in range(len(bgp_routers_list_ops)): ops_bgp_routers_name = bgp_routers_list_ops[n]['name'] self.logger.info("Control node host name %s exists in op server..checking if exists \ in webui as well" % 
(ops_bgp_routers_name)) if not self.webui_common.click_monitor_control_nodes(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_bgp_routers_name: self.logger.info("Bgp routers name %s found in webui..going to match basic details..." % ( ops_bgp_routers_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 break if not match_flag: self.logger.error("Bgp routers name %s did not match in webui...not found in webui" % ( ops_bgp_routers_name)) self.logger.debug(self.dash) else: self.logger.info("Click and retrieve control nodes basic view details in webui for \ control node name %s " % (ops_bgp_routers_name)) self.webui_common.click_monitor_control_nodes_basic( match_index) dom_basic_view = self.webui_common.get_basic_view_infra() # special handling for overall node status value node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name( 'p').get_attribute('innerHTML').replace('\n', '').strip() for i, item in enumerate(dom_basic_view): if item.get('key') == 'Overall Node Status': dom_basic_view[i]['value'] = node_status # filter bgp_routers basic view details from opserver data bgp_routers_ops_data = self.webui_common.get_details( bgp_routers_list_ops[n]['href']) ops_basic_data = [] host_name = bgp_routers_list_ops[n]['name'] ip_address = bgp_routers_ops_data.get( 'BgpRouterState').get('bgp_router_ip_list')[0] if not ip_address: ip_address = '--' version = json.loads(bgp_routers_ops_data.get('BgpRouterState').get('build_info')).get( 'build-info')[0].get('build-id') version = self.webui_common.get_version_string(version) bgp_peers_string = 'BGP Peers: ' + \ str(bgp_routers_ops_data.get('BgpRouterState') .get('num_bgp_peer')) + ' Total' vrouters = 'vRouters: ' + \ str(bgp_routers_ops_data.get('BgpRouterState') .get('num_up_xmpp_peer')) + ' Established in Sync' cpu = bgp_routers_ops_data.get('BgpRouterState') 
memory = bgp_routers_ops_data.get('BgpRouterState') if not cpu: cpu = '--' memory = '--' else: cpu = self.webui_common.get_cpu_string(cpu) memory = self.webui_common.get_memory_string(memory) generator_list = self.webui_common.get_generators_list_ops() for element in generator_list: if element['name'] == ops_bgp_routers_name + ':Control:ControlNode:0': analytics_data = element['href'] generators_vrouters_data = self.webui_common.get_details( element['href']) analytics_data = generators_vrouters_data.get( 'ModuleClientState').get('client_info') if analytics_data['status'] == 'Established': analytics_primary_ip = analytics_data[ 'primary'].split(':')[0] + ' (Up)' tx_socket_bytes = analytics_data.get( 'tx_socket_stats').get('bytes') tx_socket_size = self.webui_common.get_memory_string( int(tx_socket_bytes)) analytics_msg_count = generators_vrouters_data.get( 'ModuleClientState').get('session_stats').get('num_send_msg') offset = 10 analytics_msg_count_list = range( int(analytics_msg_count) - offset, int(analytics_msg_count) + offset) analytics_messages_string = [ str(count) + ' [' + str(size) + ']' for count in analytics_msg_count_list for size in tx_socket_size] ifmap_ip = bgp_routers_ops_data.get('BgpRouterState').get( 'ifmap_info').get('url').split(':')[0] ifmap_connection_status = bgp_routers_ops_data.get( 'BgpRouterState').get('ifmap_info').get('connection_status') ifmap_connection_status_change = bgp_routers_ops_data.get( 'BgpRouterState').get('ifmap_info').get('connection_status_change_at') ifmap_connection_string = [ifmap_ip + ' (' + ifmap_connection_status + ' since ' + time + ')' for time in self.webui_common.get_node_status_string(ifmap_connection_status_change)] process_state_list = bgp_routers_ops_data.get( 'BgpRouterState').get('process_state_list') process_down_stop_time_dict = {} process_up_start_time_dict = {} exclude_process_list = [ 'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr', 
'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema'] for i, item in enumerate(process_state_list): if item['process_name'] == 'contrail-control': control_node_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-control-nodemgr': control_nodemgr_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-dns': contrail_dns_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) if item['process_name'] == 'contrail-named': contrail_named_string = self.webui_common.get_process_status_string( item, process_down_stop_time_dict, process_up_start_time_dict) for k, v in process_down_stop_time_dict.items(): if k not in exclude_process_list: reduced_process_keys_dict[k] = v if not reduced_process_keys_dict: for process in exclude_process_list: process_up_start_time_dict.pop(process, None) recent_time = max(process_up_start_time_dict.values()) overall_node_status_time = self.webui_common.get_node_status_string( str(recent_time)) overall_node_status_string = [ 'Up since ' + status for status in overall_node_status_time] else: overall_node_status_down_time = self.webui_common.get_node_status_string( str(max(reduced_process_keys_dict.values()))) process_down_list = reduced_process_keys_dict.keys() process_down_count = len(reduced_process_keys_dict) overall_node_status_string = str( process_down_count) + ' Process down' modified_ops_data = [] modified_ops_data.extend( [{'key': 'Peers', 'value': bgp_peers_string}, {'key': 'Hostname', 'value': host_name}, {'key': 'IP Address', 'value': ip_address}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version}, {'key': 'Analytics Node', 'value': analytics_primary_ip}, {'key': 
'Analytics Messages', 'value': analytics_messages_string}, {'key': 'Ifmap Connection', 'value': ifmap_connection_string}, {'key': 'Control Node', 'value': control_node_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}]) self.webui_common.match_ops_with_webui( modified_ops_data, dom_basic_view) if self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view): self.logger.info( "Ops %s uves bgp_routers basic view details data matched in webui" % (ops_bgp_routers_name)) else: self.logger.error( "Ops %s uves bgp_routers basic view details data match failed in webui" % (ops_bgp_routers_name)) result = result and False return result # end verify_bgp_routers_ops_basic_data_in_webui def verify_bgp_routers_ops_advance_data(self): self.logger.info( "Verifying Control Nodes ops-data in Webui monitor->infra->Control Nodes->details(advance view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_control_nodes(): result = result and False rows = self.webui_common.get_rows() bgp_routers_list_ops = self.webui_common.get_bgp_routers_list_ops() result = True for n in range(len(bgp_routers_list_ops)): ops_bgp_router_name = bgp_routers_list_ops[n]['name'] self.logger.info( "Bgp router %s exists in op server..checking if exists in webui " % (ops_bgp_router_name)) self.logger.info( "Clicking on bgp_routers in monitor page in Webui...") if not self.webui_common.click_monitor_control_nodes(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_bgp_router_name: self.logger.info( "Bgp router name %s found in webui..going to match advance details..." 
% (ops_bgp_router_name)) match_flag = 1 match_index = i break if not match_flag: self.logger.error("Bgp router name %s not found in webui" % (ops_bgp_router_name)) self.logger.debug(self.dash) else: self.logger.info( "Click and retrieve bgp advance view details in webui for bgp router-name %s " % (ops_bgp_router_name)) self.webui_common.click_monitor_control_nodes_advance( match_index) dom_arry = self.webui_common.parse_advanced_view() dom_arry_str = self.webui_common.get_advanced_view_str() dom_arry_num = self.webui_common.get_advanced_view_num() dom_arry_num_new = [] for item in dom_arry_num: dom_arry_num_new.append( {'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']}) dom_arry_num = dom_arry_num_new merged_arry = dom_arry + dom_arry_str + dom_arry_num bgp_routers_ops_data = self.webui_common.get_details( bgp_routers_list_ops[n]['href']) bgp_router_state_ops_data = bgp_routers_ops_data[ 'BgpRouterState'] history_del_list = [ 'total_in_bandwidth_utilization', 'cpu_share', 'used_sys_mem', 'one_min_avg_cpuload', 'virt_mem', 'total_out_bandwidth_utilization'] for item in history_del_list: if bgp_router_state_ops_data.get(item): for element in bgp_router_state_ops_data.get(item): if element.get('history-10'): del element['history-10'] if element.get('s-3600-topvals'): del element['s-3600-topvals'] if bgp_routers_ops_data.has_key('BgpRouterState'): bgp_router_state_ops_data = bgp_routers_ops_data[ 'BgpRouterState'] modified_bgp_router_state_ops_data = [] self.webui_common.extract_keyvalue( bgp_router_state_ops_data, modified_bgp_router_state_ops_data) complete_ops_data = modified_bgp_router_state_ops_data for k in range(len(complete_ops_data)): if type(complete_ops_data[k]['value']) is list: for m in range(len(complete_ops_data[k]['value'])): complete_ops_data[k]['value'][m] = str( complete_ops_data[k]['value'][m]) elif type(complete_ops_data[k]['value']) is unicode: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) else: 
complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry): self.logger.info( "Ops uves bgp router advanced view data matched in webui") else: self.logger.error( "Ops uves bgp router advanced view bgp router match failed in webui") result = result and False return result # end verify_bgp_routers_ops_advance_data_in_webui def verify_analytics_nodes_ops_advance_data(self): self.logger.info( "Verifying analytics_nodes(collectors) ops-data in Webui monitor->infra->Analytics Nodes->details(advance view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_analytics_nodes(): result = result and False rows = self.webui_common.get_rows() analytics_nodes_list_ops = self.webui_common.get_collectors_list_ops() result = True for n in range(len(analytics_nodes_list_ops)): ops_analytics_node_name = analytics_nodes_list_ops[n]['name'] self.logger.info( "Analytics node %s exists in op server..checking if exists in webui " % (ops_analytics_node_name)) self.logger.info( "Clicking on analytics_nodes in monitor page in Webui...") if not self.webui_common.click_monitor_analytics_nodes(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_analytics_node_name: self.logger.info( "Analytics node name %s found in webui..going to match advance details..." 
% (ops_analytics_node_name)) match_flag = 1 match_index = i break if not match_flag: self.logger.error("Analytics node name %s not found in webui" % (ops_analytics_node_name)) self.logger.debug(self.dash) else: self.logger.info( "Click and retrieve analytics advance view details in webui for analytics node-name %s " % (ops_analytics_node_name)) self.webui_common.click_monitor_analytics_nodes_advance( match_index) analytics_nodes_ops_data = self.webui_common.get_details( analytics_nodes_list_ops[n]['href']) dom_arry = self.webui_common.parse_advanced_view() dom_arry_str = self.webui_common.get_advanced_view_str() dom_arry_num = self.webui_common.get_advanced_view_num() dom_arry_num_new = [] for item in dom_arry_num: dom_arry_num_new.append( {'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']}) dom_arry_num = dom_arry_num_new merged_arry = dom_arry + dom_arry_str + dom_arry_num modified_query_perf_info_ops_data = [] modified_module_cpu_state_ops_data = [] modified_analytics_cpu_state_ops_data = [] modified_collector_state_ops_data = [] history_del_list = [ 'opserver_mem_virt', 'queryengine_cpu_share', 'opserver_cpu_share', 'collector_cpu_share', 'collector_mem_virt', 'queryengine_mem_virt', 'enq_delay'] if analytics_nodes_ops_data.has_key('QueryPerfInfo'): query_perf_info_ops_data = analytics_nodes_ops_data[ 'QueryPerfInfo'] for item in history_del_list: if query_perf_info_ops_data.get(item): for element in query_perf_info_ops_data.get(item): if element.get('history-10'): del element['history-10'] if element.get('s-3600-topvals'): del element['s-3600-topvals'] if element.get('s-3600-summary'): del element['s-3600-summary'] self.webui_common.extract_keyvalue( query_perf_info_ops_data, modified_query_perf_info_ops_data) if analytics_nodes_ops_data.has_key('ModuleCpuState'): module_cpu_state_ops_data = analytics_nodes_ops_data[ 'ModuleCpuState'] for item in history_del_list: if module_cpu_state_ops_data.get(item): for element in 
module_cpu_state_ops_data.get(item): if element.get('history-10'): del element['history-10'] if element.get('s-3600-topvals'): del element['s-3600-topvals'] if element.get('s-3600-summary'): del element['s-3600-summary'] self.webui_common.extract_keyvalue( module_cpu_state_ops_data, modified_module_cpu_state_ops_data) if analytics_nodes_ops_data.has_key('AnalyticsCpuState'): analytics_cpu_state_ops_data = analytics_nodes_ops_data[ 'AnalyticsCpuState'] modified_analytics_cpu_state_ops_data = [] self.webui_common.extract_keyvalue( analytics_cpu_state_ops_data, modified_analytics_cpu_state_ops_data) if analytics_nodes_ops_data.has_key('CollectorState'): collector_state_ops_data = analytics_nodes_ops_data[ 'CollectorState'] self.webui_common.extract_keyvalue( collector_state_ops_data, modified_collector_state_ops_data) complete_ops_data = modified_query_perf_info_ops_data + modified_module_cpu_state_ops_data + \ modified_analytics_cpu_state_ops_data + \ modified_collector_state_ops_data for k in range(len(complete_ops_data)): if type(complete_ops_data[k]['value']) is list: for m in range(len(complete_ops_data[k]['value'])): complete_ops_data[k]['value'][m] = str( complete_ops_data[k]['value'][m]) elif type(complete_ops_data[k]['value']) is unicode: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) else: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry): self.logger.info( "Ops uves analytics node advance view data matched in webui") else: self.logger.error( "Ops uves analytics node match failed in webui") result = result and False return result # end verify_analytics_nodes_ops_advance_data_in_webui def verify_vm_ops_basic_data(self): self.logger.info( "Verifying VM basic ops-data in Webui monitor->Networking->instances summary(basic view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_instances(): result = result and False rows = 
self.webui_common.get_rows() vm_list_ops = self.webui_common.get_vm_list_ops() result = True for k in range(len(vm_list_ops)): ops_uuid = vm_list_ops[k]['name'] if not self.webui_common.click_monitor_instances(): result = result and False rows = self.webui_common.get_rows() self.logger.info( "Vm uuid %s exists in op server..checking if exists in webui as well" % (ops_uuid)) for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[2].text == ops_uuid: self.logger.info( "Vm uuid %s matched in webui..going to match basic view details..." % (ops_uuid)) self.logger.debug(self.dash) match_index = i match_flag = 1 vm_name = rows[i].find_elements_by_class_name( 'slick-cell')[1].text break if not match_flag: self.logger.error( "Uuid exists in opserver but uuid %s not found in webui..." % (ops_uuid)) self.logger.debug(self.dash) else: self.webui_common.click_monitor_instances_basic(match_index) self.logger.info( "Click and retrieve basic view details in webui for uuid %s " % (ops_uuid)) dom_arry_basic = self.webui_common.get_vm_basic_view() len_dom_arry_basic = len(dom_arry_basic) elements = self.browser.find_element_by_xpath( "//*[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid') len_elements = len(elements) vm_ops_data = self.webui_common.get_details( vm_list_ops[k]['href']) complete_ops_data = [] if vm_ops_data.has_key('UveVirtualMachineAgent'): # get vm interface basic details from opserver ops_data_interface_list = vm_ops_data[ 'UveVirtualMachineAgent']['interface_list'] for k in range(len(ops_data_interface_list)): del ops_data_interface_list[k]['l2_active'] if ops_data_interface_list[k].get('floating_ips'): fip_list = ops_data_interface_list[ k].get('floating_ips') floating_ip = None fip_list_len = len(fip_list) for index, element in enumerate(fip_list): ops_data_interface_list[k][ 'floating_ips'] = element.get('ip_address') ops_data_interface_list[k][ 'floating_ip_pool'] = element.get('virtual_network') # 
if index == 0: # floating_ip = element.get('ip_address') + ' (' + element.get('virtual_network') + ')' # else: # floating_ip = floating_ip + ' , ' + element.get('ip_address') + ' (' + element.get('virtual_network') + ')' #ops_data_interface_list[k]['floating_ips'] = floating_ip modified_ops_data_interface_list = [] self.webui_common.extract_keyvalue( ops_data_interface_list[k], modified_ops_data_interface_list) complete_ops_data = complete_ops_data + \ modified_ops_data_interface_list for t in range(len(complete_ops_data)): if type(complete_ops_data[t]['value']) is list: for m in range(len(complete_ops_data[t]['value'])): complete_ops_data[t]['value'][m] = str( complete_ops_data[t]['value'][m]) elif type(complete_ops_data[t]['value']) is unicode: complete_ops_data[t]['value'] = str( complete_ops_data[t]['value']) else: complete_ops_data[t]['value'] = str( complete_ops_data[t]['value']) # get vm basic interface details excluding basic interface # details dom_arry_intf = [] dom_arry_intf.insert(0, {'key': 'vm_name', 'value': vm_name}) # insert non interface elements in list for i in range(len_dom_arry_basic): element_key = elements[ i].find_elements_by_tag_name('div')[0].text element_value = elements[ i].find_elements_by_tag_name('div')[1].text dom_arry_intf.append( {'key': element_key, 'value': element_value}) fip_rows_index = False for i in range(len_dom_arry_basic + 1, len_elements): if not fip_rows_index: elements_key = elements[ len_dom_arry_basic].find_elements_by_tag_name('div') else: elements_key = elements[ fip_rows_index].find_elements_by_tag_name('div') elements_value = elements[ i].find_elements_by_tag_name('div') if not elements_value[0].text == 'Floating IPs': for j in range(len(elements_key)): if j == 2 and not fip_rows_index: dom_arry_intf.append( {'key': 'ip_address', 'value': elements_value[j].text.split('/')[0].strip()}) dom_arry_intf.append( {'key': 'mac_address', 'value': elements_value[j].text.split('/')[1].strip()}) else: dom_arry_intf.append( 
{'key': elements_key[j].text, 'value': elements_value[j].text}) else: fip_rows_index = i continue for element in complete_ops_data: if element['key'] == 'name': index = complete_ops_data.index(element) del complete_ops_data[index] if self.webui_common.match_ops_values_with_webui(complete_ops_data, dom_arry_intf): self.logger.info( "Ops vm uves basic view data matched in webui") else: self.logger.error( "Ops vm uves basic data match failed in webui") result = result and False return result # end verify_vm_ops_basic_data_in_webui def verify_dashboard_details(self): self.logger.info("Verifying dashboard details...") self.logger.debug(self.dash) if not self.webui_common.click_monitor_dashboard(): result = result and False dashboard_node_details = self.browser.find_element_by_id( 'topStats').find_elements_by_class_name('infobox-data-number') dashboard_data_details = self.browser.find_element_by_id( 'sparkLineStats').find_elements_by_class_name('infobox-data-number') dashboard_system_details = self.browser.find_element_by_id( 'system-info-stat').find_elements_by_tag_name('li') servers_ver = self.webui_common.find_element( self.browser, ['system-info-stat', 'value'], ['id', 'class'], [1]) servers = servers_ver[0].text version = servers_ver[1].text dom_data = [] dom_data.append( {'key': 'vrouters', 'value': dashboard_node_details[0].text}) dom_data.append( {'key': 'control_nodes', 'value': dashboard_node_details[1].text}) dom_data.append( {'key': 'analytics_nodes', 'value': dashboard_node_details[2].text}) dom_data.append( {'key': 'config_nodes', 'value': dashboard_node_details[3].text}) dom_data.append( {'key': 'instances', 'value': dashboard_data_details[0].text}) dom_data.append( {'key': 'interfaces', 'value': dashboard_data_details[1].text}) dom_data.append( {'key': 'virtual_networks', 'value': dashboard_data_details[2].text}) dom_data.append({'key': dashboard_system_details[0].find_element_by_class_name( 'key').text, 'value': 
dashboard_system_details[0].find_element_by_class_name('value').text}) dom_data.append({'key': dashboard_system_details[1].find_element_by_class_name( 'key').text, 'value': dashboard_system_details[1].find_element_by_class_name('value').text}) ops_servers = str(len(self.webui_common.get_config_nodes_list_ops())) ops_version = self.webui_common.get_version() self.webui_common.append_to_list( dom_data, [('servers', servers), ('version', version)]) ops_dashborad_data = [] if not self.webui_common.click_configure_networks(): result = result and False rows = self.webui_common.get_rows() vrouter_total_vm = str(len(self.webui_common.get_vm_list_ops())) total_vrouters = str(len(self.webui_common.get_vrouters_list_ops())) total_control_nodes = str( len(self.webui_common.get_bgp_routers_list_ops())) total_analytics_nodes = str( len(self.webui_common.get_collectors_list_ops())) total_config_nodes = str( len(self.webui_common.get_config_nodes_list_ops())) vrouters_list_ops = self.webui_common.get_vrouters_list_ops() interface_count = 0 vrouter_total_vn = 0 for index in range(len(vrouters_list_ops)): vrouters_ops_data = self.webui_common.get_details( vrouters_list_ops[index]['href']) if vrouters_ops_data.get('VrouterAgent').get('total_interface_count'): interface_count = interface_count + \ vrouters_ops_data.get('VrouterAgent').get( 'total_interface_count') if vrouters_ops_data.get('VrouterAgent').get('connected_networks'): vrouter_total_vn = vrouter_total_vn + \ (len(vrouters_ops_data.get('VrouterAgent') .get('connected_networks'))) ops_dashborad_data.append({'key': 'vrouters', 'value': total_vrouters}) ops_dashborad_data.append( {'key': 'control_nodes', 'value': total_control_nodes}) ops_dashborad_data.append( {'key': 'analytics_nodes', 'value': total_analytics_nodes}) ops_dashborad_data.append( {'key': 'config_nodes', 'value': total_config_nodes}) ops_dashborad_data.append( {'key': 'instances', 'value': vrouter_total_vm}) ops_dashborad_data.append( {'key': 'interfaces', 
'value': str(interface_count)}) ops_dashborad_data.append( {'key': 'virtual_networks', 'value': str(vrouter_total_vn)}) self.webui_common.append_to_list( ops_dashborad_data, [('servers', ops_servers), ('version', ops_version)]) result = True if self.webui_common.match_ops_with_webui(ops_dashborad_data, dom_data): self.logger.info("Monitor dashborad details matched") else: self.logger.error("Monitor dashborad details not matched") result = result and False return result # end verify_dashboard_details_in_webui def verify_vn_ops_basic_data(self): self.logger.info("Verifying VN basic ops-data in Webui...") self.logger.debug(self.dash) error = 0 if not self.webui_common.click_monitor_networks(): result = result and False rows = self.webui_common.get_rows() vn_list_ops = self.webui_common.get_vn_list_ops() for k in range(len(vn_list_ops)): ops_fq_name = vn_list_ops[k]['name'] if not self.webui_common.click_monitor_networks(): result = result and False rows = self.webui_common.get_rows() self.logger.info( "Vn fq_name %s exists in op server..checking if exists in webui as well" % (ops_fq_name)) for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[1].text == ops_fq_name: self.logger.info( "Vn fq_name %s matched in webui..going to match basic view details..." % (ops_fq_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 vn_fq_name = rows[i].find_elements_by_class_name( 'slick-cell')[1].text break if not match_flag: self.logger.error( "Vn fq_name exists in opserver but %s not found in webui..." 
% (ops_fq_name)) self.logger.debug(self.dash) else: self.webui_common.click_monitor_networks_basic(match_index) self.logger.info( "Click and retrieve basic view details in webui for VN fq_name %s " % (ops_fq_name)) # get vn basic details excluding basic interface details dom_arry_basic = self.webui_common.get_vm_basic_view() len_dom_arry_basic = len(dom_arry_basic) elements = self.browser.find_element_by_xpath( "//*[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid') len_elements = len(elements) vn_ops_data = self.webui_common.get_details( vn_list_ops[k]['href']) complete_ops_data = [] ops_data_ingress = {'key': 'ingress_flow_count', 'value': str(0)} ops_data_egress = {'key': 'egress_flow_count', 'value': str(0)} ops_data_acl_rules = {'key': 'total_acl_rules', 'value': str(0)} vn_name = ops_fq_name.split(':')[2] ops_data_interfaces_count = { 'key': 'interface_list_count', 'value': str(0)} if vn_ops_data.has_key('UveVirtualNetworkAgent'): # creating a list of basic view items retrieved from # opserver ops_data_basic = vn_ops_data.get('UveVirtualNetworkAgent') if ops_data_basic.get('ingress_flow_count'): ops_data_ingress = {'key': 'ingress_flow_count', 'value': ops_data_basic.get('ingress_flow_count')} if ops_data_basic.get('egress_flow_count'): ops_data_egress = {'key': 'egress_flow_count', 'value': ops_data_basic.get('egress_flow_count')} if ops_data_basic.get('total_acl_rules'): ops_data_acl_rules = { 'key': 'total_acl_rules', 'value': ops_data_basic.get('total_acl_rules')} if ops_data_basic.get('interface_list'): ops_data_interfaces_count = { 'key': 'interface_list_count', 'value': len(ops_data_basic.get('interface_list'))} if ops_data_basic.get('vrf_stats_list'): vrf_stats_list = ops_data_basic['vrf_stats_list'] vrf_stats_list_new = [vrf['name'] for vrf in vrf_stats_list] vrf_list_joined = ','.join(vrf_stats_list_new) ops_data_vrf = {'key': 'vrf_stats_list', 'value': vrf_list_joined} complete_ops_data.append(ops_data_vrf) if 
ops_data_basic.get('acl'): ops_data_acl = {'key': 'acl', 'value': ops_data_basic.get('acl')} complete_ops_data.append(ops_data_acl) if ops_data_basic.get('virtualmachine_list'): ops_data_instances = {'key': 'virtualmachine_list', 'value': ', '.join( ops_data_basic.get('virtualmachine_list'))} complete_ops_data.append(ops_data_instances) complete_ops_data.extend( [ops_data_ingress, ops_data_egress, ops_data_acl_rules, ops_data_interfaces_count]) if ops_fq_name.find('__link_local__') != -1 or ops_fq_name.find('default-virtual-network') != -1 or ops_fq_name.find('ip-fabric') != -1: for i, item in enumerate(complete_ops_data): if complete_ops_data[i]['key'] == 'vrf_stats_list': del complete_ops_data[i] if vn_ops_data.has_key('UveVirtualNetworkConfig'): ops_data_basic = vn_ops_data.get('UveVirtualNetworkConfig') if ops_data_basic.get('attached_policies'): ops_data_policies = ops_data_basic.get( 'attached_policies') if ops_data_policies: pol_name_list = [pol['vnp_name'] for pol in ops_data_policies] pol_list_joined = ', '.join(pol_name_list) ops_data_policies = { 'key': 'attached_policies', 'value': pol_list_joined} complete_ops_data.extend([ops_data_policies]) for t in range(len(complete_ops_data)): if type(complete_ops_data[t]['value']) is list: for m in range(len(complete_ops_data[t]['value'])): complete_ops_data[t]['value'][m] = str( complete_ops_data[t]['value'][m]) elif type(complete_ops_data[t]['value']) is unicode: complete_ops_data[t]['value'] = str( complete_ops_data[t]['value']) else: complete_ops_data[t]['value'] = str( complete_ops_data[t]['value']) if self.webui_common.match_ops_values_with_webui(complete_ops_data, dom_arry_basic): self.logger.info( "Ops uves virutal networks basic view data matched in webui") else: self.logger.error( "Ops uves virutal networks basic view data match failed in webui") error = 1 return not error # end verify_vn_ops_basic_data_in_webui def verify_config_nodes_ops_advance_data(self): self.logger.info( "Verifying config_nodes 
ops-data in Webui monitor->infra->Config Nodes->details(advance view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_config_nodes(): result = result and False rows = self.webui_common.get_rows() config_nodes_list_ops = self.webui_common.get_config_nodes_list_ops() result = True for n in range(len(config_nodes_list_ops)): ops_config_node_name = config_nodes_list_ops[n]['name'] self.logger.info( "Config node host name %s exists in op server..checking if exists in webui as well" % (ops_config_node_name)) if not self.webui_common.click_monitor_config_nodes(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_config_node_name: self.logger.info( "Config node name %s found in webui..going to match advance view details..." % (ops_config_node_name)) match_flag = 1 match_index = i break if not match_flag: self.logger.error( "Config node name %s did not match in webui...not found in webui" % (ops_config_node_name)) self.logger.debug(self.dash) else: self.logger.info( "Click and retrieve config nodes advance view details in webui for config node-name %s " % (ops_config_node_name)) self.webui_common.click_monitor_config_nodes_advance( match_index) config_nodes_ops_data = self.webui_common.get_details( config_nodes_list_ops[n]['href']) dom_arry = self.webui_common.parse_advanced_view() dom_arry_str = self.webui_common.get_advanced_view_str() dom_arry_num = self.webui_common.get_advanced_view_num() dom_arry_num_new = [] for item in dom_arry_num: dom_arry_num_new.append( {'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']}) dom_arry_num = dom_arry_num_new merged_arry = dom_arry + dom_arry_str + dom_arry_num if config_nodes_ops_data.has_key('ModuleCpuState'): ops_data = config_nodes_ops_data['ModuleCpuState'] history_del_list = [ 'api_server_mem_virt', 'service_monitor_cpu_share', 'schema_xmer_mem_virt', 
'service_monitor_mem_virt', 'api_server_cpu_share', 'schema_xmer_cpu_share'] for item in history_del_list: if ops_data.get(item): for element in ops_data.get(item): if element.get('history-10'): del element['history-10'] if element.get('s-3600-topvals'): del element['s-3600-topvals'] modified_ops_data = [] self.webui_common.extract_keyvalue( ops_data, modified_ops_data) complete_ops_data = modified_ops_data for k in range(len(complete_ops_data)): if type(complete_ops_data[k]['value']) is list: for m in range(len(complete_ops_data[k]['value'])): complete_ops_data[k]['value'][m] = str( complete_ops_data[k]['value'][m]) elif type(complete_ops_data[k]['value']) is unicode: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) else: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry): self.logger.info( "Ops uves config nodes advance view data matched in webui") else: self.logger.error( "Ops uves config nodes advance view data match failed in webui") result = result and False return result # end verify_config_nodes_ops_advance_data_in_webui def verify_vn_ops_advance_data(self): self.logger.info( "Verifying VN advance ops-data in Webui monitor->Networking->Networks Summary(basic view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_networks(): result = result and False rows = self.webui_common.get_rows() vn_list_ops = self.webui_common.get_vn_list_ops() result = True for n in range(len(vn_list_ops)): ops_fqname = vn_list_ops[n]['name'] self.logger.info( "Vn fq name %s exists in op server..checking if exists in webui as well" % (ops_fqname)) if not self.webui_common.click_monitor_networks(): result = result and False rows = self.webui_common.get_rows() for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[1].text == ops_fqname: self.logger.info( "Vn fq name %s found in webui..going to match advance view 
details..." % (ops_fqname)) self.logger.debug(self.dash) match_index = i match_flag = 1 break if not match_flag: self.logger.error( "Vn fqname %s did not match in webui...not found in webui" % (ops_fqname)) self.logger.debug(self.dash) else: self.logger.info( "Click and retrieve advance view details in webui for fqname %s " % (ops_fqname)) self.webui_common.click_monitor_networks_advance(match_index) vn_ops_data = self.webui_common.get_details( vn_list_ops[n]['href']) dom_arry = self.webui_common.parse_advanced_view() dom_arry_str = self.webui_common.get_advanced_view_str() merged_arry = dom_arry + dom_arry_str if vn_ops_data.has_key('UveVirtualNetworkConfig'): ops_data = vn_ops_data['UveVirtualNetworkConfig'] modified_ops_data = [] self.webui_common.extract_keyvalue( ops_data, modified_ops_data) if vn_ops_data.has_key('UveVirtualNetworkAgent'): ops_data_agent = vn_ops_data['UveVirtualNetworkAgent'] if 'udp_sport_bitmap' in ops_data_agent: del ops_data_agent['udp_sport_bitmap'] if 'udp_dport_bitmap' in ops_data_agent: del ops_data_agent['udp_dport_bitmap'] self.logger.info( "VN details for %s got from ops server and going to match in webui : \n %s \n " % (vn_list_ops[i]['href'], ops_data_agent)) modified_ops_data_agent = [] self.webui_common.extract_keyvalue( ops_data_agent, modified_ops_data_agent) complete_ops_data = modified_ops_data + \ modified_ops_data_agent for k in range(len(complete_ops_data)): if type(complete_ops_data[k]['value']) is list: for m in range(len(complete_ops_data[k]['value'])): complete_ops_data[k]['value'][m] = str( complete_ops_data[k]['value'][m]) elif type(complete_ops_data[k]['value']) is unicode: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) else: complete_ops_data[k]['value'] = str( complete_ops_data[k]['value']) if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry): self.logger.info( "Ops uves virtual networks advance view data matched in webui") else: self.logger.error( "Ops uves virtual 
networks advance view data match failed in webui") result = result and False return result # end verify_vn_ops_advance_data_in_webui def verify_vm_ops_advance_data(self): self.logger.info( "Verifying VM ops-data in Webui monitor->Networking->instances->Instances summary(Advance view)......") self.logger.debug(self.dash) if not self.webui_common.click_monitor_instances(): result = result and False rows = self.webui_common.get_rows() vm_list_ops = self.webui_common.get_vm_list_ops() result = True for k in range(len(vm_list_ops)): ops_uuid = vm_list_ops[k]['name'] if not self.webui_common.click_monitor_instances(): result = result and False rows = self.webui_common.get_rows() self.logger.info( "Vm uuid %s exists in op server..checking if exists in webui as well" % (ops_uuid)) for i in range(len(rows)): match_flag = 0 if rows[i].find_elements_by_class_name('slick-cell')[2].text == ops_uuid: self.logger.info( "Vm uuid %s matched in webui..going to match advance view details..." % (ops_uuid)) self.logger.debug(self.dash) match_index = i match_flag = 1 break if not match_flag: self.logger.error( "Uuid exists in opserver but uuid %s not found in webui..." 
% (ops_uuid)) self.logger.debug(self.dash) else: self.webui_common.click_monitor_instances_advance(match_index) self.logger.info( "Click and retrieve advance view details in webui for uuid %s " % (ops_uuid)) dom_arry = self.webui_common.parse_advanced_view() dom_arry_str = [] dom_arry_str = self.webui_common.get_advanced_view_str() merged_arry = dom_arry + dom_arry_str vm_ops_data = self.webui_common.get_details( vm_list_ops[k]['href']) if vm_ops_data.has_key('UveVirtualMachineAgent'): ops_data = vm_ops_data['UveVirtualMachineAgent'] modified_ops_data = [] self.webui_common.extract_keyvalue( ops_data, modified_ops_data) complete_ops_data = modified_ops_data for t in range(len(complete_ops_data)): if type(complete_ops_data[t]['value']) is list: for m in range(len(complete_ops_data[t]['value'])): complete_ops_data[t]['value'][m] = str( complete_ops_data[t]['value'][m]) elif type(complete_ops_data[t]['value']) is unicode: complete_ops_data[t]['value'] = str( complete_ops_data[t]['value']) else: complete_ops_data[t]['value'] = str( complete_ops_data[t]['value']) if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry): self.logger.info( "Ops vm uves advance view data matched in webui") else: self.logger.error( "Ops vm uves advance data match failed in webui") result = result and False return result # end verify_vm_ops_advance_data_in_webui def verify_vn_api_data(self): self.logger.info( "Verifying VN api details in Webui config networks...") self.logger.debug(self.dash) result = True vn_list_api = self.webui_common.get_vn_list_api() for vns in range(len(vn_list_api['virtual-networks']) - 3): pol_list = [] pol_list1 = [] ip_block_list = [] ip_block = [] pool_list = [] floating_pool = [] route_target_list = [] host_route_main = [] api_fq_name = vn_list_api['virtual-networks'][vns]['fq_name'][2] self.webui_common.click_configure_networks() rows = self.webui_common.get_rows() self.logger.info( "Vn fq_name %s exists in api server..checking if exists in 
webui as well" % (api_fq_name)) for i in range(len(rows)): match_flag = 0 dom_arry_basic = [] if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name: self.logger.info( "Vn fq_name %s matched in webui..going to match basic view details..." % (api_fq_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 vn_fq_name = rows[ i].find_elements_by_tag_name('div')[2].text policies = rows[i].find_elements_by_tag_name( 'div')[3].text.splitlines() dom_arry_basic.append( {'key': 'Attached Policies', 'value': policies}) dom_arry_basic.append( {'key': 'Network', 'value': rows[i].find_elements_by_tag_name('div')[2].text}) dom_arry_basic.append( {'key': 'ip_blocks_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[4].text.split()}) break if not match_flag: self.logger.error( "Vn fq_name exists in apiserver but %s not found in webui..." % (api_fq_name)) self.logger.debug(self.dash) else: self.webui_common.click_configure_networks_basic(match_index) rows = self.webui_common.get_rows() self.logger.info( "Click and retrieve basic view details in webui for VN fq_name %s " % (api_fq_name)) rows_detail = rows[ match_index + 1].find_element_by_class_name('slick-row-detail-container').find_element_by_class_name('row-fluid').find_elements_by_tag_name('label') for detail in range(len(rows_detail)): text1 = rows_detail[detail].text if text1 == 'Attached Network Policies': poli = str(rows_detail[detail].find_element_by_xpath('..').text).replace( text1, '').strip().split() dom_arry_basic.append( {'key': str(rows_detail[detail].text), 'value': poli}) elif text1 == 'IP Blocks' or text1 == 'Host Routes': dom_arry_basic.append({'key': str(text1), 'value': str( rows_detail[detail].find_element_by_xpath('..').text).replace(text1, '').strip().splitlines()}) elif text1 == 'Floating IP Pools': pools = rows_detail[detail].find_element_by_xpath( '..').text.replace(text1, '').strip().splitlines() for pool in range(len(pools)): pool_list.append(pools[pool].split()[0]) 
dom_arry_basic.append( {'key': text1, 'value': pool_list}) elif text1 == 'Route Targets': dom_arry_basic.append({'key': str(text1), 'value': str( rows_detail[detail].find_element_by_xpath('..').text).replace(text1, '').strip().split(', ')}) else: dom_arry_basic.append({'key': str(text1), 'value': str( rows_detail[detail].find_element_by_xpath('..').text).replace(text1, '').strip()}) vn_api_data = self.webui_common.get_details( vn_list_api['virtual-networks'][vns]['href']) complete_api_data = [] if vn_api_data.has_key('virtual-network'): api_data_basic = vn_api_data.get('virtual-network') if api_data_basic.get('name'): complete_api_data.append( {'key': 'Network', 'value': api_data_basic['name']}) if api_data_basic.has_key('network_policy_refs'): for ass_pol in range(len(api_data_basic['network_policy_refs'])): pol_list.append( str(api_data_basic['network_policy_refs'][ass_pol]['to'][2])) if len(pol_list) > 2: for item in range(len(policies)): for items in range(len(pol_list)): if policies[item] == pol_list[items]: pol_list1.append(pol_list[items]) pol_string = '(' + str(len(pol_list) - 2) + ' more)' pol_list1.append(pol_string) else: pol_list1 = policies complete_api_data.append( {'key': 'Attached Network Policies', 'value': pol_list}) complete_api_data.append( {'key': 'Attached Policies', 'value': pol_list1}) if api_data_basic.has_key('network_ipam_refs'): for ip in range(len(api_data_basic['network_ipam_refs'])): dom_arry_basic.append( {'key': 'Attached Policies', 'value': rows[i].find_elements_by_tag_name('div')[3].text.split()}) if(api_data_basic['network_ipam_refs'][ip]['to'][2]) == 'default-network-ipam': for ip_sub in range(len(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'])): ip_block_list.append(str(api_data_basic['network_ipam_refs'][ip]['to'][0] + ':' + api_data_basic['network_ipam_refs'][ip]['to'][1] + ':' + api_data_basic['network_ipam_refs'][ip]['to'][2]) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'] 
[ip_sub]['subnet']['ip_prefix']) + '/' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub]['subnet']['ip_prefix_len']) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub]['default_gateway'])) else: for ip_sub1 in range(len(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'])): ip_block_list.append(str(api_data_basic['network_ipam_refs'][ip]['to'][2]) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub1]['subnet']['ip_prefix']) + '/' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub1]['subnet']['ip_prefix_len']) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub1]['default_gateway'])) if len(ip_block_list) > 2: for ips in range(2): ip_block.append(ip_block_list[ips].split()[1]) ip_string = '(' + \ str(len(ip_block_list) - 2) + ' more)' ip_block.append(ip_string) else: for ips in range(len(ip_block_list)): ip_block.append(ip_block_list[ips].split()[1]) complete_api_data.append( {'key': 'IP Blocks', 'value': ip_block_list}) complete_api_data.append( {'key': 'ip_blocks_grid_row', 'value': ip_block}) if api_data_basic.has_key('route_target_list'): if api_data_basic['route_target_list'].has_key('route_target'): for route in range(len(api_data_basic['route_target_list']['route_target'])): route_target_list.append( str(api_data_basic['route_target_list']['route_target'][route]).strip('target:')) complete_api_data.append( {'key': 'Route Targets', 'value': route_target_list}) if api_data_basic.has_key('floating_ip_pools'): for fip in range(len(api_data_basic['floating_ip_pools'])): floating_pool.append( str(api_data_basic['floating_ip_pools'][fip]['to'][3])) complete_api_data.append( {'key': 'Floating IP Pools', 'value': floating_pool}) if api_data_basic.has_key('network_ipam_refs'): for ipams in range(len(api_data_basic['network_ipam_refs'])): if api_data_basic['network_ipam_refs'][ipams]['attr'].get('host_routes'): 
if api_data_basic['network_ipam_refs'][ipams]['to'][2] == 'default-network-ipam': host_route_sub = [] for host_route in range(len(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'])): host_route_sub.append( str(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'][host_route]['prefix'])) host_route_string = ",".join(host_route_sub) host_route_main.append(str(api_data_basic['network_ipam_refs'][ipams]['to'][ 0] + ':' + api_data_basic['network_ipam_refs'][ipams]['to'][1] + ':' + api_data_basic['network_ipam_refs'][ipams]['to'][2]) + ' ' + host_route_string) else: host_route_sub = [] for host_route1 in range(len(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'])): host_route_sub.append( str(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'][host_route1]['prefix'])) host_route_string = ", ".join(host_route_sub) host_route_main.append( str(api_data_basic['network_ipam_refs'][ipams]['to'][2]) + ' ' + host_route_string) if(len(host_route_main) > 0): complete_api_data.append( {'key': 'Host Routes', 'value': host_route_main}) if api_data_basic['virtual_network_properties'].has_key('forwarding_mode'): forwarding_mode = api_data_basic[ 'virtual_network_properties']['forwarding_mode'] if forwarding_mode == 'l2': forwarding_mode = forwarding_mode.title() + ' Only' else: forwarding_mode = 'L2 and L3' complete_api_data.append( {'key': 'Forwarding Mode', 'value': forwarding_mode}) if api_data_basic['virtual_network_properties'].has_key('vxlan_network_identifier'): complete_api_data.append({'key': 'VxLAN Identifier', 'value': str( api_data_basic['virtual_network_properties']['vxlan_network_identifier']).replace('None', 'Automatic')}) if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic): self.logger.info( "Api virutal networks details matched in webui config networks") else: self.logger.error( "Api virutal networks details not match in webui config networks") result 
= result and False return result # end verify_vn_api_basic_data_in_webui def verify_service_template_api_basic_data(self): self.logger.info("Verifying service template api-data in Webui...") self.logger.debug(self.dash) result = True service_temp_list_api = self.webui_common.get_service_template_list_api( ) for temp in range(len(service_temp_list_api['service-templates']) - 1): interface_list = [] api_fq_name = service_temp_list_api[ 'service-templates'][temp + 1]['fq_name'][1] self.webui_common.click_configure_service_template() rows = self.webui_common.get_rows() self.logger.info( "Service template fq_name %s exists in api server..checking if exists in webui as well" % (api_fq_name)) for i in range(len(rows)): dom_arry_basic = [] match_flag = 0 j = 0 if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name: self.logger.info( "Service template fq_name %s matched in webui..going to match basic view details..." % (api_fq_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 dom_arry_basic.append( {'key': 'Name_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[2].text}) dom_arry_basic.append( {'key': 'Mode_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[3].text}) dom_arry_basic.append( {'key': 'Type_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[4].text}) dom_arry_basic.append( {'key': 'Scaling_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[5].text}) dom_arry_basic.append( {'key': 'Interface_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[6].text}) dom_arry_basic.append( {'key': 'Image_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[7].text}) dom_arry_basic.append( {'key': 'Flavor_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[8].text}) break if not match_flag: self.logger.error( "Service template fq_name exists in apiserver but %s not found in webui..." 
% (api_fq_name)) self.logger.debug(self.dash) else: self.webui_common.click_configure_service_template_basic( match_index) rows = self.webui_common.get_rows() self.logger.info( "Click and retrieve basic view details in webui for service templatefq_name %s " % (api_fq_name)) rows_detail = rows[ match_index + 1].find_element_by_class_name('slick-row-detail-container').find_element_by_class_name('row-fluid').find_elements_by_class_name('row-fluid') for detail in range(len(rows_detail)): text1 = rows_detail[ detail].find_element_by_tag_name('label').text if text1 == 'Interface Type': dom_arry_basic.append( {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span10').text}) else: dom_arry_basic.append( {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span10').text}) service_temp_api_data = self.webui_common.get_details( service_temp_list_api['service-templates'][temp + 1]['href']) complete_api_data = [] if service_temp_api_data.has_key('service-template'): api_data_basic = service_temp_api_data.get( 'service-template') if api_data_basic.has_key('fq_name'): complete_api_data.append( {'key': 'Template', 'value': str(api_data_basic['fq_name'][1])}) complete_api_data.append( {'key': 'Name_grid_row', 'value': str(api_data_basic['fq_name'][1])}) if api_data_basic['service_template_properties'].has_key('service_mode'): complete_api_data.append({'key': 'Mode', 'value': str( api_data_basic['service_template_properties']['service_mode']).capitalize()}) complete_api_data.append({'key': 'Mode_grid_row', 'value': str( api_data_basic['service_template_properties']['service_mode']).capitalize()}) if api_data_basic['service_template_properties'].has_key('service_type'): complete_api_data.append({'key': 'Type', 'value': str( api_data_basic['service_template_properties']['service_type']).title()}) complete_api_data.append({'key': 'Type_grid_row', 'value': str( api_data_basic['service_template_properties']['service_type']).title()}) if 
api_data_basic['service_template_properties'].has_key('service_scaling'): if api_data_basic['service_template_properties']['service_scaling'] == True: complete_api_data.append({'key': 'Scaling', 'value': str( api_data_basic['service_template_properties']['service_scaling']).replace('True', 'Enabled')}) complete_api_data.append({'key': 'Scaling_grid_row', 'value': str( api_data_basic['service_template_properties']['service_scaling']).replace('True', 'Enabled')}) else: complete_api_data.append({'key': 'Scaling', 'value': str( api_data_basic['service_template_properties']['service_scaling']).replace('False', 'Disabled')}) complete_api_data.append({'key': 'Scaling_grid_row', 'value': str( api_data_basic['service_template_properties']['service_scaling']).replace('False', 'Disabled')}) if api_data_basic['service_template_properties'].has_key('interface_type'): for interface in range(len(api_data_basic['service_template_properties']['interface_type'])): if api_data_basic['service_template_properties']['interface_type'][interface]['shared_ip'] == True and api_data_basic['service_template_properties']['interface_type'][interface]['static_route_enable'] == True: interface_type = api_data_basic['service_template_properties']['interface_type'][ interface]['service_interface_type'].title() + '(' + 'Shared IP' + ', ' + 'Static Route' + ')' elif api_data_basic['service_template_properties']['interface_type'][interface]['shared_ip'] == False and api_data_basic['service_template_properties']['interface_type'][interface]['static_route_enable'] == True: interface_type = api_data_basic['service_template_properties']['interface_type'][ interface]['service_interface_type'].title() + '(' + 'Static Route' + ')' elif api_data_basic['service_template_properties']['interface_type'][interface]['shared_ip'] == True and api_data_basic['service_template_properties']['interface_type'][interface]['static_route_enable'] == False: interface_type = 
api_data_basic['service_template_properties']['interface_type'][ interface]['service_interface_type'].title() + '(' + 'Shared IP' + ')' else: interface_type = api_data_basic['service_template_properties'][ 'interface_type'][interface]['service_interface_type'].title() interface_list.append(interface_type) interface_string = ", ".join(interface_list) complete_api_data.append( {'key': 'Interface Type', 'value': interface_string}) complete_api_data.append( {'key': 'Interface_grid_row', 'value': interface_string}) if api_data_basic['service_template_properties'].has_key('image_name'): complete_api_data.append( {'key': 'Image', 'value': str(api_data_basic['service_template_properties']['image_name'])}) complete_api_data.append({'key': 'Image_grid_row', 'value': str( api_data_basic['service_template_properties']['image_name'])}) if api_data_basic.has_key('service_instance_back_refs'): service_instances = api_data_basic[ 'service_instance_back_refs'] si_text = '' for index, si in enumerate(service_instances): if index == 0: si_text = si['to'][1] + ':' + si['to'][2] else: si_text = si_text + ', ' + \ si['to'][1] + ':' + si['to'][2] complete_api_data.append( {'key': 'Instances', 'value': si_text}) else: complete_api_data.append( {'key': 'Instances', 'value': '-'}) if api_data_basic['service_template_properties'].has_key('flavor'): complete_api_data.append( {'key': 'Flavor', 'value': str(api_data_basic['service_template_properties']['flavor'])}) complete_api_data.append({'key': 'Flavor_grid_row', 'value': str( api_data_basic['service_template_properties']['flavor'])}) if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic): self.logger.info( "Api service templates details matched in webui") else: self.logger.error( "Api uves service templates details match failed in webui") result = result and False return result # end verify_service_template_api_basic_data_in_webui def verify_floating_ip_api_data(self): self.logger.info("Verifying FIP api-data in 
Webui...") self.logger.info(self.dash) result = True fip_list_api = self.webui_common.get_fip_list_api() for fips in range(len(fip_list_api['floating-ips'])): api_fq_id = fip_list_api['floating-ips'][fips]['uuid'] self.webui_common.click_configure_fip() project_name = fip_list_api.get('floating-ips')[fips].get('fq_name')[1] self.webui_common.select_project(project_name) rows = self.webui_common.get_rows() self.logger.info( "fip fq_id %s exists in api server..checking if exists in webui as well" % (api_fq_id)) for i in range(len(rows)): match_flag = 0 j = 0 if rows[i].find_elements_by_tag_name('div')[4].text == api_fq_id: self.logger.info( "fip fq_id %s matched in webui..going to match basic view details now" % (api_fq_id)) self.logger.info(self.dash) match_index = i match_flag = 1 dom_arry_basic = [] dom_arry_basic.append( {'key': 'IP Address', 'value': rows[i].find_elements_by_tag_name('div')[1].text}) dom_arry_basic.append( {'key': 'Instance', 'value': rows[i].find_elements_by_tag_name('div')[2].text}) dom_arry_basic.append( {'key': 'Floating IP and Pool', 'value': rows[i].find_elements_by_tag_name('div')[3].text}) dom_arry_basic.append( {'key': 'UUID', 'value': rows[i].find_elements_by_tag_name('div')[4].text}) break if not match_flag: self.logger.error( "fip fq_id exists in apiserver but %s not found in webui..." 
% (api_fq_id)) self.logger.info(self.dash) else: fip_api_data = self.webui_common.get_details( fip_list_api['floating-ips'][fips]['href']) complete_api_data = [] if fip_api_data.has_key('floating-ip'): # creating a list of basic view items retrieved from # opserver api_data_basic = fip_api_data.get('floating-ip') if api_data_basic.get('floating_ip_address'): complete_api_data.append( {'key': 'IP Address', 'value': api_data_basic['floating_ip_address']}) if api_data_basic.get('virtual_machine_interface_refs'): vm_api_data = self.webui_common.get_details( api_data_basic['virtual_machine_interface_refs'][0]['href']) if vm_api_data.has_key('virtual-machine-interface'): if vm_api_data['virtual-machine-interface'].get('virtual_machine_refs'): complete_api_data.append( {'key': 'Instance', 'value': vm_api_data['virtual-machine-interface']['virtual_machine_refs'][0]['to']}) else: complete_api_data.append( {'key': 'Instance', 'value': '-'}) if api_data_basic.get('fq_name'): complete_api_data.append( {'key': 'Floating IP and Pool', 'value': api_data_basic['fq_name'][2] + ':' + api_data_basic['fq_name'][3]}) if api_data_basic.get('fq_name'): complete_api_data.append( {'key': 'UUID', 'value': api_data_basic['fq_name'][4]}) if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic): self.logger.info("api fip data matched in webui") else: self.logger.error("api fip data match failed in webui") result = False return result # end verify_floating_ip_api_data_in_webui def verify_policy_api_data(self): self.logger.info("Verifying policy details in Webui...") self.logger.debug(self.dash) result = True policy_list_api = self.webui_common.get_policy_list_api() for policy in range(len(policy_list_api['network-policys']) - 1): pol_list = [] net_list = [] service_list = [] api_fq_name = policy_list_api[ 'network-policys'][policy]['fq_name'][2] project_name = policy_list_api[ 'network-policys'][policy]['fq_name'][1] self.webui_common.click_configure_policies() 
self.webui_common.select_project(project_name) rows = self.webui_common.get_rows() self.logger.info( "Policy fq_name %s exists in api server..checking if exists in webui as well" % (api_fq_name)) for i in range(len(rows)): dom_arry_basic = [] match_flag = 0 detail = 0 if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name: self.logger.info( "Policy fq_name %s matched in webui..going to match basic view details..." % (api_fq_name)) self.logger.debug(self.dash) match_index = i match_flag = 1 dom_arry_basic.append( {'key': 'Policy', 'value': rows[i].find_elements_by_tag_name('div')[2].text}) net_grid_row_value = rows[i].find_elements_by_tag_name('div')[3].text.splitlines() dom_arry_basic.append({'key':'Associated_Networks_grid_row','value': net_grid_row_value}) dom_arry_basic.append( {'key': 'Rules_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[4].text.splitlines()}) break if not match_flag: self.logger.error( "Policy fq_name exists in apiserver but %s not found in webui..." 
% (api_fq_name)) self.logger.debug(self.dash) else: self.webui_common.click_configure_policies_basic(match_index) rows = self.webui_common.get_rows() self.logger.info( "Click and retrieve basic view details in webui for policy fq_name %s " % (api_fq_name)) rows_detail = rows[ match_index + 1].find_element_by_class_name('slick-row-detail-container').find_element_by_class_name('row-fluid').find_elements_by_class_name('row-fluid') while(detail < len(rows_detail)): text1 = rows_detail[ detail].find_element_by_tag_name('label').text if text1 == 'Associated Networks': dom_arry_basic.append( {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span11').text.split()}) elif text1 == 'Rules': dom_arry_basic.append({'key': str(text1), 'value': rows_detail[ detail].find_element_by_class_name('span11').text.splitlines()}) detail = detail + 2 policy_api_data = self.webui_common.get_details( policy_list_api['network-policys'][policy]['href']) complete_api_data = [] if policy_api_data.has_key('network-policy'): api_data_basic = policy_api_data.get('network-policy') if api_data_basic.has_key('fq_name'): complete_api_data.append( {'key': 'Policy', 'value': api_data_basic['fq_name'][2]}) if api_data_basic.has_key('virtual_network_back_refs'): for net in range(len(api_data_basic['virtual_network_back_refs'])): api_project = api_data_basic[ 'virtual_network_back_refs'][net]['to'][1] if project_name == api_project: fq = api_data_basic[ 'virtual_network_back_refs'][net]['to'][2] else: fq = ':'.join( api_data_basic['virtual_network_back_refs'][net]['to']) net_list.append(fq) complete_api_data.append( {'key': 'Associated Networks', 'value': net_list}) net_list_len = len(net_list) if net_list_len > 2 : net_list_grid_row = net_list[:2] more_string = '(' + str(net_list_len-2) + ' more)' net_list_grid_row.append(more_string) complete_api_data.append({'key':'Associated_Networks_grid_row', 'value':net_list_grid_row}) else: 
complete_api_data.append({'key':'Associated_Networks_grid_row', 'value':net_list}) if api_data_basic.has_key('network_policy_entries'): for rules in range(len(api_data_basic['network_policy_entries']['policy_rule'])): dst_ports = api_data_basic['network_policy_entries'][ 'policy_rule'][rules]['dst_ports'] src_ports = api_data_basic['network_policy_entries'][ 'policy_rule'][rules]['src_ports'] source_port = [] desti_port = [] if dst_ports[0]['start_port'] == -1: desti_port = 'any' else: for item in dst_ports: if item['start_port'] == item['end_port']: desti_port.append(item['start_port']) else: port_range = str(item['start_port']) + \ '-' + \ str(item['end_port']) desti_port.append(port_range) if type(desti_port) is list: desti_port = str(desti_port) desti_port = '[ ' + desti_port[1:-1] + ' ]' if src_ports[0]['start_port'] == -1: source_port = 'any' else: for item in src_ports: if item['start_port'] == item['end_port']: source_port.append(item['start_port']) else: port_range = str(item['start_port']) + \ '-' + \ str(item['end_port']) source_port.append(port_range) if type(source_port) is list: source_port = str(source_port) source_port = '[ ' + source_port[1:-1] + ' ]' api_src_vnet = api_data_basic['network_policy_entries'][ 'policy_rule'][rules]['src_addresses'][0]['virtual_network'] api_dst_vnet = api_data_basic['network_policy_entries'][ 'policy_rule'][rules]['dst_addresses'][0]['virtual_network'] api_vnet_match_list = [ 'default-domain:default-project:default-virtual-network', 'any', 'default-domain:default-project:__link_local__', 'default-domain:default-project:ip-fabric'] if api_src_vnet in api_vnet_match_list: source_network = api_src_vnet else: source_network = api_src_vnet.split(':')[2] if api_dst_vnet in api_vnet_match_list: dest_network = api_dst_vnet else: dest_network = api_dst_vnet.split(':')[2] action_list = api_data_basic['network_policy_entries'][ 'policy_rule'][rules]['action_list'] protocol = api_data_basic['network_policy_entries'][ 
    def verify_ipam_api_data(self):
        """Verify IPAM details shown in the WebUI against the API server.

        For every IPAM returned by the API server's /network-ipams list,
        locate its row in the configure->IPAM grid, expand the basic
        (detail) view, and compare grid + detail values (name, DNS mode,
        NTP/DHCP options, associated IP blocks) against the API payload.

        Returns:
            bool: True if every IPAM matched; False if any comparison failed.
        """
        self.logger.info("Verifying ipam details in Webui...")
        self.logger.debug(self.dash)
        result = True
        ipam_list_api = self.webui_common.get_ipam_list_api()
        for ipam in range(len(ipam_list_api['network-ipams'])):
            net_list = []
            # fq_name layout is [domain, project, name].
            api_fq_name = ipam_list_api['network-ipams'][ipam]['fq_name'][2]
            project_name = ipam_list_api['network-ipams'][ipam]['fq_name'][1]
            self.webui_common.click_configure_ipam()
            self.webui_common.select_project(project_name)
            rows = self.webui_common.get_rows()
            self.logger.info(
                "Ipam fq_name %s exists in api server..checking if exists in webui as well" %
                (api_fq_name))
            for i in range(len(rows)):
                match_flag = 0
                j = 0  # NOTE(review): unused local
                dom_arry_basic = []
                # Grid column 2 holds the IPAM name.
                if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name:
                    self.logger.info(
                        "Ipam fq_name %s matched in webui..going to match basic view details..." %
                        (api_fq_name))
                    self.logger.debug(self.dash)
                    match_index = i
                    match_flag = 1
                    ipam_fq_name = rows[
                        i].find_elements_by_tag_name('div')[2].text  # NOTE(review): unused local
                    dom_arry_basic.append(
                        {'key': 'Name_grid_row',
                         'value': rows[i].find_elements_by_tag_name('div')[2].text})
                    # The IP column can be multi-line; flatten it to one line
                    # so it compares equal to the API-side join.
                    ip_grid_row_value = ' '.join(
                        rows[i].find_elements_by_tag_name('div')[3].text.splitlines())
                    dom_arry_basic.append(
                        {'key': 'IP_grid_row', 'value': ip_grid_row_value})
                    dom_arry_basic.append(
                        {'key': 'DNS_grid_row',
                         'value': rows[i].find_elements_by_tag_name('div')[4].text})
                    dom_arry_basic.append(
                        {'key': 'NTP_grid_row',
                         'value': rows[i].find_elements_by_tag_name('div')[5].text})
                    break
            if not match_flag:
                self.logger.error(
                    "Ipam fq_name exists in apiserver but %s not found in webui..." %
                    (api_fq_name))
                self.logger.debug(self.dash)
            else:
                # Expand the matched row to get the basic (detail) view.
                self.webui_common.click_configure_ipam_basic(match_index)
                rows = self.webui_common.get_rows()
                self.logger.info(
                    "Click and retrieve basic view details in webui for ipam fq_name %s " %
                    (api_fq_name))
                rows_detail = rows[
                    match_index + 1].find_element_by_class_name(
                    'slick-row-detail-container').find_element_by_class_name(
                    'row-fluid').find_elements_by_class_name('row-fluid')
                for detail in range(len(rows_detail)):
                    text1 = rows_detail[
                        detail].find_element_by_tag_name('label').text
                    # NOTE(review): both branches below are identical; the
                    # 'IP Blocks' special case was presumably meant to differ.
                    if text1 == 'IP Blocks':
                        dom_arry_basic.append(
                            {'key': str(text1),
                             'value': rows_detail[detail].find_element_by_class_name('span10').text})
                    else:
                        dom_arry_basic.append(
                            {'key': str(text1),
                             'value': rows_detail[detail].find_element_by_class_name('span10').text})
                ipam_api_data = self.webui_common.get_details(
                    ipam_list_api['network-ipams'][ipam]['href'])
                complete_api_data = []
                if ipam_api_data.has_key('network-ipam'):
                    api_data_basic = ipam_api_data.get('network-ipam')
                    if api_data_basic.has_key('fq_name'):
                        complete_api_data.append(
                            {'key': 'IPAM Name',
                             'value': str(api_data_basic['fq_name'][2])})
                        complete_api_data.append(
                            {'key': 'Name_grid_row',
                             'value': str(api_data_basic['fq_name'][2])})
                    # Build the expected 'DNS Server' strings the UI renders
                    # for each ipam_dns_method value.
                    if api_data_basic.get('network_ipam_mgmt'):
                        if api_data_basic['network_ipam_mgmt'].get('ipam_dns_method'):
                            if api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'default-dns-server':
                                complete_api_data.append(
                                    {'key': 'DNS Server', 'value': '-'})
                                complete_api_data.append(
                                    {'key': 'DNS_grid_row', 'value': '-'})
                            elif api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'none':
                                complete_api_data.append(
                                    {'key': 'DNS Server', 'value': 'DNS Mode : None'})
                                complete_api_data.append(
                                    {'key': 'DNS_grid_row', 'value': 'DNS Mode : None'})
                            elif api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'virtual-dns-server':
                                complete_api_data.append(
                                    {'key': 'DNS Server',
                                     'value': 'Virtual DNS:' + ' ' +
                                     api_data_basic['network_ipam_mgmt']['ipam_dns_server']['virtual_dns_server_name']})
                                complete_api_data.append(
                                    {'key': 'DNS_grid_row',
                                     'value': 'Virtual DNS:' + ' ' +
                                     api_data_basic['network_ipam_mgmt']['ipam_dns_server']['virtual_dns_server_name']})
                            elif api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'tenant-dns-server':
                                # e.g. 'Tenant Managed DNS: <first ip>'
                                dns_server_value = str(api_data_basic['network_ipam_mgmt']['ipam_dns_method']).split(
                                    '-')[0].title() + ' ' + 'Managed' + ' ' + 'DNS' + ':' + ' ' + str(
                                    api_data_basic['network_ipam_mgmt']['ipam_dns_server']['tenant_dns_server_address']['ip_address'][0])
                                complete_api_data.append(
                                    {'key': 'DNS Server', 'value': dns_server_value})
                                complete_api_data.append(
                                    {'key': 'DNS_grid_row', 'value': dns_server_value})
                        else:
                            complete_api_data.append(
                                {'key': 'DNS Server', 'value': '-'})
                            complete_api_data.append(
                                {'key': 'DNS_grid_row', 'value': '-'})
                    # NTP server / domain name come from DHCP options 4 / 15.
                    if api_data_basic.get('network_ipam_mgmt'):
                        if api_data_basic['network_ipam_mgmt'].get('dhcp_option_list'):
                            if api_data_basic['network_ipam_mgmt']['dhcp_option_list'].get('dhcp_option'):
                                if len(api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option']) > 1:
                                    # assumes option[0] is NTP and option[1] is
                                    # domain name when both present — TODO confirm
                                    ntp_server_value = str(api_data_basic['network_ipam_mgmt']['dhcp_option_list'][
                                        'dhcp_option'][0]['dhcp_option_value'])
                                    complete_api_data.append({'key': 'Domain Name', 'value': str(
                                        api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][1]['dhcp_option_value'])})
                                    complete_api_data.append(
                                        {'key': 'NTP Server', 'value': ntp_server_value})
                                    complete_api_data.append(
                                        {'key': 'NTP_grid_row', 'value': ntp_server_value})
                                elif api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][0]['dhcp_option_name'] == '4':
                                    ntp_server_value = str(api_data_basic['network_ipam_mgmt']['dhcp_option_list'][
                                        'dhcp_option'][0]['dhcp_option_value'])
                                    complete_api_data.append(
                                        {'key': 'NTP Server', 'value': ntp_server_value})
                                    complete_api_data.append(
                                        {'key': 'NTP_grid_row', 'value': ntp_server_value})
                                elif api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][0]['dhcp_option_name'] == '15':
                                    complete_api_data.append({'key': 'Domain Name', 'value': str(
                                        api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][0]['dhcp_option_value'])})
                        else:
                            complete_api_data.append(
                                {'key': 'NTP Server', 'value': '-'})
                            complete_api_data.append(
                                {'key': 'NTP_grid_row', 'value': '-'})
                            complete_api_data.append(
                                {'key': 'Domain Name', 'value': '-'})
                    else:
                        complete_api_data.append(
                            {'key': 'NTP Server', 'value': '-'})
                        complete_api_data.append(
                            {'key': 'NTP_grid_row', 'value': '-'})
                        complete_api_data.append(
                            {'key': 'Domain Name', 'value': '-'})
                    # 'IP Blocks': one '<vn> - <prefix>/<len>(<gw>)' entry per
                    # subnet of every VN referencing this IPAM; the grid shows
                    # at most two entries plus an '(N more )' suffix.
                    if api_data_basic.has_key('virtual_network_back_refs'):
                        for net in range(len(api_data_basic['virtual_network_back_refs'])):
                            for ip_sub in range(len(api_data_basic['virtual_network_back_refs'][net]['attr']['ipam_subnets'])):
                                api_project = api_data_basic[
                                    'virtual_network_back_refs'][net]['to'][1]
                                if project_name == api_project:
                                    # Same project: UI shows just the VN name.
                                    fq = str(
                                        api_data_basic['virtual_network_back_refs'][net]['to'][2])
                                else:
                                    fq = ':'.join(
                                        api_data_basic['virtual_network_back_refs'][net]['to'])
                                ip_prefix = str(api_data_basic['virtual_network_back_refs'][net][
                                    'attr']['ipam_subnets'][ip_sub]['subnet']['ip_prefix'])
                                ip_prefix_len = str(api_data_basic['virtual_network_back_refs'][net]['attr'][
                                    'ipam_subnets'][ip_sub]['subnet']['ip_prefix_len'])
                                default_gateway = str(api_data_basic['virtual_network_back_refs'][net][
                                    'attr']['ipam_subnets'][ip_sub]['default_gateway'])
                                net_list.append(
                                    fq + ' - ' + ip_prefix + '/' + ip_prefix_len +
                                    '(' + default_gateway + ')')
                        net_string = ' '.join(net_list)
                        complete_api_data.append(
                            {'key': 'IP Blocks', 'value': net_string})
                        if len(net_list) > 2:
                            net_string_grid_row = ' '.join(
                                net_list[:2]) + ' (' + str(len(net_list) - 2) + ' more )'
                        else:
                            net_string_grid_row = net_string
                        complete_api_data.append(
                            {'key': 'IP_grid_row', 'value': net_string_grid_row})
                    if self.webui_common.match_ops_with_webui(complete_api_data,
                                                              dom_arry_basic):
                        self.logger.info(
                            "Api uves ipam basic view data matched in webui")
                    else:
                        self.logger.error(
                            "Api uves ipam basic view data match failed in webui")
                        result = result and False
        return result
    # end verify_ipam_api_data_in_webui
dom_arry_basic): self.logger.info( "Api uves ipam basic view data matched in webui") else: self.logger.error( "Api uves ipam basic view data match failed in webui") result = result and False return result # end verify_ipam_api_data_in_webui def verify_vm_ops_data_in_webui(self, fixture): self.logger.info("Verifying VN %s ops-data in Webui..." % (fixture.vn_name)) vm_list = self.webui_common.get_vm_list_ops() if not self.webui_common.click_monitor_instances(): result = result and False rows = self.webui_common.get_rows() if len(rows) != len(vm_list): self.logger.error(" VM count in webui and opserver not matched ") else: self.logger.info(" VM count in webui and opserver matched") for i in range(len(vm_list)): vm_name = vm_list[i]['name'] # end verify_vm_ops_data_in_webui def verify_vn_ops_data_in_webui(self, fixture): vn_list = self.webui_common.get_vn_list_ops(fixture) self.logger.info( "VN details for %s got from ops server and going to match in webui : " % (vn_list)) if not self.webui_common.click_configure_networks(): result = result and False rows = self.webui_common.get_rows() #rows = self.browser.find_element_by_id('gridVN').find_element_by_tag_name('tbody').find_elements_by_tag_name('tr') ln = len(vn_list) for i in range(ln): vn_name = vn_list[i]['name'] details = self.webui_common.get_vn_details(vn_list[i]['href']) UveVirtualNetworkConfig if details.has_key('UveVirtualNetwokConfig'): total_acl_rules_ops if details.has_key('UveVirtualNetworkAgent'): UveVirtualNetworkAgent_dict = details['UveVirtualNetworkAgent'] egress_flow_count_api = details[ 'UveVirtualNetworkAgent']['egress_flow_count'] ingress_flow_count_api = details[ 'UveVirtualNetworkAgent']['ingress_flow_count'] interface_list_count_api = len( details['UveVirtualNetworkAgent']['interface_list_count']) total_acl_rules_count = details[ 'UveVirtualNetworkAgent']['total_acl_rules'] if self.webui_common.check_element_exists_by_xpath(row[j + 1], "//label[contains(text(), 'Ingress Flows')]"): for n in 
    def verify_vn_ops_data_in_webui(self, fixture):
        """Verify VN ops (UVE) data against the WebUI monitor/config pages.

        NOTE(review): this method is visibly incomplete/broken — several
        names are referenced but never defined (``result``, ``row``, ``j``
        at first use, ``floating_ip_length_api``, ``fip_ui``, ``ip_block``)
        and two statements are bare names with no effect. It would raise
        NameError if executed as-is; left byte-identical pending a rewrite.
        """
        vn_list = self.webui_common.get_vn_list_ops(fixture)
        self.logger.info(
            "VN details for %s got from ops server and going to match in webui : " %
            (vn_list))
        if not self.webui_common.click_configure_networks():
            result = result and False  # NOTE(review): 'result' is unbound here
        rows = self.webui_common.get_rows()
        #rows = self.browser.find_element_by_id('gridVN').find_element_by_tag_name('tbody').find_elements_by_tag_name('tr')
        ln = len(vn_list)
        for i in range(ln):
            vn_name = vn_list[i]['name']
            details = self.webui_common.get_vn_details(vn_list[i]['href'])
            # NOTE(review): bare-name statement, no effect — dead code.
            UveVirtualNetworkConfig
            # NOTE(review): key is misspelled ('UveVirtualNetwokConfig' vs
            # 'UveVirtualNetworkConfig'), so this branch can never match;
            # its body is another no-op bare name.
            if details.has_key('UveVirtualNetwokConfig'):
                total_acl_rules_ops
            if details.has_key('UveVirtualNetworkAgent'):
                # Pull flow/interface/ACL counters from the agent UVE.
                UveVirtualNetworkAgent_dict = details['UveVirtualNetworkAgent']
                egress_flow_count_api = details[
                    'UveVirtualNetworkAgent']['egress_flow_count']
                ingress_flow_count_api = details[
                    'UveVirtualNetworkAgent']['ingress_flow_count']
                interface_list_count_api = len(
                    details['UveVirtualNetworkAgent']['interface_list_count'])
                total_acl_rules_count = details[
                    'UveVirtualNetworkAgent']['total_acl_rules']
            # NOTE(review): 'row', 'j', 'floating_ip_length_api' and 'fip_ui'
            # are all undefined at this point — this stanza is leftover code.
            if self.webui_common.check_element_exists_by_xpath(row[j + 1], "//label[contains(text(), 'Ingress Flows')]"):
                for n in range(floating_ip_length_api):
                    fip_api = details[
                        'virtual-network']['floating_ip_pools'][n]['to']
                    if fip_ui[n] == fip_api[3] + ' (' + fip_api[0] + ':' + fip_api[1] + ')':
                        self.logger.info(" Fip matched ")
            if not self.webui_common.click_monitor_networks():
                result = result and False
            # Locate the VN row in monitor->networks and expand it to read
            # the basic-details counters shown by the UI.
            for j in range(len(rows)):
                rows = self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name(
                    'tbody').find_elements_by_tag_name('tr')
                fq_name = rows[j].find_elements_by_tag_name('a')[1].text
                if(fq_name == vn_list[i]['name']):
                    self.logger.info(" %s VN verified in monitor page " %
                                     (fq_name))
                    rows[j].find_elements_by_tag_name(
                        'td')[0].find_element_by_tag_name('a').click()
                    rows = self.webui_common.get_rows()
                    expanded_row = rows[
                        j + 1].find_element_by_class_name('inline row-fluid position-relative pull-right margin-0-5')
                    expanded_row.find_element_by_class_name(
                        'icon-cog icon-only bigger-110').click()
                    expanded_row.find_elements_by_tag_name('a')[1].click()
                    basicdetails_ui_data = rows[
                        j + 1].find_element_by_xpath("//*[contains(@id, 'basicDetails')]").find_elements_by_class_name("row-fluid")
                    # Each row renders as 'label\nvalue'; take the value part.
                    ingress_ui = basicdetails_ui_data[0].text.split('\n')[1]
                    egress_ui = basicdetails_ui_data[1].text.split('\n')[1]
                    acl_ui = basicdetails_ui_data[2].text.split('\n')[1]
                    intf_ui = basicdetails_ui_data[3].text.split('\n')[1]
                    vrf_ui = basicdetails_ui_data[4].text.split('\n')[1]
                    break
                else:
                    self.logger.error(" %s VN not found in monitor page " %
                                      (fq_name))
            details = self.webui_common.get_vn_details_api(vn_list[i]['href'])
            j = 0
            for j in range(len(rows)):
                if not self.webui_common.click_monitor_networks():
                    result = result and False
                rows = self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name(
                    'tbody').find_elements_by_tag_name('tr')
                if (rows[j].find_elements_by_tag_name('td')[2].get_attribute('innerHTML') ==
                        details['virtual-network']['fq_name'][2]):
                    # NOTE(review): 'ip_block' is never assigned in this method.
                    if rows[j].find_elements_by_tag_name('td')[4].text == ip_block:
                        self.logger.info("Ip blocks verified ")
                    rows[j].find_elements_by_tag_name(
                        'td')[0].find_element_by_tag_name('a').click()
                    rows = self.webui_common.get_rows()
                    ui_ip_block = rows[
                        j + 1].find_element_by_class_name('span11').text.split('\n')[1]
                    if (ui_ip_block.split(' ')[0] == ':'.join(details['virtual-network']['network_ipam_refs'][0]['to']) and
                            ui_ip_block.split(' ')[1] == ip_block and
                            ui_ip_block.split(' ')[2] == details['virtual-network']['network_ipam_refs'][0]['attr']['ipam_subnets'][0]['default_gateway']):
                        self.logger.info(
                            "Ip block and details matched in webui advance view details ")
                    else:
                        self.logger.error("Ip block not matched")
                    forwarding_mode = rows[
                        j + 1].find_elements_by_class_name('span2')[0].text.split('\n')[1]
                    vxlan = rows[
                        j + 1].find_elements_by_class_name('span2')[1].text.split('\n')[1]
                    # Map the API enum to the label the UI displays.
                    network_dict = {'l2_l3': 'L2 and L3'}
                    if network_dict[details['virtual-network']['virtual_network_properties']['forwarding_mode']] == forwarding_mode:
                        self.logger.info(" Forwarding mode matched ")
                    else:
                        self.logger.error("Forwarding mode not matched ")
                    # A null vxlan id is rendered as 'Automatic' by the UI.
                    if details['virtual-network']['virtual_network_properties']['vxlan_network_identifier'] == None:
                        vxlan_api = 'Automatic'
                    else:
                        vxlan_api = details[
                            'virtual-network']['virtual_network_properties']['vxlan_network_identifier']
                    if vxlan_api == vxlan:
                        self.logger.info(" Vxlan matched ")
                    else:
                        self.logger.info(" Vxlan not matched ")
                    rows[j].find_elements_by_tag_name(
                        'td')[0].find_element_by_tag_name('a').click()
                    break
                elif (j == range(len(rows))):
                    # NOTE(review): compares an int to a list — always False;
                    # probably meant j == len(rows) - 1.
                    self.logger.info(
                        "Vn name %s : %s is not matched in webui " %
                        (fixture.vn_name, details['virtual-network']['fq_name'][2]))
    # end verify_vn_ops_data_in_webui
rows[j].find_elements_by_tag_name( 'td')[0].find_element_by_tag_name('a').click() rows = self.webui_common.get_rows() ui_ip_block = rows[ j + 1].find_element_by_class_name('span11').text.split('\n')[1] if (ui_ip_block.split(' ')[0] == ':'.join(details['virtual-network']['network_ipam_refs'][0]['to']) and ui_ip_block.split(' ')[1] == ip_block and ui_ip_block.split(' ')[2] == details['virtual-network']['network_ipam_refs'][0]['attr']['ipam_subnets'][0]['default_gateway']): self.logger.info( "Ip block and details matched in webui advance view details ") else: self.logger.error("Ip block not matched") forwarding_mode = rows[ j + 1].find_elements_by_class_name('span2')[0].text.split('\n')[1] vxlan = rows[ j + 1].find_elements_by_class_name('span2')[1].text.split('\n')[1] network_dict = {'l2_l3': 'L2 and L3'} if network_dict[details['virtual-network']['virtual_network_properties']['forwarding_mode']] == forwarding_mode: self.logger.info(" Forwarding mode matched ") else: self.logger.error("Forwarding mode not matched ") if details['virtual-network']['virtual_network_properties']['vxlan_network_identifier'] == None: vxlan_api = 'Automatic' else: vxlan_api = details[ 'virtual-network']['virtual_network_properties']['vxlan_network_identifier'] if vxlan_api == vxlan: self.logger.info(" Vxlan matched ") else: self.logger.info(" Vxlan not matched ") rows[j].find_elements_by_tag_name( 'td')[0].find_element_by_tag_name('a').click() break elif (j == range(len(rows))): self.logger.info( "Vn name %s : %s is not matched in webui " % (fixture.vn_name, details['virtual-network']['fq_name'][2])) # end verify_vn_ops_data_in_webui def verify_vn_in_webui(self, fixture): self.browser.get_screenshot_as_file('vm_verify.png') if not self.webui_common.click_configure_networks(): result = result and False time.sleep(2) rows = self.webui_common.get_rows() ln = len(rows) vn_flag = 0 for i in range(len(rows)): if (rows[i].find_elements_by_tag_name('div')[2].get_attribute('innerHTML') == 
fixture.vn_name and rows[i].find_elements_by_tag_name( 'div')[4].text == fixture.vn_subnets[0]): vn_flag = 1 rows[i].find_elements_by_tag_name( 'div')[0].find_element_by_tag_name('i').click() rows = self.webui_common.get_rows() ip_blocks = rows[ i + 1].find_element_by_class_name('span11').text.split('\n')[1] if (ip_blocks.split(' ')[0] == ':'.join(fixture.ipam_fq_name) and ip_blocks.split(' ')[1] == fixture.vn_subnets[0]): self.logger.info( "Vn name %s and ip block %s verified in configure page " % (fixture.vn_name, fixture.vn_subnets)) else: self.logger.error( "Ip block details failed to verify in configure page %s " % (fixture.vn_subnets)) self.browser.get_screenshot_as_file( 'Verify_vn_configure_page_ip_block.png') vn_flag = 0 break if not self.webui_common.click_monitor_networks(): result = result and False time.sleep(3) rows = self.webui_common.get_rows() vn_entry_flag = 0 for i in range(len(rows)): fq_name = rows[i].find_elements_by_tag_name( 'div')[1].find_element_by_tag_name('div').text if(fq_name == fixture.ipam_fq_name[0] + ":" + fixture.project_name + ":" + fixture.vn_name): self.logger.info(" %s VN verified in monitor page " % (fq_name)) vn_entry_flag = 1 break if not vn_entry_flag: self.logger.error("VN %s Verification failed in monitor page" % (fixture.vn_name)) self.browser.get_screenshot_as_file('verify_vn_monitor_page.png') if vn_entry_flag: self.logger.info( " VN %s and subnet verified in webui config and monitor pages" % (fixture.vn_name)) # if self.webui_common.verify_uuid_table(fixture.vn_id): # self.logger.info( "VN %s UUID verified in webui table " %(fixture.vn_name)) # else: # self.logger.error( "VN %s UUID Verification failed in webui table " %(fixture.vn_name)) # self.browser.get_screenshot_as_file('verify_vn_configure_page_ip_block.png') fixture.obj = fixture.quantum_fixture.get_vn_obj_if_present( fixture.vn_name, fixture.project_id) fq_type = 'virtual_network' full_fq_name = fixture.vn_fq_name + ':' + fixture.vn_id # if 
self.webui_common.verify_fq_name_table(full_fq_name, fq_type): # self.logger.info( "fq_name %s found in fq Table for %s VN" %(fixture.vn_fq_name,fixture.vn_name)) # else: # self.logger.error( "fq_name %s failed in fq Table for %s VN" %(fixture.vn_fq_name,fixture.vn_name)) # self.browser.get_screenshot_as_file('setting_page_configure_fq_name_error.png') return True # end verify_vn_in_webui def vn_delete_in_webui(self, fixture): result = True self.browser.get_screenshot_as_file('vm_delete.png') if not self.webui_common.click_configure_networks(): result = result and False rows = self.webui_common.get_rows() ln = len(rows) for net in rows: if (net.find_elements_by_tag_name('div')[2].text == fixture.vn_name): net.find_elements_by_tag_name( 'div')[1].find_element_by_tag_name('input').click() break self.browser.find_element_by_id('btnDeleteVN').click() self.webui_common.wait_till_ajax_done(self.browser) time.sleep(2) self.browser.find_element_by_id('btnCnfRemoveMainPopupOK').click() self.logger.info("%s is deleted successfully using webui" % (fixture.vn_name)) # end vn_delete_in_webui def ipam_delete_in_webui(self, fixture): if not self.webui_common.click_configure_ipam(): result = result and False rows = self.webui_common.get_rows() for ipam in range(len(rows)): tdArry = rows[ipam].find_elements_by_class_name('slick-cell') if (len(tdArry) > 2): if (tdArry[2].text == fixture.name): tdArry[1].find_element_by_tag_name('input').click() self.browser.find_element_by_id( 'btnDeleteIpam').find_element_by_tag_name('i').click() self.browser.find_element_by_id( 'btnCnfRemoveMainPopupOK').click() if not self.webui_common.check_error_msg("Delete ipam"): raise Exception("Ipam deletion failed") break self.webui_common.wait_till_ajax_done(self.browser) self.logger.info( "%s is deleted successfully using webui" % (name)) break # end ipam_delete_in_webui def service_template_delete_in_webui(self, fixture): if not self.webui_common.click_configure_service_template(): result = result and 
False rows = self.webui_common.get_rows() for temp in range(len(rows)): tdArry = rows[temp].find_elements_by_class_name('slick-cell') if (len(tdArry) > 2): if (tdArry[2].text == fixture.st_name): tdArry[1].find_element_by_tag_name('input').click() self.browser.find_element_by_id( 'btnDeletesvcTemplate').find_element_by_tag_name('i').click() self.browser.find_element_by_id('btnCnfDelPopupOK').click() if not self.webui_common.check_error_msg("Delete service template"): raise Exception("Service template deletion failed") break self.webui_common.wait_till_ajax_done(self.browser) self.logger.info("%s is deleted successfully using webui" % (fixture.st_name)) break # end service_template_delete_in_webui def service_instance_delete_in_webui(self, fixture): if not self.webui_common.click_configure_service_instance(): result = result and False rows = self.webui_common.get_rows() for inst in range(len(rows)): tdArry = rows[inst].find_elements_by_class_name('slick-cell') if (len(tdArry) > 2): if (tdArry[2].text == fixture.si_name): tdArry[1].find_element_by_tag_name('input').click() self.browser.find_element_by_id( 'btnDeletesvcInstances').find_element_by_tag_name('i').click() self.browser.find_element_by_id( 'btnCnfDelSInstPopupOK').click() if not self.webui_common.check_error_msg("Delete service instance"): raise Exception("Service instance deletion failed") break self.webui_common.wait_till_ajax_done(self.browser) self.logger.info("%s is deleted successfully using webui" % (fixture.si_name)) break # end service_instance_delete_in_webui def dns_server_delete(self, name): if not self.webui_common.click_configure_dns_server(): result = result and False rows = self.webui_common.get_rows() for server in range(len(rows)): tdArry = rows[server].find_elements_by_class_name('slick-cell') if (len(tdArry) > 2): if (tdArry[2].text == name): tdArry[1].find_element_by_tag_name('input').click() self.browser.find_element_by_id( 'btnDeleteDNSServer').click() 
self.browser.find_element_by_id('btnCnfDelPopupOK').click() if not self.webui_common.check_error_msg("Delete dns server"): raise Exception("Dns server deletion failed") break self.webui_common.wait_till_ajax_done(self.browser) self.logger.info( "%s is deleted successfully using webui" % (name)) break # end dns_server_delete_in_webui def dns_record_delete(self, name): if not self.webui_common.click_configure_dns_record(): result = result and False rows = self.webui_common.get_rows() for record in range(len(rows)): tdArry = rows[record].find_elements_by_class_name('slick-cell') if (len(tdArry) > 2): if (tdArry[2].text == name): tdArry[1].find_element_by_tag_name('input').click() self.browser.find_element_by_id( 'btnDeleteDNSRecord').click() self.browser.find_element_by_id( 'btnCnfDelMainPopupOK').click() if not self.webui_common.check_error_msg("Delete dns record"): raise Exception("Dns record deletion failed") break self.webui_common.wait_till_ajax_done(self.browser) self.logger.info( "%s is deleted successfully using webui" % (name)) break # end dns_record_delete_in_webui def create_vm_in_openstack(self, fixture): try: if not self.proj_check_flag: WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_link_text('Project')).click() time.sleep(3) WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_css_selector('h4')).click() WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_id('tenant_list')).click() current_project = WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_css_selector('h3')).text if not current_project == fixture.project_name: WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_css_selector('h3')).click() WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_link_text(fixture.project_name)).click() self.webui_common.wait_till_ajax_done( self.browser_openstack) 
self.proj_check_flag = 1 WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_link_text('Project')).click() self.webui_common.wait_till_ajax_done(self.browser_openstack) instance = WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_link_text('Instances')).click() self.webui_common.wait_till_ajax_done(self.browser_openstack) fixture.image_name = 'ubuntu' fixture.nova_fixture.get_image(image_name=fixture.image_name) time.sleep(2) launch_instance = WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_link_text('Launch Instance')).click() self.webui_common.wait_till_ajax_done(self.browser_openstack) self.logger.debug('Creating instance name %s with image name %s using openstack' % (fixture.vm_name, fixture.image_name)) self.logger.info('Creating instance name %s with image name %s using openstack' % (fixture.vm_name, fixture.image_name)) time.sleep(3) self.browser_openstack.find_element_by_xpath( "//select[@name='source_type']/option[contains(text(), 'image') or contains(text(),'Image')]").click() self.webui_common.wait_till_ajax_done(self.browser_openstack) self.browser_openstack.find_element_by_xpath( "//select[@name='image_id']/option[contains(text(), '" + fixture.image_name + "')]").click() WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_id( 'id_name')).send_keys(fixture.vm_name) self.browser_openstack.find_element_by_xpath( "//select[@name='flavor']/option[text()='m1.small']").click() WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_xpath( "//input[@value='Launch']")).click() networks = WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_id ('available_network')).find_elements_by_tag_name('li') for net in networks: vn_match = net.text.split('(')[0] if (vn_match == fixture.vn_name): net.find_element_by_class_name('btn').click() break 
WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_xpath( "//input[@value='Launch']")).click() self.webui_common.wait_till_ajax_done(self.browser_openstack) self.logger.debug('VM %s launched using openstack' % (fixture.vm_name)) self.logger.info('Waiting for VM %s to come into active state' % (fixture.vm_name)) time.sleep(10) rows_os = self.browser_openstack.find_element_by_tag_name('form').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr') for i in range(len(rows_os)): rows_os = self.browser_openstack.find_element_by_tag_name( 'form') rows_os = WebDriverWait(rows_os, self.delay).until( lambda a: a.find_element_by_tag_name('tbody')) rows_os = WebDriverWait(rows_os, self.delay).until( lambda a: a.find_elements_by_tag_name('tr')) if(rows_os[i].find_elements_by_tag_name('td')[1].text == fixture.vm_name): counter = 0 vm_active = False while not vm_active: vm_active_status1 = self.browser_openstack.find_element_by_tag_name('form').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr')[i].find_elements_by_tag_name( 'td')[6].text vm_active_status2 = self.browser_openstack.find_element_by_tag_name('form').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr')[i].find_elements_by_tag_name('td')[5].text if(vm_active_status1 == 'Active' or vm_active_status2 == 'Active'): self.logger.info( "%s status changed to Active in openstack" % (fixture.vm_name)) vm_active = True time.sleep(5) elif(vm_active_status1 == 'Error' or vm_active_status2 == 'Error'): self.logger.error( "%s state went into Error state in openstack" % (fixture.vm_name)) self.browser_openstack.get_screenshot_as_file( 'verify_vm_state_openstack_' + 'fixture.vm_name' + '.png') return "Error" else: self.logger.info( "%s state is not yet Active in openstack, waiting for more time..." 
% (fixture.vm_name)) counter = counter + 1 time.sleep(3) self.browser_openstack.find_element_by_link_text( 'Instances').click() self.webui_common.wait_till_ajax_done( self.browser_openstack) time.sleep(3) if(counter >= 100): fixuture.logger.error( "VM %s failed to come into active state" % (fixture.vm_name)) self.browser_openstack.get_screenshot_as_file( 'verify_vm_not_active_openstack_' + 'fixture.vm_name' + '.png') break fixture.vm_obj = fixture.nova_fixture.get_vm_if_present( fixture.vm_name, fixture.project_fixture.uuid) fixture.vm_objs = fixture.nova_fixture.get_vm_list( name_pattern=fixture.vm_name, project_id=fixture.project_fixture.uuid) except ValueError: self.logger.error('Error while creating VM %s with image name %s failed in openstack' % (fixture.vm_name, fixture.image_name)) self.browser_openstack.get_screenshot_as_file( 'verify_vm_error_openstack_' + 'fixture.vm_name' + '.png') # end create_vm_in_openstack def vm_delete_in_openstack(self, fixture): rows = self.browser_openstack.find_element_by_id('instances').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr') for instance in rows: if fixture.vm_name == instance.find_element_by_tag_name('a').text: instance.find_elements_by_tag_name( 'td')[0].find_element_by_tag_name('input').click() break ln = len(rows) launch_instance = WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_id('instances__action_terminate')).click() WebDriverWait(self.browser_openstack, self.delay).until( lambda a: a.find_element_by_link_text('Terminate Instances')).click() time.sleep(5) self.logger.info("VM %s deleted successfully using openstack" % (fixture.vm_name)) # end vm_delete_in_openstack def verify_vm_in_webui(self, fixture): result = True try: if not self.webui_common.click_monitor_instances(): result = result and False rows = self.webui_common.get_rows() ln = len(rows) vm_flag = 0 for i in range(len(rows)): rows_count = len(rows) vm_name = rows[i].find_elements_by_class_name( 
    def verify_vm_in_webui(self, fixture):
        """Verify a VM's row and expanded basic details in the WebUI.

        Finds the row in monitor->instances whose name/uuid/VN match the
        fixture, expands it (with retries) and asserts the status is
        'Active' and the IP matches ``fixture.vm_ip``; then checks that
        ``fixture.vm_id`` appears in the VN's monitor->networks details.

        Returns:
            bool: accumulated verification status (None on ValueError).
        """
        result = True
        try:
            if not self.webui_common.click_monitor_instances():
                result = result and False
            rows = self.webui_common.get_rows()
            ln = len(rows)  # NOTE(review): unused local
            vm_flag = 0
            for i in range(len(rows)):
                rows_count = len(rows)  # NOTE(review): unused local
                vm_name = rows[i].find_elements_by_class_name(
                    'slick-cell')[1].text
                vm_uuid = rows[i].find_elements_by_class_name(
                    'slick-cell')[2].text
                vm_vn = rows[i].find_elements_by_class_name(
                    'slick-cell')[3].text.split(' ')[0]
                if(vm_name == fixture.vm_name and fixture.vm_obj.id == vm_uuid
                        and fixture.vn_name == vm_vn):
                    self.logger.info(
                        "VM %s vm exists..will verify row expansion basic details" %
                        (fixture.vm_name))
                    # The detail pane loads asynchronously; retry expanding
                    # the row until the click sticks or we give up.
                    retry_count = 0
                    while True:
                        self.logger.debug("Count is" + str(retry_count))
                        if retry_count > 20:
                            self.logger.error('Vm details failed to load')
                            break
                        self.browser.find_element_by_xpath(
                            "//*[@id='mon_net_instances']").find_element_by_tag_name('a').click()
                        time.sleep(1)
                        rows = self.webui_common.get_rows()
                        rows[i].find_elements_by_tag_name(
                            'div')[0].find_element_by_tag_name('i').click()
                        try:
                            retry_count = retry_count + 1
                            rows = self.webui_common.get_rows()
                            rows[
                                i + 1].find_elements_by_class_name('row-fluid')[0].click()
                            self.webui_common.wait_till_ajax_done(self.browser)
                            break
                        except WebDriverException:
                            # Detail row not rendered yet; loop and retry.
                            pass
                    rows = self.webui_common.get_rows()
                    row_details = rows[
                        i + 1].find_element_by_xpath("//*[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid')[5]
                    vm_status = row_details.find_elements_by_tag_name(
                        'div')[8].text
                    vm_ip_and_mac = row_details.find_elements_by_tag_name(
                        'div')[2].text
                    assert vm_status == 'Active'
                    # First line of the cell is the IP, second the MAC.
                    assert vm_ip_and_mac.splitlines()[0] == fixture.vm_ip
                    vm_flag = 1
                    break
            assert vm_flag, "VM name or VM uuid or VM ip or VM status verifications in WebUI for VM %s failed" % (
                fixture.vm_name)
            self.browser.get_screenshot_as_file('vm_create_check.png')
            self.logger.info(
                "Vm name,vm uuid,vm ip and vm status,vm network verification in WebUI for VM %s passed" %
                (fixture.vm_name))
            mon_net_networks = WebDriverWait(self.browser, self.delay).until(
                lambda a: a.find_element_by_id(
                    'mon_net_networks')).find_element_by_link_text('Networks').click()
            time.sleep(4)
            self.webui_common.wait_till_ajax_done(self.browser)
            rows = self.webui_common.get_rows()
            for i in range(len(rows)):
                if(rows[i].find_elements_by_class_name('slick-cell')[1].text ==
                        fixture.vn_fq_name.split(':')[0] + ":" +
                        fixture.project_name + ":" + fixture.vn_name):
                    rows[i].find_elements_by_tag_name(
                        'div')[0].find_element_by_tag_name('i').click()
                    time.sleep(2)
                    self.webui_common.wait_till_ajax_done(self.browser)
                    rows = self.webui_common.get_rows()
                    vm_ids = rows[
                        i + 1].find_element_by_xpath("//div[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid')[5].find_elements_by_tag_name('div')[1].text
                    if fixture.vm_id in vm_ids:
                        self.logger.info(
                            "Vm_id matched in webui monitor network basic details page %s" %
                            (fixture.vn_name))
                    else:
                        self.logger.error(
                            "Vm_id not matched in webui monitor network basic details page %s" %
                            (fixture.vm_name))
                        self.browser.get_screenshot_as_file(
                            'monitor_page_vm_id_not_match' + fixture.vm_name +
                            fixture.vm_id + '.png')
                        result = result and False
                    break
            # if self.webui_common.verify_uuid_table(fixture.vm_id):
            #     self.logger.info( "UUID %s found in UUID Table for %s VM" %(fixture.vm_name,fixture.vm_id))
            # else:
            #     self.logger.error( "UUID %s failed in UUID Table for %s VM" %(fixture.vm_name,fixture.vm_id))
            # fq_type='virtual_machine'
            # full_fq_name=fixture.vm_id+":"+fixture.vm_id
            # if self.webui_common.verify_fq_name_table(full_fq_name,fq_type):
            #     self.logger.info( "fq_name %s found in fq Table for %s VM" %(fixture.vm_id,fixture.vm_name))
            # else:
            #     self.logger.error( "fq_name %s failed in fq Table for %s VM" %(fixture.vm_id,fixture.vm_name))
            self.logger.info("VM verification in WebUI %s passed" %
                             (fixture.vm_name))
            return result
        except ValueError:
            self.logger.error("vm %s test error " % (fixture.vm_name))
            # NOTE(review): filename concatenates the literal string
            # 'fixture.vm_name' rather than the variable — looks like a bug.
            self.browser.get_screenshot_as_file(
                'verify_vm_test_openstack_error' + 'fixture.vm_name' + '.png')
    # end verify_vm_in_webui
if(rows[i].find_elements_by_class_name('slick-cell')[1].text == fixture.vn_fq_name.split(':')[0] + ":" + fixture.project_name + ":" + fixture.vn_name): rows[i].find_elements_by_tag_name( 'div')[0].find_element_by_tag_name('i').click() time.sleep(2) self.webui_common.wait_till_ajax_done(self.browser) rows = self.webui_common.get_rows() vm_ids = rows[ i + 1].find_element_by_xpath("//div[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid')[5].find_elements_by_tag_name('div')[1].text if fixture.vm_id in vm_ids: self.logger.info( "Vm_id matched in webui monitor network basic details page %s" % (fixture.vn_name)) else: self.logger.error( "Vm_id not matched in webui monitor network basic details page %s" % (fixture.vm_name)) self.browser.get_screenshot_as_file( 'monitor_page_vm_id_not_match' + fixture.vm_name + fixture.vm_id + '.png') result = result and False break # if self.webui_common.verify_uuid_table(fixture.vm_id): # self.logger.info( "UUID %s found in UUID Table for %s VM" %(fixture.vm_name,fixture.vm_id)) # else: # self.logger.error( "UUID %s failed in UUID Table for %s VM" %(fixture.vm_name,fixture.vm_id)) # fq_type='virtual_machine' # full_fq_name=fixture.vm_id+":"+fixture.vm_id # if self.webui_common.verify_fq_name_table(full_fq_name,fq_type): # self.logger.info( "fq_name %s found in fq Table for %s VM" %(fixture.vm_id,fixture.vm_name)) # else: # self.logger.error( "fq_name %s failed in fq Table for %s VM" %(fixture.vm_id,fixture.vm_name)) self.logger.info("VM verification in WebUI %s passed" % (fixture.vm_name)) return result except ValueError: self.logger.error("vm %s test error " % (fixture.vm_name)) self.browser.get_screenshot_as_file( 'verify_vm_test_openstack_error' + 'fixture.vm_name' + '.png') # end verify_vm_in_webui def create_floatingip_pool_webui(self, fixture, pool_name, vn_name): try: if not self.webui_common.click_configure_networks(): result = result and False self.webui_common.select_project(fixture.project_name) rows = 
self.webui_common.get_rows() self.logger.info("Creating floating ip pool %s using webui" % (pool_name)) for net in rows: if (net.find_elements_by_class_name('slick-cell')[2].get_attribute('innerHTML') == fixture.vn_name): net.find_element_by_class_name('icon-cog').click() self.webui_common.wait_till_ajax_done(self.browser) time.sleep(3) self.browser.find_element_by_class_name( 'tooltip-success').find_element_by_tag_name('i').click() ip_text = net.find_element_by_xpath( "//span[contains(text(), 'Floating IP Pools')]") ip_text.find_element_by_xpath( '..').find_element_by_tag_name('i').click() route = self.browser.find_element_by_xpath( "//div[@title='Add Floating IP Pool below']") route.find_element_by_class_name('icon-plus').click() self.webui_common.wait_till_ajax_done(self.browser) self.browser.find_element_by_xpath( "//input[@placeholder='Pool Name']").send_keys(fixture.pool_name) self.browser.find_element_by_id( 'fipTuples').find_elements_by_tag_name('input')[1].click() project_elements = self.browser.find_elements_by_xpath( "//*[@class = 'select2-match']/..") self._click_if_element_found( fixture.project_name, project_elements) self.webui_common.wait_till_ajax_done(self.browser) self.browser.find_element_by_xpath( "//button[@id = 'btnCreateVNOK']").click() self.webui_common.wait_till_ajax_done(self.browser) time.sleep(2) if not self.webui_common.check_error_msg("Creating fip pool"): raise Exception("Create fip pool failed") self.logger.info("Fip pool %s created using webui" % (fixture.pool_name)) break except ValueError: self.logger.error("Fip %s Error while creating floating ip pool " % (fixture.pool_name)) # end create_floatingip_pool_webui def create_and_assoc_fip_webui(self, fixture, fip_pool_vn_id, vm_id, vm_name, project=None): try: fixture.vm_name = vm_name fixture.vm_id = vm_id if not self.webui_common.click_configure_networks(): result = result and False rows = self.webui_common.get_rows() self.logger.info("Creating and associating fip %s using webui" 
% (fip_pool_vn_id)) for net in rows: if (net.find_elements_by_class_name('slick-cell')[2].get_attribute('innerHTML') == fixture.vn_name): self.browser.find_element_by_xpath( "//*[@id='config_net_fip']/a").click() self.browser.get_screenshot_as_file('fip.png') time.sleep(3) self.browser.find_element_by_id('btnCreatefip').click() self.webui_common.wait_till_ajax_done(self.browser) time.sleep(1) pool = self.browser.find_element_by_xpath("//div[@id='s2id_ddFipPool']").find_element_by_tag_name( 'a').click() time.sleep(2) self.webui_common.wait_till_ajax_done(self.browser) fip = self.browser.find_element_by_id( "select2-drop").find_elements_by_tag_name('li') for i in range(len(fip)): if fip[i].find_element_by_tag_name('div').get_attribute("innerHTML") == fixture.project_name + ':' + fixture.vn_name + ':' + fixture.pool_name: fip[i].click() self.browser.find_element_by_id('btnCreatefipOK').click() if not self.webui_common.check_error_msg("Creating Fip"): raise Exception("Create fip failed") self.webui_common.wait_till_ajax_done(self.browser) rows1 = self.webui_common.get_rows() for element in rows1: if element.find_elements_by_class_name('slick-cell')[3].get_attribute('innerHTML') == fixture.vn_name + ':' + fixture.pool_name: element.find_element_by_class_name( 'icon-cog').click() self.webui_common.wait_till_ajax_done(self.browser) element.find_element_by_xpath( "//a[@class='tooltip-success']").click() self.webui_common.wait_till_ajax_done(self.browser) break pool = self.browser.find_element_by_xpath( "//div[@id='s2id_ddAssociate']").find_element_by_tag_name('a').click() time.sleep(1) self.webui_common.wait_till_ajax_done(self.browser) fip = self.browser.find_element_by_id( "select2-drop").find_elements_by_tag_name('li') for i in range(len(fip)): if fip[i].find_element_by_tag_name('div').get_attribute("innerHTML").split(' ')[1] == vm_id: fip[i].click() self.browser.find_element_by_id( 'btnAssociatePopupOK').click() self.webui_common.wait_till_ajax_done(self.browser) if 
not self.webui_common.check_error_msg("Fip Associate"): raise Exception("Fip association failed") time.sleep(1) break except ValueError: self.logger.info( "Error while creating floating ip and associating it.") # end create_and_assoc_fip_webui def verify_fip_in_webui(self, fixture): if not self.webui_common.click_configure_networks(): result = result and False rows = WebDriverWait(self.browser, self.delay).until(lambda a: a.find_element_by_id( 'gridVN')).find_element_by_tag_name('tbody').find_elements_by_tag_name('tr') for i in range(len(rows)): vn_name = rows[i].find_elements_by_tag_name('td')[2].text if vn_name == fixture.vn_name: rows[i].find_elements_by_tag_name( 'td')[0].find_element_by_tag_name('a').click() rows = self.webui_common.get_rows() fip_check = rows[ i + 1].find_elements_by_xpath("//td/div/div/div")[1].text if fip_check.split('\n')[1].split(' ')[0] == fixture.pool_name: self.logger.info( "Fip pool %s verified in WebUI configure network page" % (fixture.pool_name)) break WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_xpath("//*[@id='config_net_fip']/a")).click() self.webui_common.wait_till_ajax_done(self.browser) rows = self.browser.find_element_by_xpath( "//div[@id='gridfip']/table/tbody").find_elements_by_tag_name('tr') for i in range(len(rows)): fip = rows[i].find_elements_by_tag_name('td')[3].text.split(':')[1] vn = rows[i].find_elements_by_tag_name('td')[3].text.split(':')[0] fip_ip = rows[i].find_elements_by_class_name('slick-cell')[1].text if rows[i].find_elements_by_tag_name('td')[2].text == fixture.vm_id: if vn == fixture.vn_name and fip == fixture.pool_name: self.logger.info("Fip is found attached with vm %s " % (fixture.vm_name)) self.logger.info("VM %s is found associated with FIP %s " % (fixture.vm_name, fip)) else: self.logger.info( "Association of %s VM failed with FIP %s " % (fixture.vm_name, fip)) break if not self.webui_common.click_monitor_instances(): result = result and False rows = 
self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr') ln = len(rows) vm_flag = 0 for i in range(len(rows)): vm_name = rows[i].find_elements_by_tag_name( 'td')[1].find_element_by_tag_name('div').text vm_uuid = rows[i].find_elements_by_tag_name('td')[2].text vm_vn = rows[i].find_elements_by_tag_name( 'td')[3].text.split(' ')[0] if(vm_name == fixture.vm_name and fixture.vm_id == vm_uuid and vm_vn == fixture.vn_name): rows[i].find_elements_by_tag_name( 'td')[0].find_element_by_tag_name('a').click() self.webui_common.wait_till_ajax_done(self.browser) rows = self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr') fip_check_vm = rows[i + 1].find_element_by_xpath("//*[contains(@id, 'basicDetails')]" ).find_elements_by_tag_name('div')[0].find_elements_by_tag_name('div')[1].text if fip_check_vm.split(' ')[0] == fip_ip and fip_check_vm.split( ' ')[1] == '\(' + 'default-domain' + ':' + fixture.project_name + ':' + fixture.vn_name + '\)': self.logger.info( "FIP verified in monitor instance page for vm %s " % (fixture.vm_name)) else: self.logger.info( "FIP failed to verify in monitor instance page for vm %s" % (fixture.vm_name)) break # end verify_fip_in_webui def delete_fip_in_webui(self, fixture): if not self.webui_common.click_configure_fip(): result = result and False rows = self.browser.find_element_by_id('gridfip').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr') for net in rows: if (net.find_elements_by_tag_name('td')[2].get_attribute('innerHTML') == fixture.vm_id): net.find_elements_by_tag_name('td')[5].find_element_by_tag_name( 'div').find_element_by_tag_name('div').click() self.webui_common.wait_till_ajax_done(self.browser) net.find_element_by_xpath( "//a[@class='tooltip-error']").click() self.webui_common.wait_till_ajax_done(self.browser) WebDriverWait(self.browser, self.delay).until( lambda a: 
a.find_element_by_id('btnDisassociatePopupOK')).click() self.webui_common.wait_till_ajax_done(self.browser) self.webui_common.wait_till_ajax_done(self.browser) rows = self.browser.find_element_by_id('gridfip').find_element_by_tag_name( 'tbody').find_elements_by_tag_name('tr') for net in rows: if (net.find_elements_by_tag_name('td')[3].get_attribute('innerHTML') == fixture.vn_name + ':' + fixture.pool_name): net.find_elements_by_tag_name( 'td')[0].find_element_by_tag_name('input').click() WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('btnDeletefip')).click() WebDriverWait(self.browser, self.delay).until( lambda a: a.find_element_by_id('btnCnfReleasePopupOK')).click() if not self.webui_common.click_configure_networks(): result = result and False rows = self.webui_common.get_rows() for net in rows: if (net.find_elements_by_tag_name('td')[2].get_attribute('innerHTML') == fixture.vn_name): net.find_element_by_class_name('dropdown-toggle').click() net.find_elements_by_tag_name( 'li')[0].find_element_by_tag_name('a').click() ip_text = net.find_element_by_xpath( "//span[contains(text(), 'Floating IP Pools')]") ip_text.find_element_by_xpath( '..').find_element_by_tag_name('i').click() pool_con = self.browser.find_element_by_id('fipTuples') fip = pool_con.find_elements_by_xpath( "//*[contains(@id, 'rule')]") for pool in fip: if(pool.find_element_by_tag_name('input').get_attribute('value') == fixture.pool_name): pool.find_element_by_class_name( 'icon-minus').click() self.browser.find_element_by_xpath( "//button[@id = 'btnCreateVNOK']").click() break # end delete_fip_in_webui
60.406206
542
0.542983
24,095
216,073
4.519485
0.029674
0.037981
0.051654
0.0281
0.815036
0.774411
0.720231
0.668246
0.627024
0.578372
0
0.005311
0.356019
216,073
3,576
543
60.423098
0.777293
0.019614
0
0.539857
0
0.001487
0.156228
0.024829
0.000892
0
0
0
0.000892
0
null
null
0.000892
0.006544
null
null
0.000297
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
1b5b58c46eceee94ff2083f63bbd9128f0ea25c7
555
py
Python
ivy/functional/backends/jax/statistical.py
odehDanOps/ivy
996cabd0901ed331d5e8761e2cdee429d0ca8d43
[ "Apache-2.0" ]
null
null
null
ivy/functional/backends/jax/statistical.py
odehDanOps/ivy
996cabd0901ed331d5e8761e2cdee429d0ca8d43
[ "Apache-2.0" ]
null
null
null
ivy/functional/backends/jax/statistical.py
odehDanOps/ivy
996cabd0901ed331d5e8761e2cdee429d0ca8d43
[ "Apache-2.0" ]
null
null
null
# global
import jax.numpy as jnp
from typing import Optional, Tuple, Union

# Array API Standard #
# -------------------#


def min(x: jnp.ndarray,
        axis: Optional[Union[int, Tuple[int, ...]]] = None,
        keepdims: bool = False,
        device=None) \
        -> jnp.ndarray:
    """Return the minimum of ``x``, optionally along ``axis``.

    :param x: input array (anything accepted by ``jnp.asarray``).
    :param axis: axis or axes to reduce over; ``None`` reduces all axes.
    :param keepdims: keep reduced axes as size-1 dimensions.
    :param device: accepted for cross-backend API parity; unused by JAX here.
    :return: the reduced array.
    """
    # Annotation fixed: default None requires Optional, and a tuple of axes
    # is variable-length (Tuple[int, ...]); runtime behavior is unchanged.
    return jnp.min(a=jnp.asarray(x), axis=axis, keepdims=keepdims)


def max(x: jnp.ndarray,
        axis: Optional[Union[int, Tuple[int, ...]]] = None,
        keepdims: bool = False,
        device=None) \
        -> jnp.ndarray:
    """Return the maximum of ``x``, optionally along ``axis``.

    :param x: input array (anything accepted by ``jnp.asarray``).
    :param axis: axis or axes to reduce over; ``None`` reduces all axes.
    :param keepdims: keep reduced axes as size-1 dimensions.
    :param device: accepted for cross-backend API parity; unused by JAX here.
    :return: the reduced array.
    """
    return jnp.max(a=jnp.asarray(x), axis=axis, keepdims=keepdims)


# Extra #
# ------#
23.125
72
0.569369
71
555
4.450704
0.394366
0.126582
0.06962
0.094937
0.71519
0.71519
0.71519
0.71519
0.487342
0.487342
0
0
0.25045
555
23
73
24.130435
0.759615
0.108108
0
0.5
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0.166667
0.5
0
0
0
0
null
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
4
1ba6078890512ae4e2a9c49facfe629996e04ba9
1,405
py
Python
Server/Protocol/Messages/Server/LoginOkMessage.py
Voeed/Brawl-stars-v11
b60743c306ab471053d81aa59f812b19a2c7f8f3
[ "MIT" ]
3
2021-04-14T18:33:53.000Z
2021-09-26T13:53:54.000Z
Server/Protocol/Messages/Server/LoginOkMessage.py
ImColette-dev/Brawl-stars-v11-server
b3232b574c5b2fcc29803e5c8c9e9650d0d59d73
[ "MIT" ]
1
2021-09-09T12:23:34.000Z
2021-09-09T12:23:34.000Z
Server/Protocol/Messages/Server/LoginOkMessage.py
ImColette-dev/Brawl-stars-v11-server
b3232b574c5b2fcc29803e5c8c9e9650d0d59d73
[ "MIT" ]
1
2021-07-28T16:15:29.000Z
2021-07-28T16:15:29.000Z
from DataStream.ByteStream import ByteStream
from Logic.Player import Player


class LoginOkMessage(ByteStream):
    """Server->client message (id 20104) confirming a successful login.

    Serializes the player's identity and session token in the exact field
    order the client expects; the meaning of each field is only what the
    write calls below show — the wire format itself is defined client-side.
    """

    def __init__(self, client, player):
        super().__init__(client)
        self.id = 20104     # message type id on the wire
        self.version = 1    # message version
        self.player = player;

    def encode(self):
        """Write the message payload into the underlying byte stream.

        NOTE(review): field order is the protocol contract — do not reorder.
        """
        # Account id, written twice (presumably account id + home id — confirm).
        self.writeInt(self.player.HighID)
        self.writeInt(self.player.LowID)
        self.writeInt(self.player.HighID)
        self.writeInt(self.player.LowID)
        self.writeString(self.player.Token)   # session token
        self.writeString()
        self.writeString()
        self.writeInt(11)    # presumably client major version — confirm
        self.writeInt(112)   # presumably client build number — confirm
        self.writeInt(1)
        self.writeString("integration")  # environment / server name
        self.writeInt(0) #1
        self.writeInt(0) #1
        self.writeInt(0) #61
        self.writeString() #isAtEnd
        self.writeString()
        self.writeString() #isAtEnd
        self.writeInt(0)
        self.writeString()
        self.writeString()
        self.writeString()
        self.writeInt(0)
        self.writeString()
        self.writeString()
        self.writeString() #isAtEnd
        self.writeVInt(0) #isAtEnd
        #TODO: stringReference
        print("[INFO] Message LoginOkMessage has been sent.")
25.089286
61
0.525267
126
1,405
5.793651
0.301587
0.267123
0.234247
0.246575
0.494521
0.494521
0.371233
0.315068
0.315068
0.156164
0
0.025316
0.381495
1,405
56
61
25.089286
0.81473
0.037722
0
0.555556
0
0
0.042636
0
0
0
0
0.017857
0
1
0.055556
false
0
0.055556
0
0.138889
0.027778
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
4
1bcec325b98d968512d016ef80af5e36f6c9e424
3,422
py
Python
backend/db/entities/mixin/pengaturan.py
R-N/sistem_gaji_vue_thrift
9ba800b4d8e7849e2c6c4016cb32633caab087be
[ "MIT" ]
null
null
null
backend/db/entities/mixin/pengaturan.py
R-N/sistem_gaji_vue_thrift
9ba800b4d8e7849e2c6c4016cb32633caab087be
[ "MIT" ]
null
null
null
backend/db/entities/mixin/pengaturan.py
R-N/sistem_gaji_vue_thrift
9ba800b4d8e7849e2c6c4016cb32633caab087be
[ "MIT" ]
null
null
null
from sqlalchemy import Column, Integer, Numeric
from sqlalchemy.ext.declarative import declared_attr

from .pengaturan_base import MxPengaturanBase


class MxPengaturan(MxPengaturanBase):
    """Mixin declaring payroll-settings columns (BPJS rates, allowances).

    Each column is a ``@declared_attr`` so the mapping works on any
    declarative class that mixes this in. All columns are NOT NULL.
    """
    # TODO: Set precision & scale for Numerics

    # Employer / employee BPJS contribution rates (Numeric fractions).
    @declared_attr
    def bpjs_ketenagakerjaan_perusahaan(cls):
        return Column(Numeric, nullable=False)

    @declared_attr
    def bpjs_ketenagakerjaan_karyawan(cls):
        return Column(Numeric, nullable=False)

    @declared_attr
    def bpjs_kesehatan_perusahaan(cls):
        return Column(Numeric, nullable=False)

    @declared_attr
    def bpjs_kesehatan_karyawan(cls):
        return Column(Numeric, nullable=False)

    # Fixed-amount settings (Integer, presumably Rupiah — confirm).
    @declared_attr
    def upah_minimum(cls):
        return Column(Integer, nullable=False)

    @declared_attr
    def iuran_rumah(cls):
        return Column(Integer, nullable=False)

    @declared_attr
    def iuran_koperasi(cls):
        return Column(Integer, nullable=False)

    @declared_attr
    def pendaftaran_koperasi(cls):
        return Column(Integer, nullable=False)

    @declared_attr
    def uang_makan(cls):
        return Column(Integer, nullable=False)

    @declared_attr
    def uang_transport(cls):
        return Column(Integer, nullable=False)

    # Absence coefficient used in pay calculation (Numeric).
    @declared_attr
    def koef_absen(cls):
        return Column(Numeric, nullable=False)

    # NOTE(review): in the disabled mx_init below, bpjs_kesehatan_karyawan is
    # never assigned — the fourth assignment repeats bpjs_ketenagakerjaan_karyawan.
    # Fix that if this block is ever re-enabled.
    '''
    def mx_init(
        self, *args,
        bpjs_ketenagakerjaan_perusahaan, bpjs_ketenagakerjaan_karyawan,
        bpjs_kesehatan_perusahaan, bpjs_kesehatan_karyawan,
        upah_minimum, iuran_rumah, iuran_koperasi, pendaftaran_koperasi,
        uang_makan, uang_transport, koef_absen,
        **kwargs
    ):
        MxPengaturanBase.mx_init(*args, **kwargs)
        self.bpjs_ketenagakerjaan_perusahaan = bpjs_ketenagakerjaan_perusahaan
        self.bpjs_ketenagakerjaan_karyawan = bpjs_ketenagakerjaan_karyawan
        self.bpjs_kesehatan_perusahaan = bpjs_kesehatan_perusahaan
        self.bpjs_ketenagakerjaan_karyawan = bpjs_ketenagakerjaan_karyawan
        self.upah_minimum = upah_minimum
        self.iuran_rumah = iuran_rumah
        self.iuran_koperasi = iuran_koperasi
        self.pendaftaran_koperasi = pendaftaran_koperasi
        self.uang_makan = uang_makan
        self.uang_transport = uang_transport
        self.koef_absen = koef_absen
    '''

    def mx_reconstruct(self):
        # Delegates straight to the base reconstruction hook.
        MxPengaturanBase.mx_reconstruct(self)

    def mx_repr(self):
        # Representation is entirely the base class's.
        return '%s' % (MxPengaturanBase.mx_repr(self),)

    '''
    def mx_repr(self):
        return "TODO" % (
            self.id,
            self.nama,
        )
    '''

    def mx_init_repr(self):
        """Return the base init-repr dict extended with this mixin's columns."""
        ret = MxPengaturanBase.mx_init_repr(self)
        ret.update({
            'bpjs_ketenagakerjaan_perusahaan': self.bpjs_ketenagakerjaan_perusahaan,
            'bpjs_ketenagakerjaan_karyawan': self.bpjs_ketenagakerjaan_karyawan,
            'bpjs_kesehatan_perusahaan': self.bpjs_kesehatan_perusahaan,
            'bpjs_kesehatan_karyawan': self.bpjs_kesehatan_karyawan,
            'upah_minimum': self.upah_minimum,
            'iuran_rumah': self.iuran_rumah,
            'iuran_koperasi': self.iuran_koperasi,
            'pendaftaran_koperasi': self.pendaftaran_koperasi,
            'uang_makan': self.uang_makan,
            'uang_transport': self.uang_transport,
            'koef_absen': self.koef_absen
        })
        return ret
30.553571
84
0.677382
358
3,422
6.156425
0.150838
0.12069
0.074864
0.11343
0.603448
0.544465
0.337568
0.337568
0.337568
0.249546
0
0
0.247808
3,422
111
85
30.828829
0.856255
0.011689
0
0.385965
0
0
0.089692
0.048193
0
0
0
0.018018
0
1
0.245614
false
0
0.052632
0.210526
0.54386
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
1
1
0
0
4
1bcf777e137c1ad0fceae7a7461a0f96f18dbe05
127
py
Python
test1/views/models/axfundaddress.py
biz2013/xwjy
8f4b5e3e3fc964796134052ff34d58d31ed41904
[ "Apache-2.0" ]
1
2019-12-15T16:56:44.000Z
2019-12-15T16:56:44.000Z
coinExchange/trading/views/models/axfundaddress.py
biz2013/xwjy
8f4b5e3e3fc964796134052ff34d58d31ed41904
[ "Apache-2.0" ]
87
2018-01-06T10:18:31.000Z
2022-03-11T23:32:30.000Z
test1/views/models/axfundaddress.py
biz2013/xwjy
8f4b5e3e3fc964796134052ff34d58d31ed41904
[ "Apache-2.0" ]
null
null
null
class AXFundAddress(object):
    """Value object pairing an AX fund wallet address with its alias.

    :param address: the fund address string.
    :param alias: human-readable label for the address.
    """

    def __init__(self, address, alias):
        self.address = address
        self.alias = alias

    def __repr__(self):
        # Added for debuggability; backward-compatible (original class had
        # only the default object repr).
        return '%s(address=%r, alias=%r)' % (
            type(self).__name__, self.address, self.alias)
25.4
39
0.653543
14
127
5.642857
0.571429
0.278481
0
0
0
0
0
0
0
0
0
0
0.251969
127
4
40
31.75
0.831579
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
59f29643b40421242d0bea0de29a9058ef247935
49
py
Python
pis_client/__init__.py
ignertic/pis_client
12dd56b6801b53388cb46199a27a0a7d3d214523
[ "MIT" ]
null
null
null
pis_client/__init__.py
ignertic/pis_client
12dd56b6801b53388cb46199a27a0a7d3d214523
[ "MIT" ]
null
null
null
pis_client/__init__.py
ignertic/pis_client
12dd56b6801b53388cb46199a27a0a7d3d214523
[ "MIT" ]
null
null
null
"""pis_client package: re-exports :class:`Client` as the public entry point."""
from .model import Client

# Star-imports already exclude underscore names, so listing only ``Client``
# keeps ``from pis_client import *`` behaviour identical to before.
__all__ = ["Client"]

__version__ = "0.0.1"
9.8
25
0.714286
8
49
3.875
0.875
0
0
0
0
0
0
0
0
0
0
0.073171
0.163265
49
4
26
12.25
0.682927
0
0
0
0
0
0.102041
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
942b66c8fc0310ad42fc87a38834d2efbab930c1
185
py
Python
tests/core/test_setproctitle.py
STATION-I/STAI-blockchain
a8ca05cbd2602eee7c2e4ce49c74c447a091ef0f
[ "Apache-2.0" ]
10
2021-10-02T18:33:56.000Z
2021-11-14T17:10:48.000Z
tests/core/test_setproctitle.py
STATION-I/STAI-blockchain
a8ca05cbd2602eee7c2e4ce49c74c447a091ef0f
[ "Apache-2.0" ]
14
2021-10-07T22:10:15.000Z
2021-12-21T09:13:49.000Z
tests/core/test_setproctitle.py
STATION-I/STAI-blockchain
a8ca05cbd2602eee7c2e4ce49c74c447a091ef0f
[ "Apache-2.0" ]
6
2021-10-29T19:36:59.000Z
2021-12-19T19:52:57.000Z
import unittest

from stai.util.setproctitle import setproctitle


class TestSetProcTitle(unittest.TestCase):
    """Smoke test for the project's setproctitle wrapper."""

    def test_does_not_crash(self):
        # Only asserts the call completes without raising; the resulting
        # process title is not read back (platform-dependent to inspect).
        setproctitle("stai test title")
20.555556
47
0.778378
22
185
6.409091
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.151351
185
8
48
23.125
0.898089
0
0
0
0
0
0.081081
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
942f5cd48c58389e331452b5be336e85b4cdd20b
415
py
Python
crmsystem/__init__.py
iomegak12/pythondockertry
dd91dc57a09141f94cb0a73e18a8ad9da4d5aa85
[ "MIT" ]
null
null
null
crmsystem/__init__.py
iomegak12/pythondockertry
dd91dc57a09141f94cb0a73e18a8ad9da4d5aa85
[ "MIT" ]
null
null
null
crmsystem/__init__.py
iomegak12/pythondockertry
dd91dc57a09141f94cb0a73e18a8ad9da4d5aa85
[ "MIT" ]
null
null
null
"""crmsystem package root: re-exports the public API from its submodules."""
from .config import GlobalConfiguration
from .controllers import DataController
from .utilities import ErrorProvider, CustomerEncoder, OrderEncoder, PrettyTableGenerator
from .services import CustomerService, OrderService
from .models import Customer, Order, CRMSystemError
from .decorators import Logger
from .routing import CustomerRouteHandler
# Hosting/app entry points, re-exported under stable names.
from .hosting import app as CRMSystemHost
from .app import flaskApp
41.5
89
0.857831
44
415
8.090909
0.613636
0
0
0
0
0
0
0
0
0
0
0
0.106024
415
9
90
46.111111
0.959569
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
94464711b51aaed6bb644bb94d8782573a3c211b
302
py
Python
mmocr/models/common/__init__.py
yangrisheng/mmocr
3ad4a8d3f8d2d22b7854b72ee68a7977a3f3631f
[ "Apache-2.0" ]
2
2022-01-02T13:33:10.000Z
2022-02-08T07:40:30.000Z
mmocr/models/common/__init__.py
yangrisheng/mmocr
3ad4a8d3f8d2d22b7854b72ee68a7977a3f3631f
[ "Apache-2.0" ]
null
null
null
mmocr/models/common/__init__.py
yangrisheng/mmocr
3ad4a8d3f8d2d22b7854b72ee68a7977a3f3631f
[ "Apache-2.0" ]
null
null
null
# Copyright (c) OpenMMLab. All rights reserved. from . import backbones, layers, losses, modules from .backbones import * # NOQA from .layers import * # NOQA from .losses import * # NOQA from .modules import * # NOQA __all__ = backbones.__all__ + losses.__all__ + layers.__all__ + modules.__all__
33.555556
79
0.731788
37
302
5.432432
0.351351
0.199005
0.208955
0
0
0
0
0
0
0
0
0
0.172185
302
8
80
37.75
0.804
0.215232
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
944b49db9922d734d432d502d558b6874b8edee9
350
py
Python
module3-nosql-and-document-oriented-databases/mongoDB.py
cocoisland/DS-Unit-3-Sprint-2-SQL-and-Databases
063af0488fa5694ee233298ed76de71b4229fe4c
[ "MIT" ]
null
null
null
module3-nosql-and-document-oriented-databases/mongoDB.py
cocoisland/DS-Unit-3-Sprint-2-SQL-and-Databases
063af0488fa5694ee233298ed76de71b4229fe4c
[ "MIT" ]
null
null
null
module3-nosql-and-document-oriented-databases/mongoDB.py
cocoisland/DS-Unit-3-Sprint-2-SQL-and-Databases
063af0488fa5694ee233298ed76de71b4229fe4c
[ "MIT" ]
null
null
null
#!/usr/bin/env python import pymongo conn_string="mongodb://dbUser19:LSVyKnHW@cluster0-shard-00-00-nadgn.mongodb.net:27017,cluster0-shard-00-01-nadgn.mongodb.net:27017,cluster0-shard-00-02-nadgn.mongodb.net:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin&retryWrites=true" client=pymongo.MongoClient(conn_string) db=client.test
31.818182
252
0.814286
54
350
5.240741
0.555556
0.183746
0.159011
0.212014
0.24735
0.24735
0.24735
0
0
0
0
0.100592
0.034286
350
10
253
35
0.736686
0.057143
0
0
0
0.25
0.72561
0.72561
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
9453f91d17fbd3f26c0b9b47751d7481f19b12e8
109
gyp
Python
binding.gyp
artik-snu/node-addon-gpio
9530478d8543b0ffbfbcf8780a7b91ccbd56a111
[ "MIT" ]
null
null
null
binding.gyp
artik-snu/node-addon-gpio
9530478d8543b0ffbfbcf8780a7b91ccbd56a111
[ "MIT" ]
null
null
null
binding.gyp
artik-snu/node-addon-gpio
9530478d8543b0ffbfbcf8780a7b91ccbd56a111
[ "MIT" ]
null
null
null
# GYP build configuration for the node-addon-gpio native addon.
{
  "targets": [
    {
      # Build output: gpio.node
      "target_name": "gpio",
      "sources": ["gpio.cc", "tizen-gpio.cc"]
    }
  ]
}
13.625
45
0.422018
10
109
4.5
0.7
0.266667
0
0
0
0
0
0
0
0
0
0
0.321101
109
8
46
13.625
0.608108
0
0
0
0
0
0.445455
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
946e26227518ff513554910aa81069946aa27b6e
858
py
Python
simpleml/models/classifiers/sklearn/mixture.py
ptoman/SimpleML
a829ee05da01a75b64982d91a012e9274b6f7c6e
[ "BSD-3-Clause" ]
15
2018-08-19T19:36:23.000Z
2021-11-09T17:47:18.000Z
simpleml/models/classifiers/sklearn/mixture.py
ptoman/SimpleML
a829ee05da01a75b64982d91a012e9274b6f7c6e
[ "BSD-3-Clause" ]
75
2020-10-11T17:58:59.000Z
2022-03-29T22:34:54.000Z
simpleml/models/classifiers/sklearn/mixture.py
ptoman/SimpleML
a829ee05da01a75b64982d91a012e9274b6f7c6e
[ "BSD-3-Clause" ]
4
2018-04-30T23:09:42.000Z
2022-01-19T08:03:18.000Z
'''
Wrapper module around `sklearn.mixture`
'''

__author__ = 'Elisha Yadgaran'


from .base_sklearn_classifier import SklearnClassifier
from simpleml.models.classifiers.external_models import ClassificationExternalModelMixin

from sklearn.mixture import BayesianGaussianMixture, GaussianMixture


'''
Gaussian Mixture
'''


class WrappedSklearnBayesianGaussianMixture(BayesianGaussianMixture, ClassificationExternalModelMixin):
    """sklearn BayesianGaussianMixture combined with SimpleML's classification mixin."""
    pass


class SklearnBayesianGaussianMixture(SklearnClassifier):
    """SimpleML model whose external estimator is a BayesianGaussianMixture."""

    def _create_external_model(self, **kwargs):
        # Factory hook called by the SklearnClassifier base to build the
        # underlying estimator; kwargs are forwarded verbatim.
        return WrappedSklearnBayesianGaussianMixture(**kwargs)


class WrappedSklearnGaussianMixture(GaussianMixture, ClassificationExternalModelMixin):
    """sklearn GaussianMixture combined with SimpleML's classification mixin."""
    pass


class SklearnGaussianMixture(SklearnClassifier):
    """SimpleML model whose external estimator is a GaussianMixture."""

    def _create_external_model(self, **kwargs):
        # Factory hook; kwargs are forwarded verbatim to the estimator.
        return WrappedSklearnGaussianMixture(**kwargs)
26.8125
103
0.825175
64
858
10.859375
0.515625
0.040288
0.117986
0.097842
0.158273
0.158273
0.158273
0.158273
0
0
0
0
0.107226
858
31
104
27.677419
0.907311
0.045455
0
0.285714
0
0
0.01906
0
0
0
0
0
0
1
0.142857
false
0.142857
0.214286
0.142857
0.785714
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
1
1
0
0
4
946ec27fdd23802b4feeaeb4d5ee19100b13e2f5
140
py
Python
tools/bacommon/__init__.py
ritiek/ballistica
5f909d0b91bfbed3e96c21dbf342616a2d2e7b41
[ "MIT" ]
null
null
null
tools/bacommon/__init__.py
ritiek/ballistica
5f909d0b91bfbed3e96c21dbf342616a2d2e7b41
[ "MIT" ]
null
null
null
tools/bacommon/__init__.py
ritiek/ballistica
5f909d0b91bfbed3e96c21dbf342616a2d2e7b41
[ "MIT" ]
null
null
null
# Released under the MIT License. See LICENSE for details. # """Bits of functionality common to ballistica client and server components."""
35
78
0.771429
19
140
5.684211
0.947368
0
0
0
0
0
0
0
0
0
0
0
0.15
140
3
79
46.666667
0.907563
0.928571
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
946f30f9387c6c8f7e6cd66ecde7c286751d1fc1
140
py
Python
tests/tests/screens/screens/test_screens.py
centergy/flex_ussd
ddc0ccd192e3a0a82e8b7705f088862d59656c28
[ "MIT" ]
null
null
null
tests/tests/screens/screens/test_screens.py
centergy/flex_ussd
ddc0ccd192e3a0a82e8b7705f088862d59656c28
[ "MIT" ]
null
null
null
tests/tests/screens/screens/test_screens.py
centergy/flex_ussd
ddc0ccd192e3a0a82e8b7705f088862d59656c28
[ "MIT" ]
null
null
null
from flex.ussd.screens import UssdScreen


# NOTE(review): name looks like a typo for "AbstractOne"; kept as-is because
# it is referenced by subclasses and possibly elsewhere in the test suite.
class AbsractOne(UssdScreen):
    """Abstract screen fixture; ``Meta.abstract`` marks it non-instantiable."""
    class Meta:
        abstract = True


class Home(AbsractOne):
    """Concrete screen fixture derived from the abstract base."""
    pass
9.333333
40
0.742857
17
140
6.117647
0.764706
0.288462
0
0
0
0
0
0
0
0
0
0
0.185714
140
14
41
10
0.912281
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0.166667
0.166667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
9481c1171edc52dfcbc1a9877ebb6348ed166a5e
84
py
Python
lumin/nn/ensemble/__init__.py
choisant/lumin
c039136eb096e8f3800f13925f9325b99cf7e76b
[ "Apache-2.0" ]
43
2019-02-11T16:16:42.000Z
2021-12-13T15:35:20.000Z
lumin/nn/ensemble/__init__.py
choisant/lumin
c039136eb096e8f3800f13925f9325b99cf7e76b
[ "Apache-2.0" ]
48
2020-05-21T02:40:50.000Z
2021-08-10T11:07:08.000Z
lumin/nn/ensemble/__init__.py
choisant/lumin
c039136eb096e8f3800f13925f9325b99cf7e76b
[ "Apache-2.0" ]
14
2019-05-02T15:09:41.000Z
2022-01-12T21:13:34.000Z
# from .ensemble import * # noqa 403 # __all__ = [*ensemble.__all__] # noqa F405
21
44
0.654762
10
84
4.7
0.7
0
0
0
0
0
0
0
0
0
0
0.090909
0.214286
84
3
45
28
0.621212
0.904762
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
9488f4460b704b58bd7868b851740603de9d3660
6,962
py
Python
overwatch/stats/ids.py
jonghwanhyeon/overwatch-stats
37b19414dc746d0a9bd5a2cf38ee949ffdf62e25
[ "MIT" ]
12
2017-09-02T10:47:24.000Z
2018-06-11T16:09:21.000Z
overwatch/stats/ids.py
jonghwanhyeon/overwatch-stats
37b19414dc746d0a9bd5a2cf38ee949ffdf62e25
[ "MIT" ]
5
2018-04-01T09:30:39.000Z
2021-01-14T09:52:04.000Z
overwatch/stats/ids.py
hyeon0145/overwatch-stats
37b19414dc746d0a9bd5a2cf38ee949ffdf62e25
[ "MIT" ]
2
2017-09-19T15:11:49.000Z
2018-03-30T20:02:50.000Z
OVERALL_CATEGORY_ID = '0x02E00000FFFFFFFF' HERO_CATEGORY_IDS = { 'reaper': '0x02E0000000000002', 'tracer': '0x02E0000000000003', 'mercy': '0x02E0000000000004', 'hanzo': '0x02E0000000000005', 'torbjorn': '0x02E0000000000006', 'reinhardt': '0x02E0000000000007', 'pharah': '0x02E0000000000008', 'winston': '0x02E0000000000009', 'widowmaker': '0x02E000000000000A', 'bastion': '0x02E0000000000015', 'symmetra': '0x02E0000000000016', 'zenyatta': '0x02E0000000000020', 'genji': '0x02E0000000000029', 'roadhog': '0x02E0000000000040', 'mccree': '0x02E0000000000042', 'junkrat': '0x02E0000000000065', 'zarya': '0x02E0000000000068', 'soldier76': '0x02E000000000006E', 'lucio': '0x02E0000000000079', 'dva': '0x02E000000000007A', 'mei': '0x02E00000000000DD', 'sombra': '0x02E000000000012E', 'ana': '0x02E000000000013B', 'orisa': '0x02E000000000013E', 'doomfist': '0x02E000000000012F', 'moira': '0x02E00000000001A2', 'brigitte': '0x02E0000000000195', 'wrecking_ball': '0x02E00000000001CA', } INVERTED_HERO_CATEGORY_IDS = {category_id: hero for hero, category_id in HERO_CATEGORY_IDS.items()} # Taken from https://github.com/SunDwarf/OWAPI/blob/master/owapi/prestige.py LEVEL_IDS = { # Bronze '0x0250000000000918': 0, '0x0250000000000919': 0, '0x025000000000091A': 0, '0x025000000000091B': 0, '0x025000000000091C': 0, '0x025000000000091D': 0, '0x025000000000091E': 0, '0x025000000000091F': 0, '0x0250000000000920': 0, '0x0250000000000921': 0, '0x0250000000000922': 100, '0x0250000000000924': 100, '0x0250000000000925': 100, '0x0250000000000926': 100, '0x025000000000094C': 100, '0x0250000000000927': 100, '0x0250000000000928': 100, '0x0250000000000929': 100, '0x025000000000092B': 100, '0x0250000000000950': 100, '0x025000000000092A': 200, '0x025000000000092C': 200, '0x0250000000000937': 200, '0x025000000000093B': 200, '0x0250000000000933': 200, '0x0250000000000923': 200, '0x0250000000000944': 200, '0x0250000000000948': 200, '0x025000000000093F': 200, '0x0250000000000951': 200, '0x025000000000092D': 300, 
'0x0250000000000930': 300, '0x0250000000000934': 300, '0x0250000000000938': 300, '0x0250000000000940': 300, '0x0250000000000949': 300, '0x0250000000000952': 300, '0x025000000000094D': 300, '0x0250000000000945': 300, '0x025000000000093C': 300, '0x025000000000092E': 400, '0x0250000000000931': 400, '0x0250000000000935': 400, '0x025000000000093D': 400, '0x0250000000000946': 400, '0x025000000000094A': 400, '0x0250000000000953': 400, '0x025000000000094E': 400, '0x0250000000000939': 400, '0x0250000000000941': 400, '0x025000000000092F': 500, '0x0250000000000932': 500, '0x025000000000093E': 500, '0x0250000000000936': 500, '0x025000000000093A': 500, '0x0250000000000942': 500, '0x0250000000000947': 500, '0x025000000000094F': 500, '0x025000000000094B': 500, '0x0250000000000954': 500, # Silver '0x0250000000000956': 600, '0x025000000000095C': 600, '0x025000000000095D': 600, '0x025000000000095E': 600, '0x025000000000095F': 600, '0x0250000000000960': 600, '0x0250000000000961': 600, '0x0250000000000962': 600, '0x0250000000000963': 600, '0x0250000000000964': 600, '0x0250000000000957': 700, '0x0250000000000965': 700, '0x0250000000000966': 700, '0x0250000000000967': 700, '0x0250000000000968': 700, '0x0250000000000969': 700, '0x025000000000096A': 700, '0x025000000000096B': 700, '0x025000000000096C': 700, '0x025000000000096D': 700, '0x0250000000000958': 800, '0x025000000000096E': 800, '0x025000000000096F': 800, '0x0250000000000970': 800, '0x0250000000000971': 800, '0x0250000000000972': 800, '0x0250000000000973': 800, '0x0250000000000974': 800, '0x0250000000000975': 800, '0x0250000000000976': 800, '0x0250000000000959': 900, '0x0250000000000977': 900, '0x0250000000000978': 900, '0x0250000000000979': 900, '0x025000000000097A': 900, '0x025000000000097B': 900, '0x025000000000097C': 900, '0x025000000000097D': 900, '0x025000000000097E': 900, '0x025000000000097F': 900, '0x025000000000095A': 1000, '0x0250000000000980': 1000, '0x0250000000000981': 1000, '0x0250000000000982': 1000, 
'0x0250000000000983': 1000, '0x0250000000000984': 1000, '0x0250000000000985': 1000, '0x0250000000000986': 1000, '0x0250000000000987': 1000, '0x0250000000000988': 1000, '0x025000000000095B': 1100, '0x0250000000000989': 1100, '0x025000000000098A': 1100, '0x025000000000098B': 1100, '0x025000000000098C': 1100, '0x025000000000098D': 1100, '0x025000000000098E': 1100, '0x025000000000098F': 1100, '0x0250000000000991': 1100, '0x0250000000000990': 1100, # Gold '0x0250000000000992': 1200, '0x0250000000000993': 1200, '0x0250000000000994': 1200, '0x0250000000000995': 1200, '0x0250000000000996': 1200, '0x0250000000000997': 1200, '0x0250000000000998': 1200, '0x0250000000000999': 1200, '0x025000000000099A': 1200, '0x025000000000099B': 1200, '0x025000000000099C': 1300, '0x025000000000099D': 1300, '0x025000000000099E': 1300, '0x025000000000099F': 1300, '0x02500000000009A0': 1300, '0x02500000000009A1': 1300, '0x02500000000009A2': 1300, '0x02500000000009A3': 1300, '0x02500000000009A4': 1300, '0x02500000000009A5': 1300, '0x02500000000009A6': 1400, '0x02500000000009A7': 1400, '0x02500000000009A8': 1400, '0x02500000000009A9': 1400, '0x02500000000009AA': 1400, '0x02500000000009AB': 1400, '0x02500000000009AC': 1400, '0x02500000000009AD': 1400, '0x02500000000009AE': 1400, '0x02500000000009AF': 1400, '0x02500000000009B0': 1500, '0x02500000000009B1': 1500, '0x02500000000009B2': 1500, '0x02500000000009B3': 1500, '0x02500000000009B4': 1500, '0x02500000000009B5': 1500, '0x02500000000009B6': 1500, '0x02500000000009B7': 1500, '0x02500000000009B8': 1500, '0x02500000000009B9': 1500, '0x02500000000009BA': 1600, '0x02500000000009BB': 1600, '0x02500000000009BC': 1600, '0x02500000000009BD': 1600, '0x02500000000009BE': 1600, '0x02500000000009BF': 1600, '0x02500000000009C0': 1600, '0x02500000000009C1': 1600, '0x02500000000009C2': 1600, '0x02500000000009C3': 1600, '0x02500000000009C4': 1700, '0x02500000000009C5': 1700, '0x02500000000009C6': 1700, '0x02500000000009C7': 1700, '0x02500000000009C8': 1700, 
'0x02500000000009C9': 1700, '0x02500000000009CA': 1700, '0x02500000000009CB': 1700, '0x02500000000009CC': 1700, '0x02500000000009CD': 1700, }
31.502262
99
0.682275
457
6,962
10.365427
0.612691
0.010133
0.0095
0
0
0
0
0
0
0
0
0.703704
0.185579
6,962
220
100
31.645455
0.131746
0.013358
0
0
0
0
0.574738
0
0
0
0.548077
0
0
1
0
false
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
4
847a476fd67e7e3cc4666225c8b259822acfd9f5
2,861
py
Python
mincaml/beta.py
aita/MinCaml.py
17ccd4377a08dffeaefab483032c46ec4979787d
[ "Apache-2.0" ]
1
2022-01-26T10:50:28.000Z
2022-01-26T10:50:28.000Z
mincaml/beta.py
aita/py-mincaml
17ccd4377a08dffeaefab483032c46ec4979787d
[ "Apache-2.0" ]
null
null
null
mincaml/beta.py
aita/py-mincaml
17ccd4377a08dffeaefab483032c46ec4979787d
[ "Apache-2.0" ]
null
null
null
from pyrsistent import pmap

from . import logger
from .util import find


class Visitor:
    """Rewrites a KNormal-form expression tree, applying beta-reduction.

    ``env`` maps a variable name to the name it was aliased to by a
    ``Let x = Var y`` binding; ``find`` resolves a name through that map.
    Nodes are tuples whose first element is the constructor tag.
    """

    def visit(self, env, e):
        # Dispatch on the node tag; every tag has a matching visit_ method.
        return getattr(self, "visit_" + e[0])(env, e)

    # --- leaves: no variable references, returned untouched -------------

    def visit_Unit(self, env, e):
        return e

    def visit_Int(self, env, e):
        return e

    def visit_Float(self, env, e):
        return e

    def visit_ExtArray(self, env, e):
        return e

    # --- one-operand nodes: resolve the single name they mention --------

    def _unary(self, env, e):
        return (e[0], find(env, e[1]))

    def visit_Neg(self, env, e):
        return self._unary(env, e)

    def visit_FNeg(self, env, e):
        return self._unary(env, e)

    def visit_Var(self, env, e):
        return self._unary(env, e)

    # --- two-operand nodes: resolve both names --------------------------

    def _binary(self, env, e):
        return (e[0], find(env, e[1]), find(env, e[2]))

    def visit_Add(self, env, e):
        return self._binary(env, e)

    def visit_Sub(self, env, e):
        return self._binary(env, e)

    def visit_FAdd(self, env, e):
        return self._binary(env, e)

    def visit_FSub(self, env, e):
        return self._binary(env, e)

    def visit_FMul(self, env, e):
        return self._binary(env, e)

    def visit_FDiv(self, env, e):
        return self._binary(env, e)

    def visit_Get(self, env, e):
        return self._binary(env, e)

    def visit_Put(self, env, e):
        return (e[0], find(env, e[1]), find(env, e[2]), find(env, e[3]))

    # --- conditionals: resolve compared names, recurse into branches ----

    def _branch(self, env, e):
        return (
            e[0],
            find(env, e[1]),
            find(env, e[2]),
            self.visit(env, e[3]),
            self.visit(env, e[4]),
        )

    def visit_IfEq(self, env, e):
        return self._branch(env, e)

    def visit_IfLE(self, env, e):
        return self._branch(env, e)

    # --- binding forms --------------------------------------------------

    def visit_Let(self, env, e):
        (x, t), bound, body = e[1], e[2], e[3]
        new_bound = self.visit(env, bound)
        if new_bound[0] == "Var":
            # The binding is a pure alias (Let x = Var y): record the
            # substitution and drop the Let node entirely.
            y = new_bound[1]
            logger.info(f"beta-reduction {x} = {y}.")
            return self.visit(env.set(x, y), body)
        return (e[0], (x, t), new_bound, self.visit(env, body))

    def visit_LetRec(self, env, e):
        fundef = e[1]
        rewritten = fundef._replace(body=self.visit(env, fundef.body))
        return (e[0], rewritten, self.visit(env, e[2]))

    def visit_LetTuple(self, env, e):
        return (e[0], e[1], find(env, e[2]), self.visit(env, e[3]))

    # --- application and tuple construction -----------------------------

    def visit_App(self, env, e):
        return (e[0], find(env, e[1]), [find(env, a) for a in e[2]])

    def visit_ExtFunApp(self, env, e):
        # External callee name (e[1]) is kept verbatim; only args resolve.
        return (e[0], e[1], [find(env, a) for a in e[2]])

    def visit_Tuple(self, env, e):
        return (e[0], [find(env, v) for v in e[1]])


def reduction(e):
    """Entry point: beta-reduce ``e`` starting from an empty environment."""
    return Visitor().visit(pmap(), e)
26.009091
72
0.496679
467
2,861
2.976445
0.139186
0.164029
0.14964
0.211511
0.608633
0.584173
0.584173
0.517986
0.501439
0.501439
0
0.037968
0.31877
2,861
109
73
26.247706
0.675218
0
0
0.345679
0
0
0.011884
0
0
0
0
0
0
1
0.308642
false
0
0.037037
0.271605
0.679012
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
848241c80266ef1ad52d7380feee252a6561ee95
217
py
Python
lista/schemas/prestador_schema.py
ViniciusGarciaSilva/izi-serv-backend
6b9288b9bf4b20bfe86c291dc321f3e1476f1198
[ "MIT" ]
null
null
null
lista/schemas/prestador_schema.py
ViniciusGarciaSilva/izi-serv-backend
6b9288b9bf4b20bfe86c291dc321f3e1476f1198
[ "MIT" ]
null
null
null
lista/schemas/prestador_schema.py
ViniciusGarciaSilva/izi-serv-backend
6b9288b9bf4b20bfe86c291dc321f3e1476f1198
[ "MIT" ]
null
null
null
"""Marshmallow (de)serialization schema for the Prestador model."""
from marshmallow import fields
from marshmallow_sqlalchemy import ModelSchema

from lista.models.prestador_model import PrestadorModel


class PrestadorSchema(ModelSchema):
    """Schema auto-derived from ``PrestadorModel`` columns."""

    class Meta:
        # ModelSchema introspects the SQLAlchemy model to build fields.
        model = PrestadorModel
31
55
0.824885
23
217
7.695652
0.608696
0.169492
0
0
0
0
0
0
0
0
0
0
0.142857
217
7
56
31
0.951613
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
84a06d1506beb62e6347ce70533ace4e51a84886
229
py
Python
api/admin.py
SchoolOrchestration/Firehose
4b1424c8b86e601a9fe87d64dd760627969e1a4b
[ "MIT" ]
null
null
null
api/admin.py
SchoolOrchestration/Firehose
4b1424c8b86e601a9fe87d64dd760627969e1a4b
[ "MIT" ]
null
null
null
api/admin.py
SchoolOrchestration/Firehose
4b1424c8b86e601a9fe87d64dd760627969e1a4b
[ "MIT" ]
null
null
null
"""Django admin configuration for the Payload model."""
from django.contrib import admin

from .models import Payload


@admin.register(Payload)
class PayloadAdmin(admin.ModelAdmin):
    """Change-list shows the request line plus both parameter payloads."""

    list_display = ('method', 'path', 'get', 'post')
    # Full-text search over the captured GET/POST bodies.
    search_fields = ('get', 'post')
28.625
50
0.733624
28
229
5.928571
0.714286
0.084337
0
0
0
0
0
0
0
0
0
0
0.122271
229
8
51
28.625
0.825871
0
0
0
0
0
0.104348
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
84c2f3a3d6bcd71b7de2df01f159831079d4eb6a
210
py
Python
core/wsgi.py
nicolaerario/djano-custom-user
c18f109bd1dbe3ae7a5a20fe1fd33313caace246
[ "MIT" ]
1
2019-08-23T05:05:33.000Z
2019-08-23T05:05:33.000Z
core/wsgi.py
nicolaerario/djano-custom-user
c18f109bd1dbe3ae7a5a20fe1fd33313caace246
[ "MIT" ]
2
2021-06-04T21:56:46.000Z
2021-09-22T17:58:55.000Z
core/wsgi.py
nicolaerario/djano-custom-user
c18f109bd1dbe3ae7a5a20fe1fd33313caace246
[ "MIT" ]
null
null
null
"""WSGI entry point for the ``core`` Django project."""
import os

from django.core.wsgi import get_wsgi_application
from dotenv import load_dotenv

# Pull variables from a .env file into os.environ BEFORE resolving the
# settings module below — ordering is deliberate, do not move.
load_dotenv()

# setdefault: an explicitly exported DJANGO_SETTINGS_MODULE (or one loaded
# from .env above) takes precedence over the 'core.settings' fallback.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'core.settings')

# Module-level ``application`` is the name WSGI servers look up.
application = get_wsgi_application()
21
64
0.828571
29
210
5.724138
0.482759
0.084337
0.216867
0
0
0
0
0
0
0
0
0
0.090476
210
9
65
23.333333
0.86911
0
0
0
0
0
0.166667
0.104762
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
ca0bdbd7fc80cf5dd847073ab59694f33d27e22f
2,678
py
Python
test/test_store.py
64bit/wikiracer
f2bb66dc0c24f80640578f79f952d58991028d6d
[ "MIT" ]
null
null
null
test/test_store.py
64bit/wikiracer
f2bb66dc0c24f80640578f79f952d58991028d6d
[ "MIT" ]
null
null
null
test/test_store.py
64bit/wikiracer
f2bb66dc0c24f80640578f79f952d58991028d6d
[ "MIT" ]
null
null
null
"""Store-layer tests — currently disabled (see the TODO below)."""
import unittest
import sys

# Make the package root importable when running from the test/ directory.
sys.path.append("../")

from store.store import Store
from store.neo4jstore import Neo4jStore
from store.sqlitestore import SqliteStore
# NOTE(review): ``neo4j.v1`` is the legacy 1.x driver namespace — confirm
# the pinned neo4j-driver version still provides it.
from neo4j.v1 import GraphDatabase, basic_auth

#TODO fix tests
# NOTE(review): the entire test class below is dead code — it sits inside a
# module-level string literal, so unittest never discovers it.  It also uses
# Python-2 idioms (``filter(...)[0]`` is not subscriptable on Python 3),
# which must be fixed before re-enabling.
'''
class TestStore(unittest.TestCase):
    def setUp(self):
        self.store = Neo4jStore()
        self.pages = [
            { 'pageid': 1, 'title': 'one', 'fullurl' : 'https://wiki.com/one' },
            { 'pageid': 2, 'title': 'two', 'fullurl' : 'https://wiki.com/two' },
        ]
        self.pages_dist_1 = [
            { 'pageid': 3, 'title': 'three', 'fullurl' : 'https://wiki.com/three' },
            { 'pageid': 4, 'title': 'four', 'fullurl' : 'https://wiki.com/four' },
        ]
        self.pages_dist_2 = [
            { 'pageid': 5, 'title': 'five', 'fullurl' : 'https://wiki.com/five' },
            { 'pageid': 6, 'title': 'six', 'fullurl' : 'https://wiki.com/six' },
        ]

    def tearDown(self):
        pass

    def test_save_pages(self):
        self.store.save_pages(self.pages)
        for page in self.pages:
            read_page = self.store.get_page_from_id(page['pageid'])
            self.assertEqual(read_page, page)

    def test_save_and_get_page_links(self):
        self.store.save_pages(self.pages)
        self.store.save_page_links(1, self.pages_dist_1)
        self.store.save_page_links(3, self.pages_dist_2)
        read_page_links = self.store.get_page_links(1)
        self.assertEqual(2, len(read_page_links))
        page_3 = filter(lambda p: p['pageid'] == 3, read_page_links)[0]
        page_4 = filter(lambda p: p['pageid'] == 4, read_page_links)[0]
        self.assertEqual(self.pages_dist_1[0], page_3)
        self.assertEqual(self.pages_dist_1[1], page_4)
        read_page_links2 = self.store.get_page_links(3)
        self.assertEqual(2, len(read_page_links2))
        page_5 = filter(lambda p: p['pageid'] == 5, read_page_links2)[0]
        page_6 = filter(lambda p: p['pageid'] == 6, read_page_links2)[0]
        self.assertEqual(self.pages_dist_2[0], page_5)
        self.assertEqual(self.pages_dist_2[1], page_6)
        self.assertEqual([], self.store.get_page_links(2))
        self.assertEqual([], self.store.get_page_links(4))
        self.assertEqual([], self.store.get_page_links(5))
        self.assertEqual([], self.store.get_page_links(6))

    def test_get_page_from_url_title(self):
        self.store.save_pages(self.pages)
        page1 = self.store.get_page_from_url_title('one')
        self.assertEqual(1, len(page1))
        self.assertEqual(self.pages[0], page1[0])
'''

if __name__ == "__main__":
    unittest.main()
26.78
72
0.612024
363
2,678
4.269972
0.184573
0.08129
0.110323
0.082581
0.414194
0.263871
0.152903
0
0
0
0
0.028823
0.235624
2,678
99
73
27.050505
0.728383
0.005228
0
0
0
0
0.042471
0
0
0
0
0.010101
0
1
0
true
0
0.666667
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
1
0
1
0
0
4