hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d08c9461b14e681141c812f809bd03da1d2d9d88
| 62
|
py
|
Python
|
tests/test_client.py
|
JoelLefkowitz/vault-wrapper
|
5b689add64da2f91d216aa3a98bd7aaa726cf98a
|
[
"MIT"
] | 1
|
2021-08-03T17:34:37.000Z
|
2021-08-03T17:34:37.000Z
|
tests/test_client.py
|
JoelLefkowitz/vault-wrapper
|
5b689add64da2f91d216aa3a98bd7aaa726cf98a
|
[
"MIT"
] | null | null | null |
tests/test_client.py
|
JoelLefkowitz/vault-wrapper
|
5b689add64da2f91d216aa3a98bd7aaa726cf98a
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.mark.skip
def test_client():
pass
| 8.857143
| 18
| 0.709677
| 9
| 62
| 4.777778
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 62
| 6
| 19
| 10.333333
| 0.86
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
d0bef7012a3695db58b52487af42e5d424414d1d
| 978
|
py
|
Python
|
data_utils/lob_data_utils/test_overview.py
|
vevurka/mt-lob
|
70989bcb61f4cfa7884437e1cff2db2454b3ceff
|
[
"MIT"
] | 2
|
2019-04-17T02:19:22.000Z
|
2019-05-23T12:14:59.000Z
|
data_utils/lob_data_utils/test_overview.py
|
vevurka/mt-lob
|
70989bcb61f4cfa7884437e1cff2db2454b3ceff
|
[
"MIT"
] | 10
|
2020-01-28T22:32:13.000Z
|
2021-09-08T00:41:37.000Z
|
data_utils/lob_data_utils/test_overview.py
|
vevurka/mt-lob
|
70989bcb61f4cfa7884437e1cff2db2454b3ceff
|
[
"MIT"
] | 6
|
2018-12-05T22:17:05.000Z
|
2020-09-03T03:00:50.000Z
|
import unittest
import pandas as pd
from lob_data_utils.overview import Overview
class Test(unittest.TestCase):
def test_is_in_results_for_one_key_value(self):
overview = Overview('1', 2)
df = pd.DataFrame()
df['a'] = [1]
self.assertTrue(overview.is_in_results(df, {'a': 1}))
def test_is_in_results_for_more_key_value(self):
overview = Overview('1', 2)
df = pd.DataFrame()
df['a'] = [1, 2, 3]
df['b'] = [1, 2, 3]
self.assertTrue(overview.is_in_results(df, {'a': 1, 'b': 1}))
self.assertFalse(overview.is_in_results(df, {'a': 1, 'b': 2}))
def test_is_in_results_for_more_key_value_strings(self):
overview = Overview('1', 2)
df = pd.DataFrame()
df['a'] = [1, 2, 3]
df['b'] = ['str1', 'str2', 'str3']
self.assertTrue(overview.is_in_results(df, {'a': 1, 'b': 'str1'}))
self.assertFalse(overview.is_in_results(df, {'a': 1, 'b': 'str2'}))
| 33.724138
| 75
| 0.587935
| 145
| 978
| 3.731034
| 0.248276
| 0.05915
| 0.162662
| 0.175601
| 0.763401
| 0.763401
| 0.724584
| 0.724584
| 0.724584
| 0.548983
| 0
| 0.037433
| 0.235174
| 978
| 28
| 76
| 34.928571
| 0.685829
| 0
| 0
| 0.347826
| 0
| 0
| 0.037832
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 1
| 0.130435
| false
| 0
| 0.130435
| 0
| 0.304348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ef8bbc67e4fc84d44c51f7c81ba2dde433dae16b
| 127
|
py
|
Python
|
Inheritance/players_and_monsters_03E/project/knight.py
|
MNikov/Python-OOP-October-2020
|
a53e4555758ec810605e31e7b2c71b65c49b2332
|
[
"MIT"
] | null | null | null |
Inheritance/players_and_monsters_03E/project/knight.py
|
MNikov/Python-OOP-October-2020
|
a53e4555758ec810605e31e7b2c71b65c49b2332
|
[
"MIT"
] | null | null | null |
Inheritance/players_and_monsters_03E/project/knight.py
|
MNikov/Python-OOP-October-2020
|
a53e4555758ec810605e31e7b2c71b65c49b2332
|
[
"MIT"
] | null | null | null |
from project.hero import Hero
class Knight(Hero):
def __init__(self, name, level):
super().__init__(name, level)
| 18.142857
| 37
| 0.677165
| 17
| 127
| 4.588235
| 0.705882
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204724
| 127
| 6
| 38
| 21.166667
| 0.772277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
4be576a661ad20fab6115fe1489a1c557147ddb8
| 22
|
py
|
Python
|
emotion_detection/__init__.py
|
GrayFlash/DL
|
329beb77f7898084ecb38f7e81ae58df5a248569
|
[
"MIT"
] | null | null | null |
emotion_detection/__init__.py
|
GrayFlash/DL
|
329beb77f7898084ecb38f7e81ae58df5a248569
|
[
"MIT"
] | null | null | null |
emotion_detection/__init__.py
|
GrayFlash/DL
|
329beb77f7898084ecb38f7e81ae58df5a248569
|
[
"MIT"
] | null | null | null |
from .haar import haar
| 22
| 22
| 0.818182
| 4
| 22
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4beadf8a92677231a178030d5fb2ca1e95b21b09
| 235
|
py
|
Python
|
backend/domain/exceptions/__init__.py
|
uesleicarvalhoo/Ecommerce
|
1d8d0f0c522dcd27fd90e315989b6fa93caf62b8
|
[
"MIT"
] | null | null | null |
backend/domain/exceptions/__init__.py
|
uesleicarvalhoo/Ecommerce
|
1d8d0f0c522dcd27fd90e315989b6fa93caf62b8
|
[
"MIT"
] | null | null | null |
backend/domain/exceptions/__init__.py
|
uesleicarvalhoo/Ecommerce
|
1d8d0f0c522dcd27fd90e315989b6fa93caf62b8
|
[
"MIT"
] | null | null | null |
from .auth import AuthorizationError
from .checkout import CheckoutError
from .common import BaseDomainException, DuplicatedDataError, NotFoundError
from .payment import PaymentRefusedError
from .product import ProductUnavaliableError
| 39.166667
| 75
| 0.876596
| 22
| 235
| 9.363636
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093617
| 235
| 5
| 76
| 47
| 0.967136
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ef667441bfca94b09f1dc4bd1aaf36310fcc06a8
| 484
|
py
|
Python
|
Chapter05-old/Exercise5_18/Exercise5_18.py
|
PacktWorkshops/The-Spark-Workshop
|
f5b052b67d3aaf805eb48c9958c0dcfd237cc841
|
[
"MIT"
] | 7
|
2019-11-11T13:17:04.000Z
|
2021-01-18T22:09:44.000Z
|
Chapter05-old/Exercise5_18/Exercise5_18.py
|
PacktWorkshops/The-Spark-Workshop
|
f5b052b67d3aaf805eb48c9958c0dcfd237cc841
|
[
"MIT"
] | null | null | null |
Chapter05-old/Exercise5_18/Exercise5_18.py
|
PacktWorkshops/The-Spark-Workshop
|
f5b052b67d3aaf805eb48c9958c0dcfd237cc841
|
[
"MIT"
] | 12
|
2020-04-20T16:23:51.000Z
|
2021-07-07T20:37:45.000Z
|
spark = SparkSession \
.builder \
.appName("exercise_eighteen") \
.getOrCreate()
(df.select("id", "first_name", "last_name", "gender", "country", "birthdate", "salary")
.filter(df["country"] == "United States")
.orderBy(df["gender"].asc(), df["salary"].asc())
.show())
df.select("id", "first_name", "last_name", "gender", "country", "birthdate", "salary") \
.filter(df["country"] == "United States") \
.orderBy(df["gender"].asc(), df["salary"].asc()) \
.show()
| 32.266667
| 88
| 0.605372
| 55
| 484
| 5.236364
| 0.418182
| 0.055556
| 0.069444
| 0.104167
| 0.798611
| 0.798611
| 0.798611
| 0.798611
| 0.798611
| 0.798611
| 0
| 0
| 0.130165
| 484
| 14
| 89
| 34.571429
| 0.684086
| 0
| 0
| 0
| 0
| 0
| 0.369835
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
322f083fc4c7496fe517acf1bb6fc048b888dd00
| 266
|
py
|
Python
|
cde-root/local/munk/data/phd/phd/ptgraph/ptversion.py
|
NirBenTalLab/proorigami-cde-package
|
273379075830a9b94d3f2884661a54f853777ff6
|
[
"MIT"
] | null | null | null |
cde-root/local/munk/data/phd/phd/ptgraph/ptversion.py
|
NirBenTalLab/proorigami-cde-package
|
273379075830a9b94d3f2884661a54f853777ff6
|
[
"MIT"
] | null | null | null |
cde-root/local/munk/data/phd/phd/ptgraph/ptversion.py
|
NirBenTalLab/proorigami-cde-package
|
273379075830a9b94d3f2884661a54f853777ff6
|
[
"MIT"
] | null | null | null |
# autogenerated by /home/astivala/phd/ptgraph/buildversion.sh
# Fri Aug 10 09:43:43 EST 2012
def get_version():
"""
Return version string containing global version number and 'build' date
"""
return "Revision 4288:4291, Fri Aug 10 09:43:43 EST 2012"
| 33.25
| 75
| 0.710526
| 41
| 266
| 4.585366
| 0.707317
| 0.06383
| 0.085106
| 0.106383
| 0.223404
| 0.223404
| 0.223404
| 0.223404
| 0
| 0
| 0
| 0.148148
| 0.18797
| 266
| 7
| 76
| 38
| 0.722222
| 0.605263
| 0
| 0
| 1
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
326d7f19f31cc37076ad0eed70bc2f052844c8dc
| 190
|
py
|
Python
|
tomopal/__init__.py
|
robinthibaut/TomoPal
|
bb3d1f9d56afc53c641a72b47e4419ee0cfd587b
|
[
"BSD-3-Clause"
] | 2
|
2021-03-01T11:06:17.000Z
|
2021-09-24T11:49:31.000Z
|
tomopal/__init__.py
|
robinthibaut/TomoPal
|
bb3d1f9d56afc53c641a72b47e4419ee0cfd587b
|
[
"BSD-3-Clause"
] | 53
|
2021-03-30T14:05:17.000Z
|
2022-03-31T09:55:14.000Z
|
tomopal/__init__.py
|
robinthibaut/TomoPal
|
bb3d1f9d56afc53c641a72b47e4419ee0cfd587b
|
[
"BSD-3-Clause"
] | 1
|
2020-06-16T11:16:39.000Z
|
2020-06-16T11:16:39.000Z
|
__name__ = "tomopal"
__author__ = "Robin Thibaut"
__all__ = ["crtomopy", "geoview", "model", "post", "spatial", "utils"]
from tomopal import crtomopy, geoview, model, post, spatial, utils
| 27.142857
| 70
| 0.7
| 21
| 190
| 5.761905
| 0.666667
| 0.247934
| 0.330579
| 0.396694
| 0.595041
| 0.595041
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 190
| 6
| 71
| 31.666667
| 0.737805
| 0
| 0
| 0
| 0
| 0
| 0.294737
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3282191d7a7cc535be8716667853c3faf09c11de
| 2,956
|
py
|
Python
|
tests/test_layout_compilation.py
|
Mischback/django-calingen
|
3354c751e29d301609ec44e64d69a8729ec36de4
|
[
"MIT"
] | null | null | null |
tests/test_layout_compilation.py
|
Mischback/django-calingen
|
3354c751e29d301609ec44e64d69a8729ec36de4
|
[
"MIT"
] | 51
|
2021-11-15T20:44:19.000Z
|
2022-02-10T08:33:08.000Z
|
tests/test_layout_compilation.py
|
Mischback/django-calingen
|
3354c751e29d301609ec44e64d69a8729ec36de4
|
[
"MIT"
] | null | null | null |
# SPDX-License-Identifier: MIT
"""Provide tests for calingen.checks."""
# Python imports
from unittest import mock, skip # noqa: F401
# Django imports
from django.test import modify_settings, override_settings, tag # noqa: F401
# app imports
from calingen.contrib.layouts.simple_event_list.simple_event_list import SimpleEventList
from calingen.contrib.layouts.year_by_week.year_by_week import YearByWeek
# local imports
from .util.testcases import CalingenTeXLayoutCompilationTestCase
class CalingenContribLayoutCompilationTest(CalingenTeXLayoutCompilationTestCase):
@modify_settings(
INSTALLED_APPS={"append": "calingen.contrib.layouts.simple_event_list"}
)
def test_simple_event_list_empty(self):
# Arrange
test_context = {}
test_context["target_year"] = 2021
test_context["layout_configuration"] = None
test_filename = "simple_event_list_empty.tex"
rendered_tex = SimpleEventList.render(test_context)
# Act
return_value = self.write_tex_to_tmp(rendered_tex, test_filename)
# Assert
self.assertTrue(return_value)
@modify_settings(
INSTALLED_APPS={"append": "calingen.contrib.layouts.simple_event_list"}
)
def test_simple_event_list_with_events(self):
# Arrange
test_target_year = 2021
test_context = {}
test_context["entries"] = self.get_entries(test_target_year)
test_context["target_year"] = test_target_year
test_context["layout_configuration"] = None
test_filename = "simple_event_list_full.tex"
rendered_tex = SimpleEventList.render(test_context)
# Act
return_value = self.write_tex_to_tmp(rendered_tex, test_filename)
# Assert
self.assertTrue(return_value)
@modify_settings(INSTALLED_APPS={"append": "calingen.contrib.layouts.year_by_week"})
def test_year_by_week_empty(self):
# Arrange
test_context = {}
test_context["target_year"] = 2021
test_context["layout_configuration"] = None
test_filename = "year_by_week_empty.tex"
rendered_tex = YearByWeek.render(test_context)
# Act
return_value = self.write_tex_to_tmp(rendered_tex, test_filename)
# Assert
self.assertTrue(return_value)
@modify_settings(INSTALLED_APPS={"append": "calingen.contrib.layouts.year_by_week"})
def test_year_by_week_with_events(self):
# Arrange
test_target_year = 2021
test_context = {}
test_context["entries"] = self.get_entries(test_target_year)
test_context["target_year"] = test_target_year
test_context["layout_configuration"] = None
test_filename = "year_by_week_full.tex"
rendered_tex = YearByWeek.render(test_context)
# Act
return_value = self.write_tex_to_tmp(rendered_tex, test_filename)
# Assert
self.assertTrue(return_value)
| 31.115789
| 88
| 0.704668
| 343
| 2,956
| 5.688047
| 0.206997
| 0.101486
| 0.061507
| 0.055356
| 0.784726
| 0.784726
| 0.749359
| 0.749359
| 0.749359
| 0.749359
| 0
| 0.009426
| 0.210419
| 2,956
| 94
| 89
| 31.446809
| 0.826478
| 0.073748
| 0
| 0.68
| 0
| 0
| 0.153279
| 0.093589
| 0
| 0
| 0
| 0
| 0.08
| 1
| 0.08
| false
| 0
| 0.1
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
328571a08ebd50e94b32288403df178e236c3208
| 192
|
py
|
Python
|
learn-to-code-with-python/03-Numbers-Booleans-and-Equality/boolean-mathematical-operators.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
learn-to-code-with-python/03-Numbers-Booleans-and-Equality/boolean-mathematical-operators.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
learn-to-code-with-python/03-Numbers-Booleans-and-Equality/boolean-mathematical-operators.py
|
MaciejZurek/python_practicing
|
0a426f2aed151573e1f8678e0239ff596d92bbde
|
[
"MIT"
] | null | null | null |
print(True)
print(False)
print("True")
print("False")
print(5 == 1)
print(5 != 1)
print("Ham" == "Ham")
print("ham " == "ham")
print(5 == 5.0)
print(5 < 1)
print(5 >= 5)
print(5 < 8 <= 7)
| 11.294118
| 22
| 0.541667
| 34
| 192
| 3.058824
| 0.264706
| 0.346154
| 0.201923
| 0.346154
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089172
| 0.182292
| 192
| 17
| 23
| 11.294118
| 0.573248
| 0
| 0
| 0
| 0
| 0
| 0.11399
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
32860e4637848b26d65cd6660e8bf45cc658af7c
| 32
|
py
|
Python
|
conan/tools/gnu/__init__.py
|
a4z/conan
|
dec9e0288f81462c53b9222a206002fbc525ea65
|
[
"MIT"
] | null | null | null |
conan/tools/gnu/__init__.py
|
a4z/conan
|
dec9e0288f81462c53b9222a206002fbc525ea65
|
[
"MIT"
] | 1
|
2020-12-21T10:06:30.000Z
|
2020-12-21T10:06:30.000Z
|
conan/tools/gnu/__init__.py
|
a4z/conan
|
dec9e0288f81462c53b9222a206002fbc525ea65
|
[
"MIT"
] | null | null | null |
from .make import MakeToolchain
| 16
| 31
| 0.84375
| 4
| 32
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
08c6e8f4424570b5d81157547d5aadc6620ea88d
| 38
|
py
|
Python
|
api/liberain/console/__main__.py
|
ViiSiX/Liberain
|
888b25ab0638fabf643e98bde325371757215211
|
[
"MIT"
] | null | null | null |
api/liberain/console/__main__.py
|
ViiSiX/Liberain
|
888b25ab0638fabf643e98bde325371757215211
|
[
"MIT"
] | null | null | null |
api/liberain/console/__main__.py
|
ViiSiX/Liberain
|
888b25ab0638fabf643e98bde325371757215211
|
[
"MIT"
] | null | null | null |
from . import manager
manager.run()
| 7.6
| 21
| 0.710526
| 5
| 38
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 38
| 4
| 22
| 9.5
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
08d97b1a43a93fd0f93057b8e3b5627a9e775f47
| 74
|
py
|
Python
|
btplotting-master/btplotting/analyzers/__init__.py
|
fredryce/stocker
|
041fbe8348f7a035a607a214477cf423c4259171
|
[
"MIT"
] | null | null | null |
btplotting-master/btplotting/analyzers/__init__.py
|
fredryce/stocker
|
041fbe8348f7a035a607a214477cf423c4259171
|
[
"MIT"
] | null | null | null |
btplotting-master/btplotting/analyzers/__init__.py
|
fredryce/stocker
|
041fbe8348f7a035a607a214477cf423c4259171
|
[
"MIT"
] | null | null | null |
from .plot import LivePlotAnalyzer
from .recorder import RecorderAnalyzer
| 24.666667
| 38
| 0.864865
| 8
| 74
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 2
| 39
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
08ea447b4a3faa1a86959cbd54f14402e8d92d9f
| 201
|
py
|
Python
|
allegro/proxy/__init__.py
|
xnetcat/allegro-scraper
|
463ca6830d44daa0533cf1b837802f5acfb705ff
|
[
"MIT"
] | null | null | null |
allegro/proxy/__init__.py
|
xnetcat/allegro-scraper
|
463ca6830d44daa0533cf1b837802f5acfb705ff
|
[
"MIT"
] | null | null | null |
allegro/proxy/__init__.py
|
xnetcat/allegro-scraper
|
463ca6830d44daa0533cf1b837802f5acfb705ff
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from allegro.proxy.proxy_checker import check_proxy, filter_proxies
from allegro.proxy.proxy_file import load_from_file
from allegro.proxy.proxy_gatherer import scrape_free_proxy_lists
| 33.5
| 67
| 0.870647
| 31
| 201
| 5.322581
| 0.516129
| 0.2
| 0.290909
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005435
| 0.084577
| 201
| 5
| 68
| 40.2
| 0.891304
| 0.059701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3ed37b2edece1334aadc390c637b4ee7ac13b4c5
| 84
|
py
|
Python
|
python-engine/lib/__init__.py
|
LordFitoi/PyVueGame
|
2204fbb6bbc46c0c9934334b4eae8e4762a6fb40
|
[
"MIT"
] | 2
|
2021-05-29T01:36:32.000Z
|
2021-08-06T18:16:34.000Z
|
python-engine/lib/__init__.py
|
LordFitoi/PygameXVue.js
|
2204fbb6bbc46c0c9934334b4eae8e4762a6fb40
|
[
"MIT"
] | 3
|
2021-05-30T01:51:43.000Z
|
2021-05-31T00:42:39.000Z
|
python-engine/lib/__init__.py
|
LordFitoi/PygameXVue.js
|
2204fbb6bbc46c0c9934334b4eae8e4762a6fb40
|
[
"MIT"
] | null | null | null |
from .server import WebSocketServer
from .color import Color
from .draw import Draw
| 21
| 35
| 0.821429
| 12
| 84
| 5.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 84
| 3
| 36
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ee6397c9ac0d4281749163fd1aaac01a1002c18
| 178
|
py
|
Python
|
solved_bronze/num10869.py
|
ilmntr/white_study
|
51d69d122b07e9a0922dddb134bff4ec79077eb9
|
[
"MIT"
] | null | null | null |
solved_bronze/num10869.py
|
ilmntr/white_study
|
51d69d122b07e9a0922dddb134bff4ec79077eb9
|
[
"MIT"
] | null | null | null |
solved_bronze/num10869.py
|
ilmntr/white_study
|
51d69d122b07e9a0922dddb134bff4ec79077eb9
|
[
"MIT"
] | null | null | null |
# 두 자연수 A와 B가 주어진다. 이때, A+B, A-B, A*B, A/B(몫), A%B(나머지)를 출력하는 프로그램을 작성하시오.
(a,b) = map(int,input().split())
print(a+b)
print(a-b)
print(a*b)
print(a//b)
print(a%b)
# //는 소수점 버림
| 17.8
| 74
| 0.573034
| 46
| 178
| 2.217391
| 0.478261
| 0.215686
| 0.343137
| 0.470588
| 0.421569
| 0.421569
| 0.343137
| 0.343137
| 0.343137
| 0.343137
| 0
| 0
| 0.157303
| 178
| 10
| 75
| 17.8
| 0.68
| 0.466292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.833333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
f5b74e4c5f9bd79b88644f875527ffe0526d2522
| 110
|
py
|
Python
|
tests/test_null.py
|
numpde/twig
|
abe08512fe4532df511487f0b8afc7df1787f6ce
|
[
"Unlicense"
] | null | null | null |
tests/test_null.py
|
numpde/twig
|
abe08512fe4532df511487f0b8afc7df1787f6ce
|
[
"Unlicense"
] | null | null | null |
tests/test_null.py
|
numpde/twig
|
abe08512fe4532df511487f0b8afc7df1787f6ce
|
[
"Unlicense"
] | null | null | null |
# RA, 2020-12-14
from unittest import TestCase
class Null(TestCase):
def test_null(self):
pass
| 12.222222
| 29
| 0.663636
| 16
| 110
| 4.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 0.245455
| 110
| 8
| 30
| 13.75
| 0.771084
| 0.127273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
f5fe5ec9a9bbde98466658acb59bbe4654ab03ac
| 159
|
py
|
Python
|
db_utils/__init__.py
|
rj79/lapmaster
|
edf12f7685df572f6022b50a808651ef5b0404d7
|
[
"BSD-3-Clause"
] | null | null | null |
db_utils/__init__.py
|
rj79/lapmaster
|
edf12f7685df572f6022b50a808651ef5b0404d7
|
[
"BSD-3-Clause"
] | null | null | null |
db_utils/__init__.py
|
rj79/lapmaster
|
edf12f7685df572f6022b50a808651ef5b0404d7
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2016 by Robert Johansson.
:license: BSD, see LICENSE for more details.
"""
from .db_upload import Uploader
| 22.714286
| 48
| 0.641509
| 21
| 159
| 4.809524
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03937
| 0.201258
| 159
| 6
| 49
| 26.5
| 0.755906
| 0.685535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
eb065b8ae31bd201c350125a2e3ab212604f8f82
| 540
|
py
|
Python
|
tests/python/pants_test/subsystem/subsystem_util.py
|
mpopenko-exos/pants
|
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/subsystem/subsystem_util.py
|
mpopenko-exos/pants
|
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
|
[
"Apache-2.0"
] | 1
|
2018-09-04T17:37:34.000Z
|
2018-09-04T19:42:58.000Z
|
tests/python/pants_test/subsystem/subsystem_util.py
|
mpopenko-exos/pants
|
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.testutil.subsystem.util import global_subsystem_instance as global_subsystem_instance # noqa
from pants.testutil.subsystem.util import init_subsystem as init_subsystem # noqa
from pants.testutil.subsystem.util import init_subsystems as init_subsystems # noqa
from pants_test.deprecated_testinfra import deprecated_testinfra_module
deprecated_testinfra_module('pants.testutil.subsystem.util')
| 49.090909
| 104
| 0.844444
| 72
| 540
| 6.138889
| 0.416667
| 0.081448
| 0.199095
| 0.235294
| 0.280543
| 0.280543
| 0.199095
| 0.199095
| 0
| 0
| 0
| 0.01232
| 0.098148
| 540
| 10
| 105
| 54
| 0.895277
| 0.261111
| 0
| 0
| 0
| 0
| 0.073791
| 0.073791
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
eb3828bd0110eeabd97fe6cff1e2c9dd24baf49e
| 182
|
py
|
Python
|
signer/config.py
|
DarkbordermanTemplate/web3
|
fe1deb783e64d8d624d314d715a6a7bdbade8a9e
|
[
"BSD-2-Clause"
] | null | null | null |
signer/config.py
|
DarkbordermanTemplate/web3
|
fe1deb783e64d8d624d314d715a6a7bdbade8a9e
|
[
"BSD-2-Clause"
] | null | null | null |
signer/config.py
|
DarkbordermanTemplate/web3
|
fe1deb783e64d8d624d314d715a6a7bdbade8a9e
|
[
"BSD-2-Clause"
] | null | null | null |
import os
class Config:
ACCOUNT = os.environ.get("ACCOUNT", "")
PASSWORD = os.environ.get("PASSWORD", "")
PROVIDER = os.environ.get("PROVIDER", "")
CONFIG = Config()
| 16.545455
| 45
| 0.620879
| 21
| 182
| 5.380952
| 0.428571
| 0.238938
| 0.318584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 182
| 10
| 46
| 18.2
| 0.768707
| 0
| 0
| 0
| 0
| 0
| 0.126374
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.166667
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
de242c205f4aa468c1399907f066e304e500da96
| 67
|
py
|
Python
|
models/total3d/modules/__init__.py
|
chengzhag/DeepPanoContext
|
14f847e51ec2bd08e0fc178dd1640541752addb7
|
[
"MIT"
] | 52
|
2021-09-12T02:41:02.000Z
|
2022-03-05T21:55:36.000Z
|
models/total3d/modules/__init__.py
|
chengzhag/DeepPanoContext
|
14f847e51ec2bd08e0fc178dd1640541752addb7
|
[
"MIT"
] | 2
|
2021-11-25T06:50:19.000Z
|
2022-01-19T10:11:06.000Z
|
models/total3d/modules/__init__.py
|
chengzhag/DeepPanoContext
|
14f847e51ec2bd08e0fc178dd1640541752addb7
|
[
"MIT"
] | 9
|
2021-09-18T01:44:05.000Z
|
2022-01-12T16:19:46.000Z
|
from .method import Total3D
from .layout_estimation import PoseNet
| 22.333333
| 38
| 0.850746
| 9
| 67
| 6.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016949
| 0.119403
| 67
| 2
| 39
| 33.5
| 0.932203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
de506d26ab98d14aa25b8f4d3106740cc482bb55
| 220
|
py
|
Python
|
projektrouska/logger.py
|
tomas-dostal/potrebujurousku
|
9589b53c21f116bd04bb3d11b67c59c3db0ca64f
|
[
"MIT"
] | 3
|
2020-10-25T08:32:52.000Z
|
2020-11-17T18:39:05.000Z
|
projektrouska/logger.py
|
tomas-dostal/potrebujurousku
|
9589b53c21f116bd04bb3d11b67c59c3db0ca64f
|
[
"MIT"
] | 12
|
2020-10-24T18:33:40.000Z
|
2021-07-12T16:21:32.000Z
|
projektrouska/logger.py
|
tomas-dostal/potrebujurousku
|
9589b53c21f116bd04bb3d11b67c59c3db0ca64f
|
[
"MIT"
] | null | null | null |
class Logger(object):
    """Minimal console logger: each method prints its message with a fixed prefix.

    NOTE: the parameter name ``str`` shadows the builtin; it is kept because
    it is part of the public (keyword-callable) interface.
    """

    def log(self, str):
        """Print a generic log entry prefixed with 'Log: '."""
        print(f"Log: {str}")

    def error(self, str):
        """Print an error entry prefixed with 'Error: '."""
        print(f"Error: {str}")

    def message(self, str):
        """Print a plain message entry prefixed with 'Message: '."""
        print(f"Message: {str}")
| 20
| 40
| 0.540909
| 27
| 220
| 4.407407
| 0.407407
| 0.176471
| 0.302521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.254545
| 220
| 10
| 41
| 22
| 0.72561
| 0
| 0
| 0
| 0
| 0
| 0.122727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0
| 0.571429
| 0.428571
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
|
0
| 5
|
de731c896248eef5b23e42e6975879e05fd262f7
| 19
|
py
|
Python
|
docs/report/cloudmesh-openapi/cloudmesh/openapi/__version__.py
|
rickotten/cybertraining-dsc.github.io
|
c8ea59be4f09fd543040ba0908af118df5820a70
|
[
"Apache-2.0"
] | null | null | null |
docs/report/cloudmesh-openapi/cloudmesh/openapi/__version__.py
|
rickotten/cybertraining-dsc.github.io
|
c8ea59be4f09fd543040ba0908af118df5820a70
|
[
"Apache-2.0"
] | null | null | null |
docs/report/cloudmesh-openapi/cloudmesh/openapi/__version__.py
|
rickotten/cybertraining-dsc.github.io
|
c8ea59be4f09fd543040ba0908af118df5820a70
|
[
"Apache-2.0"
] | null | null | null |
# Package version string (PEP 440 format) for cloudmesh-openapi.
version = "4.0.19"
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.157895
| 19
| 1
| 19
| 19
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
deab84ab8e546f1095765aef5610d0b7dc2a499f
| 52
|
py
|
Python
|
enthought/traits/ui/qt4/constants.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/traits/ui/qt4/constants.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/traits/ui/qt4/constants.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from traitsui.qt4.constants import *
| 17.333333
| 36
| 0.788462
| 7
| 52
| 5.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.134615
| 52
| 2
| 37
| 26
| 0.888889
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
defb58d0d4be621a6b74623b622805b0a37ec2f5
| 224
|
py
|
Python
|
Triplets/__init__.py
|
itssamedozkan/TripletMAML
|
81c9a9e2018a735078e3afe538351fd850da70ab
|
[
"MIT"
] | null | null | null |
Triplets/__init__.py
|
itssamedozkan/TripletMAML
|
81c9a9e2018a735078e3afe538351fd850da70ab
|
[
"MIT"
] | 1
|
2022-03-01T17:28:18.000Z
|
2022-03-01T17:28:18.000Z
|
Triplets/__init__.py
|
itssamedozkan/TripletMAML
|
81c9a9e2018a735078e3afe538351fd850da70ab
|
[
"MIT"
] | 1
|
2022-02-24T16:51:19.000Z
|
2022-02-24T16:51:19.000Z
|
from .TripletFlowers import TripletFlowers
from .TripletFSCIFAR100 import TripletFSCIFAR100
from .TripletMiniImageNet import TripletMiniImageNet
from .TripletOmniglot import TripletOmniglot
from .TripletCUB import TripletCUB
| 44.8
| 52
| 0.892857
| 20
| 224
| 10
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029268
| 0.084821
| 224
| 5
| 53
| 44.8
| 0.946341
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7232990ddd12d5a58d6a4b425578215578663815
| 6,749
|
py
|
Python
|
src/r2b2/tests/test_cli.py
|
gwexploratoryaudits/r2b2
|
76fcf1658e3055ee7e262a836fcb0b1af8e04233
|
[
"MIT"
] | 4
|
2020-02-05T20:51:42.000Z
|
2021-07-06T19:24:35.000Z
|
src/r2b2/tests/test_cli.py
|
gwexploratoryaudits/r2b2
|
76fcf1658e3055ee7e262a836fcb0b1af8e04233
|
[
"MIT"
] | 36
|
2020-02-13T19:18:30.000Z
|
2021-10-07T23:50:47.000Z
|
src/r2b2/tests/test_cli.py
|
gwexploratoryaudits/r2b2
|
76fcf1658e3055ee7e262a836fcb0b1af8e04233
|
[
"MIT"
] | 1
|
2021-03-19T10:35:38.000Z
|
2021-03-19T10:35:38.000Z
|
import os
from click.testing import CliRunner
from r2b2.cli import cli

# TODO: Test multiple round execution patterns
# TODO: Test all combinations of audit parameters and contest file
# TODO: Test election mode (once implemented)
# TODO: Test election file parsing (once election mode working)

# Banner printed by the CLI before any other output.
_WELCOME = '\nWelcome to the R2B2 auditing tool!\n\n'


def _assert_matches_file(result, expected_path):
    """Assert that a CliRunner invocation's output equals the file contents.

    Uses a ``with`` block so the expected-output file is closed even when the
    assertion fails — the original tests opened/closed the file manually and
    leaked the handle on any assertion failure.
    """
    with open(expected_path, 'r') as expected_file:
        assert result.output == expected_file.read()


def test_interactive_simple():
    """Testing `r2b2 interactive`
    Simple test of interactive module where contest and audit creation occur without error
    The audit should run and stop in the first round.
    """
    runner = CliRunner()
    user_in = 'brla\n0.1\n0.2\n1000\n2\nA\n700\nB\n300\n1\nA\nPLURALITY\ny\ny\nn\n200\n175\n25\n'
    result = runner.invoke(cli, 'interactive', input=user_in)
    _assert_matches_file(result, 'src/r2b2/tests/data/cli_test_expected_out_interactive.txt')


def test_interactive_given_audit():
    """Testing `r2b2 interactive -a brla -r 0.1 -m 0.2`
    Test of interactive module where audit type, risk limit, and max fraction to draw are given
    as cli option arguments. The audit should run and stop in the first round.
    """
    runner = CliRunner()
    user_in = '1000\n2\nA\n700\nB\n300\n1\nA\nPLURALITY\ny\ny\nn\n200\n175\n25\n'
    result = runner.invoke(cli, 'interactive -a brla -r 0.1 -m 0.2', input=user_in)
    _assert_matches_file(result, 'src/r2b2/tests/data/cli_test_expected_out_interactive_given_audit.txt')


def test_interactive_given_contest():
    """Testing `r2b2 interactive --contest-file=/.../single_contest_template.json`
    Test of interactive module where contest is given as a JSON file and parsed into Contest object.
    The audit should run and stop in the first round.
    """
    runner = CliRunner()
    user_in = 'brla\n0.1\n0.2\ny\ny\nn\n20\n19\n1\n'
    result = runner.invoke(cli, 'interactive --contest-file=src/r2b2/tests/data/single_contest_template.json', input=user_in)
    _assert_matches_file(result, 'src/r2b2/tests/data/cli_test_expected_out_interactive_given_contest.txt')


def test_interactive_given_both():
    """Testng `r2b2 interactive -a brla -r 0.1 -m 0.2 --contest-file=/.../single_contest_template.json`
    Test of interactive module where contest JSON file and audit parameters are given as cli
    arguments. The audit should run and stop in the first round.
    """
    runner = CliRunner()
    user_in = 'y\ny\nn\n20\n19\n1\n'
    result = runner.invoke(cli,
                           'interactive -a brla -r 0.1 -m 0.2 --contest-file src/r2b2/tests/data/single_contest_template.json',
                           input=user_in)
    _assert_matches_file(result, 'src/r2b2/tests/data/cli_test_expected_out_interactive_given_both.txt')


def test_interactive_multi_round():
    """Testing `r2b2 interactive -a blra -r 0.1 -m 0.1 --contest-file=/.../basic_contest.json`"""
    runner = CliRunner()
    user_in = 'y\ny\nn\n100\n63\n37\nn\nn\n200\n119\n81\nn\nn\n300\n175\n125\n'
    result = runner.invoke(cli, 'interactive --contest-file src/r2b2/tests/data/basic_contest.json -a brla -r 0.1 -m 0.1', input=user_in)
    _assert_matches_file(result, 'src/r2b2/tests/data/cli_test_expected_out_interactive_multiround_.txt')


def test_bulk_min_to_max():
    """Testing `r2b2 bulk /.../single_contest_template.json brla -r 0.1 -m 0.4`"""
    runner = CliRunner()
    result = runner.invoke(cli, 'bulk -p CandidateA-CandidateB src/r2b2/tests/data/single_contest_template.json brla 0.1 0.4')
    _assert_matches_file(result, 'src/r2b2/tests/data/cli_test_expected_out_bulk_min_to_max.txt')


def test_bulk_round_list():
    """Testing `r2b2 bulk -l CandidateB -r '100 200 300' /.../basic_contest.json brla 0.1 0.05 `"""
    runner = CliRunner()
    result = runner.invoke(cli,
                           'bulk -p CandidateA-CandidateB -r \'100 200 300 400 500\' src/r2b2/tests/data/basic_contest.json brla 0.1 0.05')
    _assert_matches_file(result, 'src/r2b2/tests/data/cli_test_expected_out_bulk_rounds.txt')


def test_template_contest():
    """Testing `r2b2 template contest`"""
    runner = CliRunner()
    result = runner.invoke(cli, 'template contest')
    with open('src/r2b2/tests/data/single_contest_template.json', 'r') as template_file:
        # str() around read() in the original was redundant: read() returns str.
        expected_out = _WELCOME + template_file.read() + '\n'
    assert result.output == expected_out


def test_tempalte_election():
    """Testing `r2b2 template election`"""
    # NOTE: name keeps the historic 'tempalte' typo so selection by test name
    # (e.g. `pytest -k`) is unaffected.
    runner = CliRunner()
    result = runner.invoke(cli, 'template election')
    with open('src/r2b2/tests/data/election_template.json', 'r') as template_file:
        expected_out = _WELCOME + template_file.read() + '\n'
    assert result.output == expected_out


def test_template_contest_output_file():
    """Testing `r2b2 template -o test_contest.json contest`"""
    runner = CliRunner()
    result = runner.invoke(cli, 'template -o test_contest.json contest')
    assert result.output == _WELCOME + 'Template written to test_contest.json\n'
    with open('src/r2b2/tests/data/single_contest_template.json', 'r') as expected_file:
        expected_out_file = expected_file.read()
    with open('test_contest.json', 'r') as written_file:
        result_output_file = written_file.read()
    assert result_output_file == expected_out_file
    os.remove('test_contest.json')


def test_tempalte_election_output_file():
    """Testing `r2b2 template -o test_election.json election`"""
    # NOTE: name keeps the historic 'tempalte' typo (see above rationale).
    runner = CliRunner()
    result = runner.invoke(cli, 'template -o test_election.json election')
    assert result.output == _WELCOME + 'Template written to test_election.json\n'
    with open('src/r2b2/tests/data/election_template.json', 'r') as expected_file:
        expected_out_file = expected_file.read()
    with open('test_election.json', 'r') as written_file:
        result_output_file = written_file.read()
    assert result_output_file == expected_out_file
    os.remove('test_election.json')
| 43.262821
| 139
| 0.711068
| 998
| 6,749
| 4.614228
| 0.140281
| 0.093377
| 0.041694
| 0.055592
| 0.819761
| 0.800217
| 0.779153
| 0.737676
| 0.682736
| 0.661455
| 0
| 0.037702
| 0.16684
| 6,749
| 155
| 140
| 43.541935
| 0.781256
| 0.221514
| 0
| 0.55102
| 0
| 0.071429
| 0.352518
| 0.224383
| 0
| 0
| 0
| 0.006452
| 0.132653
| 1
| 0.112245
| false
| 0
| 0.030612
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a0e60ffc36b8007fac2a803da2db9981282b02a4
| 55
|
py
|
Python
|
ekorpkit/corpora/__init__.py
|
entelecheia/ekorpkit
|
400cb15005fdbcaa2ab0c311e338799283f28fe0
|
[
"CC-BY-4.0"
] | 4
|
2022-02-26T10:54:16.000Z
|
2022-02-26T11:01:56.000Z
|
ekorpkit/corpora/__init__.py
|
entelecheia/ekorpkit
|
400cb15005fdbcaa2ab0c311e338799283f28fe0
|
[
"CC-BY-4.0"
] | 1
|
2022-03-25T06:37:12.000Z
|
2022-03-25T06:45:53.000Z
|
ekorpkit/corpora/__init__.py
|
entelecheia/ekorpkit
|
400cb15005fdbcaa2ab0c311e338799283f28fe0
|
[
"CC-BY-4.0"
] | null | null | null |
from .loader import Corpora
from .corpus import Corpus
| 18.333333
| 27
| 0.818182
| 8
| 55
| 5.625
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 55
| 2
| 28
| 27.5
| 0.957447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9d060d6f7113f1ce923259240afba72b4901681c
| 26
|
py
|
Python
|
server/tracker/tests.py
|
LaurentColoma/TicketManager-server
|
cbe66b54b78d1a6f034bf186d54784b68072462e
|
[
"MIT"
] | 2
|
2019-07-31T11:55:32.000Z
|
2020-12-04T08:36:48.000Z
|
server/tracker/tests.py
|
LaurentColoma/TicketManager-server
|
cbe66b54b78d1a6f034bf186d54784b68072462e
|
[
"MIT"
] | null | null | null |
server/tracker/tests.py
|
LaurentColoma/TicketManager-server
|
cbe66b54b78d1a6f034bf186d54784b68072462e
|
[
"MIT"
] | null | null | null |
"""
Test cases
TODO.
"""
| 4.333333
| 10
| 0.5
| 3
| 26
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 26
| 5
| 11
| 5.2
| 0.65
| 0.653846
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.2
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9d16db826e808064d01f7b6f1481e8610942f912
| 77
|
py
|
Python
|
log_collectors/simple_log_collector/__init__.py
|
sboagibm/ffdl-model-metrics
|
e1a27db3d130fb3227585ee30b860a085591e748
|
[
"Apache-2.0"
] | 680
|
2018-02-09T18:16:41.000Z
|
2022-03-25T06:54:13.000Z
|
log_collectors/simple_log_collector/__init__.py
|
sboagibm/ffdl-model-metrics
|
e1a27db3d130fb3227585ee30b860a085591e748
|
[
"Apache-2.0"
] | 118
|
2018-02-09T23:12:57.000Z
|
2022-03-02T02:24:59.000Z
|
log_collectors/simple_log_collector/__init__.py
|
sboagibm/ffdl-model-metrics
|
e1a27db3d130fb3227585ee30b860a085591e748
|
[
"Apache-2.0"
] | 202
|
2018-02-09T18:25:07.000Z
|
2021-12-05T09:47:15.000Z
|
from __future__ import absolute_import
from .src import tail_em_from_emfile
| 19.25
| 38
| 0.87013
| 12
| 77
| 4.916667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116883
| 77
| 3
| 39
| 25.666667
| 0.867647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9d568c05a44b0e01c769a8ba85d63e12a8ed91d5
| 3,065
|
py
|
Python
|
tests/test_magic_logger.py
|
rcourivaud/logstash_logger
|
135f9b314428717909f4cda17298db7100b1e7d3
|
[
"MIT"
] | null | null | null |
tests/test_magic_logger.py
|
rcourivaud/logstash_logger
|
135f9b314428717909f4cda17298db7100b1e7d3
|
[
"MIT"
] | null | null | null |
tests/test_magic_logger.py
|
rcourivaud/logstash_logger
|
135f9b314428717909f4cda17298db7100b1e7d3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `magic_logger` package."""
from magic_logger import MagicLogger
import os

# Shared fixtures: every test writes to the same scratch log file.
_LOG_FILE = 'test_output.txt'
_LOGGER_NAME = 'test_name'
_MESSAGE = 'test_message'


def _assert_record_written(level_method, expected_level):
    """Log one message via the named level method and assert the on-disk record.

    The formatter joins fields with ' - ', so after splitting a record is
    [timestamp, logger name, level, message].  The scratch file is removed at
    the end so the next test starts fresh.
    """
    logger = MagicLogger(logger_name=_LOGGER_NAME, file_name=_LOG_FILE, host=None)
    getattr(logger, level_method)(_MESSAGE)
    with open(_LOG_FILE, 'r') as f:
        log_split = f.read().split(' - ')
    assert log_split[3].rstrip() == _MESSAGE
    assert log_split[2] == expected_level
    assert log_split[1] == _LOGGER_NAME
    os.remove(_LOG_FILE)


def test_file_created():
    """Constructing the logger and writing once creates the log file."""
    test_magic_logger = MagicLogger(logger_name=_LOGGER_NAME, file_name=_LOG_FILE, host=None)
    assert isinstance(test_magic_logger, MagicLogger)
    test_magic_logger.info(_MESSAGE)
    # Fixed idiom: truthiness assert instead of `== True`.
    assert os.path.exists(_LOG_FILE)
    os.remove(_LOG_FILE)


def test_log_debug():
    """DEBUG-level records carry the message, level, and logger name."""
    _assert_record_written('debug', 'DEBUG')


def test_log_info():
    """INFO-level records carry the message, level, and logger name."""
    _assert_record_written('info', 'INFO')


def test_log_warning():
    """WARNING-level records carry the message, level, and logger name."""
    _assert_record_written('warning', 'WARNING')


def test_log_error():
    """ERROR-level records carry the message, level, and logger name."""
    _assert_record_written('error', 'ERROR')


def test_logger_extra():
    """`extra` passed at construction time is kept on the logger instance."""
    test_magic_logger = MagicLogger(logger_name=_LOGGER_NAME, file_name=_LOG_FILE, host=None,
                                    extra={"test": "test"})
    assert test_magic_logger.extra.get("test")
    assert test_magic_logger.extra["test"] == "test"


def test_log_extra():
    """`extra` passed per log call ends up on the emitted record's __dict__."""
    test_magic_logger = MagicLogger(logger_name=_LOGGER_NAME, file_name=_LOG_FILE, host=None,
                                    extra={"test": "test"})
    record = test_magic_logger.error("random_message", extra={"test": "test"})
    assert record.__dict__.get("test")
    assert record.__dict__["test"] == "test"
| 35.229885
| 122
| 0.676998
| 401
| 3,065
| 4.885287
| 0.134663
| 0.127616
| 0.112813
| 0.064319
| 0.789689
| 0.789689
| 0.789689
| 0.789689
| 0.745278
| 0.745278
| 0
| 0.005208
| 0.185644
| 3,065
| 86
| 123
| 35.639535
| 0.779647
| 0.024796
| 0
| 0.606061
| 0
| 0
| 0.15996
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.106061
| false
| 0
| 0.030303
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c2147256410b21d088a218b8511676b9d5cb3b32
| 20
|
py
|
Python
|
detex/version.py
|
d-chambers/Detex
|
46602eb8e05e080a23111c8f2716065a016613c2
|
[
"BSD-3-Clause"
] | 39
|
2015-08-15T20:10:14.000Z
|
2022-03-17T00:41:57.000Z
|
detex/version.py
|
d-chambers/Detex
|
46602eb8e05e080a23111c8f2716065a016613c2
|
[
"BSD-3-Clause"
] | 39
|
2015-09-28T23:50:59.000Z
|
2019-07-16T20:38:31.000Z
|
detex/version.py
|
d-chambers/Detex
|
46602eb8e05e080a23111c8f2716065a016613c2
|
[
"BSD-3-Clause"
] | 8
|
2015-10-08T20:43:40.000Z
|
2020-08-05T22:47:45.000Z
|
__version__ = 1.0.9
| 10
| 19
| 0.7
| 4
| 20
| 2.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 0.15
| 20
| 1
| 20
| 20
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dfa607d4411cba7379e67918636e4ecdcf951412
| 3,549
|
py
|
Python
|
simphox/circuit/envelope.py
|
fancompute/simphox
|
917673cc3ef8fb54fcbbaaa93b8efdc09a8e3614
|
[
"MIT"
] | 6
|
2021-08-31T16:20:33.000Z
|
2021-12-27T18:04:52.000Z
|
simphox/circuit/envelope.py
|
fancompute/simphox
|
917673cc3ef8fb54fcbbaaa93b8efdc09a8e3614
|
[
"MIT"
] | 1
|
2021-08-29T21:09:30.000Z
|
2021-08-29T21:17:47.000Z
|
simphox/circuit/envelope.py
|
fancompute/simphox
|
917673cc3ef8fb54fcbbaaa93b8efdc09a8e3614
|
[
"MIT"
] | null | null | null |
"""This file is for back-of-the-envelope calculations / figures based on them."""
import numpy as np
def binary_svd_depth_size(n, k, interport_distance=25, device_length=200, loss_db: float = 0.3):
    """Binary SVD architecture depth and size.

    Args:
        n: Number of inputs.
        k: Number of outputs (generally want :math:`k << n`).
        interport_distance: Distance between the ports of the device.
        device_length: Overall device length.
        loss_db: Loss in dB for the circuit.

    Returns:
        Information about number of layers, length, height, footprint, loss, etc.
    """
    log2n = np.ceil(np.log2(n))
    # Layer budget: binary-tree input fan-in, k unitary tree passes, and a
    # final attenuation + unitary stage of k + 1 layers.
    layers = log2n + k * log2n + (k + 1)
    height = np.ceil(n / k) * n * interport_distance
    length = layers * device_length
    return {
        'layers': layers,
        'length (cm)': length / 1e4,
        'height (cm)': height / 1e4,
        'footprint (cm^2)': length * height / 1e8,
        'loss (dB)': -layers * loss_db + 10 * np.log10(1 / np.ceil(n / k)) + 10 * np.log10(1 / n) - 3
    }
def rectangular_depth_size(n, interport_distance=25, device_length=200, loss_db: float = 0.3, svd: bool = True):
    """Rectangular architecture depth and size.

    Args:
        n: The number of outputs of the binary tree
        interport_distance: Distance between each port in the network (include the phase shifters)
        device_length: Length of the device
        loss_db: Loss of the device
        svd: Whether to use an SVD architecture (doubles the number of layers in the architecture)

    Returns:
        Information about number of layers, length, height, footprint, loss, etc.
    """
    # `svd` participates arithmetically (True == 1): it doubles the unitary
    # depth, adds one extra layer, and costs an additional 3 dB in the loss.
    layers = np.ceil(np.log2(n)) + n * (1 + svd) + svd
    height = n * interport_distance
    length = layers * device_length
    return {
        'layers': layers,
        'length (cm)': length / 1e4,
        'height (cm)': height / 1e4,
        'footprint (cm^2)': length * height / 1e8,
        'loss (dB)': -layers * loss_db - 10 * np.log10(n) - 3 * svd
    }
def binary_equiv_cascade_size(n, n_equiv, interport_distance=25, device_length=200, loss_db: float = 0.3):
    """Binary architecture cascade size.

    Args:
        n: The number of outputs of the binary tree
        n_equiv: Find the k (number of inputs) required to match the flops of n_equiv x n_equiv matrix
        interport_distance: Distance between each port in the network (include the phase shifters)
        device_length: Length of the device
        loss_db: Loss of the device

    Returns:
        Information about number of layers, length, height, footprint, loss, etc.
    """
    # Inputs needed so the cascade matches an n_equiv x n_equiv matrix in FLOPs.
    k = np.ceil(n_equiv ** 2 / n)
    log2n = np.ceil(np.log2(n))
    # Same layer structure as the binary SVD: fan-in, k tree passes, final stage.
    layers = log2n + k * log2n + (k + 1)
    height = n * interport_distance
    length = layers * device_length
    return {
        'layers': layers,
        'length (cm)': length / 1e4,
        'height (cm)': height / 1e4,
        'footprint (cm^2)': length * height / 1e8,
        'loss (dB)': -layers * loss_db + 10 * np.log10(1 / n)
    }
| 37.755319
| 112
| 0.653705
| 496
| 3,549
| 4.5
| 0.191532
| 0.032258
| 0.037634
| 0.026882
| 0.755376
| 0.75
| 0.728047
| 0.728047
| 0.728047
| 0.728047
| 0
| 0.026966
| 0.247675
| 3,549
| 94
| 113
| 37.755319
| 0.808989
| 0.375317
| 0
| 0.695652
| 0
| 0
| 0.076332
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065217
| false
| 0
| 0.021739
| 0
| 0.152174
| 0.065217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dfb48236f31998b970eddb8a3cd084fad2722e1d
| 123
|
py
|
Python
|
python/marvin/tools/mixins/__init__.py
|
jvazquez77/marvin
|
f7f2c498c1a2cfa19ab96c51eebbbd4bde980984
|
[
"BSD-3-Clause"
] | 49
|
2016-11-04T19:20:50.000Z
|
2022-03-13T08:43:05.000Z
|
python/marvin/tools/mixins/__init__.py
|
jvazquez77/marvin
|
f7f2c498c1a2cfa19ab96c51eebbbd4bde980984
|
[
"BSD-3-Clause"
] | 703
|
2016-11-02T01:25:14.000Z
|
2022-03-31T19:20:03.000Z
|
python/marvin/tools/mixins/__init__.py
|
jvazquez77/marvin
|
f7f2c498c1a2cfa19ab96c51eebbbd4bde980984
|
[
"BSD-3-Clause"
] | 37
|
2016-11-09T08:51:48.000Z
|
2022-02-22T22:49:45.000Z
|
# flake8: noqa
from .aperture import *
from .dapall import *
from .nsa import *
from .mma import *
from .caching import *
| 15.375
| 23
| 0.707317
| 17
| 123
| 5.117647
| 0.529412
| 0.45977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.195122
| 123
| 7
| 24
| 17.571429
| 0.868687
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
dffb648db5cdaab93a5600b0c0d1c2f5afc4f9b2
| 144
|
py
|
Python
|
pyhawkes/internals/__init__.py
|
thonic/pyhawkes
|
99804deb9ea22ba3e1a99584420722abdf8eb56b
|
[
"MIT"
] | 221
|
2015-02-26T04:25:34.000Z
|
2022-03-27T13:06:10.000Z
|
pyhawkes/internals/__init__.py
|
thonic/pyhawkes
|
99804deb9ea22ba3e1a99584420722abdf8eb56b
|
[
"MIT"
] | 20
|
2015-08-04T01:47:19.000Z
|
2021-08-08T00:22:44.000Z
|
pyhawkes/internals/__init__.py
|
thonic/pyhawkes
|
99804deb9ea22ba3e1a99584420722abdf8eb56b
|
[
"MIT"
] | 86
|
2015-02-22T23:36:32.000Z
|
2021-11-13T20:56:07.000Z
|
from . import bias, continuous_time_helpers, distributions, impulses, network, parallel_adjacency_resampling, parent_updates, parents, weights
| 48
| 142
| 0.847222
| 16
| 144
| 7.3125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090278
| 144
| 2
| 143
| 72
| 0.89313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5f30014a8081188141c83da60b550e9126daf62f
| 146
|
py
|
Python
|
test/tests/try_continue.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1
|
2020-02-06T14:28:45.000Z
|
2020-02-06T14:28:45.000Z
|
test/tests/try_continue.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
test/tests/try_continue.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1
|
2020-02-06T14:29:00.000Z
|
2020-02-06T14:29:00.000Z
|
# should_error
# skip-if: '-x' in EXTRA_JIT_ARGS
# Syntax error to have a continue outside a loop.
# NOTE: this file is an intentionally-invalid fixture — the test harness
# expects compilation to FAIL. `continue` is only legal directly inside a
# loop body; wrapping it in try/finally does not make it legal. Do not "fix".
def foo():
    try: continue
    finally: pass
| 20.857143
| 49
| 0.691781
| 24
| 146
| 4.083333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212329
| 146
| 6
| 50
| 24.333333
| 0.852174
| 0.630137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
5f37a46ee4b1df6a8d7cd7afa088aae5ae1369e5
| 115
|
py
|
Python
|
examples/example_08_advanced.py
|
cdeil/prefect-tutorial
|
31c79c8dfe79da727c39d7aa2e4a7da9be849029
|
[
"MIT"
] | 7
|
2022-01-04T21:05:36.000Z
|
2022-03-27T01:41:14.000Z
|
examples/example_08_advanced.py
|
cdeil/prefect-tutorial
|
31c79c8dfe79da727c39d7aa2e4a7da9be849029
|
[
"MIT"
] | null | null | null |
examples/example_08_advanced.py
|
cdeil/prefect-tutorial
|
31c79c8dfe79da727c39d7aa2e4a7da9be849029
|
[
"MIT"
] | 1
|
2022-02-03T21:11:06.000Z
|
2022-02-03T21:11:06.000Z
|
"""Advanced example of a complex flow.
TODO: copy our ETL example that we're currently using, simplify a bit.
"""
| 23
| 70
| 0.730435
| 19
| 115
| 4.421053
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 115
| 4
| 71
| 28.75
| 0.884211
| 0.930435
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.25
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a035951d45a7cd9ab33022b6ca7f289038ae7590
| 4,370
|
py
|
Python
|
getDomainAge/tests/test_controllers/test_apis/test_job.py
|
ljnath/getDomainAge
|
a15337433d319597c1a705b49553e31620e00058
|
[
"MIT"
] | 2
|
2020-03-12T14:43:19.000Z
|
2021-08-02T06:21:52.000Z
|
getDomainAge/tests/test_controllers/test_apis/test_job.py
|
ljnath/getDomainAge
|
a15337433d319597c1a705b49553e31620e00058
|
[
"MIT"
] | 2
|
2021-05-25T16:01:29.000Z
|
2021-09-07T19:17:01.000Z
|
getDomainAge/tests/test_controllers/test_apis/test_job.py
|
ljnath/getDomainAge
|
a15337433d319597c1a705b49553e31620e00058
|
[
"MIT"
] | null | null | null |
from unittest.mock import patch
import wtforms
from getDomainAge.models.enums import Endpoint, SessionParam
from getDomainAge.tests.test_controllers import app, do_login, test_client

# NOTE(review): these tests appear to share server-side session/DB state —
# PAGE_INDEX is asserted as 0, then 1, then 2 across successive tests, so the
# file looks order-dependent. Confirm before parallelising or reordering.


def test_if_job_api_shows_add_job_page(test_client):
    """GET on the job endpoint renders the 'Add a new job' page for a logged-in user."""
    do_login(test_client)  # performing login
    response = test_client.get(Endpoint.API_JOB.value)
    assert response.status_code == 200
    assert 'Add a new job' in response.data.decode('utf-8')
    with test_client.session_transaction() as session:
        # A fresh session starts with page index 0.
        assert SessionParam.PAGE_INDEX.value in session
        assert session[SessionParam.PAGE_INDEX.value] == 0


def test_if_job_api_adds_new_job(test_client):
    """POST to the job endpoint 307-redirects to the add-job endpoint."""
    do_login(test_client)  # performing login
    # validating redirection code
    response = test_client.post(Endpoint.API_JOB.value, follow_redirects=False)
    assert response.status_code == 307
    # validating is redirected to correct endpoint
    response = test_client.post(Endpoint.API_JOB.value, follow_redirects=True)
    assert response.status_code == 200
    assert response.request.path == Endpoint.API_JOB_ADD.value


def test_job_view_api_for_requestor_jobs(test_client):
    """Default job view lists only the logged-in user's own jobs."""
    do_login(test_client, 'user1@test.com')  # performing login
    # adding a new job by the logged-in email - user1@test.com
    # Patch bypasses WTForms validation so the POST always succeeds.
    with patch.object(wtforms.Form, 'validate', return_value=True) as _:
        test_client.post(Endpoint.API_JOB_ADD.value, follow_redirects=False)
    response = test_client.get(Endpoint.API_JOB_VIEW.value, follow_redirects=False)
    assert response.status_code == 200
    assert 'List of jobs submitted by you' in response.data.decode('utf-8')
    with test_client.session_transaction() as session:
        assert SessionParam.PAGE_INDEX.value in session
        assert SessionParam.VIEW_ALL.value in session
        # Without the ?all query parameter, VIEW_ALL stays falsy.
        assert not session[SessionParam.VIEW_ALL.value]
        assert session[SessionParam.PAGE_INDEX.value] == 1


def test_job_view_api_for_all_jobs(test_client):
    """Job view with ?all lists every user's jobs and sets VIEW_ALL in session."""
    do_login(test_client, 'user2@test.com')  # performing login
    # adding a new job by the logged-in email - user2@test.com
    # Patch bypasses WTForms validation so the POST always succeeds.
    with patch.object(wtforms.Form, 'validate', return_value=True) as _:
        test_client.post(Endpoint.API_JOB_ADD.value, follow_redirects=False)
    response = test_client.get(f'{Endpoint.API_JOB_VIEW.value}?all', follow_redirects=False)
    assert response.status_code == 200
    assert 'List of jobs submitted by all user' in response.data.decode('utf-8')
    with test_client.session_transaction() as session:
        assert SessionParam.PAGE_INDEX.value in session
        assert SessionParam.VIEW_ALL.value in session
        assert session[SessionParam.VIEW_ALL.value]
        assert session[SessionParam.PAGE_INDEX.value] == 2


def test_job_view_with_invalid_page_index(test_client):
    """A non-numeric ?page value is tolerated and still returns 200."""
    do_login(test_client)  # performing login
    response = test_client.get(f'{Endpoint.API_JOB_VIEW.value}?page=ABC', follow_redirects=False)
    assert response.status_code == 200


def test_if_job_add_api_redirects_to_add_job_page_with_missing_data(test_client):
    """POST with no form data falls back to rendering the add-job page."""
    do_login(test_client)  # performing login
    response = test_client.post(Endpoint.API_JOB_ADD.value, follow_redirects=True)
    assert response.status_code == 200
    assert 'Add a new job' in response.data.decode('utf-8')


def test_job_addition_via_job_add_endpoint(test_client):
    """A validated POST to the add-job endpoint responds with a 302 redirect."""
    do_login(test_client)  # performing login
    with patch.object(wtforms.Form, 'validate', return_value=True) as _:
        response = test_client.post(Endpoint.API_JOB_ADD.value, follow_redirects=False)
        assert response.status_code == 302
        # assert 'Failed to added new job, please try aftersome.' in response.data.decode('utf-8')
        # TODO: Test for redirected page with when flask>2.0.1 is released


def test_job_result_download_with_invalid_id(test_client):
    """Downloading a result with a non-numeric job id redirects (302)."""
    do_login(test_client)  # performing login
    response = test_client.get(f'{Endpoint.API_JOB_DOWNLOAD.value}/ABC', follow_redirects=False)
    assert response.status_code == 302
    # TODO: Test for redirected page with when flask>2.0.1 is released
    # response = test_client.get(f'{Endpoint.API_JOB_DOWNLOAD.value}/ABC', follow_redirects=True)
    # assert response.status_code == 200
    # assert response.request.path == Endpoint.APP_NAME.value
    # assert 'You have made an invalid request.' in response.data.decode('utf-8')
| 40.462963
| 98
| 0.755606
| 634
| 4,370
| 4.954259
| 0.171924
| 0.101878
| 0.057943
| 0.076409
| 0.823305
| 0.808023
| 0.7695
| 0.740847
| 0.668577
| 0.668577
| 0
| 0.013297
| 0.156751
| 4,370
| 107
| 99
| 40.841122
| 0.839077
| 0.183066
| 0
| 0.47541
| 0
| 0
| 0.075775
| 0.030423
| 0
| 0
| 0
| 0.009346
| 0.393443
| 1
| 0.131148
| false
| 0
| 0.065574
| 0
| 0.196721
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a03d7cc2fd5a7eaedbe93e5685da2476b8d3c89e
| 192
|
py
|
Python
|
pypbbot_examples/load_plugins.py
|
PHIKN1GHT/pypbbot
|
a327c173cd09b2461b755907ec32a7f151e47f75
|
[
"MIT"
] | 11
|
2021-01-31T12:58:39.000Z
|
2021-10-15T02:53:13.000Z
|
pypbbot_examples/load_plugins.py
|
ProtobufBot/pypbbot
|
a327c173cd09b2461b755907ec32a7f151e47f75
|
[
"MIT"
] | null | null | null |
pypbbot_examples/load_plugins.py
|
ProtobufBot/pypbbot
|
a327c173cd09b2461b755907ec32a7f151e47f75
|
[
"MIT"
] | 4
|
2021-01-31T12:58:42.000Z
|
2021-09-11T17:35:11.000Z
|
# python -m pypbbot_examples.load_plugins
import asyncio
from pypbbot.plugin import _loadedPlugins, load_plugins
asyncio.run(load_plugins("pypbbot_examples\\plugins"))
print(_loadedPlugins)
| 24
| 55
| 0.838542
| 24
| 192
| 6.416667
| 0.541667
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078125
| 192
| 7
| 56
| 27.428571
| 0.870057
| 0.203125
| 0
| 0
| 0
| 0
| 0.165563
| 0.165563
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a04ccd6f53a9aac217e76e67d3db1c29851dfcee
| 31
|
py
|
Python
|
boilermaker/__main__.py
|
spacemeat/boilermaker
|
f5ffb308d426806f098fdb9dc34327454dab9cf6
|
[
"MIT"
] | null | null | null |
boilermaker/__main__.py
|
spacemeat/boilermaker
|
f5ffb308d426806f098fdb9dc34327454dab9cf6
|
[
"MIT"
] | null | null | null |
boilermaker/__main__.py
|
spacemeat/boilermaker
|
f5ffb308d426806f098fdb9dc34327454dab9cf6
|
[
"MIT"
] | null | null | null |
from .boma import main
main()
| 7.75
| 22
| 0.709677
| 5
| 31
| 4.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 31
| 3
| 23
| 10.333333
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a067fa279cf0693af0311063e68ec25621cf79b7
| 695
|
py
|
Python
|
tigerforecast/utils/optimizers/tests/run_all_tests.py
|
danielsuo/TigerForecast
|
ae18b169d96dd81db88ab27a8b055036845d3a8f
|
[
"Apache-2.0"
] | 1
|
2020-07-28T09:07:29.000Z
|
2020-07-28T09:07:29.000Z
|
tigerforecast/utils/optimizers/tests/run_all_tests.py
|
danielsuo/TigerForecast
|
ae18b169d96dd81db88ab27a8b055036845d3a8f
|
[
"Apache-2.0"
] | null | null | null |
tigerforecast/utils/optimizers/tests/run_all_tests.py
|
danielsuo/TigerForecast
|
ae18b169d96dd81db88ab27a8b055036845d3a8f
|
[
"Apache-2.0"
] | 1
|
2021-04-12T22:39:26.000Z
|
2021-04-12T22:39:26.000Z
|
from tigerforecast.utils.optimizers.tests.test_adagrad import test_adagrad
from tigerforecast.utils.optimizers.tests.test_sgd import test_sgd
from tigerforecast.utils.optimizers.tests.test_ogd import test_ogd
from tigerforecast.utils.optimizers.tests.test_adam import test_adam
from tigerforecast.utils.optimizers.tests.test_ons import test_ons
# run all optimizers tests
def run_all_tests(steps=1000, show=False):
print("\nrunning all optimizers tests...\n")
test_sgd(show=show)
test_ogd(show=show)
test_adagrad(show=show)
test_adam(show=show)
test_ons(show=show)
print("\nall optimizers tests passed\n")
if __name__ == "__main__":
run_all_tests(show=False)
| 33.095238
| 74
| 0.788489
| 102
| 695
| 5.107843
| 0.264706
| 0.230326
| 0.211132
| 0.307102
| 0.393474
| 0.393474
| 0
| 0
| 0
| 0
| 0
| 0.006525
| 0.117986
| 695
| 20
| 75
| 34.75
| 0.843393
| 0.034532
| 0
| 0
| 0
| 0
| 0.110613
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0.066667
| 0.333333
| 0
| 0.4
| 0.133333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
a07ec5ff2fd47b2e139af5c87a7acb943f9e2590
| 54
|
py
|
Python
|
interest/router/__init__.py
|
interest-hub/interest
|
e6e1def4f2999222aac2fb1d290ae94250673b89
|
[
"MIT"
] | 14
|
2015-02-15T09:29:26.000Z
|
2016-03-24T15:30:54.000Z
|
interest/router/__init__.py
|
roll/interest-py
|
e6e1def4f2999222aac2fb1d290ae94250673b89
|
[
"MIT"
] | 17
|
2015-02-15T22:52:07.000Z
|
2016-02-28T23:40:18.000Z
|
interest/router/__init__.py
|
roll/interest-py
|
e6e1def4f2999222aac2fb1d290ae94250673b89
|
[
"MIT"
] | 1
|
2016-11-11T11:14:05.000Z
|
2016-11-11T11:14:05.000Z
|
from .router import Router
from .parser import Parser
| 18
| 26
| 0.814815
| 8
| 54
| 5.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 54
| 2
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a091f1ffceb87dbe37ec88e6e437efe343568b82
| 48
|
py
|
Python
|
app.py
|
HackerUSA-CE/ballpy-api
|
2f4b19a0cb552980947d741b70cc1f41122eac7b
|
[
"MIT"
] | null | null | null |
app.py
|
HackerUSA-CE/ballpy-api
|
2f4b19a0cb552980947d741b70cc1f41122eac7b
|
[
"MIT"
] | null | null | null |
app.py
|
HackerUSA-CE/ballpy-api
|
2f4b19a0cb552980947d741b70cc1f41122eac7b
|
[
"MIT"
] | null | null | null |
from ballpy import create_app
app = create_app()
| 24
| 29
| 0.8125
| 8
| 48
| 4.625
| 0.625
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 48
| 2
| 30
| 24
| 0.880952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a0bff684f8748bb8dacfddcaf6d90d1915bdc1fc
| 24
|
py
|
Python
|
__init__.py
|
GooZy/Z-Blog
|
a00cb2842fc4c9b49461c5be4363d175b2cbb307
|
[
"MIT"
] | null | null | null |
__init__.py
|
GooZy/Z-Blog
|
a00cb2842fc4c9b49461c5be4363d175b2cbb307
|
[
"MIT"
] | null | null | null |
__init__.py
|
GooZy/Z-Blog
|
a00cb2842fc4c9b49461c5be4363d175b2cbb307
|
[
"MIT"
] | null | null | null |
from .z_blog import app
| 12
| 23
| 0.791667
| 5
| 24
| 3.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
39ffdfb19e4c3f1ce67f4fe2f1a0c93932dc958b
| 79
|
py
|
Python
|
exporter/__init__.py
|
eabderh/exporter
|
019be35e66c3645bff1b52e99927bac01b00fe97
|
[
"MIT"
] | null | null | null |
exporter/__init__.py
|
eabderh/exporter
|
019be35e66c3645bff1b52e99927bac01b00fe97
|
[
"MIT"
] | null | null | null |
exporter/__init__.py
|
eabderh/exporter
|
019be35e66c3645bff1b52e99927bac01b00fe97
|
[
"MIT"
] | null | null | null |
from . import core
from .core import Export
from .globalize import globalize
| 13.166667
| 32
| 0.78481
| 11
| 79
| 5.636364
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177215
| 79
| 5
| 33
| 15.8
| 0.953846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
26741ac9be94eeb8531194ce6e70c753266ec68e
| 107
|
py
|
Python
|
evol/exceptions.py
|
bahia14/Evol
|
b092a309f5fd4091b76b7bfd773ff9daa8ae9b2f
|
[
"MIT"
] | 161
|
2017-12-01T08:44:15.000Z
|
2022-03-07T01:17:51.000Z
|
evol/exceptions.py
|
bahia14/Evol
|
b092a309f5fd4091b76b7bfd773ff9daa8ae9b2f
|
[
"MIT"
] | 104
|
2017-12-01T08:47:20.000Z
|
2021-01-22T07:48:09.000Z
|
evol/exceptions.py
|
bahia14/Evol
|
b092a309f5fd4091b76b7bfd773ff9daa8ae9b2f
|
[
"MIT"
] | 15
|
2018-01-04T14:37:53.000Z
|
2021-08-17T11:36:41.000Z
|
class PopulationIsNotEvaluatedException(RuntimeError):
pass
class StopEvolution(Exception):
pass
| 15.285714
| 54
| 0.794393
| 8
| 107
| 10.625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149533
| 107
| 6
| 55
| 17.833333
| 0.934066
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
cd121ea51016210f5fe13ce4c5b780ad48f4469f
| 47
|
py
|
Python
|
angrmanagement/plugins/seed_table/__init__.py
|
DennyDai/angr-management
|
8a4ba5dafbf2f4d2ba558528a0d1ae099a199a04
|
[
"BSD-2-Clause"
] | 474
|
2015-08-10T17:47:15.000Z
|
2022-03-31T21:10:55.000Z
|
angrmanagement/plugins/seed_table/__init__.py
|
DennyDai/angr-management
|
8a4ba5dafbf2f4d2ba558528a0d1ae099a199a04
|
[
"BSD-2-Clause"
] | 355
|
2015-08-17T09:35:53.000Z
|
2022-03-31T21:29:52.000Z
|
angrmanagement/plugins/seed_table/__init__.py
|
DennyDai/angr-management
|
8a4ba5dafbf2f4d2ba558528a0d1ae099a199a04
|
[
"BSD-2-Clause"
] | 95
|
2015-08-11T14:36:12.000Z
|
2022-03-31T23:01:01.000Z
|
from .seed_table_plugin import SeedTablePlugin
| 23.5
| 46
| 0.893617
| 6
| 47
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cd2057f791e79303be1e8a91ddd0e235e7c96547
| 154
|
py
|
Python
|
nfnets/__init__.py
|
zuenko/nfnets-pytorch
|
2300d526205cba9eb3ce9b52d68d636b2abc4295
|
[
"MIT"
] | 312
|
2021-02-13T19:57:14.000Z
|
2022-03-21T07:44:11.000Z
|
nfnets/__init__.py
|
zuenko/nfnets-pytorch
|
2300d526205cba9eb3ce9b52d68d636b2abc4295
|
[
"MIT"
] | 31
|
2021-02-14T21:51:15.000Z
|
2021-08-22T09:37:44.000Z
|
nfnets/__init__.py
|
zuenko/nfnets-pytorch
|
2300d526205cba9eb3ce9b52d68d636b2abc4295
|
[
"MIT"
] | 27
|
2021-02-15T02:19:05.000Z
|
2021-12-07T02:01:52.000Z
|
from .base import WSConv1d, WSConv2d, WSConvTranspose2d, ScaledStdConv2d
from .sgd_agc import SGD_AGC
from .agc import AGC
from .utils import replace_conv
| 38.5
| 72
| 0.837662
| 22
| 154
| 5.727273
| 0.545455
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.116883
| 154
| 4
| 73
| 38.5
| 0.897059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cd4f9c4023ca6ce8cd75567cfbb4b310baaa2bfa
| 32
|
py
|
Python
|
2021/examples-in-class-2021-11-12/module_user.py
|
ati-ozgur/course-python
|
38237d120043c07230658b56dc3aeb01c3364933
|
[
"Apache-2.0"
] | 1
|
2021-02-04T16:59:11.000Z
|
2021-02-04T16:59:11.000Z
|
2021/examples-in-class-2021-11-12/module_user.py
|
ati-ozgur/course-python
|
38237d120043c07230658b56dc3aeb01c3364933
|
[
"Apache-2.0"
] | null | null | null |
2021/examples-in-class-2021-11-12/module_user.py
|
ati-ozgur/course-python
|
38237d120043c07230658b56dc3aeb01c3364933
|
[
"Apache-2.0"
] | 1
|
2019-10-30T14:37:48.000Z
|
2019-10-30T14:37:48.000Z
|
from hello import hello
hello()
| 10.666667
| 23
| 0.78125
| 5
| 32
| 5
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 32
| 3
| 24
| 10.666667
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cd5b391780bc9fd912db3d4e6e40a153a5ed6152
| 87
|
py
|
Python
|
tests/core/test_rast_client.py
|
freiburgermsu/ModelSEEDpy
|
f368354f99ddfe0b9f5ac0c8b2de72a89ccb6016
|
[
"MIT"
] | 5
|
2021-06-22T18:22:29.000Z
|
2022-03-02T02:17:49.000Z
|
tests/core/test_rast_client.py
|
freiburgermsu/ModelSEEDpy
|
f368354f99ddfe0b9f5ac0c8b2de72a89ccb6016
|
[
"MIT"
] | 8
|
2021-06-18T07:12:48.000Z
|
2022-03-30T15:35:06.000Z
|
tests/core/test_rast_client.py
|
freiburgermsu/ModelSEEDpy
|
f368354f99ddfe0b9f5ac0c8b2de72a89ccb6016
|
[
"MIT"
] | 7
|
2021-05-05T22:54:01.000Z
|
2022-03-11T10:18:43.000Z
|
### test examples for the rast_client.py file
def test_somefunction():
pass
| 12.428571
| 45
| 0.678161
| 12
| 87
| 4.75
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241379
| 87
| 6
| 46
| 14.5
| 0.863636
| 0.471264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
cd670396a8192db7017582dac8f1c90d2d02c739
| 279
|
py
|
Python
|
1-Patron/python/GestorDeUltimate.py
|
TanZng/patrones-combinados
|
93796d9d5296649bd76b7b2ee7c723156b9a120f
|
[
"MIT"
] | 1
|
2021-09-13T15:45:48.000Z
|
2021-09-13T15:45:48.000Z
|
1-Patron/python/GestorDeUltimate.py
|
TanZng/patrones-combinados
|
93796d9d5296649bd76b7b2ee7c723156b9a120f
|
[
"MIT"
] | null | null | null |
1-Patron/python/GestorDeUltimate.py
|
TanZng/patrones-combinados
|
93796d9d5296649bd76b7b2ee7c723156b9a120f
|
[
"MIT"
] | 1
|
2021-09-24T03:00:47.000Z
|
2021-09-24T03:00:47.000Z
|
import Ultimate
class GestorDeUltimate():
def __init__(self, ultimate):
self.__ultimate = ultimate
def subirDeNivel(self, incremento):
return self.__ultimate.incNivel(incremento)
def generarUltimate():
return Ultimate("Basic", 10, 1)
| 27.9
| 51
| 0.670251
| 27
| 279
| 6.62963
| 0.555556
| 0.201117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.236559
| 279
| 10
| 52
| 27.9
| 0.826291
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.25
| 0.875
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
2697ac0fda9afa9f02882c5afb49bead86c41864
| 5,170
|
py
|
Python
|
ckanext-hdx_package/ckanext/hdx_package/tests/test_aws/test_file_removal.py
|
OCHA-DAP/hdx-ckan
|
202e0c44adc4ea8d0b90141e69365b65cce68672
|
[
"Apache-2.0"
] | 58
|
2015-01-11T09:05:15.000Z
|
2022-03-17T23:44:07.000Z
|
ckanext-hdx_package/ckanext/hdx_package/tests/test_aws/test_file_removal.py
|
OCHA-DAP/hdx-ckan
|
202e0c44adc4ea8d0b90141e69365b65cce68672
|
[
"Apache-2.0"
] | 1,467
|
2015-01-01T16:47:44.000Z
|
2022-02-28T16:51:20.000Z
|
ckanext-hdx_package/ckanext/hdx_package/tests/test_aws/test_file_removal.py
|
OCHA-DAP/hdx-ckan
|
202e0c44adc4ea8d0b90141e69365b65cce68672
|
[
"Apache-2.0"
] | 17
|
2015-05-06T14:04:21.000Z
|
2021-11-11T19:58:16.000Z
|
import os
from werkzeug import FileStorage
import ckan.model as model
import ckan.plugins.toolkit as tk
from ckanext.hdx_package.tests.test_aws.hdx_s3_test_base import HDXS3TestBase
config = tk.config
_get_action = tk.get_action
class TestFileRemovalS3(HDXS3TestBase):
def test_resource_delete(self):
context = {'model': model, 'session': model.Session, 'user': 'testsysadmin'}
resource_dict = self._resource_create_with_upload(context, self.file1_name, 'Test resource1.csv',
self.dataset1_name)
resource_id = resource_dict['id']
assert self.__file_exists(resource_id, self.file1_name)
_get_action('resource_delete')(context, {'id': resource_id})
assert not self.__file_exists(resource_id, self.file1_name)
def test_resource_update(self):
context = {'model': model, 'session': model.Session, 'user': 'testsysadmin'}
resource_dict = self._resource_create_with_upload(context, self.file1_name, 'Test resource1.csv',
self.dataset1_name)
resource_id = resource_dict['id']
resource_dict2 = self._resource_update_with_upload(context, self.file2_name, self.file2_name, resource_id)
assert not self.__file_exists(resource_id, self.file1_name)
assert self.__file_exists(resource_id, self.file2_name)
_get_action('resource_delete')(context, {'id': resource_id})
assert not self.__file_exists(resource_id, self.file2_name)
def test_resource_update_with_same_resource_name(self):
context = {'model': model, 'session': model.Session, 'user': 'testsysadmin'}
resource_dict = self._resource_create_with_upload(context, self.file1_name, 'Test resource1.csv',
self.dataset1_name)
resource_id = resource_dict['id']
resource_dict2 = self._resource_update_with_upload(context, self.file2_name, 'Test resource1.csv', resource_id)
assert not self.__file_exists(resource_id, self.file1_name)
assert self.__file_exists(resource_id, self.file2_name)
_get_action('resource_delete')(context, {'id': resource_id})
assert not self.__file_exists(resource_id, self.file2_name)
def test_resource_update_with_same_file_name(self):
context = {'model': model, 'session': model.Session, 'user': 'testsysadmin'}
resource_dict = self._resource_create_with_upload(context, self.file1_name, 'Test resource1.csv',
self.dataset1_name)
resource_id = resource_dict['id']
resource_dict2 = self._resource_update_with_upload(context, self.file1_name, 'Test resource2.csv', resource_id)
assert self.__file_exists(resource_id, self.file1_name)
_get_action('resource_delete')(context, {'id': resource_id})
assert not self.__file_exists(resource_id, self.file1_name)
def test_package_purge(self):
context = {'model': model, 'session': model.Session, 'user': 'testsysadmin'}
self._create_package_by_user(self.dataset2_name, 'testsysadmin', create_org_and_group=False)
resource_dict = self._resource_create_with_upload(context, self.file1_name, 'Test resource1.csv',
self.dataset2_name)
resource_id = resource_dict['id']
_get_action('hdx_dataset_purge')(context, {'id': self.dataset2_name})
assert not self.__file_exists(resource_id, self.file1_name)
def test_package_revise(self):
context = {'model': model, 'session': model.Session, 'user': 'testsysadmin'}
self._create_package_by_user(self.revise_dataset_name, 'testsysadmin', create_org_and_group=False)
resource_dict = self._resource_create_with_upload(context, self.file1_name, 'Test resource1.csv',
self.revise_dataset_name)
resource_id = resource_dict['id']
file_path = os.path.join(os.path.dirname(__file__), self.file2_name)
with open(file_path) as f:
file_upload = FileStorage(f)
package_dict = _get_action('package_revise')(context,
{
'match__name': self.revise_dataset_name,
'update__resources__1__name': 'Test resource2.csv',
'update__resources__1__upload': file_upload,
})
assert not self.__file_exists(resource_id, self.file1_name)
assert self.__file_exists(resource_id, self.file2_name)
_get_action('hdx_dataset_purge')(context, {'id': self.revise_dataset_name})
assert not self.__file_exists(resource_id, self.file2_name)
@classmethod
def __file_exists(cls, resource_id, file_name):
return bool(cls._fetch_s3_object(resource_id, file_name))
| 48.317757
| 119
| 0.638298
| 592
| 5,170
| 5.138514
| 0.126689
| 0.095332
| 0.064103
| 0.101249
| 0.772189
| 0.767587
| 0.749178
| 0.744905
| 0.723537
| 0.720907
| 0
| 0.013478
| 0.268085
| 5,170
| 106
| 120
| 48.773585
| 0.790433
| 0
| 0
| 0.533333
| 0
| 0
| 0.106576
| 0.010445
| 0
| 0
| 0
| 0
| 0.186667
| 1
| 0.093333
| false
| 0
| 0.066667
| 0.013333
| 0.186667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
26c4550f04bd541899e9cef884acf360650a7832
| 3,776
|
py
|
Python
|
unittests/scales/test_ordinal.py
|
xxao/pero
|
a7f0c84fae0b21fe120204e798bd61cdab3a125d
|
[
"MIT"
] | 13
|
2019-07-15T17:51:21.000Z
|
2022-03-15T06:13:43.000Z
|
unittests/scales/test_ordinal.py
|
xxao/pero
|
a7f0c84fae0b21fe120204e798bd61cdab3a125d
|
[
"MIT"
] | 1
|
2021-12-29T00:46:44.000Z
|
2022-01-21T16:18:48.000Z
|
unittests/scales/test_ordinal.py
|
xxao/pero
|
a7f0c84fae0b21fe120204e798bd61cdab3a125d
|
[
"MIT"
] | 3
|
2020-09-27T14:31:45.000Z
|
2022-01-22T14:28:15.000Z
|
# Created byMartin.cz
# Copyright (c) Martin Strohalm. All rights reserved.
import unittest
import pero
class TestCase(unittest.TestCase):
"""Test case for ordinal scale."""
def test_scale(self):
"""Tests whether scale works correctly."""
in_range = [1, 2, 3]
out_range = ["red", "green", "blue"]
scale = pero.OrdinalScale(
in_range = in_range,
out_range = out_range)
self.assertEqual(scale.scale(1), "red")
self.assertEqual(scale.scale(2), "green")
self.assertEqual(scale.scale(3), "blue")
def test_invert(self):
"""Tests whether invert works correctly."""
in_range = [1, 2, 3]
out_range = ["red", "green", "blue"]
scale = pero.OrdinalScale(
in_range = in_range,
out_range = out_range)
# check scale
self.assertEqual(scale.invert("red"), 1)
self.assertEqual(scale.invert("green"), 2)
self.assertEqual(scale.invert("blue"), 3)
def test_default(self):
"""Tests whether default value works correctly."""
in_range = [1, 2, 3]
out_range = ["red", "green", "blue"]
default = "black"
scale = pero.OrdinalScale(
in_range = in_range,
out_range = out_range,
default = default)
self.assertEqual(scale.scale(0), "black")
self.assertEqual(scale.invert("black"), None)
def test_implicit(self):
"""Tests whether implicit values work correctly."""
out_range = ["red", "green", "blue"]
default = None
# disable implicit
scale = pero.OrdinalScale(
out_range = out_range,
default = default,
implicit = False)
self.assertEqual(scale.scale(1), None)
self.assertEqual(scale.invert("red"), None)
# enable implicit
scale = pero.OrdinalScale(
out_range = out_range,
default = default,
implicit = True)
self.assertEqual(scale.scale(1), "red")
self.assertEqual(scale.scale(2), "green")
self.assertEqual(scale.scale(1), "red")
self.assertEqual(scale.scale(3), "blue")
self.assertEqual(scale.invert("red"), 1)
self.assertEqual(scale.invert("green"), 2)
self.assertEqual(scale.invert("blue"), 3)
def test_recycle(self):
"""Tests whether recycling works correctly."""
in_range = [1, 2, 3, 4, 5, 6, 7]
out_range = ["red", "green", "blue"]
default = None
# disable recycling
scale = pero.OrdinalScale(
in_range = in_range,
out_range = out_range,
default = default,
recycle = False)
self.assertEqual(scale.scale(1), "red")
self.assertEqual(scale.scale(7), None)
self.assertEqual(scale.invert("red"), 1)
# enable recycling
scale = pero.OrdinalScale(
in_range = in_range,
out_range = out_range,
default = default,
recycle = True)
self.assertEqual(scale.scale(1), "red")
self.assertEqual(scale.scale(4), "red")
self.assertEqual(scale.scale(5), "green")
self.assertEqual(scale.scale(6), "blue")
self.assertEqual(scale.scale(7), "red")
self.assertEqual(scale.invert("red"), 1)
self.assertEqual(scale.invert("green"), 2)
self.assertEqual(scale.invert("blue"), 3)
# run test case
if __name__ == "__main__":
unittest.main(verbosity=2)
| 29.271318
| 59
| 0.53893
| 400
| 3,776
| 4.9725
| 0.155
| 0.211161
| 0.281549
| 0.201106
| 0.75817
| 0.715435
| 0.655103
| 0.643037
| 0.604827
| 0.604827
| 0
| 0.017111
| 0.334481
| 3,776
| 128
| 60
| 29.5
| 0.774373
| 0.106992
| 0
| 0.666667
| 0
| 0
| 0.05189
| 0
| 0
| 0
| 0
| 0
| 0.358974
| 1
| 0.064103
| false
| 0
| 0.025641
| 0
| 0.102564
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f802b29cc26c46e00e18fbc0dc009137475b16ff
| 8,638
|
py
|
Python
|
tests/integration/cartography/intel/azure/test_iam.py
|
Cloudanix/cartography
|
653d3cccbb9318e876fd558d386593e3612f4f78
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/cartography/intel/azure/test_iam.py
|
Cloudanix/cartography
|
653d3cccbb9318e876fd558d386593e3612f4f78
|
[
"Apache-2.0"
] | 11
|
2020-12-21T02:51:11.000Z
|
2022-03-15T14:30:43.000Z
|
tests/integration/cartography/intel/azure/test_iam.py
|
Cloudanix/cartography
|
653d3cccbb9318e876fd558d386593e3612f4f78
|
[
"Apache-2.0"
] | 1
|
2021-02-05T08:08:47.000Z
|
2021-02-05T08:08:47.000Z
|
from cartography.intel.azure import iam
from tests.data.azure.iam import DESCRIBE_APPLICATIONS
from tests.data.azure.iam import DESCRIBE_DOMAINS
from tests.data.azure.iam import DESCRIBE_GROUPS
from tests.data.azure.iam import DESCRIBE_ROLES
from tests.data.azure.iam import DESCRIBE_SERVICE_ACCOUNTS
from tests.data.azure.iam import DESCRIBE_USERS
TEST_TENANT_ID = '00-00-00-00'
TEST_UPDATE_TAG = 123456789
def test_load_users(neo4j_session):
iam.load_tenant_users(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_USERS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"gdvsd43562",
"gdvsd43562we34",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureUser) RETURN r.id;
""", )
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_user_relationships(neo4j_session):
neo4j_session.run(
"""
MERGE (as:AzureTenant{id: {tenant_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
tenant_id=TEST_TENANT_ID,
update_tag=TEST_UPDATE_TAG,
)
iam.load_tenant_users(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_USERS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_TENANT_ID,
"gdvsd43562",
),
(
TEST_TENANT_ID,
"gdvsd43562we34",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureTenant)-[:RESOURCE]->(n2:AzureUser) RETURN n1.id, n2.id;
""", )
actual = {(r['n1.id'], r['n2.id']) for r in result}
assert actual == expected
def test_load_groups(neo4j_session):
iam.load_tenant_groups(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_GROUPS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"45b7d2e7-b882-4a80-ba97-10b7a63b8fa4",
"d7797254-3084-44d0-99c9-a3b5ab149538",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureGroup) RETURN r.id;
""", )
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_group_relationships(neo4j_session):
neo4j_session.run(
"""
MERGE (as:AzureTenant{id: {tenant_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
tenant_id=TEST_TENANT_ID,
update_tag=TEST_UPDATE_TAG,
)
iam.load_tenant_groups(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_GROUPS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_TENANT_ID,
"45b7d2e7-b882-4a80-ba97-10b7a63b8fa4",
),
(
TEST_TENANT_ID,
"d7797254-3084-44d0-99c9-a3b5ab149538",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureTenant)-[:RESOURCE]->(n2:AzureGroup) RETURN n1.id, n2.id;
""", )
actual = {(r['n1.id'], r['n2.id']) for r in result}
assert actual == expected
def test_load_applications(neo4j_session):
iam.load_tenant_applications(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_APPLICATIONS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"00000000-0000-0000-0000-000000000001",
"00000000-0000-0000-0000-000000000002",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureApplication) RETURN r.id;
""", )
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_application_relationships(neo4j_session):
neo4j_session.run(
"""
MERGE (as:AzureTenant{id: {tenant_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
tenant_id=TEST_TENANT_ID,
update_tag=TEST_UPDATE_TAG,
)
iam.load_tenant_applications(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_APPLICATIONS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_TENANT_ID,
"00000000-0000-0000-0000-000000000001",
),
(
TEST_TENANT_ID,
"00000000-0000-0000-0000-000000000002",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureTenant)-[:RESOURCE]->(n2:AzureApplication) RETURN n1.id, n2.id;
""", )
actual = {(r['n1.id'], r['n2.id']) for r in result}
assert actual == expected
def test_load_service_accounts(neo4j_session):
iam.load_tenant_service_accounts(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_SERVICE_ACCOUNTS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"86823hkhjhd",
"hvhg575757g",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureServiceAccount) RETURN r.id;
""", )
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_service_account_relationships(neo4j_session):
neo4j_session.run(
"""
MERGE (as:AzureTenant{id: {tenant_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
tenant_id=TEST_TENANT_ID,
update_tag=TEST_UPDATE_TAG,
)
iam.load_tenant_service_accounts(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_SERVICE_ACCOUNTS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_TENANT_ID,
"86823hkhjhd",
),
(
TEST_TENANT_ID,
"hvhg575757g",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureTenant)-[:RESOURCE]->(n2:AzureServiceAccount) RETURN n1.id, n2.id;
""", )
actual = {(r['n1.id'], r['n2.id']) for r in result}
assert actual == expected
def test_load_domains(neo4j_session):
iam.load_tenant_domains(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_DOMAINS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"contoso1.com",
"contoso2.com",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureDomain) RETURN r.name;
""", )
actual_nodes = {n['r.name'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_domain_relationships(neo4j_session):
neo4j_session.run(
"""
MERGE (as:AzureTenant{id: {tenant_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
tenant_id=TEST_TENANT_ID,
update_tag=TEST_UPDATE_TAG,
)
iam.load_tenant_domains(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_DOMAINS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_TENANT_ID,
"contoso1.com",
),
(
TEST_TENANT_ID,
"contoso2.com",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureTenant)-[:RESOURCE]->(n2:AzureDomain) RETURN n1.id, n2.name;
""", )
actual = {(r['n1.id'], r['n2.name']) for r in result}
assert actual == expected
def test_load_roles(neo4j_session):
iam.load_roles(
neo4j_session,
DESCRIBE_ROLES,
TEST_UPDATE_TAG,
)
expected_nodes = {
"97254c67-852d-61c20eb66ffc",
"97254c67-852d-61c20eb66ffcsdds",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureRole) RETURN r.id;
""", )
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_role_relationships(neo4j_session):
neo4j_session.run(
"""
MERGE (as:AzureTenant{id: {tenant_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
tenant_id=TEST_TENANT_ID,
update_tag=TEST_UPDATE_TAG,
)
iam.load_tenant_service_accounts(
neo4j_session,
TEST_TENANT_ID,
DESCRIBE_SERVICE_ACCOUNTS,
TEST_UPDATE_TAG,
)
iam.load_roles(
neo4j_session,
DESCRIBE_ROLES,
TEST_UPDATE_TAG,
)
expected = {
(
"86823hkhjhd",
"97254c67-852d-61c20eb66ffc",
),
(
"hvhg575757g",
"97254c67-852d-61c20eb66ffcsdds",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureServiceAccount)-[:ASSUME_ROLE]->(n2:AzureRole) RETURN n1.id, n2.id;
""", )
actual = {(r['n1.id'], r['n2.id']) for r in result}
assert actual == expected
| 22.612565
| 90
| 0.57768
| 943
| 8,638
| 5.005302
| 0.096501
| 0.109322
| 0.071186
| 0.05339
| 0.834746
| 0.769915
| 0.727966
| 0.659746
| 0.659746
| 0.607839
| 0
| 0.073757
| 0.310952
| 8,638
| 381
| 91
| 22.671916
| 0.719254
| 0
| 0
| 0.722008
| 0
| 0
| 0.097882
| 0.059233
| 0
| 0
| 0
| 0
| 0.046332
| 1
| 0.046332
| false
| 0
| 0.027027
| 0
| 0.073359
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f83cb1293f4622b17cfb9c1f35a6f77fabc9ef6b
| 169
|
py
|
Python
|
loops/sum_numbers.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | null | null | null |
loops/sum_numbers.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | null | null | null |
loops/sum_numbers.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | 1
|
2022-01-14T17:12:44.000Z
|
2022-01-14T17:12:44.000Z
|
numbers_count = int(input())
sum_number = 0
for counter in range(numbers_count):
current_number = int(input())
sum_number += current_number
print(sum_number)
| 16.9
| 36
| 0.727811
| 24
| 169
| 4.833333
| 0.541667
| 0.232759
| 0.189655
| 0.293103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007092
| 0.16568
| 169
| 9
| 37
| 18.777778
| 0.815603
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f84139625f26bc68b65e2b38d2c7925f9f66b72f
| 140
|
py
|
Python
|
utils.py
|
llecxis/DataSet_from_IMU_data
|
38a7be00db8568c042d1b2bf7745cfc7a8041af0
|
[
"CC0-1.0"
] | null | null | null |
utils.py
|
llecxis/DataSet_from_IMU_data
|
38a7be00db8568c042d1b2bf7745cfc7a8041af0
|
[
"CC0-1.0"
] | null | null | null |
utils.py
|
llecxis/DataSet_from_IMU_data
|
38a7be00db8568c042d1b2bf7745cfc7a8041af0
|
[
"CC0-1.0"
] | null | null | null |
#common utilities for all module
def trap_exc_during_debug(*args):
# when app raises uncaught exception, print info
print(args)
| 28
| 53
| 0.735714
| 20
| 140
| 5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 140
| 5
| 54
| 28
| 0.892857
| 0.557143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
f847f13a3159ccd5f83122474d7e1afba60452c8
| 65
|
py
|
Python
|
hubblestack/utils/__init__.py
|
buddwm/hubble
|
b384ee48556ca144ae6f09dd0b45db29288e5293
|
[
"Apache-2.0"
] | 363
|
2017-01-10T22:02:47.000Z
|
2022-03-21T10:44:40.000Z
|
hubblestack/utils/__init__.py
|
buddwm/hubble
|
b384ee48556ca144ae6f09dd0b45db29288e5293
|
[
"Apache-2.0"
] | 439
|
2017-01-12T22:39:42.000Z
|
2021-10-11T18:43:28.000Z
|
hubblestack/utils/__init__.py
|
buddwm/hubble
|
b384ee48556ca144ae6f09dd0b45db29288e5293
|
[
"Apache-2.0"
] | 138
|
2017-01-05T22:10:59.000Z
|
2021-09-01T14:35:00.000Z
|
# coding: utf-8
from hubblestack.utils.process import daemonize
| 16.25
| 47
| 0.8
| 9
| 65
| 5.777778
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.123077
| 65
| 3
| 48
| 21.666667
| 0.894737
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f85fd154b92c44461bd520b75fb67982ea950929
| 331
|
py
|
Python
|
header_handlers/equipment_name_handler.py
|
cookyt/mhw_optimizer
|
05dc4483f9bb0b6f8c2d8e205239d58bbd4c274a
|
[
"MIT"
] | null | null | null |
header_handlers/equipment_name_handler.py
|
cookyt/mhw_optimizer
|
05dc4483f9bb0b6f8c2d8e205239d58bbd4c274a
|
[
"MIT"
] | null | null | null |
header_handlers/equipment_name_handler.py
|
cookyt/mhw_optimizer
|
05dc4483f9bb0b6f8c2d8e205239d58bbd4c274a
|
[
"MIT"
] | null | null | null |
from header_handlers.header_handler import HeaderHandler
class EquipmentNameHandler(HeaderHandler):
def __init__(self, body_part):
self.body_part = body_part
def generate_output(self, combination):
return combination.equipment[self.body_part].name
def name(self):
return self.body_part.name
| 25.461538
| 57
| 0.743202
| 40
| 331
| 5.85
| 0.475
| 0.17094
| 0.205128
| 0.136752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18429
| 331
| 12
| 58
| 27.583333
| 0.866667
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.25
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
f86cd2122c756ab12fbabe0d4ac3b093a91a406f
| 116
|
py
|
Python
|
chocolate/connection/__init__.py
|
Intelecy/chocolate
|
0ba4f6f0130eab851d32d5534241c8cac3f6666e
|
[
"BSD-3-Clause"
] | 105
|
2017-10-27T02:14:22.000Z
|
2022-01-13T12:57:05.000Z
|
chocolate/connection/__init__.py
|
Intelecy/chocolate
|
0ba4f6f0130eab851d32d5534241c8cac3f6666e
|
[
"BSD-3-Clause"
] | 31
|
2017-10-03T13:41:35.000Z
|
2021-08-20T21:01:29.000Z
|
chocolate/connection/__init__.py
|
Intelecy/chocolate
|
0ba4f6f0130eab851d32d5534241c8cac3f6666e
|
[
"BSD-3-Clause"
] | 38
|
2017-10-05T20:19:42.000Z
|
2022-03-28T11:34:04.000Z
|
from .mongodb import MongoDBConnection
from .pandas import DataFrameConnection
from .sqlite import SQLiteConnection
| 29
| 39
| 0.87069
| 12
| 116
| 8.416667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 3
| 40
| 38.666667
| 0.971154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f891725e41561e5a988a2bd355ea6644f2b7b88a
| 286
|
py
|
Python
|
examples/00simple/legacy_person.py
|
Danil-Grigorev/swagger-marshmallow-codegen
|
4c077f6e1ef535bcbdbf1f643f97bc4cbc62c0e8
|
[
"MIT"
] | 49
|
2017-02-05T17:32:18.000Z
|
2022-01-30T13:20:22.000Z
|
examples/00simple/legacy_person.py
|
Danil-Grigorev/swagger-marshmallow-codegen
|
4c077f6e1ef535bcbdbf1f643f97bc4cbc62c0e8
|
[
"MIT"
] | 62
|
2016-12-27T15:38:28.000Z
|
2021-09-30T02:47:00.000Z
|
examples/00simple/legacy_person.py
|
Danil-Grigorev/swagger-marshmallow-codegen
|
4c077f6e1ef535bcbdbf1f643f97bc4cbc62c0e8
|
[
"MIT"
] | 10
|
2017-07-19T12:38:25.000Z
|
2020-04-07T09:11:22.000Z
|
# this is auto-generated by swagger-marshmallow-codegen
from __future__ import annotations
from swagger_marshmallow_codegen.schema.legacy import LegacySchema
from marshmallow import fields
class Person(LegacySchema):
name = fields.String(required=True)
age = fields.Integer()
| 28.6
| 66
| 0.811189
| 35
| 286
| 6.457143
| 0.685714
| 0.159292
| 0.221239
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125874
| 286
| 9
| 67
| 31.777778
| 0.904
| 0.185315
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f8924b5997d8b16ab480b711e02522f17e129fa1
| 214
|
py
|
Python
|
cvpro/__init__.py
|
Mohak-CODING-HEAVEN/CVPRO
|
09a2cb4a428738c9e77f17b71469d55eff5e3699
|
[
"MIT"
] | 5
|
2021-07-24T18:20:11.000Z
|
2022-03-23T09:58:27.000Z
|
cvpro/__init__.py
|
Mohak-CODING-HEAVEN/cvpro
|
09a2cb4a428738c9e77f17b71469d55eff5e3699
|
[
"MIT"
] | null | null | null |
cvpro/__init__.py
|
Mohak-CODING-HEAVEN/cvpro
|
09a2cb4a428738c9e77f17b71469d55eff5e3699
|
[
"MIT"
] | null | null | null |
"""
CVPRO - Computer Vision PROfessional
BY: MOHAK BAJAJ
CODING HEAVEN
"""
from cvpro.Utils import stackImages, cornerRect, findContours, overlayPNG, rotateImage, ColorFinder, FPS, LivePlot, putTextRect
| 23.777778
| 128
| 0.757009
| 22
| 214
| 7.363636
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163551
| 214
| 8
| 129
| 26.75
| 0.905028
| 0.313084
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3e01b8a4d9267895ad5d36ade74d575f3744724e
| 221
|
py
|
Python
|
old_files/__init__.py
|
ZhouHUB/xpd_workflow
|
be3d5dee1f8021b45f7e810c96a3dfefa8942bb5
|
[
"BSD-3-Clause"
] | null | null | null |
old_files/__init__.py
|
ZhouHUB/xpd_workflow
|
be3d5dee1f8021b45f7e810c96a3dfefa8942bb5
|
[
"BSD-3-Clause"
] | 4
|
2016-08-25T02:59:05.000Z
|
2016-09-28T22:32:34.000Z
|
old_files/__init__.py
|
ZhouHUB/xpd_workflow
|
be3d5dee1f8021b45f7e810c96a3dfefa8942bb5
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) Brookhaven National Lab 2O14
# All rights reserved
# BSD License
# See LICENSE for full text
from __future__ import (absolute_import, division, print_function,
unicode_literals)
| 22.1
| 66
| 0.705882
| 26
| 221
| 5.730769
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017964
| 0.244344
| 221
| 9
| 67
| 24.555556
| 0.874252
| 0.452489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
3e2047383a537d1f6a44c09d91e6eacf5537b4e0
| 815
|
py
|
Python
|
tests/problems/cracking_the_code_interview/test_is_unique.py
|
deniscostadsc/becoming-a-better-programmer
|
ec993d494ef5e9272429cc4d2ed1e7ffbe8e1bfd
|
[
"MIT"
] | 1
|
2020-05-04T11:28:35.000Z
|
2020-05-04T11:28:35.000Z
|
tests/problems/cracking_the_code_interview/test_is_unique.py
|
deniscostadsc/becoming-a-better-programmer
|
ec993d494ef5e9272429cc4d2ed1e7ffbe8e1bfd
|
[
"MIT"
] | null | null | null |
tests/problems/cracking_the_code_interview/test_is_unique.py
|
deniscostadsc/becoming-a-better-programmer
|
ec993d494ef5e9272429cc4d2ed1e7ffbe8e1bfd
|
[
"MIT"
] | null | null | null |
from problems.cracking_the_code_interview.is_unique import (
is_unique,
is_unique_with_no_extra_space,
)
def test_all_chars_are_unique():
assert is_unique("a")
assert is_unique("abcde")
def test_has_repeated_chars():
assert not is_unique("aa")
assert not is_unique("abccde")
def test_has_only_unique_chars_with_no_extra_space():
assert is_unique_with_no_extra_space("a")
assert is_unique_with_no_extra_space("abc")
assert is_unique_with_no_extra_space("abcd")
assert is_unique_with_no_extra_space("abcde")
def test_has_repeated_chars_with_no_extra_space():
assert not is_unique_with_no_extra_space("aa")
assert not is_unique_with_no_extra_space("aac")
assert not is_unique_with_no_extra_space("abccde")
assert not is_unique_with_no_extra_space("caa")
| 28.103448
| 60
| 0.785276
| 133
| 815
| 4.240602
| 0.233083
| 0.212766
| 0.214539
| 0.312057
| 0.691489
| 0.654255
| 0.446809
| 0.234043
| 0
| 0
| 0
| 0
| 0.133742
| 815
| 28
| 61
| 29.107143
| 0.798867
| 0
| 0
| 0
| 0
| 0
| 0.050307
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.05
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3e30880f06d3d9464862156404cbc350836fb698
| 92
|
py
|
Python
|
dips/net/__init__.py
|
shaharazulay/deep-semantic-prior
|
d1710e13f46c7e8ca365a815e0cd0ce6e8eb4d2b
|
[
"BSD-3-Clause"
] | 5
|
2021-12-08T03:20:40.000Z
|
2022-02-22T06:51:32.000Z
|
dips/net/__init__.py
|
shaharazulay/deep-semantic-prior
|
d1710e13f46c7e8ca365a815e0cd0ce6e8eb4d2b
|
[
"BSD-3-Clause"
] | null | null | null |
dips/net/__init__.py
|
shaharazulay/deep-semantic-prior
|
d1710e13f46c7e8ca365a815e0cd0ce6e8eb4d2b
|
[
"BSD-3-Clause"
] | 1
|
2021-12-22T07:22:46.000Z
|
2021-12-22T07:22:46.000Z
|
from .skip_model import skip, skip_mask
from .optimization import optimize, uneven_optimize
| 30.666667
| 51
| 0.847826
| 13
| 92
| 5.769231
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 92
| 2
| 52
| 46
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3e6dd412fbaef0b30a3c4c255ec69f0f86faa4f0
| 214
|
py
|
Python
|
UserCode/SiPixelAnalyzer/python/__init__.py
|
mverwe/UserCode
|
378dbc5e65dfc283c41355ace89db1b2e85e706d
|
[
"CC0-1.0"
] | null | null | null |
UserCode/SiPixelAnalyzer/python/__init__.py
|
mverwe/UserCode
|
378dbc5e65dfc283c41355ace89db1b2e85e706d
|
[
"CC0-1.0"
] | null | null | null |
UserCode/SiPixelAnalyzer/python/__init__.py
|
mverwe/UserCode
|
378dbc5e65dfc283c41355ace89db1b2e85e706d
|
[
"CC0-1.0"
] | null | null | null |
#Automatically created by SCRAM
import os
__path__.append(os.path.dirname(os.path.abspath(__file__).rsplit('/PixelReadoutDQM/SiPixelAnalyzer/',1)[0])+'/cfipython/slc6_amd64_gcc491/PixelReadoutDQM/SiPixelAnalyzer')
| 53.5
| 171
| 0.827103
| 26
| 214
| 6.423077
| 0.769231
| 0.107784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038647
| 0.03271
| 214
| 3
| 172
| 71.333333
| 0.768116
| 0.140187
| 0
| 0
| 0
| 0
| 0.508197
| 0.508197
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3e6ff076c9b200ebc63d0c460da024f339356788
| 333
|
py
|
Python
|
nlpaug/model/word_dict/word_dictionary.py
|
booltime/nlpaug
|
d21e51bacd170dcd3dddfc34a401f0215f91dbf1
|
[
"MIT"
] | 1
|
2021-09-08T09:18:02.000Z
|
2021-09-08T09:18:02.000Z
|
nlpaug/model/word_dict/word_dictionary.py
|
booltime/nlpaug
|
d21e51bacd170dcd3dddfc34a401f0215f91dbf1
|
[
"MIT"
] | null | null | null |
nlpaug/model/word_dict/word_dictionary.py
|
booltime/nlpaug
|
d21e51bacd170dcd3dddfc34a401f0215f91dbf1
|
[
"MIT"
] | null | null | null |
class WordDictionary:
def __init__(self, cache=True):
self.cache = cache
def train(self, data):
raise NotImplemented()
def predict(self, data):
raise NotImplemented()
def save(self, model_path):
raise NotImplemented()
def read(self, model_path):
raise NotImplemented()
| 20.8125
| 35
| 0.627628
| 36
| 333
| 5.638889
| 0.444444
| 0.374384
| 0.325123
| 0.26601
| 0.610837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.276276
| 333
| 15
| 36
| 22.2
| 0.842324
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
e416aa0b38e16bab3772c8e1e5bf756d77e7cb07
| 207
|
py
|
Python
|
flatdata-generator/flatdata/generator/tree/nodes/trivial/__init__.py
|
gferon/flatdata
|
8839fb36be105e496fea8acc3fc907ae878dd063
|
[
"Apache-2.0"
] | 140
|
2018-01-26T21:59:38.000Z
|
2022-02-17T10:23:29.000Z
|
flatdata-generator/flatdata/generator/tree/nodes/trivial/__init__.py
|
gferon/flatdata
|
8839fb36be105e496fea8acc3fc907ae878dd063
|
[
"Apache-2.0"
] | 114
|
2018-01-26T17:49:20.000Z
|
2021-11-26T13:27:08.000Z
|
flatdata-generator/flatdata/generator/tree/nodes/trivial/__init__.py
|
gferon/flatdata
|
8839fb36be105e496fea8acc3fc907ae878dd063
|
[
"Apache-2.0"
] | 22
|
2018-01-26T16:51:24.000Z
|
2021-04-27T13:32:44.000Z
|
from .constant import Constant
from .field import Field
from .namespace import Namespace
from .structure import Structure
from .enumeration import Enumeration
from .enumeration_value import EnumerationValue
| 29.571429
| 47
| 0.855072
| 25
| 207
| 7.04
| 0.36
| 0.170455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115942
| 207
| 6
| 48
| 34.5
| 0.961749
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e43bf053c622a620566bc8815bad97e1df7f65b0
| 13
|
py
|
Python
|
myproject/constants.py
|
ianfaulkner/myproject
|
d4cb6bf48b9d46b979ddea01c31e794ddea29af0
|
[
"BSD-2-Clause"
] | null | null | null |
myproject/constants.py
|
ianfaulkner/myproject
|
d4cb6bf48b9d46b979ddea01c31e794ddea29af0
|
[
"BSD-2-Clause"
] | null | null | null |
myproject/constants.py
|
ianfaulkner/myproject
|
d4cb6bf48b9d46b979ddea01c31e794ddea29af0
|
[
"BSD-2-Clause"
] | 1
|
2019-10-02T17:08:17.000Z
|
2019-10-02T17:08:17.000Z
|
pi = 3.14159
| 6.5
| 12
| 0.615385
| 3
| 13
| 2.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0.230769
| 13
| 1
| 13
| 13
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e45ef1830ce2809cc7aaac4272ab8a0ce25f4f35
| 245
|
py
|
Python
|
announce/urls.py
|
p2pu/learning-circles
|
ccd94208ec18082f8fda6d7f21eacdd71bad6023
|
[
"MIT"
] | 10
|
2016-05-03T20:41:25.000Z
|
2021-09-17T18:42:01.000Z
|
announce/urls.py
|
p2pu/learning-circles
|
ccd94208ec18082f8fda6d7f21eacdd71bad6023
|
[
"MIT"
] | 655
|
2016-05-04T19:00:35.000Z
|
2022-03-28T13:09:20.000Z
|
announce/urls.py
|
p2pu/learning-circles
|
ccd94208ec18082f8fda6d7f21eacdd71bad6023
|
[
"MIT"
] | 8
|
2016-05-06T10:24:27.000Z
|
2020-10-21T00:56:59.000Z
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^send/$', views.announce_webhook, name='announce_webhook'),
url(r'^mailchimp/(?P<webhook_secret>[\w-]+)$', views.mailchimp_webhook, name='announce_webhook'),
]
| 27.222222
| 101
| 0.706122
| 32
| 245
| 5.25
| 0.53125
| 0.267857
| 0.22619
| 0.309524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 245
| 8
| 102
| 30.625
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0.314286
| 0.155102
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e460b8dff81b5de80c35645a388d11f7933de4e7
| 8,713
|
py
|
Python
|
realworld-testbed/scenarios.py
|
jhuwyler/qpep
|
3b06f02f749cbc14ff9988ad6d9411f0a666f5f9
|
[
"Unlicense"
] | null | null | null |
realworld-testbed/scenarios.py
|
jhuwyler/qpep
|
3b06f02f749cbc14ff9988ad6d9411f0a666f5f9
|
[
"Unlicense"
] | null | null | null |
realworld-testbed/scenarios.py
|
jhuwyler/qpep
|
3b06f02f749cbc14ff9988ad6d9411f0a666f5f9
|
[
"Unlicense"
] | null | null | null |
from abc import ABC, abstractmethod
from loguru import logger
import docker
import time
import os
from dotenv import load_dotenv
load_dotenv()
load_dotenv(str(os.getenv("SERVER_ENV")))
load_dotenv(str(os.getenv("CLIENT_ENV")))
class Scenario(ABC):
def __init__(self, name, testbed, benchmarks):
self.name = name
self.testbed = testbed
self.benchmarks = benchmarks
@abstractmethod
def deploy_scenario(self):
self.testbed.start_testbed()
def run_benchmarks(self, deployed=False):
for benchmark in self.benchmarks:
if not deployed:
self.deploy_scenario()
benchmark.run()
def print_results(self):
print("*"*25)
print("Benchmark Results for ", self.name)
print("*"*25)
for benchmark in self.benchmarks:
print("****", benchmark.name, "****")
benchmark.print_results()
class PlainScenario(Scenario):
def deploy_scenario(self, testbed_up=False):
if not testbed_up:
super().deploy_scenario()
docker_client = docker.from_env()
terminal_workstation = docker_client.containers.get(os.getenv("WS_ST_CONTAINER_NAME"))
logger.debug("Configuring proxy on Terminal WS")
terminal_workstation.exec_run("export http_proxy=http://"+os.getenv("PROXY_SRV_URL")+":5001")
terminal_workstation.exec_run("export https_proxy=https://"+os.getenv("PROXY_SRV_URL")+":5001")
class OpenVPNScenario(Scenario):
def deploy_scenario(self, testbed_up=False):
if not testbed_up:
super().deploy_scenario()
docker_client = docker.from_env()
terminal_workstation = docker_client.containers.get(os.getenv("WS_ST_CONTAINER_NAME"))
# Satellite latency means that it takes OpenVPN a long time to establish the connection, waiting is easiest
logger.debug("Launching OVPN and waiting...remote "+str(os.getenv("WS_OVPN_URL"))+" "+str(os.getenv("WS_OVPN_PORT")))
terminal_workstation.exec_run("openvpn --remote "+str(os.getenv("WS_OVPN_URL"))+" "+str(os.getenv("WS_OVPN_PORT"))+" udp --config /root/client.ovpn --daemon")
time.sleep(20)
class OpenVPNTCPScenario(Scenario):
def deploy_scenario(self, testbed_up=False):
if not testbed_up:
super().deploy_scenario()
docker_client = docker.from_env()
terminal_workstation = docker_client.containers.get(os.getenv("WS_ST_CONTAINER_NAME"))
# Satellite latency means that it takes OpenVPN a long time to establish the connection, waiting is easiest
logger.debug("Launching OVPN and waiting...remote "+str(os.getenv("WS_OVPN_URL"))+" "+str(os.getenv("WS_OVPN_PORT")))
terminal_workstation.exec_run("openvpn --remote "+str(os.getenv("WS_OVPN_URL"))+" "+str(os.getenv("WS_OVPN_PORT"))+" tcp --config /root/client.ovpn --daemon")
time.sleep(20)
class QPEPScenario(Scenario):
def __init__(self, name, testbed, benchmarks, multi_stream=True):
self.multi_stream = multi_stream
super().__init__(name, testbed, benchmarks)
def deploy_scenario(self, testbed_up=False):
if not testbed_up:
super().deploy_scenario()
docker_client = docker.from_env()
logger.debug("Configuring Client Side of QPEP Proxy")
terminal_container = docker_client.containers.get(os.getenv("ST_CONTAINER_NAME"))
terminal_container.exec_run("bash ./tmp/config/configure_qpep.sh")
logger.debug("Configuring Gateway Side of QPEP Proxy")
docker_client_cloud = docker.DockerClient(base_url="ssh://"+os.getenv("DOCKER_REMOTE_URL"))
gateway_workstation = docker_client_cloud.containers.get(os.getenv('WS_GW_CONTAINER_NAME'))
if testbed_up:
# kill running QPEP services for fresh start
gateway_workstation.exec_run("pkill -9 main")
terminal_container.exec_run("pkill -9 main")
logger.debug("Launching QPEP Client")
terminal_container.exec_run("go run /root/go/src/qpep/main.go -client -gateway "+os.getenv("QPEP_SRV_URL")+" -port "+os.getenv("QPEP_SRV_PORT"), detach=True)
logger.debug("Launching QPEP Gateway")
gateway_workstation.exec_run("go run /root/go/src/qpep/main.go -port "+os.getenv("QPEP_SRV_PORT"), detach=True)
logger.success("QPEP Running")
class QPEPAckScenario(Scenario):
def deploy_scenario(self, testbed_up=False, ack_level=4):
if not testbed_up:
super().deploy_scenario()
docker_client = docker.from_env()
terminal_container = docker_client.containers.get(os.getenv("ST_CONTAINER_NAME"))
gateway_workstation = docker_client.containers.get(os.getenv("WS_GW_CONTAINER_NAME"))
if testbed_up:
logger.debug("Killing any prior QPEP")
terminal_container.exec_run("pkill -9 main")
gateway_workstation.exec_run("pkill -9 main")
time.sleep(1)
else:
logger.debug("Configuring Client Side of QPEP Proxy")
terminal_container.exec_run("bash /opensand_config/configure_qpep.sh")
logger.debug("Configuring Gateway Side of QPEP Proxy")
gateway_workstation.exec_run("bash /opensand_config/configure_qpep.sh")
logger.debug("Launching QPEP Client")
terminal_container.exec_run("go run /root/go/src/qpep/main.go -client -minBeforeDecimation 2 -ackDelay 8000 -varAckDelay 16.0 -gateway " + str(os.getenv("GW_NETWORK_HEAD")) + ".0.9 -acks " + str(ack_level), detach=True)
logger.debug("Launching QPEP Gateway")
gateway_workstation.exec_run("go run /root/go/src/qpep/main.go -minBeforeDecimation 2 -ackDelay 8000 -varAckDelay 16.0", detach=True)
logger.success("QPEP Running")
class QPEPCongestionScenario(Scenario):
def deploy_scenario(self, testbed_up=False, congestion_window=10):
if not testbed_up:
super().deploy_scenario()
docker_client = docker.from_env()
terminal_container = docker_client.containers.get(os.getenv("ST_CONTAINER_NAME"))
gateway_workstation = docker_client.containers.get(os.getenv("WS_GW_CONTAINER_NAME"))
if testbed_up:
logger.debug("Killing any prior QPEP")
terminal_container.exec_run("pkill -9 main")
gateway_workstation.exec_run("pkill -9 main")
time.sleep(1)
else:
logger.debug("Configuring Client Side of QPEP Proxy")
terminal_container.exec_run("bash /opensand_config/configure_qpep.sh")
logger.debug("Configuring Gateway Side of QPEP Proxy")
gateway_workstation.exec_run("bash /opensand_config/configure_qpep.sh")
logger.debug("Launching QPEP Client")
terminal_container.exec_run("go run /root/go/src/qpep/main.go -client -gateway " + str(os.getenv("GW_NETWORK_HEAD")) +".0.9 -congestion " + str(congestion_window), detach=True)
logger.debug("Launching QPEP Gateway")
gateway_workstation.exec_run("go run /root/go/src/qpep/main.go -congestion " + str(congestion_window), detach=True)
logger.success("QPEP Running")
class PEPsalScenario(Scenario):
def __init__(self, name, testbed, benchmarks, terminal=True, gateway=False):
self.terminal = terminal
self.gateway = gateway
super().__init__(name=name, testbed=testbed, benchmarks=benchmarks)
def deploy_scenario(self, testbed_up=False):
if not testbed_up:
super().deploy_scenario()
logger.debug("Starting PEPsal Scenario")
docker_client = docker.from_env()
terminal_workstation = docker_client.containers.get(os.getenv("WS_ST_CONTAINER_NAME"))
terminal_client = docker_client.containers.get(os.getenv("ST_CONTAINER_NAME"))
logger.debug("Configuring proxy on Terminal WS")
terminal_workstation.exec_run("export http_proxy=http://"+os.getenv("PROXY_SRV_URL")+":5001")
terminal_workstation.exec_run("export https_proxy=https://"+os.getenv("PROXY_SRV_URL")+":5001")
if self.terminal and self.gateway:
logger.debug("Deploying PEPsal in Distributed Mode")
if self.terminal:
logger.debug("Deploying PEPsal on Terminal Endpoint")
terminal_client.exec_run("bash ./tmp/config/launch_pepsal.sh")
if self.gateway:
logger.debug("Deploying PEPsal on Gateway Endpoint")
docker_client_cloud = docker.DockerClient(base_url="ssh://"+os.getenv("DOCKER_REMOTE_URL"))
gateway_workstation = docker_client_cloud.containers.get(os.getenv('WS_GW_CONTAINER_NAME'))
gateway_workstation.exec_run("bash ./tmp/launch_pepsal.sh")
| 49.505682
| 227
| 0.686331
| 1,098
| 8,713
| 5.216758
| 0.134791
| 0.046089
| 0.027933
| 0.043994
| 0.826466
| 0.790154
| 0.771648
| 0.701816
| 0.686802
| 0.645251
| 0
| 0.007857
| 0.196603
| 8,713
| 175
| 228
| 49.788571
| 0.810429
| 0.029152
| 0
| 0.591837
| 0
| 0.013605
| 0.263276
| 0.043406
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088435
| false
| 0
| 0.040816
| 0
| 0.183673
| 0.040816
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e49f8eceffac2fafe9b954704fc0b67bfc2a3831
| 55
|
py
|
Python
|
mittab/libs/outround_tab_logic/helpers.py
|
DanielS6/mit-tab
|
f2b5bb609546514582697b998b8b50a66bc8a396
|
[
"MIT"
] | 9
|
2015-01-22T01:19:15.000Z
|
2017-11-01T20:09:47.000Z
|
mittab/libs/outround_tab_logic/helpers.py
|
DanielS6/mit-tab
|
f2b5bb609546514582697b998b8b50a66bc8a396
|
[
"MIT"
] | 152
|
2018-04-06T14:32:51.000Z
|
2022-02-11T22:12:53.000Z
|
mittab/libs/outround_tab_logic/helpers.py
|
DanielS6/mit-tab
|
f2b5bb609546514582697b998b8b50a66bc8a396
|
[
"MIT"
] | 13
|
2015-09-14T00:40:06.000Z
|
2018-01-24T04:05:32.000Z
|
def offset_to_quotient(offset):
    """Return the divisor for *offset* bracket levels: each level doubles it."""
    return pow(2, offset)
| 18.333333
| 31
| 0.727273
| 8
| 55
| 4.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.181818
| 55
| 2
| 32
| 27.5
| 0.822222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e4c4df0b73765b2d795ce2b333de76d60cacfc7b
| 37,744
|
py
|
Python
|
mathml/pmathml/opdict_data.py
|
ahjulstad/mathdom-python3
|
1fbb7e49d736f03db532423e057c2380ef000dd7
|
[
"MIT"
] | 1
|
2016-11-02T06:27:02.000Z
|
2016-11-02T06:27:02.000Z
|
mathml/pmathml/opdict_data.py
|
ahjulstad/mathdom-python3
|
1fbb7e49d736f03db532423e057c2380ef000dd7
|
[
"MIT"
] | null | null | null |
mathml/pmathml/opdict_data.py
|
ahjulstad/mathdom-python3
|
1fbb7e49d736f03db532423e057c2380ef000dd7
|
[
"MIT"
] | null | null | null |
# Data taken from the MathML 2.0 reference (operator dictionary).
# Raw text table, one operator entry per line, in the form
#     "<operator>" attr="value" attr="value" ...
# where <operator> is the literal operator text and the attributes are the
# default <mo> properties (form, fence, stretchy, lspace, rspace, ...).
# This module only holds the raw text; it is parsed elsewhere.
# NOTE(review): some operators appear as undecoded entity names
# (e.g. "&LeftBracketingBar;") — presumably resolved by the consumer of this
# string; verify against the parsing code.
data = '''
"(" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
")" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"[" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"]" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"{" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"}" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"”" form="postfix" fence="true" lspace="0em" rspace="0em"
"’" form="postfix" fence="true" lspace="0em" rspace="0em"
"⟨" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"&LeftBracketingBar;" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"⌈" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"⟦" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"&LeftDoubleBracketingBar;" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"⌊" form="prefix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"“" form="prefix" fence="true" lspace="0em" rspace="0em"
"‘" form="prefix" fence="true" lspace="0em" rspace="0em"
"⟩" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"&RightBracketingBar;" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"⌉" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"⟧" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"&RightDoubleBracketingBar;" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"⌋" form="postfix" fence="true" stretchy="true" lspace="0em" rspace="0em"
"&LeftSkeleton;" form="prefix" fence="true" lspace="0em" rspace="0em"
"&RightSkeleton;" form="postfix" fence="true" lspace="0em" rspace="0em"
"⁣" form="infix" separator="true" lspace="0em" rspace="0em"
"," form="infix" separator="true" lspace="0em" rspace="verythickmathspace"
"─" form="infix" stretchy="true" minsize="0" lspace="0em" rspace="0em"
"|" form="infix" stretchy="true" minsize="0" lspace="0em" rspace="0em"
";" form="infix" separator="true" lspace="0em" rspace="thickmathspace"
";" form="postfix" separator="true" lspace="0em" rspace="0em"
":=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≔" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∵" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∴" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"❘" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"//" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∷" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"&" form="prefix" lspace="0em" rspace="thickmathspace"
"&" form="postfix" lspace="thickmathspace" rspace="0em"
"*=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"-=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"+=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"/=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"->" form="infix" lspace="thickmathspace" rspace="thickmathspace"
":" form="infix" lspace="thickmathspace" rspace="thickmathspace"
".." form="postfix" lspace="mediummathspace" rspace="0em"
"..." form="postfix" lspace="mediummathspace" rspace="0em"
"∋" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⫤" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊨" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊤" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊣" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊢" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⇒" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥰" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"|" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"||" form="infix" lspace="mediummathspace" rspace="mediummathspace"
"⩔" form="infix" stretchy="true" lspace="mediummathspace" rspace="mediummathspace"
"&&" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⩓" form="infix" stretchy="true" lspace="mediummathspace" rspace="mediummathspace"
"&" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"!" form="prefix" lspace="0em" rspace="thickmathspace"
"⫬" form="prefix" lspace="0em" rspace="thickmathspace"
"∃" form="prefix" lspace="0em" rspace="thickmathspace"
"∀" form="prefix" lspace="0em" rspace="thickmathspace"
"∄" form="prefix" lspace="0em" rspace="thickmathspace"
"∈" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∉" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∌" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊏̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋢" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊐̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋣" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊂⃒" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊈" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊃⃒" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊉" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∋" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊏" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊑" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊐" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊒" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋐" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊆" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊃" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊇" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⇐" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇔" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇒" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥐" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥞" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↽" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥖" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥟" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇁" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥗" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"←" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇤" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇆" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↔" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥎" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↤" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥚" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↼" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥒" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↙" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↘" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"→" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇥" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇄" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↦" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥛" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⇀" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⥓" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"←" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"→" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"↖" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"↗" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"<" form="infix" lspace="thickmathspace" rspace="thickmathspace"
">" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"!=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"==" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"<=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
">=" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≡" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≍" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≐" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∥" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⩵" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≂" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⇌" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"≥" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋛" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≧" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪢" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≷" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⩾" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≳" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≎" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≏" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊲" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⧏" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊴" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≤" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋚" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≦" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≶" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪡" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⩽" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≲" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≫" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≪" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≢" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≭" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∦" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≠" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≂̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≯" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≱" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≧̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≫̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≹" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⩾̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≵" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≎̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≏̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋪" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⧏̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋬" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≮" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≰" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"&NotLessFullEqual;" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≪̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⩽̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≴" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪢̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪡̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊀" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪯̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋠" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"&NotPrecedesTilde;" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋫" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⧐̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋭" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊁" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪰̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⋡" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≿̸" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≁" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≄" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≇" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≉" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∤" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≺" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪯" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≼" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≾" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∷" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∝" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⇋" form="infix" stretchy="true" lspace="thickmathspace" rspace="thickmathspace"
"⊳" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⧐" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊵" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≻" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⪰" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≽" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≿" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∼" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≃" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≅" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"≈" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊥" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"∣" form="infix" lspace="thickmathspace" rspace="thickmathspace"
"⊔" form="infix" stretchy="true" lspace="mediummathspace" rspace="mediummathspace"
"⋃" form="infix" stretchy="true" lspace="mediummathspace" rspace="mediummathspace"
"⊎" form="infix" stretchy="true" lspace="mediummathspace" rspace="mediummathspace"
"-" form="infix" lspace="mediummathspace" rspace="mediummathspace"
"+" form="infix" lspace="mediummathspace" rspace="mediummathspace"
"⋂" form="infix" stretchy="true" lspace="mediummathspace" rspace="mediummathspace"
"∓" form="infix" lspace="mediummathspace" rspace="mediummathspace"
"±" form="infix" lspace="mediummathspace" rspace="mediummathspace"
"⊓" form="infix" stretchy="true" lspace="mediummathspace" rspace="mediummathspace"
"⋁" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"⊖" form="prefix" largeop="true" movablelimits="true" lspace="0em" rspace="thinmathspace"
"⊕" form="prefix" largeop="true" movablelimits="true" lspace="0em" rspace="thinmathspace"
"∑" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"⋃" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"⊎" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"lim" form="prefix" movablelimits="true" lspace="0em" rspace="thinmathspace"
"max" form="prefix" movablelimits="true" lspace="0em" rspace="thinmathspace"
"min" form="prefix" movablelimits="true" lspace="0em" rspace="thinmathspace"
"⊖" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⊕" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"∲" form="prefix" largeop="true" stretchy="true" lspace="0em" rspace="0em"
"∮" form="prefix" largeop="true" stretchy="true" lspace="0em" rspace="0em"
"∳" form="prefix" largeop="true" stretchy="true" lspace="0em" rspace="0em"
"∯" form="prefix" largeop="true" stretchy="true" lspace="0em" rspace="0em"
"∫" form="prefix" largeop="true" stretchy="true" lspace="0em" rspace="0em"
"⋓" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⋒" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"≀" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⋀" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"⊗" form="prefix" largeop="true" movablelimits="true" lspace="0em" rspace="thinmathspace"
"∐" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"∏" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"⋂" form="prefix" largeop="true" movablelimits="true" stretchy="true" lspace="0em" rspace="thinmathspace"
"∐" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⋆" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⊙" form="prefix" largeop="true" movablelimits="true" lspace="0em" rspace="thinmathspace"
"*" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⁢" form="infix" lspace="0em" rspace="0em"
"·" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⊗" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⋁" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⋀" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"⋄" form="infix" lspace="thinmathspace" rspace="thinmathspace"
"∖" form="infix" stretchy="true" lspace="thinmathspace" rspace="thinmathspace"
"/" form="infix" stretchy="true" lspace="thinmathspace" rspace="thinmathspace"
"-" form="prefix" lspace="0em" rspace="veryverythinmathspace"
"+" form="prefix" lspace="0em" rspace="veryverythinmathspace"
"∓" form="prefix" lspace="0em" rspace="veryverythinmathspace"
"±" form="prefix" lspace="0em" rspace="veryverythinmathspace"
"." form="infix" lspace="0em" rspace="0em"
"⨯" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"**" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"⊙" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"∘" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"□" form="prefix" lspace="0em" rspace="verythinmathspace"
"∇" form="prefix" lspace="0em" rspace="verythinmathspace"
"∂" form="prefix" lspace="0em" rspace="verythinmathspace"
"ⅅ" form="prefix" lspace="0em" rspace="verythinmathspace"
"ⅆ" form="prefix" lspace="0em" rspace="verythinmathspace"
"√" form="prefix" stretchy="true" lspace="0em" rspace="verythinmathspace"
"⇓" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⟸" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⟺" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⟹" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⇑" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⇕" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"↓" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⤓" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⇵" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"↧" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥡" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⇃" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥙" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥑" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥠" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"↿" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥘" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⟵" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⟷" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⟶" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥯" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥝" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⇂" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥕" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥏" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥜" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"↾" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥔" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"↓" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"↑" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"↑" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⤒" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⇅" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"↕" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"⥮" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"↥" form="infix" stretchy="true" lspace="verythinmathspace" rspace="verythinmathspace"
"^" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"<>" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"'" form="postfix" lspace="verythinmathspace" rspace="0em"
"!" form="postfix" lspace="verythinmathspace" rspace="0em"
"!!" form="postfix" lspace="verythinmathspace" rspace="0em"
"~" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"@" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"--" form="postfix" lspace="verythinmathspace" rspace="0em"
"--" form="prefix" lspace="0em" rspace="verythinmathspace"
"++" form="postfix" lspace="verythinmathspace" rspace="0em"
"++" form="prefix" lspace="0em" rspace="verythinmathspace"
"⁡" form="infix" lspace="0em" rspace="0em"
"?" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"_" form="infix" lspace="verythinmathspace" rspace="verythinmathspace"
"˘" form="postfix" accent="true" lspace="0em" rspace="0em"
"¸" form="postfix" accent="true" lspace="0em" rspace="0em"
"`" form="postfix" accent="true" lspace="0em" rspace="0em"
"˙" form="postfix" accent="true" lspace="0em" rspace="0em"
"˝" form="postfix" accent="true" lspace="0em" rspace="0em"
"&DiacriticalLeftArrow;" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"&DiacriticalLeftRightArrow;" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"&DiacriticalLeftRightVector;" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"&DiacriticalLeftVector;" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"´" form="postfix" accent="true" lspace="0em" rspace="0em"
"&DiacriticalRightArrow;" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"&DiacriticalRightVector;" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"˜" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"¨" form="postfix" accent="true" lspace="0em" rspace="0em"
"̑" form="postfix" accent="true" lspace="0em" rspace="0em"
"ˇ" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"^" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"‾" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"⏞" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"⎴" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"⏜" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"⃛" form="postfix" accent="true" lspace="0em" rspace="0em"
"_" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"⏟" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"⎵" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
"⏝" form="postfix" accent="true" stretchy="true" lspace="0em" rspace="0em"
'''
| 88.809412
| 140
| 0.58918
| 2,854
| 37,744
| 7.791521
| 0.110722
| 0.102802
| 0.203445
| 0.311193
| 0.824302
| 0.823088
| 0.489949
| 0.477942
| 0.23254
| 0.221118
| 0
| 0.006234
| 0.273235
| 37,744
| 424
| 141
| 89.018868
| 0.804418
| 0.00106
| 0
| 0
| 0
| 0.152975
| 0.999602
| 0.377938
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e4e31c34b73ba64c360c9bcc56b3e00291f3e3ab
| 156
|
py
|
Python
|
src/yellowdog_client/model/add_application_request.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/add_application_request.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/add_application_request.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass
from typing import Optional
@dataclass
class AddApplicationRequest:
    """Request payload for adding (registering) a new application."""
    # Name of the application to add — required.
    name: str
    # Free-text description of the application; defaults to None when omitted.
    description: Optional[str] = None
| 17.333333
| 37
| 0.782051
| 17
| 156
| 7.176471
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173077
| 156
| 8
| 38
| 19.5
| 0.945736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9024ff33641938c6ca0fd3c2205fe578bca5edd1
| 3,002
|
py
|
Python
|
gans/models/discriminators/conditional_discriminator.py
|
tlatkowski/gans-2.0
|
974efc5bbcea39c0a7dec9405ba4514ada6dc39c
|
[
"MIT"
] | 78
|
2019-09-25T15:09:18.000Z
|
2022-02-09T09:56:15.000Z
|
gans/models/discriminators/conditional_discriminator.py
|
tlatkowski/gans-2.0
|
974efc5bbcea39c0a7dec9405ba4514ada6dc39c
|
[
"MIT"
] | 23
|
2019-10-09T21:24:39.000Z
|
2022-03-12T00:00:53.000Z
|
gans/models/discriminators/conditional_discriminator.py
|
tlatkowski/gans-2.0
|
974efc5bbcea39c0a7dec9405ba4514ada6dc39c
|
[
"MIT"
] | 18
|
2020-01-24T13:13:57.000Z
|
2022-02-15T18:58:12.000Z
|
from easydict import EasyDict as edict
from tensorflow.python.keras import Input
from tensorflow.python.keras import Model
from tensorflow.python.keras import layers
from gans.models import model
class ConditionalDiscriminator(model.Model):
    """Class-conditional GAN discriminator.

    Scores an image together with its class label: the integer class id is
    embedded, projected to the image's spatial size, and concatenated to the
    image as an extra channel before the convolutional stack.
    """
    def __init__(
            self,
            model_parameters: edict,
    ):
        # model_parameters must provide img_height, img_width, num_channels
        # (read in define_model) — presumably also whatever model.Model needs.
        super().__init__(model_parameters)
    def define_model(self):
        """Build and return the Keras Model: (image, class_id) -> single logit."""
        input_img = Input(shape=[
            self.model_parameters.img_height,
            self.model_parameters.img_width,
            self.model_parameters.num_channels
        ])
        # Scalar integer class label per sample.
        class_id = Input(shape=[1])
        # Embed the class id (input_dim=10 hard-codes 10 classes, e.g. MNIST).
        embedded_id = layers.Embedding(input_dim=10, output_dim=50)(class_id)
        # Project the embedding to H*W values and reshape to an (H, W, 1) map
        # so it can be attached to the image as one additional channel.
        embedded_id = layers.Dense(units=input_img.shape[1] * input_img.shape[2])(embedded_id)
        embedded_id = layers.Reshape(target_shape=(input_img.shape[1], input_img.shape[2], 1))(embedded_id)
        x = layers.Concatenate(axis=3)([input_img, embedded_id])
        # Two strided conv blocks (downsample by 2 each) with LeakyReLU + dropout.
        x = layers.Conv2D(filters=64, kernel_size=(5, 5), strides=(2, 2), padding='same')(x)
        x = layers.LeakyReLU()(x)
        x = layers.Dropout(0.3)(x)
        x = layers.Conv2D(filters=128, kernel_size=(5, 5), strides=(2, 2), padding='same')(x)
        x = layers.LeakyReLU()(x)
        x = layers.Dropout(rate=0.3)(x)
        # Single unbounded logit (no sigmoid) — loss is applied elsewhere.
        x = layers.Flatten()(x)
        x = layers.Dense(units=1)(x)
        model = Model(name=self.model_name, inputs=[input_img, class_id], outputs=x)
        return model
class ConditionalDiscriminatorCifar10(model.Model):
    """Class-conditional discriminator variant tuned for CIFAR-10-sized inputs.

    Unlike ConditionalDiscriminator, the class embedding is concatenated to the
    flattened convolutional features (not to the image as a channel), and the
    conv stack uses BatchNormalization.
    """
    def __init__(
            self,
            model_parameters: edict,
    ):
        # model_parameters must provide img_height, img_width, num_channels
        # (read in define_model) — presumably also whatever model.Model needs.
        super().__init__(model_parameters)
    def define_model(self):
        """Build and return the Keras Model: (image, class_id) -> single logit."""
        input_img = Input(shape=[
            self.model_parameters.img_height,
            self.model_parameters.img_width,
            self.model_parameters.num_channels,
        ])
        # Scalar integer class label per sample.
        class_id = Input(shape=[1])
        # Embed the class id (input_dim=10 hard-codes the 10 CIFAR-10 classes),
        # project to H*W values, and flatten for later feature-level concatenation.
        embedded_id = layers.Embedding(input_dim=10, output_dim=50)(class_id)
        embedded_id = layers.Dense(units=input_img.shape[1] * input_img.shape[2])(embedded_id)
        embedded_id = layers.Flatten()(embedded_id)
        # Conv stack: one stride-1 block, then two stride-2 (downsampling) blocks,
        # each with BatchNorm and LeakyReLU(0.1).
        x = layers.Conv2D(filters=128, kernel_size=(3, 3), strides=(1, 1), padding='same')(input_img)
        x = layers.BatchNormalization(momentum=0.9)(x)
        x = layers.LeakyReLU(alpha=0.1)(x)
        x = layers.Conv2D(filters=128, kernel_size=(4, 4), strides=(2, 2), padding='same')(x)
        x = layers.BatchNormalization(momentum=0.9)(x)
        x = layers.LeakyReLU(alpha=0.1)(x)
        x = layers.Conv2D(filters=128, kernel_size=(4, 4), strides=(2, 2), padding='same')(x)
        x = layers.BatchNormalization(momentum=0.9)(x)
        x = layers.LeakyReLU(alpha=0.1)(x)
        # Join image features with the class embedding, then classify.
        x = layers.Flatten()(x)
        x = layers.Concatenate()([x, embedded_id])
        x = layers.Dense(units=512, activation='relu')(x)
        # Single unbounded logit (no sigmoid) — loss is applied elsewhere.
        x = layers.Dense(units=1)(x)
        model = Model(name=self.model_name, inputs=[input_img, class_id], outputs=x)
        return model
| 34.113636
| 107
| 0.633245
| 402
| 3,002
| 4.539801
| 0.179104
| 0.084384
| 0.074521
| 0.054795
| 0.836712
| 0.783562
| 0.761644
| 0.727123
| 0.69863
| 0.69863
| 0
| 0.034572
| 0.229181
| 3,002
| 87
| 108
| 34.505747
| 0.754105
| 0
| 0
| 0.698413
| 0
| 0
| 0.007995
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063492
| false
| 0
| 0.079365
| 0
| 0.206349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5facd093d044f47f18fc2b28a226bc2b05790aff
| 4,587
|
py
|
Python
|
test/endpoint_tests/test_graphql.py
|
boxwise/boxwise-flask
|
20ab248d73accbbe4b4edd6bc98c1c934ceb2998
|
[
"Apache-2.0"
] | 1
|
2020-03-10T08:28:26.000Z
|
2020-03-10T08:28:26.000Z
|
test/endpoint_tests/test_graphql.py
|
boxwise/boxwise-flask
|
20ab248d73accbbe4b4edd6bc98c1c934ceb2998
|
[
"Apache-2.0"
] | 25
|
2020-04-19T06:26:27.000Z
|
2020-10-13T16:57:43.000Z
|
test/endpoint_tests/test_graphql.py
|
boxwise/boxwise-flask
|
20ab248d73accbbe4b4edd6bc98c1c934ceb2998
|
[
"Apache-2.0"
] | null | null | null |
from boxwise_flask.db import db
from boxwise_flask.models.base import Base
from boxwise_flask.models.user import User
def get_base_from_graphql(id, base_query):
    """Return the first entry of *base_query* whose "id" field equals *id*.

    Raises IndexError when no entry matches (same as the original lookup).
    """
    matches = [entry for entry in base_query if entry["id"] == id]
    return matches[0]
def test_all_bases(client):
    """Verify allBases GraphQL query endpoint"""
    seeded_bases = [
        {"id": 1, "name": "oak-tree", "organisation_id": 1, "currency_name": "pound"},
        {"id": 2, "name": "chicken", "organisation_id": 1, "currency_name": "peanuts"},
        {"id": 3, "name": "sofa", "organisation_id": 1, "currency_name": "candles"},
    ]
    # Seed the database with the fixture rows.
    db.connect_db()
    for row in seeded_bases:
        Base.create(**row)
    db.close_db(None)
    query = """query {
        allBases {
            id
            organisationId
            name
            currencyName
        }
    }"""
    response = client.post("/graphql", json={"query": query})
    assert response.status_code == 200
    all_bases = response.json["data"]["allBases"]
    # GraphQL field name -> seeded model attribute name.
    field_map = {
        "id": "id",
        "organisationId": "organisation_id",
        "name": "name",
        "currencyName": "currency_name",
    }
    for expected in seeded_bases:
        actual = get_base_from_graphql(expected["id"], all_bases)
        for graphql_field, model_field in field_map.items():
            assert actual[graphql_field] == expected[model_field]
def test_base(client):
    """Verify base GraphQL query endpoint"""
    seeded_bases = [
        {"id": 1, "name": "oak-tree", "organisation_id": 1, "currency_name": "pound"},
        {"id": 2, "name": "chicken", "organisation_id": 1, "currency_name": "peanuts"},
        {"id": 3, "name": "sofa", "organisation_id": 1, "currency_name": "candles"},
    ]
    # Seed the database with the fixture rows.
    db.connect_db()
    for row in seeded_bases:
        Base.create(**row)
    db.close_db(None)
    test_id = 1
    query = f"""query Base {{
        base(id: "{test_id}") {{
            id
            organisationId
            name
            currencyName
        }}
    }}"""
    response = client.post("/graphql", json={"query": query})
    assert response.status_code == 200
    expected = get_base_from_graphql(test_id, seeded_bases)
    actual = response.json["data"]["base"]
    assert actual["id"] == expected["id"]
    assert actual["organisationId"] == expected["organisation_id"]
    assert actual["name"] == expected["name"]
    assert actual["currencyName"] == expected["currency_name"]
def test_all_users(client):
    """Verify allUsers GraphQL query endpoint"""
    db.connect_db()
    addresses = [
        "mr-anderson@matrix.co.uk",
        "hamburgerman@beef.co.uk",
        "marmalade@jam.co.uk",
    ]
    # Seed one user per address; the enumerate index doubles as the id.
    for user_id, address in enumerate(addresses):
        User.create(
            id=user_id,
            name="",
            email=address,
            usergroup_id="",
            valid_firstday="",
            valid_lastday="",
            lastlogin="",
            lastaction="",
        )
    db.close_db(None)
    query = """query {
        allUsers {
            id
            name
        }
    }"""
    response = client.post("/graphql", json={"query": query})
    print(response.json)
    assert response.status_code == 200
    assert response.json["data"]["allUsers"][0]["id"] == 0
def test_user(client):
    """Verify users GraphQL query endpoint"""
    db.connect_db()
    addresses = [
        "mr-anderson@matrix.co.uk",
        "hamburgerman@beef.co.uk",
        "marmalade@jam.co.uk",
    ]
    # Seed one user per address; the enumerate index doubles as the id.
    for user_id, address in enumerate(addresses):
        User.create(
            id=user_id,
            name="",
            email=address,
            usergroup_id="",
            valid_firstday="",
            valid_lastday="",
            lastlogin="",
            lastaction="",
        )
    db.close_db(None)
    test_id = 0
    # The email must appear double-quoted inside the GraphQL document.
    query = f"""query User {{
        user(email: "{addresses[test_id]}") {{
            id
            name
        }}
    }}"""
    response = client.post("/graphql", json={"query": query})
    assert response.status_code == 200
    assert response.json["data"]["user"]["id"] == test_id
| 29.216561
| 87
| 0.556573
| 509
| 4,587
| 4.776031
| 0.1611
| 0.064171
| 0.03949
| 0.059235
| 0.76306
| 0.7355
| 0.730564
| 0.730564
| 0.730564
| 0.711641
| 0
| 0.009088
| 0.304338
| 4,587
| 156
| 88
| 29.403846
| 0.752742
| 0.032265
| 0
| 0.689076
| 0
| 0
| 0.266916
| 0.021932
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.042017
| false
| 0
| 0.02521
| 0.008403
| 0.07563
| 0.008403
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5fba528e1dc15d850da292db0e8a0a2ab9036729
| 1,134
|
py
|
Python
|
pytest_cases/tests/cases/issues/test_issue_146.py
|
plammens/python-pytest-cases
|
a9083f2195714c49e5606b5191c6fe26d298a691
|
[
"BSD-3-Clause"
] | null | null | null |
pytest_cases/tests/cases/issues/test_issue_146.py
|
plammens/python-pytest-cases
|
a9083f2195714c49e5606b5191c6fe26d298a691
|
[
"BSD-3-Clause"
] | null | null | null |
pytest_cases/tests/cases/issues/test_issue_146.py
|
plammens/python-pytest-cases
|
a9083f2195714c49e5606b5191c6fe26d298a691
|
[
"BSD-3-Clause"
] | null | null | null |
# from pytest_cases import parametrize_with_cases, fixture
#
#
# def case_without_fixture():
# important_value = 1
# other_important_value = 2
# return important_value, other_important_value
#
#
# @parametrize_with_cases("important_value,other_important_value", cases='.')
# def test_case_without_fixture(request, important_value, other_important_value):
# # important_value and other_important_value are here no problem
# value = request.getfixturevalue('important_value').get(request)
#
# assert value == 1
# assert other_important_value == 2
#
#
# @fixture
# def some_fixture():
# return 1
#
#
# def case_with_fixture(some_fixture):
# important_value = some_fixture
# other_important_value = 2
# return important_value, other_important_value
#
#
# @parametrize_with_cases("important_value,other_important_value", cases='.')
# def test_case_with_fixture(request, important_value, other_important_value):
#
# value = request.getfixturevalue('important_value').get(request)
# assert value == important_value
# assert important_value == 1
# assert other_important_value == 2
| 30.648649
| 81
| 0.746032
| 136
| 1,134
| 5.816176
| 0.198529
| 0.424779
| 0.264223
| 0.212389
| 0.673831
| 0.673831
| 0.673831
| 0.480405
| 0.480405
| 0.323641
| 0
| 0.008333
| 0.153439
| 1,134
| 36
| 82
| 31.5
| 0.815625
| 0.934744
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5fcf8fe2e022d585674a3d86932c36f48fe90945
| 40
|
py
|
Python
|
Meus exemplos/conversor.py
|
josevini/python
|
45cde6d0ae8310b1d8ebb30ae1dd17c0ad0dd02a
|
[
"MIT"
] | null | null | null |
Meus exemplos/conversor.py
|
josevini/python
|
45cde6d0ae8310b1d8ebb30ae1dd17c0ad0dd02a
|
[
"MIT"
] | null | null | null |
Meus exemplos/conversor.py
|
josevini/python
|
45cde6d0ae8310b1d8ebb30ae1dd17c0ad0dd02a
|
[
"MIT"
] | null | null | null |
from func import conversor

# Run the converter only when this file is executed as a script,
# not as a side effect of being imported.
if __name__ == "__main__":
    conversor()
| 10
| 26
| 0.8
| 5
| 40
| 6.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 40
| 3
| 27
| 13.333333
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3963c2443c4421f4d9b778f0e8aefbe85043bd87
| 9,173
|
py
|
Python
|
src/models/mimic/resnet_layer.py
|
jsiloto/adaptive-cob
|
eb38e3b52c4927e3ac0a897142ad26fbc4eb82de
|
[
"MIT"
] | 1
|
2021-09-15T02:32:57.000Z
|
2021-09-15T02:32:57.000Z
|
src/models/mimic/resnet_layer.py
|
jsiloto/adaptive-cob
|
eb38e3b52c4927e3ac0a897142ad26fbc4eb82de
|
[
"MIT"
] | null | null | null |
src/models/mimic/resnet_layer.py
|
jsiloto/adaptive-cob
|
eb38e3b52c4927e3ac0a897142ad26fbc4eb82de
|
[
"MIT"
] | 2
|
2021-11-20T13:17:06.000Z
|
2022-03-08T13:42:52.000Z
|
from torch import nn
from models.slimmable.slimmable_ops import USConv2d, USBatchNorm2d
from models.ext.classifier import Ext4ResNet
from models.mimic.base import BottleneckBase4Ext, ExtEncoder, BottleneckIdentity
class Bottleneck4SmallResNet(BottleneckBase4Ext):
    """Bottleneck (encoder/decoder) for small ResNet backbones, with an
    optional auxiliary "ext" classifier attached to the encoder."""

    def __init__(self, bottleneck_channel, ext_config, bottleneck_transformer):
        # Encoder: compress 64-channel features down to `bottleneck_channel`.
        encoder_layers = [
            nn.Conv2d(64, 64, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 256, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 64, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, bottleneck_channel, kernel_size=2, padding=1, bias=False),
        ]
        # Decoder: expand the bottleneck back to 64 channels.
        decoder_layers = [
            nn.BatchNorm2d(bottleneck_channel),
            nn.ReLU(inplace=True),
            nn.Conv2d(bottleneck_channel, 64, kernel_size=2, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 128, kernel_size=2, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            nn.Conv2d(128, 64, kernel_size=2, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 64, kernel_size=2, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
        ]
        # Attach the auxiliary classifier only when an ext config is given.
        ext_classifier = Ext4ResNet(64) if ext_config is not None else None
        wrapped_encoder = ExtEncoder(nn.Sequential(*encoder_layers), ext_classifier, ext_config)
        super().__init__(
            encoder=wrapped_encoder,
            decoder=nn.Sequential(*decoder_layers),
            bottleneck_transformer=bottleneck_transformer,
        )

    def get_ext_classifier(self):
        """Expose the encoder's auxiliary (ext) classifier."""
        return self.encoder.get_ext_classifier()
class Bottleneck4LargeResNet(BottleneckBase4Ext):
    """Bottleneck (encoder/decoder) for large ResNet backbones, with an
    optional auxiliary "ext" classifier attached to the encoder."""

    def __init__(self, bottleneck_channel, ext_config, bottleneck_transformer):
        # Encoder: compress 64-channel features down to `bottleneck_channel`.
        encoder_layers = [
            nn.Conv2d(64, 64, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 256, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 64, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, bottleneck_channel, kernel_size=2, padding=1, bias=False),
        ]
        # Decoder: expand the bottleneck back up to 256 channels.
        decoder_layers = [
            nn.BatchNorm2d(bottleneck_channel),
            nn.ReLU(inplace=True),
            nn.Conv2d(bottleneck_channel, 64, kernel_size=2, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 128, kernel_size=2, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            nn.Conv2d(128, 256, kernel_size=2, bias=False),
            nn.BatchNorm2d(256),
            nn.Conv2d(256, 256, kernel_size=2, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
        ]
        # Attach the auxiliary classifier only when an ext config is given.
        ext_classifier = Ext4ResNet(64) if ext_config is not None else None
        wrapped_encoder = ExtEncoder(nn.Sequential(*encoder_layers), ext_classifier, ext_config)
        super().__init__(
            encoder=wrapped_encoder,
            decoder=nn.Sequential(*decoder_layers),
            bottleneck_transformer=bottleneck_transformer,
        )

    def get_ext_classifier(self):
        """Expose the encoder's auxiliary (ext) classifier."""
        return self.encoder.get_ext_classifier()
class SlimmableBottleneck4LargeResNet(BottleneckBase4Ext):
    """Large-ResNet bottleneck whose bottleneck layer itself is slimmable:
    the last encoder conv and the first decoder block use US (universally
    slimmable) variants driven by `width_mult_list`."""

    def __init__(self, width_mult_list, ext_config, bottleneck_transformer):
        # Fixed bottleneck width; slimming happens via width_mult_list.
        bottleneck_channel = 12
        encoder_layers = [
            nn.Conv2d(64, 64, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 256, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 64, kernel_size=2, padding=1, bias=False),
            nn.BatchNorm2d(64),
            # Only the output side of this conv is slimmable.
            USConv2d(64, bottleneck_channel, kernel_size=2, padding=1, bias=False, slimmable_input=False),
        ]
        decoder_layers = [
            USBatchNorm2d(bottleneck_channel, width_mult_list),
            nn.ReLU(inplace=True),
            # Only the input side of this conv is slimmable.
            USConv2d(bottleneck_channel, 64, kernel_size=2, bias=False, slimmable_output=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 128, kernel_size=2, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            nn.Conv2d(128, 256, kernel_size=2, bias=False),
            nn.BatchNorm2d(256),
            nn.Conv2d(256, 256, kernel_size=2, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
        ]
        # Attach the auxiliary classifier only when an ext config is given.
        ext_classifier = Ext4ResNet(64) if ext_config is not None else None
        wrapped_encoder = ExtEncoder(nn.Sequential(*encoder_layers), ext_classifier, ext_config)
        super().__init__(
            encoder=wrapped_encoder,
            decoder=nn.Sequential(*decoder_layers),
            bottleneck_transformer=bottleneck_transformer,
        )

    def get_ext_classifier(self):
        """Expose the encoder's auxiliary (ext) classifier."""
        return self.encoder.get_ext_classifier()
class FullySlimmableBottleneck4LargeResNet(BottleneckBase4Ext):
    """Large-ResNet bottleneck whose entire encoder uses US (universally
    slimmable) conv/batch-norm layers driven by `width_mult_list`; the decoder
    is slimmable only at its bottleneck-facing end."""

    def __init__(self, width_mult_list, ext_config, bottleneck_transformer):
        # Fixed bottleneck width; slimming happens via width_mult_list.
        bottleneck_channel = 12
        encoder_layers = [
            USConv2d(64, 64, kernel_size=2, padding=1, bias=False),
            USBatchNorm2d(64, width_mult_list),
            USConv2d(64, 256, kernel_size=2, padding=1, bias=False),
            USBatchNorm2d(256, width_mult_list),
            nn.ReLU(inplace=True),
            USConv2d(256, 64, kernel_size=2, padding=1, bias=False),
            USBatchNorm2d(64, width_mult_list),
            USConv2d(64, bottleneck_channel, kernel_size=2, padding=1, bias=False),
        ]
        decoder_layers = [
            USBatchNorm2d(bottleneck_channel, width_mult_list),
            nn.ReLU(inplace=True),
            # Only the input side of this conv is slimmable.
            USConv2d(bottleneck_channel, 64, kernel_size=2, bias=False, slimmable_output=False),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 128, kernel_size=2, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            nn.Conv2d(128, 256, kernel_size=2, bias=False),
            nn.BatchNorm2d(256),
            nn.Conv2d(256, 256, kernel_size=2, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
        ]
        # Attach the auxiliary classifier only when an ext config is given.
        ext_classifier = Ext4ResNet(64) if ext_config is not None else None
        wrapped_encoder = ExtEncoder(nn.Sequential(*encoder_layers), ext_classifier, ext_config)
        super().__init__(
            encoder=wrapped_encoder,
            decoder=nn.Sequential(*decoder_layers),
            bottleneck_transformer=bottleneck_transformer,
        )

    def get_ext_classifier(self):
        """Expose the encoder's auxiliary (ext) classifier."""
        return self.encoder.get_ext_classifier()
class FullySlimmableLayer0(nn.Module):
    """Slimmable replacement for a ResNet stem (conv1 / bn1 / relu / maxpool)."""

    def __init__(self, width_mult_list):
        super().__init__()
        self.inplanes = 64
        # The stem conv input (RGB) is fixed, so only its output is slimmable.
        self.conv1 = USConv2d(
            3, self.inplanes, kernel_size=7, stride=2,
            padding=3, bias=False, slimmable_input=False,
        )
        self.bn1 = USBatchNorm2d(self.inplanes, width_mult_list)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

    def forward(self, x):
        # Apply the stem modules in their fixed order.
        for stage in (self.conv1, self.bn1, self.relu, self.maxpool):
            x = stage(x)
        return x
def get_mimic_layers(backbone_name, backbone_config, bottleneck_transformer=None):
    """Build the mimic (student) layers described by `backbone_config`.

    Args:
        backbone_name: backbone identifier, e.g. 'custom_resnet50'.
        backbone_config: dict with a 'params' entry holding optional
            'layer0' / 'layer1' sub-configs, and optionally 'ext_config'.
        bottleneck_transformer: forwarded to the bottleneck constructors.

    Returns:
        Tuple (layer0, layer1, layer2, layer3, layer4); layers without a
        configuration stay None (layers 2-4 are never built here).

    Raises:
        ValueError: if a configured layer name is not recognized.
    """
    layer0, layer1, layer2, layer3, layer4 = None, None, None, None, None
    backbone_params_config = backbone_config['params']
    layer0_config = backbone_params_config.get('layer0', None)
    layer1_config = backbone_params_config.get('layer1', None)
    if layer0_config is not None:
        layer0_name = layer0_config['name']
        if layer0_name == 'FullySlimmableLayer0':
            layer0 = FullySlimmableLayer0(layer0_config['width_mult_list'])
        else:
            # Bug fix: the message previously referred to 'layer1_name'.
            raise ValueError('layer0_name `{}` is not expected'.format(layer0_name))
    if layer1_config is not None:
        layer1_name = layer1_config['name']
        ext_config = backbone_config.get('ext_config', None)
        small_backbones = {'custom_resnet18', 'custom_resnet34'}
        large_backbones = {'custom_resnet50', 'custom_resnet101', 'custom_resnet152'}
        if layer1_name == 'Bottleneck4SmallResNet' and backbone_name in small_backbones:
            # Bug fix: this branch previously constructed Bottleneck4LargeResNet
            # even though the configured name is Bottleneck4SmallResNet.
            layer1 = Bottleneck4SmallResNet(layer1_config['bottleneck_channel'], ext_config, bottleneck_transformer)
        elif layer1_name == 'Bottleneck4LargeResNet' and backbone_name in large_backbones:
            layer1 = Bottleneck4LargeResNet(layer1_config['bottleneck_channel'], ext_config, bottleneck_transformer)
        elif layer1_name == 'SlimmableBottleneck4LargeResNet' and backbone_name in large_backbones:
            layer1 = SlimmableBottleneck4LargeResNet(layer1_config['width_mult_list'], ext_config, bottleneck_transformer)
        elif layer1_name == 'FullySlimmableBottleneck4LargeResNet' and backbone_name in large_backbones:
            layer1 = FullySlimmableBottleneck4LargeResNet(layer1_config['width_mult_list'], ext_config, bottleneck_transformer)
        elif layer1_name == 'BottleneckIdentity' and backbone_name in large_backbones:
            layer1 = BottleneckIdentity(backbone_config, bottleneck_transformer)
        else:
            raise ValueError('layer1_name `{}` is not expected'.format(layer1_name))
    return layer0, layer1, layer2, layer3, layer4
| 47.041026
| 127
| 0.652349
| 1,062
| 9,173
| 5.419962
| 0.089454
| 0.059069
| 0.061154
| 0.087908
| 0.775712
| 0.743919
| 0.73975
| 0.73975
| 0.73089
| 0.707957
| 0
| 0.064799
| 0.241252
| 9,173
| 194
| 128
| 47.283505
| 0.762213
| 0
| 0
| 0.639535
| 0
| 0
| 0.059741
| 0.012101
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063953
| false
| 0
| 0.023256
| 0.023256
| 0.151163
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
39b24716255eac18d3878079c4357bcddca19c59
| 768
|
py
|
Python
|
tests/test_punctuation_spacing_correction.py
|
KyleMaclean/Poetry-Generator
|
d69e30636403194668e22f87d6aecef24e1aba36
|
[
"MIT"
] | null | null | null |
tests/test_punctuation_spacing_correction.py
|
KyleMaclean/Poetry-Generator
|
d69e30636403194668e22f87d6aecef24e1aba36
|
[
"MIT"
] | null | null | null |
tests/test_punctuation_spacing_correction.py
|
KyleMaclean/Poetry-Generator
|
d69e30636403194668e22f87d6aecef24e1aba36
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from agents import punctuation_spacing_correction
class Test(TestCase):
    """Tests for the punctuation_spacing_correction agent."""

    def test_do(self):
        """Spaces before commas are removed across multiple lines."""
        raw = ['there was a mouse , cat , dog and flower', 'on the highest, weirdest , most abominable place']
        corrected = punctuation_spacing_correction(raw)
        self.assertEqual(['there was a mouse, cat, dog and flower', 'on the highest, weirdest, most abominable place'], corrected)

    # reported
    def test_do2(self):
        """Spaces before exclamation marks are removed as well."""
        raw = ['there was a mouse , cat , dog and flower !']
        corrected = punctuation_spacing_correction(raw)
        self.assertEqual(['there was a mouse, cat, dog and flower!'], corrected)
| 40.421053
| 118
| 0.695313
| 98
| 768
| 5.285714
| 0.346939
| 0.07722
| 0.100386
| 0.108108
| 0.779923
| 0.779923
| 0.779923
| 0.779923
| 0.779923
| 0.779923
| 0
| 0.001661
| 0.216146
| 768
| 18
| 119
| 42.666667
| 0.858804
| 0.010417
| 0
| 0.307692
| 0
| 0
| 0.335092
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.153846
| false
| 0
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
39b2fa1a15410fe423799e60a080a0f429f16d9f
| 305
|
py
|
Python
|
tests/test_dds_contrib.py
|
clayne/bethesda-structs
|
596e2f205a7a06ec8fca367f3b4d4cc6e38e5d5e
|
[
"MIT"
] | 12
|
2018-01-12T11:08:56.000Z
|
2021-12-21T18:30:16.000Z
|
tests/test_dds_contrib.py
|
clayne/bethesda-structs
|
596e2f205a7a06ec8fca367f3b4d4cc6e38e5d5e
|
[
"MIT"
] | 14
|
2018-04-25T18:56:23.000Z
|
2020-12-23T08:41:36.000Z
|
tests/test_dds_contrib.py
|
clayne/bethesda-structs
|
596e2f205a7a06ec8fca367f3b4d4cc6e38e5d5e
|
[
"MIT"
] | 3
|
2019-01-20T03:52:04.000Z
|
2019-09-13T21:16:20.000Z
|
# Copyright (c) 2018 Stephen Bunn <stephen@bunn.io>
# MIT License <https://choosealicense.com/licenses/mit/>
from bethesda_structs.contrib.dds import MAKEFOURCC
from construct import Bytes, Int32ul
def test_MAKEFOURCC(makefourcc_pair):
    """MAKEFOURCC over the fixture's characters must equal the packed code."""
    characters, expected_code = makefourcc_pair[0], makefourcc_pair[-1]
    assert MAKEFOURCC(*characters) == expected_code
| 30.5
| 65
| 0.783607
| 40
| 305
| 5.85
| 0.7
| 0.179487
| 0.205128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.108197
| 305
| 9
| 66
| 33.888889
| 0.830882
| 0.340984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
39e3bad88cc477aa797807db2e76003340c4869e
| 277
|
py
|
Python
|
mwevents/sources/source.py
|
mediawiki-utilities/python-mwevents
|
cf53334b54b8abae6e5d345b200e5944aff9e5ba
|
[
"MIT"
] | 1
|
2015-10-14T18:28:41.000Z
|
2015-10-14T18:28:41.000Z
|
mwevents/sources/source.py
|
halfak/MediaWiki-events
|
cf53334b54b8abae6e5d345b200e5944aff9e5ba
|
[
"MIT"
] | null | null | null |
mwevents/sources/source.py
|
halfak/MediaWiki-events
|
cf53334b54b8abae6e5d345b200e5944aff9e5ba
|
[
"MIT"
] | 1
|
2018-09-19T11:14:51.000Z
|
2018-09-19T11:14:51.000Z
|
class RCListener:
    """Abstract iterable of recent-changes events; subclasses implement __iter__."""

    def __iter__(self):
        raise NotImplementedError()
class Source:
    """Abstract event source: concrete subclasses provide a live listener
    and a bounded historical query."""

    def listener(self, *args, **kwargs):
        """Return a listener for live events (abstract)."""
        raise NotImplementedError()

    def query(self, start, end, *args, types=None, **kwargs):
        """Return events between *start* and *end* (abstract)."""
        raise NotImplementedError()
| 19.785714
| 61
| 0.638989
| 27
| 277
| 6.407407
| 0.592593
| 0.416185
| 0.346821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.249097
| 277
| 13
| 62
| 21.307692
| 0.831731
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
f2ddf203ca4e06a0380daafdab644feb8b26556b
| 30
|
py
|
Python
|
uliweb/__main__.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 202
|
2015-01-12T08:10:48.000Z
|
2021-11-08T09:04:32.000Z
|
uliweb/__main__.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 30
|
2015-01-01T09:07:17.000Z
|
2021-06-03T12:58:45.000Z
|
uliweb/__main__.py
|
timgates42/uliweb
|
80c0459c5e5d257b665eb2e1d0b5f68ad55c42f1
|
[
"BSD-2-Clause"
] | 58
|
2015-01-12T03:28:54.000Z
|
2022-01-14T01:58:08.000Z
|
from manage import main

# Execute the CLI entry point only when run as a script / `python -m uliweb`,
# not as a side effect of being imported.
if __name__ == "__main__":
    main()
| 15
| 23
| 0.8
| 5
| 30
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 2
| 24
| 15
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
84146e445f81e333c69a7f0b7b5036c35d0914c8
| 210
|
py
|
Python
|
src/sensor/__init__.py
|
mushroom-x/gamepad_python
|
fa1893c6f094521f769d8945ced699f39f102dbf
|
[
"MIT"
] | 2
|
2022-02-11T03:14:01.000Z
|
2022-02-11T03:17:42.000Z
|
src/sensor/__init__.py
|
JACKDONG-blue/gamepad_python
|
22dc9f537bbee584f37eb3693ae81148a5d29c6a
|
[
"MIT"
] | 1
|
2022-02-10T18:49:25.000Z
|
2022-02-10T18:49:25.000Z
|
src/sensor/__init__.py
|
JACKDONG-blue/gamepad_python
|
22dc9f537bbee584f37eb3693ae81148a5d29c6a
|
[
"MIT"
] | 1
|
2022-02-11T02:54:10.000Z
|
2022-02-11T02:54:10.000Z
|
from .sensor import Sensor
from .button import Button, ButtonValue, ButtonEventFlag
from .cross_button import CrossButtonSingle, CrossButtonSingleValue, CrossButton
from .joystick import JoyStickAxis, JoyStick
| 42
| 80
| 0.857143
| 22
| 210
| 8.136364
| 0.545455
| 0.134078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 210
| 4
| 81
| 52.5
| 0.94709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
841eeb953e1ffdd85066f62645f6bfbdd4cfe818
| 82
|
py
|
Python
|
datatest/__past__/api_dev1.py
|
drewdolan/datatest
|
1c168739f84328043c7f0be7cf25bb8e23cc259c
|
[
"Apache-2.0"
] | 277
|
2016-05-12T13:22:49.000Z
|
2022-03-11T00:18:32.000Z
|
datatest/__past__/api_dev1.py
|
drewdolan/datatest
|
1c168739f84328043c7f0be7cf25bb8e23cc259c
|
[
"Apache-2.0"
] | 57
|
2016-05-18T01:03:32.000Z
|
2022-02-17T13:48:43.000Z
|
datatest/__past__/api_dev1.py
|
drewdolan/datatest
|
1c168739f84328043c7f0be7cf25bb8e23cc259c
|
[
"Apache-2.0"
] | 16
|
2016-05-22T11:35:19.000Z
|
2021-12-01T19:41:42.000Z
|
"""alias for api06"""
from __future__ import absolute_import
from .api06 import *
| 20.5
| 38
| 0.768293
| 11
| 82
| 5.272727
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056338
| 0.134146
| 82
| 3
| 39
| 27.333333
| 0.760563
| 0.182927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
845d3a90f6d9f07f14f6c268877e5f2700b45971
| 150
|
py
|
Python
|
inputflow/utils.py
|
kverdecia/dj-input-flow
|
10060293dda783a796acdd475ea6b0aab2681499
|
[
"MIT"
] | null | null | null |
inputflow/utils.py
|
kverdecia/dj-input-flow
|
10060293dda783a796acdd475ea6b0aab2681499
|
[
"MIT"
] | 4
|
2021-10-15T01:51:14.000Z
|
2021-10-21T05:22:30.000Z
|
inputflow/utils.py
|
kverdecia/dj-input-flow
|
10060293dda783a796acdd475ea6b0aab2681499
|
[
"MIT"
] | null | null | null |
from collections import OrderedDict
class Utils:
    """Namespace for small dictionary helpers."""

    @staticmethod
    def write_to_dict(dictionary, path, value):
        """Store *value* in *dictionary* under the key *path* (in place)."""
        dictionary[path] = value
| 18.75
| 47
| 0.72
| 17
| 150
| 6.235294
| 0.823529
| 0.264151
| 0.358491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213333
| 150
| 7
| 48
| 21.428571
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
ffdebdaa179fa5751935d83c1a93cb03f9748edf
| 148
|
py
|
Python
|
lib/sequentia/classifiers/hmm/topologies/__init__.py
|
Prhmma/sequentia
|
9d15fc494e4e932174e9c321b6929b624bbccfc6
|
[
"MIT"
] | 32
|
2019-12-31T15:53:47.000Z
|
2022-03-31T12:40:40.000Z
|
lib/sequentia/classifiers/hmm/topologies/__init__.py
|
Prhmma/sequentia
|
9d15fc494e4e932174e9c321b6929b624bbccfc6
|
[
"MIT"
] | 25
|
2020-06-01T18:32:47.000Z
|
2022-01-22T13:15:36.000Z
|
lib/sequentia/classifiers/hmm/topologies/__init__.py
|
Prhmma/sequentia
|
9d15fc494e4e932174e9c321b6929b624bbccfc6
|
[
"MIT"
] | 6
|
2019-12-31T15:54:26.000Z
|
2022-01-23T07:09:28.000Z
|
from .topology import _Topology
from .left_right import _LeftRightTopology
from .linear import _LinearTopology
from .ergodic import _ErgodicTopology
| 37
| 42
| 0.871622
| 17
| 148
| 7.294118
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101351
| 148
| 4
| 43
| 37
| 0.932331
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
081422200dc38069b0326bab5a25ded94e8ab6bb
| 44
|
py
|
Python
|
ansible_runner/tests/__init__.py
|
smk4664/nautobot-plugin-ansible-runner
|
14e3c57e9d4b21abe9bf81484fc3e26114fc7056
|
[
"Apache-2.0"
] | null | null | null |
ansible_runner/tests/__init__.py
|
smk4664/nautobot-plugin-ansible-runner
|
14e3c57e9d4b21abe9bf81484fc3e26114fc7056
|
[
"Apache-2.0"
] | null | null | null |
ansible_runner/tests/__init__.py
|
smk4664/nautobot-plugin-ansible-runner
|
14e3c57e9d4b21abe9bf81484fc3e26114fc7056
|
[
"Apache-2.0"
] | null | null | null |
"""Unit tests for ansible_runner plugin."""
| 22
| 43
| 0.727273
| 6
| 44
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 44
| 1
| 44
| 44
| 0.794872
| 0.840909
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0817f0bc7c1cc897208b5396e7bf5394cef5533a
| 80
|
py
|
Python
|
src/replay/__main__.py
|
karvla/replay
|
162b01581b0ecc1a7f726ec17c62a87204f81885
|
[
"MIT"
] | 5
|
2020-12-26T20:24:47.000Z
|
2021-09-04T22:32:54.000Z
|
src/replay/__main__.py
|
karvla/replay
|
162b01581b0ecc1a7f726ec17c62a87204f81885
|
[
"MIT"
] | null | null | null |
src/replay/__main__.py
|
karvla/replay
|
162b01581b0ecc1a7f726ec17c62a87204f81885
|
[
"MIT"
] | null | null | null |
from replay.main import make_video
# Entry-point guard: build the video only when executed as a script
# (e.g. `python -m replay`), not when this module is imported.
if __name__ == "__main__":
    make_video()
| 16
| 34
| 0.725
| 11
| 80
| 4.363636
| 0.727273
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 80
| 4
| 35
| 20
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f24e2f0e4807a75db3cdc27c47dee6c001098ca3
| 2,473
|
py
|
Python
|
src/mainapp.py
|
mattheuslee/PyPlayer
|
0cccff7130be40e3f547b7287e00cbe29e9a5796
|
[
"MIT"
] | null | null | null |
src/mainapp.py
|
mattheuslee/PyPlayer
|
0cccff7130be40e3f547b7287e00cbe29e9a5796
|
[
"MIT"
] | null | null | null |
src/mainapp.py
|
mattheuslee/PyPlayer
|
0cccff7130be40e3f547b7287e00cbe29e9a5796
|
[
"MIT"
] | null | null | null |
import sys
import time
import numpy as np
from ai.basicgoai import BasicGoAi
from game.basicgo import BasicGo
from game.player import Player
class MainApp:
    """Console driver for BasicGo matches between humans and/or AIs.

    All runners were previously plain functions defined in the class body
    (no `self`, no decorator): they worked when called via the class but
    broke when called on an instance. They are now proper staticmethods.
    """

    @staticmethod
    def human_human_basicgo():
        """Run a two-human game; moves are read from stdin as `i j` pairs."""
        basicGo = BasicGo()
        player = -1
        np.set_printoptions(linewidth = 100, formatter = { "int_kind": lambda x: "%3d" % x})
        while not basicGo.gameOver():
            print(basicGo.board.tiles)
            print("Current turn: " + str(basicGo.turn) + " Current score: " + str(basicGo.score))
            i, j = map(int, input().split())
            basicGo.set(i, j, player)
            # Alternate turns between the two human players.
            player = Player.otherPlayer(player)
        print("Winner is " + str(basicGo.winner()))

    @staticmethod
    def human_ai_basicgo():
        """Run a human-vs-AI game; the human moves first each round."""
        basicGo = BasicGo()
        player = -1
        basicGoAi = BasicGoAi(1)
        np.set_printoptions(linewidth = 100, formatter = { "int_kind": lambda x: "%3d" % x})
        while not basicGo.gameOver():
            print(basicGo.board.tiles)
            print("Current turn: " + str(basicGo.turn) + " Current score: " + str(basicGo.score))
            i, j = map(int, input().split())
            basicGo.set(i, j, player)
            # The AI replies immediately with its own move.
            i, j = basicGoAi.getMove(basicGo)
            basicGo.set(i, j, Player.otherPlayer(player))
        print("Winner is " + str(basicGo.winner()))

    @staticmethod
    def ai_ai_basicgo():
        """Run an AI-vs-AI game, printing the board after each move."""
        basicGo = BasicGo()
        player = -1
        basicGoAi = BasicGoAi(-1)
        basicGoAi2 = BasicGoAi(1)
        np.set_printoptions(linewidth = 100, formatter = { "int_kind": lambda x: "%3d" % x})
        while not basicGo.gameOver():
            i, j = basicGoAi.getMove(basicGo)
            basicGo.set(i, j, player)
            print(basicGo.board.tiles)
            print("Current turn: " + str(basicGo.turn) + " Current score: " + str(basicGo.score) + "\n\n")
            # Brief pause so the board updates are watchable.
            time.sleep(0.1)
            i, j = basicGoAi2.getMove(basicGo)
            basicGo.set(i, j, Player.otherPlayer(player))
            print(basicGo.board.tiles)
            print("Current turn: " + str(basicGo.turn) + " Current score: " + str(basicGo.score) + "\n\n")
            time.sleep(0.1)
        print("Winner is " + str(basicGo.winner()))
if __name__ == "__main__":
    # Map CLI mode strings to the corresponding game runner.
    modes = {
        "hhbg": MainApp.human_human_basicgo,
        "habg": MainApp.human_ai_basicgo,
        "aabg": MainApp.ai_ai_basicgo,
    }
    if len(sys.argv) < 2:
        print("Error, type of game must be given")
    elif sys.argv[1] in modes:
        modes[sys.argv[1]]()
    else:
        # Bug fix: an unrecognized mode was previously ignored silently.
        print("Error, unknown type of game: " + sys.argv[1])
| 38.640625
| 106
| 0.574606
| 299
| 2,473
| 4.665552
| 0.220736
| 0.078853
| 0.039427
| 0.043011
| 0.781362
| 0.734767
| 0.713978
| 0.713978
| 0.713978
| 0.650896
| 0
| 0.015828
| 0.284674
| 2,473
| 63
| 107
| 39.253968
| 0.772753
| 0
| 0
| 0.586207
| 0
| 0
| 0.098666
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051724
| false
| 0
| 0.103448
| 0
| 0.172414
| 0.258621
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f26c522481525d0e28f6deb7415d1684693ae444
| 329
|
py
|
Python
|
speedyfx/externals/__init__.py
|
rth/py-speedyfx
|
836251b6903ef1070e5abdd361bcb422175d391f
|
[
"BSD-3-Clause"
] | 4
|
2016-09-17T12:38:13.000Z
|
2021-08-04T13:16:27.000Z
|
speedyfx/externals/__init__.py
|
rth/py-speedyfx
|
836251b6903ef1070e5abdd361bcb422175d391f
|
[
"BSD-3-Clause"
] | 7
|
2016-09-17T12:05:19.000Z
|
2016-09-26T11:26:51.000Z
|
speedyfx/externals/__init__.py
|
rth/py-speedyfx
|
836251b6903ef1070e5abdd361bcb422175d391f
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Prefer scikit-learn's implementations when it is installed; otherwise
# fall back to the locally vendored copies in sklearn_base.
try:
    from sklearn.feature_extraction.text import VectorizerMixin
    from sklearn.base import BaseEstimator
except ImportError:
    from .sklearn_base import BaseEstimator, VectorizerMixin
| 25.307692
| 63
| 0.805471
| 38
| 329
| 6.552632
| 0.526316
| 0.120482
| 0.192771
| 0.168675
| 0.273092
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003559
| 0.145897
| 329
| 12
| 64
| 27.416667
| 0.882562
| 0.06383
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.875
| 0
| 0.875
| 0.125
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f2732d792a6651af8b632e90b3f3b4d33bf2ec5b
| 6,568
|
py
|
Python
|
hlib/tests/test_graph/test_graph.py
|
hlatourette/pyprep
|
0adf87141457dc8c01984e5fe06a43475555972c
|
[
"MIT"
] | 1
|
2018-03-14T05:10:28.000Z
|
2018-03-14T05:10:28.000Z
|
hlib/tests/test_graph/test_graph.py
|
hlatourette/pyprep
|
0adf87141457dc8c01984e5fe06a43475555972c
|
[
"MIT"
] | 7
|
2018-04-12T04:54:36.000Z
|
2018-08-04T01:33:02.000Z
|
hlib/tests/test_graph/test_graph.py
|
hlatourette/study
|
0adf87141457dc8c01984e5fe06a43475555972c
|
[
"MIT"
] | null | null | null |
import unittest
from graph.graph import Graph
class TestStack(unittest.TestCase):
    """Unit tests for graph.graph.Graph.

    NOTE(review): the class name says "Stack" but every case exercises
    Graph — presumably a leftover name; consider renaming.
    """
    def setUp(self):
        """Build an undirected, a directed, and an attribute-weighted graph."""
        # Edge-attribute key used by the weighted fixture below.
        self.attr_key = 'w'
        # Undirected fixture: mirrored edges are added by Graph itself.
        graph_adj_list = {
            0: {1:{}, 2:{}, 3:{}},
            1: {4:{}, 5:{}},
            2: {5:{}},
            3: {6:{}},
            4: {6:{}},
            5: {7:{}},
            6: {6:{}}
        }
        # Directed fixture: adjacency is kept exactly as written here.
        digraph_adj_list = {
            0: {1:{}, 2:{}, 3:{}},
            1: {0:{}, 4:{}, 5:{}},
            2: {5:{}},
            3: {0:{}, 6:{}},
            4: {1:{}, 6:{}},
            5: {7:{}},
            6: {},
            7: {7:{}}
        }
        # Undirected fixture with per-edge attribute dicts ({'w': weight}).
        graph_adj_list_attr = {
            0: {1:{self.attr_key: 1.0}, 2:{self.attr_key: 0.5}, 3:{self.attr_key: 9.7}},
            1: {4:{self.attr_key: 4.2}, 5:{self.attr_key: 4.3}},
            2: {5:{self.attr_key: 8.8}},
            3: {6:{self.attr_key: 1.1}},
            4: {6:{self.attr_key: 4.3}},
            5: {7:{self.attr_key: 0.1}},
            6: {6:{self.attr_key: 9.7}}
        }
        self.graph = Graph[int](graph_adj_list, directed=False)
        self.digraph = Graph[int](digraph_adj_list, directed=True)
        self.graph_attr = Graph[int](graph_adj_list_attr, directed=False)
    def test_construct_graph(self):
        """Undirected construction mirrors every edge in both directions."""
        self.assertEqual(self.graph[0], {1: {}, 2: {}, 3: {}})
        self.assertEqual(self.graph[1], {0: {}, 4: {}, 5: {}})
        self.assertEqual(self.graph[2], {0: {}, 5: {}})
        self.assertEqual(self.graph[3], {0: {}, 6: {}})
        self.assertEqual(self.graph[4], {1: {}, 6: {}})
        self.assertEqual(self.graph[5], {1: {}, 2: {}, 7: {}})
        self.assertEqual(self.graph[6], {3: {}, 4: {}, 6: {}})
        self.assertEqual(self.graph[7], {5: {}})
    def test_construct_digraph(self):
        """Directed construction keeps the adjacency exactly as given."""
        self.assertEqual(self.digraph[0], {1:{}, 2:{}, 3:{}})
        self.assertEqual(self.digraph[1], {0:{}, 4:{}, 5:{}})
        self.assertEqual(self.digraph[2], {5:{}})
        self.assertEqual(self.digraph[3], {0:{}, 6:{}})
        self.assertEqual(self.digraph[4], {1:{}, 6:{}})
        self.assertEqual(self.digraph[5], {7:{}})
        self.assertEqual(self.digraph[6], {})
        self.assertEqual(self.digraph[7], {7:{}})
    def test_construct_graph_attributes(self):
        """Mirrored undirected edges carry the same attribute values."""
        self.assertEqual(self.graph_attr[0], {1:{self.attr_key: 1.0}, 2: {self.attr_key: 0.5}, 3:{self.attr_key: 9.7}})
        self.assertEqual(self.graph_attr[1], {0:{self.attr_key: 1.0}, 4:{self.attr_key: 4.2}, 5:{self.attr_key: 4.3}})
        self.assertEqual(self.graph_attr[2], {0:{self.attr_key: 0.5}, 5:{self.attr_key: 8.8}})
        self.assertEqual(self.graph_attr[3], {0:{self.attr_key: 9.7}, 6:{self.attr_key: 1.1}})
        self.assertEqual(self.graph_attr[4], {1:{self.attr_key: 4.2}, 6:{self.attr_key: 4.3}})
        self.assertEqual(self.graph_attr[5], {1:{self.attr_key: 4.3}, 2:{self.attr_key: 8.8}, 7:{self.attr_key: 0.1}})
        self.assertEqual(self.graph_attr[6], {3:{self.attr_key: 1.1}, 4:{self.attr_key: 4.3}, 6:{self.attr_key: 9.7}})
        self.assertEqual(self.graph_attr[7], {5:{self.attr_key: 0.1}})
    def test_add_edge_graph(self):
        """add_edge on an undirected graph inserts both directions."""
        self.graph.add_edge(2, 3)
        self.assertEqual(self.graph[2], {0:{}, 3:{}, 5:{}})
        self.assertEqual(self.graph[3], {0:{}, 2:{}, 6:{}})
    def test_add_edge_digraph(self):
        """add_edge on a digraph inserts only u -> v."""
        self.digraph.add_edge(2, 3)
        self.assertEqual(self.digraph[2], {3:{}, 5:{}})
        self.assertEqual(self.digraph[3], {0:{}, 6:{}})
    def test_add_edge_graph_attr(self):
        """add_edge attaches the given attribute dict to both directions."""
        test_attr_key = 'clr'
        test_attr_val = 'R'
        self.graph_attr.add_edge(2, 3, {self.attr_key: 7.7, test_attr_key: test_attr_val})
        self.assertEqual(self.graph_attr[2], {0:{self.attr_key: 0.5}, 3: {self.attr_key: 7.7, test_attr_key: test_attr_val}, 5:{self.attr_key: 8.8}})
        self.assertEqual(self.graph_attr[3], {0:{self.attr_key: 9.7}, 2: {self.attr_key: 7.7, test_attr_key: test_attr_val}, 6:{self.attr_key: 1.1}})
    def test_add_edge_existing_graph(self):
        """Adding an already-present undirected edge raises (either direction)."""
        self.assertRaises(Exception, self.graph.add_edge, 0, 1)
        self.assertRaises(Exception, self.graph.add_edge, 1, 0)
    def test_add_edge_existing_digraph(self):
        """Adding an already-present directed edge raises."""
        self.assertRaises(Exception, self.digraph.add_edge, 0, 1)
    def test_add_edge_loop_graph(self):
        """Self-loops are allowed on undirected graphs."""
        self.graph.add_edge(0, 0)
        self.assertEqual(self.graph[0], {0:{}, 1:{}, 2:{}, 3:{}})
    def test_add_edge_loop_digraph(self):
        """Self-loops are allowed on digraphs."""
        self.digraph.add_edge(0, 0)
        self.assertEqual(self.digraph[0], {0:{}, 1:{}, 2:{}, 3:{}})
    def test_add_edge_new_u_vertex_graph(self):
        """An unknown source vertex is created implicitly (undirected)."""
        self.graph.add_edge(8, 0)
        self.assertEqual(self.graph[0], {1:{}, 2:{}, 3:{}, 8:{}})
        self.assertEqual(self.graph[8], {0:{}})
    def test_add_edge_new_u_vertex_digraph(self):
        """An unknown source vertex is created implicitly (directed)."""
        self.digraph.add_edge(8, 0)
        self.assertEqual(self.digraph[0], {1:{}, 2:{}, 3:{}})
        self.assertEqual(self.digraph[8], {0:{}})
    def test_add_edge_new_v_vertex_graph(self):
        """An unknown target vertex is created implicitly (undirected)."""
        self.graph.add_edge(0, 8)
        self.assertEqual(self.graph[0], {1:{}, 2:{}, 3:{}, 8:{}})
        self.assertEqual(self.graph[8], {0:{}})
    def test_add_edge_new_v_vertex_digraph(self):
        """An unknown target vertex is created with no out-edges (directed)."""
        self.digraph.add_edge(0, 8)
        self.assertEqual(self.digraph[0], {1:{}, 2:{}, 3:{}, 8:{}})
        self.assertEqual(self.digraph[8], {})
    def test_add_edges_graph(self):
        """add_edges merges a whole adjacency list of new edges."""
        self.graph.add_edges(adj_list={
            0: {4: {}, 6: {}},
            2: {3: {}}})
        self.assertEqual(self.graph[0], {1:{}, 2:{}, 3:{}, 4:{}, 6:{}})
        self.assertEqual(self.graph[2], {0:{}, 3:{}, 5:{}})
        self.assertEqual(self.graph[3], {0:{}, 2:{}, 6:{}})
        self.assertEqual(self.graph[4], {0:{}, 1:{}, 6:{}})
    def test_len(self):
        """len() reports the number of vertices."""
        self.assertEqual(len(self.graph), 8)
        self.assertEqual(len(self.digraph), 8)
    def test_delete_vertex_graph(self):
        """Deleting a vertex removes it and all edges touching it (undirected)."""
        del self.graph[0]
        self.assertRaises(KeyError, self.graph.__getitem__, 0)
        self.assertEqual(self.graph[1], {4:{}, 5:{}})
        self.assertEqual(self.graph[2], {5:{}})
        self.assertEqual(self.graph[3], {6:{}})
    def test_delete_vertex_digraph(self):
        """Deleting a vertex removes it and its incident edges (directed)."""
        del self.digraph[0]
        self.assertRaises(KeyError, self.digraph.__getitem__, 0)
        self.assertEqual(self.digraph[1], {4:{}, 5:{}})
        self.assertEqual(self.digraph[3], {6:{}})
    def test_delete_missing_vertex_graph(self):
        """Deleting an unknown vertex raises KeyError (undirected)."""
        self.assertRaises(KeyError, self.graph.__delitem__, 8)
    def test_delete_missing_vertex_digraph(self):
        """Deleting an unknown vertex raises KeyError (directed)."""
        self.assertRaises(KeyError, self.digraph.__delitem__, 8)
| 42.649351
| 149
| 0.556943
| 963
| 6,568
| 3.607477
| 0.057113
| 0.220207
| 0.267991
| 0.221071
| 0.829303
| 0.630685
| 0.494243
| 0.337363
| 0.28958
| 0.269142
| 0
| 0.064459
| 0.227619
| 6,568
| 154
| 150
| 42.649351
| 0.620343
| 0
| 0
| 0.138462
| 0
| 0
| 0.000761
| 0
| 0
| 0
| 0
| 0
| 0.446154
| 1
| 0.161538
| false
| 0
| 0.015385
| 0
| 0.184615
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4b666eded31b2a6bd1805188515509960d45d7fb
| 4,788
|
py
|
Python
|
src/input_getter.py
|
smileyface12349/notorious-narwhals
|
2bafd3799f3f21bef0a62135d302068ae4866918
|
[
"MIT"
] | 6
|
2021-07-09T15:32:01.000Z
|
2021-07-09T19:33:52.000Z
|
src/input_getter.py
|
smileyface12349/notorious-narwhals
|
2bafd3799f3f21bef0a62135d302068ae4866918
|
[
"MIT"
] | 22
|
2021-07-09T21:32:04.000Z
|
2021-07-16T20:22:06.000Z
|
src/input_getter.py
|
smileyface12349/notorious-narwhals
|
2bafd3799f3f21bef0a62135d302068ae4866918
|
[
"MIT"
] | null | null | null |
import curses
from threading import Thread
from typing import NoReturn, Optional
class InputGetter:
    """Collect keyboard input from a curses screen on a background thread.

    A worker thread repeatedly calls ``screen.getch()`` and appends each
    key code to ``char_index_list`` (oldest first); the accessor methods
    below read — and optionally consume — that buffer.
    """

    def __init__(self, screen: curses.window):
        """Initialize the InputGetter and start the collection thread."""
        self.screen = screen
        self.char_index_list = []  # key codes in arrival order (oldest first)
        self.running = True
        self.thread = Thread(target=self._loop)
        self.thread.start()

    @property
    def char_list(self) -> list:
        """Get the char_index_list converted to one-character strings."""
        return [chr(index) for index in self.char_index_list]

    def _loop(self) -> None:
        # NOTE: annotated -> None (was NoReturn): the loop exits once
        # self.running is cleared by quit().
        """Main InputGetter loop (called as thread)."""
        while self.running:
            self.char_index_list.append(self.screen.getch())

    def quit(self) -> None:
        """Quit the main InputGetter loop and join the thread.

        The prompt is needed because the thread is normally blocked inside
        ``getch()``; one more key press lets it observe ``running`` == False.
        """
        self.running = False
        self.screen.clear()
        self.screen.addstr(0, 0, "Press any key to quit")
        self.screen.refresh()
        self.thread.join()

    def clear(self) -> None:
        """Clear the char_index_list."""
        self.char_index_list = []

    def _take(self, index: int, remove: bool, clear: bool) -> Optional[int]:
        """Shared backend for all get_* accessors.

        Returns ``char_index_list[index]`` or None when the index is out of
        range (previously an out-of-range negative index raised IndexError;
        returning None is consistent with the positive out-of-range case).
        If remove is true the entry is popped; if clear is true the whole
        list is cleared afterwards.
        """
        try:
            output = self.char_index_list[index]
        except IndexError:
            return None
        if remove:
            self.char_index_list.pop(index)
        if clear:
            self.clear()
        return output

    def get_first_char_index(self, remove: bool = False, clear: bool = False) -> Optional[int]:
        """Get the first (oldest) char_index the main loop got.

        If remove is true: also remove it from the list.
        If clear is true: also clear the list.
        If char_index_list is empty: return None.
        """
        return self._take(0, remove, clear)

    def get_first_char(self, remove: bool = False, clear: bool = False) -> Optional[str]:
        """Get the first (oldest) char the main loop got.

        Same remove/clear semantics as get_first_char_index; returns the
        key code converted with chr(), or None when the list is empty.
        """
        index = self._take(0, remove, clear)
        return None if index is None else chr(index)

    def get_last_char_index(self, remove: bool = False, clear: bool = False) -> Optional[int]:
        """Get the last (newest) char_index the main loop got.

        If remove is true: also remove it from the list.
        If clear is true: also clear the list.
        If char_index_list is empty: return None.
        """
        return self._take(-1, remove, clear)

    def get_last_char(self, remove: bool = False, clear: bool = False) -> Optional[str]:
        """Get the last (newest) char the main loop got.

        Same remove/clear semantics as get_last_char_index; returns the
        key code converted with chr(), or None when the list is empty.
        """
        index = self._take(-1, remove, clear)
        return None if index is None else chr(index)

    def get_char_index_at_index(self, index: int, remove: bool = False, clear: bool = False) -> Optional[int]:
        """Get the char_index at a certain index of the list.

        If remove is true: also remove it from the list.
        If clear is true: also clear the list.
        If index is out of range: return None.
        """
        return self._take(index, remove, clear)

    def get_char_at_index(self, index: int, remove: bool = False, clear: bool = False) -> Optional[str]:
        """Get the char at a certain index of the list.

        Same remove/clear semantics as get_char_index_at_index; returns the
        key code converted with chr(), or None when index is out of range.
        """
        code = self._take(index, remove, clear)
        return None if code is None else chr(code)
| 35.731343
| 110
| 0.608396
| 688
| 4,788
| 4.097384
| 0.111919
| 0.13409
| 0.138347
| 0.132671
| 0.768003
| 0.735722
| 0.735722
| 0.735722
| 0.735722
| 0.728627
| 0
| 0.003617
| 0.307018
| 4,788
| 133
| 111
| 36
| 0.845992
| 0.326023
| 0
| 0.564103
| 0
| 0
| 0.007075
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.141026
| false
| 0
| 0.038462
| 0
| 0.358974
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4b7fed8c377c1928220f8ccea49e16251d2d23f6
| 100
|
py
|
Python
|
botcommands/pi.py
|
JokerQyou/bot
|
c67664099acd5338555ed6bb5c8b75b7e2fed746
|
[
"BSD-2-Clause"
] | 7
|
2015-07-31T14:26:59.000Z
|
2016-05-17T01:41:28.000Z
|
botcommands/pi.py
|
JokerQyou/bot
|
c67664099acd5338555ed6bb5c8b75b7e2fed746
|
[
"BSD-2-Clause"
] | 36
|
2015-07-19T15:49:35.000Z
|
2018-07-31T15:10:31.000Z
|
botcommands/pi.py
|
JokerQyou/bot
|
c67664099acd5338555ed6bb5c8b75b7e2fed746
|
[
"BSD-2-Clause"
] | null | null | null |
# coding: utf-8
from config import pi_command
@pi_command
def pi(msg=None, debug=False):
    """Handler for the `pi` command (registered via the `pi_command`
    decorator from config); currently a no-op placeholder."""
    pass
| 12.5
| 30
| 0.72
| 17
| 100
| 4.117647
| 0.823529
| 0.257143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.18
| 100
| 7
| 31
| 14.285714
| 0.841463
| 0.13
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
29d521796bd2d45b513788e6834d38768a296bbc
| 295
|
py
|
Python
|
src/webapp/resources/__init__.py
|
minelminel/flask-boilerplate
|
39780a4f5d9fc725ca24f920aa65c2ac56a71496
|
[
"MIT"
] | null | null | null |
src/webapp/resources/__init__.py
|
minelminel/flask-boilerplate
|
39780a4f5d9fc725ca24f920aa65c2ac56a71496
|
[
"MIT"
] | null | null | null |
src/webapp/resources/__init__.py
|
minelminel/flask-boilerplate
|
39780a4f5d9fc725ca24f920aa65c2ac56a71496
|
[
"MIT"
] | null | null | null |
from .base import api
from .index import IndexResource
from .health import HealthResource
from .document import DocumentsResource
# Register every resource class with the shared `api` object and its route(s).
api.add_resource(IndexResource, '/')
api.add_resource(HealthResource, '/health')
# Documents are exposed both per-id and as a collection endpoint.
api.add_resource(DocumentsResource, '/documents/<int:document_id>', '/documents')
| 29.5
| 81
| 0.80339
| 34
| 295
| 6.852941
| 0.441176
| 0.077253
| 0.180258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 295
| 9
| 82
| 32.777778
| 0.862963
| 0
| 0
| 0
| 0
| 0
| 0.155932
| 0.094915
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.571429
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
29f05b2b890c04ef8916a639a3e3061caf65f099
| 71
|
py
|
Python
|
PySpectrograph/Spectra/__init__.py
|
crawfordsm/pyspectrograph
|
4237ba4b4fe08a69e1d6487924d959f089ecca46
|
[
"BSD-3-Clause"
] | 18
|
2015-01-11T21:04:59.000Z
|
2021-08-06T18:30:47.000Z
|
PySpectrograph/Spectra/__init__.py
|
crawfordsm/pyspectrograph
|
4237ba4b4fe08a69e1d6487924d959f089ecca46
|
[
"BSD-3-Clause"
] | 14
|
2015-04-23T09:39:16.000Z
|
2017-12-03T12:49:05.000Z
|
PySpectrograph/Spectra/__init__.py
|
crawfordsm/pyspectrograph
|
4237ba4b4fe08a69e1d6487924d959f089ecca46
|
[
"BSD-3-Clause"
] | 5
|
2015-04-23T08:17:37.000Z
|
2019-06-22T13:36:47.000Z
|
"""
Spectra includes models for a spectrum
"""
from . import Spectrum
| 11.833333
| 38
| 0.71831
| 9
| 71
| 5.666667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183099
| 71
| 5
| 39
| 14.2
| 0.87931
| 0.535211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8a099be4700bfa6c9cabf6e07c5d892f85173ab4
| 3,307
|
py
|
Python
|
tests/parser/functions/test_default_function.py
|
upgradvisor/vyper
|
642884ea938a25793c1b2fac866e8458e63a7b49
|
[
"Apache-2.0"
] | 1,347
|
2019-11-22T06:49:38.000Z
|
2022-03-31T19:49:32.000Z
|
tests/parser/functions/test_default_function.py
|
upgradvisor/vyper
|
642884ea938a25793c1b2fac866e8458e63a7b49
|
[
"Apache-2.0"
] | 915
|
2019-11-21T05:48:16.000Z
|
2022-03-31T23:51:03.000Z
|
tests/parser/functions/test_default_function.py
|
upgradvisor/vyper
|
642884ea938a25793c1b2fac866e8458e63a7b49
|
[
"Apache-2.0"
] | 262
|
2019-11-28T01:44:04.000Z
|
2022-03-31T21:33:43.000Z
|
def test_throw_on_sending(w3, assert_tx_failed, get_contract_with_gas_estimation):
    """A contract without a payable __default__ must reject plain ether sends."""
    code = """
x: public(int128)
@external
def __init__():
    self.x = 123
    """
    c = get_contract_with_gas_estimation(code)
    assert c.x() == 123
    assert w3.eth.getBalance(c.address) == 0
    # The send reverts, so the contract balance must remain zero afterwards.
    assert_tx_failed(
        lambda: w3.eth.sendTransaction({"to": c.address, "value": w3.toWei(0.1, "ether")})
    )
    assert w3.eth.getBalance(c.address) == 0
def test_basic_default(w3, get_logs, get_contract_with_gas_estimation):
    """A payable __default__ runs on a plain ether send and can log events."""
    code = """
event Sent:
    sender: indexed(address)
@external
@payable
def __default__():
    log Sent(msg.sender)
"""
    c = get_contract_with_gas_estimation(code)
    # Sending 0.1 ether with no calldata triggers __default__ and logs Sent.
    logs = get_logs(w3.eth.sendTransaction({"to": c.address, "value": 10 ** 17}), c, "Sent")
    assert w3.eth.accounts[0] == logs[0].args.sender
    # The contract keeps the transferred value.
    assert w3.eth.getBalance(c.address) == w3.toWei(0.1, "ether")
def test_basic_default_default_param_function(w3, get_logs, get_contract_with_gas_estimation):
    """__default__ still dispatches correctly when another function has a default parameter."""
    code = """
event Sent:
    sender: indexed(address)
@external
@payable
def fooBar(a: int128 = 12345) -> int128:
    log Sent(ZERO_ADDRESS)
    return a
@external
@payable
def __default__():
    log Sent(msg.sender)
"""
    c = get_contract_with_gas_estimation(code)
    # A plain send (no calldata) must hit __default__ (sender logged), not fooBar.
    logs = get_logs(w3.eth.sendTransaction({"to": c.address, "value": 10 ** 17}), c, "Sent")
    assert w3.eth.accounts[0] == logs[0].args.sender
    assert w3.eth.getBalance(c.address) == w3.toWei(0.1, "ether")
def test_basic_default_not_payable(w3, assert_tx_failed, get_contract_with_gas_estimation):
    """A non-payable __default__ must revert when value is attached to the send."""
    code = """
event Sent:
    sender: indexed(address)
@external
def __default__():
    log Sent(msg.sender)
"""
    c = get_contract_with_gas_estimation(code)
    assert_tx_failed(lambda: w3.eth.sendTransaction({"to": c.address, "value": 10 ** 17}))
def test_multi_arg_default(assert_compile_failed, get_contract_with_gas_estimation):
    """__default__ may not declare parameters; compilation must fail."""
    src = """
@payable
@external
def __default__(arg1: int128):
    pass
"""

    def deploy():
        return get_contract_with_gas_estimation(src)

    assert_compile_failed(deploy)
def test_always_public(assert_compile_failed, get_contract_with_gas_estimation):
    """An @internal __default__ is rejected by the compiler."""
    src = """
@internal
def __default__():
    pass
"""

    def deploy():
        return get_contract_with_gas_estimation(src)

    assert_compile_failed(deploy)
def test_always_public_2(assert_compile_failed, get_contract_with_gas_estimation):
    """__default__ without an @external decorator is rejected by the compiler."""
    src = """
event Sent:
    sender: indexed(address)
def __default__():
    log Sent(msg.sender)
"""

    def deploy():
        return get_contract_with_gas_estimation(src)

    assert_compile_failed(deploy)
def test_zero_method_id(w3, get_logs, get_contract_with_gas_estimation):
    """Calldata matching selector 0x00000000 must dispatch to that function,
    not to __default__; empty calldata still hits __default__."""
    code = """
event Sent:
    sig: uint256
@external
@payable
# function selector: 0x00000000
def blockHashAskewLimitary(v: uint256) -> uint256:
    log Sent(2)
    return 7
@external
def __default__():
    log Sent(1)
"""
    c = get_contract_with_gas_estimation(code)
    assert c.blockHashAskewLimitary(0) == 7
    # No calldata at all -> __default__ runs and logs sig == 1.
    logs = get_logs(w3.eth.sendTransaction({"to": c.address, "value": 0}), c, "Sent")
    assert 1 == logs[0].args.sig
    logs = get_logs(
        # call blockHashAskewLimitary
        w3.eth.sendTransaction({"to": c.address, "value": 0, "data": "0x00000000"}),
        c,
        "Sent",
    )
    # The all-zero selector dispatched to the function, which logs sig == 2.
    assert 2 == logs[0].args.sig
| 24.679104
| 94
| 0.691261
| 450
| 3,307
| 4.748889
| 0.166667
| 0.082358
| 0.112307
| 0.134768
| 0.77679
| 0.75854
| 0.746373
| 0.718297
| 0.700983
| 0.607861
| 0
| 0.039237
| 0.175386
| 3,307
| 133
| 95
| 24.864662
| 0.744408
| 0.008165
| 0
| 0.647059
| 0
| 0
| 0.300793
| 0.007627
| 0
| 0
| 0.006101
| 0
| 0.196078
| 1
| 0.078431
| false
| 0.019608
| 0
| 0
| 0.098039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a1eac4eec62dcb75c3d1813b6ef3da37e9f3118
| 13,034
|
py
|
Python
|
azure-mgmt-cdn/azure/mgmt/cdn/operations/origins_operations.py
|
azuresdkci1x/azure-sdk-for-python-1722
|
e08fa6606543ce0f35b93133dbb78490f8e6bcc9
|
[
"MIT"
] | 1
|
2018-11-09T06:16:34.000Z
|
2018-11-09T06:16:34.000Z
|
azure-mgmt-cdn/azure/mgmt/cdn/operations/origins_operations.py
|
azuresdkci1x/azure-sdk-for-python-1722
|
e08fa6606543ce0f35b93133dbb78490f8e6bcc9
|
[
"MIT"
] | null | null | null |
azure-mgmt-cdn/azure/mgmt/cdn/operations/origins_operations.py
|
azuresdkci1x/azure-sdk-for-python-1722
|
e08fa6606543ce0f35b93133dbb78490f8e6bcc9
|
[
"MIT"
] | 1
|
2018-11-09T06:17:41.000Z
|
2018-11-09T06:17:41.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class OriginsOperations(object):
    """OriginsOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config
    def list_by_endpoint(
            self, resource_group_name, profile_name, endpoint_name, custom_headers=None, raw=False, **operation_config):
        """Lists all of the existing origins within an endpoint.
        :param resource_group_name: Name of the Resource group within the
        Azure subscription.
        :type resource_group_name: str
        :param profile_name: Name of the CDN profile which is unique within
        the resource group.
        :type profile_name: str
        :param endpoint_name: Name of the endpoint under the profile which is
        unique globally.
        :type endpoint_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :rtype: :class:`OriginPaged <azure.mgmt.cdn.models.OriginPaged>`
        :raises:
        :class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
        """
        # Paging callback handed to OriginPaged: fetches one page, either
        # from the initial list URL or from the service-supplied next_link.
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
                    'profileName': self._serialize.url("profile_name", profile_name, 'str'),
                    'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
            else:
                # next_link is already a fully-formed URL from the service.
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)
            return response
        # Deserialize response
        deserialized = models.OriginPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.OriginPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
    def get(
            self, resource_group_name, profile_name, endpoint_name, origin_name, custom_headers=None, raw=False, **operation_config):
        """Gets an existing origin within an endpoint.
        :param resource_group_name: Name of the Resource group within the
        Azure subscription.
        :type resource_group_name: str
        :param profile_name: Name of the CDN profile which is unique within
        the resource group.
        :type profile_name: str
        :param endpoint_name: Name of the endpoint under the profile which is
        unique globally.
        :type endpoint_name: str
        :param origin_name: Name of the origin which is unique within the
        endpoint.
        :type origin_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :rtype: :class:`Origin <azure.mgmt.cdn.models.Origin>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
        :raises:
        :class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'profileName': self._serialize.url("profile_name", profile_name, 'str'),
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'originName': self._serialize.url("origin_name", origin_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Origin', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def update(
            self, resource_group_name, profile_name, endpoint_name, origin_name, origin_update_properties, custom_headers=None, raw=False, **operation_config):
        """Updates an existing origin within an endpoint.
        :param resource_group_name: Name of the Resource group within the
        Azure subscription.
        :type resource_group_name: str
        :param profile_name: Name of the CDN profile which is unique within
        the resource group.
        :type profile_name: str
        :param endpoint_name: Name of the endpoint under the profile which is
        unique globally.
        :type endpoint_name: str
        :param origin_name: Name of the origin which is unique within the
        endpoint.
        :type origin_name: str
        :param origin_update_properties: Origin properties
        :type origin_update_properties: :class:`OriginUpdateParameters
        <azure.mgmt.cdn.models.OriginUpdateParameters>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :rtype:
        :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
        instance that returns :class:`Origin <azure.mgmt.cdn.models.Origin>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        if raw=true
        :raises:
        :class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/endpoints/{endpointName}/origins/{originName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'profileName': self._serialize.url("profile_name", profile_name, 'str'),
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'originName': self._serialize.url("origin_name", origin_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(origin_update_properties, 'OriginUpdateParameters')
        # Construct and send request
        # Closure: issues the initial PATCH that starts the long-running op.
        def long_running_send():
            request = self._client.patch(url, query_parameters)
            return self._client.send(
                request, header_parameters, body_content, **operation_config)
        # Closure: polls the status link returned by the service.
        def get_long_running_status(status_link, headers=None):
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)
        # Closure: deserializes the final (or raw) response of the operation.
        def get_long_running_output(response):
            if response.status_code not in [200, 202]:
                raise models.ErrorResponseException(self._deserialize, response)
            deserialized = None
            if response.status_code == 200:
                deserialized = self._deserialize('Origin', response)
            if response.status_code == 202:
                deserialized = self._deserialize('Origin', response)
            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response
            return deserialized
        if raw:
            response = long_running_send()
            return get_long_running_output(response)
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
| 46.55
| 175
| 0.660887
| 1,382
| 13,034
| 6.01809
| 0.129522
| 0.031261
| 0.03066
| 0.017194
| 0.79079
| 0.783576
| 0.777684
| 0.744018
| 0.71408
| 0.71408
| 0
| 0.003725
| 0.23784
| 13,034
| 279
| 176
| 46.716846
| 0.833501
| 0.295458
| 0
| 0.628788
| 0
| 0.022727
| 0.172398
| 0.094766
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.030303
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a2110541c3679bd19aadfe8e88349a34b64e12f
| 262
|
py
|
Python
|
src/arduino_helper/Grettings.py
|
madkira/SCXML_to_FSM_for_Arduino
|
2e2443eca70ff6d5a8b8ac8c32b0c7e1d4440201
|
[
"MIT"
] | 1
|
2020-05-13T23:03:19.000Z
|
2020-05-13T23:03:19.000Z
|
src/arduino_helper/Grettings.py
|
madkira/SCXML_to_FSM_for_Arduino
|
2e2443eca70ff6d5a8b8ac8c32b0c7e1d4440201
|
[
"MIT"
] | null | null | null |
src/arduino_helper/Grettings.py
|
madkira/SCXML_to_FSM_for_Arduino
|
2e2443eca70ff6d5a8b8ac8c32b0c7e1d4440201
|
[
"MIT"
] | 1
|
2019-01-20T12:46:37.000Z
|
2019-01-20T12:46:37.000Z
|
def Grettings(name, object):
    """Build the banner comment placed at the top of a generated file.

    Both arguments are expected to be strings: *name* fills the "file"
    line and *object* fills the "object" line of the banner.
    """
    banner = (
        "/******************************************************* \n"
        " * author : Raphaël KUMAR generator\n"
        " * copyright : SoFAB CC-BY-SA\n"
        " * file : " + name + "\n"
        " * object : " + object + "\n"
        " ***/\n"
    )
    return banner
| 37.428571
| 73
| 0.385496
| 24
| 262
| 4.208333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21374
| 262
| 7
| 74
| 37.428571
| 0.490291
| 0
| 0
| 0
| 0
| 0
| 0.665399
| 0.212928
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0.142857
| 0.285714
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
8a2c77deaf39d7ce89c56a94efd8bd4caa08372f
| 68
|
py
|
Python
|
handlers/__init__.py
|
k-orolevsk-y/vk-bot-analyzer
|
81c07124561f8570354655a988ddb5fbe7cd0dc6
|
[
"MIT"
] | null | null | null |
handlers/__init__.py
|
k-orolevsk-y/vk-bot-analyzer
|
81c07124561f8570354655a988ddb5fbe7cd0dc6
|
[
"MIT"
] | null | null | null |
handlers/__init__.py
|
k-orolevsk-y/vk-bot-analyzer
|
81c07124561f8570354655a988ddb5fbe7cd0dc6
|
[
"MIT"
] | null | null | null |
from .RelationPartnerRemoved import *
from .RemovedFriends import *
| 22.666667
| 37
| 0.823529
| 6
| 68
| 9.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 2
| 38
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8a2e8afb87b6978a4775563f4635befcf941d488
| 368,000
|
py
|
Python
|
nova/tests/unit/api/openstack/compute/test_serversV21.py
|
PannanaAlex/nova
|
348ae354533a34095b475b51bfcaf2b0eb0c0b63
|
[
"Apache-2.0"
] | 1
|
2019-04-17T04:21:17.000Z
|
2019-04-17T04:21:17.000Z
|
nova/tests/unit/api/openstack/compute/test_serversV21.py
|
PannanaAlex/nova
|
348ae354533a34095b475b51bfcaf2b0eb0c0b63
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/api/openstack/compute/test_serversV21.py
|
PannanaAlex/nova
|
348ae354533a34095b475b51bfcaf2b0eb0c0b63
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import datetime
import ddt
import fixtures
import iso8601
import mock
from oslo_policy import policy as oslo_policy
from oslo_serialization import base64
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from oslo_utils import fixture as utils_fixture
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from six.moves import range
import six.moves.urllib.parse as urlparse
import testtools
import webob
from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.api.openstack import compute
from nova.api.openstack.compute import ips
from nova.api.openstack.compute import servers
from nova.api.openstack.compute import views
from nova.api.openstack import wsgi as os_wsgi
from nova import availability_zones
from nova import block_device
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import context
from nova.db import api as db
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import models
from nova import exception
from nova.image import glance
from nova import objects
from nova.objects import instance as instance_obj
from nova.objects.instance_group import InstanceGroup
from nova.objects import tag
from nova.policies import servers as server_policies
from nova import policy
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network
from nova.tests.unit.image import fake
from nova.tests.unit import matchers
from nova import utils as nova_utils
# Module-level fixtures shared by the stub functions and test cases below.
CONF = nova.conf.CONF
FAKE_UUID = fakes.FAKE_UUID
# Canned instance UUIDs used by the BDM and instance-mapping stubs.
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
# Maps instance uuid -> integer DB id for fakes.stub_instance.
INSTANCE_IDS = {FAKE_UUID: 1}
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
def instance_update_and_get_original(context, instance_uuid, values,
                                     columns_to_join=None,
                                     ):
    """Stub for db.instance_update_and_get_original.

    Builds a stub instance for *instance_uuid*, overlays *values*, and
    returns the same dict twice as the (old, new) record pair.
    """
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    updated = dict(stub, **values)
    return (updated, updated)
def instance_update(context, instance_uuid, values):
    """Stub for db.instance_update: stub instance with *values* overlaid."""
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    return dict(stub, **values)
def fake_compute_api(cls, req, id):
    # Stub standing in for a compute API action; unconditionally reports
    # success.  NOTE(review): 'id' shadows the builtin but is part of the
    # stubbed signature, so it is kept.
    return True
def fake_start_stop_not_ready(self, context, instance):
    # Stub start/stop implementation that always fails as "not ready yet".
    raise exception.InstanceNotReady(instance_id=instance["uuid"])
def fake_start_stop_invalid_state(self, context, instance):
    # Stub start/stop implementation that always fails with an invalid
    # vm_state error, using placeholder attr/method/state names.
    raise exception.InstanceInvalidState(
        instance_uuid=instance['uuid'], attr='fake_attr',
        method='fake_method', state='fake_state')
def fake_instance_get_by_uuid_not_found(context, uuid,
                                        columns_to_join, use_slave=False):
    # Stub DB lookup that always reports the instance as missing.
    raise exception.InstanceNotFound(instance_id=uuid)
def fake_instance_get_all_with_locked(context, list_locked, **kwargs):
    """Return an InstanceList with one stub instance per *list_locked* entry.

    An entry of 'not_locked' yields locked_by=None; any other entry is
    used verbatim as the locked_by value.  Remaining **kwargs are passed
    through to the stub builder.
    """
    obj_list = []
    # enumerate replaces the original's manual s_id counter; ids start at 1.
    for s_id, locked in enumerate(list_locked, start=1):
        uuid = fakes.get_fake_uuid(locked)
        kwargs['locked_by'] = None if locked == 'not_locked' else locked
        server = fakes.stub_instance_obj(context, id=s_id, uuid=uuid, **kwargs)
        obj_list.append(server)
    return objects.InstanceList(objects=obj_list)
def fake_instance_get_all_with_description(context, list_desc, **kwargs):
    """Return an InstanceList with one stub instance per *list_desc* entry,
    each carrying its entry as display_description.
    """
    obj_list = []
    # enumerate replaces the original's manual s_id counter; ids start at 1.
    for s_id, desc in enumerate(list_desc, start=1):
        uuid = fakes.get_fake_uuid(desc)
        kwargs['display_description'] = desc
        server = fakes.stub_instance_obj(context, id=s_id, uuid=uuid, **kwargs)
        obj_list.append(server)
    return objects.InstanceList(objects=obj_list)
def fake_compute_get_empty_az(*args, **kwargs):
    """Return a fake ACTIVE instance object with an empty availability zone.

    args[1] is expected to be the request context.
    """
    stub = fakes.stub_instance(vm_state=vm_states.ACTIVE,
                               availability_zone='')
    return fake_instance.fake_instance_obj(args[1], **stub)
def fake_bdms_get_all_by_instance_uuids(*args, **kwargs):
    """Return two fake volume BDMs attached to FAKE_UUID.

    The first is delete_on_termination=True, the second False; both are
    plain volume->volume mappings.
    """
    specs = (
        (1, 'some_volume_1', True),
        (2, 'some_volume_2', False),
    )
    return [
        fake_block_device.FakeDbBlockDeviceDict({
            'id': bdm_id,
            'volume_id': volume_id,
            'instance_uuid': FAKE_UUID,
            'source_type': 'volume',
            'destination_type': 'volume',
            'delete_on_termination': delete_flag,
        })
        for bdm_id, volume_id, delete_flag in specs
    ]
def fake_get_inst_mappings_by_instance_uuids_from_db(*args, **kwargs):
    """Return a single fake instance-mapping row pointing UUID1 at cell1."""
    cell_mapping = {
        'id': 1,
        'uuid': uuids.cell1,
        'name': 'fake',
        'transport_url': 'fake://nowhere/',
        'updated_at': None,
        'database_connection': uuids.cell1,
        'created_at': None,
        'disabled': False,
    }
    return [{
        'id': 1,
        'instance_uuid': UUID1,
        'cell_mapping': cell_mapping,
        'project_id': 'fake-project',
    }]
class MockSetAdminPassword(object):
    """Callable that records the (instance_id, password) pair it was last
    invoked with; stands in for the compute API's set-admin-password call.
    """

    def __init__(self):
        # Nothing recorded until the first call.
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance_id, password):
        self.instance_id = instance_id
        self.password = password
class ControllerTest(test.TestCase):
    """Base test case wiring the servers/ips controllers to stubbed
    compute, network, image, policy and DB layers.
    """

    def setUp(self):
        # Order matters here: the compute API mocks must be in place before
        # ServersController() is constructed below.
        super(ControllerTest, self).setUp()
        self.flags(use_ipv6=False)
        # Neutron security groups are tested in test_neutron_security_groups.py
        self.flags(use_neutron=False)
        fakes.stub_out_nw_api(self)
        fakes.stub_out_key_pair_funcs(self)
        fake.stub_out_image_service(self)
        # Two canned security groups attached to the single-server stub.
        security_groups = [
            {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
             'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
             'deleted_at': None, 'updated_at': None, 'created_at': None},
            {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
             'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
             'deleted_at': None, 'updated_at': None, 'created_at': None}]
        return_server = fakes.fake_compute_get(id=2, availability_zone='nova',
                                               launched_at=None,
                                               terminated_at=None,
                                               security_groups=security_groups,
                                               task_state=None,
                                               vm_state=vm_states.ACTIVE,
                                               power_state=1)
        return_servers = fakes.fake_compute_get_all()
        # Server sort keys extension is enabled in v21 so sort data is passed
        # to the instance API and the sorted DB API is invoked
        self.mock_get_all = self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get_all', side_effect=return_servers)).mock
        self.mock_get = self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get', side_effect=return_server)).mock
        # Route DB-layer lookups through the module-level stub functions.
        self.stub_out('nova.db.api.instance_update_and_get_original',
                      instance_update_and_get_original)
        self.stub_out('nova.db.api.'
                      'block_device_mapping_get_all_by_instance_uuids',
                      fake_bdms_get_all_by_instance_uuids)
        self.stub_out('nova.objects.InstanceMappingList.'
                      '_get_by_instance_uuids_from_db',
                      fake_get_inst_mappings_by_instance_uuids_from_db)
        self.flags(group='glance', api_servers=['http://localhost:9292'])
        self.controller = servers.ServersController()
        self.ips_controller = ips.IPsController()
        # Reset policy state for each test and restore it afterwards.
        policy.reset()
        policy.init()
        self.addCleanup(policy.reset)
        fake_network.stub_out_nw_api_get_instance_nw_info(self)
        # Assume that anything that hits the compute API and looks for a
        # RequestSpec doesn't care about it, since testing logic that deep
        # should be done in nova.tests.unit.compute.test_compute_api.
        mock_reqspec = mock.patch('nova.objects.RequestSpec')
        mock_reqspec.start()
        self.addCleanup(mock_reqspec.stop)
        # Similarly we shouldn't care about anything hitting conductor from
        # these tests.
        mock_conductor = mock.patch.object(
            self.controller.compute_api, 'compute_task_api')
        mock_conductor.start()
        self.addCleanup(mock_conductor.stop)
class ServersControllerTest(ControllerTest):
wsgi_api_version = os_wsgi.DEFAULT_API_VERSION
def req(self, url, use_admin_context=False):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
    @mock.patch('nova.objects.Instance.get_by_uuid')
    @mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
    def test_cellsv1_instance_lookup_no_target(self, mock_get_im,
                                               mock_get_inst):
        # With cells v1 enabled the lookup must not consult the instance
        # mapping nor re-target the context's DB connection.
        self.flags(enable=True, group='cells')
        ctxt = context.RequestContext('fake', 'fake')
        self.controller._get_instance(ctxt, 'foo')
        self.assertFalse(mock_get_im.called)
        self.assertIsNone(ctxt.db_connection)
    @mock.patch('nova.objects.Instance.get_by_uuid')
    @mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
    def test_instance_lookup_targets(self, mock_get_im, mock_get_inst):
        # Without cells v1, the lookup resolves the instance mapping and
        # targets the context at the mapped cell's database.
        ctxt = context.RequestContext('fake', 'fake')
        mock_get_im.return_value.cell_mapping.database_connection = uuids.cell1
        self.controller._get_instance(ctxt, 'foo')
        mock_get_im.assert_called_once_with(ctxt, 'foo')
        self.assertIsNotNone(ctxt.db_connection)
def test_requested_networks_prefix(self):
self.flags(use_neutron=True)
uuid = 'br-00000000-0000-0000-0000-000000000000'
requested_networks = [{'uuid': uuid}]
res = self.controller._get_requested_networks(requested_networks)
self.assertIn((uuid, None, None, None), res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_port(self):
self.flags(use_neutron=True)
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_network(self):
self.flags(use_neutron=True)
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(network, None, None, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_network_and_port(self):
self.flags(use_neutron=True)
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_with_duplicate_networks_nova_net(self):
# duplicate networks are allowed only for nova neutron v2.0
self.flags(use_neutron=False)
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}, {'uuid': network}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
def test_requested_networks_with_neutronv2_and_duplicate_networks(self):
# duplicate networks are allowed only for nova neutron v2.0
self.flags(use_neutron=True)
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}, {'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(network, None, None, None),
(network, None, None, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_conflict_on_fixed_ip(self):
self.flags(use_neutron=True)
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
addr = '10.0.0.1'
requested_networks = [{'uuid': network,
'fixed_ip': addr,
'port': port}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
def test_requested_networks_neutronv2_disabled_with_port(self):
self.flags(use_neutron=False)
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
def test_requested_networks_api_enabled_with_v2_subclass(self):
self.flags(use_neutron=True)
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_get_server_by_uuid(self):
req = self.req('/fake/servers/%s' % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
def test_get_server_joins(self):
def fake_get(*args, **kwargs):
expected_attrs = kwargs['expected_attrs']
self.assertEqual(['flavor', 'info_cache', 'metadata',
'numa_topology'], expected_attrs)
ctxt = context.RequestContext('fake', 'fake')
return fake_instance.fake_instance_obj(
ctxt, expected_attrs=expected_attrs)
self.mock_get.side_effect = fake_get
req = self.req('/fake/servers/%s' % FAKE_UUID)
self.controller.show(req, FAKE_UUID)
def test_unique_host_id(self):
"""Create two servers with the same host and different
project_ids and check that the host_id's are unique.
"""
def return_instance_with_host(context, *args, **kwargs):
project_id = uuidutils.generate_uuid()
return fakes.stub_instance_obj(context, id=1, uuid=FAKE_UUID,
project_id=project_id,
host='fake_host')
req = self.req('/fake/servers/%s' % FAKE_UUID)
self.mock_get.side_effect = return_instance_with_host
server1 = self.controller.show(req, FAKE_UUID)
server2 = self.controller.show(req, FAKE_UUID)
self.assertNotEqual(server1['server']['hostId'],
server2['server']['hostId'])
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        """Return the expected v2.1 'server' view for the stub instance.

        Mirrors what show()/detail() render for the instance created in
        ControllerTest.setUp: fixed test1 addresses, bookmark/self links,
        OS-EXT-* extension attributes and the two attached stub volumes.
        """
        return {
            "server": {
                "id": uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": progress,
                "name": "server2",
                "status": status,
                "hostId": '',
                "image": {
                    "id": "10",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "2",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
                    ]
                },
                "metadata": {
                    "seq": "2",
                },
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v2/fake/servers/%s" % uuid,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/fake/servers/%s" % uuid,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000002",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'fake-0-0'},
                                    {'name': 'fake-0-1'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                # Matches fake_bdms_get_all_by_instance_uuids above.
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
def test_get_server_by_id(self):
self.flags(use_ipv6=True)
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/2"
uuid = FAKE_UUID
req = self.req('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = self._get_server_data_dict(uuid,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_empty_az(self):
self.mock_get.side_effect = fakes.fake_compute_get(
availability_zone='')
uuid = FAKE_UUID
req = self.req('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
self.assertEqual(res_dict['server']['OS-EXT-AZ:availability_zone'], '')
def test_get_server_with_active_status_by_id(self):
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/2"
req = self.req('/fake/servers/%s' % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
self.mock_get.assert_called_once_with(
req.environ['nova.context'], FAKE_UUID,
expected_attrs=['flavor', 'info_cache', 'metadata',
'numa_topology'], cell_down_support=False)
def test_get_server_with_id_image_ref_by_id(self):
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/2"
req = self.req('/fake/servers/%s' % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
self.assertThat(res_dict, matchers.DictMatches(expected_server))
self.mock_get.assert_called_once_with(
req.environ['nova.context'], FAKE_UUID,
expected_attrs=['flavor', 'info_cache', 'metadata',
'numa_topology'], cell_down_support=False)
    def _generate_nw_cache_info(self):
        """Build a two-VIF network info cache.

        VIF 1 is on a 'public' network with two IPv4 subnets and one IPv6
        subnet; VIF 2 is on a 'private' network with one IPv4 subnet.
        """
        pub0 = ('172.19.0.1', '172.19.0.2',)
        pub1 = ('1.2.3.4',)
        pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
        priv0 = ('192.168.0.3', '192.168.0.4',)

        def _ip(ip):
            # Every cached address is modeled as a fixed IP.
            return {'address': ip, 'type': 'fixed'}
        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'public',
                         'subnets': [{'cidr': '172.19.0.0/24',
                                      'ips': [_ip(ip) for ip in pub0]},
                                     {'cidr': '1.2.3.0/16',
                                      'ips': [_ip(ip) for ip in pub1]},
                                     {'cidr': 'b33f::/64',
                                      'ips': [_ip(ip) for ip in pub2]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br1',
                         'id': 2,
                         'label': 'private',
                         'subnets': [{'cidr': '192.168.0.0/24',
                                      'ips': [_ip(ip) for ip in priv0]}]}}]
        return nw_cache
def test_get_server_addresses_from_cache(self):
nw_cache = self._generate_nw_cache_info()
self.mock_get.side_effect = fakes.fake_compute_get(nw_cache=nw_cache,
availability_zone='nova')
req = self.req('/fake/servers/%s/ips' % FAKE_UUID)
res_dict = self.ips_controller.index(req, FAKE_UUID)
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3'},
{'version': 4, 'addr': '192.168.0.4'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1'},
{'version': 4, 'addr': '172.19.0.2'},
{'version': 4, 'addr': '1.2.3.4'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa'},
],
},
}
self.assertThat(res_dict, matchers.DictMatches(expected))
self.mock_get.assert_called_once_with(
req.environ['nova.context'], FAKE_UUID,
expected_attrs=None, cell_down_support=False)
# Make sure we kept the addresses in order
self.assertIsInstance(res_dict['addresses'], collections.OrderedDict)
labels = [vif['network']['label'] for vif in nw_cache]
for index, label in enumerate(res_dict['addresses'].keys()):
self.assertEqual(label, labels[index])
def test_get_server_addresses_nonexistent_network(self):
url = '/v2/fake/servers/%s/ips/network_0' % FAKE_UUID
req = self.req(url)
self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
req, FAKE_UUID, 'network_0')
def test_get_server_addresses_nonexistent_server(self):
self.mock_get.side_effect = exception.InstanceNotFound(
instance_id='fake')
req = self.req('/fake/servers/%s/ips' % uuids.fake)
self.assertRaises(webob.exc.HTTPNotFound,
self.ips_controller.index, req, uuids.fake)
self.mock_get.assert_called_once_with(
req.environ['nova.context'], uuids.fake, expected_attrs=None,
cell_down_support=False)
def test_show_server_hide_addresses_in_building(self):
uuid = FAKE_UUID
self.mock_get.side_effect = fakes.fake_compute_get(
uuid=uuid, vm_state=vm_states.BUILDING)
req = self.req('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
self.assertEqual({}, res_dict['server']['addresses'])
def test_show_server_addresses_in_non_building(self):
uuid = FAKE_UUID
nw_cache = self._generate_nw_cache_info()
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
{'version': 4, 'addr': '192.168.0.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '172.19.0.2',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '1.2.3.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
],
},
}
self.mock_get.side_effect = fakes.fake_compute_get(
nw_cache=nw_cache, uuid=uuid, vm_state=vm_states.ACTIVE)
req = self.req('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
self.assertThat(res_dict['server']['addresses'],
matchers.DictMatches(expected['addresses']))
def test_detail_server_hide_addresses(self):
nw_cache = self._generate_nw_cache_info()
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
{'version': 4, 'addr': '192.168.0.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '172.19.0.2',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 4, 'addr': '1.2.3.4',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
],
},
}
def fake_get_all(context, **kwargs):
return objects.InstanceList(
objects=[fakes.stub_instance_obj(1,
vm_state=vm_states.BUILDING,
uuid=uuids.fake,
nw_cache=nw_cache),
fakes.stub_instance_obj(2,
vm_state=vm_states.ACTIVE,
uuid=uuids.fake2,
nw_cache=nw_cache)])
self.mock_get_all.side_effect = fake_get_all
req = self.req('/fake/servers?deleted=true',
use_admin_context=True)
servers = self.controller.detail(req)['servers']
for server in servers:
if server['OS-EXT-STS:vm_state'] == 'building':
self.assertEqual({}, server['addresses'])
else:
self.assertThat(server['addresses'],
matchers.DictMatches(expected['addresses']))
def test_get_server_list_empty(self):
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
req = self.req('/fake/servers')
res_dict = self.controller.index(req)
self.assertEqual(0, len(res_dict['servers']))
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'], expected_attrs=[], limit=1000,
marker=None, search_opts={'deleted': False, 'project_id': 'fake'},
sort_dirs=['desc'], sort_keys=['created_at'],
cell_down_support=False, all_tenants=False)
def test_get_server_list_with_reservation_id(self):
req = self.req('/fake/servers?reservation_id=foo')
res_dict = self.controller.index(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_empty(self):
req = self.req('/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_details(self):
req = self.req('/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list(self):
req = self.req('/fake/servers')
res_dict = self.controller.index(req)
self.assertEqual(len(res_dict['servers']), 5)
for i, s in enumerate(res_dict['servers']):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['name'], 'server%d' % (i + 1))
self.assertIsNone(s.get('image', None))
expected_links = [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % s['id'],
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % s['id'],
},
]
self.assertEqual(s['links'], expected_links)
def test_get_servers_with_limit(self):
req = self.req('/fake/servers?limit=3')
res_dict = self.controller.index(req)
servers = res_dict['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in range(len(servers))])
servers_links = res_dict['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected_params = {'limit': ['3'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected_params))
def test_get_servers_with_limit_bad_value(self):
req = self.req('/fake/servers?limit=aaa')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_server_details_empty(self):
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
req = self.req('/fake/servers/detail')
expected_attrs = ['flavor', 'info_cache', 'metadata']
if api_version_request.is_supported(req, '2.16'):
expected_attrs.append('services')
res_dict = self.controller.detail(req)
self.assertEqual(0, len(res_dict['servers']))
self.mock_get_all.assert_called_once_with(
req.environ['nova.context'],
expected_attrs=expected_attrs,
limit=1000, marker=None,
search_opts={'deleted': False, 'project_id': 'fake'},
sort_dirs=['desc'], sort_keys=['created_at'],
cell_down_support=False, all_tenants=False)
def test_get_server_details_with_bad_name(self):
req = self.req('/fake/servers/detail?name=%2Binstance')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_server_details_with_limit(self):
req = self.req('/fake/servers/detail?limit=3')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in range(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers/detail', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_server_details_with_limit_bad_value(self):
req = self.req('/fake/servers/detail?limit=aaa')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_get_server_details_with_limit_and_other_params(self):
req = self.req('/fake/servers/detail'
'?limit=3&blah=2:t'
'&sort_key=uuid&sort_dir=asc')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in range(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers/detail', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'],
'sort_key': ['uuid'], 'sort_dir': ['asc'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_servers_with_too_big_limit(self):
req = self.req('/fake/servers?limit=30')
res_dict = self.controller.index(req)
self.assertNotIn('servers_links', res_dict)
def test_get_servers_with_bad_limit(self):
req = self.req('/fake/servers?limit=asdf')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_with_marker(self):
url = '/v2/fake/servers?marker=%s' % fakes.get_fake_uuid(2)
req = self.req(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
def test_get_servers_with_limit_and_marker(self):
url = ('/v2/fake/servers?limit=2&marker=%s' %
fakes.get_fake_uuid(1))
req = self.req(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
def test_get_servers_with_bad_marker(self):
req = self.req('/fake/servers?limit=2&marker=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_invalid_filter_param(self):
req = self.req('/fake/servers?info_cache=asdf',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
req = self.req('/fake/servers?__foo__=asdf',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_invalid_regex_filter_param(self):
req = self.req('/fake/servers?flavor=[[[',
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_with_empty_regex_filter_param(self):
empty_string = ''
req = self.req('/fake/servers?flavor=%s' % empty_string,
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_detail_with_empty_regex_filter_param(self):
empty_string = ''
req = self.req('/fake/servers/detail?flavor=%s' % empty_string,
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_get_servers_invalid_sort_key(self):
req = self.req('/fake/servers?sort_key=foo&sort_dir=desc')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_get_servers_ignore_sort_key(self):
req = self.req('/fake/servers?sort_key=vcpus&sort_dir=asc')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
cell_down_support=False, all_tenants=False)
def test_get_servers_ignore_sort_key_only_one_dir(self):
req = self.req(
'/fake/servers?sort_key=user_id&sort_key=vcpus&sort_dir=asc')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=['user_id'],
sort_dirs=['asc'], cell_down_support=False, all_tenants=False)
def test_get_servers_ignore_sort_key_with_no_sort_dir(self):
req = self.req('/fake/servers?sort_key=vcpus&sort_key=user_id')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=['user_id'], sort_dirs=[],
cell_down_support=False, all_tenants=False)
def test_get_servers_ignore_sort_key_with_bad_sort_dir(self):
req = self.req('/fake/servers?sort_key=vcpus&sort_dir=bad_dir')
self.controller.index(req)
self.mock_get_all.assert_called_once_with(
mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
cell_down_support=False, all_tenants=False)
def test_get_servers_non_admin_with_admin_only_sort_key(self):
req = self.req('/fake/servers?sort_key=host&sort_dir=desc')
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.index, req)
    def test_get_servers_admin_with_admin_only_sort_key(self):
        """An admin context may sort on admin-only keys like 'node'."""
        req = self.req('/fake/servers?sort_key=node&sort_dir=desc',
                       use_admin_context=True)
        self.controller.detail(req)
        self.mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
            expected_attrs=mock.ANY, sort_keys=['node'], sort_dirs=['desc'],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_with_bad_option(self):
        """Unknown query options are stripped before compute.get_all()."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?unknownoption=whee')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        # 'unknownoption' must not appear in the search_opts passed down.
        self.mock_get_all.assert_called_once_with(
            req.environ['nova.context'], expected_attrs=[],
            limit=1000, marker=None,
            search_opts={'deleted': False, 'project_id': 'fake'},
            sort_dirs=['desc'], sort_keys=['created_at'],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_allows_image(self):
        """The 'image' filter is passed through to compute.get_all()."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('image', search_opts)
            self.assertEqual(search_opts['image'], '12345')
            db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?image=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_tenant_id_filter_no_admin_context(self):
        """For non-admins the tenant_id filter is dropped and the caller's
        own project_id is enforced instead.
        """
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertNotIn('tenant_id', search_opts)
            self.assertEqual(search_opts['project_id'], 'fake')
            return [fakes.stub_instance_obj(100)]
        req = self.req('/fake/servers?tenant_id=newfake')
        self.mock_get_all.side_effect = fake_get_all
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
def test_tenant_id_filter_admin_context(self):
""""Test tenant_id search opt is dropped if all_tenants is not set."""
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertNotIn('tenant_id', search_opts)
self.assertEqual('fake', search_opts['project_id'])
return [fakes.stub_instance_obj(100)]
req = self.req('/fake/servers?tenant_id=newfake',
use_admin_context=True)
self.mock_get_all.side_effect = fake_get_all
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
    def test_all_tenants_param_normal(self):
        """A bare 'all_tenants' flag removes the project_id restriction."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertNotIn('project_id', search_opts)
            return [fakes.stub_instance_obj(100)]
        req = self.req('/fake/servers?all_tenants',
                       use_admin_context=True)
        self.mock_get_all.side_effect = fake_get_all
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
    def test_all_tenants_param_one(self):
        """'all_tenants=1' removes the project_id restriction."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertNotIn('project_id', search_opts)
            return [fakes.stub_instance_obj(100)]
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?all_tenants=1',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
    def test_all_tenants_param_zero(self):
        """'all_tenants=0' is treated as false and not passed down."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertNotIn('all_tenants', search_opts)
            return [fakes.stub_instance_obj(100)]
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?all_tenants=0',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
    def test_all_tenants_param_false(self):
        """'all_tenants=false' is treated as false and not passed down."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertNotIn('all_tenants', search_opts)
            return [fakes.stub_instance_obj(100)]
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?all_tenants=false',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
    def test_all_tenants_param_invalid(self):
        """A non-boolean 'all_tenants' value is a 400 error."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertNotIn('all_tenants', search_opts)
            return [fakes.stub_instance_obj(100)]
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?all_tenants=xxx',
                       use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_admin_restricted_tenant(self):
        """Without all_tenants even an admin is scoped to their project."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertEqual(search_opts['project_id'], 'fake')
            return [fakes.stub_instance_obj(100)]
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
    def test_all_tenants_pass_policy(self):
        """A non-admin may use all_tenants when policy grants it; the
        request context is then elevated to admin.
        """
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertNotIn('project_id', search_opts)
            self.assertTrue(context.is_admin)
            return [fakes.stub_instance_obj(100)]
        self.mock_get_all.side_effect = fake_get_all
        rules = {
            "os_compute_api:servers:index": "project_id:fake",
            "os_compute_api:servers:index:get_all_tenants": "project_id:fake"
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        req = self.req('/fake/servers?all_tenants=1')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
    def test_all_tenants_fail_policy(self):
        """all_tenants is rejected when policy does not grant it."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            return [fakes.stub_instance_obj(100)]
        rules = {
            "os_compute_api:servers:index:get_all_tenants":
                "project_id:non_fake",
            "os_compute_api:servers:get_all": "project_id:fake",
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?all_tenants=1')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.index, req)
    def test_get_servers_allows_flavor(self):
        """The 'flavor' filter is passed through to compute.get_all()."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('flavor', search_opts)
            # flavor is an integer ID
            self.assertEqual(search_opts['flavor'], '12345')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?flavor=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_with_bad_flavor(self):
req = self.req('/fake/servers?flavor=abcde')
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_server_details_with_bad_flavor(self):
req = self.req('/fake/servers?flavor=abcde')
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
servers = self.controller.detail(req)['servers']
self.assertThat(servers, testtools.matchers.HasLength(0))
    def test_get_servers_allows_status(self):
        """The 'status' filter is translated to the matching vm_state."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?status=active')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_allows_task_status(self):
        """A 'status' of reboot expands to the full set of reboot
        task_states.
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('task_state', search_opts)
            self.assertEqual([task_states.REBOOT_PENDING,
                              task_states.REBOOT_STARTED,
                              task_states.REBOOTING],
                             search_opts['task_state'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(
                    100, uuid=uuids.fake, task_state=task_states.REBOOTING)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?status=reboot')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_resize_status(self):
# Test when resize status, it maps list of vm states.
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIn('vm_state', search_opts)
self.assertEqual(search_opts['vm_state'],
[vm_states.ACTIVE, vm_states.STOPPED])
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req('/fake/servers?status=resize')
servers = self.controller.detail(req)['servers']
self.assertEqual(1, len(servers), 1)
self.assertEqual(servers[0]['id'], uuids.fake)
def test_get_servers_invalid_status(self):
# Test getting servers by invalid status.
req = self.req('/fake/servers?status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_deleted_status_as_user(self):
req = self.req('/fake/servers?status=deleted',
use_admin_context=False)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.detail, req)
    def test_get_servers_deleted_status_as_admin(self):
        """Admins may list servers with the 'deleted' pseudo status."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], ['deleted'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?status=deleted',
                       use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_deleted_filter_str_to_bool(self):
        """'deleted=true' is converted to a boolean before get_all()."""
        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=uuids.fake,
                                             vm_state='deleted')])
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = db_list
        req = self.req('/fake/servers?deleted=true',
                       use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        # Assert that 'deleted' filter value is converted to boolean
        # while calling get_all() method.
        expected_search_opts = {'deleted': True, 'project_id': 'fake'}
        self.assertEqual(expected_search_opts,
                         self.mock_get_all.call_args[1]['search_opts'])
    def test_get_servers_deleted_filter_invalid_str(self):
        """An unparsable 'deleted' value is coerced to boolean False."""
        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = db_list
        req = fakes.HTTPRequest.blank('/fake/servers?deleted=abc',
                                      use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        # Assert that invalid 'deleted' filter value is converted to boolean
        # False while calling get_all() method.
        expected_search_opts = {'deleted': False, 'project_id': 'fake'}
        self.assertEqual(expected_search_opts,
                         self.mock_get_all.call_args[1]['search_opts'])
    def test_get_servers_allows_name(self):
        """The 'name' regex filter is passed through unchanged."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertEqual(search_opts['name'], 'whee.*')
            self.assertEqual([], expected_attrs)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?name=whee.*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_flavor_not_found(self):
self.mock_get_all.side_effect = exception.FlavorNotFound(flavor_id=1)
req = fakes.HTTPRequest.blank(
'/fake/servers?status=active&flavor=abc')
servers = self.controller.index(req)['servers']
self.assertEqual(0, len(servers))
    def test_get_servers_allows_changes_since(self):
        """'changes-since' is parsed to an aware datetime and implies that
        deleted servers are included (no 'deleted' search opt).
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        params = 'changes-since=2011-01-24T17:08:01Z'
        req = self.req('/fake/servers?%s' % params)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = self.req('/fake/servers?%s' % params)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_get_servers_allows_changes_since_bad_value_on_compat_mode(self):
params = 'changes-since=asdf'
req = self.req('/fake/servers?%s' % params)
req.set_legacy_v2()
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index,
req)
def test_get_servers_admin_filters_as_user(self):
"""Test getting servers by admin-only or unknown options when
context is not admin. Make sure the admin and unknown options
are stripped before they get to compute_api.get_all()
"""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
# Allowed by user
self.assertIn('name', search_opts)
self.assertIn('ip', search_opts)
# OSAPI converts status to vm_state
self.assertIn('vm_state', search_opts)
# Allowed only by admins with admin API on
self.assertNotIn('unknown_option', search_opts)
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
req = fakes.HTTPRequest.blank('/fake/servers?%s' % query_str)
res = self.controller.index(req)
servers = res['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_options_as_admin(self):
        """Test getting servers by admin-only or unknown options when
        context is admin. All options should be passed
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('terminated_at', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            # Unknown options are stripped even for admins.
            self.assertNotIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        query_str = ("name=foo&ip=10.*&status=active&unknown_option=meow&"
                     "terminated_at=^2016-02-01.*")
        req = self.req('/fake/servers?%s' % query_str,
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_filters_as_user_with_policy_override(self):
        """Test getting servers by admin-only or unknown options when
        context is not admin but policy allows.
        """
        server_uuid = uuids.fake
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('terminated_at', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            # Unknown options are stripped regardless of policy.
            self.assertNotIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        # Grant the all-filters policy to this (non-admin) project.
        rules = {
            "os_compute_api:servers:index": "project_id:fake",
            "os_compute_api:servers:allow_all_filters": "project_id:fake",
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        self.mock_get_all.side_effect = fake_get_all
        query_str = ("name=foo&ip=10.*&status=active&unknown_option=meow&"
                     "terminated_at=^2016-02-01.*")
        req = self.req('/fake/servers?%s' % query_str)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_ip(self):
"""Test getting servers by ip."""
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('ip', search_opts)
self.assertEqual(search_opts['ip'], '10\..*')
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req('/fake/servers?ip=10\..*')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_allows_ip6(self):
        """Test getting servers by ip6 with admin_api enabled and
        admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?ip6=ffff.*',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_allows_ip6_with_new_version(self):
        """Test getting servers by ip6 with new version requested
        and no admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?ip6=ffff.*')
        # Force microversion 2.5, which opened the ip6 filter to non-admins.
        req.api_version_request = api_version_request.APIVersionRequest('2.5')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_allows_access_ip_v4(self):
        """Test getting servers by access_ip_v4 with admin_api enabled and
        admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('access_ip_v4', search_opts)
            self.assertEqual(search_opts['access_ip_v4'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?access_ip_v4=ffff.*',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_allows_access_ip_v6(self):
        """Test getting servers by access_ip_v6 with admin_api enabled and
        admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('access_ip_v6', search_opts)
            self.assertEqual(search_opts['access_ip_v6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers?access_ip_v6=ffff.*',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def _assertServerUsage(self, server, launched_at, terminated_at):
        """Assert a server response entry's OS-SRV-USG launched/terminated
        timestamps match the expected (naive) datetimes.
        """
        resp_launched_at = timeutils.parse_isotime(
            server.get('OS-SRV-USG:launched_at'))
        self.assertEqual(timeutils.normalize_time(resp_launched_at),
                         launched_at)
        resp_terminated_at = timeutils.parse_isotime(
            server.get('OS-SRV-USG:terminated_at'))
        self.assertEqual(timeutils.normalize_time(resp_terminated_at),
                         terminated_at)
    def test_show_server_usage(self):
        """Show includes the OS-SRV-USG usage timestamps."""
        DATE1 = datetime.datetime(year=2013, month=4, day=5, hour=12)
        DATE2 = datetime.datetime(year=2013, month=4, day=5, hour=13)
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=1, uuid=FAKE_UUID, launched_at=DATE1, terminated_at=DATE2)
        fakes.stub_out_secgroup_api(self)
        req = self.req('/fake/servers/%s' % FAKE_UUID)
        req.accept = 'application/json'
        req.method = 'GET'
        res = req.get_response(compute.APIRouterV21())
        self.assertEqual(res.status_int, 200)
        # NOTE(review): the TimeFixture is installed only after the request
        # has already run — presumably it is only needed for the assertions
        # below; confirm the ordering is intentional.
        self.useFixture(utils_fixture.TimeFixture())
        self._assertServerUsage(jsonutils.loads(res.body).get('server'),
                                launched_at=DATE1,
                                terminated_at=DATE2)
    def test_detail_server_usage(self):
        """Detail includes the OS-SRV-USG usage timestamps per server."""
        DATE1 = datetime.datetime(year=2013, month=4, day=5, hour=12)
        DATE2 = datetime.datetime(year=2013, month=4, day=5, hour=13)
        DATE3 = datetime.datetime(year=2013, month=4, day=5, hour=14)
        def fake_compute_get_all(*args, **kwargs):
            # NOTE(review): 'context' here is not a local name — it resolves
            # to a module-level name; confirm this is intended.
            db_list = [
                fakes.stub_instance_obj(context, id=2, uuid=FAKE_UUID,
                                        launched_at=DATE2,
                                        terminated_at=DATE3),
                fakes.stub_instance_obj(context, id=3, uuid=FAKE_UUID,
                                        launched_at=DATE1,
                                        terminated_at=DATE3),
            ]
            return objects.InstanceList(objects=db_list)
        self.mock_get_all.side_effect = fake_compute_get_all
        fakes.stub_out_secgroup_api(self)
        req = self.req('/fake/servers/detail')
        req.accept = 'application/json'
        servers = req.get_response(compute.APIRouterV21())
        self.assertEqual(servers.status_int, 200)
        self._assertServerUsage(jsonutils.loads(
                                    servers.body).get('servers')[0],
                                launched_at=DATE2,
                                terminated_at=DATE3)
        self._assertServerUsage(jsonutils.loads(
                                    servers.body).get('servers')[1],
                                launched_at=DATE1,
                                terminated_at=DATE3)
    def test_get_all_server_details(self):
        """Every server in detail carries the expected common fields."""
        expected_flavor = {
            "id": "2",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/fake/flavors/2',
                },
            ],
        }
        expected_image = {
            "id": "10",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/fake/images/10',
                },
            ],
        }
        req = self.req('/fake/servers/detail')
        res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], '')
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertEqual(s['image'], expected_image)
            self.assertEqual(s['flavor'], expected_flavor)
            self.assertEqual(s['status'], 'ACTIVE')
            self.assertEqual(s['metadata']['seq'], str(i + 1))
    def test_get_all_server_details_with_host(self):
        """We want to make sure that if two instances are on the same host,
        then they return the same hostId. If two instances are on different
        hosts, they should return different hostIds. In this test,
        there are 5 instances - 2 on one host and 3 on another.
        """
        def return_servers_with_host(*args, **kwargs):
            # host=i % 2 alternates servers between two "hosts"; only the
            # distinctness of the value matters for the hostId check below.
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(None,
                                                 id=i + 1,
                                                 user_id='fake',
                                                 project_id='fake',
                                                 host=i % 2,
                                                 uuid=fakes.get_fake_uuid(i))
                         for i in range(5)])
        self.mock_get_all.side_effect = return_servers_with_host
        req = self.req('/fake/servers/detail')
        res_dict = self.controller.detail(req)
        server_list = res_dict['servers']
        host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
        self.assertTrue(host_ids[0] and host_ids[1])
        self.assertNotEqual(host_ids[0], host_ids[1])
        for i, s in enumerate(server_list):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], host_ids[i % 2])
            self.assertEqual(s['name'], 'server%d' % (i + 1))
    def test_get_servers_joins_services(self):
        """expected_attrs includes 'services' only at microversion >= 2.16."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            cur = api_version_request.APIVersionRequest(self.wsgi_api_version)
            v216 = api_version_request.APIVersionRequest('2.16')
            if cur >= v216:
                self.assertIn('services', expected_attrs)
            else:
                self.assertNotIn('services', expected_attrs)
            return objects.InstanceList()
        self.mock_get_all.side_effect = fake_get_all
        req = self.req('/fake/servers/detail', use_admin_context=True)
        self.assertIn('servers', self.controller.detail(req))
        # Repeat with an explicitly versioned request object.
        req = fakes.HTTPRequest.blank('/fake/servers/detail',
                                      use_admin_context=True,
                                      version=self.wsgi_api_version)
        self.assertIn('servers', self.controller.detail(req))
class ServersControllerTestV23(ServersControllerTest):
    """Tests for the servers API at microversion 2.3.

    2.3 exposes the extended server attributes (OS-EXT-SRV-ATTR:*,
    OS-EXT-STS:*, os-extended-volumes) in show/detail responses.
    """
    wsgi_api_version = '2.3'

    def setUp(self):
        super(ServersControllerTestV23, self).setUp()
        # Stub the single-server lookup with a fully populated instance so
        # the extended attributes have concrete values to render.
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            security_groups=[
                {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None},
                {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None}],
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1)

    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        """Extend the base expected-response dict with the 2.3 fields."""
        server_dict = super(ServersControllerTestV23,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
        server_dict['server'][
            "OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
        server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
        server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
        server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
        server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
        server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
        server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
        server_dict['server']["OS-EXT-STS:task_state"] = None
        server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
        server_dict['server']["OS-EXT-STS:power_state"] = 1
        server_dict['server']["os-extended-volumes:volumes_attached"] = [
            {'id': 'some_volume_1', 'delete_on_termination': True},
            {'id': 'some_volume_2', 'delete_on_termination': False}]
        return server_dict

    def test_show(self):
        """Show includes the extended attributes at 2.3."""
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        req = self.req('/fake/servers/%s' % FAKE_UUID)
        res_dict = self.controller.show(req, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))

    def test_detail(self):
        """Detail includes the extended attributes at 2.3."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            obj_list = []
            for i in range(2):
                server = fakes.stub_instance_obj(context,
                              id=2, uuid=FAKE_UUID,
                              node="node-fake",
                              reservation_id="r-1", launch_index=0,
                              kernel_id=UUID1, ramdisk_id=UUID2,
                              display_name="server2",
                              root_device_name="/dev/vda",
                              user_data="userdata",
                              metadata={"seq": "2"},
                              availability_zone='nova',
                              launched_at=None,
                              terminated_at=None,
                              security_groups=[
                                  {'name': 'fake-0-0', 'id': 1,
                                   'description': 'foo',
                                   'user_id': 'bar', 'project_id': 'baz',
                                   'deleted': False, 'deleted_at': None,
                                   'updated_at': None, 'created_at': None},
                                  {'name': 'fake-0-1', 'id': 1,
                                   'description': 'foo',
                                   'user_id': 'bar', 'project_id': 'baz',
                                   'deleted': False, 'deleted_at': None,
                                   'updated_at': None, 'created_at': None}],
                              task_state=None,
                              vm_state=vm_states.ACTIVE,
                              power_state=1)
                obj_list.append(server)
            return objects.InstanceList(objects=obj_list)
        self.mock_get_all.side_effect = None
        # NOTE(review): fake_get_all is invoked directly here with the
        # module-level 'context' name as its context argument — confirm
        # this is intended rather than a request context.
        self.mock_get_all.return_value = fake_get_all(context)
        req = self.req('/fake/servers/detail')
        servers_list = self.controller.detail(req)
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertIn(expected_server['server'], servers_list['servers'])
class ServersControllerTestV29(ServersControllerTest):
    """Tests for the servers API at microversion 2.9, which adds the
    'locked' field to show/detail responses.
    """
    wsgi_api_version = '2.9'
    def setUp(self):
        super(ServersControllerTestV29, self).setUp()
        # Stub the single-server lookup with a fully populated instance so
        # the 2.9 response fields have concrete values to render.
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            security_groups=[
                {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None},
                {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None}],
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1)
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        """Extend the base expected-response dict with the 2.9 'locked'
        field and the extended server attributes.
        """
        server_dict = super(ServersControllerTestV29,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']['locked'] = False
        server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
        server_dict['server'][
            "OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
        server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
        server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
        server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
        server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
        server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
        server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
        server_dict['server']["OS-EXT-STS:task_state"] = None
        server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
        server_dict['server']["OS-EXT-STS:power_state"] = 1
        server_dict['server']["os-extended-volumes:volumes_attached"] = [
            {'id': 'some_volume_1', 'delete_on_termination': True},
            {'id': 'some_volume_2', 'delete_on_termination': False}]
        return server_dict
def _test_get_server_with_lock(self, locked_by):
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/2"
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, locked_by=locked_by, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
security_groups=[
{'name': 'fake-0-0', 'id': 1, 'description': 'foo',
'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
'deleted_at': None, 'updated_at': None, 'created_at': None},
{'name': 'fake-0-1', 'id': 1, 'description': 'foo',
'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
'deleted_at': None, 'updated_at': None, 'created_at': None}],
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
req = self.req('/fake/servers/%s' % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
expected_server['server']['locked'] = True if locked_by else False
self.assertThat(res_dict, matchers.DictMatches(expected_server))
return res_dict
def test_get_server_with_locked_by_admin(self):
res_dict = self._test_get_server_with_lock('admin')
self.assertTrue(res_dict['server']['locked'])
def test_get_server_with_locked_by_owner(self):
res_dict = self._test_get_server_with_lock('owner')
self.assertTrue(res_dict['server']['locked'])
def test_get_server_not_locked(self):
res_dict = self._test_get_server_with_lock(None)
self.assertFalse(res_dict['server']['locked'])
def _test_list_server_detail_with_lock(self,
s1_locked,
s2_locked):
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = fake_instance_get_all_with_locked(
context, [s1_locked, s2_locked],
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
security_groups=[
{'name': 'fake-0-0', 'id': 1, 'description': 'foo',
'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
'deleted_at': None, 'updated_at': None, 'created_at': None},
{'name': 'fake-0-1', 'id': 1, 'description': 'foo',
'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
'deleted_at': None, 'updated_at': None, 'created_at': None}],
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
req = self.req('/fake/servers/detail')
servers_list = self.controller.detail(req)
# Check that each returned server has the same 'locked' value
# and 'id' as they were created.
for locked in [s1_locked, s2_locked]:
server = next(server for server in servers_list['servers']
if (server['id'] == fakes.get_fake_uuid(locked)))
expected = False if locked == 'not_locked' else True
self.assertEqual(expected, server['locked'])
def test_list_server_detail_with_locked_s1_admin_s2_owner(self):
self._test_list_server_detail_with_lock('admin', 'owner')
def test_list_server_detail_with_locked_s1_owner_s2_admin(self):
self._test_list_server_detail_with_lock('owner', 'admin')
def test_list_server_detail_with_locked_s1_admin_s2_admin(self):
self._test_list_server_detail_with_lock('admin', 'admin')
def test_list_server_detail_with_locked_s1_admin_s2_not_locked(self):
self._test_list_server_detail_with_lock('admin', 'not_locked')
def test_list_server_detail_with_locked_s1_s2_not_locked(self):
self._test_list_server_detail_with_lock('not_locked',
'not_locked')
def test_get_servers_remove_non_search_options(self):
self.mock_get_all.side_effect = None
req = fakes.HTTPRequestV21.blank('/servers'
'?sort_key=uuid&sort_dir=asc'
'&sort_key=user_id&sort_dir=desc'
'&limit=1&marker=123',
use_admin_context=True)
self.controller.index(req)
kwargs = self.mock_get_all.call_args[1]
search_opts = kwargs['search_opts']
for key in ('sort_key', 'sort_dir', 'limit', 'marker'):
self.assertNotIn(key, search_opts)
class ServersControllerTestV216(ServersControllerTest):
    """Tests for the servers API at microversion 2.16.

    v2.16 adds the ``host_status`` field to server show/detail
    responses; ``get_instance_host_status`` is stubbed to report 'UP'.
    """
    wsgi_api_version = '2.16'
    def setUp(self):
        super(ServersControllerTestV216, self).setUp()
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            security_groups=[
                {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None},
                {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None}],
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1)
        # The 2.16 view builder queries the compute API for the host
        # status; always report 'UP' here.
        self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get_instance_host_status',
            return_value='UP')).mock
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        """Extend the base expected-response dict with the v2.16 fields."""
        server_dict = super(ServersControllerTestV216,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']['locked'] = False
        server_dict['server']["host_status"] = "UP"
        server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
        server_dict['server'][
            "OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
        server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
        server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
        server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
        server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
        server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
        server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
        server_dict['server']["OS-EXT-STS:task_state"] = None
        server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
        server_dict['server']["OS-EXT-STS:power_state"] = 1
        server_dict['server']["os-extended-volumes:volumes_attached"] = [
            {'id': 'some_volume_1', 'delete_on_termination': True},
            {'id': 'some_volume_2', 'delete_on_termination': False}]
        return server_dict
    def test_show(self):
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        req = self.req('/fake/servers/%s' % FAKE_UUID)
        res_dict = self.controller.show(req, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_detail(self):
        # Build a two-instance listing; both instances are identical, so
        # the expected dict only needs to appear once in the output.
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            obj_list = []
            for i in range(2):
                server = fakes.stub_instance_obj(context,
                              id=2, uuid=FAKE_UUID,
                              node="node-fake",
                              reservation_id="r-1", launch_index=0,
                              kernel_id=UUID1, ramdisk_id=UUID2,
                              display_name="server2",
                              root_device_name="/dev/vda",
                              user_data="userdata",
                              metadata={"seq": "2"},
                              availability_zone='nova',
                              launched_at=None,
                              terminated_at=None,
                              security_groups=[
                                  {'name': 'fake-0-0', 'id': 1,
                                   'description': 'foo',
                                   'user_id': 'bar', 'project_id': 'baz',
                                   'deleted': False, 'deleted_at': None,
                                   'updated_at': None, 'created_at': None},
                                  {'name': 'fake-0-1', 'id': 1,
                                   'description': 'foo',
                                   'user_id': 'bar', 'project_id': 'baz',
                                   'deleted': False, 'deleted_at': None,
                                   'updated_at': None, 'created_at': None}],
                              task_state=None,
                              vm_state=vm_states.ACTIVE,
                              power_state=1)
                obj_list.append(server)
            return objects.InstanceList(objects=obj_list)
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = fake_get_all(context)
        req = self.req('/fake/servers/detail')
        servers_list = self.controller.detail(req)
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertIn(expected_server['server'], servers_list['servers'])
class ServersControllerTestV219(ServersControllerTest):
    """Tests for the servers API at microversion 2.19.

    v2.19 adds the user-settable ``description`` field to server
    show/detail responses.
    """
    wsgi_api_version = '2.19'
    def setUp(self):
        super(ServersControllerTestV219, self).setUp()
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            security_groups=[
                {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None},
                {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None}],
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1)
        # The view builder queries the compute API for the host status;
        # always report 'UP' here.
        self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get_instance_host_status',
            return_value='UP')).mock
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100, description=None):
        """Extend the base expected-response dict with the v2.19 fields."""
        server_dict = super(ServersControllerTestV219,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']['locked'] = False
        server_dict['server']['description'] = description
        server_dict['server']["host_status"] = "UP"
        server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
        server_dict['server'][
            "OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
        server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
        server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
        server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
        server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
        server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
        server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
        server_dict['server']["OS-EXT-STS:task_state"] = None
        server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
        server_dict['server']["OS-EXT-STS:power_state"] = 1
        server_dict['server']["os-extended-volumes:volumes_attached"] = [
            {'id': 'some_volume_1', 'delete_on_termination': True},
            {'id': 'some_volume_2', 'delete_on_termination': False}]
        return server_dict
    def _test_get_server_with_description(self, description):
        """Show a server carrying *description* and verify the response."""
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, display_description=description, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            security_groups=[
                {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None},
                {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None}],
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1)
        req = self.req('/fake/servers/%s' % FAKE_UUID)
        res_dict = self.controller.show(req, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0,
                                                     description=description)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
        return res_dict
    def _test_list_server_detail_with_descriptions(self,
                                                   s1_desc,
                                                   s2_desc):
        """List two servers with the given descriptions and verify them."""
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = (
            fake_instance_get_all_with_description(context,
                                                   [s1_desc, s2_desc],
                                                   launched_at=None,
                                                   terminated_at=None))
        req = self.req('/fake/servers/detail')
        servers_list = self.controller.detail(req)
        # Check that each returned server has the same 'description' value
        # and 'id' as they were created.
        for desc in [s1_desc, s2_desc]:
            server = next(server for server in servers_list['servers']
                          if (server['id'] == fakes.get_fake_uuid(desc)))
            expected = desc
            self.assertEqual(expected, server['description'])
    def test_get_server_with_description(self):
        self._test_get_server_with_description('test desc')
    def test_list_server_detail_with_descriptions(self):
        self._test_list_server_detail_with_descriptions('desc1', 'desc2')
class ServersControllerTestV226(ControllerTest):
    """Tests for microversion 2.26, which exposes server tags in the
    show response and adds tag-based filters to the index API.
    """
    wsgi_api_version = '2.26'

    def test_get_server_with_tags_by_id(self):
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID,
                                      version=self.wsgi_api_version)
        ctxt = req.environ['nova.context']
        tags = ['tag1', 'tag2']

        def fake_get(*args, **kwargs):
            # The controller must request that tags be loaded.
            self.assertIn('tags', kwargs['expected_attrs'])
            instance = fakes.stub_instance_obj(
                ctxt, id=2, vm_state=vm_states.ACTIVE, progress=100)
            instance.tags = objects.TagList(objects=[
                objects.Tag(resource_id=FAKE_UUID, tag=t) for t in tags])
            return instance

        self.mock_get.side_effect = fake_get

        res_dict = self.controller.show(req, FAKE_UUID)

        self.assertIn('tags', res_dict['server'])
        self.assertEqual(tags, res_dict['server']['tags'])

    def _test_get_servers_allows_tag_filters(self, filter_name):
        """Verify *filter_name* is forwarded to compute, split on commas."""
        req = fakes.HTTPRequest.blank('/fake/servers?%s=t1,t2' % filter_name,
                                      version=self.wsgi_api_version)

        def fake_get_all(*args, **kwargs):
            search_opts = kwargs['search_opts']
            self.assertIsNotNone(search_opts)
            self.assertIn(filter_name, search_opts)
            self.assertEqual(search_opts[filter_name], ['t1', 't2'])
            ctxt = req.environ['nova.context']
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(ctxt, uuid=uuids.fake)])

        self.mock_get_all.side_effect = fake_get_all

        servers = self.controller.index(req)['servers']

        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])

    def test_get_servers_allows_tags_filter(self):
        self._test_get_servers_allows_tag_filters('tags')

    def test_get_servers_allows_tags_any_filter(self):
        self._test_get_servers_allows_tag_filters('tags-any')

    def test_get_servers_allows_not_tags_filter(self):
        self._test_get_servers_allows_tag_filters('not-tags')

    def test_get_servers_allows_not_tags_any_filter(self):
        self._test_get_servers_allows_tag_filters('not-tags-any')
class ServerControllerTestV238(ControllerTest):
    """Tests for microversion 2.38: an invalid ``status`` filter must be
    rejected with a 400 for admin and non-admin callers alike.
    """
    wsgi_api_version = '2.38'

    def _test_invalid_status(self, is_admin):
        req = fakes.HTTPRequest.blank('/fake/servers/detail?status=invalid',
                                      version=self.wsgi_api_version,
                                      use_admin_context=is_admin)
        with self.assertRaises(webob.exc.HTTPBadRequest):
            self.controller.detail(req)

    def test_list_servers_detail_invalid_status_for_admin(self):
        self._test_invalid_status(True)

    def test_list_servers_detail_invalid_status_for_non_admin(self):
        self._test_invalid_status(False)
class ServerControllerTestV247(ControllerTest):
    """Server controller test for microversion 2.47
    The intent here is simply to verify that when showing server details
    after microversion 2.47 that the flavor is shown as a dict of flavor
    information rather than as dict of id/links. The existence of the
    'extra_specs' key is controlled by policy.
    """
    wsgi_api_version = '2.47'
    @mock.patch.object(objects.TagList, 'get_by_resource_id')
    def test_get_all_server_details(self, mock_get_by_resource_id):
        # Fake out tags on the instances
        mock_get_by_resource_id.return_value = objects.TagList()
        expected_flavor = {
            'disk': 20,
            'ephemeral': 0,
            'extra_specs': {},
            'original_name': u'm1.small',
            'ram': 2048,
            'swap': 0,
            'vcpus': 1}
        req = fakes.HTTPRequest.blank('/fake/servers/detail',
                                      version=self.wsgi_api_version)
        hits = []
        real_auth = policy.authorize
        # Wrapper for authorize to count the number of times
        # we authorize for extra-specs
        def fake_auth(context, action, target):
            if 'extra-specs' in action:
                hits.append(1)
            return real_auth(context, action, target)
        with mock.patch('nova.policy.authorize') as mock_auth:
            mock_auth.side_effect = fake_auth
            res_dict = self.controller.detail(req)
        # We should have found more than one servers, but only hit the
        # policy check once
        self.assertGreater(len(res_dict['servers']), 1)
        self.assertEqual(1, len(hits))
        # Every server in the listing must expose the flattened flavor,
        # including 'extra_specs' since policy allowed it.
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['flavor'], expected_flavor)
    @mock.patch.object(objects.TagList, 'get_by_resource_id')
    def test_get_all_server_details_no_extra_spec(self,
                                                  mock_get_by_resource_id):
        # Fake out tags on the instances
        mock_get_by_resource_id.return_value = objects.TagList()
        # Set the policy so we don't have permission to index
        # flavor extra-specs but are able to get server details.
        servers_rule = 'os_compute_api:servers:detail'
        extraspec_rule = 'os_compute_api:os-flavor-extra-specs:index'
        self.policy.set_rules({
            extraspec_rule: 'rule:admin_api',
            servers_rule: '@'})
        # Note: no 'extra_specs' key this time, since policy denies it.
        expected_flavor = {
            'disk': 20,
            'ephemeral': 0,
            'original_name': u'm1.small',
            'ram': 2048,
            'swap': 0,
            'vcpus': 1}
        req = fakes.HTTPRequest.blank('/fake/servers/detail',
                                      version=self.wsgi_api_version)
        res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['flavor'], expected_flavor)
class ServerControllerTestV266(ControllerTest):
    """Server controller test for microversion 2.66
    Add changes-before parameter to get servers or servers details of
    2.66 microversion.
    Filters the response by a date and time stamp when the server last
    changed. Those changed before the specified date and time stamp are
    returned.
    """
    wsgi_api_version = '2.66'
    def req(self, url, use_admin_context=False):
        # Build a request pinned to this class's microversion.
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.wsgi_api_version)
    def test_get_servers_allows_changes_before(self):
        # The fake signature mirrors compute_api.API.get_all so the
        # controller's keyword call resolves correctly.
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-before', search_opts)
            changes_before = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                               tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-before'], changes_before)
            # changes-before must not implicitly add a 'deleted' filter.
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        params = 'changes-before=2011-01-24T17:08:01Z'
        req = self.req('/fake/servers?%s' % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_allows_changes_before_bad_value(self):
        # A malformed timestamp fails schema validation at 2.66.
        params = 'changes-before=asdf'
        req = self.req('/fake/servers?%s' % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        self.assertRaises(exception.ValidationError, self.controller.index,
                          req)
    def test_get_servers_allows_changes_before_bad_value_on_compat_mode(self):
        # In legacy v2 compat mode the validation error surfaces as a 400.
        params = 'changes-before=asdf'
        req = self.req('/fake/servers?%s' % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        req.set_legacy_v2()
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
    def test_get_servers_allows_changes_since_and_changes_before(self):
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 23, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertIn('changes-before', search_opts)
            changes_before = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                               tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertEqual(search_opts['changes-before'], changes_before)
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        params = 'changes-since=2011-01-23T17:08:01Z&' \
                 'changes-before=2011-01-24T17:08:01Z'
        req = self.req('/fake/servers?%s' % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_filters_with_distinct_changes_time_bad_request(self):
        # changes-since later than changes-before is an empty window and
        # must be rejected with a 400.
        changes_since = '2018-09-04T05:45:27Z'
        changes_before = '2018-09-03T05:45:27Z'
        req = self.req('/fake/servers?changes-since=%s&changes-before=%s' %
                       (changes_since, changes_before))
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
class ServersControllerTestV271(ControllerTest):
    """Tests for microversion 2.71, which adds ``server_groups`` to the
    server show response.
    """
    wsgi_api_version = '2.71'

    def req(self, url, use_admin_context=False):
        # Build a request pinned to this class's microversion.
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.wsgi_api_version)

    def test_show_server_group_not_exist(self):
        # A server that is a member of no group reports an empty list.
        request = self.req('/fake/servers/%s' % FAKE_UUID)
        body = self.controller.show(request, FAKE_UUID)
        self.assertEqual([], body['server']['server_groups'])
class ServersControllerDeleteTest(ControllerTest):
    """Tests for the DELETE /servers/{id} API."""
    def setUp(self):
        super(ServersControllerDeleteTest, self).setUp()
        # Tracks whether compute_api.API.delete was actually invoked.
        self.server_delete_called = False
        def fake_delete(api, context, instance):
            if instance.uuid == uuids.non_existent_uuid:
                raise exception.InstanceNotFound(instance_id=instance.uuid)
            self.server_delete_called = True
        self.stub_out('nova.compute.api.API.delete', fake_delete)
    def _create_delete_request(self, uuid):
        """Return a DELETE request for *uuid* with matching get fixture."""
        fakes.stub_out_instance_quota(self, 0, 10)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % uuid)
        req.method = 'DELETE'
        fake_get = fakes.fake_compute_get(
            uuid=uuid,
            vm_state=vm_states.ACTIVE,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        return req
    def _delete_server_instance(self, uuid=FAKE_UUID):
        req = self._create_delete_request(uuid)
        self.controller.delete(req, uuid)
    def test_delete_server_instance(self):
        self._delete_server_instance()
        self.assertTrue(self.server_delete_called)
    def test_delete_server_instance_not_found(self):
        # InstanceNotFound from the compute layer maps to a 404.
        self.assertRaises(webob.exc.HTTPNotFound,
                          self._delete_server_instance,
                          uuid=uuids.non_existent_uuid)
    def test_delete_server_instance_while_building(self):
        req = self._create_delete_request(FAKE_UUID)
        self.controller.delete(req, FAKE_UUID)
        self.assertTrue(self.server_delete_called)
    @mock.patch.object(compute_api.API, 'delete',
                       side_effect=exception.InstanceIsLocked(
                           instance_uuid=FAKE_UUID))
    def test_delete_locked_server(self, mock_delete):
        # Deleting a locked server maps InstanceIsLocked to a 409.
        req = self._create_delete_request(FAKE_UUID)
        self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
                          req, FAKE_UUID)
        mock_delete.assert_called_once_with(
            req.environ['nova.context'], test.MatchType(objects.Instance))
    def test_delete_server_instance_while_resize(self):
        # A delete during RESIZE_PREP should still succeed.
        req = self._create_delete_request(FAKE_UUID)
        fake_get = fakes.fake_compute_get(
            vm_state=vm_states.ACTIVE,
            task_state=task_states.RESIZE_PREP,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        self.controller.delete(req, FAKE_UUID)
    def test_delete_server_instance_if_not_launched(self):
        self.flags(reclaim_instance_interval=3600)
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'DELETE'
        self.server_delete_called = False
        # launched_at=None simulates an instance that never became active.
        fake_get = fakes.fake_compute_get(
            launched_at=None,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        def instance_destroy_mock(*args, **kwargs):
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stub_out('nova.db.api.instance_destroy', instance_destroy_mock)
        self.controller.delete(req, FAKE_UUID)
        # delete() should be called for instance which has never been active,
        # even if reclaim_instance_interval has been set.
        self.assertTrue(self.server_delete_called)
class ServersControllerRebuildInstanceTest(ControllerTest):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
expected_key_name = False
    def setUp(self):
        """Prepare a POST action request and stub instance lookups."""
        super(ServersControllerRebuildInstanceTest, self).setUp()
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        self.req_user_id = self.req.environ['nova.context'].user_id
        self.req_project_id = self.req.environ['nova.context'].project_id
        self.useFixture(nova_fixtures.SingleCellSimple())
        def fake_get(ctrl, ctxt, uuid):
            # 'test_inst' simulates a missing instance; everything else
            # resolves to an ACTIVE stub owned by the request's context.
            if uuid == 'test_inst':
                raise webob.exc.HTTPNotFound(explanation='fakeout')
            return fakes.stub_instance_obj(None,
                                           vm_state=vm_states.ACTIVE,
                                           project_id=self.req_project_id,
                                           user_id=self.req_user_id)
        self.useFixture(
            fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
                                 'ServersController._get_instance',
                                 fake_get))
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        # Baseline rebuild request body; individual tests mutate it.
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_uuid,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
def test_rebuild_server_with_image_not_uuid(self):
self.body['rebuild']['imageRef'] = 'not-uuid'
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID,
body=self.body)
def test_rebuild_server_with_image_as_full_url(self):
image_href = ('http://localhost/v2/fake/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
self.body['rebuild']['imageRef'] = image_href
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID,
body=self.body)
def test_rebuild_server_with_image_as_empty_string(self):
self.body['rebuild']['imageRef'] = ''
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID,
body=self.body)
def test_rebuild_instance_name_with_spaces_in_the_middle(self):
self.body['rebuild']['name'] = 'abc def'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller._action_rebuild(self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_name_with_leading_trailing_spaces(self):
self.body['rebuild']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_name_with_leading_trailing_spaces_compat_mode(
self):
self.body['rebuild']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.set_legacy_v2()
def fake_rebuild(*args, **kwargs):
self.assertEqual('abc def', kwargs['display_name'])
with mock.patch.object(compute_api.API, 'rebuild') as mock_rebuild:
mock_rebuild.side_effect = fake_rebuild
self.controller._action_rebuild(self.req, FAKE_UUID,
body=self.body)
def test_rebuild_instance_with_blank_metadata_key(self):
self.body['rebuild']['metadata'][''] = 'world'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_with_metadata_key_too_long(self):
self.body['rebuild']['metadata'][('a' * 260)] = 'world'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_instance_with_metadata_value_too_long(self):
self.body['rebuild']['metadata']['key1'] = ('a' * 260)
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=self.body)
def test_rebuild_instance_with_metadata_value_not_string(self):
self.body['rebuild']['metadata']['key1'] = 1
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=self.body)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="4096", min_disk="10"))
def test_rebuild_instance_fails_when_min_ram_too_small(self, mock_show):
# make min_ram larger than our instance ram size
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="128", min_disk="100000"))
def test_rebuild_instance_fails_when_min_disk_too_small(self, mock_show):
# make min_disk larger than our instance disk size
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', size=str(1000 * (1024 ** 3))))
def test_rebuild_instance_image_too_large(self, mock_show):
# make image size larger than our instance disk size
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
def test_rebuild_instance_name_all_blank(self):
self.body['rebuild']['name'] = ' '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
@mock.patch.object(fake._FakeImageService, 'show',
return_value=dict(
id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='DELETED'))
def test_rebuild_instance_with_deleted_image(self, mock_show):
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
mock_show.assert_called_once_with(
self.req.environ['nova.context'], self.image_uuid,
include_locations=False, show_deleted=True)
def test_rebuild_instance_onset_file_limit_over_quota(self):
def fake_get_image(self, context, image_href, **kwargs):
return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True, status='active')
with test.nested(
mock.patch.object(fake._FakeImageService, 'show',
side_effect=fake_get_image),
mock.patch.object(self.controller.compute_api, 'rebuild',
side_effect=exception.OnsetFileLimitExceeded)
) as (
show_mock, rebuild_mock
):
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
def test_rebuild_bad_personality(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
body = {
"rebuild": {
"imageRef": self.image_uuid,
"personality": [{
"path": "/path/to/file",
"contents": "INVALID b64",
}]
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_personality(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
body = {
"rebuild": {
"imageRef": self.image_uuid,
"personality": [{
"path": "/path/to/file",
"contents": base64.encode_as_text("Test String"),
}]
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertNotIn('personality', body['server'])
def test_rebuild_response_has_no_show_server_only_attributes(self):
# There are some old server attributes which were added only for
# GET server APIs not for rebuild. GET server and Rebuild share the
# same view builder method SHOW() to build the response, So make sure
# attributes which are not supposed to be included for Rebuild
# response are not present.
body = {
"rebuild": {
"imageRef": self.image_uuid,
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
get_only_fields = ['OS-EXT-AZ:availability_zone', 'config_drive',
'OS-EXT-SRV-ATTR:host',
'OS-EXT-SRV-ATTR:hypervisor_hostname',
'OS-EXT-SRV-ATTR:instance_name',
'OS-EXT-SRV-ATTR:hostname'
'OS-EXT-SRV-ATTR:kernel_id',
'OS-EXT-SRV-ATTR:launch_index',
'OS-EXT-SRV-ATTR:ramdisk_id',
'OS-EXT-SRV-ATTR:reservation_id',
'OS-EXT-SRV-ATTR:root_device_name',
'OS-EXT-SRV-ATTR:user_data', 'host_status',
'OS-SRV-USG:launched_at',
'OS-SRV-USG:terminated_at']
if not self.expected_key_name:
get_only_fields.append('key_name')
for field in get_only_fields:
self.assertNotIn(field, body['server'])
@mock.patch.object(compute_api.API, 'start')
def test_start(self, mock_start):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(start="")
self.controller._start_server(req, FAKE_UUID, body)
mock_start.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(compute_api.API, 'start', fake_start_stop_not_ready)
def test_start_not_ready(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, req, FAKE_UUID, body)
@mock.patch.object(
compute_api.API, 'start', fakes.fake_actions_to_locked_server)
def test_start_locked_server(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, req, FAKE_UUID, body)
@mock.patch.object(compute_api.API, 'start', fake_start_stop_invalid_state)
def test_start_invalid(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, req, FAKE_UUID, body)
@mock.patch.object(compute_api.API, 'stop')
def test_stop(self, mock_stop):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(stop="")
self.controller._stop_server(req, FAKE_UUID, body)
mock_stop.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(compute_api.API, 'stop', fake_start_stop_not_ready)
def test_stop_not_ready(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(stop="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, req, FAKE_UUID, body)
@mock.patch.object(
compute_api.API, 'stop', fakes.fake_actions_to_locked_server)
def test_stop_locked_server(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(stop="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, req, FAKE_UUID, body)
@mock.patch.object(compute_api.API, 'stop', fake_start_stop_invalid_state)
def test_stop_invalid_state(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, req, FAKE_UUID, body)
@mock.patch(
'nova.db.api.instance_get_by_uuid',
fake_instance_get_by_uuid_not_found)
def test_start_with_bogus_id(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/test_inst/action')
body = dict(start="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._start_server, req, 'test_inst', body)
@mock.patch(
'nova.db.api.instance_get_by_uuid',
fake_instance_get_by_uuid_not_found)
def test_stop_with_bogus_id(self):
req = fakes.HTTPRequestV21.blank('/fake/servers/test_inst/action')
body = dict(stop="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._stop_server, req, 'test_inst', body)
class ServersControllerRebuildTestV254(ServersControllerRebuildInstanceTest):
    """Tests server rebuild at microversion 2.54, which added support for
    specifying ``key_name`` in the rebuild request body.
    """

    # 2.54 rebuild responses include key_name, so the inherited
    # response-field test should expect it to be present.
    expected_key_name = True

    def setUp(self):
        super(ServersControllerRebuildTestV254, self).setUp()
        fakes.stub_out_key_pair_funcs(self)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.54')

    def _test_set_key_name_rebuild(self, set_key_name=True):
        """Rebuild and assert the response reports the expected keypair.

        :param set_key_name: when True, send ``key_name`` in the rebuild
            request body; otherwise the instance's existing key is kept.
        """
        key_name = "key"
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=key_name,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        if set_key_name:
            self.body['rebuild']['key_name'] = key_name
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID,
            body=self.body).obj['server']
        self.assertEqual(server['id'], FAKE_UUID)
        self.assertEqual(server['key_name'], key_name)

    def test_rebuild_accepted_with_keypair_name(self):
        self._test_set_key_name_rebuild()

    def test_rebuild_key_not_changed(self):
        self._test_set_key_name_rebuild(set_key_name=False)

    def test_rebuild_invalid_microversion_253(self):
        """key_name is rejected by schema validation before 2.54."""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.53')
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "key"
            },
        }
        excpt = self.assertRaises(exception.ValidationError,
                                  self.controller._action_rebuild,
                                  self.req, FAKE_UUID, body=body)
        self.assertIn('key_name', six.text_type(excpt))

    def test_rebuild_with_not_existed_keypair_name(self):
        """A keypair name unknown to the user maps to HTTPBadRequest."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "nonexistentkey"
            },
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)

    def test_rebuild_user_has_no_key_pair(self):
        """KeypairNotFound during the lookup maps to HTTPBadRequest."""
        def no_key_pair(context, user_id, name):
            raise exception.KeypairNotFound(user_id=user_id, name=name)
        self.stub_out('nova.db.api.key_pair_get', no_key_pair)
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=None,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        self.body['rebuild']['key_name'] = "a-key-name"
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_with_non_string_keypair_name(self):
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": 12345
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)

    def test_rebuild_with_invalid_keypair_name(self):
        # Keypair name containing a NUL byte is rejected.
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "123\0d456"
            },
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)

    def test_rebuild_with_empty_keypair_name(self):
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": ''
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)

    def test_rebuild_with_none_keypair_name(self):
        """key_name=None is accepted and performs no keypair lookup."""
        key_name = None
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=key_name,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        with mock.patch.object(objects.KeyPair, 'get_by_name') as key_get:
            self.body['rebuild']['key_name'] = key_name
            self.req.body = jsonutils.dump_as_bytes(self.body)
            self.controller._action_rebuild(
                self.req, FAKE_UUID,
                body=self.body)
            # NOTE: because the api will call _get_server twice. The server
            # response will always be the same one. So we just use
            # objects.KeyPair.get_by_name to verify test.
            key_get.assert_not_called()

    def test_rebuild_with_too_large_keypair_name(self):
        # A 256-character name exceeds the schema's maximum length.
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": 256 * "k"
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
class ServersControllerRebuildTestV257(ServersControllerRebuildTestV254):
    """Tests server rebuild at microversion 2.57 where user_data can be
    provided and personality files are no longer accepted.
    """

    def setUp(self):
        super(ServersControllerRebuildTestV257, self).setUp()
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.57')

    def test_rebuild_personality(self):
        """Tests that trying to rebuild with personality files fails."""
        # Overrides the inherited 2.54 test: at >= 2.57 "personality" is
        # no longer part of the rebuild schema.
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": base64.encode_as_text("Test String"),
                }]
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('personality', six.text_type(ex))

    def test_rebuild_user_data_old_version(self):
        """Tests that trying to rebuild with user_data before 2.57 fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": "ZWNobyAiaGVsbG8gd29ybGQi"
            }
        }
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.55')
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', six.text_type(ex))

    def test_rebuild_user_data_malformed(self):
        """Tests that trying to rebuild with malformed user_data fails."""
        # user_data must be base64-encoded text, not raw bytes.
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": b'invalid'
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', six.text_type(ex))

    def test_rebuild_user_data_too_large(self):
        """Tests that passing user_data to rebuild that is too large fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": ('MQ==' * 16384)
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', six.text_type(ex))

    @mock.patch.object(context.RequestContext, 'can')
    @mock.patch('nova.db.api.instance_update_and_get_original')
    def test_rebuild_reset_user_data(self, mock_update, mock_policy):
        """Tests that passing user_data=None resets the user_data on the
        instance.
        """
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": None
            }
        }
        self.mock_get.side_effect = None
        self.mock_get.return_value = fakes.stub_instance_obj(
            context.RequestContext(self.req_user_id, self.req_project_id),
            user_data='ZWNobyAiaGVsbG8gd29ybGQi')

        def fake_instance_update_and_get_original(
                ctxt, instance_uuid, values, **kwargs):
            # save() is called twice and the second one has system_metadata
            # in the updates, so we can ignore that one.
            if 'system_metadata' not in values:
                self.assertIn('user_data', values)
                self.assertIsNone(values['user_data'])
            return instance_update_and_get_original(
                ctxt, instance_uuid, values, **kwargs)
        mock_update.side_effect = fake_instance_update_and_get_original
        self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
        self.assertEqual(2, mock_update.call_count)
class ServersControllerRebuildTestV219(ServersControllerRebuildInstanceTest):
    """Tests server rebuild at microversion 2.19, which added support for
    the server ``description`` attribute.
    """

    def setUp(self):
        super(ServersControllerRebuildTestV219, self).setUp()
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.19')

    def _rebuild_server(self, set_desc, desc):
        """Rebuild and assert the response echoes the description.

        :param set_desc: when True, include ``description`` in the
            rebuild request body.
        :param desc: the description value to send and/or expect back.
        """
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          display_description=desc,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        if set_desc:
            self.body['rebuild']['description'] = desc
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(self.req, FAKE_UUID,
                                                 body=self.body).obj['server']
        self.assertEqual(server['id'], FAKE_UUID)
        self.assertEqual(server['description'], desc)

    def test_rebuild_server_with_description(self):
        self._rebuild_server(True, 'server desc')

    def test_rebuild_server_empty_description(self):
        self._rebuild_server(True, '')

    def test_rebuild_server_without_description(self):
        self._rebuild_server(False, '')

    def test_rebuild_server_remove_description(self):
        self._rebuild_server(True, None)

    def test_rebuild_server_description_too_long(self):
        # A 256-character description exceeds the schema's maximum length.
        self.body['rebuild']['description'] = 'x' * 256
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_server_description_invalid(self):
        # Invalid non-printable control char in the desc.
        self.body['rebuild']['description'] = "123\0d456"
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
# NOTE(jaypipes): Not based on ServersControllerRebuildInstanceTest because
# that test case's setUp is completely broken.
class ServersControllerRebuildTestV263(ControllerTest):
    """Tests server rebuild at microversion 2.63, which added support for
    ``trusted_image_certificates``.
    """

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'

    def setUp(self):
        super(ServersControllerRebuildTestV263, self).setUp()
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        self.req_user_id = self.req.environ['nova.context'].user_id
        self.req_project_id = self.req.environ['nova.context'].project_id
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.63')
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_uuid,
                'metadata': {
                    'open': 'stack',
                },
            },
        }

    @mock.patch('nova.compute.api.API.get')
    def _rebuild_server(self, mock_get, certs=None,
                        conf_enabled=True, conf_certs=None):
        """Rebuild and verify trusted_image_certificates in the response.

        :param certs: trusted image certificate IDs to send in the
            rebuild request body (None sends an explicit null).
        :param conf_enabled: when True, turn on signature verification
            and certificate validation in the glance config group.
        :param conf_certs: default trusted certificate IDs to set in
            configuration; used when the request supplies no certs.
        """
        fakes.stub_out_trusted_certs(self, certs=certs)
        ctx = self.req.environ['nova.context']
        mock_get.return_value = fakes.stub_instance_obj(ctx,
            vm_state=vm_states.ACTIVE, trusted_certs=certs,
            project_id=self.req_project_id, user_id=self.req_user_id)

        self.flags(default_trusted_certificate_ids=conf_certs, group='glance')

        if conf_enabled:
            self.flags(verify_glance_signatures=True, group='glance')
            self.flags(enable_certificate_validation=True, group='glance')

        self.body['rebuild']['trusted_image_certificates'] = certs
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID, body=self.body).obj['server']

        if certs:
            self.assertEqual(certs, server['trusted_image_certificates'])
        else:
            if conf_enabled:
                # configuration file default is used
                self.assertEqual(
                    conf_certs, server['trusted_image_certificates'])
            else:
                # either not set or empty
                self.assertIsNone(server['trusted_image_certificates'])

    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_rebuild_server_with_trusted_certs(self, get_min_ver):
        """Test rebuild with valid trusted_image_certificates argument"""
        self._rebuild_server(
            certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8',
                   '674736e3-f25c-405c-8362-bbf991e0ce0a'])

    def test_rebuild_server_without_trusted_certs(self):
        """Test rebuild without trusted image certificates"""
        self._rebuild_server()

    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_rebuild_server_conf_options_turned_off_set(self, get_min_ver):
        """Test rebuild with feature disabled and certs specified"""
        self._rebuild_server(
            certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'], conf_enabled=False)

    def test_rebuild_server_conf_options_turned_off_empty(self):
        """Test rebuild with feature disabled"""
        self._rebuild_server(conf_enabled=False)

    def test_rebuild_server_default_trusted_certificates_empty(self):
        """Test rebuild with feature enabled and no certs specified"""
        self._rebuild_server(conf_enabled=True)

    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_rebuild_server_default_trusted_certificates(self, get_min_ver):
        """Test rebuild with certificate specified in configurations"""
        self._rebuild_server(conf_enabled=True, conf_certs=['conf-id'])

    def test_rebuild_server_with_empty_trusted_cert_id(self):
        """Make sure that we can't rebuild with an empty certificate ID"""
        self.body['rebuild']['trusted_image_certificates'] = ['']
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too short', six.text_type(ex))

    def test_rebuild_server_with_empty_trusted_certs(self):
        """Make sure that we can't rebuild with an empty array of IDs"""
        self.body['rebuild']['trusted_image_certificates'] = []
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too short', six.text_type(ex))

    def test_rebuild_server_with_too_many_trusted_certs(self):
        """Make sure that we can't rebuild with an array of >50 unique IDs"""
        self.body['rebuild']['trusted_image_certificates'] = [
            'cert{}'.format(i) for i in range(51)]
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too long', six.text_type(ex))

    def test_rebuild_server_with_nonunique_trusted_certs(self):
        """Make sure that we can't rebuild with a non-unique array of IDs"""
        self.body['rebuild']['trusted_image_certificates'] = ['cert', 'cert']
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('has non-unique elements', six.text_type(ex))

    def test_rebuild_server_with_invalid_trusted_cert_id(self):
        """Make sure that we can't rebuild with non-string certificate IDs"""
        self.body['rebuild']['trusted_image_certificates'] = [1, 2]
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is not of type', six.text_type(ex))

    def test_rebuild_server_with_invalid_trusted_certs(self):
        """Make sure that we can't rebuild with certificates in a non-array"""
        self.body['rebuild']['trusted_image_certificates'] = "not-an-array"
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is not of type', six.text_type(ex))

    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_rebuild_server_with_trusted_certs_pre_2_63_fails(self,
                                                              get_min_ver):
        """Make sure we can't use trusted_certs before 2.63"""
        # First do a successful 2.63 rebuild to populate self.body, then
        # retry the same body at 2.62 where the property is unknown.
        self._rebuild_server(certs=['trusted-cert-id'])
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.62')
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('Additional properties are not allowed',
                      six.text_type(ex))

    def test_rebuild_server_with_trusted_certs_policy_failed(self):
        rule_name = "os_compute_api:servers:rebuild:trusted_certs"
        rules = {"os_compute_api:servers:rebuild": "@",
                 rule_name: "project:fake"}
        self.policy.set_rules(rules)
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self._rebuild_server,
                                certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'])
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())

    @mock.patch.object(compute_api.API, 'rebuild')
    def test_rebuild_server_with_cert_validation_error(
            self, mock_rebuild):
        mock_rebuild.side_effect = exception.CertificateValidationFailed(
            cert_uuid="cert id", reason="test cert validation error")
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._rebuild_server,
                               certs=['trusted-cert-id'])
        self.assertIn('test cert validation error',
                      six.text_type(ex))

    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS - 1)
    def test_rebuild_server_with_cert_validation_not_available(
            self, get_min_ver):
        # With the compute service minimum version too old, trusted cert
        # rebuilds are rejected with a 409.
        ex = self.assertRaises(webob.exc.HTTPConflict,
                               self._rebuild_server,
                               certs=['trusted-cert-id'])
        self.assertIn('Image signature certificate validation support '
                      'is not yet available',
                      six.text_type(ex))
class ServersControllerRebuildTestV271(ControllerTest):
    """Tests server rebuild at microversion 2.71, which added the
    ``server_groups`` attribute to the rebuild response.
    """

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'

    def setUp(self):
        super(ServersControllerRebuildTestV271, self).setUp()
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action',
                                           use_admin_context=True)
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        self.req_user_id = self.req.environ['nova.context'].user_id
        self.req_project_id = self.req.environ['nova.context'].project_id
        self.req.api_version_request = (api_version_request.
                                        APIVersionRequest('2.71'))
        self.body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": None
            }
        }

    @mock.patch('nova.compute.api.API.get')
    def _rebuild_server(self, mock_get):
        """Run the rebuild action and return the response server dict."""
        ctx = self.req.environ['nova.context']
        mock_get.return_value = fakes.stub_instance_obj(ctx,
            vm_state=vm_states.ACTIVE, project_id=self.req_project_id,
            user_id=self.req_user_id)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID, body=self.body).obj['server']
        return server

    @mock.patch.object(InstanceGroup, 'get_by_instance_uuid',
                       side_effect=exception.InstanceGroupNotFound(
                           group_uuid=FAKE_UUID))
    def test_rebuild_with_server_group_not_exist(self, mock_sg_get):
        # A vanished server group yields an empty list, not an error.
        server = self._rebuild_server()
        self.assertEqual([], server['server_groups'])
class ServersControllerUpdateTest(ControllerTest):
    """Tests for the server update (PUT /servers/{id}) API."""

    def _get_request(self, body=None):
        """Build a PUT request carrying *body* and stub instance lookup."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        req.body = jsonutils.dump_as_bytes(body)
        fake_get = fakes.fake_compute_get(
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        return req

    def test_update_server_all_attributes(self):
        body = {'server': {
                  'name': 'server_test',
               }}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')

    def test_update_server_name(self):
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')

    def test_update_response_has_no_show_server_only_attributes(self):
        """PUT responses must omit GET-only server attributes.

        There are some old server attributes which were added only for
        GET server APIs, not for PUT.  GET server and PUT server share
        the same view builder method show() to build the response, so
        make sure attributes which are not supposed to be included in a
        PUT response are not present.
        """
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        # NOTE: a missing comma previously fused the hostname and
        # kernel_id entries into one bogus string via implicit literal
        # concatenation, so neither field was actually being checked.
        get_only_fields = ['OS-EXT-AZ:availability_zone', 'config_drive',
                           'OS-EXT-SRV-ATTR:host',
                           'OS-EXT-SRV-ATTR:hypervisor_hostname',
                           'OS-EXT-SRV-ATTR:instance_name',
                           'OS-EXT-SRV-ATTR:hostname',
                           'OS-EXT-SRV-ATTR:kernel_id',
                           'OS-EXT-SRV-ATTR:launch_index',
                           'OS-EXT-SRV-ATTR:ramdisk_id',
                           'OS-EXT-SRV-ATTR:reservation_id',
                           'OS-EXT-SRV-ATTR:root_device_name',
                           'OS-EXT-SRV-ATTR:user_data', 'host_status',
                           'key_name', 'OS-SRV-USG:launched_at',
                           'OS-SRV-USG:terminated_at']
        for field in get_only_fields:
            self.assertNotIn(field, res_dict['server'])

    def test_update_server_name_too_long(self):
        # A 256-character name exceeds the schema's maximum length.
        body = {'server': {'name': 'x' * 256}}
        req = self._get_request(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_name_all_blank_spaces(self):
        # A whitespace-only name is rejected by validation.
        self.stub_out('nova.db.api.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' ' * 64}}
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_name_with_spaces_in_the_middle(self):
        body = {'server': {'name': 'abc   def'}}
        req = self._get_request(body)
        self.controller.update(req, FAKE_UUID, body=body)

    def test_update_server_name_with_leading_trailing_spaces(self):
        self.stub_out('nova.db.api.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': '  abc   def  '}}
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError,
                          self.controller.update, req, FAKE_UUID, body=body)

    def test_update_server_name_with_leading_trailing_spaces_compat_mode(self):
        # Legacy v2 compatibility mode tolerates surrounding whitespace.
        body = {'server': {'name': '  abc  def  '}}
        req = self._get_request(body)
        req.set_legacy_v2()
        self.controller.update(req, FAKE_UUID, body=body)

    def test_update_server_admin_password_extra_arg(self):
        # admin_password is not part of the update schema.
        inst_dict = dict(name='server_test', admin_password='bacon')
        body = dict(server=inst_dict)

        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_host_id(self):
        # host_id is read-only and rejected on update.
        inst_dict = dict(host_id='123')
        body = dict(server=inst_dict)

        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_not_found(self):
        self.mock_get.side_effect = exception.InstanceNotFound(
            instance_id='fake')
        body = {'server': {'name': 'server_test'}}
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)

    @mock.patch.object(compute_api.API, 'update_instance')
    def test_update_server_not_found_on_update(self, mock_update_instance):
        # The instance can disappear between the initial lookup and the
        # actual update; that must also surface as a 404.
        def fake_update(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        mock_update_instance.side_effect = fake_update
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_policy_fail(self):
        rule = {'compute:update': 'role:admin'}
        policy.set_rules(oslo_policy.Rules.from_dict(rule))
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.update, req, FAKE_UUID, body=body)
class ServersControllerTriggerCrashDumpTest(ControllerTest):
    """Tests for the trigger_crash_dump server action (microversion 2.17)."""

    def setUp(self):
        super(ServersControllerTriggerCrashDumpTest, self).setUp()

        self.instance = fakes.stub_instance_obj(None,
                                                vm_state=vm_states.ACTIVE,
                                                project_id='fake')

        def fake_get(ctrl, ctxt, uuid):
            # Only FAKE_UUID resolves; any other id 404s, which lets
            # test_trigger_crash_dump_with_bogus_id exercise the
            # not-found path.
            if uuid != FAKE_UUID:
                raise webob.exc.HTTPNotFound(explanation='fakeout')
            return self.instance

        self.useFixture(
            fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
                                 'ServersController._get_instance',
                                 fake_get))

        self.req = fakes.HTTPRequest.blank('/servers/%s/action' % FAKE_UUID)
        self.req.api_version_request =\
            api_version_request.APIVersionRequest('2.17')
        self.body = dict(trigger_crash_dump=None)

    @mock.patch.object(compute_api.API, 'trigger_crash_dump')
    def test_trigger_crash_dump(self, mock_trigger_crash_dump):
        """The action delegates to compute_api with the looked-up instance."""
        ctxt = self.req.environ['nova.context']
        self.controller._action_trigger_crash_dump(self.req, FAKE_UUID,
                                                   body=self.body)
        mock_trigger_crash_dump.assert_called_with(ctxt, self.instance)

    def test_trigger_crash_dump_policy_failed(self):
        rule_name = "os_compute_api:servers:trigger_crash_dump"
        self.policy.set_rules({rule_name: "project_id:non_fake"})
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._action_trigger_crash_dump,
                                self.req, FAKE_UUID, body=self.body)
        self.assertIn("os_compute_api:servers:trigger_crash_dump",
                      exc.format_message())

    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fake_start_stop_not_ready)
    def test_trigger_crash_dump_not_ready(self):
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fakes.fake_actions_to_locked_server)
    def test_trigger_crash_dump_locked_server(self):
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fake_start_stop_invalid_state)
    def test_trigger_crash_dump_invalid_state(self):
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    def test_trigger_crash_dump_with_bogus_id(self):
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_trigger_crash_dump,
                          self.req, 'test_inst', body=self.body)

    def test_trigger_crash_dump_schema_invalid_type(self):
        # The schema only accepts null as the action value.
        self.body['trigger_crash_dump'] = 'not null'
        self.assertRaises(exception.ValidationError,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)

    def test_trigger_crash_dump_schema_extra_property(self):
        # Extra properties in the action body are rejected.
        self.body['extra_property'] = 'extra'
        self.assertRaises(exception.ValidationError,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)
class ServersControllerUpdateTestV219(ServersControllerUpdateTest):
    """Tests server update at microversion 2.19, which added support for
    the server ``description`` attribute.
    """

    def _get_request(self, body=None):
        req = super(ServersControllerUpdateTestV219, self)._get_request(
            body=body)
        req.api_version_request = api_version_request.APIVersionRequest('2.19')
        return req

    def _update_server_desc(self, set_desc, desc=None):
        """PUT the server, optionally with a description, and return the
        response dict.
        """
        body = {'server': {}}
        if set_desc:
            body['server']['description'] = desc
        req = self._get_request()
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        return res_dict

    def test_update_server_description(self):
        res_dict = self._update_server_desc(True, 'server_desc')
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['description'], 'server_desc')

    def test_update_server_empty_description(self):
        res_dict = self._update_server_desc(True, '')
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['description'], '')

    def test_update_server_without_description(self):
        res_dict = self._update_server_desc(False)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertIsNone(res_dict['server']['description'])

    def test_update_server_remove_description(self):
        # Sending an explicit null clears the description.
        res_dict = self._update_server_desc(True)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertIsNone(res_dict['server']['description'])

    def test_update_server_all_attributes(self):
        body = {'server': {
                  'name': 'server_test',
                  'description': 'server_desc'
               }}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
        self.assertEqual(res_dict['server']['description'], 'server_desc')

    def test_update_server_description_too_long(self):
        # A 256-character description exceeds the schema's maximum length.
        body = {'server': {'description': 'x' * 256}}
        req = self._get_request(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_description_invalid(self):
        # Invalid non-printable control char in the desc.
        body = {'server': {'description': "123\0d456"}}
        req = self._get_request(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
class ServersControllerUpdateTestV271(ServersControllerUpdateTest):
    """Tests server update (PUT) behavior at microversion 2.71."""

    body = {'server': {'name': 'server_test'}}

    def _get_request(self, body=None):
        request = super(ServersControllerUpdateTestV271, self)._get_request(
            body=body)
        request.api_version_request = (
            api_version_request.APIVersionRequest('2.71'))
        return request

    @mock.patch.object(InstanceGroup, 'get_by_instance_uuid',
                       side_effect=exception.InstanceGroupNotFound(
                           group_uuid=FAKE_UUID))
    def test_update_with_server_group_not_exist(self, mock_sg_get):
        """A vanished server group appears as an empty list, not a 404."""
        request = self._get_request(self.body)
        response = self.controller.update(request, FAKE_UUID, body=self.body)
        self.assertEqual([], response['server']['server_groups'])
class ServerStatusTest(test.TestCase):
    """Verify the API 'status' string derived from vm_state/task_state."""

    def setUp(self):
        super(ServerStatusTest, self).setUp()
        self.flags(use_neutron=False)
        fakes.stub_out_nw_api(self)
        self.controller = servers.ServersController()

    def _get_with_state(self, vm_state, task_state=None):
        # Stub compute.API.get so show() observes the requested states.
        self.stub_out('nova.compute.api.API.get',
                      fakes.fake_compute_get(vm_state=vm_state,
                                             task_state=task_state))
        request = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID)
        return self.controller.show(request, FAKE_UUID)

    def _check_status(self, expected, vm_state, task_state=None):
        # Shared assertion helper for the state-mapping tests below.
        resp = self._get_with_state(vm_state, task_state)
        self.assertEqual(expected, resp['server']['status'])

    def _restrict_policy(self, rule_name):
        # Restrict the given policy action to admins only.
        rules = oslo_policy.Rules.from_dict({rule_name: 'role:admin'})
        policy.set_rules(rules)

    def test_active(self):
        self._check_status('ACTIVE', vm_states.ACTIVE)

    def test_reboot(self):
        self._check_status('REBOOT', vm_states.ACTIVE,
                           task_states.REBOOTING)

    def test_reboot_hard(self):
        self._check_status('HARD_REBOOT', vm_states.ACTIVE,
                           task_states.REBOOTING_HARD)

    def test_reboot_resize_policy_fail(self):
        self._restrict_policy('compute:reboot')
        req = fakes.HTTPRequestV21.blank('/fake/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_reboot, req, '1234',
                          body={'reboot': {'type': 'HARD'}})

    def test_rebuild(self):
        self._check_status('REBUILD', vm_states.ACTIVE,
                           task_states.REBUILDING)

    def test_rebuild_error(self):
        self._check_status('ERROR', vm_states.ERROR)

    def test_resize(self):
        self._check_status('RESIZE', vm_states.ACTIVE,
                           task_states.RESIZE_PREP)

    def test_confirm_resize_policy_fail(self):
        self._restrict_policy('compute:confirm_resize')
        req = fakes.HTTPRequestV21.blank('/fake/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_confirm_resize,
                          req, '1234', {})

    def test_verify_resize(self):
        self._check_status('VERIFY_RESIZE', vm_states.RESIZED, None)

    def test_revert_resize(self):
        self._check_status('REVERT_RESIZE', vm_states.RESIZED,
                           task_states.RESIZE_REVERTING)

    def test_revert_resize_policy_fail(self):
        self._restrict_policy('compute:revert_resize')
        req = fakes.HTTPRequestV21.blank('/fake/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_revert_resize,
                          req, '1234', {})

    def test_password_update(self):
        self._check_status('PASSWORD', vm_states.ACTIVE,
                           task_states.UPDATING_PASSWORD)

    def test_stopped(self):
        self._check_status('SHUTOFF', vm_states.STOPPED)
class ServersControllerCreateTest(test.TestCase):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTest, self).setUp()
        self.flags(enable_instance_password=True, group='api')
        # In-memory "database" of instances created during a test, keyed
        # both by integer id and by uuid.
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        fakes.stub_out_nw_api(self)
        self.controller = servers.ServersController()
        def instance_create(context, inst):
            # Stub for nova.db.api.instance_create: fabricate a DB-style
            # instance dict and remember it in the caches above.
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            def_image_ref = 'http://localhost/fake/images/%s' % image_uuid
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(**{
                'id': self.instance_cache_num,
                'display_name': inst['display_name'] or 'test',
                'display_description': inst['display_description'] or '',
                'uuid': FAKE_UUID,
                'instance_type': inst_type,
                'image_ref': inst.get('image_ref', def_image_ref),
                'user_id': 'fake',
                'project_id': 'fake',
                'reservation_id': inst['reservation_id'],
                "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                "config_drive": None,
                "progress": 0,
                "fixed_ips": [],
                "task_state": "",
                "vm_state": "",
                "root_device_name": inst.get('root_device_name', 'vda'),
                "security_groups": [
                    {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
                     'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                     'deleted_at': None, 'updated_at': None,
                     'created_at': None},
                    {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
                     'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                     'deleted_at': None, 'updated_at': None,
                     'created_at': None}]
            })
            self.instance_cache_by_id[instance['id']] = instance
            self.instance_cache_by_uuid[instance['uuid']] = instance
            return instance
        def instance_get(context, instance_id):
            """Stub for compute/api create() pulling in instance after
            scheduling
            """
            return self.instance_cache_by_id[instance_id]
        def instance_update(context, uuid, values):
            # Stub for nova.db.api.instance_update: mutate the cached dict.
            instance = self.instance_cache_by_uuid[uuid]
            instance.update(values)
            return instance
        def server_update_and_get_original(
                context, instance_uuid, params, columns_to_join=None):
            # Stub for instance_update_and_get_original; here the "original"
            # and the updated instance are the same cached object.
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return (inst, inst)
        fakes.stub_out_key_pair_funcs(self)
        fake.stub_out_image_service(self)
        # Wire the stubs above into the DB/network API entry points.
        self.stub_out('nova.db.api.project_get_networks',
                      lambda c, u: dict(id='1', host='localhost'))
        self.stub_out('nova.db.api.instance_create', instance_create)
        self.stub_out('nova.db.api.instance_system_metadata_update',
                      lambda *a, **kw: None)
        self.stub_out('nova.db.api.instance_get', instance_get)
        self.stub_out('nova.db.api.instance_update', instance_update)
        self.stub_out('nova.db.api.instance_update_and_get_original',
                      server_update_and_get_original)
        self.stub_out('nova.network.manager.VlanManager.allocate_fixed_ip',
                      lambda *a, **kw: None)
        # Default valid request body; individual tests tweak it in place.
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
                'networks': [{
                    'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
                }],
            },
        }
        # Canned block-device mappings in v2 and legacy formats.
        self.bdm_v2 = [{
            'no_device': None,
            'source_type': 'volume',
            'destination_type': 'volume',
            'uuid': 'fake',
            'device_name': 'vdb',
            'delete_on_termination': False,
        }]
        self.bdm = [{
            'no_device': None,
            'virtual_name': 'root',
            'volume_id': fakes.FAKE_UUID,
            'device_name': 'vda',
            'delete_on_termination': False
        }]
        # Pre-built POST request with a minimal JSON body.
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        server = dict(name='server_test', imageRef=FAKE_UUID, flavorRef=2)
        body = {'server': server}
        self.req.body = encodeutils.safe_encode(jsonutils.dumps(body))
def _check_admin_password_len(self, server_dict):
"""utility function - check server_dict for admin_password length."""
self.assertEqual(CONF.password_length,
len(server_dict["adminPass"]))
def _check_admin_password_missing(self, server_dict):
"""utility function - check server_dict for admin_password absence."""
self.assertNotIn("adminPass", server_dict)
def _test_create_instance(self, flavor=2):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
self.body['server']['imageRef'] = image_uuid
self.body['server']['flavorRef'] = flavor
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller.create(self.req, body=self.body).obj['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_none_value_port(self):
self.body['server'] = {'networks': [{'port': None, 'uuid': FAKE_UUID}]}
self.body['server']['name'] = 'test'
self._test_create_instance()
def test_create_instance_private_flavor(self):
values = {
'name': 'fake_name',
'memory': 512,
'vcpus': 1,
'root_gb': 10,
'ephemeral_gb': 10,
'flavorid': '1324',
'swap': 0,
'rxtx_factor': 0.5,
'is_public': False,
}
flavors.create(**values)
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_instance,
flavor=1324)
self.assertEqual('Flavor 1324 could not be found.', six.text_type(ex))
def test_create_server_bad_image_uuid(self):
self.body['server']['min_count'] = 1
self.body['server']['imageRef'] = 1,
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
def test_create_server_with_deleted_image(self):
# Get the fake image service so we can set the status to deleted
(image_service, image_id) = glance.get_remote_image_service(
context, '')
image_service.update(context, self.image_uuid, {'status': 'DELETED'})
self.addCleanup(image_service.update, context, self.image_uuid,
{'status': 'active'})
self.body['server']['flavorRef'] = 2
self.req.body = jsonutils.dump_as_bytes(self.body)
with testtools.ExpectedException(
webob.exc.HTTPBadRequest,
'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'):
self.controller.create(self.req, body=self.body)
def test_create_server_image_too_large(self):
# Get the fake image service so we can update the size of the image
(image_service, image_id) = glance.get_remote_image_service(
context, self.image_uuid)
image = image_service.show(context, image_id)
orig_size = image['size']
new_size = str(1000 * (1024 ** 3))
image_service.update(context, self.image_uuid, {'size': new_size})
self.addCleanup(image_service.update, context, self.image_uuid,
{'size': orig_size})
self.body['server']['flavorRef'] = 2
self.req.body = jsonutils.dump_as_bytes(self.body)
with testtools.ExpectedException(
webob.exc.HTTPBadRequest,
"Flavor's disk is too small for requested image."):
self.controller.create(self.req, body=self.body)
def test_create_instance_with_image_non_uuid(self):
self.body['server']['imageRef'] = 'not-uuid'
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
def test_create_instance_with_image_as_full_url(self):
image_href = ('http://localhost/v2/fake/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
self.body['server']['imageRef'] = image_href
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
def test_create_instance_with_image_as_empty_string(self):
self.body['server']['imageRef'] = ''
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
def test_create_instance_no_key_pair(self):
fakes.stub_out_key_pair_funcs(self, have_key_pair=False)
self._test_create_instance()
def _test_create_extra(self, params, no_image=False):
self.body['server']['flavorRef'] = 2
if no_image:
self.body['server'].pop('imageRef', None)
self.body['server'].update(params)
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.headers["content-type"] = "application/json"
self.controller.create(self.req, body=self.body).obj['server']
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PortRequiresFixedIP(
port_id=uuids.port))
def test_create_instance_with_port_with_no_fixed_ips(self, mock_create):
requested_networks = [{'port': uuids.port}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_raise_user_data_too_large(self):
self.body['server']['user_data'] = (b'1' * 65536)
ex = self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
# Make sure the failure was about user_data and not something else.
self.assertIn('user_data', six.text_type(ex))
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.NetworkRequiresSubnet(
network_uuid=uuids.network))
def test_create_instance_with_network_with_no_subnet(self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.NoUniqueMatch(
"No Unique match found for ..."))
def test_create_instance_with_non_unique_secgroup_name(self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks,
'security_groups': [{'name': 'dup'}, {'name': 'dup'}]}
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
def test_create_instance_secgroup_leading_trailing_spaces(self):
network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks,
'security_groups': [{'name': ' sg '}]}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_secgroup_leading_trailing_spaces_compat_mode(
self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks,
'security_groups': [{'name': ' sg '}]}
def fake_create(*args, **kwargs):
self.assertEqual([' sg '], kwargs['security_groups'])
return (objects.InstanceList(objects=[fakes.stub_instance_obj(
self.req.environ['nova.context'])]), None)
mock_create.side_effect = fake_create
self.req.set_legacy_v2()
self._test_create_extra(params)
def test_create_instance_with_networks_disabled_neutronv2(self):
self.flags(use_neutron=True)
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
None, None)]
self.assertEqual(result, kwargs['requested_networks'].as_tuples())
return old_create(*args, **kwargs)
with mock.patch('nova.compute.api.API.create', create):
self._test_create_extra(params)
def test_create_instance_with_pass_disabled(self):
# test with admin passwords disabled See lp bug 921814
self.flags(enable_instance_password=False, group='api')
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.flags(enable_instance_password=False, group='api')
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_name_too_long(self):
self.body['server']['name'] = 'X' * 256
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError, self.controller.create,
self.req, body=self.body)
def test_create_instance_name_with_spaces_in_the_middle(self):
self.body['server']['name'] = 'abc def'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body)
def test_create_instance_name_with_leading_trailing_spaces(self):
self.body['server']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_name_with_leading_trailing_spaces_in_compat_mode(
self):
self.body['server']['name'] = ' abc def '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.set_legacy_v2()
self.controller.create(self.req, body=self.body)
def test_create_instance_name_all_blank_spaces(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/fake/flavors/3'
body = {
'server': {
'name': ' ' * 64,
'imageRef': image_uuid,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
},
}
req = fakes.HTTPRequest.blank('/fake/servers')
req.method = 'POST'
req.body = jsonutils.dump_as_bytes(body)
req.headers["content-type"] = "application/json"
self.assertRaises(exception.ValidationError,
self.controller.create, req, body=body)
def test_create_az_with_leading_trailing_spaces(self):
self.body['server']['availability_zone'] = ' zone1 '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_az_with_leading_trailing_spaces_in_compat_mode(
self):
self.body['server']['name'] = ' abc def '
self.body['server']['availability_zones'] = ' zone1 '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.set_legacy_v2()
with mock.patch.object(availability_zones, 'get_availability_zones',
return_value=[' zone1 ']):
self.controller.create(self.req, body=self.body)
def test_create_instance(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_pass_disabled(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.flags(enable_instance_password=False, group='api')
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
@mock.patch('nova.virt.hardware.numa_get_constraints')
def _test_create_instance_numa_topology_wrong(self, exc,
numa_constraints_mock):
numa_constraints_mock.side_effect = exc(**{
'name': None,
'source': 'flavor',
'requested': 'dummy',
'available': str(objects.fields.CPUAllocationPolicy.ALL),
'cpunum': 0,
'cpumax': 0,
'cpuset': None,
'memsize': 0,
'memtotal': 0})
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_numa_topology_wrong(self):
for exc in [exception.ImageNUMATopologyIncomplete,
exception.ImageNUMATopologyForbidden,
exception.ImageNUMATopologyAsymmetric,
exception.ImageNUMATopologyCPUOutOfRange,
exception.ImageNUMATopologyCPUDuplicates,
exception.ImageNUMATopologyCPUsUnassigned,
exception.InvalidCPUAllocationPolicy,
exception.InvalidCPUThreadAllocationPolicy,
exception.ImageNUMATopologyMemoryOutOfRange]:
self._test_create_instance_numa_topology_wrong(exc)
def test_create_instance_too_much_metadata(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata']['vote'] = 'fiddletown'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_too_long(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {('a' * 260): '12345'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_too_long(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {'key1': ('a' * 260)}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_blank(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {'': 'abcd'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_not_dict(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = 'string'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_not_string(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {1: 'test'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_not_string(self):
self.flags(metadata_items=1, group='quota')
self.body['server']['metadata'] = {'test': ['a', 'list']}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_user_data_malformed_bad_request(self):
params = {'user_data': 'u1234'}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
def _create_instance_body_of_config_drive(self, param):
def create(*args, **kwargs):
self.assertIn('config_drive', kwargs)
return old_create(*args, **kwargs)
old_create = compute_api.API.create
self.stub_out('nova.compute.api.API.create', create)
self.body['server']['config_drive'] = param
self.req.body = jsonutils.dump_as_bytes(self.body)
def test_create_instance_with_config_drive(self):
param = True
self._create_instance_body_of_config_drive(param)
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_config_drive_as_boolean_string(self):
param = 'false'
self._create_instance_body_of_config_drive(param)
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_bad_config_drive(self):
param = 12345
self._create_instance_body_of_config_drive(param)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_without_config_drive(self):
def create(*args, **kwargs):
self.assertIsNone(kwargs['config_drive'])
return old_create(*args, **kwargs)
old_create = compute_api.API.create
self.stub_out('nova.compute.api.API.create', create)
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_empty_config_drive(self):
param = ''
self._create_instance_body_of_config_drive(param)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def _test_create(self, params, no_image=False):
self. body['server'].update(params)
if no_image:
del self.body['server']['imageRef']
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body).obj['server']
def test_create_instance_with_volumes_enabled_no_image(self):
"""Test that the create will fail if there is no image
and no bdms supplied in the request
"""
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create, {}, no_image=True)
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_bdms_and_no_image(
self, mock_bdm_image_metadata, mock_validate_bdm):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm_v2[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY, mock.ANY, mock.ANY)
mock_bdm_image_metadata.assert_called_once_with(
mock.ANY, mock.ANY, False)
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_bdms_and_empty_imageRef(
self, mock_bdm_image_metadata, mock_validate_bdm):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm_v2[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2,
'imageRef': ''}
self._test_create(params)
def test_create_instance_with_imageRef_as_full_url(self):
bdm = [{'device_name': 'foo'}]
image_href = ('http://localhost/v2/fake/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
params = {'block_device_mapping_v2': bdm,
'imageRef': image_href}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_non_uuid_imageRef(self):
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping_v2': bdm,
'imageRef': '123123abcd'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_invalid_bdm_in_2nd_dict(self):
bdm_1st = {"source_type": "image", "delete_on_termination": True,
"boot_index": 0,
"uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
"destination_type": "local"}
bdm_2nd = {"source_type": "volume",
"uuid": "99d92140-3d0c-4ea5-a49c-f94c38c607f0",
"destination_type": "invalid"}
bdm = [bdm_1st, bdm_2nd]
params = {'block_device_mapping_v2': bdm,
'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_boot_index_none_ok(self):
"""Tests creating a server with two block devices. One is the boot
device and the other is a non-bootable device.
"""
# From the docs:
# To disable a device from booting, set the boot index to a negative
# value or use the default boot index value, which is None. The
# simplest usage is, set the boot index of the boot device to 0 and use
# the default boot index value, None, for any other devices.
bdms = [
# This is the bootable device that would create a 20GB cinder
# volume from the given image.
{
'source_type': 'image',
'destination_type': 'volume',
'boot_index': 0,
'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
'volume_size': 20
},
# This is the non-bootable 10GB ext4 ephemeral block device.
{
'source_type': 'blank',
'destination_type': 'local',
'boot_index': None,
# If 'guest_format' is 'swap' then a swap device is created.
'guest_format': 'ext4'
}
]
params = {'block_device_mapping_v2': bdms}
self._test_create(params, no_image=True)
def test_create_instance_with_boot_index_none_image_local_fails(self):
"""Tests creating a server with a local image-based block device which
has a boot_index of None which is invalid.
"""
bdms = [{
'source_type': 'image',
'destination_type': 'local',
'boot_index': None,
'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6'
}]
params = {'block_device_mapping_v2': bdms}
self.assertRaises(webob.exc.HTTPBadRequest, self._test_create,
params, no_image=True)
def test_create_instance_with_invalid_boot_index(self):
bdm = [{"source_type": "image", "delete_on_termination": True,
"boot_index": 'invalid',
"uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
"destination_type": "local"}]
params = {'block_device_mapping_v2': bdm,
'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_device_name_not_string(self):
self.bdm_v2[0]['device_name'] = 123
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_bdm_param_not_list(self, mock_create):
self.params = {'block_device_mapping': '/dev/vdb'}
self.assertRaises(exception.ValidationError,
self._test_create, self.params)
def test_create_instance_with_device_name_empty(self):
self.bdm_v2[0]['device_name'] = ''
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_device_name_too_long(self):
self.bdm_v2[0]['device_name'] = 'a' * 256
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_space_in_device_name(self):
self.bdm_v2[0]['device_name'] = 'v da'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertTrue(kwargs['legacy_bdm'])
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_invalid_size(self):
self.bdm_v2[0]['volume_size'] = 'hello world'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def _test_create_instance_with_destination_type_error(self,
destination_type):
self.bdm_v2[0]['destination_type'] = destination_type
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(exception.ValidationError,
self._test_create, params, no_image=True)
def test_create_instance_with_destination_type_empty_string(self):
self._test_create_instance_with_destination_type_error('')
def test_create_instance_with_invalid_destination_type(self):
self._test_create_instance_with_destination_type_error('fake')
@mock.patch.object(compute_api.API, '_validate_bdm')
def test_create_instance_bdm(self, mock_validate_bdm):
bdm = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'uuid': 'fake_vol'
}]
bdm_expected = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'volume_id': 'fake_vol'
}]
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
for expected, received in zip(bdm_expected,
kwargs['block_device_mapping']):
self.assertThat(block_device.BlockDeviceDict(expected),
matchers.DictMatches(received))
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': bdm}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once_with(mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY)
@mock.patch.object(compute_api.API, '_validate_bdm')
def test_create_instance_bdm_missing_device_name(self, mock_validate_bdm):
del self.bdm_v2[0]['device_name']
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
self.assertNotIn(None,
kwargs['block_device_mapping'][0]['device_name'])
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once_with(mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY)
@mock.patch.object(
block_device.BlockDeviceDict, '_validate',
side_effect=exception.InvalidBDMFormat(details='Wrong BDM'))
def test_create_instance_bdm_validation_error(self, mock_validate):
params = {'block_device_mapping_v2': self.bdm_v2}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create, params, no_image=True)
# Booting (no image) from a volume that is not bootable is a 400.
@mock.patch('nova.compute.api.API._get_bdm_image_metadata')
def test_create_instance_non_bootable_volume_fails(self, fake_bdm_meta):
params = {'block_device_mapping_v2': self.bdm_v2}
fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
self.assertRaises(webob.exc.HTTPBadRequest, self._test_create, params,
no_image=True)
# Each InvalidBDM* subclass raised by _validate_bdm must map to a 400,
# and the (never-created) instance must not be destroyed on that path.
def test_create_instance_bdm_api_validation_fails(self):
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
bdm_exceptions = ((exception.InvalidBDMSnapshot, {'id': 'fake'}),
(exception.InvalidBDMVolume, {'id': 'fake'}),
(exception.InvalidBDMImage, {'id': 'fake'}),
(exception.InvalidBDMBootSequence, {}),
(exception.InvalidBDMLocalsLimit, {}))
ex_iter = iter(bdm_exceptions)
# Consumes one exception from ex_iter per create attempt.
def _validate_bdm(*args, **kwargs):
self.validation_fail_test_validate_called = True
ex, kargs = next(ex_iter)
raise ex(**kargs)
def _instance_destroy(*args, **kwargs):
self.validation_fail_instance_destroy_called = True
self.stub_out('nova.compute.api.API._validate_bdm', _validate_bdm)
self.stub_out('nova.objects.Instance.destroy', _instance_destroy)
for _unused in range(len(bdm_exceptions)):
params = {'block_device_mapping_v2':
[self.bdm_v2[0].copy()]}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create, params)
self.assertTrue(self.validation_fail_test_validate_called)
self.assertFalse(self.validation_fail_instance_destroy_called)
# Reset the flags for the next exception in the sequence.
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
# Shared helper: POST a create request with *params* merged into the
# server body (optionally imageless) and assert _validate_bdm was
# called exactly once with the expected argument types.
@mock.patch.object(compute_api.API, '_validate_bdm')
def _test_create_bdm(self, params, mock_validate_bdm, no_image=False):
self.body['server'].update(params)
if no_image:
del self.body['server']['imageRef']
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body).obj['server']
mock_validate_bdm.assert_called_once_with(
test.MatchType(fakes.FakeRequestContext),
test.MatchType(objects.Instance),
test.MatchType(objects.Flavor),
test.MatchType(objects.BlockDeviceMappingList),
False)
# Legacy BDMs are passed through to API.create unchanged.
def test_create_instance_with_volumes_enabled(self):
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params)
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_volumes_enabled_and_bdms_no_image(
self, mock_get_bdm_image_metadata):
"""Test that the create works if there is no image supplied but
os-volumes extension is enabled and bdms are supplied
"""
# Image metadata is sourced from the boot volume instead.
volume = {
'id': uuids.volume_id,
'status': 'active',
'volume_image_metadata':
{'test_key': 'test_value'}
}
mock_get_bdm_image_metadata.return_value = volume
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
# No image was supplied, so none may be forwarded.
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params, no_image=True)
mock_get_bdm_image_metadata.assert_called_once_with(
mock.ANY, self.bdm, True)
# An empty-string imageRef is treated like "no image": boot from the
# supplied volume BDM.
@mock.patch.object(compute_api.API, '_get_bdm_image_metadata')
def test_create_instance_with_imageRef_as_empty_string(
self, mock_bdm_image_metadata):
volume = {
'id': uuids.volume_id,
'status': 'active',
'volume_image_metadata':
{'test_key': 'test_value'}
}
mock_bdm_image_metadata.return_value = volume
params = {'block_device_mapping': self.bdm,
'imageRef': ''}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params)
def test_create_instance_with_imageRef_as_full_url_legacy_bdm(self):
    """A full image URL is not a valid imageRef alongside legacy BDMs."""
    legacy_bdm = [{'volume_id': fakes.FAKE_UUID, 'device_name': 'vda'}]
    image_href = ('http://localhost/v2/fake/images/'
                  '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
    params = {'block_device_mapping': legacy_bdm,
              'imageRef': image_href}
    self.assertRaises(exception.ValidationError,
                      self._test_create_bdm, params)
def test_create_instance_with_non_uuid_imageRef_legacy_bdm(self):
    """imageRef must be a UUID; an arbitrary string fails validation."""
    legacy_bdm = [{'volume_id': fakes.FAKE_UUID, 'device_name': 'vda'}]
    params = {'block_device_mapping': legacy_bdm,
              'imageRef': 'bad-format'}
    self.assertRaises(exception.ValidationError,
                      self._test_create_bdm, params)
# Legacy-BDM variant: imageless boot from a non-bootable volume is 400.
@mock.patch('nova.compute.api.API._get_bdm_image_metadata')
def test_create_instance_non_bootable_volume_fails_legacy_bdm(
self, fake_bdm_meta):
bdm = [{
'volume_id': fakes.FAKE_UUID,
'device_name': 'vda'
}]
params = {'block_device_mapping': bdm}
fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_bdm, params, no_image=True)
# A non-string device_name fails schema validation before API.create;
# the stub below should therefore never run.
def test_create_instance_with_device_name_not_string_legacy_bdm(self):
self.bdm[0]['device_name'] = 123
old_create = compute_api.API.create
self.params = {'block_device_mapping': self.bdm}
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, self.params)
# null snapshot_id/volume_id values are rejected by schema validation.
def test_create_instance_with_snapshot_volume_id_none(self):
old_create = compute_api.API.create
bdm = [{
'no_device': None,
'snapshot_id': None,
'volume_id': None,
'device_name': 'vda',
'delete_on_termination': False
}]
self.params = {'block_device_mapping': bdm}
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, self.params)
# block_device_mapping must be a list; a bare string is invalid.
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_legacy_bdm_param_not_list(self, mock_create):
self.params = {'block_device_mapping': '/dev/vdb'}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, self.params)
# An empty device_name fails schema validation before API.create.
def test_create_instance_with_device_name_empty_legacy_bdm(self):
self.bdm[0]['device_name'] = ''
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_with_device_name_too_long_legacy_bdm(self):
    """An over-long (>255 char) device_name fails schema validation.

    NOTE: a stray trailing comma previously turned the value into a
    one-element tuple, so the test rejected the wrong thing (wrong
    type rather than excessive length).
    """
    self.bdm[0]['device_name'] = 'a' * 256
    params = {'block_device_mapping': self.bdm}
    old_create = compute_api.API.create

    def create(*args, **kwargs):
        # Should never run: validation fails before API.create.
        self.assertEqual(kwargs['block_device_mapping'], self.bdm)
        return old_create(*args, **kwargs)

    self.stub_out('nova.compute.api.API.create', create)
    self.assertRaises(exception.ValidationError,
                      self._test_create_bdm, params)
def test_create_instance_with_space_in_device_name_legacy_bdm(self):
    """A device_name containing whitespace fails schema validation.

    NOTE: a stray trailing comma previously turned the value into a
    one-element tuple, so the test rejected the wrong thing (wrong
    type rather than the embedded space).
    """
    self.bdm[0]['device_name'] = 'vd a'
    params = {'block_device_mapping': self.bdm}
    old_create = compute_api.API.create

    def create(*args, **kwargs):
        # Should never run: validation fails before API.create.
        self.assertTrue(kwargs['legacy_bdm'])
        self.assertEqual(kwargs['block_device_mapping'], self.bdm)
        return old_create(*args, **kwargs)

    self.stub_out('nova.compute.api.API.create', create)
    self.assertRaises(exception.ValidationError,
                      self._test_create_bdm, params)
# Shared helper: a legacy BDM with the given (invalid) volume_size must
# fail schema validation; the create stub should never be reached.
def _test_create_bdm_instance_with_size_error(self, size):
bdm = [{'delete_on_termination': True,
'device_name': 'vda',
'volume_size': size,
'volume_id': '11111111-1111-1111-1111-111111111111'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
# Non-numeric, empty, zero, and over-limit volume sizes are all invalid.
def test_create_instance_with_invalid_size_legacy_bdm(self):
self._test_create_bdm_instance_with_size_error("hello world")
def test_create_instance_with_size_empty_string(self):
self._test_create_bdm_instance_with_size_error('')
def test_create_instance_with_size_zero(self):
self._test_create_bdm_instance_with_size_error("0")
def test_create_instance_with_size_greater_than_limit(self):
self._test_create_bdm_instance_with_size_error(db.MAX_INT + 1)
# String boolean values ('True'/'False') for delete_on_termination are
# coerced to real booleans before reaching API.create.
def test_create_instance_with_bdm_delete_on_termination(self):
bdm = [{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'True'},
{'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True},
{'device_name': 'foo3', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'False'},
{'device_name': 'foo4', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False},
{'device_name': 'foo5', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False}]
expected_bdm = [
{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True},
{'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True},
{'device_name': 'foo3', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False},
{'device_name': 'foo4', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False},
{'device_name': 'foo5', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': False}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(expected_bdm, kwargs['block_device_mapping'])
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_bdm(params)
# A non-boolean delete_on_termination in any entry fails validation.
def test_create_instance_with_bdm_delete_on_termination_invalid_2nd(self):
bdm = [{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'True'},
{'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
'delete_on_termination': 'invalid'}]
params = {'block_device_mapping': bdm}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
# With no BDM, or with a legacy-format BDM, legacy_bdm stays True.
def test_create_instance_decide_format_legacy(self):
bdm = [{'device_name': 'foo1',
'volume_id': fakes.FAKE_UUID,
'delete_on_termination': True}]
expected_legacy_flag = True
old_create = compute_api.API.create
def create(*args, **kwargs):
# Default to True when the flag was not explicitly passed.
legacy_bdm = kwargs.get('legacy_bdm', True)
self.assertEqual(legacy_bdm, expected_legacy_flag)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
# First without any BDM, then with a legacy-format BDM.
self._test_create_bdm({})
params = {'block_device_mapping': bdm}
self._test_create_bdm(params)
# Supplying both legacy and v2 BDMs in one request is a 400.
def test_create_instance_both_bdm_formats(self):
bdm = [{'device_name': 'foo'}]
bdm_v2 = [{'source_type': 'volume',
'uuid': 'fake_vol'}]
params = {'block_device_mapping': bdm,
'block_device_mapping_v2': bdm_v2}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_bdm, params)
# Requesting a keypair that does not exist is a 400.
def test_create_instance_invalid_key_name(self):
self.body['server']['key_name'] = 'nonexistentkey'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
# A known keypair name is accepted and the server is created.
def test_create_instance_valid_key_name(self):
# Pin uuid4 so the created server id is predictable.
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.body['server']['key_name'] = 'key'
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_password_len(res["server"])
# Leading/trailing whitespace in key_name fails schema validation.
def test_create_server_keypair_name_with_leading_trailing(self):
self.body['server']['key_name'] = '  abc  '
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
# In legacy-v2 compat mode the whitespace-padded key_name is allowed
# and passed through to API.create untouched.
@mock.patch.object(compute_api.API, 'create')
def test_create_server_keypair_name_with_leading_trailing_compat_mode(
self, mock_create):
params = {'key_name': '  abc  '}
def fake_create(*args, **kwargs):
self.assertEqual('  abc  ', kwargs['key_name'])
return (objects.InstanceList(objects=[fakes.stub_instance_obj(
self.req.environ['nova.context'])]), None)
mock_create.side_effect = fake_create
self.req.set_legacy_v2()
self._test_create_extra(params)
def test_create_instance_invalid_flavor_href(self):
    """A flavorRef URL naming a nonexistent flavor yields a 400."""
    self.body['server']['flavorRef'] = 'http://localhost/v2/flavors/asdf'
    self.req.body = jsonutils.dump_as_bytes(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_int(self):
    """A negative integer flavorRef yields a 400."""
    self.body['server']['flavorRef'] = -1
    self.req.body = jsonutils.dump_as_bytes(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, body=self.body)
# A SnapshotNotFound from API.create (unknown snapshot uuid in a v2
# BDM) is translated to a 400.
@mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor())
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_non_existing_snapshot_id(
self, mock_create,
mock_get_flavor_by_flavor_id):
mock_create.side_effect = exception.SnapshotNotFound(snapshot_id='123')
self.body['server'] = {'name': 'server_test',
'flavorRef': self.flavor_ref,
'block_device_mapping_v2':
[{'source_type': 'snapshot',
'uuid': '123'}]}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_empty(self):
    """An empty-string flavorRef fails schema validation."""
    self.body['server']['flavorRef'] = ""
    self.req.body = jsonutils.dump_as_bytes(self.body)
    self.assertRaises(exception.ValidationError,
                      self.controller.create, self.req, body=self.body)
def test_create_instance_bad_flavor_href(self):
    """A well-formed flavor URL for an unknown flavor id yields a 400."""
    self.body['server']['flavorRef'] = 'http://localhost/v2/flavors/17'
    self.req.body = jsonutils.dump_as_bytes(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, body=self.body)
def test_create_instance_local_href(self):
    """Creating with the default request body succeeds."""
    # Pin uuid4 so the created server id is predictable.
    self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
    self.req.body = jsonutils.dump_as_bytes(self.body)
    created = self.controller.create(self.req, body=self.body).obj['server']
    self.assertEqual(FAKE_UUID, created['id'])
# A client-supplied adminPass is echoed back in the response.
def test_create_instance_admin_password(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = 'testpass'
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(server['adminPass'],
self.body['server']['adminPass'])
# With enable_instance_password off, creation still succeeds.
def test_create_instance_admin_password_pass_disabled(self):
self.flags(enable_instance_password=False, group='api')
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = 'testpass'
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
self.assertIn('server', res)
# NOTE(review): this only re-checks the request body; there is no
# assertion that adminPass is absent from res['server'] — confirm
# whether that check was intended.
self.assertIn('adminPass', self.body['server'])
# An empty adminPass is accepted.
def test_create_instance_admin_password_empty(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = ''
self.req.body = jsonutils.dump_as_bytes(self.body)
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body)
# The Location response header points at the new server resource.
def test_create_location(self):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
self.req.body = jsonutils.dump_as_bytes(self.body)
robj = self.controller.create(self.req, body=self.body)
self.assertEqual(encodeutils.safe_decode(robj['Location']), selfhref)
# Shared helper: arrange quota usage for *resource* so that creation
# exceeds the limit, then assert the 403 carries *expected_msg*.
@mock.patch('nova.objects.Quotas.get_all_by_project')
@mock.patch('nova.objects.Quotas.get_all_by_project_and_user')
@mock.patch('nova.objects.Quotas.count_as_dict')
def _do_test_create_instance_above_quota(self, resource, allowed,
quota, expected_msg, mock_count, mock_get_all_pu,
mock_get_all_p):
count = {'project': {}, 'user': {}}
for res in ('instances', 'ram', 'cores'):
if res == resource:
# Report existing usage such that only *allowed* remain.
value = quota - allowed
count['project'][res] = count['user'][res] = value
else:
count['project'][res] = count['user'][res] = 0
mock_count.return_value = count
mock_get_all_p.return_value = {'project_id': 'fake'}
mock_get_all_pu.return_value = {'project_id': 'fake',
'user_id': 'fake_user'}
# Attach the limit to the project- or user-level quota set,
# depending on which scope the resource is tracked at.
if resource in db_api.PER_PROJECT_QUOTAS:
mock_get_all_p.return_value[resource] = quota
else:
mock_get_all_pu.return_value[resource] = quota
fakes.stub_out_instance_quota(self, allowed, quota, resource)
self.body['server']['flavorRef'] = 3
self.req.body = jsonutils.dump_as_bytes(self.body)
try:
self.controller.create(self.req, body=self.body).obj['server']
self.fail('expected quota to be exceeded')
except webob.exc.HTTPForbidden as e:
self.assertEqual(e.explanation, expected_msg)
# Quota-exceeded scenarios for each tracked resource.
def test_create_instance_above_quota_instances(self):
msg = ('Quota exceeded for instances: Requested 1, but'
' already used 10 of 10 instances')
self._do_test_create_instance_above_quota('instances', 0, 10, msg)
def test_create_instance_above_quota_ram(self):
msg = ('Quota exceeded for ram: Requested 4096, but'
' already used 8192 of 10240 ram')
self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
def test_create_instance_above_quota_cores(self):
msg = ('Quota exceeded for cores: Requested 2, but'
' already used 9 of 10 cores')
self._do_test_create_instance_above_quota('cores', 1, 10, msg)
# Exceeding the server_group_members quota for the scheduler-hint
# group results in a 403 with a group-specific message.
@mock.patch.object(fakes.QUOTAS, 'limit_check')
def test_create_instance_above_quota_server_group_members(
self, mock_limit_check):
ctxt = self.req.environ['nova.context']
fake_group = objects.InstanceGroup(ctxt)
fake_group.project_id = ctxt.project_id
fake_group.user_id = ctxt.user_id
fake_group.create()
real_count = fakes.QUOTAS.count_as_dict
# Only fake the server_group_members count; delegate the rest.
def fake_count(context, name, group, user_id):
if name == 'server_group_members':
self.assertEqual(group.uuid, fake_group.uuid)
self.assertEqual(user_id,
self.req.environ['nova.context'].user_id)
return {'user': {'server_group_members': 10}}
else:
return real_count(context, name, group, user_id)
def fake_limit_check(context, **kwargs):
if 'server_group_members' in kwargs:
raise exception.OverQuota(overs={})
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
mock_limit_check.side_effect = fake_limit_check
self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
self.body['os:scheduler_hints'] = {'group': fake_group.uuid}
self.req.body = jsonutils.dump_as_bytes(self.body)
expected_msg = "Quota exceeded, too many servers in group"
try:
with mock.patch.object(fakes.QUOTAS, 'count_as_dict',
side_effect=fake_count):
self.controller.create(self.req, body=self.body).obj
self.fail('expected quota to be exceeded')
except webob.exc.HTTPForbidden as e:
self.assertEqual(e.explanation, expected_msg)
# A valid group scheduler hint results in the new server being added
# to that instance group.
def test_create_instance_with_group_hint(self):
ctxt = self.req.environ['nova.context']
test_group = objects.InstanceGroup(ctxt)
test_group.project_id = ctxt.project_id
test_group.user_id = ctxt.user_id
test_group.create()
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
self.body['os:scheduler_hints'] = {'group': test_group.uuid}
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller.create(self.req, body=self.body).obj['server']
# Re-read the group to pick up the new membership.
test_group = objects.InstanceGroup.get_by_uuid(ctxt, test_group.uuid)
self.assertIn(server['id'], test_group.members)
def _test_create_instance_with_group_hint(self, hint,
                                          hint_name='os:scheduler_hints'):
    """Create a server with *hint* under *hint_name* and return it.

    compute_api.API.create is stubbed to assert the hint reaches it
    unchanged as scheduler_hints.
    """
    def fake_instance_destroy(context, uuid, constraint):
        return fakes.stub_instance(1)

    def fake_create(*args, **kwargs):
        self.assertEqual(kwargs['scheduler_hints'], hint)
        return ([fakes.stub_instance(1)], '')

    self.stub_out('nova.compute.api.API.create', fake_create)
    # Fix: stub the canonical 'nova.db.api.instance_destroy' path used
    # by every other test in this class; 'nova.db.instance_destroy'
    # was a stale location.
    self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
    self.body[hint_name] = hint
    self.req.body = jsonutils.dump_as_bytes(self.body)
    return self.controller.create(self.req, body=self.body).obj['server']
# The legacy OS-SCH-HNT hint namespace and the different_host hint
# (single value and list) are all accepted.
def test_create_instance_with_group_hint_legacy(self):
self._test_create_instance_with_group_hint(
{'different_host': '9c47bf55-e9d8-42da-94ab-7f9e80cd1857'},
hint_name='OS-SCH-HNT:scheduler_hints')
def test_create_server_with_different_host_hint(self):
self._test_create_instance_with_group_hint(
{'different_host': '9c47bf55-e9d8-42da-94ab-7f9e80cd1857'})
self._test_create_instance_with_group_hint(
{'different_host': ['9c47bf55-e9d8-42da-94ab-7f9e80cd1857',
'82412fa6-0365-43a9-95e4-d8b20e00c0de']})
# A group hint naming a nonexistent instance group is a 400.
def test_create_instance_with_group_hint_group_not_found(self):
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
self.body['os:scheduler_hints'] = {
'group': '5b674f73-c8cf-40ef-9965-3b6fe4b304b1'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
# The group hint must be a UUID; anything else fails validation.
def test_create_instance_with_group_hint_wrong_uuid_format(self):
self.body['os:scheduler_hints'] = {
'group': 'non-uuid'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
# Scheduler hints must be a dict (in both hint namespaces), and the
# group value must respect the 255-char limit.
def test_create_server_bad_hints_non_dict(self):
sch_hints = ['os:scheduler_hints', 'OS-SCH-HNT:scheduler_hints']
for hint in sch_hints:
self.body[hint] = 'non-dict'
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_server_bad_hints_long_group(self):
self.body['os:scheduler_hints'] = {
'group': 'a' * 256}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
# different_host hint values must be server UUIDs, whether passed as
# a single value or as a list.
def test_create_server_with_bad_different_host_hint(self):
self.body['os:scheduler_hints'] = {
'different_host': 'non-server-id'}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
self.body['os:scheduler_hints'] = {
'different_host': ['non-server-id01', 'non-server-id02']}
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
# A PortInUse error from API.create maps to a 409 Conflict.
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PortInUse(port_id=uuids.port))
def test_create_instance_with_neutronv2_port_in_use(self, mock_create):
requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
# Attaching to an external network without admin rights maps to 403.
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_public_network_non_admin(self, mock_create):
public_network_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
params = {'networks': [{'uuid': public_network_uuid}]}
self.req.body = jsonutils.dump_as_bytes(self.body)
mock_create.side_effect = exception.ExternalNetworkAttachForbidden(
network_uuid=public_network_uuid)
self.assertRaises(webob.exc.HTTPForbidden,
self._test_create_extra, params)
# String min/max counts are coerced to integers before API.create.
def test_multiple_create_with_string_type_min_and_max(self):
min_count = '2'
max_count = '3'
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIsInstance(kwargs['min_count'], int)
self.assertIsInstance(kwargs['max_count'], int)
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['max_count'], 3)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_extra(params)
# Integer min/max counts are passed through unchanged.
def test_create_instance_with_multiple_create_enabled(self):
min_count = 2
max_count = 3
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['max_count'], 3)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_extra(params)
def test_create_instance_invalid_negative_min(self):
    """min_count below 1 fails schema validation."""
    server = {'min_count': -1,
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3'}
    self.assertRaises(exception.ValidationError,
                      self.controller.create,
                      self.req,
                      body={'server': server})
def test_create_instance_invalid_negative_max(self):
    """max_count below 1 fails schema validation."""
    server = {'max_count': -1,
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3'}
    self.assertRaises(exception.ValidationError,
                      self.controller.create,
                      self.req,
                      body={'server': server})
def test_create_instance_with_blank_min(self):
    """An empty-string min_count fails schema validation."""
    server = {'min_count': '',
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3'}
    self.assertRaises(exception.ValidationError,
                      self.controller.create,
                      self.req,
                      body={'server': server})
def test_create_instance_with_blank_max(self):
    """An empty-string max_count fails schema validation."""
    server = {'max_count': '',
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3'}
    self.assertRaises(exception.ValidationError,
                      self.controller.create,
                      self.req,
                      body={'server': server})
def test_create_instance_invalid_min_greater_than_max(self):
    """min_count larger than max_count is rejected with a 400."""
    server = {'min_count': 4,
              'max_count': 2,
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3'}
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create,
                      self.req,
                      body={'server': server})
def test_create_instance_invalid_alpha_min(self):
    """A non-numeric min_count fails schema validation."""
    server = {'min_count': 'abcd',
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3'}
    self.assertRaises(exception.ValidationError,
                      self.controller.create,
                      self.req,
                      body={'server': server})
def test_create_instance_invalid_alpha_max(self):
    """A non-numeric max_count fails schema validation."""
    server = {'max_count': 'abcd',
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3'}
    self.assertRaises(exception.ValidationError,
                      self.controller.create,
                      self.req,
                      body={'server': server})
def test_create_multiple_instances(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
# Record created instances by uuid so the response id can be checked.
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
res = self.controller.create(self.req, body=body).obj
instance_uuids = self.instance_cache_by_uuid.keys()
self.assertIn(res["server"]["id"], instance_uuids)
self._check_admin_password_len(res["server"])
def test_create_multiple_instances_pass_disabled(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
# With instance passwords disabled, no adminPass in the response.
self.flags(enable_instance_password=False, group='api')
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
res = self.controller.create(self.req, body=body).obj
instance_uuids = self.instance_cache_by_uuid.keys()
self.assertIn(res["server"]["id"], instance_uuids)
self._check_admin_password_missing(res["server"])
def _create_multiple_instances_resv_id_return(self, resv_id_return):
"""Test creating multiple instances with asking for
reservation_id
"""
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'return_reservation_id': resv_id_return
}
}
res = self.controller.create(self.req, body=body)
# The response carries only a non-empty reservation id.
reservation_id = res.obj['reservation_id']
self.assertNotEqual(reservation_id, "")
self.assertIsNotNone(reservation_id)
self.assertGreater(len(reservation_id), 1)
# Both boolean True and the string "True" request a reservation id.
def test_create_multiple_instances_with_resv_id_return(self):
self._create_multiple_instances_resv_id_return(True)
def test_create_multiple_instances_with_string_resv_id_return(self):
self._create_multiple_instances_resv_id_return("True")
def test_create_multiple_instances_with_multiple_volume_bdm(self):
"""Test that a BadRequest is raised if multiple instances
are requested with a list of block device mappings for volumes.
"""
min_count = 2
bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'},
{'source_type': 'volume', 'uuid': 'vol-yyyy'}
]
params = {
'block_device_mapping_v2': bdm,
'min_count': min_count
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(len(kwargs['block_device_mapping']), 2)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
exc = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params, no_image=True)
self.assertEqual("Cannot attach one or more volumes to multiple "
"instances", exc.explanation)
def test_create_multiple_instances_with_single_volume_bdm(self):
"""Test that a BadRequest is raised if multiple instances
are requested to boot from a single volume.
"""
min_count = 2
bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'}]
params = {
'block_device_mapping_v2': bdm,
'min_count': min_count
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['block_device_mapping'][0]['volume_id'],
'vol-xxxx')
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
exc = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params, no_image=True)
self.assertEqual("Cannot attach one or more volumes to multiple "
"instances", exc.explanation)
def test_create_multiple_instance_with_non_integer_max_count(self):
    """A fractional max_count fails schema validation."""
    server = {'max_count': 2.5,
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3',
              'metadata': {'hello': 'world',
                           'open': 'stack'}}
    self.assertRaises(exception.ValidationError,
                      self.controller.create, self.req,
                      body={'server': server})
def test_create_multiple_instance_with_non_integer_min_count(self):
    """A fractional min_count fails schema validation."""
    server = {'min_count': 2.5,
              'name': 'server_test',
              'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
              'flavorRef': 'http://localhost/123/flavors/3',
              'metadata': {'hello': 'world',
                           'open': 'stack'}}
    self.assertRaises(exception.ValidationError,
                      self.controller.create, self.req,
                      body={'server': server})
# If max_count exceeds quota but min_count fits, creation succeeds
# (with fewer instances than max_count).
def test_create_multiple_instance_max_count_overquota_min_count_ok(self):
self.flags(instances=3, group='quota')
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'max_count': 5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
def create_db_entry_for_new_instance(*args, **kwargs):
instance = args[4]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API.create_db_entry_for_new_instance',
create_db_entry_for_new_instance)
res = self.controller.create(self.req, body=body).obj
instance_uuids = self.instance_cache_by_uuid.keys()
self.assertIn(res["server"]["id"], instance_uuids)
def test_create_multiple_instance_max_count_overquota_min_count_over(self):
self.flags(instances=3, group='quota')
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 4,
'max_count': 5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(webob.exc.HTTPForbidden, self.controller.create,
self.req, body=body)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_multiple_instance_with_specified_ip_neutronv2(self,
                                                                  _api_mock):
        """A fixed IP combined with max_count > 1 results in HTTP 400."""
        _api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest(
            reason="")
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        address = '10.0.0.1'
        requested_networks = [{'uuid': network, 'fixed_ip': address,
                               'port': port}]
        params = {'networks': requested_networks}
        self.body['server']['max_count'] = 2
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.MultiplePortsNotApplicable(
                           reason="Unable to launch multiple instances with "
                                  "a single configured port ID. Please "
                                  "launch your instance one by one with "
                                  "different ports."))
    def test_create_multiple_instance_with_neutronv2_port(self, mock_create):
        """A single port with max_count > 1 results in HTTP 400."""
        requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
        params = {'networks': requested_networks}
        self.body['server']['max_count'] = 2
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.NetworkNotFound(
                           network_id=uuids.network))
    def test_create_instance_with_neutronv2_not_found_network(
            self, mock_create):
        """NetworkNotFound from the compute API maps to HTTP 400."""
        requested_networks = [{'uuid': uuids.network}]
        params = {'networks': requested_networks}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.PortNotFound(port_id=uuids.port))
    def test_create_instance_with_neutronv2_port_not_found(self, mock_create):
        """PortNotFound from the compute API maps to HTTP 400."""
        requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
        params = {'networks': requested_networks}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_network_ambiguous(self, mock_create):
        """NetworkAmbiguous from the compute API maps to HTTP 409."""
        mock_create.side_effect = exception.NetworkAmbiguous()
        self.assertRaises(webob.exc.HTTPConflict,
                          self._test_create_extra, {})
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.UnableToAutoAllocateNetwork(
                           project_id=FAKE_UUID))
    def test_create_instance_with_unable_to_auto_allocate_network(self,
                                                                  mock_create):
        """UnableToAutoAllocateNetwork maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, {})
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.ImageNotAuthorized(
                           image_id=FAKE_UUID))
    def test_create_instance_with_image_not_authorized(self,
                                                       mock_create):
        """ImageNotAuthorized from the compute API maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, {})
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InstanceExists(
                           name='instance-name'))
    def test_create_instance_raise_instance_exists(self, mock_create):
        """InstanceExists from the compute API maps to HTTP 409."""
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidBDMEphemeralSize)
    def test_create_instance_raise_invalid_bdm_ephsize(self, mock_create):
        """InvalidBDMEphemeralSize maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidNUMANodesNumber(
                           nodes='-1'))
    def test_create_instance_raise_invalid_numa_nodes(self, mock_create):
        """InvalidNUMANodesNumber maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidBDMFormat(details=''))
    def test_create_instance_raise_invalid_bdm_format(self, mock_create):
        """InvalidBDMFormat maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidBDMSwapSize)
    def test_create_instance_raise_invalid_bdm_swapsize(self, mock_create):
        """InvalidBDMSwapSize maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidBDM)
    def test_create_instance_raise_invalid_bdm(self, mock_create):
        """Generic InvalidBDM maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.ImageBadRequest(
                           image_id='dummy', response='dummy'))
    def test_create_instance_raise_image_bad_request(self, mock_create):
        """ImageBadRequest maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    def test_create_instance_invalid_availability_zone(self):
        """A malformed availability_zone string results in HTTP 400."""
        self.body['server']['availability_zone'] = 'invalid::::zone'
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.FixedIpNotFoundForAddress(
                           address='dummy'))
    def test_create_instance_raise_fixed_ip_not_found_bad_request(self,
                                                                  mock_create):
        """FixedIpNotFoundForAddress maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch('nova.virt.hardware.numa_get_constraints',
                side_effect=exception.CPUThreadPolicyConfigurationInvalid())
    def test_create_instance_raise_cpu_thread_policy_configuration_invalid(
            self, mock_numa):
        """CPUThreadPolicyConfigurationInvalid maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch('nova.virt.hardware.numa_get_constraints',
                side_effect=exception.ImageCPUPinningForbidden())
    def test_create_instance_raise_image_cpu_pinning_forbidden(
            self, mock_numa):
        """ImageCPUPinningForbidden maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch('nova.virt.hardware.numa_get_constraints',
                side_effect=exception.ImageCPUThreadPolicyForbidden())
    def test_create_instance_raise_image_cpu_thread_policy_forbidden(
            self, mock_numa):
        """ImageCPUThreadPolicyForbidden maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch('nova.virt.hardware.numa_get_constraints',
                side_effect=exception.MemoryPageSizeInvalid(pagesize='-1'))
    def test_create_instance_raise_memory_page_size_invalid(self, mock_numa):
        """MemoryPageSizeInvalid maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch('nova.virt.hardware.numa_get_constraints',
                side_effect=exception.MemoryPageSizeForbidden(pagesize='1',
                                                              against='2'))
    def test_create_instance_raise_memory_page_size_forbidden(self, mock_numa):
        """MemoryPageSizeForbidden maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch('nova.virt.hardware.numa_get_constraints',
                side_effect=exception.RealtimeConfigurationInvalid())
    def test_create_instance_raise_realtime_configuration_invalid(
            self, mock_numa):
        """RealtimeConfigurationInvalid maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch('nova.virt.hardware.numa_get_constraints',
                side_effect=exception.RealtimeMaskNotFoundOrInvalid())
    def test_create_instance_raise_realtime_mask_not_found_or_invalid(
            self, mock_numa):
        """RealtimeMaskNotFoundOrInvalid maps to HTTP 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_invalid_personality(self, mock_create):
        """A UnicodeDecodeError while decoding personality content is
        translated to HTTP 400.
        """
        # Personality files have been deprecated as of v2.57
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.56')

        codec = 'utf8'
        content = encodeutils.safe_encode(
                'b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==')
        start_position = 19
        end_position = 20
        msg = 'invalid start byte'
        # Simulate the decode failure coming out of the compute API.
        mock_create.side_effect = UnicodeDecodeError(codec, content,
                                                     start_position,
                                                     end_position, msg)

        self.body['server']['personality'] = [
            {
                "path": "/etc/banner.txt",
                "contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
            },
        ]
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_without_personality_should_get_empty_list(self):
        """Omitting personality passes an empty injected_files list down."""
        # Personality files have been deprecated as of v2.57
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.56')

        old_create = compute_api.API.create

        def create(*args, **kwargs):
            # The controller should default injected_files to [].
            self.assertEqual([], kwargs['injected_files'])
            return old_create(*args, **kwargs)

        self.stub_out('nova.compute.api.API.create', create)
        self._test_create_instance()
    def test_create_instance_with_extra_personality_arg(self):
        """Unknown keys inside a personality entry fail schema validation."""
        # Personality files have been deprecated as of v2.57
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.56')

        self.body['server']['personality'] = [
            {
                "path": "/etc/banner.txt",
                "contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
                "extra_arg": "extra value"
            },
        ]

        self.assertRaises(exception.ValidationError,
                          self.controller.create,
                          self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.PciRequestAliasNotDefined(
                           alias='fake_name'))
    def test_create_instance_pci_alias_not_defined(self, mock_create):
        # Tests that PciRequestAliasNotDefined is translated to a 400 error.
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._test_create_extra, {})
        self.assertIn('PCI alias fake_name is not defined', six.text_type(ex))
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.PciInvalidAlias(
                           reason='just because'))
    def test_create_instance_pci_invalid_alias(self, mock_create):
        # Tests that PciInvalidAlias is translated to a 400 error.
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._test_create_extra, {})
        self.assertIn('Invalid PCI alias definition', six.text_type(ex))
    def test_create_instance_with_user_data(self):
        """Base64-encoded user_data is accepted."""
        value = base64.encode_as_text("A random string")
        params = {'user_data': value}
        self._test_create_extra(params)
    def test_create_instance_with_bad_user_data(self):
        """Non-base64 user_data fails schema validation."""
        value = "A random string"
        params = {'user_data': value}
        self.assertRaises(exception.ValidationError,
                          self._test_create_extra, params)
    @mock.patch('nova.compute.api.API.create')
    def test_create_instance_with_none_allowd_for_v20_compat_mode(self,
                                                                  mock_create):
        """user_data=None is allowed in legacy v2.0 compatibility mode.

        NOTE(review): the method name has a typo ("allowd"); kept as-is so
        existing test selection by name is not broken.
        """

        def create(context, *args, **kwargs):
            # The None value must be forwarded untouched to the compute API.
            self.assertIsNone(kwargs['user_data'])
            return ([fakes.stub_instance_obj(context)], None)

        mock_create.side_effect = create
        self.req.set_legacy_v2()
        params = {'user_data': None}
        self._test_create_extra(params)
class ServersControllerCreateTestV219(ServersControllerCreateTest):
    """Tests server create with the v2.19 ``description`` field."""

    def _create_instance_req(self, set_desc, desc=None):
        # Optionally place a description in the request body and pin the
        # request to microversion 2.19, where the field was introduced.
        if set_desc:
            self.body['server']['description'] = desc
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.19')

    def test_create_instance_with_description(self):
        self._create_instance_req(True, 'server_desc')
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj

    def test_create_instance_with_none_description(self):
        self._create_instance_req(True)
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj

    def test_create_instance_with_empty_description(self):
        self._create_instance_req(True, '')
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj

    def test_create_instance_without_description(self):
        self._create_instance_req(False)
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj

    def test_create_instance_description_too_long(self):
        # Schema caps description length at 255 characters.
        self._create_instance_req(True, 'X' * 256)
        self.assertRaises(exception.ValidationError, self.controller.create,
                          self.req, body=self.body)

    def test_create_instance_description_invalid(self):
        # NUL bytes are rejected by the description schema.
        self._create_instance_req(True, "abc\0ddef")
        self.assertRaises(exception.ValidationError, self.controller.create,
                          self.req, body=self.body)
class ServersControllerCreateTestV232(test.NoDBTestCase):
    """Tests server create with v2.32 device tags on NICs and BDMs."""

    def setUp(self):
        super(ServersControllerCreateTestV232, self).setUp()

        self.flags(use_neutron=True)

        self.controller = servers.ServersController()

        # Baseline boot-from-volume request; individual tests add 'tag'
        # entries to the network or BDM as needed.
        self.body = {
            'server': {
                'name': 'device-tagging-server',
                'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
                'flavorRef': '2',
                'networks': [{
                    'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
                }],
                'block_device_mapping_v2': [{
                    'uuid': '70a599e0-31e7-49b7-b260-868f441e862b',
                    'source_type': 'image',
                    'destination_type': 'volume',
                    'boot_index': 0,
                    'volume_size': '1'
                }]
            }
        }

        self.req = fakes.HTTPRequestV21.blank('/fake/servers', version='2.32')
        self.req.method = 'POST'
        self.req.headers['content-type'] = 'application/json'

    def _create_server(self):
        # Serialize the current body and issue the create request.
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body)

    def test_create_server_no_tags_old_compute(self):
        """An untagged request is fine even with old computes."""
        with test.nested(
            mock.patch('nova.objects.service.get_minimum_version_all_cells',
                       return_value=13),
            mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
                              return_value=objects.Flavor()),
            mock.patch.object(
                compute_api.API, 'create',
                return_value=(
                    [{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
                    1)),
        ):
            self._create_server()

    @mock.patch('nova.objects.service.get_minimum_version_all_cells',
                return_value=13)
    def test_create_server_tagged_nic_old_compute_fails(self, get_min_ver):
        """Tagged NICs are rejected while computes are too old."""
        self.body['server']['networks'][0]['tag'] = 'foo'
        self.assertRaises(webob.exc.HTTPBadRequest, self._create_server)

    @mock.patch('nova.objects.service.get_minimum_version_all_cells',
                return_value=13)
    def test_create_server_tagged_bdm_old_compute_fails(self, get_min_ver):
        """Tagged BDMs are rejected while computes are too old."""
        self.body['server']['block_device_mapping_v2'][0]['tag'] = 'foo'
        self.assertRaises(webob.exc.HTTPBadRequest, self._create_server)

    def test_create_server_tagged_nic_new_compute(self):
        """Tagged NICs are accepted once computes are new enough."""
        with test.nested(
            mock.patch('nova.objects.service.get_minimum_version_all_cells',
                       return_value=14),
            mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
                              return_value=objects.Flavor()),
            mock.patch.object(
                compute_api.API, 'create',
                return_value=(
                    [{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
                    1)),
        ):
            self.body['server']['networks'][0]['tag'] = 'foo'
            self._create_server()

    def test_create_server_tagged_bdm_new_compute(self):
        """Tagged BDMs are accepted once computes are new enough."""
        with test.nested(
            mock.patch('nova.objects.service.get_minimum_version_all_cells',
                       return_value=14),
            mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
                              return_value=objects.Flavor()),
            mock.patch.object(
                compute_api.API, 'create',
                return_value=(
                    [{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
                    1)),
        ):
            self.body['server']['block_device_mapping_v2'][0]['tag'] = 'foo'
            self._create_server()
class ServersControllerCreateTestV237(test.NoDBTestCase):
    """Tests server create scenarios with the v2.37 microversion.

    These tests are mostly about testing the validation on the 2.37
    server create request with emphasis on negative scenarios.
    """

    def setUp(self):
        super(ServersControllerCreateTestV237, self).setUp()
        # Set the use_neutron flag to process requested networks.
        self.flags(use_neutron=True)
        # Create the server controller.
        self.controller = servers.ServersController()
        # Define a basic server create request body which tests can customize.
        self.body = {
            'server': {
                'name': 'auto-allocate-test',
                'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
                'flavorRef': '2',
            },
        }
        # Create a fake request using the 2.37 microversion.
        self.req = fakes.HTTPRequestV21.blank('/fake/servers', version='2.37')
        self.req.method = 'POST'
        self.req.headers['content-type'] = 'application/json'

    def _create_server(self, networks):
        # Inject the requested networks value ('auto'/'none'/list) and
        # issue the create request, returning the server view on success.
        self.body['server']['networks'] = networks
        self.req.body = jsonutils.dump_as_bytes(self.body)
        return self.controller.create(self.req, body=self.body).obj['server']

    def test_create_server_auth_pre_2_37_fails(self):
        """Negative test to make sure you can't pass 'auto' before 2.37"""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.36')
        self.assertRaises(exception.ValidationError, self._create_server,
                          'auto')

    def test_create_server_no_requested_networks_fails(self):
        """Negative test for a server create request with no networks requested
        which should fail with the v2.37 schema validation.
        """
        self.assertRaises(exception.ValidationError, self._create_server, None)

    def test_create_server_network_id_not_uuid_fails(self):
        """Negative test for a server create request where the requested
        network id is not one of the auto/none enums.
        """
        self.assertRaises(exception.ValidationError, self._create_server,
                          'not-auto-or-none')

    def test_create_server_network_id_empty_string_fails(self):
        """Negative test for a server create request where the requested
        network id is the empty string.
        """
        self.assertRaises(exception.ValidationError, self._create_server, '')

    @mock.patch.object(context.RequestContext, 'can')
    def test_create_server_networks_none_skip_policy(self, context_can):
        """Test to ensure skip checking policy rule create:attach_network,
        when networks is 'none' which means no network will be allocated.
        """
        with test.nested(
            mock.patch('nova.objects.service.get_minimum_version_all_cells',
                       return_value=14),
            mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
                              return_value=objects.Flavor()),
            mock.patch.object(
                compute_api.API, 'create',
                return_value=(
                    [{'uuid': 'f9bccadf-5ab1-4a56-9156-c00c178fe5f5'}],
                    1)),
        ):
            network_policy = server_policies.SERVERS % 'create:attach_network'
            self._create_server('none')
            # The attach_network policy must never have been consulted.
            call_list = [c for c in context_can.call_args_list
                         if c[0][0] == network_policy]
            self.assertEqual(0, len(call_list))

    @mock.patch.object(objects.Flavor, 'get_by_flavor_id',
                       side_effect=exception.FlavorNotFound(flavor_id='2'))
    def test_create_server_auto_flavornotfound(self, get_flavor):
        """Tests that requesting auto networking is OK. This test
        short-circuits on a FlavorNotFound error.
        """
        self.useFixture(nova_fixtures.AllServicesCurrent())
        ex = self.assertRaises(
            webob.exc.HTTPBadRequest, self._create_server, 'auto')
        # make sure it was a flavor not found error and not something else
        self.assertIn('Flavor 2 could not be found', six.text_type(ex))

    @mock.patch.object(objects.Flavor, 'get_by_flavor_id',
                       side_effect=exception.FlavorNotFound(flavor_id='2'))
    def test_create_server_none_flavornotfound(self, get_flavor):
        """Tests that requesting none for networking is OK. This test
        short-circuits on a FlavorNotFound error.
        """
        self.useFixture(nova_fixtures.AllServicesCurrent())
        ex = self.assertRaises(
            webob.exc.HTTPBadRequest, self._create_server, 'none')
        # make sure it was a flavor not found error and not something else
        self.assertIn('Flavor 2 could not be found', six.text_type(ex))

    @mock.patch.object(objects.Flavor, 'get_by_flavor_id',
                       side_effect=exception.FlavorNotFound(flavor_id='2'))
    def test_create_server_multiple_specific_nics_flavornotfound(self,
                                                                 get_flavor):
        """Tests that requesting multiple specific network IDs is OK. This test
        short-circuits on a FlavorNotFound error.
        """
        self.useFixture(nova_fixtures.AllServicesCurrent())
        ex = self.assertRaises(
            webob.exc.HTTPBadRequest, self._create_server,
            [{'uuid': 'e3b686a8-b91d-4a61-a3fc-1b74bb619ddb'},
             {'uuid': 'e0f00941-f85f-46ec-9315-96ded58c2f14'}])
        # make sure it was a flavor not found error and not something else
        self.assertIn('Flavor 2 could not be found', six.text_type(ex))

    def test_create_server_legacy_neutron_network_id_fails(self):
        """Tests that we no longer support the legacy br-<uuid> format for
        a network id.
        """
        uuid = 'br-00000000-0000-0000-0000-000000000000'
        self.assertRaises(exception.ValidationError, self._create_server,
                          [{'uuid': uuid}])
@ddt.ddt
class ServersControllerCreateTestV252(test.NoDBTestCase):
    """Tests server create with the v2.52 ``tags`` field."""

    def setUp(self):
        super(ServersControllerCreateTestV252, self).setUp()
        self.controller = servers.ServersController()

        self.body = {
            'server': {
                'name': 'device-tagging-server',
                'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
                'flavorRef': '2',
                'networks': [{
                    'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
                }]
            }
        }

        self.req = fakes.HTTPRequestV21.blank('/fake/servers', version='2.52')
        self.req.method = 'POST'
        self.req.headers['content-type'] = 'application/json'

    def _create_server(self, tags):
        # Inject the tags list and issue the create request.
        self.body['server']['tags'] = tags
        self.req.body = jsonutils.dump_as_bytes(self.body)
        return self.controller.create(self.req, body=self.body).obj['server']

    def test_create_server_with_tags_pre_2_52_fails(self):
        """Negative test to make sure you can't pass 'tags' before 2.52"""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.51')
        self.assertRaises(
            exception.ValidationError, self._create_server, ['tag1'])

    # Each datum is an invalid tags value: forbidden characters, over-long
    # tag, too many tags, empty tag, non-string tags, non-list container.
    @ddt.data([','],
              ['/'],
              ['a' * (tag.MAX_TAG_LENGTH + 1)],
              ['a'] * (instance_obj.MAX_TAG_COUNT + 1),
              [''],
              [1, 2, 3],
              {'tag': 'tag'})
    def test_create_server_with_tags_incorrect_tags(self, tags):
        """Negative test to incorrect tags are not allowed"""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.52')
        self.assertRaises(
            exception.ValidationError, self._create_server, tags)
class ServersControllerCreateTestV257(test.NoDBTestCase):
    """Tests that trying to create a server with personality files using
    microversion 2.57 fails.
    """

    def test_create_server_with_personality_fails(self):
        controller = servers.ServersController()
        body = {
            'server': {
                'name': 'no-personality-files',
                'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
                'flavorRef': '2',
                'networks': 'auto',
                'personality': [{
                    'path': '/path/to/file',
                    'contents': 'ZWNobyAiaGVsbG8gd29ybGQi'
                }]
            }
        }
        req = fakes.HTTPRequestV21.blank('/servers', version='2.57')
        req.body = jsonutils.dump_as_bytes(body)
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        # Schema validation rejects the removed 'personality' property.
        ex = self.assertRaises(
            exception.ValidationError, controller.create, req, body=body)
        self.assertIn('personality', six.text_type(ex))
@mock.patch('nova.compute.utils.check_num_instances_quota',
            new=lambda *args, **kwargs: 1)
class ServersControllerCreateTestV260(test.NoDBTestCase):
    """Negative tests for creating a server with a multiattach volume."""

    def setUp(self):
        super(ServersControllerCreateTestV260, self).setUp()
        self.useFixture(nova_fixtures.NoopQuotaDriverFixture())
        self.controller = servers.ServersController()
        get_flavor_mock = mock.patch(
            'nova.compute.flavors.get_flavor_by_flavor_id',
            return_value=fake_flavor.fake_flavor_obj(
                context.get_admin_context(), flavorid='1'))
        get_flavor_mock.start()
        self.addCleanup(get_flavor_mock.stop)
        reqspec_create_mock = mock.patch(
            'nova.objects.RequestSpec.create')
        reqspec_create_mock.start()
        self.addCleanup(reqspec_create_mock.stop)
        # Cinder always reports the boot volume as multiattach-capable.
        volume_get_mock = mock.patch(
            'nova.volume.cinder.API.get',
            return_value={'id': uuids.fake_volume_id, 'multiattach': True})
        volume_get_mock.start()
        self.addCleanup(volume_get_mock.stop)

    def _post_server(self, version=None):
        # Boot-from-(multiattach)-volume request at the given microversion,
        # defaulting to 2.60 where multiattach support was introduced.
        body = {
            'server': {
                'name': 'multiattach',
                'flavorRef': '1',
                'networks': 'none',
                'block_device_mapping_v2': [{
                    'uuid': uuids.fake_volume_id,
                    'source_type': 'volume',
                    'destination_type': 'volume',
                    'boot_index': 0,
                    'delete_on_termination': True}]
            }
        }
        req = fakes.HTTPRequestV21.blank(
            '/servers', version=version or '2.60')
        req.body = jsonutils.dump_as_bytes(body)
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        return self.controller.create(req, body=body)

    def test_create_server_with_multiattach_fails_old_microversion(self):
        """Tests the case that the user tries to boot from volume with a
        multiattach volume but before using microversion 2.60.
        """
        self.useFixture(nova_fixtures.AllServicesCurrent())
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._post_server, '2.59')
        self.assertIn('Multiattach volumes are only supported starting with '
                      'compute API version 2.60', six.text_type(ex))

    @mock.patch('nova.objects.service.get_minimum_version_all_cells',
                return_value=compute_api.MIN_COMPUTE_MULTIATTACH - 1)
    def test_create_server_with_multiattach_fails_not_available(
            self, mock_get_min_version_all_cells):
        """Tests the case that the user tries to boot from volume with a
        multiattach volume but before the deployment is fully upgraded.
        """
        ex = self.assertRaises(webob.exc.HTTPConflict, self._post_server)
        self.assertIn('Multiattach volume support is not yet available',
                      six.text_type(ex))
class ServersControllerCreateTestV263(ServersControllerCreateTest):
    """Tests server create with v2.63 ``trusted_image_certificates``."""

    def _create_instance_req(self, certs=None):
        # Place the certificate list in the body, enable glance signature
        # verification, and pin the request to microversion 2.63.
        self.body['server']['trusted_image_certificates'] = certs

        self.flags(verify_glance_signatures=True, group='glance')
        self.flags(enable_certificate_validation=True, group='glance')

        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.63')

    @mock.patch('nova.objects.service.get_minimum_version_all_cells',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_create_instance_with_trusted_certs(self, get_min_ver):
        """Test create with valid trusted_image_certificates argument"""
        self._create_instance_req(
            ['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8',
             '674736e3-f25c-405c-8362-bbf991e0ce0a'])
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj

    def test_create_instance_without_trusted_certs(self):
        """Test create without trusted image certificates"""
        self._create_instance_req()
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj

    def test_create_instance_with_empty_trusted_cert_id(self):
        """Make sure we can't create with an empty certificate ID"""
        self._create_instance_req([''])
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn('is too short', six.text_type(ex))

    def test_create_instance_with_empty_trusted_certs(self):
        """Make sure we can't create with an empty array of IDs"""
        self.body['server']['trusted_image_certificates'] = []
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.63')
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn('is too short', six.text_type(ex))

    def test_create_instance_with_too_many_trusted_certs(self):
        """Make sure we can't create with an array of >50 unique IDs"""
        self._create_instance_req(['cert{}'.format(i) for i in range(51)])
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn('is too long', six.text_type(ex))

    def test_create_instance_with_nonunique_trusted_certs(self):
        """Make sure we can't create with a non-unique array of IDs"""
        self._create_instance_req(['cert', 'cert'])
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn('has non-unique elements', six.text_type(ex))

    def test_create_instance_with_invalid_trusted_cert_id(self):
        """Make sure we can't create with non-string certificate IDs"""
        self._create_instance_req([1, 2])
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn('is not of type', six.text_type(ex))

    def test_create_instance_with_invalid_trusted_certs(self):
        """Make sure we can't create with certificates in a non-array"""
        self._create_instance_req("not-an-array")
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn('is not of type', six.text_type(ex))

    def test_create_server_with_trusted_certs_pre_2_63_fails(self):
        """Make sure we can't use trusted_certs before 2.63"""
        self._create_instance_req(['trusted-cert-id'])
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.62')
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn('Additional properties are not allowed',
                      six.text_type(ex))

    def test_create_server_with_trusted_certs_policy_failed(self):
        """The dedicated trusted_certs policy rule is enforced."""
        rule_name = "os_compute_api:servers:create:trusted_certs"
        rules = {"os_compute_api:servers:create": "@",
                 "os_compute_api:servers:create:forced_host": "@",
                 "os_compute_api:servers:create:attach_volume": "@",
                 "os_compute_api:servers:create:attach_network": "@",
                 rule_name: "project:fake"}
        self._create_instance_req(['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'])
        self.policy.set_rules(rules)
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller.create, self.req,
                                body=self.body)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())

    @mock.patch.object(compute_api.API, 'create')
    def test_create_server_with_cert_validation_error(
            self, mock_create):
        """CertificateValidationFailed maps to HTTP 400."""
        mock_create.side_effect = exception.CertificateValidationFailed(
            cert_uuid="cert id", reason="test cert validation error")
        self._create_instance_req(['trusted-cert-id'])
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self.controller.create, self.req,
                               body=self.body)
        self.assertIn('test cert validation error',
                      six.text_type(ex))

    @mock.patch('nova.objects.service.get_minimum_version_all_cells',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS - 1)
    def test_create_server_with_cert_validation_not_available(
            self, mock_get_min_version_all_cells):
        """Trusted certs before the deployment is fully upgraded is a 409."""
        self._create_instance_req(['trusted-cert-id'])
        ex = self.assertRaises(webob.exc.HTTPConflict,
                               self.controller.create, self.req,
                               body=self.body)
        self.assertIn('Image signature certificate validation support '
                      'is not yet available',
                      six.text_type(ex))
class ServersControllerCreateTestV267(ServersControllerCreateTest):
    """Tests server create with the v2.67 BDM ``volume_type`` field."""

    def setUp(self):
        super(ServersControllerCreateTestV267, self).setUp()
        # Boot-from-volume mapping carrying the v2.67 volume_type attribute.
        self.block_device_mapping_v2 = [
            {'uuid': '70a599e0-31e7-49b7-b260-868f441e862b',
             'source_type': 'image',
             'destination_type': 'volume',
             'boot_index': 0,
             'volume_size': '1',
             'volume_type': 'fake-lvm-1'
             }]

    def _test_create_extra(self, *args, **kwargs):
        # Force the 2.67 microversion so volume_type passes schema validation.
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.67')
        return super(ServersControllerCreateTestV267, self)._test_create_extra(
            *args, **kwargs)

    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_VOLUME_TYPE)
    def test_create_server_with_trusted_volume_type_pre_2_67_fails(self,
                                                                   get_min_ver):
        """Make sure we can't use volume_type before 2.67"""
        self.body['server'].update(
            {'block_device_mapping_v2': self.block_device_mapping_v2})
        # Serialize the full request body (not just the BDM list) so
        # req.body matches the body= kwarg, consistent with sibling tests.
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.66')
        ex = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn("'volume_type' was unexpected", six.text_type(ex))

    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.VolumeTypeNotFound(
                           id_or_name='fake-lvm-1'))
    def test_create_instance_with_volume_type_not_found(self, mock_create):
        """Trying to boot from volume with a volume type that does not exist
        will result in a 400 error.
        """
        params = {'block_device_mapping_v2': self.block_device_mapping_v2}
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._test_create_extra, params)
        self.assertIn('Volume type fake-lvm-1 could not be found',
                      six.text_type(ex))

    @mock.patch('nova.objects.service.get_minimum_version_all_cells',
                return_value=compute_api.MIN_COMPUTE_VOLUME_TYPE - 1)
    def test_check_volume_type_new_inst_old_compute(self, get_min_version):
        """Trying to boot from volume with a volume_type but not all computes
        are upgraded will result in a 409 error.
        """
        params = {'block_device_mapping_v2': self.block_device_mapping_v2}
        ex = self.assertRaises(webob.exc.HTTPConflict,
                               self._test_create_extra, params)
        self.assertIn('Volume type support is not yet available',
                      six.text_type(ex))

    def test_create_instance_with_volume_type_empty_string(self):
        """Test passing volume_type='' which is accepted but not used."""
        self.block_device_mapping_v2[0]['volume_type'] = ''
        params = {'block_device_mapping_v2': self.block_device_mapping_v2}
        self._test_create_extra(params)

    def test_create_instance_with_none_volume_type(self):
        """Test passing volume_type=None which is accepted but not used."""
        self.block_device_mapping_v2[0]['volume_type'] = None
        params = {'block_device_mapping_v2': self.block_device_mapping_v2}
        self._test_create_extra(params)

    def test_create_instance_without_volume_type(self):
        """Test passing without volume_type which is accepted but not used."""
        self.block_device_mapping_v2[0].pop('volume_type')
        params = {'block_device_mapping_v2': self.block_device_mapping_v2}
        self._test_create_extra(params)

    def test_create_instance_with_volume_type_too_long(self):
        """Tests the maxLength schema validation on volume_type."""
        self.block_device_mapping_v2[0]['volume_type'] = 'X' * 256
        params = {'block_device_mapping_v2': self.block_device_mapping_v2}
        ex = self.assertRaises(exception.ValidationError,
                               self._test_create_extra, params)
        self.assertIn('is too long', six.text_type(ex))
class ServersControllerCreateTestWithMock(test.TestCase):
    """Create-server tests that stub the compute API directly with mock."""
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'
    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTestWithMock, self).setUp()
        self.flags(enable_instance_password=True, group='api')
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        self.controller = servers.ServersController()
        server = {
            'name': 'server_test',
            'imageRef': self.image_uuid,
            'flavorRef': self.flavor_ref,
            'metadata': {'hello': 'world', 'open': 'stack'},
        }
        self.body = {'server': server}
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
    def _test_create_extra(self, params, no_image=False):
        """Merge ``params`` into the server body and POST it."""
        # Point at a known-good flavor id and optionally drop the image ref.
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.headers["content-type"] = "application/json"
        self.controller.create(self.req, body=self.body).obj['server']
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_neutronv2_fixed_ip_already_in_use(
            self, create_mock):
        """A FixedIpAlreadyInUse from the compute API becomes a 400."""
        net_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        ip = '10.0.2.3'
        create_mock.side_effect = exception.FixedIpAlreadyInUse(
            address=ip, instance_uuid=net_uuid)
        self.assertRaises(
            webob.exc.HTTPBadRequest, self._test_create_extra,
            {'networks': [{'uuid': net_uuid, 'fixed_ip': ip}]})
        self.assertEqual(1, len(create_mock.call_args_list))
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_neutronv2_invalid_fixed_ip(
            self, create_mock):
        """A malformed fixed IP fails schema validation before create."""
        self.flags(use_neutron=True)
        net_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        self.assertRaises(
            exception.ValidationError, self._test_create_extra,
            {'networks': [{'uuid': net_uuid, 'fixed_ip': '999.0.2.3'}]})
        self.assertFalse(create_mock.called)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidVolume(reason='error'))
    def test_create_instance_with_invalid_volume_error(self, create_mock):
        # Tests that InvalidVolume is translated to a 400 error.
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, {})
class ServersViewBuilderTest(test.TestCase):
    """Tests for the servers ViewBuilder basic/show responses.

    The assertions compare the builder output against large literal
    expected-response dicts, so the fixture instance built in setUp must
    stay in sync with those literals.
    """
    def setUp(self):
        # Build one fake instance with network cache, security groups and
        # BDMs that the view builder renders in the tests below.
        super(ServersViewBuilderTest, self).setUp()
        self.flags(use_ipv6=True)
        # Neutron security groups are tested in test_neutron_security_groups.py
        self.flags(use_neutron=False)
        fakes.stub_out_nw_api(self)
        self.flags(group='glance', api_servers=['http://localhost:9292'])
        nw_cache_info = self._generate_nw_cache_info()
        db_inst = fakes.stub_instance(
            id=1,
            image_ref="5",
            uuid=FAKE_UUID,
            display_name="test_server",
            include_fake_metadata=False,
            availability_zone='nova',
            nw_cache=nw_cache_info,
            launched_at=None,
            terminated_at=None,
            security_groups=[
                {'name': 'fake-0-0', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None},
                {'name': 'fake-0-1', 'id': 1, 'description': 'foo',
                 'user_id': 'bar', 'project_id': 'baz', 'deleted': False,
                 'deleted_at': None, 'updated_at': None, 'created_at': None}],
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1)
        privates = ['172.19.0.1']
        publics = ['192.168.0.3']
        public6s = ['b33f::fdee:ddff:fecc:bbaa']
        def nw_info(*args, **kwargs):
            # Fake nw_info: one public network (v4 + v6) and one private.
            return [(None, {'label': 'public',
                            'ips': [dict(ip=ip) for ip in publics],
                            'ip6s': [dict(ip=ip) for ip in public6s]}),
                    (None, {'label': 'private',
                            'ips': [dict(ip=ip) for ip in privates]})]
        fakes.stub_out_nw_api_get_instance_nw_info(self, nw_info)
        self.stub_out('nova.db.api.'
                      'block_device_mapping_get_all_by_instance_uuids',
                      fake_bdms_get_all_by_instance_uuids)
        self.stub_out('nova.objects.InstanceMappingList.'
                      '_get_by_instance_uuids_from_db',
                      fake_get_inst_mappings_by_instance_uuids_from_db)
        self.uuid = db_inst['uuid']
        self.view_builder = views.servers.ViewBuilder()
        self.request = fakes.HTTPRequestV21.blank("/fake")
        self.request.context = context.RequestContext('fake', 'fake')
        self.instance = fake_instance.fake_instance_obj(
                    self.request.context,
                    expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS,
                    **db_inst)
        self.self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
        self.bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
    def _generate_nw_cache_info(self):
        """Return a three-port network info cache: two ports on network
        'test1' (one with both v4 and v6 subnets) and one on 'test2'.
        """
        fixed_ipv4 = ('192.168.1.100', '192.168.2.100', '192.168.3.100')
        fixed_ipv6 = ('2001:db8:0:1::1',)
        def _ip(ip):
            return {'address': ip, 'type': 'fixed'}
        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'test1',
                         'subnets': [{'cidr': '192.168.1.0/24',
                                      'ips': [_ip(fixed_ipv4[0])]},
                                     {'cidr': 'b33f::/64',
                                      'ips': [_ip(fixed_ipv6[0])]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'test1',
                         'subnets': [{'cidr': '192.168.2.0/24',
                                      'ips': [_ip(fixed_ipv4[1])]}]}},
            {'address': 'cc:cc:cc:cc:cc:cc',
             'id': 3,
             'network': {'bridge': 'br0',
                         'id': 2,
                         'label': 'test2',
                         'subnets': [{'cidr': '192.168.3.0/24',
                                      'ips': [_ip(fixed_ipv4[2])]}]}}]
        return nw_cache
    def test_get_flavor_valid_instance_type(self):
        """_get_flavor renders the flavor id plus a bookmark link."""
        flavor_bookmark = "http://localhost/fake/flavors/1"
        expected = {"id": "1",
                    "links": [{"rel": "bookmark",
                               "href": flavor_bookmark}]}
        result = self.view_builder._get_flavor(self.request, self.instance,
                                               False)
        self.assertEqual(result, expected)
    @mock.patch('nova.context.scatter_gather_cells')
    def test_get_volumes_attached_with_faily_cells(self, mock_sg):
        # NOTE(review): "faily" in the test name looks like a typo for
        # "faulty"/"failing" cells -- confirm before renaming.
        bdms = fake_bdms_get_all_by_instance_uuids()
        # just faking a nova list scenario
        mock_sg.return_value = {
            uuids.cell1: bdms[0],
            uuids.cell2: exception.BDMNotFound(id='fake')
        }
        ctxt = context.RequestContext('fake', 'fake')
        result = self.view_builder._get_instance_bdms_in_multiple_cells(
            ctxt, [self.instance.uuid])
        # will get the result from cell1
        self.assertEqual(result, bdms[0])
        mock_sg.assert_called_once()
    def test_build_server(self):
        """The basic view contains only id, name and links."""
        expected_server = {
            "server": {
                "id": self.uuid,
                "name": "test_server",
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.basic(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_with_project_id(self):
        # NOTE(review): this test is currently identical to
        # test_build_server -- nothing project-id specific is exercised;
        # confirm whether a project-id variation was intended.
        expected_server = {
            "server": {
                "id": self.uuid,
                "name": "test_server",
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.basic(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail(self):
        """The show view renders the complete detailed server document."""
        image_bookmark = "http://localhost/fake/images/5"
        flavor_bookmark = "http://localhost/fake/flavors/1"
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'fake-0-0'},
                                    {'name': 'fake-0-1'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_fault(self):
        """An instance in ERROR state includes its fault in the view."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
                                     self.request.context, self.uuid)
        image_bookmark = "http://localhost/fake/images/5"
        flavor_bookmark = "http://localhost/fake/flavors/1"
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "name": "test_server",
                "status": "ERROR",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "fault": {
                    "code": 404,
                    "created": "2010-10-10T12:00:00Z",
                    "message": "HTTPNotFound",
                    "details": "Stock details for test",
                },
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'fake-0-0'},
                                    {'name': 'fake-0-1'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ERROR,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        self.request.context = context.RequestContext('fake', 'fake')
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_fault_that_has_been_deleted(self):
        """Deleted servers report DELETED status but keep their fault."""
        self.instance['deleted'] = 1
        self.instance['vm_state'] = vm_states.ERROR
        fault = fake_instance.fake_fault_obj(self.request.context,
                                             self.uuid, code=500,
                                             message="No valid host was found")
        self.instance['fault'] = fault
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "No valid host was found"}
        self.request.context = context.RequestContext('fake', 'fake')
        output = self.view_builder.show(self.request, self.instance)
        # Regardless of vm_state deleted servers should be DELETED
        self.assertEqual("DELETED", output['server']['status'])
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    @mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
    def test_build_server_detail_with_fault_no_instance_mapping(self,
                                                                mock_im):
        """A missing instance mapping is tolerated when looking up faults."""
        self.instance['vm_state'] = vm_states.ERROR
        mock_im.side_effect = exception.InstanceMappingNotFound(uuid='foo')
        self.request.context = context.RequestContext('fake', 'fake')
        self.view_builder.show(self.request, self.instance)
        mock_im.assert_called_once_with(mock.ANY, self.uuid)
    @mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
    def test_build_server_detail_with_fault_loaded(self, mock_im):
        """An already-loaded fault avoids the instance mapping lookup."""
        self.instance['vm_state'] = vm_states.ERROR
        fault = fake_instance.fake_fault_obj(self.request.context,
                                             self.uuid, code=500,
                                             message="No valid host was found")
        self.instance['fault'] = fault
        self.request.context = context.RequestContext('fake', 'fake')
        self.view_builder.show(self.request, self.instance)
        self.assertFalse(mock_im.called)
    def test_build_server_detail_with_fault_no_details_not_admin(self):
        """Non-admin users do not see the fault details field."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
                                                   self.request.context,
                                                   self.uuid,
                                                   code=500,
                                                   message='Error')
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "Error"}
        self.request.context = context.RequestContext('fake', 'fake')
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    def test_build_server_detail_with_fault_admin(self):
        """Admin users see the fault details field."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
                                                   self.request.context,
                                                   self.uuid,
                                                   code=500,
                                                   message='Error')
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "Error",
                          'details': 'Stock details for test'}
        self.request.environ['nova.context'].is_admin = True
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    def test_build_server_detail_with_fault_no_details_admin(self):
        """Empty fault details are omitted even for admin users."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
                                                   self.request.context,
                                                   self.uuid,
                                                   code=500,
                                                   message='Error',
                                                   details='')
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "Error"}
        self.request.environ['nova.context'].is_admin = True
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    def test_build_server_detail_with_fault_but_active(self):
        """A fault on an ACTIVE instance is not shown in the view."""
        self.instance['vm_state'] = vm_states.ACTIVE
        self.instance['progress'] = 100
        self.instance['fault'] = fake_instance.fake_fault_obj(
                                     self.request.context, self.uuid)
        output = self.view_builder.show(self.request, self.instance)
        self.assertNotIn('fault', output['server'])
    def test_build_server_detail_active_status(self):
        # set the power state of the instance to running
        self.instance['vm_state'] = vm_states.ACTIVE
        self.instance['progress'] = 100
        image_bookmark = "http://localhost/fake/images/5"
        flavor_bookmark = "http://localhost/fake/flavors/1"
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 100,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'fake-0-0'},
                                    {'name': 'fake-0-1'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_metadata(self):
        """Instance metadata is rendered as a flat key/value dict."""
        metadata = []
        metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
        metadata = nova_utils.metadata_to_dict(metadata)
        self.instance['metadata'] = metadata
        image_bookmark = "http://localhost/fake/images/5"
        flavor_bookmark = "http://localhost/fake/flavors/1"
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {"Open": "Stack"},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'fake-0-0'},
                                    {'name': 'fake-0-1'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
class ServersViewBuilderTestV269(ServersViewBuilderTest):
    """Server ViewBuilder test for microversion 2.69

    The intent here is simply to verify that when showing server details
    after microversion 2.69 the response could have missing keys for those
    servers from the down cells.
    """
    wsgi_api_version = '2.69'
    def setUp(self):
        super(ServersViewBuilderTestV269, self).setUp()
        self.view_builder = views.servers.ViewBuilder()
        self.ctxt = context.RequestContext('fake', 'fake')
        def fake_is_supported(req, min_version="2.1", max_version="2.69"):
            # Treat every microversion up to 2.69 as supported for these
            # tests.
            return (fakes.api_version.APIVersionRequest(max_version) >=
                    req.api_version_request >=
                    fakes.api_version.APIVersionRequest(min_version))
        self.stub_out('nova.api.openstack.api_version_request.is_supported',
                      fake_is_supported)
    def req(self, url, use_admin_context=False):
        """Build a request pinned to this test class's microversion."""
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.wsgi_api_version)
    def test_get_server_list_detail_with_down_cells(self):
        # Fake out 1 partially constructed instance and one full instance.
        self.instances = [
                self.instance,
                objects.Instance(
                    context=self.ctxt,
                    uuid=uuids.fake1,
                    project_id='fake',
                    created_at=datetime.datetime(1955, 11, 5)
                )
            ]
        req = self.req('/fake/servers/detail')
        output = self.view_builder.detail(req, self.instances, True)
        self.assertEqual(2, len(output['servers']))
        image_bookmark = "http://localhost/fake/images/5"
        expected = {
            "servers": [{
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    'disk': 1,
                    'ephemeral': 1,
                    'vcpus': 1,
                    'ram': 256,
                    'original_name': 'flavor1',
                    'extra_specs': {},
                    'swap': 0
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "tags": [],
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "OS-EXT-SRV-ATTR:root_device_name": None,
                "accessIPv4": '',
                "accessIPv6": '',
                "host_status": '',
                "OS-EXT-SRV-ATTR:user_data": None,
                "trusted_image_certificates": None,
                "OS-EXT-AZ:availability_zone": "nova",
                "OS-EXT-SRV-ATTR:kernel_id": '',
                "OS-EXT-SRV-ATTR:reservation_id": '',
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:hostname": 'test_server',
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "locked": False,
                "description": None,
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'fake-0-0'},
                                    {'name': 'fake-0-1'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "OS-EXT-SRV-ATTR:launch_index": 0,
                "OS-EXT-SRV-ATTR:ramdisk_id": '',
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1', 'delete_on_termination': True},
                    {'id': 'some_volume_2', 'delete_on_termination': False},
                ]
            },
            {
                # The down-cell server only carries the minimal keys.
                'created': '1955-11-05T00:00:00Z',
                'id': uuids.fake1,
                'tenant_id': 'fake',
                "status": "UNKNOWN",
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v2/fake/servers/%s" %
                                uuids.fake1,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/fake/servers/%s" %
                                uuids.fake1,
                    },
                ],
            }]
        }
        self.assertThat(output, matchers.DictMatches(expected))
    def test_get_server_list_with_down_cells(self):
        # Fake out 1 partially constructed instance and one full instance.
        self.instances = [
                self.instance,
                objects.Instance(
                    context=self.ctxt,
                    uuid=uuids.fake1,
                    project_id='fake',
                    created_at=datetime.datetime(1955, 11, 5)
                )
            ]
        req = self.req('/fake/servers')
        output = self.view_builder.index(req, self.instances, True)
        self.assertEqual(2, len(output['servers']))
        expected = {
            "servers": [{
                "id": self.uuid,
                "name": "test_server",
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ]
            },
            {
                # The down-cell server has no name, only id/status/links.
                'id': uuids.fake1,
                "status": "UNKNOWN",
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v2/fake/servers/%s" %
                                uuids.fake1,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/fake/servers/%s" %
                                uuids.fake1,
                    },
                ],
            }]
        }
        self.assertThat(output, matchers.DictMatches(expected))
    def test_get_server_with_down_cells(self):
        # Fake out 1 partially constructed instance.
        self.instance = objects.Instance(
            context=self.ctxt,
            uuid=self.uuid,
            project_id=self.instance.project_id,
            created_at=datetime.datetime(1955, 11, 5),
            user_id=self.instance.user_id,
            image_ref=self.instance.image_ref,
            power_state=0,
            flavor=self.instance.flavor,
            availability_zone=self.instance.availability_zone
        )
        req = self.req('/fake/servers/%s' % FAKE_UUID)
        output = self.view_builder.show(req, self.instance,
                                        cell_down_support=True)
        # ten fields from request_spec and instance_mapping
        self.assertEqual(10, len(output['server']))
        image_bookmark = "http://localhost/fake/images/5"
        expected = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "created": '1955-11-05T00:00:00Z',
                "status": "UNKNOWN",
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    'disk': 1,
                    'ephemeral': 1,
                    'vcpus': 1,
                    'ram': 256,
                    'original_name': 'flavor1',
                    'extra_specs': {},
                    'swap': 0
                },
                "OS-EXT-AZ:availability_zone": "nova",
                "OS-EXT-STS:power_state": 0,
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v2/fake/servers/%s" %
                                self.uuid,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/fake/servers/%s" %
                                self.uuid,
                    },
                ]
            }
        }
        self.assertThat(output, matchers.DictMatches(expected))
    def test_get_server_without_image_avz_user_id_set_from_down_cells(self):
        # Fake out 1 partially constructed instance.
        self.instance = objects.Instance(
            context=self.ctxt,
            uuid=self.uuid,
            project_id=self.instance.project_id,
            created_at=datetime.datetime(1955, 11, 5),
            user_id=None,
            image_ref=None,
            power_state=0,
            flavor=self.instance.flavor,
            availability_zone=None
        )
        req = self.req('/fake/servers/%s' % FAKE_UUID)
        output = self.view_builder.show(req, self.instance,
                                        cell_down_support=True)
        # ten fields from request_spec and instance_mapping; the missing
        # image/AZ/user_id values are rendered as "" or "UNKNOWN" below.
        self.assertEqual(10, len(output['server']))
        expected = {
            "server": {
                "id": self.uuid,
                "user_id": "UNKNOWN",
                "tenant_id": "fake_project",
                "created": '1955-11-05T00:00:00Z',
                "status": "UNKNOWN",
                "image": "",
                "flavor": {
                    'disk': 1,
                    'ephemeral': 1,
                    'vcpus': 1,
                    'ram': 256,
                    'original_name': 'flavor1',
                    'extra_specs': {},
                    'swap': 0
                },
                "OS-EXT-AZ:availability_zone": "UNKNOWN",
                "OS-EXT-STS:power_state": 0,
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v2/fake/servers/%s" %
                                self.uuid,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/fake/servers/%s" %
                                self.uuid,
                    },
                ]
            }
        }
        self.assertThat(output, matchers.DictMatches(expected))
class ServersAllExtensionsTestCase(test.TestCase):
    """Servers tests using default API router with all extensions enabled.

    The intent here is to catch cases where extensions end up throwing
    an exception because of a malformed request before the core API
    gets a chance to validate the request and return a 400 response.

    For example, AccessIPsController extends servers.Controller::

        | @wsgi.extends
        | def create(self, req, resp_obj, body):
        |     context = req.environ['nova.context']
        |     if authorize(context) and 'server' in resp_obj.obj:
        |         resp_obj.attach(xml=AccessIPTemplate())
        |         server = resp_obj.obj['server']
        |         self._extend_server(req, server)

    we want to ensure that the extension isn't barfing on an invalid
    body.
    """
    def setUp(self):
        super(ServersAllExtensionsTestCase, self).setUp()
        self.app = compute.APIRouterV21()
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=test.TestingException(
                           "Should not reach the compute API."))
    def test_create_missing_server(self, mock_create):
        # Test create with malformed body.
        req = fakes.HTTPRequestV21.blank('/fake/servers')
        req.method = 'POST'
        req.content_type = 'application/json'
        req.body = jsonutils.dump_as_bytes({'foo': {'a': 'b'}})
        res = req.get_response(self.app)
        self.assertEqual(400, res.status_int)
    def test_update_missing_server(self):
        # Test update with malformed body.
        req = fakes.HTTPRequestV21.blank('/fake/servers/1')
        req.method = 'PUT'
        req.content_type = 'application/json'
        req.body = jsonutils.dump_as_bytes({'foo': {'a': 'b'}})
        with mock.patch('nova.objects.Instance.save') as mock_save:
            res = req.get_response(self.app)
            # The instance must never be touched for a malformed request.
            self.assertFalse(mock_save.called)
        self.assertEqual(400, res.status_int)
class ServersInvalidRequestTestCase(test.TestCase):
    """Tests of places we throw 400 Bad Request from."""
    def setUp(self):
        super(ServersInvalidRequestTestCase, self).setUp()
        self.controller = servers.ServersController()
    def _invalid_server_create(self, body):
        # POST the given body directly to the controller and expect schema
        # validation to reject it.
        req = fakes.HTTPRequestV21.blank('/fake/servers')
        req.method = 'POST'
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)
    def test_create_server_no_body(self):
        self._invalid_server_create(body=None)
    def test_create_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._invalid_server_create(body=body)
    def test_create_server_malformed_entity(self):
        body = {'server': 'string'}
        self._invalid_server_create(body=body)
    def _unprocessable_server_update(self, body):
        # NOTE(review): this helper is currently unused -- the
        # test_update_* / test_create_update_* methods below call
        # _invalid_server_create instead, which looks like a copy/paste
        # oversight. Confirm whether they should exercise the update path
        # via this helper (expected exception types may differ).
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.update, req, FAKE_UUID, body=body)
    def test_update_server_no_body(self):
        self._invalid_server_create(body=None)
    def test_update_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._invalid_server_create(body=body)
    def test_create_update_malformed_entity(self):
        body = {'server': 'string'}
        self._invalid_server_create(body=body)
# TODO(alex_xu): There isn't a dedicated file for the ips extension; most
# of the unit tests related to it live in this file. So the ips policy
# enforcement tests are placed here until a dedicated file exists.
class IPsPolicyEnforcementV21(test.NoDBTestCase):
    """Policy enforcement checks for the ips API controller."""
    def setUp(self):
        super(IPsPolicyEnforcementV21, self).setUp()
        self.controller = ips.IPsController()
        self.req = fakes.HTTPRequest.blank("/v2/fake")
    def _assert_policy_rejects(self, rule_name, func, *args):
        # Deny the rule for the caller's project, then verify the API call
        # is rejected with the standard policy error message.
        self.policy.set_rules({rule_name: "project:non_fake"})
        exc = self.assertRaises(
            exception.PolicyNotAuthorized, func, *args)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())
    def test_index_policy_failed(self):
        self._assert_policy_rejects(
            "os_compute_api:ips:index",
            self.controller.index, self.req, fakes.FAKE_UUID)
    def test_show_policy_failed(self):
        self._assert_policy_rejects(
            "os_compute_api:ips:show",
            self.controller.show, self.req, fakes.FAKE_UUID,
            fakes.FAKE_UUID)
class ServersPolicyEnforcementV21(test.NoDBTestCase):
    """Policy enforcement tests for the servers API controller.

    Each ``*_policy_failed`` test installs a policy rule the request
    context cannot satisfy and asserts the controller action raises
    ``PolicyNotAuthorized`` with the expected message.  Each
    ``*_overridden_policy_pass_*`` test overrides the rule with a
    project- or user-scoped check and asserts a matching context is
    allowed through to the (mocked) compute API.
    """

    def setUp(self):
        super(ServersPolicyEnforcementV21, self).setUp()
        self.useFixture(nova_fixtures.AllServicesCurrent())
        self.controller = servers.ServersController()
        self.req = fakes.HTTPRequest.blank('')
        self.image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'

    def _common_policy_check(self, rules, rule_name, func, *arg, **kwarg):
        """Install ``rules`` and assert ``func`` fails the ``rule_name`` check.

        :param rules: dict of policy rules to set for this test
        :param rule_name: the rule expected in the failure message
        :param func: controller method to invoke with ``*arg``/``**kwarg``
        """
        self.policy.set_rules(rules)
        exc = self.assertRaises(
            exception.PolicyNotAuthorized, func, *arg, **kwarg)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())

    @mock.patch.object(servers.ServersController, '_get_instance')
    def test_start_policy_failed(self, _get_instance_mock):
        """Server start is blocked when the start policy denies the caller."""
        _get_instance_mock.return_value = None
        rule_name = "os_compute_api:servers:start"
        rule = {rule_name: "project:non_fake"}
        self._common_policy_check(
            rule, rule_name, self.controller._start_server,
            self.req, FAKE_UUID, body={})

    @mock.patch.object(servers.ServersController, '_get_instance')
    def test_trigger_crash_dump_policy_failed_with_other_project(
            self, _get_instance_mock):
        """trigger_crash_dump fails a project-scoped rule for another project."""
        _get_instance_mock.return_value = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'])
        rule_name = "os_compute_api:servers:trigger_crash_dump"
        rule = {rule_name: "project_id:%(project_id)s"}
        # trigger_crash_dump is only available from microversion 2.17.
        self.req.api_version_request =\
            api_version_request.APIVersionRequest('2.17')
        # Change the project_id in request context.
        self.req.environ['nova.context'].project_id = 'other-project'
        self._common_policy_check(
            rule, rule_name, self.controller._action_trigger_crash_dump,
            self.req, FAKE_UUID, body={'trigger_crash_dump': None})

    @mock.patch('nova.compute.api.API.trigger_crash_dump')
    @mock.patch.object(servers.ServersController, '_get_instance')
    def test_trigger_crash_dump_overridden_policy_pass_with_same_project(
            self, _get_instance_mock, trigger_crash_dump_mock):
        """trigger_crash_dump passes a project-scoped override for the owner."""
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            project_id=self.req.environ['nova.context'].project_id)
        _get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:trigger_crash_dump"
        self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
        self.req.api_version_request = (
            api_version_request.APIVersionRequest('2.17'))
        self.controller._action_trigger_crash_dump(
            self.req, fakes.FAKE_UUID, body={'trigger_crash_dump': None})
        trigger_crash_dump_mock.assert_called_once_with(
            self.req.environ['nova.context'], instance)

    @mock.patch.object(servers.ServersController, '_get_instance')
    def test_trigger_crash_dump_overridden_policy_failed_with_other_user(
            self, _get_instance_mock):
        """trigger_crash_dump fails a user-scoped override for another user."""
        _get_instance_mock.return_value = (
            fake_instance.fake_instance_obj(self.req.environ['nova.context']))
        rule_name = "os_compute_api:servers:trigger_crash_dump"
        self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
        # Change the user_id in request context.
        self.req.environ['nova.context'].user_id = 'other-user'
        self.req.api_version_request = (
            api_version_request.APIVersionRequest('2.17'))
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._action_trigger_crash_dump,
                                self.req,
                                fakes.FAKE_UUID,
                                body={'trigger_crash_dump': None})
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())

    @mock.patch('nova.compute.api.API.trigger_crash_dump')
    @mock.patch.object(servers.ServersController, '_get_instance')
    def test_trigger_crash_dump_overridden_policy_pass_with_same_user(
            self, _get_instance_mock, trigger_crash_dump_mock):
        """trigger_crash_dump passes a user-scoped override for the same user."""
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            user_id=self.req.environ['nova.context'].user_id)
        _get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:trigger_crash_dump"
        self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
        self.req.api_version_request = (
            api_version_request.APIVersionRequest('2.17'))
        self.controller._action_trigger_crash_dump(
            self.req, fakes.FAKE_UUID, body={'trigger_crash_dump': None})
        trigger_crash_dump_mock.assert_called_once_with(
            self.req.environ['nova.context'], instance)

    def test_index_policy_failed(self):
        """Listing servers is blocked when the index policy denies it."""
        rule_name = "os_compute_api:servers:index"
        rule = {rule_name: "project:non_fake"}
        self._common_policy_check(
            rule, rule_name, self.controller.index, self.req)

    def test_detail_policy_failed(self):
        """Detailed listing is blocked when the detail policy denies it."""
        rule_name = "os_compute_api:servers:detail"
        rule = {rule_name: "project:non_fake"}
        self._common_policy_check(
            rule, rule_name, self.controller.detail, self.req)

    def test_detail_get_tenants_policy_failed(self):
        """all_tenants detail listing is blocked by the get_all_tenants rule."""
        req = fakes.HTTPRequest.blank('')
        req.GET["all_tenants"] = "True"
        rule_name = "os_compute_api:servers:detail:get_all_tenants"
        rule = {rule_name: "project:non_fake"}
        self._common_policy_check(
            rule, rule_name, self.controller._get_servers, req, True)

    def test_index_get_tenants_policy_failed(self):
        """all_tenants index listing is blocked by the get_all_tenants rule."""
        req = fakes.HTTPRequest.blank('')
        req.GET["all_tenants"] = "True"
        rule_name = "os_compute_api:servers:index:get_all_tenants"
        rule = {rule_name: "project:non_fake"}
        self._common_policy_check(
            rule, rule_name, self.controller._get_servers, req, False)

    @mock.patch.object(common, 'get_instance')
    def test_show_policy_failed(self, get_instance_mock):
        """Showing a server is blocked when the show policy denies it."""
        get_instance_mock.return_value = None
        rule_name = "os_compute_api:servers:show"
        rule = {rule_name: "project:non_fake"}
        self._common_policy_check(
            rule, rule_name, self.controller.show, self.req, FAKE_UUID)

    @mock.patch.object(common, 'get_instance')
    def test_delete_policy_failed_with_other_project(self, get_instance_mock):
        """Delete fails a project-scoped rule for a different project."""
        get_instance_mock.return_value = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'])
        rule_name = "os_compute_api:servers:delete"
        rule = {rule_name: "project_id:%(project_id)s"}
        # Change the project_id in request context.
        self.req.environ['nova.context'].project_id = 'other-project'
        self._common_policy_check(
            rule, rule_name, self.controller.delete, self.req, FAKE_UUID)

    @mock.patch('nova.compute.api.API.soft_delete')
    @mock.patch('nova.api.openstack.common.get_instance')
    def test_delete_overridden_policy_pass_with_same_project(self,
                                                             get_instance_mock,
                                                             soft_delete_mock):
        """Delete passes a project-scoped override for the owning project."""
        # reclaim_instance_interval > 0 routes delete to soft_delete.
        self.flags(reclaim_instance_interval=3600)
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            project_id=self.req.environ['nova.context'].project_id)
        get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:delete"
        self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
        self.controller.delete(self.req, fakes.FAKE_UUID)
        soft_delete_mock.assert_called_once_with(
            self.req.environ['nova.context'], instance)

    @mock.patch('nova.api.openstack.common.get_instance')
    def test_delete_overridden_policy_failed_with_other_user_in_same_project(
            self, get_instance_mock):
        """Delete fails a user-scoped override for a different user."""
        get_instance_mock.return_value = (
            fake_instance.fake_instance_obj(self.req.environ['nova.context']))
        rule_name = "os_compute_api:servers:delete"
        rule = {rule_name: "user_id:%(user_id)s"}
        # Change the user_id in request context.
        self.req.environ['nova.context'].user_id = 'other-user'
        self._common_policy_check(
            rule, rule_name, self.controller.delete, self.req, FAKE_UUID)

    @mock.patch('nova.compute.api.API.soft_delete')
    @mock.patch('nova.api.openstack.common.get_instance')
    def test_delete_overridden_policy_pass_with_same_user(self,
                                                          get_instance_mock,
                                                          soft_delete_mock):
        """Delete passes a user-scoped override for the same user."""
        # reclaim_instance_interval > 0 routes delete to soft_delete.
        self.flags(reclaim_instance_interval=3600)
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            user_id=self.req.environ['nova.context'].user_id)
        get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:delete"
        self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
        self.controller.delete(self.req, fakes.FAKE_UUID)
        soft_delete_mock.assert_called_once_with(
            self.req.environ['nova.context'], instance)

    @mock.patch.object(common, 'get_instance')
    def test_update_policy_failed_with_other_project(self, get_instance_mock):
        """Update fails a project-scoped rule for a different project."""
        get_instance_mock.return_value = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'])
        rule_name = "os_compute_api:servers:update"
        rule = {rule_name: "project_id:%(project_id)s"}
        body = {'server': {'name': 'server_test'}}
        # Change the project_id in request context.
        self.req.environ['nova.context'].project_id = 'other-project'
        self._common_policy_check(
            rule, rule_name, self.controller.update, self.req,
            FAKE_UUID, body=body)

    @mock.patch('nova.api.openstack.compute.views.servers.ViewBuilder.show')
    @mock.patch.object(compute_api.API, 'update_instance')
    @mock.patch.object(common, 'get_instance')
    def test_update_overridden_policy_pass_with_same_project(
            self, get_instance_mock, update_instance_mock, view_show_mock):
        """Update passes a project-scoped override for the owning project."""
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            project_id=self.req.environ['nova.context'].project_id)
        get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:update"
        self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
        body = {'server': {'name': 'server_test'}}
        self.controller.update(self.req, fakes.FAKE_UUID, body=body)

    @mock.patch.object(common, 'get_instance')
    def test_update_overridden_policy_failed_with_other_user_in_same_project(
            self, get_instance_mock):
        """Update fails a user-scoped override for a different user."""
        get_instance_mock.return_value = (
            fake_instance.fake_instance_obj(self.req.environ['nova.context']))
        rule_name = "os_compute_api:servers:update"
        rule = {rule_name: "user_id:%(user_id)s"}
        # Change the user_id in request context.
        self.req.environ['nova.context'].user_id = 'other-user'
        body = {'server': {'name': 'server_test'}}
        self._common_policy_check(
            rule, rule_name, self.controller.update, self.req,
            FAKE_UUID, body=body)

    @mock.patch('nova.api.openstack.common.'
                'instance_has_port_with_resource_request', return_value=False)
    @mock.patch('nova.api.openstack.compute.views.servers.ViewBuilder.show')
    @mock.patch.object(compute_api.API, 'update_instance')
    @mock.patch.object(common, 'get_instance')
    def test_update_overridden_policy_pass_with_same_user(self,
                                                          get_instance_mock,
                                                          update_instance_mock,
                                                          view_show_mock,
                                                          mock_port_check):
        """Update passes a user-scoped override for the same user."""
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            user_id=self.req.environ['nova.context'].user_id)
        get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:update"
        self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
        body = {'server': {'name': 'server_test'}}
        self.controller.update(self.req, fakes.FAKE_UUID, body=body)

    def test_confirm_resize_policy_failed(self):
        """confirm_resize is blocked when its policy denies the caller."""
        rule_name = "os_compute_api:servers:confirm_resize"
        rule = {rule_name: "project:non_fake"}
        body = {'server': {'name': 'server_test'}}
        self._common_policy_check(
            rule, rule_name, self.controller._action_confirm_resize,
            self.req, FAKE_UUID, body=body)

    def test_revert_resize_policy_failed(self):
        """revert_resize is blocked when its policy denies the caller."""
        rule_name = "os_compute_api:servers:revert_resize"
        rule = {rule_name: "project:non_fake"}
        body = {'server': {'name': 'server_test'}}
        self._common_policy_check(
            rule, rule_name, self.controller._action_revert_resize,
            self.req, FAKE_UUID, body=body)

    def test_reboot_policy_failed(self):
        """Reboot is blocked when its policy denies the caller."""
        rule_name = "os_compute_api:servers:reboot"
        rule = {rule_name: "project:non_fake"}
        body = {'reboot': {'type': 'HARD'}}
        self._common_policy_check(
            rule, rule_name, self.controller._action_reboot,
            self.req, FAKE_UUID, body=body)

    @mock.patch('nova.api.openstack.common.get_instance')
    def test_resize_policy_failed_with_other_project(self, get_instance_mock):
        """Resize fails a project-scoped rule for a different project."""
        get_instance_mock.return_value = (
            fake_instance.fake_instance_obj(self.req.environ['nova.context']))
        rule_name = "os_compute_api:servers:resize"
        rule = {rule_name: "project_id:%(project_id)s"}
        body = {'resize': {'flavorRef': '1'}}
        # Change the project_id in request context.
        self.req.environ['nova.context'].project_id = 'other-project'
        self._common_policy_check(
            rule, rule_name, self.controller._action_resize, self.req,
            FAKE_UUID, body=body)

    @mock.patch('nova.api.openstack.common.'
                'instance_has_port_with_resource_request', return_value=False)
    @mock.patch('nova.compute.api.API.resize')
    @mock.patch('nova.api.openstack.common.get_instance')
    def test_resize_overridden_policy_pass_with_same_project(self,
                                                             get_instance_mock,
                                                             resize_mock,
                                                             mock_post_check):
        """Resize passes a project-scoped override for the owning project."""
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            project_id=self.req.environ['nova.context'].project_id)
        get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:resize"
        self.policy.set_rules({rule_name: "project_id:%(project_id)s"})
        body = {'resize': {'flavorRef': '1'}}
        self.controller._action_resize(self.req, fakes.FAKE_UUID, body=body)
        resize_mock.assert_called_once_with(self.req.environ['nova.context'],
                                            instance, '1')

    @mock.patch('nova.api.openstack.common.get_instance')
    def test_resize_overridden_policy_failed_with_other_user_in_same_project(
            self, get_instance_mock):
        """Resize fails a user-scoped override for a different user."""
        get_instance_mock.return_value = (
            fake_instance.fake_instance_obj(self.req.environ['nova.context']))
        rule_name = "os_compute_api:servers:resize"
        rule = {rule_name: "user_id:%(user_id)s"}
        # Change the user_id in request context.
        self.req.environ['nova.context'].user_id = 'other-user'
        body = {'resize': {'flavorRef': '1'}}
        self._common_policy_check(
            rule, rule_name, self.controller._action_resize, self.req,
            FAKE_UUID, body=body)

    @mock.patch('nova.api.openstack.common.'
                'instance_has_port_with_resource_request', return_value=False)
    @mock.patch('nova.compute.api.API.resize')
    @mock.patch('nova.api.openstack.common.get_instance')
    def test_resize_overridden_policy_pass_with_same_user(self,
                                                          get_instance_mock,
                                                          resize_mock,
                                                          mock_port_check):
        """Resize passes a user-scoped override for the same user."""
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            user_id=self.req.environ['nova.context'].user_id)
        get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:resize"
        self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
        body = {'resize': {'flavorRef': '1'}}
        self.controller._action_resize(self.req, fakes.FAKE_UUID, body=body)
        resize_mock.assert_called_once_with(self.req.environ['nova.context'],
                                            instance, '1')

    @mock.patch('nova.api.openstack.common.get_instance')
    def test_rebuild_policy_failed_with_other_project(self, get_instance_mock):
        """Rebuild fails a project-scoped rule for a different project."""
        get_instance_mock.return_value = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            project_id=self.req.environ['nova.context'].project_id)
        rule_name = "os_compute_api:servers:rebuild"
        rule = {rule_name: "project_id:%(project_id)s"}
        body = {'rebuild': {'imageRef': self.image_uuid}}
        # Change the project_id in request context.
        self.req.environ['nova.context'].project_id = 'other-project'
        self._common_policy_check(
            rule, rule_name, self.controller._action_rebuild,
            self.req, FAKE_UUID, body=body)

    @mock.patch('nova.api.openstack.common.get_instance')
    def test_rebuild_overridden_policy_failed_with_other_user_in_same_project(
            self, get_instance_mock):
        """Rebuild fails a user-scoped override for a different user."""
        get_instance_mock.return_value = (
            fake_instance.fake_instance_obj(self.req.environ['nova.context']))
        rule_name = "os_compute_api:servers:rebuild"
        rule = {rule_name: "user_id:%(user_id)s"}
        body = {'rebuild': {'imageRef': self.image_uuid}}
        # Change the user_id in request context.
        self.req.environ['nova.context'].user_id = 'other-user'
        self._common_policy_check(
            rule, rule_name, self.controller._action_rebuild,
            self.req, FAKE_UUID, body=body)

    @mock.patch('nova.api.openstack.compute.views.servers.ViewBuilder.show')
    @mock.patch('nova.compute.api.API.rebuild')
    @mock.patch('nova.api.openstack.common.get_instance')
    def test_rebuild_overridden_policy_pass_with_same_user(self,
                                                           get_instance_mock,
                                                           rebuild_mock,
                                                           view_show_mock):
        """Rebuild passes a user-scoped override for the same user."""
        instance = fake_instance.fake_instance_obj(
            self.req.environ['nova.context'],
            user_id=self.req.environ['nova.context'].user_id)
        get_instance_mock.return_value = instance
        rule_name = "os_compute_api:servers:rebuild"
        self.policy.set_rules({rule_name: "user_id:%(user_id)s"})
        body = {'rebuild': {'imageRef': self.image_uuid,
                            'adminPass': 'dumpy_password'}}
        self.controller._action_rebuild(self.req, fakes.FAKE_UUID, body=body)
        rebuild_mock.assert_called_once_with(self.req.environ['nova.context'],
                                             instance,
                                             self.image_uuid,
                                             'dumpy_password')

    def test_create_image_policy_failed(self):
        """createImage is blocked when its policy denies the caller."""
        rule_name = "os_compute_api:servers:create_image"
        rule = {rule_name: "project:non_fake"}
        body = {
            'createImage': {
                'name': 'Snapshot 1',
            },
        }
        self._common_policy_check(
            rule, rule_name, self.controller._action_create_image,
            self.req, FAKE_UUID, body=body)

    @mock.patch('nova.compute.utils.is_volume_backed_instance',
                return_value=True)
    @mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
    @mock.patch.object(servers.ServersController, '_get_server')
    def test_create_vol_backed_img_snapshotting_policy_blocks_project(self,
                                                           mock_get_server,
                                                           mock_get_uuidi,
                                                           mock_is_vol_back):
        """Don't permit a snapshot of a volume backed instance if configured
        not to based on project
        """
        rule_name = "os_compute_api:servers:create_image:allow_volume_backed"
        rules = {
            rule_name: "project:non_fake",
            # The base create_image rule must pass so the volume-backed
            # sub-rule is the one that fails.
            "os_compute_api:servers:create_image": "",
        }
        body = {
            'createImage': {
                'name': 'Snapshot 1',
            },
        }
        self._common_policy_check(
            rules, rule_name, self.controller._action_create_image,
            self.req, FAKE_UUID, body=body)

    @mock.patch('nova.compute.utils.is_volume_backed_instance',
                return_value=True)
    @mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
    @mock.patch.object(servers.ServersController, '_get_server')
    def test_create_vol_backed_img_snapshotting_policy_blocks_role(self,
                                                           mock_get_server,
                                                           mock_get_uuidi,
                                                           mock_is_vol_back):
        """Don't permit a snapshot of a volume backed instance if configured
        not to based on role
        """
        rule_name = "os_compute_api:servers:create_image:allow_volume_backed"
        rules = {
            rule_name: "role:non_fake",
            # The base create_image rule must pass so the volume-backed
            # sub-rule is the one that fails.
            "os_compute_api:servers:create_image": "",
        }
        body = {
            'createImage': {
                'name': 'Snapshot 1',
            },
        }
        self._common_policy_check(
            rules, rule_name, self.controller._action_create_image,
            self.req, FAKE_UUID, body=body)

    def _create_policy_check(self, rules, rule_name):
        """Run a server-create request and assert it fails ``rule_name``.

        The request body exercises the optional create features (forced
        host AZ, block device mapping, networks) so their sub-policies
        are evaluated.
        """
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': flavor_ref,
                'availability_zone': "zone1:host1:node1",
                'block_device_mapping': [{'device_name': "/dev/sda1"}],
                'networks': [{'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}],
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        self._common_policy_check(
            rules, rule_name, self.controller.create, self.req, body=body)

    def test_create_policy_failed(self):
        """Create is blocked when the base create policy denies it."""
        rule_name = "os_compute_api:servers:create"
        rules = {rule_name: "project:non_fake"}
        self._create_policy_check(rules, rule_name)

    def test_create_forced_host_policy_failed(self):
        """Create is blocked by the forced_host sub-policy."""
        rule_name = "os_compute_api:servers:create:forced_host"
        rule = {"os_compute_api:servers:create": "@",
                rule_name: "project:non_fake"}
        self._create_policy_check(rule, rule_name)

    def test_create_attach_volume_policy_failed(self):
        """Create is blocked by the attach_volume sub-policy."""
        rule_name = "os_compute_api:servers:create:attach_volume"
        rules = {"os_compute_api:servers:create": "@",
                 "os_compute_api:servers:create:forced_host": "@",
                 rule_name: "project:non_fake"}
        self._create_policy_check(rules, rule_name)

    def test_create_attach_attach_network_policy_failed(self):
        """Create is blocked by the attach_network sub-policy."""
        rule_name = "os_compute_api:servers:create:attach_network"
        rules = {"os_compute_api:servers:create": "@",
                 "os_compute_api:servers:create:forced_host": "@",
                 "os_compute_api:servers:create:attach_volume": "@",
                 rule_name: "project:non_fake"}
        self._create_policy_check(rules, rule_name)
class ServersActionsJsonTestV239(test.NoDBTestCase):
    """Tests for server-action behavior at microversion 2.39."""

    def setUp(self):
        super(ServersActionsJsonTestV239, self).setUp()
        self.controller = servers.ServersController()
        self.req = fakes.HTTPRequest.blank('', version='2.39')

    @mock.patch.object(common, 'check_img_metadata_properties_quota')
    @mock.patch.object(common, 'get_instance')
    def test_server_create_image_no_quota_checks(self, mock_get_instance,
                                                 mock_check_quotas):
        """createImage at 2.39 must not run image-metadata quota checks."""
        # Raising from get_instance short-circuits the rest of the action;
        # we only care whether the quota check happened before that point.
        mock_get_instance.side_effect = webob.exc.HTTPNotFound
        body = {'createImage': {'name': 'Snapshot 1'}}
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_create_image,
                          self.req, FAKE_UUID, body=body)
        # Since microversion 2.39 Nova performs no quota checks for the
        # 'createImage' action (the 'image-metadata' proxy API was removed).
        mock_check_quotas.assert_not_called()
| 44.15117
| 79
| 0.583345
| 40,842
| 368,000
| 4.982396
| 0.034866
| 0.019745
| 0.014502
| 0.016512
| 0.837325
| 0.800581
| 0.761675
| 0.724636
| 0.697441
| 0.664241
| 0
| 0.018327
| 0.305348
| 368,000
| 8,334
| 80
| 44.156467
| 0.777704
| 0.040693
| 0
| 0.643211
| 0
| 0
| 0.151788
| 0.057816
| 0
| 0
| 0
| 0.00012
| 0.10271
| 1
| 0.103001
| false
| 0.007576
| 0.008013
| 0.002768
| 0.13884
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a359317e48fb59e16d0e08f54415c78e9de25ae
| 153
|
py
|
Python
|
storage/admin.py
|
Webwiznitr/Project-Lava
|
d63a861e3af57002718cf5e07686df07214295b6
|
[
"MIT"
] | null | null | null |
storage/admin.py
|
Webwiznitr/Project-Lava
|
d63a861e3af57002718cf5e07686df07214295b6
|
[
"MIT"
] | null | null | null |
storage/admin.py
|
Webwiznitr/Project-Lava
|
d63a861e3af57002718cf5e07686df07214295b6
|
[
"MIT"
] | 2
|
2020-12-31T05:47:06.000Z
|
2021-02-07T14:39:08.000Z
|
from django.contrib import admin
from .models import Task , Profile
# Register your models here.
admin.site.register(Task)
admin.site.register(Profile)
| 21.857143
| 34
| 0.797386
| 22
| 153
| 5.545455
| 0.545455
| 0.147541
| 0.278689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 153
| 6
| 35
| 25.5
| 0.903704
| 0.169935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.